diff --git a/docs/lite/api/_custom/sphinx_builder_html b/docs/lite/api/_custom/sphinx_builder_html
index 453a52fea86bbe95cd49a2090bc25b2db53c3901..f6679d50b884b4aa2ec1771a62c5a25c27da3424 100644
--- a/docs/lite/api/_custom/sphinx_builder_html
+++ b/docs/lite/api/_custom/sphinx_builder_html
@@ -1116,7 +1116,7 @@ class StandaloneHTMLBuilder(Builder):
# Add links to the Python operator interface.
if "mindspore.ops." in output:
- output = re.sub(r'(mindspore\.ops\.\w+) ', r'\1 ', output, count=0)
+ output = re.sub(r'(mindspore\.ops\.\w+) ', r'\1 ', output, count=0)
except UnicodeError:
logger.warning(__("a Unicode error occurred when rendering the page %s. "
diff --git a/docs/lite/api/source_en/api_c/lite_c_example.rst b/docs/lite/api/source_en/api_c/lite_c_example.rst
index c4588f1ec970840d314f8e45510ff1786f997a4d..c35a97a3ffddce6c4f2168c58374f28f1eaff841 100644
--- a/docs/lite/api/source_en/api_c/lite_c_example.rst
+++ b/docs/lite/api/source_en/api_c/lite_c_example.rst
@@ -4,4 +4,4 @@ Example
.. toctree::
:maxdepth: 1
- Simple Demo↗
+ Simple Demo↗
diff --git a/docs/lite/api/source_en/api_cpp/lite_cpp_example.rst b/docs/lite/api/source_en/api_cpp/lite_cpp_example.rst
index 41711025f8bb1b2b6ac69fe714c5f4a3c7612e20..be1c0c9d812ae05eaa91707d3c4f6d35183de3d3 100644
--- a/docs/lite/api/source_en/api_cpp/lite_cpp_example.rst
+++ b/docs/lite/api/source_en/api_cpp/lite_cpp_example.rst
@@ -4,6 +4,6 @@ Example
.. toctree::
:maxdepth: 1
- Simple Demo↗
- Android Application Development Based on JNI Interface↗
- High-level Usage↗
\ No newline at end of file
+ Simple Demo↗
+ Android Application Development Based on JNI Interface↗
+ High-level Usage↗
\ No newline at end of file
diff --git a/docs/lite/api/source_en/api_java/ascend_device_info.md b/docs/lite/api/source_en/api_java/ascend_device_info.md
index d103b925a1b642797c78d11ec2fa8da2f375abd5..19018c0d3b795dcd87d8547ed988a1f67dd3d165 100644
--- a/docs/lite/api/source_en/api_java/ascend_device_info.md
+++ b/docs/lite/api/source_en/api_java/ascend_device_info.md
@@ -1,6 +1,6 @@
# AscendDeviceInfo
-[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/lite/api/source_en/api_java/ascend_device_info.md)
+[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/r2.7.0/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/r2.7.0/docs/lite/api/source_en/api_java/ascend_device_info.md)
```java
import com.mindspore.config.AscendDeviceInfo;
diff --git a/docs/lite/api/source_en/api_java/class_list.md b/docs/lite/api/source_en/api_java/class_list.md
index 03d1c1b3a48342ddb2f0b120315fc21b2afda6b3..4c3912610200c881df9db83d7a56847f636d4f72 100644
--- a/docs/lite/api/source_en/api_java/class_list.md
+++ b/docs/lite/api/source_en/api_java/class_list.md
@@ -1,20 +1,20 @@
# Class List
-[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/lite/api/source_en/api_java/class_list.md)
+[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/r2.7.0/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/r2.7.0/docs/lite/api/source_en/api_java/class_list.md)
| Package | Class Name | Description | Supported At Cloud-side Inference | Supported At Device-side Inference |
| ------------------------- | ------------------------------------------------------------ | ------------------------------------------------------------ |--------|--------|
-| com.mindspore | [Model](https://www.mindspore.cn/lite/api/en/master/api_java/model.html) | Model defines model in MindSpore for compiling and running compute graph. | √ | √ |
-| com.mindspore.config | [MSContext](https://www.mindspore.cn/lite/api/en/master/api_java/mscontext.html) | MSContext is used to save the context during execution. | √ | √ |
-| com.mindspore | [MSTensor](https://www.mindspore.cn/lite/api/en/master/api_java/mstensor.html) | MSTensor defines the tensor in MindSpore. | √ | √ |
-| com.mindspore | [ModelParallelRunner](https://www.mindspore.cn/lite/api/en/master/api_java/model_parallel_runner.html) | Defines MindSpore Lite concurrent inference. | √ | ✕ |
-| com.mindspore.config | [RunnerConfig](https://www.mindspore.cn/lite/api/en/master/api_java/runner_config.html) | RunnerConfig defines configuration parameters for concurrent inference. | √ | ✕ |
-| com.mindspore | [Graph](https://www.mindspore.cn/lite/api/en/master/api_java/graph.html) | Graph defines the compute graph in MindSpore. | ✕ | √ |
-| com.mindspore.config | [CpuBindMode](https://www.mindspore.cn/lite/api/en/master/api_java/mscontext.html#cpubindmode) | CpuBindMode defines the CPU binding mode. | √ | √ |
-| com.mindspore.config | [DeviceType](https://www.mindspore.cn/lite/api/en/master/api_java/mscontext.html#devicetype) | DeviceType defines the back-end device type. | √ | √ |
-| com.mindspore.config | [DataType](https://www.mindspore.cn/lite/api/en/master/api_java/mstensor.html#datatype) | DataType defines the supported data types. | √ | √ |
-| com.mindspore.config | [Version](https://www.mindspore.cn/lite/api/en/master/api_java/version.html) | Version is used to obtain the version information of MindSpore. | ✕ | √ |
-| com.mindspore.config | [ModelType](https://www.mindspore.cn/lite/api/en/master/api_java/model.html#modeltype) | ModelType defines the model file type. | √ | √ |
-| com.mindspore.config | [AscendDeviceInfo](https://www.mindspore.cn/lite/api/en/master/api_java/ascend_device_info.html) | The AscendDeviceInfo class is used to configure MindSpore Lite Ascend device options. | √ | ✕ |
-| com.mindspore.config | [TrainCfg](https://www.mindspore.cn/lite/api/en/master/api_java/train_cfg.html) | Configuration parameters used for model training on the device. | ✕ | √ |
+| com.mindspore | [Model](https://www.mindspore.cn/lite/api/en/r2.7.0/api_java/model.html) | Model defines model in MindSpore for compiling and running compute graph. | √ | √ |
+| com.mindspore.config | [MSContext](https://www.mindspore.cn/lite/api/en/r2.7.0/api_java/mscontext.html) | MSContext is used to save the context during execution. | √ | √ |
+| com.mindspore | [MSTensor](https://www.mindspore.cn/lite/api/en/r2.7.0/api_java/mstensor.html) | MSTensor defines the tensor in MindSpore. | √ | √ |
+| com.mindspore | [ModelParallelRunner](https://www.mindspore.cn/lite/api/en/r2.7.0/api_java/model_parallel_runner.html) | Defines MindSpore Lite concurrent inference. | √ | ✕ |
+| com.mindspore.config | [RunnerConfig](https://www.mindspore.cn/lite/api/en/r2.7.0/api_java/runner_config.html) | RunnerConfig defines configuration parameters for concurrent inference. | √ | ✕ |
+| com.mindspore | [Graph](https://www.mindspore.cn/lite/api/en/r2.7.0/api_java/graph.html) | Graph defines the compute graph in MindSpore. | ✕ | √ |
+| com.mindspore.config | [CpuBindMode](https://www.mindspore.cn/lite/api/en/r2.7.0/api_java/mscontext.html#cpubindmode) | CpuBindMode defines the CPU binding mode. | √ | √ |
+| com.mindspore.config | [DeviceType](https://www.mindspore.cn/lite/api/en/r2.7.0/api_java/mscontext.html#devicetype) | DeviceType defines the back-end device type. | √ | √ |
+| com.mindspore.config | [DataType](https://www.mindspore.cn/lite/api/en/r2.7.0/api_java/mstensor.html#datatype) | DataType defines the supported data types. | √ | √ |
+| com.mindspore.config | [Version](https://www.mindspore.cn/lite/api/en/r2.7.0/api_java/version.html) | Version is used to obtain the version information of MindSpore. | ✕ | √ |
+| com.mindspore.config | [ModelType](https://www.mindspore.cn/lite/api/en/r2.7.0/api_java/model.html#modeltype) | ModelType defines the model file type. | √ | √ |
+| com.mindspore.config | [AscendDeviceInfo](https://www.mindspore.cn/lite/api/en/r2.7.0/api_java/ascend_device_info.html) | The AscendDeviceInfo class is used to configure MindSpore Lite Ascend device options. | √ | ✕ |
+| com.mindspore.config | [TrainCfg](https://www.mindspore.cn/lite/api/en/r2.7.0/api_java/train_cfg.html) | Configuration parameters used for model training on the device. | ✕ | √ |
diff --git a/docs/lite/api/source_en/api_java/graph.md b/docs/lite/api/source_en/api_java/graph.md
index b5a7b2b8bd5f5c7503f247651d22d3cedab63023..ff1f5a1a664d6bb6f5681aff8e28dc32e039dee4 100644
--- a/docs/lite/api/source_en/api_java/graph.md
+++ b/docs/lite/api/source_en/api_java/graph.md
@@ -1,6 +1,6 @@
# Graph
-[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/lite/api/source_en/api_java/graph.md)
+[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/r2.7.0/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/r2.7.0/docs/lite/api/source_en/api_java/graph.md)
```java
import com.mindspore.Graph;
diff --git a/docs/lite/api/source_en/api_java/lite_java_example.rst b/docs/lite/api/source_en/api_java/lite_java_example.rst
index 01f76f0495b7394007f45abde2213d365095ba6e..38e9ab333fc71684088aefad6a6717817d26d8e5 100644
--- a/docs/lite/api/source_en/api_java/lite_java_example.rst
+++ b/docs/lite/api/source_en/api_java/lite_java_example.rst
@@ -4,6 +4,6 @@ Example
.. toctree::
:maxdepth: 1
- Simple Demo↗
- Android Application Development Based on Java Interface↗
- High-level Usage↗
\ No newline at end of file
+ Simple Demo↗
+ Android Application Development Based on Java Interface↗
+ High-level Usage↗
\ No newline at end of file
diff --git a/docs/lite/api/source_en/api_java/model.md b/docs/lite/api/source_en/api_java/model.md
index 8a170203a5479356d44fc770c6ddc0adf10bb38a..e6b6613ca8f198caa139127e70cc71afa3b88859 100644
--- a/docs/lite/api/source_en/api_java/model.md
+++ b/docs/lite/api/source_en/api_java/model.md
@@ -1,6 +1,6 @@
# Model
-[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/lite/api/source_en/api_java/model.md)
+[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/r2.7.0/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/r2.7.0/docs/lite/api/source_en/api_java/model.md)
```java
import com.mindspore.model;
diff --git a/docs/lite/api/source_en/api_java/model_parallel_runner.md b/docs/lite/api/source_en/api_java/model_parallel_runner.md
index 1525eaf8bf9f70b5e0c30e9a3cfcf12a53d22987..05f29c8ee8006a0713c4a64889c5338ea1c9b9e6 100644
--- a/docs/lite/api/source_en/api_java/model_parallel_runner.md
+++ b/docs/lite/api/source_en/api_java/model_parallel_runner.md
@@ -1,6 +1,6 @@
# ModelParallelRunner
-[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/lite/api/source_en/api_java/model_parallel_runner.md)
+[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/r2.7.0/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/r2.7.0/docs/lite/api/source_en/api_java/model_parallel_runner.md)
```java
import com.mindspore.config.RunnerConfig;
diff --git a/docs/lite/api/source_en/api_java/mscontext.md b/docs/lite/api/source_en/api_java/mscontext.md
index 1f0b87b9035c02a5e55f825106c3e17ab8e66377..b5a328b42b77eaa3f761d310628a62c4c5dbfcc4 100644
--- a/docs/lite/api/source_en/api_java/mscontext.md
+++ b/docs/lite/api/source_en/api_java/mscontext.md
@@ -1,6 +1,6 @@
# MSContext
-[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/lite/api/source_en/api_java/mscontext.md)
+[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/r2.7.0/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/r2.7.0/docs/lite/api/source_en/api_java/mscontext.md)
```java
import com.mindspore.config.MSContext;
@@ -54,7 +54,7 @@ Initialize MSContext for cpu.
- Parameters
- `threadNum`: Thread number config for thread pool.
- - `cpuBindMode`: A **[CpuBindMode](https://gitee.com/mindspore/mindspore-lite/blob/master/mindspore-lite/java/src/main/java/com/mindspore/config/CpuBindMode.java)** **enum** variable.
+ - `cpuBindMode`: A **[CpuBindMode](https://gitee.com/mindspore/mindspore-lite/blob/r2.7.0/mindspore-lite/java/src/main/java/com/mindspore/config/CpuBindMode.java)** **enum** variable.
- Returns
@@ -69,7 +69,7 @@ Initialize MSContext.
- Parameters
- `threadNum`: Thread number config for thread pool.
- - `cpuBindMode`: A **[CpuBindMode](https://gitee.com/mindspore/mindspore-lite/blob/master/mindspore-lite/java/src/main/java/com/mindspore/config/CpuBindMode.java)** **enum** variable.
+ - `cpuBindMode`: A **[CpuBindMode](https://gitee.com/mindspore/mindspore-lite/blob/r2.7.0/mindspore-lite/java/src/main/java/com/mindspore/config/CpuBindMode.java)** **enum** variable.
- `isEnableParallel`: Is enable parallel in different device.
- Returns
@@ -86,7 +86,7 @@ Add device info for mscontext.
- Parameters
- - `deviceType`: A **[DeviceType](https://gitee.com/mindspore/mindspore-lite/blob/master/mindspore-lite/java/src/main/java/com/mindspore/config/DeviceType.java)** **enum** type.
+ - `deviceType`: A **[DeviceType](https://gitee.com/mindspore/mindspore-lite/blob/r2.7.0/mindspore-lite/java/src/main/java/com/mindspore/config/DeviceType.java)** **enum** type.
- `isEnableFloat16`: Is enable fp16.
- Returns
@@ -101,7 +101,7 @@ Add device info for mscontext.
- Parameters
- - `deviceType`: A **[DeviceType](https://gitee.com/mindspore/mindspore-lite/blob/master/mindspore-lite/java/src/main/java/com/mindspore/config/DeviceType.java)** **enum** type.
+ - `deviceType`: A **[DeviceType](https://gitee.com/mindspore/mindspore-lite/blob/r2.7.0/mindspore-lite/java/src/main/java/com/mindspore/config/DeviceType.java)** **enum** type.
- `isEnableFloat16`: is enable fp16.
- `npuFreq`: Npu frequency.
diff --git a/docs/lite/api/source_en/api_java/mstensor.md b/docs/lite/api/source_en/api_java/mstensor.md
index bd24f364d4387f69efd74b003d36930ad1ef8bf8..84f4e3455fe18280289b48a5786a2c500600ca66 100644
--- a/docs/lite/api/source_en/api_java/mstensor.md
+++ b/docs/lite/api/source_en/api_java/mstensor.md
@@ -1,6 +1,6 @@
# MSTensor
-[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/lite/api/source_en/api_java/mstensor.md)
+[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/r2.7.0/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/r2.7.0/docs/lite/api/source_en/api_java/mstensor.md)
```java
import com.mindspore.MSTensor;
@@ -86,7 +86,7 @@ Get the shape of the MindSpore MSTensor.
public int getDataType()
```
-DataType is defined in [com.mindspore.DataType](https://gitee.com/mindspore/mindspore-lite/blob/master/mindspore-lite/java/src/main/java/com/mindspore/config/DataType.java).
+DataType is defined in [com.mindspore.DataType](https://gitee.com/mindspore/mindspore-lite/blob/r2.7.0/mindspore-lite/java/src/main/java/com/mindspore/config/DataType.java).
- Returns
diff --git a/docs/lite/api/source_en/api_java/runner_config.md b/docs/lite/api/source_en/api_java/runner_config.md
index 052ac6dc933e6b37245370e5c94b270b79a96d6d..0822b7f7f04aabb5e88d6ada4b759f53516f4829 100644
--- a/docs/lite/api/source_en/api_java/runner_config.md
+++ b/docs/lite/api/source_en/api_java/runner_config.md
@@ -1,6 +1,6 @@
# RunnerConfig
-[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/lite/api/source_en/api_java/runner_config.md)
+[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/r2.7.0/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/r2.7.0/docs/lite/api/source_en/api_java/runner_config.md)
RunnerConfig defines the configuration parameters of MindSpore Lite concurrent inference.
diff --git a/docs/lite/api/source_en/api_java/train_cfg.md b/docs/lite/api/source_en/api_java/train_cfg.md
index 2b044eb479975fb39facb91b6e63ec1106c5096d..4f6f9d8b9fed5e1e6939dca079c4c772171b2754 100644
--- a/docs/lite/api/source_en/api_java/train_cfg.md
+++ b/docs/lite/api/source_en/api_java/train_cfg.md
@@ -1,6 +1,6 @@
# TrainCfg
-[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/lite/api/source_en/api_java/train_cfg.md)
+[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/r2.7.0/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/r2.7.0/docs/lite/api/source_en/api_java/train_cfg.md)
```java
import com.mindspore.config.TrainCfg;
diff --git a/docs/lite/api/source_en/api_java/version.md b/docs/lite/api/source_en/api_java/version.md
index 99903c00f88851d726b3c6e050568927be42da82..d09235b27f06ab6d106f5bad462a5bd5457638c0 100644
--- a/docs/lite/api/source_en/api_java/version.md
+++ b/docs/lite/api/source_en/api_java/version.md
@@ -1,6 +1,6 @@
# Version
-[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/master/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/master/docs/lite/api/source_en/api_java/version.md)
+[![View Source On Gitee](https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/website-images/r2.7.0/resource/_static/logo_source_en.svg)](https://gitee.com/mindspore/docs/blob/r2.7.0/docs/lite/api/source_en/api_java/version.md)
```java
import com.mindspore.config.Version;
diff --git a/docs/lite/api/source_en/index.rst b/docs/lite/api/source_en/index.rst
index 5c486f06cc88dd6c4058a7f792c5077af55b121f..065a29fc7f81480580240547861a7aeb658ee28a 100644
--- a/docs/lite/api/source_en/index.rst
+++ b/docs/lite/api/source_en/index.rst
@@ -12,21 +12,21 @@ Summary of MindSpore Lite API support
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
| Class | Description | C++ API | Python API |
+=========================================================+===================================================================================================================================+==========================================================================================================================================================================================================================+============================================================================================================================================================================================================================================================================================================================================================================+
-| Context | Set the number of threads at runtime | void SetThreadNum(int32_t thread_num) | `Context.cpu.thread_num `__ |
+| Context | Set the number of threads at runtime | void SetThreadNum(int32_t thread_num) | `Context.cpu.thread_num `__ |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
-| Context | Get the current thread number setting | int32_t GetThreadNum() const | `Context.cpu.thread_num `__ |
+| Context | Get the current thread number setting | int32_t GetThreadNum() const | `Context.cpu.thread_num `__ |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
-| Context | Set the parallel number of operators at runtime | void SetInterOpParallelNum(int32_t parallel_num) | `Context.cpu.inter_op_parallel_num `__ |
+| Context | Set the parallel number of operators at runtime | void SetInterOpParallelNum(int32_t parallel_num) | `Context.cpu.inter_op_parallel_num `__ |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
-| Context | Get the current operators parallel number setting | int32_t GetInterOpParallelNum() const | `Context.cpu.inter_op_parallel_num `__ |
+| Context | Get the current operators parallel number setting | int32_t GetInterOpParallelNum() const | `Context.cpu.inter_op_parallel_num `__ |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
-| Context | Set the thread affinity to CPU cores | void SetThreadAffinity(int mode) | `Context.cpu.thread_affinity_mode `__ |
+| Context | Set the thread affinity to CPU cores | void SetThreadAffinity(int mode) | `Context.cpu.thread_affinity_mode `__ |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
-| Context | Get the thread affinity of CPU cores | int GetThreadAffinityMode() const | `Context.cpu.thread_affinity_mode `__ |
+| Context | Get the thread affinity of CPU cores | int GetThreadAffinityMode() const | `Context.cpu.thread_affinity_mode `__ |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
-| Context | Set the thread lists to CPU cores | void SetThreadAffinity(const std::vector &core_list) | `Context.cpu.thread_affinity_core_list `__ |
+| Context | Set the thread lists to CPU cores | void SetThreadAffinity(const std::vector &core_list) | `Context.cpu.thread_affinity_core_list `__ |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
-| Context | Get the thread lists of CPU cores | std::vector GetThreadAffinityCoreList() const | `Context.cpu.thread_affinity_core_list `__ |
+| Context | Get the thread lists of CPU cores | std::vector GetThreadAffinityCoreList() const | `Context.cpu.thread_affinity_core_list `__ |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
| Context | Set the status whether to perform model inference or training in parallel | void SetEnableParallel(bool is_parallel) | |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
@@ -44,7 +44,7 @@ Summary of MindSpore Lite API support
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
| Context | Get the mode of the model run | bool GetMultiModalHW() const | |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
-| Context | Get a mutable reference of DeviceInfoContext vector in this context | std::vector> &MutableDeviceInfo() | Wrapped in `Context.target `__ |
+| Context | Get a mutable reference of DeviceInfoContext vector in this context | std::vector> &MutableDeviceInfo() | Wrapped in `Context.target `__ |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
| DeviceInfoContext | Get the type of this DeviceInfoContext | enum DeviceType GetDeviceType() const | |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
@@ -62,29 +62,29 @@ Summary of MindSpore Lite API support
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
| DeviceInfoContext | obtain memory allocator | std::shared_ptr GetAllocator() const | |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
-| CPUDeviceInfo | Get the type of this DeviceInfoContext | enum DeviceType GetDeviceType() const | `context.cpu `__ |
+| CPUDeviceInfo | Get the type of this DeviceInfoContext | enum DeviceType GetDeviceType() const | `context.cpu `__ |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
-| CPUDeviceInfo | Set enables to perform the float16 inference | void SetEnableFP16(bool is_fp16) | `Context.cpu.precision_mode `__ |
+| CPUDeviceInfo | Set enables to perform the float16 inference | void SetEnableFP16(bool is_fp16) | `Context.cpu.precision_mode `__ |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
-| CPUDeviceInfo | Get enables to perform the float16 inference | bool GetEnableFP16() const | `Context.cpu.precision_mode `__ |
+| CPUDeviceInfo | Get enables to perform the float16 inference | bool GetEnableFP16() const | `Context.cpu.precision_mode `__ |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
-| GPUDeviceInfo | Get the type of this DeviceInfoContext | enum DeviceType GetDeviceType() const | `Context.gpu `__ |
+| GPUDeviceInfo | Get the type of this DeviceInfoContext | enum DeviceType GetDeviceType() const | `Context.gpu `__ |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
-| GPUDeviceInfo | Set device id | void SetDeviceID(uint32_t device_id) | `Context.gpu.device_id `__ |
+| GPUDeviceInfo | Set device id | void SetDeviceID(uint32_t device_id) | `Context.gpu.device_id `__ |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
-| GPUDeviceInfo | Get the device id | uint32_t GetDeviceID() const | `Context.gpu.device_id `__ |
+| GPUDeviceInfo | Get the device id | uint32_t GetDeviceID() const | `Context.gpu.device_id `__ |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
-| GPUDeviceInfo | Get the distribution rank id | int GetRankID() const | `Context.gpu.rank_id `__ |
+| GPUDeviceInfo | Get the distribution rank id | int GetRankID() const | `Context.gpu.rank_id `__ |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
-| GPUDeviceInfo | Get the distribution group size | int GetGroupSize() const | `Context.gpu.group_size `__ |
+| GPUDeviceInfo | Get the distribution group size | int GetGroupSize() const | `Context.gpu.group_size `__ |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
| GPUDeviceInfo | Set the precision mode | void SetPrecisionMode(const std::string &precision_mode) | |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
| GPUDeviceInfo | Get the precision mode | std::string GetPrecisionMode() const | |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
-| GPUDeviceInfo | Set enables to perform the float16 inference | void SetEnableFP16(bool is_fp16) | `Context.gpu.precision_mode `__ |
+| GPUDeviceInfo | Set enables to perform the float16 inference | void SetEnableFP16(bool is_fp16) | `Context.gpu.precision_mode `__ |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
-| GPUDeviceInfo | Get enables to perform the float16 inference | bool GetEnableFP16() const | `Context.gpu.precision_mode `__ |
+| GPUDeviceInfo | Get enables to perform the float16 inference | bool GetEnableFP16() const | `Context.gpu.precision_mode `__ |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
| GPUDeviceInfo | Set enables to sharing mem with OpenGL | void SetEnableGLTexture(bool is_enable_gl_texture) | |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
@@ -98,11 +98,11 @@ Summary of MindSpore Lite API support
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
| GPUDeviceInfo | Get current OpenGL display | void \*GetGLDisplay() const | |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
-| AscendDeviceInfo | Get the type of this DeviceInfoContext | enum DeviceType GetDeviceType() const | `Context.ascend `__ |
+| AscendDeviceInfo | Get the type of this DeviceInfoContext | enum DeviceType GetDeviceType() const | `Context.ascend `__ |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
-| AscendDeviceInfo | Set device id | void SetDeviceID(uint32_t device_id) | `Context.ascend.device_id `__ |
+| AscendDeviceInfo | Set device id | void SetDeviceID(uint32_t device_id) | `Context.ascend.device_id `__ |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
-| AscendDeviceInfo | Get the device id | uint32_t GetDeviceID() const | `Context.ascend.device_id `__ |
+| AscendDeviceInfo | Get the device id | uint32_t GetDeviceID() const | `Context.ascend.device_id `__ |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
| AscendDeviceInfo | Set AIPP configuration file path | void SetInsertOpConfigPath(const std::string &cfg_path) | |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
@@ -132,9 +132,9 @@ Summary of MindSpore Lite API support
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
| AscendDeviceInfo | Get type of model outputs | enum DataType GetOutputType() const | |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
-| AscendDeviceInfo | Set precision mode of model | void SetPrecisionMode(const std::string &precision_mode) | `Context.ascend.precision_mode `__ |
+| AscendDeviceInfo | Set precision mode of model | void SetPrecisionMode(const std::string &precision_mode) | `Context.ascend.precision_mode `__ |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
-| AscendDeviceInfo | Get precision mode of model | std::string GetPrecisionMode() const | `Context.ascend.precision_mode `__ |
+| AscendDeviceInfo | Get precision mode of model | std::string GetPrecisionMode() const | `Context.ascend.precision_mode `__ |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
| AscendDeviceInfo | Set op select implementation mode | void SetOpSelectImplMode(const std::string &op_select_impl_mode) | |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
@@ -160,7 +160,7 @@ Summary of MindSpore Lite API support
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
| Model | Build a model from model buffer so that it can run on a device | Status Build(const void \*model_data, size_t data_size, ModelType model_type, const std::shared_ptr &model_context = nullptr) | |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
-| Model | Load and build a model from model buffer so that it can run on a device | Status Build(const std::string &model_path, ModelType model_type, const std::shared_ptr &model_context = nullptr) | `Model.build_from_file `__ |
+| Model | Load and build a model from model buffer so that it can run on a device | Status Build(const std::string &model_path, ModelType model_type, const std::shared_ptr &model_context = nullptr) | `Model.build_from_file `__ |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
| Model | Build a model from model buffer so that it can run on a device | Status Build(const void \*model_data, size_t data_size, ModelType model_type, const std::shared_ptr &model_context, const Key &dec_key, const std::string &dec_mode, const std::string &cropto_lib_path) | |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
@@ -172,11 +172,11 @@ Summary of MindSpore Lite API support
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
| Model | Build a Transfer Learning model where the backbone weights are fixed and the head weights are trainable | Status BuildTransferLearning(GraphCell backbone, GraphCell head, const std::shared_ptr &context, const std::shared_ptr &train_cfg = nullptr) | |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
-| Model | Resize the shapes of inputs | Status Resize(const std::vector &inputs, const std::vector > &dims) | `Model.resize `__ |
+| Model | Resize the shapes of inputs | Status Resize(const std::vector &inputs, const std::vector > &dims) | `Model.resize `__ |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
| Model | Change the size and or content of weight tensors | Status UpdateWeights(const std::vector &new_weights) | |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
-| Model | Inference model API | Status Predict(const std::vector &inputs, std::vector \*outputs, const MSKernelCallBack &before = nullptr, const MSKernelCallBack &after = nullptr) | `Model.predict `__ |
+| Model | Inference model API | Status Predict(const std::vector &inputs, std::vector \*outputs, const MSKernelCallBack &before = nullptr, const MSKernelCallBack &after = nullptr) | `Model.predict `__ |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
| Model | Inference model API only with callback | Status Predict(const MSKernelCallBack &before = nullptr, const MSKernelCallBack &after = nullptr) | |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
@@ -188,11 +188,11 @@ Summary of MindSpore Lite API support
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
| Model | Check if data preprocess exists in model | bool HasPreprocess() | |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
-| Model | Load config file | Status LoadConfig(const std::string &config_path) | Wrapped in the parameter `config_path` of `Model.build_from_file `__ |
+| Model | Load config file | Status LoadConfig(const std::string &config_path) | Wrapped in the parameter `config_path` of `Model.build_from_file `__ |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
| Model | Update config | Status UpdateConfig(const std::string §ion, const std::pair &config) | |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
-| Model | Obtains all input tensors of the model | std::vector GetInputs() | `Model.get_inputs `__ |
+| Model | Obtains all input tensors of the model | std::vector GetInputs() | `Model.get_inputs `__ |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
| Model | Obtains the input tensor of the model by name | MSTensor GetInputByTensorName(const std::string &tensor_name) | |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
@@ -220,7 +220,7 @@ Summary of MindSpore Lite API support
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
| Model | Accessor to TrainLoop metric objects | std::vector GetMetrics() | |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
-| Model | Obtains all output tensors of the model | std::vector GetOutputs() | Wrapped in the return value of `Model.predict `__ |
+| Model | Obtains all output tensors of the model | std::vector GetOutputs() | Wrapped in the return value of `Model.predict `__ |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
| Model | Obtains names of all output tensors of the model | std::vector GetOutputTensorNames() | |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
@@ -240,33 +240,33 @@ Summary of MindSpore Lite API support
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
| Model | Check if the device supports the model | static bool CheckModelSupport(enum DeviceType device_type, ModelType model_type) | |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
-| RunnerConfig | Set the number of workers at runtime | void SetWorkersNum(int32_t workers_num) | `Context.parallel.workers_num `__ |
+| RunnerConfig | Set the number of workers at runtime | void SetWorkersNum(int32_t workers_num) | `Context.parallel.workers_num `__ |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
-| RunnerConfig | Get the current operators parallel workers number setting | int32_t GetWorkersNum() const | `Context.parallel.workers_num `__ |
+| RunnerConfig | Get the current operators parallel workers number setting | int32_t GetWorkersNum() const | `Context.parallel.workers_num `__ |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
-| RunnerConfig | Set the context at runtime | void SetContext(const std::shared_ptr &context) | Wrapped in `Context.parallel `__ |
+| RunnerConfig | Set the context at runtime | void SetContext(const std::shared_ptr &context) | Wrapped in `Context.parallel `__ |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
-| RunnerConfig | Get the current context setting | std::shared_ptr GetContext() const | Wrapped in `Context.parallel `__ |
+| RunnerConfig | Get the current context setting | std::shared_ptr GetContext() const | Wrapped in `Context.parallel `__ |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
-| RunnerConfig | Set the config before runtime | void SetConfigInfo(const std::string §ion, const std::map &config) | `Context.parallel.config_info `__ |
+| RunnerConfig | Set the config before runtime | void SetConfigInfo(const std::string §ion, const std::map &config) | `Context.parallel.config_info `__ |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
-| RunnerConfig | Get the current config setting | std::map> GetConfigInfo() const | `Context.parallel.config_info `__ |
+| RunnerConfig | Get the current config setting | std::map> GetConfigInfo() const | `Context.parallel.config_info `__ |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
-| RunnerConfig | Set the config path before runtime | void SetConfigPath(const std::string &config_path) | `Context.parallel.config_path `__ |
+| RunnerConfig | Set the config path before runtime | void SetConfigPath(const std::string &config_path) | `Context.parallel.config_path `__ |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
-| RunnerConfig | Get the current config path | std::string GetConfigPath() const | `Context.parallel.config_path `__ |
+| RunnerConfig | Get the current config path | std::string GetConfigPath() const | `Context.parallel.config_path `__ |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
-| ModelParallelRunner | build a model parallel runner from model path so that it can run on a device | Status Init(const std::string &model_path, const std::shared_ptr &runner_config = nullptr) | `Model.parallel_runner.build_from_file `__ |
+| ModelParallelRunner | build a model parallel runner from model path so that it can run on a device | Status Init(const std::string &model_path, const std::shared_ptr &runner_config = nullptr) | `Model.parallel_runner.build_from_file `__ |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
| ModelParallelRunner | build a model parallel runner from model buffer so that it can run on a device | Status Init(const void \*model_data, const size_t data_size, const std::shared_ptr &runner_config = nullptr) | |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
-| ModelParallelRunner | Obtains all input tensors information of the model | std::vector GetInputs() | `Model.parallel_runner.get_inputs `__ |
+| ModelParallelRunner | Obtains all input tensors information of the model | std::vector GetInputs() | `Model.parallel_runner.get_inputs `__ |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
-| ModelParallelRunner | Obtains all output tensors information of the model | std::vector GetOutputs() | Wrapped in the return value of `Model.parallel_runner.predict `__ |
+| ModelParallelRunner | Obtains all output tensors information of the model | std::vector GetOutputs() | Wrapped in the return value of `Model.parallel_runner.predict `__ |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
-| ModelParallelRunner | Inference ModelParallelRunner | Status Predict(const std::vector &inputs, std::vector \*outputs,const MSKernelCallBack &before = nullptr, const MSKernelCallBack &after = nullptr) | `Model.parallel_runner.predict `__ |
+| ModelParallelRunner | Inference ModelParallelRunner | Status Predict(const std::vector &inputs, std::vector \*outputs,const MSKernelCallBack &before = nullptr, const MSKernelCallBack &after = nullptr) | `Model.parallel_runner.predict `__ |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
-| MSTensor | Creates a MSTensor object, whose data need to be copied before accessed by Model | static inline MSTensor \*CreateTensor(const std::string &name, DataType type, const std::vector &shape, const void \*data, size_t data_len) noexcept | `Tensor `__ |
+| MSTensor | Creates a MSTensor object, whose data need to be copied before accessed by Model | static inline MSTensor \*CreateTensor(const std::string &name, DataType type, const std::vector &shape, const void \*data, size_t data_len) noexcept | `Tensor `__ |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
| MSTensor | Creates a MSTensor object, whose data can be directly accessed by Model | static inline MSTensor \*CreateRefTensor(const std::string &name, DataType type, const std::vector &shape, const void \*data, size_t data_len, bool own_data = true) noexcept | |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
@@ -280,19 +280,19 @@ Summary of MindSpore Lite API support
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
| MSTensor | Destroy an object created by `Clone` , `StringsToTensor` , `CreateRefTensor` or `CreateTensor` | static void DestroyTensorPtr(MSTensor \*tensor) noexcept | |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
-| MSTensor | Obtains the name of the MSTensor | std::string Name() const | `Tensor.name `__ |
+| MSTensor | Obtains the name of the MSTensor | std::string Name() const | `Tensor.name `__ |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
-| MSTensor | Obtains the data type of the MSTensor | enum DataType DataType() const | `Tensor.dtype `__ |
+| MSTensor | Obtains the data type of the MSTensor | enum DataType DataType() const | `Tensor.dtype `__ |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
-| MSTensor | Obtains the shape of the MSTensor | const std::vector &Shape() const | `Tensor.shape `__ |
+| MSTensor | Obtains the shape of the MSTensor | const std::vector &Shape() const | `Tensor.shape `__ |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
-| MSTensor | Obtains the number of elements of the MSTensor | int64_t ElementNum() const | `Tensor.element_num `__ |
+| MSTensor | Obtains the number of elements of the MSTensor | int64_t ElementNum() const | `Tensor.element_num `__ |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
| MSTensor | Obtains a shared pointer to the copy of data of the MSTensor | std::shared_ptr Data() const | |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
-| MSTensor | Obtains the pointer to the data of the MSTensor | void \*MutableData() | Wrapped in `Tensor.get_data_to_numpy `__ and `Tensor.set_data_from_numpy `__ |
+| MSTensor | Obtains the pointer to the data of the MSTensor | void \*MutableData() | Wrapped in `Tensor.get_data_to_numpy `__ and `Tensor.set_data_from_numpy `__ |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
-| MSTensor | Obtains the length of the data of the MSTensor, in bytes | size_t DataSize() const | `Tensor.data_size `__ |
+| MSTensor | Obtains the length of the data of the MSTensor, in bytes | size_t DataSize() const | `Tensor.data_size `__ |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
| MSTensor | Get whether the MSTensor data is const data | bool IsConst() const | |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
@@ -308,19 +308,19 @@ Summary of MindSpore Lite API support
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
| MSTensor | Get the boolean value that indicates whether the MSTensor not equals tensor | bool operator!=(const MSTensor &tensor) const | |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
-| MSTensor | Set the shape of for the MSTensor | void SetShape(const std::vector &shape) | `Tensor.shape `__ |
+| MSTensor | Set the shape for the MSTensor | void SetShape(const std::vector &shape) | `Tensor.shape `__ |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
-| MSTensor | Set the data type for the MSTensor | void SetDataType(enum DataType data_type) | `Tensor.dtype `__ |
+| MSTensor | Set the data type for the MSTensor | void SetDataType(enum DataType data_type) | `Tensor.dtype `__ |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
-| MSTensor | Set the name for the MSTensor | void SetTensorName(const std::string &name) | `Tensor.name `__ |
+| MSTensor | Set the name for the MSTensor | void SetTensorName(const std::string &name) | `Tensor.name `__ |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
| MSTensor | Set the Allocator for the MSTensor | void SetAllocator(std::shared_ptr allocator) | |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
| MSTensor | Obtain the Allocator of the MSTensor | std::shared_ptr allocator() const | |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
-| MSTensor | Set the format for the MSTensor | void SetFormat(mindspore::Format format) | `Tensor.format `__ |
+| MSTensor | Set the format for the MSTensor | void SetFormat(mindspore::Format format) | `Tensor.format `__ |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
-| MSTensor | Obtain the format of the MSTensor | mindspore::Format format() const | `Tensor.format `__ |
+| MSTensor | Obtain the format of the MSTensor | mindspore::Format format() const | `Tensor.format `__ |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
| MSTensor | Set the data for the MSTensor | void SetData(void \*data, bool own_data = true) | |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
@@ -332,15 +332,15 @@ Summary of MindSpore Lite API support
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
| MSTensor | Set the quantization parameters for the MSTensor | void SetQuantParams(std::vector quant_params) | |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
-| ModelGroup | Construct a ModelGroup object and indicate shared workspace memory or shared weight memory, with default shared workspace memory | ModelGroup(ModelGroupFlag flags = ModelGroupFlag::kShareWorkspace) | `ModelGroup `__ |
+| ModelGroup | Construct a ModelGroup object and indicate shared workspace memory or shared weight memory, with default shared workspace memory | ModelGroup(ModelGroupFlag flags = ModelGroupFlag::kShareWorkspace) | `ModelGroup `__ |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
-| ModelGroup | When sharing weight memory, add model objects that require shared weight memory | Status AddModel(const std::vector &model_list) | `ModelGroup.add_model `__ |
+| ModelGroup | When sharing weight memory, add model objects that require shared weight memory | Status AddModel(const std::vector &model_list) | `ModelGroup.add_model `__ |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
-| ModelGroup | When sharing workspace memory, add the path of the model that requires shared workspace memory | Status AddModel(const std::vector &model_path_list) | `ModelGroup.add_model `__ |
+| ModelGroup | When sharing workspace memory, add the path of the model that requires shared workspace memory | Status AddModel(const std::vector &model_path_list) | `ModelGroup.add_model `__ |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
| ModelGroup | When sharing workspace memory, add a model buffer that requires shared workspace memory | Status AddModel(const std::vector> &model_buff_list) | |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
-| ModelGroup | When sharing workspace memory, calculate the maximum workspace memory size | Status CalMaxSizeOfWorkspace(ModelType model_type, const std::shared_ptr &ms_context) | `ModelGroup.cal_max_size_of_workspace `__ |
+| ModelGroup | When sharing workspace memory, calculate the maximum workspace memory size | Status CalMaxSizeOfWorkspace(ModelType model_type, const std::shared_ptr &ms_context) | `ModelGroup.cal_max_size_of_workspace `__ |
+---------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
diff --git a/docs/lite/api/source_zh_cn/api_c/context_c.md b/docs/lite/api/source_zh_cn/api_c/context_c.md
index 5e265507a5a4c1cc37fd482fbc57e7a65742b397..561b3623383fa085e32bdf2474d19c0686895ce3 100644
--- a/docs/lite/api/source_zh_cn/api_c/context_c.md
+++ b/docs/lite/api/source_zh_cn/api_c/context_c.md
@@ -1,6 +1,6 @@
# context_c
-[](https://gitee.com/mindspore/docs/blob/master/docs/lite/api/source_zh_cn/api_c/context_c.md)
+[](https://gitee.com/mindspore/docs/blob/r2.7.0/docs/lite/api/source_zh_cn/api_c/context_c.md)
```c
#include
@@ -198,7 +198,7 @@ MSDeviceInfoHandle MSDeviceInfoCreate(MSDeviceType device_type)
新建运行设备信息,若创建失败则会返回`nullptr`,并日志中输出信息。
- 参数
- - `device_type`: 设备类型,具体见[MSDeviceType](https://www.mindspore.cn/lite/api/zh-CN/master/api_c/types_c.html#msdevicetype)。
+ - `device_type`: 设备类型,具体见[MSDeviceType](https://www.mindspore.cn/lite/api/zh-CN/r2.7.0/api_c/types_c.html#msdevicetype)。
- 返回值
diff --git a/docs/lite/api/source_zh_cn/api_c/data_type_c.md b/docs/lite/api/source_zh_cn/api_c/data_type_c.md
index ac6c4b6384887fd3d36aa458e855831904af7d07..a1d3e81f11939f435b8deeaecea8468f29d02a6f 100644
--- a/docs/lite/api/source_zh_cn/api_c/data_type_c.md
+++ b/docs/lite/api/source_zh_cn/api_c/data_type_c.md
@@ -1,6 +1,6 @@
# data_type_c
-[](https://gitee.com/mindspore/docs/blob/master/docs/lite/api/source_zh_cn/api_c/data_type_c.md)
+[](https://gitee.com/mindspore/docs/blob/r2.7.0/docs/lite/api/source_zh_cn/api_c/data_type_c.md)
```C
#include
diff --git a/docs/lite/api/source_zh_cn/api_c/format_c.md b/docs/lite/api/source_zh_cn/api_c/format_c.md
index 3b57375f73ee2225e474b006a018edc8a550f79b..684ea81c68b866794242e9951a4df5f1b9661891 100644
--- a/docs/lite/api/source_zh_cn/api_c/format_c.md
+++ b/docs/lite/api/source_zh_cn/api_c/format_c.md
@@ -1,6 +1,6 @@
# format_c
-[](https://gitee.com/mindspore/docs/blob/master/docs/lite/api/source_zh_cn/api_c/format_c.md)
+[](https://gitee.com/mindspore/docs/blob/r2.7.0/docs/lite/api/source_zh_cn/api_c/format_c.md)
```C
#include
diff --git a/docs/lite/api/source_zh_cn/api_c/lite_c_example.rst b/docs/lite/api/source_zh_cn/api_c/lite_c_example.rst
index 9def15a73ba9657997156d0608ff819af596b3df..868ea3a867405516c57f75fc40115c909bc69c97 100644
--- a/docs/lite/api/source_zh_cn/api_c/lite_c_example.rst
+++ b/docs/lite/api/source_zh_cn/api_c/lite_c_example.rst
@@ -4,4 +4,4 @@
.. toctree::
:maxdepth: 1
- 极简Demo↗
+ 极简Demo↗
diff --git a/docs/lite/api/source_zh_cn/api_c/model_c.md b/docs/lite/api/source_zh_cn/api_c/model_c.md
index 5822f01b7ade74f89e95e509b71742c68227f54a..6a15327d26f988435036f2553f6fb1355b92b68e 100644
--- a/docs/lite/api/source_zh_cn/api_c/model_c.md
+++ b/docs/lite/api/source_zh_cn/api_c/model_c.md
@@ -1,6 +1,6 @@
# model_c
-[](https://gitee.com/mindspore/docs/blob/master/docs/lite/api/source_zh_cn/api_c/model_c.md)
+[](https://gitee.com/mindspore/docs/blob/r2.7.0/docs/lite/api/source_zh_cn/api_c/model_c.md)
```C
#include
@@ -145,8 +145,8 @@ MSStatus MSModelBuild(MSModelHandle model, const void* model_data, size_t data_s
- `model`: 指向模型对象的指针。
- `model_data`: 内存中已经加载的模型数据地址。
- `data_size`: 模型数据的长度。
- - `model_type`: 模型文件类型,具体见: [MSModelType](https://mindspore.cn/lite/api/zh-CN/master/api_c/types_c.html#msmodeltype)。
- - `model_context`: 模型的上下文环境,具体见: [Context](https://mindspore.cn/lite/api/zh-CN/master/api_c/context_c.html)。
+ - `model_type`: 模型文件类型,具体见: [MSModelType](https://mindspore.cn/lite/api/zh-CN/r2.7.0/api_c/types_c.html#msmodeltype)。
+ - `model_context`: 模型的上下文环境,具体见: [Context](https://mindspore.cn/lite/api/zh-CN/r2.7.0/api_c/context_c.html)。
- 返回值
@@ -165,8 +165,8 @@ MSStatus MSModelBuildFromFile(MSModelHandle model, const char* model_path, MSMod
- `model`: 指向模型对象的指针。
- `model_path`: 模型文件路径。
- - `model_type`: 模型文件类型,具体见: [MSModelType](https://mindspore.cn/lite/api/zh-CN/master/api_c/types_c.html#msmodeltype)。
- - `model_context`: 模型的上下文环境,具体见: [Context](https://mindspore.cn/lite/api/zh-CN/master/api_c/context_c.html)。
+ - `model_type`: 模型文件类型,具体见: [MSModelType](https://mindspore.cn/lite/api/zh-CN/r2.7.0/api_c/types_c.html#msmodeltype)。
+ - `model_context`: 模型的上下文环境,具体见: [Context](https://mindspore.cn/lite/api/zh-CN/r2.7.0/api_c/context_c.html)。
- 返回值
diff --git a/docs/lite/api/source_zh_cn/api_c/tensor_c.md b/docs/lite/api/source_zh_cn/api_c/tensor_c.md
index bf50fa6d563e667d5c74de6af3a7ad405e85db39..ac6140e8b90924b8fb878ca65cbf7384837f8016 100644
--- a/docs/lite/api/source_zh_cn/api_c/tensor_c.md
+++ b/docs/lite/api/source_zh_cn/api_c/tensor_c.md
@@ -1,6 +1,6 @@
# tensor_c
-[](https://gitee.com/mindspore/docs/blob/master/docs/lite/api/source_zh_cn/api_c/tensor_c.md)
+[](https://gitee.com/mindspore/docs/blob/r2.7.0/docs/lite/api/source_zh_cn/api_c/tensor_c.md)
```C
#include
@@ -123,7 +123,7 @@ void MSTensorSetDataType(MSTensorHandle tensor, MSDataType type)
MSDataType MSTensorGetDataType(const MSTensorHandle tensor)
```
-获取MSTensor的数据类型,具体数据类型见[MSDataType](https://www.mindspore.cn/lite/api/zh-CN/master/api_c/data_type_c.html#msdatatype)。
+获取MSTensor的数据类型,具体数据类型见[MSDataType](https://www.mindspore.cn/lite/api/zh-CN/r2.7.0/api_c/data_type_c.html#msdatatype)。
- 参数
- `tensor`: 指向MSTensor的指针。
@@ -171,7 +171,7 @@ void MSTensorSetFormat(MSTensorHandle tensor, MSFormat format)
- 参数
- `tensor`: 指向MSTensor的指针。
- - `format`: 张量的数据排列,具体见[MSFormat](https://www.mindspore.cn/lite/api/zh-CN/master/api_c/format_c.html#msformat)。
+ - `format`: 张量的数据排列,具体见[MSFormat](https://www.mindspore.cn/lite/api/zh-CN/r2.7.0/api_c/format_c.html#msformat)。
### MSTensorGetFormat
@@ -183,7 +183,7 @@ MSFormat MSTensorGetFormat(const MSTensorHandle tensor)
- 返回值
- 张量的数据排列,具体见[MSFormat](https://www.mindspore.cn/lite/api/zh-CN/master/api_c/format_c.html#msformat)。
+ 张量的数据排列,具体见[MSFormat](https://www.mindspore.cn/lite/api/zh-CN/r2.7.0/api_c/format_c.html#msformat)。
### MSTensorSetData
diff --git a/docs/lite/api/source_zh_cn/api_c/types_c.md b/docs/lite/api/source_zh_cn/api_c/types_c.md
index c9f2421eba7911ad44696fbc5eb81af2f6d9b337..60bd19d91b1a07f0a6d24a695ca67f5de679e1f6 100644
--- a/docs/lite/api/source_zh_cn/api_c/types_c.md
+++ b/docs/lite/api/source_zh_cn/api_c/types_c.md
@@ -1,6 +1,6 @@
# types_c
-[](https://gitee.com/mindspore/docs/blob/master/docs/lite/api/source_zh_cn/api_c/types_c.md)
+[](https://gitee.com/mindspore/docs/blob/r2.7.0/docs/lite/api/source_zh_cn/api_c/types_c.md)
```C
#include
diff --git a/docs/lite/api/source_zh_cn/api_cpp/lite_cpp_example.rst b/docs/lite/api/source_zh_cn/api_cpp/lite_cpp_example.rst
index ecdf9d26248719343aa45cd2d7217615cced2eb9..ef2640b7ba8318ac00fe3dad15506f6c21dbf1b6 100644
--- a/docs/lite/api/source_zh_cn/api_cpp/lite_cpp_example.rst
+++ b/docs/lite/api/source_zh_cn/api_cpp/lite_cpp_example.rst
@@ -4,6 +4,6 @@
.. toctree::
:maxdepth: 1
- 极简Demo↗
- 基于JNI接口的Android应用开发↗
- 高阶用法↗
\ No newline at end of file
+ 极简Demo↗
+ 基于JNI接口的Android应用开发↗
+ 高阶用法↗
\ No newline at end of file
diff --git a/docs/lite/api/source_zh_cn/api_cpp/mindspore.md b/docs/lite/api/source_zh_cn/api_cpp/mindspore.md
index 4e6bc8c3b0c2c329bc9861ac53a460a0642d4d90..132653aa999569b6956142ef837295768d716542 100644
--- a/docs/lite/api/source_zh_cn/api_cpp/mindspore.md
+++ b/docs/lite/api/source_zh_cn/api_cpp/mindspore.md
@@ -1,6 +1,6 @@
# mindspore
-[](https://gitee.com/mindspore/docs/blob/master/docs/lite/api/source_zh_cn/api_cpp/mindspore.md)
+[](https://gitee.com/mindspore/docs/blob/r2.7.0/docs/lite/api/source_zh_cn/api_cpp/mindspore.md)
## 接口汇总
@@ -36,8 +36,8 @@
|--------------------------------------------------|---------------------------------------------------|--------|--------|
| [MSTensor](#mstensor) | MindSpore中的张量。 | √ | √ |
| [QuantParam](#quantparam) | MSTensor中的一组量化参数。 | √ | √ |
-| [DataType](https://www.mindspore.cn/lite/api/zh-CN/master/api_cpp/mindspore_datatype.html) | MindSpore MSTensor保存的数据支持的类型。 | √ | √ |
-| [Format](https://www.mindspore.cn/lite/api/zh-CN/master/api_cpp/mindspore_format.html) | MindSpore MSTensor保存的数据支持的排列格式。 | √ | √ |
+| [DataType](https://www.mindspore.cn/lite/api/zh-CN/r2.7.0/api_cpp/mindspore_datatype.html) | MindSpore MSTensor保存的数据支持的类型。 | √ | √ |
+| [Format](https://www.mindspore.cn/lite/api/zh-CN/r2.7.0/api_cpp/mindspore_format.html) | MindSpore MSTensor保存的数据支持的排列格式。 | √ | √ |
| [Allocator](#allocator-1) | 内存管理基类。 | √ | √ |
### 模型分组
@@ -117,7 +117,7 @@
## Context
-\#include <[context.h](https://gitee.com/mindspore/mindspore/blob/master/include/api/context.h)>
+\#include <[context.h](https://gitee.com/mindspore/mindspore/blob/v2.7.0/include/api/context.h)>
Context类用于保存执行中的环境变量。
@@ -155,9 +155,9 @@ Context的数据。
| [bool GetEnableParallel() const](#getenableparallel) | ✕ | √ |
| [void SetBuiltInDelegate(DelegateMode mode)](#setbuiltindelegate) | ✕ | √ |
| [DelegateMode GetBuiltInDelegate() const](#getbuiltindelegate) | ✕ | √ |
-| [void set_delegate(const std::shared_ptr\ &delegate)](https://www.mindspore.cn/lite/api/zh-CN/master/api_cpp/mindspore.html#set-delegate) | ✕ | √ |
+| [void set_delegate(const std::shared_ptr\ &delegate)](https://www.mindspore.cn/lite/api/zh-CN/r2.7.0/api_cpp/mindspore.html#set-delegate) | ✕ | √ |
| [void SetDelegate(const std::shared_ptr\ &delegate)](#setdelegate) | ✕ | √ |
-| [std::shared_ptr\ get_delegate() const](https://www.mindspore.cn/lite/api/zh-CN/master/api_cpp/mindspore.html#get-delegate) | ✕ | √ |
+| [std::shared_ptr\ get_delegate() const](https://www.mindspore.cn/lite/api/zh-CN/r2.7.0/api_cpp/mindspore.html#get-delegate) | ✕ | √ |
| [std::shared_ptr\ GetDelegate() const](#getdelegate) | ✕ | √ |
| [void SetMultiModalHW(bool float_mode)](#setmultimodalhw) | ✕ | √ |
| [bool GetMultiModalHW() const](#getmultimodalhw) | ✕ | √ |
@@ -405,7 +405,7 @@ std::vector> &MutableDeviceInfo()
## DelegateMode
-\#include <[context.h](https://gitee.com/mindspore/mindspore/blob/master/include/api/context.h)>
+\#include <[context.h](https://gitee.com/mindspore/mindspore/blob/v2.7.0/include/api/context.h)>
```cpp
enum DelegateMode {
@@ -418,7 +418,7 @@ Delegate模式。
## DeviceInfoContext
-\#include <[context.h](https://gitee.com/mindspore/mindspore/blob/master/include/api/context.h)>
+\#include <[context.h](https://gitee.com/mindspore/mindspore/blob/v2.7.0/include/api/context.h)>
DeviceInfoContext类定义不同硬件设备的环境信息。
@@ -549,7 +549,7 @@ std::shared_ptr GetAllocator() const
## CPUDeviceInfo
-\#include <[context.h](https://gitee.com/mindspore/mindspore/blob/master/include/api/context.h)>
+\#include <[context.h](https://gitee.com/mindspore/mindspore/blob/v2.7.0/include/api/context.h)>
派生自[DeviceInfoContext](#deviceinfocontext),模型运行在CPU上的配置。
@@ -594,7 +594,7 @@ bool GetEnableFP16() const
## GPUDeviceInfo
-\#include <[context.h](https://gitee.com/mindspore/mindspore/blob/master/include/api/context.h)>
+\#include <[context.h](https://gitee.com/mindspore/mindspore/blob/v2.7.0/include/api/context.h)>
派生自[DeviceInfoContext](#deviceinfocontext),模型运行在GPU上的配置。
@@ -781,7 +781,7 @@ void *GetGLDisplay() const
## KirinNPUDeviceInfo
-\#include <[context.h](https://gitee.com/mindspore/mindspore/blob/master/include/api/context.h)>
+\#include <[context.h](https://gitee.com/mindspore/mindspore/blob/v2.7.0/include/api/context.h)>
派生自[DeviceInfoContext](#deviceinfocontext),模型运行在NPU上的配置。
@@ -797,7 +797,7 @@ void *GetGLDisplay() const
## AscendDeviceInfo
-\#include <[context.h](https://gitee.com/mindspore/mindspore/blob/master/include/api/context.h)>
+\#include <[context.h](https://gitee.com/mindspore/mindspore/blob/v2.7.0/include/api/context.h)>
派生自[DeviceInfoContext](#deviceinfocontext),模型运行在Atlas 200/300/500推理产品、Atlas推理系列产品上的配置。
@@ -849,7 +849,7 @@ using Key = struct MS_API Key {
## Serialization
-\#include <[serialization.h](https://gitee.com/mindspore/mindspore/blob/master/include/api/serialization.h)>
+\#include <[serialization.h](https://gitee.com/mindspore/mindspore/blob/v2.7.0/include/api/serialization.h)>
Serialization类汇总了模型文件读写的方法。
@@ -1119,7 +1119,7 @@ Buffer Clone() const;
## Model
-\#include <[model.h](https://gitee.com/mindspore/mindspore/blob/master/include/api/model.h)>
+\#include <[model.h](https://gitee.com/mindspore/mindspore/blob/v2.7.0/include/api/model.h)>
Model定义了MindSpore中的模型,便于计算图管理。
@@ -1890,7 +1890,7 @@ Status Finalize();
## MSTensor
-\#include <[types.h](https://gitee.com/mindspore/mindspore/blob/master/include/api/types.h)>
+\#include <[types.h](https://gitee.com/mindspore/mindspore/blob/v2.7.0/include/api/types.h)>
`MSTensor`定义了MindSpore中的张量。
@@ -2085,10 +2085,10 @@ void DestroyTensorPtr(MSTensor *tensor) noexcept;
| [bool IsConst() const](#isconst) | √ | √ |
| [bool IsDevice() const](#isdevice) | √ | ✕ |
| [MSTensor *Clone() const](#clone) | √ | √ |
-| [bool operator==(std::nullptr_t) const](https://www.mindspore.cn/lite/api/zh-CN/master/api_cpp/mindspore.html#operatorstd-nullptr-t) | √ | √ |
-| [bool operator!=(std::nullptr_t) const](https://www.mindspore.cn/lite/api/zh-CN/master/api_cpp/mindspore.html#operatorstd-nullptr-t-1) | √ | √ |
-| [bool operator!=(const MSTensor &tensor) const](https://www.mindspore.cn/lite/api/zh-CN/master/api_cpp/mindspore.html#operatorconst-mstensor-tensor) | √ | √ |
-| [bool operator==(const MSTensor &tensor) const](https://www.mindspore.cn/lite/api/zh-CN/master/api_cpp/mindspore.html#operatorconst-mstensor-tensor-1) | √ | √ |
+| [bool operator==(std::nullptr_t) const](https://www.mindspore.cn/lite/api/zh-CN/r2.7.0/api_cpp/mindspore.html#operatorstd-nullptr-t) | √ | √ |
+| [bool operator!=(std::nullptr_t) const](https://www.mindspore.cn/lite/api/zh-CN/r2.7.0/api_cpp/mindspore.html#operatorstd-nullptr-t-1) | √ | √ |
+| [bool operator!=(const MSTensor &tensor) const](https://www.mindspore.cn/lite/api/zh-CN/r2.7.0/api_cpp/mindspore.html#operatorconst-mstensor-tensor) | √ | √ |
+| [bool operator==(const MSTensor &tensor) const](https://www.mindspore.cn/lite/api/zh-CN/r2.7.0/api_cpp/mindspore.html#operatorconst-mstensor-tensor-1) | √ | √ |
| [void SetShape(const std::vector\ &shape)](#setshape) | √ | √ |
| [void SetDataType(enum DataType data_type)](#setdatatype) | √ | √ |
| [void SetTensorName(const std::string &name)](#settensorname) | √ | √ |
@@ -2414,7 +2414,7 @@ const std::shared_ptr impl()
## QuantParam
-\#include <[types.h](https://gitee.com/mindspore/mindspore/blob/master/include/api/types.h)>
+\#include <[types.h](https://gitee.com/mindspore/mindspore/blob/v2.7.0/include/api/types.h)>
一个结构体。QuantParam定义了MSTensor的一组量化参数。
@@ -2462,7 +2462,7 @@ max
## MSKernelCallBack
-\#include <[types.h](https://gitee.com/mindspore/mindspore/blob/master/include/api/types.h)>
+\#include <[types.h](https://gitee.com/mindspore/mindspore/blob/v2.7.0/include/api/types.h)>
```cpp
using MSKernelCallBack = std::function &inputs, const std::vector &outputs, const MSCallBackParam &opInfo)>
@@ -2472,7 +2472,7 @@ using MSKernelCallBack = std::function &inputs,
## MSCallBackParam
-\#include <[types.h](https://gitee.com/mindspore/mindspore/blob/master/include/api/types.h)>
+\#include <[types.h](https://gitee.com/mindspore/mindspore/blob/v2.7.0/include/api/types.h)>
一个结构体。MSCallBackParam定义了回调函数的输入参数。
@@ -2504,7 +2504,7 @@ execute_time
## Delegate
-\#include <[delegate.h](https://gitee.com/mindspore/mindspore/blob/master/include/api/delegate.h)>
+\#include <[delegate.h](https://gitee.com/mindspore/mindspore/blob/v2.7.0/include/api/delegate.h)>
`Delegate`定义了第三方AI框架接入MindSpore Lite的代理接口。
@@ -2591,7 +2591,7 @@ void ReplaceNodes(const std::shared_ptr &graph) override {}
## CoreMLDelegate
-\#include <[delegate.h](https://gitee.com/mindspore/mindspore/blob/master/include/api/delegate.h)>
+\#include <[delegate.h](https://gitee.com/mindspore/mindspore/blob/v2.7.0/include/api/delegate.h)>
`CoreMLDelegate`继承自`Delegate`类,定义了CoreML框架接入MindSpore Lite的代理接口。
@@ -2633,7 +2633,7 @@ CoreMLDelegate在线构图,仅在内部图编译阶段调用。
## SchemaVersion
-\#include <[delegate.h](https://gitee.com/mindspore/mindspore/blob/master/include/api/delegate.h)>
+\#include <[delegate.h](https://gitee.com/mindspore/mindspore/blob/v2.7.0/include/api/delegate.h)>
定义了MindSpore Lite执行在线推理时模型文件的版本。
@@ -2647,9 +2647,9 @@ typedef enum {
## KernelIter
-\#include <[delegate.h](https://gitee.com/mindspore/mindspore/blob/master/include/api/delegate.h)>
+\#include <[delegate.h](https://gitee.com/mindspore/mindspore/blob/v2.7.0/include/api/delegate.h)>
-定义了MindSpore Lite [Kernel](https://www.mindspore.cn/lite/api/zh-CN/master/api_cpp/mindspore_kernel.html#mindspore-kernel)列表的迭代器。
+定义了MindSpore Lite [Kernel](https://www.mindspore.cn/lite/api/zh-CN/r2.7.0/api_cpp/mindspore_kernel.html#mindspore-kernel)列表的迭代器。
```cpp
using KernelIter = std::vector::iterator;
@@ -2657,7 +2657,7 @@ using KernelIter = std::vector::iterator;
## DelegateModel
-\#include <[delegate.h](https://gitee.com/mindspore/mindspore/blob/master/include/api/delegate.h)>
+\#include <[delegate.h](https://gitee.com/mindspore/mindspore/blob/v2.7.0/include/api/delegate.h)>
`DelegateModel`定义了MindSpore Lite Delegate机制操作的的模型对象。
@@ -2683,7 +2683,7 @@ DelegateModel(std::vector *kernels, const std::vector *kernels_;
```
-[**Kernel**](https://www.mindspore.cn/lite/api/zh-CN/master/api_cpp/mindspore_kernel.html#kernel)的列表,保存模型的所有算子。
+[**Kernel**](https://www.mindspore.cn/lite/api/zh-CN/r2.7.0/api_cpp/mindspore_kernel.html#kernel)的列表,保存模型的所有算子。
#### inputs_
@@ -2691,7 +2691,7 @@ std::vector *kernels_;
const std::vector &inputs_;
```
-[**MSTensor**](https://www.mindspore.cn/lite/api/zh-CN/master/api_cpp/mindspore.html#mstensor)的列表,保存这个算子的输入tensor。
+[**MSTensor**](https://www.mindspore.cn/lite/api/zh-CN/r2.7.0/api_cpp/mindspore.html#mstensor)的列表,保存这个算子的输入tensor。
#### outputs_
@@ -2699,7 +2699,7 @@ const std::vector &inputs_;
const std::vector &outputs;
```
-[**MSTensor**](https://www.mindspore.cn/lite/api/zh-CN/master/api_cpp/mindspore.html#mstensor)的列表,保存这个算子的输出tensor。
+[**MSTensor**](https://www.mindspore.cn/lite/api/zh-CN/r2.7.0/api_cpp/mindspore.html#mstensor)的列表,保存这个算子的输出tensor。
#### primitives_
@@ -2707,7 +2707,7 @@ const std::vector &outputs;
const std::map &primitives_;
```
-[**Kernel**](https://www.mindspore.cn/lite/api/zh-CN/master/api_cpp/mindspore_kernel.html#kernel)和**schema::Primitive**的Map,保存所有算子的属性。
+[**Kernel**](https://www.mindspore.cn/lite/api/zh-CN/r2.7.0/api_cpp/mindspore_kernel.html#kernel)和**schema::Primitive**的Map,保存所有算子的属性。
#### version_
@@ -2799,7 +2799,7 @@ const std::vector &inputs()
- 返回值
- [**MSTensor**](https://www.mindspore.cn/lite/api/zh-CN/master/api_cpp/mindspore.html#mstensor)的列表。
+ [**MSTensor**](https://www.mindspore.cn/lite/api/zh-CN/r2.7.0/api_cpp/mindspore.html#mstensor)的列表。
#### outputs
@@ -2811,7 +2811,7 @@ const std::vector &outputs()
- 返回值
- [**MSTensor**](https://www.mindspore.cn/lite/api/zh-CN/master/api_cpp/mindspore.html#mstensor)的列表。
+ [**MSTensor**](https://www.mindspore.cn/lite/api/zh-CN/r2.7.0/api_cpp/mindspore.html#mstensor)的列表。
#### GetVersion
@@ -2827,7 +2827,7 @@ const SchemaVersion GetVersion()
## AbstractDelegate
-\#include <[delegate_api.h](https://gitee.com/mindspore/mindspore/blob/master/include/api/delegate_api.h)>
+\#include <[delegate_api.h](https://gitee.com/mindspore/mindspore/blob/v2.7.0/include/api/delegate_api.h)>
`AbstractDelegate`定义了MindSpore Lite 创建Delegate(抽象类)。
@@ -2883,7 +2883,7 @@ std::vector outputs_
std::vector outputs_
```
-\#include <[delegate_api.h](https://gitee.com/mindspore/mindspore/blob/master/include/api/delegate_api.h)>
+\#include <[delegate_api.h](https://gitee.com/mindspore/mindspore/blob/v2.7.0/include/api/delegate_api.h)>
`IDelegate`定义了MindSpore Lite 创建Delegate(模板类)。
@@ -2929,7 +2929,7 @@ virtual std::shared_ptr CreateKernel(const std::shared_ptr &node)
## TrainCfg
-\#include <[cfg.h](https://gitee.com/mindspore/mindspore/blob/master/include/api/cfg.h)>
+\#include <[cfg.h](https://gitee.com/mindspore/mindspore/blob/v2.7.0/include/api/cfg.h)>
`TrainCfg`MindSpore Lite训练的相关配置参数。
@@ -3018,7 +3018,7 @@ inline void SetLossName(const std::vector &loss_name);
## MixPrecisionCfg
-\#include <[cfg.h](https://gitee.com/mindspore/mindspore/blob/master/include/api/cfg.h)>
+\#include <[cfg.h](https://gitee.com/mindspore/mindspore/blob/v2.7.0/include/api/cfg.h)>
`MixPrecisionCfg`MindSpore Lite训练混合精度配置类。
@@ -3082,7 +3082,7 @@ bool keep_batchnorm_fp32_ = true;
## AccuracyMetrics
-\#include <[accuracy.h](https://gitee.com/mindspore/mindspore/blob/master/include/api/metrics/accuracy.h)>
+\#include <[accuracy.h](https://gitee.com/mindspore/mindspore/blob/v2.7.0/include/api/metrics/accuracy.h)>
`AccuracyMetrics`MindSpore Lite训练精度类。
@@ -3130,7 +3130,7 @@ float Eval() override;
## Metrics
-\#include <[metrics.h](https://gitee.com/mindspore/mindspore/blob/master/include/api/metrics/metrics.h)>
+\#include <[metrics.h](https://gitee.com/mindspore/mindspore/blob/v2.7.0/include/api/metrics/metrics.h)>
`Metrics`MindSpore Lite训练指标类。
@@ -3177,7 +3177,7 @@ virtual void Update(std::vector inputs, std::vector outp
## TrainCallBack
-\#include <[callback.h](https://gitee.com/mindspore/mindspore/blob/master/include/api/callback/callback.h)>
+\#include <[callback.h](https://gitee.com/mindspore/mindspore/blob/v2.7.0/include/api/callback/callback.h)>
`Metrics`MindSpore Lite训练回调类。
@@ -3276,7 +3276,7 @@ virtual void Begin(const TrainCallBackData &cb_data) {}
## TrainCallBackData
-\#include <[callback.h](https://gitee.com/mindspore/mindspore/blob/master/include/api/callback/callback.h)>
+\#include <[callback.h](https://gitee.com/mindspore/mindspore/blob/v2.7.0/include/api/callback/callback.h)>
一个结构体。TrainCallBackData定义了训练回调的一组参数。
@@ -3316,7 +3316,7 @@ model_
## CkptSaver
-\#include <[ckpt_saver.h](https://gitee.com/mindspore/mindspore/blob/master/include/api/callback/ckpt_saver.h)>
+\#include <[ckpt_saver.h](https://gitee.com/mindspore/mindspore/blob/v2.7.0/include/api/callback/ckpt_saver.h)>
`Metrics`MindSpore Lite训练模型文件保存类。
@@ -3329,7 +3329,7 @@ model_
## LossMonitor
-\#include <[loss_monitor.h](https://gitee.com/mindspore/mindspore/blob/master/include/api/callback/loss_monitor.h)>
+\#include <[loss_monitor.h](https://gitee.com/mindspore/mindspore/blob/v2.7.0/include/api/callback/loss_monitor.h)>
`Metrics`MindSpore Lite训练损失函数类。
@@ -3356,7 +3356,7 @@ model_
## LRScheduler
-\#include <[lr_scheduler.h](https://gitee.com/mindspore/mindspore/blob/master/include/api/callback/lr_scheduler.h)>
+\#include <[lr_scheduler.h](https://gitee.com/mindspore/mindspore/blob/v2.7.0/include/api/callback/lr_scheduler.h)>
`Metrics`MindSpore Lite训练学习率调度类。
@@ -3369,7 +3369,7 @@ model_
## StepLRLambda
-\#include <[lr_scheduler.h](https://gitee.com/mindspore/mindspore/blob/master/include/api/callback/lr_scheduler.h)>
+\#include <[lr_scheduler.h](https://gitee.com/mindspore/mindspore/blob/v2.7.0/include/api/callback/lr_scheduler.h)>
一个结构体。StepLRLambda定义了训练学习率的一组参数。
@@ -3393,7 +3393,7 @@ gamma
## MultiplicativeLRLambda
-\#include <[lr_scheduler.h](https://gitee.com/mindspore/mindspore/blob/master/include/api/callback/lr_scheduler.h)>
+\#include <[lr_scheduler.h](https://gitee.com/mindspore/mindspore/blob/v2.7.0/include/api/callback/lr_scheduler.h)>
每个epoch将学习率乘以一个因子。
@@ -3421,7 +3421,7 @@ int MultiplicativeLRLambda(float *lr, int epoch, void *multiplication)
## TimeMonitor
-\#include <[time_monitor.h](https://gitee.com/mindspore/mindspore/blob/master/include/api/callback/time_monitor.h)>
+\#include <[time_monitor.h](https://gitee.com/mindspore/mindspore/blob/v2.7.0/include/api/callback/time_monitor.h)>
`Metrics`MindSpore Lite训练时间监测类。
@@ -3467,7 +3467,7 @@ int MultiplicativeLRLambda(float *lr, int epoch, void *multiplication)
## TrainAccuracy
-\#include <[train_accuracy.h](https://gitee.com/mindspore/mindspore/blob/master/include/api/callback/train_accuracy.h)>
+\#include <[train_accuracy.h](https://gitee.com/mindspore/mindspore/blob/v2.7.0/include/api/callback/train_accuracy.h)>
`Metrics`MindSpore Lite训练学习率调度类。
@@ -3526,7 +3526,7 @@ std::vector CharVersion()
|-----------------------|--------|--------|
| [std::string Version()](#version) | ✕ | √ |
-\#include <[types.h](https://gitee.com/mindspore/mindspore/blob/master/include/api/types.h)>
+\#include <[types.h](https://gitee.com/mindspore/mindspore/blob/v2.7.0/include/api/types.h)>
```cpp
std::string Version()
@@ -3540,7 +3540,7 @@ std::string Version()
## Allocator
-\#include <[allocator.h](https://gitee.com/mindspore/mindspore/blob/master/include/api/allocator.h)>
+\#include <[allocator.h](https://gitee.com/mindspore/mindspore/blob/v2.7.0/include/api/allocator.h)>
内存管理基类。
@@ -3694,11 +3694,11 @@ inline Status(const StatusCode code, int line_of_code, const char *file_name, co
| [inline std::string GetErrDescription() const](#geterrdescription) | √ | √ |
| [inline std::string SetErrDescription(const std::string &err_description)](#seterrdescription) | √ | √ |
| [inline void SetStatusMsg(const std::string &status_msg)](#setstatusmsg) | √ | √ |
-| [friend std::ostream &operator\<\<(std::ostream &os, const Status &s)](https://www.mindspore.cn/lite/api/zh-CN/master/api_cpp/mindspore.html#operator< Construct(const std::vector &inputs) {return
## Cell
-\#include <[cell.h](https://gitee.com/mindspore/mindspore/blob/master/include/api/cell.h)>
+\#include <[cell.h](https://gitee.com/mindspore/mindspore/blob/v2.7.0/include/api/cell.h)>
### 析构函数
@@ -4187,7 +4187,7 @@ std::vector