diff --git a/docs/mindspore/migration_guide/source_en/api_mapping/tensorflow_api_mapping.md b/docs/mindspore/migration_guide/source_en/api_mapping/tensorflow_api_mapping.md
index ca1fce5b6e7d43bc28cfd8baa295bab8d5af67d4..13000bf4b8e66937a65cc26c4df088ced2a34e33 100644
--- a/docs/mindspore/migration_guide/source_en/api_mapping/tensorflow_api_mapping.md
+++ b/docs/mindspore/migration_guide/source_en/api_mapping/tensorflow_api_mapping.md
@@ -79,3 +79,16 @@ Mapping between TensorFlow APIs and MindSpore APIs, which is provided by the com
| [tf.zeros_like](http://www.tensorflow.org/versions/r1.15/api_docs/python/tf/zeros_like) |[mindspore.ops.ZerosLike](https://mindspore.cn/docs/api/en/master/api_python/ops/mindspore.ops.ZerosLike.html) | same |
| [tf.distribute.Strategy](http://www.tensorflow.org/versions/r1.15/api_docs/python/tf/distribute/Strategy) |[context.set_auto_parallel_context(parallel_mode=ParallelMode.DATA_PARALLEL)](https://mindspore.cn/docs/api/zh-CN/master/api_python/mindspore.context.html#mindspore.context.set_auto_parallel_context) | [diff](https://www.mindspore.cn/docs/migration_guide/en/master/api_mapping/tensorflow_diff/DistributedTrain.html)|
+
+
+# TensorFlow Probability and MindSpore
+
+
+
+Mapping between TensorFlow Probability APIs and MindSpore APIs, which is provided by the community.
+
+| TensorFlow Probability 0.14.1 APIs | MindSpore APIs | Description |
+|------|------|------|
+| [tfp.bijectors.Softplus](http://www.tensorflow.org/probability/api_docs/python/tfp/bijectors/Softplus) |[mindspore.nn.probability.bijector.Softplus](https://www.mindspore.cn/docs/api/en/master/api_python/nn_probability/mindspore.nn.probability.bijector.Softplus.html)|[diff](https://www.mindspore.cn/docs/migration_guide/en/master/api_mapping/tensorflow_probability_diff/BijectorSoftplus.html)|
diff --git a/docs/mindspore/migration_guide/source_en/api_mapping/tensorflow_probability_diff/BijectorSoftplus.md b/docs/mindspore/migration_guide/source_en/api_mapping/tensorflow_probability_diff/BijectorSoftplus.md
new file mode 100644
index 0000000000000000000000000000000000000000..703da8f3809a5b32fb16bd2614ecb3554d523a7e
--- /dev/null
+++ b/docs/mindspore/migration_guide/source_en/api_mapping/tensorflow_probability_diff/BijectorSoftplus.md
@@ -0,0 +1,88 @@
+# Function Differences with tfp.bijectors.Softplus
+
+
+
+## tfp.bijectors.Softplus
+
+```python
+class tfp.bijectors.Softplus(
+ hinge_softness=None,
+ low=None,
+ validate_args=False,
+ name='softplus'
+)
+```
+
+For more information, see [tfp.bijectors.Softplus](http://www.tensorflow.org/probability/api_docs/python/tfp/bijectors/Softplus).
+
+## mindspore.nn.probability.bijector.Softplus
+
+```python
+class mindspore.nn.probability.bijector.Softplus(
+ sharpness=1.0,
+ name="Softplus"
+)
+```
+
+For more information, see [mindspore.nn.probability.bijector.Softplus](https://www.mindspore.cn/docs/api/en/master/api_python/nn_probability/mindspore.nn.probability.bijector.Softplus.html).
+
+## Differences
+
+TensorFlow: the formula is Y = g(X) = Log[1 + exp(X)]; when hinge_softness = c is specified, it becomes Y = g(X) = c * Log[1 + exp(X / c)].
+
+MindSpore: the formula is $Y = g(X) = \log(1 + e^{kX}) / k$, where k is the sharpness. Therefore, when sharpness = 1./hinge_softness, MindSpore and TensorFlow produce the same results.
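+
+As a quick check (a worked substitution, writing $c$ for hinge_softness and $k$ for sharpness): setting $k = 1/c$ in the MindSpore formula gives
+
+$$Y = \frac{\log(1 + e^{X/c})}{1/c} = c\,\log\left(1 + e^{X/c}\right),$$
+
+which is exactly the TensorFlow Probability formula with hinge_softness = c, so the two outputs coincide.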
+
+## Code Example
+
+```python
+# The following implements bijector.Softplus with MindSpore.
+import tensorflow as tf
+import tensorflow_probability.python as tfp
+import mindspore
+import mindspore.nn as nn
+from mindspore import Tensor
+import mindspore.nn.probability.bijector as msb
+
+# To initialize a Softplus bijector of sharpness 2.0.
+softplus = msb.Softplus(2.0)
+value = Tensor([2], dtype=mindspore.float32)
+ans1 = softplus.forward(value)
+print(ans1)
+#Out:
+#[2.009075]
+ans2 = softplus.inverse(value)
+print(ans2)
+#Out:
+#[1.9907573]
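+# forward_log_jacobian evaluates the log of the derivative of the forward mapping at the given value.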
+ans3 = softplus.forward_log_jacobian(value)
+print(ans3)
+#Out:
+#[-0.01814996]
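+# inverse_log_jacobian evaluates the log of the derivative of the inverse mapping at the given value.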
+ans4 = softplus.inverse_log_jacobian(value)
+print(ans4)
+#Out:
+#[0.01848531]
+
+
+# The following implements bijectors.Softplus with TensorFlow_Probability.
+value_tf = tf.constant([2], dtype=tf.float32)
+# sharpness = 2.0, sharpness = 1./hinge_softness, so hinge_softness = 0.5
+output = tfp.bijectors.Softplus(0.5)
+out1 = output.forward(value_tf)
+out2 = output.inverse(value_tf)
+out3 = output.forward_log_det_jacobian(value_tf, event_ndims=0)
+out4 = output.inverse_log_det_jacobian(value_tf, event_ndims=0)
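+# Run the ops in a TF1-style Session; each run() returns the result as a NumPy array.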
+ss = tf.Session()
+ss.run(out1)
+# out1
+# array([2.009075], dtype=float32)
+ss.run(out2)
+# out2
+# array([1.9907573], dtype=float32)
+ss.run(out3)
+# out3
+# array([-0.01814996], dtype=float32)
+ss.run(out4)
+# out4
+# array([0.01848542], dtype=float32)
+```
diff --git a/docs/mindspore/migration_guide/source_zh_cn/api_mapping/tensorflow_api_mapping.md b/docs/mindspore/migration_guide/source_zh_cn/api_mapping/tensorflow_api_mapping.md
index d22da72bc139fab9d42c1a328353841c1b4e5cd1..0e7870c5ea2acdba53cef991bb8909a31818f513 100644
--- a/docs/mindspore/migration_guide/source_zh_cn/api_mapping/tensorflow_api_mapping.md
+++ b/docs/mindspore/migration_guide/source_zh_cn/api_mapping/tensorflow_api_mapping.md
@@ -80,3 +80,13 @@
| [tf.distribute.Strategy](http://www.tensorflow.org/versions/r1.15/api_docs/python/tf/distribute/Strategy) |[context.set_auto_parallel_context(parallel_mode=ParallelMode.DATA_PARALLEL)](https://mindspore.cn/docs/api/zh-CN/master/api_python/mindspore.context.html#mindspore.context.set_auto_parallel_context) | [差异对比](https://www.mindspore.cn/docs/migration_guide/zh-CN/master/api_mapping/tensorflow_diff/DistributedTrain.html)|
+
+# TensorFlow Probability and MindSpore
+
+
+
+Mapping between TensorFlow Probability APIs and MindSpore APIs, which is provided by the community.
+
+| TensorFlow Probability 0.14.1 APIs | MindSpore APIs | Description |
+|-----------------------------------------------------------------------------------------------------------------------------------------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------|--------|
+| [tfp.bijectors.Softplus](http://www.tensorflow.org/probability/api_docs/python/tfp/bijectors/Softplus) |[mindspore.nn.probability.bijector.Softplus](https://www.mindspore.cn/docs/api/zh-CN/master/api_python/nn_probability/mindspore.nn.probability.bijector.Softplus.html)|[diff](https://www.mindspore.cn/docs/migration_guide/zh-CN/master/api_mapping/tensorflow_probability_diff/BijectorSoftplus.html)|
diff --git a/docs/mindspore/migration_guide/source_zh_cn/api_mapping/tensorflow_probability_diff/BijectorSoftplus.md b/docs/mindspore/migration_guide/source_zh_cn/api_mapping/tensorflow_probability_diff/BijectorSoftplus.md
new file mode 100644
index 0000000000000000000000000000000000000000..f350f29fb1ea7162a4f5f12ef800687b9f56eea0
--- /dev/null
+++ b/docs/mindspore/migration_guide/source_zh_cn/api_mapping/tensorflow_probability_diff/BijectorSoftplus.md
@@ -0,0 +1,88 @@
+# Function Differences with tfp.bijectors.Softplus
+
+
+
+## tfp.bijectors.Softplus
+
+```python
+class tfp.bijectors.Softplus(
+ hinge_softness=None,
+ low=None,
+ validate_args=False,
+ name='softplus'
+)
+```
+
+For more information, see [tfp.bijectors.Softplus](http://www.tensorflow.org/probability/api_docs/python/tfp/bijectors/Softplus).
+
+## mindspore.nn.probability.bijector.Softplus
+
+```python
+class mindspore.nn.probability.bijector.Softplus(
+ sharpness=1.0,
+ name="Softplus"
+)
+```
+
+For more information, see [mindspore.nn.probability.bijector.Softplus](https://www.mindspore.cn/docs/api/zh-CN/master/api_python/nn_probability/mindspore.nn.probability.bijector.Softplus.html).
+
+## Differences
+
+TensorFlow: the formula is Y = g(X) = Log[1 + exp(X)]; when hinge_softness = c is specified, it becomes Y = g(X) = c * Log[1 + exp(X / c)].
+
+MindSpore: the formula is $Y = g(X) = \log(1 + e^{kX}) / k$, where k is the sharpness. Therefore, when sharpness = 1./hinge_softness, MindSpore and TensorFlow produce the same results.
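+
+As a quick check (a worked substitution, writing $c$ for hinge_softness and $k$ for sharpness): setting $k = 1/c$ in the MindSpore formula gives
+
+$$Y = \frac{\log(1 + e^{X/c})}{1/c} = c\,\log\left(1 + e^{X/c}\right),$$
+
+which is exactly the TensorFlow Probability formula with hinge_softness = c, so the two outputs coincide.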
+
+## Code Example
+
+```python
+# The following implements bijector.Softplus with MindSpore.
+import tensorflow as tf
+import tensorflow_probability.python as tfp
+import mindspore
+import mindspore.nn as nn
+from mindspore import Tensor
+import mindspore.nn.probability.bijector as msb
+
+# To initialize a Softplus bijector of sharpness 2.0.
+softplus = msb.Softplus(2.0)
+value = Tensor([2], dtype=mindspore.float32)
+ans1 = softplus.forward(value)
+print(ans1)
+#Out:
+#[2.009075]
+ans2 = softplus.inverse(value)
+print(ans2)
+#Out:
+#[1.9907573]
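+# forward_log_jacobian evaluates the log of the derivative of the forward mapping at the given value.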
+ans3 = softplus.forward_log_jacobian(value)
+print(ans3)
+#Out:
+#[-0.01814996]
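+# inverse_log_jacobian evaluates the log of the derivative of the inverse mapping at the given value.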
+ans4 = softplus.inverse_log_jacobian(value)
+print(ans4)
+#Out:
+#[0.01848531]
+
+
+# The following implements bijectors.Softplus with TensorFlow_Probability.
+value_tf = tf.constant([2], dtype=tf.float32)
+# sharpness = 2.0, sharpness = 1./hinge_softness, so hinge_softness = 0.5
+output = tfp.bijectors.Softplus(0.5)
+out1 = output.forward(value_tf)
+out2 = output.inverse(value_tf)
+out3 = output.forward_log_det_jacobian(value_tf, event_ndims=0)
+out4 = output.inverse_log_det_jacobian(value_tf, event_ndims=0)
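+# Run the ops in a TF1-style Session; each run() returns the result as a NumPy array.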
+ss = tf.Session()
+ss.run(out1)
+# out1
+# array([2.009075], dtype=float32)
+ss.run(out2)
+# out2
+# array([1.9907573], dtype=float32)
+ss.run(out3)
+# out3
+# array([-0.01814996], dtype=float32)
+ss.run(out4)
+# out4
+# array([0.01848542], dtype=float32)
+```