From a9d906aa57e3a6a53604f176445c421654efce91 Mon Sep 17 00:00:00 2001 From: 7aY <389354380@qq.com> Date: Tue, 1 Jun 2021 23:35:55 +0800 Subject: [PATCH 1/2] translation modify translation --- .../source_en/initializer.md | 195 +++++++++++++++++- 1 file changed, 190 insertions(+), 5 deletions(-) diff --git a/docs/programming_guide/source_en/initializer.md b/docs/programming_guide/source_en/initializer.md index cd934f345f..420faa1258 100644 --- a/docs/programming_guide/source_en/initializer.md +++ b/docs/programming_guide/source_en/initializer.md @@ -1,5 +1,190 @@ -# Initialization of Network Parameters - -No English version right now, welcome to contribute. - - \ No newline at end of file +# Initialization of Network Parameters + +## summarize + +MindSpore provides a weight initialization module, which allows users to initialize network parameters by encapsulating operators and initializer methods to call strings, initializer subclasses, or custom Tensors. +The Initializer class is the basic data structure used for initialization in MindSpore. Its subclasses contain several different types of data distribution (Zero, One, XavierUniform, Heuniform, Henormal, Constant, Uniform,Normal,TruncatedNormal).The following is a detailed description of The encapsulation operator and the initializer method. + + +## Use packing operator to initialize parameters + +MindSpore provide multiple parameter initialization methods and encapsulating the function of parameter initialization in some operators.This section will introduce the method of initializing the parameters of the operator with parameter initialization function.In the case of the operators of `Conv2d`,there introduces the initialization of parameters in the network by string, `Initializer` subclass and user-defined `Tensor`. The following code take the subclasses of `Initializer`:`Normal` for instance and can be replaced with any of the subclasses of `Initializer` in the code examples. 
+  +### String +If you use string to initialize network parameters,the content of the string needs to be consistent with the name of the `Initializer` subclass. Initialization using string mode will use the default parameters in the `Initializer` subclass. For example, using the string `Normal` is equivalent to using the subclass of Initializer. Class `Normal()`, the code sample is as follows: + + import numpy as np + import mindspore.nn as nn + from mindspore import Tensor + from mindspore.common import set_seed + + set_seed(1) + + input_data = Tensor(np.ones([1, 3, 16, 50], dtype=np.float32)) + net = nn.Conv2d(3, 64, 3, weight_init='Normal') + output = net(input_data) + print(output) +  + + [[[[ 3.10382620e-02 4.38603461e-02 4.38603461e-02 ... 4.38603461e-02 + 4.38603461e-02 1.38719045e-02] + [ 3.26051228e-02 3.54298912e-02 3.54298912e-02 ... 3.54298912e-02 + 3.54298912e-02 -5.54019120e-03] + [ 3.26051228e-02 3.54298912e-02 3.54298912e-02 ... 3.54298912e-02 + 3.54298912e-02 -5.54019120e-03] + + ... + + [[ 3.98553275e-02 -1.35465711e-03 -1.35465711e-03 ... -1.35465711e-03 + -1.35465711e-03 -1.00310734e-02] + [ 4.38403059e-03 -3.60766202e-02 -3.60766202e-02 ... -3.60766202e-02 + -3.60766202e-02 -2.95619294e-02] + [ 4.38403059e-03 -3.60766202e-02 -3.60766202e-02 ... -3.60766202e-02 + -3.60766202e-02 -2.95619294e-02] + ... + [ 4.38403059e-03 -3.60766202e-02 -3.60766202e-02 ... -3.60766202e-02 + -3.60766202e-02 -2.95619294e-02] + [ 4.38403059e-03 -3.60766202e-02 -3.60766202e-02 ... -3.60766202e-02 + -3.60766202e-02 -2.95619294e-02] + [ 1.33139016e-02 6.74417242e-05 6.74417242e-05 ... 6.74417242e-05 + 6.74417242e-05 -2.27325838e-02]]]] + + +### subclass of Initializer +`Initializer` subclass is used to initialize network parameters, which is similar to the effect of using string to initialize parameters. The difference is that using string to initialize parameters is the default parameter of using the `Initializer` subclass. 
If you want to use the parameters in the `Initializer` subclass,The `Initializer` subclass must be used to initialize the parameters. Take `Normal(0.2)` as an example. The code sample is as follows: + + import numpy as np + import mindspore.nn as nn + from mindspore import Tensor + from mindspore.common import set_seed + from mindspore.common.initializer import Normal + + set_seed(1) + + input_data = Tensor(np.ones([1, 3, 16, 50], dtype=np.float32)) + net = nn.Conv2d(3, 64, 3, weight_init=Normal(0.2)) + output = net(input_data) + print(output) +  + + [[[[ 6.2076533e-01 8.7720710e-01 8.7720710e-01 ... 8.7720710e-01 + 8.7720710e-01 2.7743810e-01] + [ 6.5210247e-01 7.0859784e-01 7.0859784e-01 ... 7.0859784e-01 + 7.0859784e-01 -1.1080378e-01] + [ 6.5210247e-01 7.0859784e-01 7.0859784e-01 ... 7.0859784e-01 + 7.0859784e-01 -1.1080378e-01] + ... + [ 6.5210247e-01 7.0859784e-01 7.0859784e-01 ... 7.0859784e-01 + 7.0859784e-01 -1.1080378e-01] + [ 6.5210247e-01 7.0859784e-01 7.0859784e-01 ... 7.0859784e-01 + 7.0859784e-01 -1.1080378e-01] + [ 1.9323981e-01 2.4820906e-01 2.4820906e-01 ... 2.4820906e-01 + 2.4820906e-01 -2.7795550e-01] + [[ 7.9710668e-01 -2.7093157e-02 -2.7093157e-02 ... -2.7093157e-02 + -2.7093157e-02 -2.0062150e-01] + [ 8.7680638e-02 -7.2153252e-01 -7.2153252e-01 ... -7.2153252e-01 + -7.2153252e-01 -5.9123868e-01] + [ 8.7680638e-02 -7.2153252e-01 -7.2153252e-01 ... -7.2153252e-01 + -7.2153252e-01 -5.9123868e-01] + ... + [ 8.7680638e-02 -7.2153252e-01 -7.2153252e-01 ... -7.2153252e-01 + -7.2153252e-01 -5.9123868e-01] + [ 8.7680638e-02 -7.2153252e-01 -7.2153252e-01 ... -7.2153252e-01 + -7.2153252e-01 -5.9123868e-01] + [ 2.6627803e-01 1.3488382e-03 1.3488382e-03 ... 1.3488382e-03 + 1.3488382e-03 -4.5465171e-01]]]] + +### User-defined Tensor +In addition to the above two initialization methods, when the network wants to use data types not available in MindSpore to initialize the parameters, users can customize Tensor to initialize the parameters. 
The code sample is as follows: + + import numpy as np + import mindspore.nn as nn + from mindspore import Tensor + from mindspore import dtype as mstype + + weight = Tensor(np.ones([64, 3, 3, 3]), dtype=mstype.float32) + input_data = Tensor(np.ones([1, 3, 16, 50], dtype=np.float32)) + net = nn.Conv2d(3, 64, 3, weight_init=weight) + output = net(input_data) + print(output) +  + + [[[[12. 18. 18. ... 18. 18. 12.] + [18. 27. 27. ... 27. 27. 18.] + ... + [18. 27. 27. ... 27. 27. 18.] + [18. 27. 27. ... 27. 27. 18.] + [12. 18. 18. ... 18. 18. 12.]] + + ... + + [[12. 18. 18. ... 18. 18. 12.] + [18. 27. 27. ... 27. 27. 18.] + [18. 27. 27. ... 27. 27. 18.] + ... + [18. 27. 27. ... 27. 27. 18.] + [18. 27. 27. ... 27. 27. 18.] + [12. 18. 18. ... 18. 18. 12.]]]] + +### The init parameter is a subclass of Initializer +The code example as follow: + + import numpy as np + from mindspore import Tensor + from mindspore import dtype as mstype + from mindspore.common import set_seed + from mindspore.ops.operations import nn_ops as nps + from mindspore.common.initializer import Normal, initializer + + set_seed(1) + + input_data = Tensor(np.ones([16, 3, 10, 32, 32]), dtype=mstype.float32) + weight = initializer(Normal(0.2), shape=[32, 3, 4, 3, 3], dtype=mstype.float32) + conv3d = nps.Conv3D(out_channel=32, kernel_size=(4, 3, 3)) + output = conv3d(input_data, weight) + print(output) +  + + [[[[[0 0 0 ... 0 0 0] + [0 0 0 ... 0 0 0] + [0 0 0 ... 0 0 0]] + ... + [0 0 0 ... 0 0 0] + [0 0 0 ... 0 0 0] + [0 0 0 ... 0 0 0]] + ... + [[0 0 0 ... 0 0 0] + [0 0 0 ... 0 0 0] + [0 0 0 ... 0 0 0]] + ... + [0 0 0 ... 0 0 0] + [0 0 0 ... 0 0 0] + [0 0 0 ... 
0 0 0]]]]] + +### Application in Parameter +The code example as follow: + + import numpy as np + from mindspore import dtype as mstype + from mindspore.common import set_seed + from mindspore.ops import operations as ops + from mindspore import Tensor, Parameter, context + from mindspore.common.initializer import Normal, initializer + + set_seed(1) + + weight1 = Parameter(initializer('Normal', [5, 4], mstype.float32), name="w1") + weight2 = Parameter(initializer(Normal(0.2), [5, 4], mstype.float32), name="w2") + input_data = Tensor(np.arange(20).reshape(5, 4), dtype=mstype.float32) + net = ops.Add() + output = net(input_data, weight1) + output = net(output, weight2) + print(output) + +  + + [[-0.3305102 1.0412874 2.0412874 3.0412874] + [ 4.0412874 4.9479127 5.9479127 6.9479127] + [ 7.947912 9.063009 10.063009 11.063009 ] + [12.063009 13.536987 14.536987 14.857441 ] + [15.751231 17.073082 17.808317 19.364822 ]] -- Gitee From a61ac5a82e0b2530efee27aa034fdff1a950d156 Mon Sep 17 00:00:00 2001 From: 7aY <389354380@qq.com> Date: Wed, 2 Jun 2021 01:00:26 +0800 Subject: [PATCH 2/2] modify translation 1 --- .../source_en/initializer.md | 393 ++++++++++++------ 1 file changed, 257 insertions(+), 136 deletions(-) diff --git a/docs/programming_guide/source_en/initializer.md b/docs/programming_guide/source_en/initializer.md index 420faa1258..44f792681a 100644 --- a/docs/programming_guide/source_en/initializer.md +++ b/docs/programming_guide/source_en/initializer.md @@ -1,151 +1,268 @@ # Initialization of Network Parameters - -## summarize - +## Overview MindSpore provides a weight initialization module, which allows users to initialize network parameters by encapsulating operators and initializer methods to call strings, initializer subclasses, or custom Tensors. The Initializer class is the basic data structure used for initialization in MindSpore. 
Its subclasses contain several different types of data distribution (Zero, One, XavierUniform, Heuniform, Henormal, Constant, Uniform,Normal,TruncatedNormal).The following is a detailed description of The encapsulation operator and the initializer method.
+## Parameter initialization using wrapper operator

-## Use packing operator to initialize parameters
+MindSpore provides multiple parameter initialization methods and encapsulates the function of parameter initialization in some operators. This section will introduce the method of initializing the parameters of the operator with the parameter initialization function. Taking the `Conv2d` operator as an example, it introduces the initialization of parameters in the network by string, `Initializer` subclass and user-defined `Tensor`. The following code takes the `Initializer` subclass `Normal` as an example, which can be replaced with any of the subclasses of `Initializer` in the code examples.

-MindSpore provide multiple parameter initialization methods and encapsulating the function of parameter initialization in some operators.This section will introduce the method of initializing the parameters of the operator with parameter initialization function.In the case of the operators of `Conv2d`,there introduces the initialization of parameters in the network by string, `Initializer` subclass and user-defined `Tensor`. The following code take the subclasses of `Initializer`:`Normal` for instance and can be replaced with any of the subclasses of `Initializer` in the code examples.
- 
### String
-If you use string to initialize network parameters,the content of the string needs to be consistent with the name of the `Initializer` subclass. Initialization using string mode will use the default parameters in the `Initializer` subclass. For example, using the string `Normal` is equivalent to using the subclass of Initializer. 
Class `Normal()`, the code sample is as follows: - import numpy as np - import mindspore.nn as nn - from mindspore import Tensor - from mindspore.common import set_seed +Using a string to initialize network parameters,the content of the string needs to be consistent with the name of the `Initializer` subclass. Initialization using string mode will use the default parameters in the `Initializer` subclass. For example, using the string `Normal` is equivalent to using the subclass of Initializer. Class of `Normal()`, the code sample is as follows: + +```python +import numpy as np +import mindspore.nn as nn +from mindspore import Tensor +from mindspore.common import set_seed - set_seed(1) +set_seed(1) - input_data = Tensor(np.ones([1, 3, 16, 50], dtype=np.float32)) - net = nn.Conv2d(3, 64, 3, weight_init='Normal') - output = net(input_data) - print(output) -  +input_data = Tensor(np.ones([1, 3, 16, 50], dtype=np.float32)) +net = nn.Conv2d(3, 64, 3, weight_init='Normal') +output = net(input_data) +print(output) +``` - [[[[ 3.10382620e-02 4.38603461e-02 4.38603461e-02 ... 4.38603461e-02 +```python +[[[[ 3.10382620e-02 4.38603461e-02 4.38603461e-02 ... 4.38603461e-02 4.38603461e-02 1.38719045e-02] - [ 3.26051228e-02 3.54298912e-02 3.54298912e-02 ... 3.54298912e-02 + [ 3.26051228e-02 3.54298912e-02 3.54298912e-02 ... 3.54298912e-02 3.54298912e-02 -5.54019120e-03] - [ 3.26051228e-02 3.54298912e-02 3.54298912e-02 ... 3.54298912e-02 + [ 3.26051228e-02 3.54298912e-02 3.54298912e-02 ... 3.54298912e-02 3.54298912e-02 -5.54019120e-03] - - ... - - [[ 3.98553275e-02 -1.35465711e-03 -1.35465711e-03 ... -1.35465711e-03 - -1.35465711e-03 -1.00310734e-02] - [ 4.38403059e-03 -3.60766202e-02 -3.60766202e-02 ... -3.60766202e-02 - -3.60766202e-02 -2.95619294e-02] - [ 4.38403059e-03 -3.60766202e-02 -3.60766202e-02 ... -3.60766202e-02 - -3.60766202e-02 -2.95619294e-02] - ... - [ 4.38403059e-03 -3.60766202e-02 -3.60766202e-02 ... 
-3.60766202e-02 - -3.60766202e-02 -2.95619294e-02] - [ 4.38403059e-03 -3.60766202e-02 -3.60766202e-02 ... -3.60766202e-02 - -3.60766202e-02 -2.95619294e-02] - [ 1.33139016e-02 6.74417242e-05 6.74417242e-05 ... 6.74417242e-05 + ... + [ 3.26051228e-02 3.54298912e-02 3.54298912e-02 ... 3.54298912e-02 + 3.54298912e-02 -5.54019120e-03] + [ 3.26051228e-02 3.54298912e-02 3.54298912e-02 ... 3.54298912e-02 + 3.54298912e-02 -5.54019120e-03] + [ 9.66199022e-03 1.24104535e-02 1.24104535e-02 ... 1.24104535e-02 + 1.24104535e-02 -1.38977719e-02]] + + ... + + [[ 3.98553275e-02 -1.35465711e-03 -1.35465711e-03 ... -1.35465711e-03 + -1.35465711e-03 -1.00310734e-02] + [ 4.38403059e-03 -3.60766202e-02 -3.60766202e-02 ... -3.60766202e-02 + -3.60766202e-02 -2.95619294e-02] + [ 4.38403059e-03 -3.60766202e-02 -3.60766202e-02 ... -3.60766202e-02 + -3.60766202e-02 -2.95619294e-02] + ... + [ 4.38403059e-03 -3.60766202e-02 -3.60766202e-02 ... -3.60766202e-02 + -3.60766202e-02 -2.95619294e-02] + [ 4.38403059e-03 -3.60766202e-02 -3.60766202e-02 ... -3.60766202e-02 + -3.60766202e-02 -2.95619294e-02] + [ 1.33139016e-02 6.74417242e-05 6.74417242e-05 ... 6.74417242e-05 6.74417242e-05 -2.27325838e-02]]]] +``` +### Initializer subclass -### subclass of Initializer -`Initializer` subclass is used to initialize network parameters, which is similar to the effect of using string to initialize parameters. The difference is that using string to initialize parameters is the default parameter of using the `Initializer` subclass. If you want to use the parameters in the `Initializer` subclass,The `Initializer` subclass must be used to initialize the parameters. Take `Normal(0.2)` as an example. The code sample is as follows: +`Initializer` subclass is used to initialize network parameters, which is similar to the effect of using string to initialize parameters. The difference is that using string to initialize parameters is the default parameter of using the `Initializer` subclass. 
If you want to use the parameters in the `Initializer` subclass,The subclass of `Initializer` must be used to initialize the parameters. Take `Normal(0.2)` as an example. The code sample is as follows: - import numpy as np - import mindspore.nn as nn - from mindspore import Tensor - from mindspore.common import set_seed - from mindspore.common.initializer import Normal +```python +import numpy as np +import mindspore.nn as nn +from mindspore import Tensor +from mindspore.common import set_seed +from mindspore.common.initializer import Normal - set_seed(1) +set_seed(1) - input_data = Tensor(np.ones([1, 3, 16, 50], dtype=np.float32)) - net = nn.Conv2d(3, 64, 3, weight_init=Normal(0.2)) - output = net(input_data) - print(output) -  +input_data = Tensor(np.ones([1, 3, 16, 50], dtype=np.float32)) +net = nn.Conv2d(3, 64, 3, weight_init=Normal(0.2)) +output = net(input_data) +print(output) +``` - [[[[ 6.2076533e-01 8.7720710e-01 8.7720710e-01 ... 8.7720710e-01 +```python +[[[[ 6.2076533e-01 8.7720710e-01 8.7720710e-01 ... 8.7720710e-01 8.7720710e-01 2.7743810e-01] - [ 6.5210247e-01 7.0859784e-01 7.0859784e-01 ... 7.0859784e-01 + [ 6.5210247e-01 7.0859784e-01 7.0859784e-01 ... 7.0859784e-01 7.0859784e-01 -1.1080378e-01] - [ 6.5210247e-01 7.0859784e-01 7.0859784e-01 ... 7.0859784e-01 + [ 6.5210247e-01 7.0859784e-01 7.0859784e-01 ... 7.0859784e-01 7.0859784e-01 -1.1080378e-01] - ... - [ 6.5210247e-01 7.0859784e-01 7.0859784e-01 ... 7.0859784e-01 - 7.0859784e-01 -1.1080378e-01] - [ 6.5210247e-01 7.0859784e-01 7.0859784e-01 ... 7.0859784e-01 - 7.0859784e-01 -1.1080378e-01] - [ 1.9323981e-01 2.4820906e-01 2.4820906e-01 ... 2.4820906e-01 - 2.4820906e-01 -2.7795550e-01] - [[ 7.9710668e-01 -2.7093157e-02 -2.7093157e-02 ... -2.7093157e-02 - -2.7093157e-02 -2.0062150e-01] - [ 8.7680638e-02 -7.2153252e-01 -7.2153252e-01 ... -7.2153252e-01 - -7.2153252e-01 -5.9123868e-01] - [ 8.7680638e-02 -7.2153252e-01 -7.2153252e-01 ... -7.2153252e-01 - -7.2153252e-01 -5.9123868e-01] - ... 
- [ 8.7680638e-02 -7.2153252e-01 -7.2153252e-01 ... -7.2153252e-01 - -7.2153252e-01 -5.9123868e-01] - [ 8.7680638e-02 -7.2153252e-01 -7.2153252e-01 ... -7.2153252e-01 - -7.2153252e-01 -5.9123868e-01] - [ 2.6627803e-01 1.3488382e-03 1.3488382e-03 ... 1.3488382e-03 + ... + [ 6.5210247e-01 7.0859784e-01 7.0859784e-01 ... 7.0859784e-01 + 7.0859784e-01 -1.1080378e-01] + [ 6.5210247e-01 7.0859784e-01 7.0859784e-01 ... 7.0859784e-01 + 7.0859784e-01 -1.1080378e-01] + [ 1.9323981e-01 2.4820906e-01 2.4820906e-01 ... 2.4820906e-01 + 2.4820906e-01 -2.7795550e-01]] + + ... + + [[ 7.9710668e-01 -2.7093157e-02 -2.7093157e-02 ... -2.7093157e-02 + -2.7093157e-02 -2.0062150e-01] + [ 8.7680638e-02 -7.2153252e-01 -7.2153252e-01 ... -7.2153252e-01 + -7.2153252e-01 -5.9123868e-01] + [ 8.7680638e-02 -7.2153252e-01 -7.2153252e-01 ... -7.2153252e-01 + -7.2153252e-01 -5.9123868e-01] + ... + [ 8.7680638e-02 -7.2153252e-01 -7.2153252e-01 ... -7.2153252e-01 + -7.2153252e-01 -5.9123868e-01] + [ 8.7680638e-02 -7.2153252e-01 -7.2153252e-01 ... -7.2153252e-01 + -7.2153252e-01 -5.9123868e-01] + [ 2.6627803e-01 1.3488382e-03 1.3488382e-03 ... 1.3488382e-03 1.3488382e-03 -4.5465171e-01]]]] +``` + +### Customized Tensor -### User-defined Tensor In addition to the above two initialization methods, when the network wants to use data types not available in MindSpore to initialize the parameters, users can customize Tensor to initialize the parameters. The code sample is as follows: - import numpy as np - import mindspore.nn as nn - from mindspore import Tensor - from mindspore import dtype as mstype +```python +import numpy as np +import mindspore.nn as nn +from mindspore import Tensor +from mindspore import dtype as mstype + +weight = Tensor(np.ones([64, 3, 3, 3]), dtype=mstype.float32) +input_data = Tensor(np.ones([1, 3, 16, 50], dtype=np.float32)) +net = nn.Conv2d(3, 64, 3, weight_init=weight) +output = net(input_data) +print(output) +``` + +```python +[[[[12. 18. 18. ... 18. 18. 12.] + [18. 27. 27. 
... 27. 27. 18.]
+   [18. 27. 27. ... 27. 27. 18.]
+   ...
+   [18. 27. 27. ... 27. 27. 18.]
+   [18. 27. 27. ... 27. 27. 18.]
+   [12. 18. 18. ... 18. 18. 12.]]
+
+  ...
+
+  [[12. 18. 18. ... 18. 18. 12.]
+   [18. 27. 27. ... 27. 27. 18.]
+   [18. 27. 27. ... 27. 27. 18.]
+   ...
+   [18. 27. 27. ... 27. 27. 18.]
+   [18. 27. 27. ... 27. 27. 18.]
+   [12. 18. 18. ... 18. 18. 12.]]]]
+```
+
+## Initializing parameters using the initializer method
+
+The above code samples show how parameters are initialized in the network. For example, the nn layer is used to encapsulate a `Conv2D` operator in the network, and the parameter `weight_init` is passed into the `Conv2D` operator as the data type to be initialized. The operator will be initialized by calling the Parameter class. Then the initializer method encapsulated in the Parameter class is called to initialize the parameters. However, some operators do not encapsulate the function of parameter initialization internally like `Conv2D` does. For example, the weights of the `Conv3D` operator are passed to the `Conv3D` operator as parameters. In this case, it is necessary to manually define the initialization of the weights.
+When initializing parameters, you can use the initializer method to call different data types in the Initializer subclass to initialize the parameters and thus generate different types of data.
+When using initializer for parameter initialization, the parameters passed in are 'init', 'shape', and 'dtype':
+
+'init': supports passing in Tensor, str, and subclasses of Initializer.
+
+'shape': supports passing list, tuple, int.
+
+'dtype': supports passing mindspore.dtype.
+
+### init parameter is Tensor
+
+Sample code is as follows. 
+ +```python +import numpy as np +from mindspore import Tensor +from mindspore import dtype as mstype +from mindspore.common import set_seed +from mindspore.common.initializer import initializer +from mindspore.ops.operations import nn_ops as nps + +set_seed(1) + +input_data = Tensor(np.ones([16, 3, 10, 32, 32]), dtype=mstype.float32) +weight_init = Tensor(np.ones([32, 3, 4, 3, 3]), dtype=mstype.float32) +weight = initializer(weight_init, shape=[32, 3, 4, 3, 3]) +conv3d = nps.Conv3D(out_channel=32, kernel_size=(4, 3, 3)) +output = conv3d(input_data, weight) +print(output) +``` + +The output is as follows: + +```python +[[[[[108 108 108 ... 108 108 108] + [108 108 108 ... 108 108 108] + [108 108 108 ... 108 108 108] + ... + [108 108 108 ... 108 108 108] + [108 108 108 ... 108 108 108] + [108 108 108 ... 108 108 108]] + ... + [[108 108 108 ... 108 108 108] + [108 108 108 ... 108 108 108] + [108 108 108 ... 108 108 108] + ... + [108 108 108 ... 108 108 108] + [108 108 108 ... 108 108 108] + [108 108 108 ... 108 108 108]]]]] +``` - weight = Tensor(np.ones([64, 3, 3, 3]), dtype=mstype.float32) - input_data = Tensor(np.ones([1, 3, 16, 50], dtype=np.float32)) - net = nn.Conv2d(3, 64, 3, weight_init=weight) - output = net(input_data) - print(output) -  +### init parameter is str - [[[[12. 18. 18. ... 18. 18. 12.] - [18. 27. 27. ... 27. 27. 18.] - ... - [18. 27. 27. ... 27. 27. 18.] - [18. 27. 27. ... 27. 27. 18.] - [12. 18. 18. ... 18. 18. 12.]] +Sample code is as follows. - ... +```python +import numpy as np +from mindspore import Tensor +from mindspore import dtype as mstype +from mindspore.common import set_seed +from mindspore.common.initializer import initializer +from mindspore.ops.operations import nn_ops as nps - [[12. 18. 18. ... 18. 18. 12.] - [18. 27. 27. ... 27. 27. 18.] - [18. 27. 27. ... 27. 27. 18.] 
+set_seed(1) + +input_data = Tensor(np.ones([16, 3, 10, 32, 32]), dtype=mstype.float32) +weight = initializer('Normal', shape=[32, 3, 4, 3, 3], dtype=mstype.float32) +conv3d = nps.Conv3D(out_channel=32, kernel_size=(4, 3, 3)) +output = conv3d(input_data, weight) +print(output) +``` + +The output is as follows: + +```python +[[[[[0 0 0 ... 0 0 0] + [0 0 0 ... 0 0 0] + [0 0 0 ... 0 0 0]] + ... + [0 0 0 ... 0 0 0] + [0 0 0 ... 0 0 0] + [0 0 0 ... 0 0 0]] ... - [18. 27. 27. ... 27. 27. 18.] - [18. 27. 27. ... 27. 27. 18.] - [12. 18. 18. ... 18. 18. 12.]]]] + [[0 0 0 ... 0 0 0] + [0 0 0 ... 0 0 0] + [0 0 0 ... 0 0 0]] + ... + [0 0 0 ... 0 0 0] + [0 0 0 ... 0 0 0] + [0 0 0 ... 0 0 0]]]]] +``` ### The init parameter is a subclass of Initializer -The code example as follow: - import numpy as np - from mindspore import Tensor - from mindspore import dtype as mstype - from mindspore.common import set_seed - from mindspore.ops.operations import nn_ops as nps - from mindspore.common.initializer import Normal, initializer +Sample code is as follows. - set_seed(1) +```python +import numpy as np +from mindspore import Tensor +from mindspore import dtype as mstype +from mindspore.common import set_seed +from mindspore.ops.operations import nn_ops as nps +from mindspore.common.initializer import Normal, initializer - input_data = Tensor(np.ones([16, 3, 10, 32, 32]), dtype=mstype.float32) - weight = initializer(Normal(0.2), shape=[32, 3, 4, 3, 3], dtype=mstype.float32) - conv3d = nps.Conv3D(out_channel=32, kernel_size=(4, 3, 3)) - output = conv3d(input_data, weight) - print(output) -  +set_seed(1) - [[[[[0 0 0 ... 0 0 0] +input_data = Tensor(np.ones([16, 3, 10, 32, 32]), dtype=mstype.float32) +weight = initializer(Normal(0.2), shape=[32, 3, 4, 3, 3], dtype=mstype.float32) +conv3d = nps.Conv3D(out_channel=32, kernel_size=(4, 3, 3)) +output = conv3d(input_data, weight) +print(output) +``` + +```python +[[[[[0 0 0 ... 0 0 0] [0 0 0 ... 0 0 0] [0 0 0 ... 0 0 0]] ... 
@@ -153,38 +270,42 @@ The code example as follow: [0 0 0 ... 0 0 0] [0 0 0 ... 0 0 0]] ... - [[0 0 0 ... 0 0 0] + [[0 0 0 ... 0 0 0] [0 0 0 ... 0 0 0] [0 0 0 ... 0 0 0]] ... [0 0 0 ... 0 0 0] [0 0 0 ... 0 0 0] [0 0 0 ... 0 0 0]]]]] +``` ### Application in Parameter -The code example as follow: - - import numpy as np - from mindspore import dtype as mstype - from mindspore.common import set_seed - from mindspore.ops import operations as ops - from mindspore import Tensor, Parameter, context - from mindspore.common.initializer import Normal, initializer - - set_seed(1) - - weight1 = Parameter(initializer('Normal', [5, 4], mstype.float32), name="w1") - weight2 = Parameter(initializer(Normal(0.2), [5, 4], mstype.float32), name="w2") - input_data = Tensor(np.arange(20).reshape(5, 4), dtype=mstype.float32) - net = ops.Add() - output = net(input_data, weight1) - output = net(output, weight2) - print(output) - -  - - [[-0.3305102 1.0412874 2.0412874 3.0412874] - [ 4.0412874 4.9479127 5.9479127 6.9479127] - [ 7.947912 9.063009 10.063009 11.063009 ] - [12.063009 13.536987 14.536987 14.857441 ] - [15.751231 17.073082 17.808317 19.364822 ]] + +The code example is as follows: + +```python +import numpy as np +from mindspore import dtype as mstype +from mindspore.common import set_seed +from mindspore.ops import operations as ops +from mindspore import Tensor, Parameter, context +from mindspore.common.initializer import Normal, initializer + +set_seed(1) + +weight1 = Parameter(initializer('Normal', [5, 4], mstype.float32), name="w1") +weight2 = Parameter(initializer(Normal(0.2), [5, 4], mstype.float32), name="w2") +input_data = Tensor(np.arange(20).reshape(5, 4), dtype=mstype.float32) +net = ops.Add() +output = net(input_data, weight1) +output = net(output, weight2) +print(output) +``` + +```python +[[-0.3305102 1.0412874 2.0412874 3.0412874] + [ 4.0412874 4.9479127 5.9479127 6.9479127] + [ 7.947912 9.063009 10.063009 11.063009 ] + [12.063009 13.536987 14.536987 14.857441 ] + 
[15.751231 17.073082 17.808317 19.364822 ]] +``` \ No newline at end of file -- Gitee