From 99722cba0d2eb2dcca6c857a062eec15d4043f43 Mon Sep 17 00:00:00 2001
From: zhangyi
Date: Mon, 16 May 2022 15:16:00 +0800
Subject: [PATCH] modify the files

---
 docs/lite/docs/source_en/use/nnie.md          |   8 +-
 tutorials/experts/source_en/debug/ms_class.md | 280 ++++++++++++++++++
 .../experts/source_en/debug/op_compilation.md |  16 +-
 tutorials/experts/source_en/debug/pynative.md | 149 ++++++++++
 tutorials/experts/source_en/index.rst         |   2 +
 5 files changed, 437 insertions(+), 18 deletions(-)
 create mode 100644 tutorials/experts/source_en/debug/ms_class.md
 create mode 100644 tutorials/experts/source_en/debug/pynative.md

diff --git a/docs/lite/docs/source_en/use/nnie.md b/docs/lite/docs/source_en/use/nnie.md
index 6d5db2b7b8..508d03721f 100644
--- a/docs/lite/docs/source_en/use/nnie.md
+++ b/docs/lite/docs/source_en/use/nnie.md
@@ -301,11 +301,11 @@ During model conversion, the `nnie.cfg` file declared by the NNIE_CONFIG_PATH en

You only need to provide image_list whose quantity is the same as that of the model inputs. If the model contains the ROI pooling or PSROI pooling layer, you need to provide roi_coordinate_file, whose quantity and sequence correspond to those of the ROI pooling or PSROI pooling layers in the .prototxt file.

-### Suffix _cpu of the Node Name in the .prototxt File
+### Suffix _cpu of the Node Name in the prototxt File

In the .prototxt file, you can add _cpu to the end of a node name to declare a CPU custom operator. The _cpu suffix is ignored by MindSpore Lite and is not supported. If you want to redefine the implementation of an existing operator or add an operator, you can register the operator in custom operator mode.

-### Custom Operator in the .prototxt File
+### Custom Operator in the prototxt File

In the SVP tool chain, a custom layer is declared in the .prototxt file so that inference is performed by segment and users implement the CPU code. In MindSpore Lite, you need to add the op_type attribute to the custom layer and register the online inference code in custom operator mode.

@@ -331,7 +331,7 @@ During model conversion, the `nnie.cfg` file declared by the NNIE_CONFIG_PATH en

In this example, a custom operator of the MY_CUSTOM type is defined. During inference, you need to register a custom operator of the MY_CUSTOM type.

-### Suffix _report of the Top Domain in the .prototxt File
+### Suffix _report of the Top Domain in the prototxt File

When converting the NNIE model, MindSpore Lite fuses most operators into the binary file for NNIE running, so users cannot view the output of the intermediate operators. In this case, you can add the _report suffix to the top domain; during graph conversion, the output of the intermediate operator is added to the output of the fused layer. If the operator already has output (that is, it is not fused), the output remains unchanged.

@@ -372,4 +372,4 @@ During model conversion, the `nnie.cfg` file declared by the NNIE_CONFIG_PATH en

### Segmentation Mechanism and Restrictions

Due to the restrictions on the operators supported by the NNIE chip, if a model contains operators that the NNIE chip does not support, the model needs to be divided into supported layers and unsupported layers.
- The chip on the board supports a maximum of eight supported layers. If the number of supported layers after segmentation is greater than 8, the model cannot run. You can observe the custom operators (whose attributes contain type:NNIE) by using Netron to obtain the number of supported layers after conversion.
+ The chip on the board supports a maximum of eight supported layers. If the number of supported layers after segmentation is greater than 8, the model cannot run. You can observe the custom operators (whose attributes contain type:NNIE) by using Netron to obtain the number of supported layers after conversion.
\ No newline at end of file
diff --git a/tutorials/experts/source_en/debug/ms_class.md b/tutorials/experts/source_en/debug/ms_class.md
new file mode 100644
index 0000000000..69416342a0
--- /dev/null
+++ b/tutorials/experts/source_en/debug/ms_class.md
@@ -0,0 +1,280 @@
# Calling the Custom Class

## Overview

In static graph mode, by decorating a custom class with ms_class, users can create and call instances of this custom class and obtain its attributes and methods.

ms_class is applied to static graph mode and expands the scope of syntax supported by static graph compilation. In dynamic graph mode, that is, PyNative mode, using ms_class does not affect the execution logic of PyNative mode.

This document describes how to use ms_class so that you can use its functions more effectively.

## ms_class Decorates a Custom Class

After decorating a custom class with @ms_class, you can create and call instances of the custom class and obtain their attributes and methods.

```python
import numpy as np
import mindspore.nn as nn
from mindspore import set_context, GRAPH_MODE, Tensor, ms_class

@ms_class
class InnerNet:
    value = Tensor(np.array([1, 2, 3]))

class Net(nn.Cell):
    def construct(self):
        return InnerNet().value

set_context(mode=GRAPH_MODE)
net = Net()
out = net()
print(out)
```

ms_class supports nesting of custom classes, as well as scenarios where custom classes and nn.Cell are nested. Note that with class inheritance, if the parent class uses ms_class, the subclass also has the ms_class capability.

```python
import numpy as np
import mindspore.nn as nn
from mindspore import set_context, GRAPH_MODE, Tensor, ms_class

@ms_class
class Inner:
    def __init__(self):
        self.value = Tensor(np.array([1, 2, 3]))

@ms_class
class InnerNet:
    def __init__(self):
        self.inner = Inner()

class Net(nn.Cell):
    def __init__(self):
        super(Net, self).__init__()
        self.inner_net = InnerNet()

    def construct(self):
        out = self.inner_net.inner.value
        return out

set_context(mode=GRAPH_MODE)
net = Net()
out = net()
print(out)
```

ms_class only supports decorating custom classes, not nn.Cell or non-class types. If you execute the following use case, an error will appear.

```python
import mindspore.nn as nn
from mindspore import set_context, GRAPH_MODE, Tensor, ms_class

@ms_class
class Net(nn.Cell):
    def construct(self, x):
        return x

set_context(mode=GRAPH_MODE)
x = Tensor(1)
net = Net()
net(x)
```

The error information is as follows:

TypeError: ms_class is used for user-defined classes and cannot be used for nn.Cell: Net<>.

Decorating a non-class type is not supported either:

```python
from mindspore import ms_class

@ms_class
def func(x, y):
    return x + y

func(1, 2)
```

The error information is as follows:

TypeError: Decorator ms_class can only be used for class type, but got .

## Obtaining the Attributes and Methods of the Custom Class

Calling a class's attributes through the class name is supported, while calling a class's methods through the class name is not. For an instance of a class, calling both its attributes and methods is supported.

```python
import mindspore.nn as nn
import mindspore.common.dtype as mstype
from mindspore import set_context, GRAPH_MODE, Tensor, ms_class

@ms_class
class InnerNet:
    def __init__(self, val):
        self.number = val

    def act(self, x, y):
        return self.number * (x + y)

class Net(nn.Cell):
    def __init__(self):
        super(Net, self).__init__()
        self.inner_net = InnerNet(2)

    def construct(self, x, y):
        return self.inner_net.number + self.inner_net.act(x, y)

set_context(mode=GRAPH_MODE)
x = Tensor(2, dtype=mstype.int32)
y = Tensor(3, dtype=mstype.int32)
net = Net()
out = net(x, y)
print(out)
```
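As a complement, a class attribute can also be read directly through the class name, without creating an instance first. The following is a minimal sketch, assuming the same behavior as the first example in this document (the class and attribute names are reused from that example):

```python
import numpy as np
import mindspore.nn as nn
from mindspore import set_context, GRAPH_MODE, Tensor, ms_class

@ms_class
class InnerNet:
    value = Tensor(np.array([1, 2, 3]))

class Net(nn.Cell):
    def construct(self):
        # Read the class attribute through the class name,
        # without instantiating InnerNet.
        return InnerNet.value

set_context(mode=GRAPH_MODE)
net = Net()
out = net()
print(out)
```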
Calling private attributes and magic methods is not supported, and the methods called must be within the scope of syntax supported by static graph compilation. If you execute the following use case, an error will appear.

```python
import numpy as np
import mindspore.nn as nn
from mindspore import set_context, GRAPH_MODE, Tensor, ms_class

@ms_class
class InnerNet:
    def __init__(self):
        self.value = Tensor(np.array([1, 2, 3]))

class Net(nn.Cell):
    def __init__(self):
        super(Net, self).__init__()
        self.inner_net = InnerNet()

    def construct(self):
        out = self.inner_net.__str__()
        return out

set_context(mode=GRAPH_MODE)
net = Net()
out = net()
```

The error information is as follows:

RuntimeError: `__str__` is a private variable or magic method, which is not supported.

## Creating an Instance of the Custom Class

In static graph mode, when creating an instance of the custom class in `construct`/`ms_function`, the parameters are required to be constants.

```python
import numpy as np
import mindspore.nn as nn
from mindspore import set_context, GRAPH_MODE, Tensor, ms_class

@ms_class
class InnerNet:
    def __init__(self, val):
        self.number = val + 3

class Net(nn.Cell):
    def construct(self):
        net = InnerNet(2)
        return net.number

set_context(mode=GRAPH_MODE)
net = Net()
out = net()
print(out)
```

For other scenarios, when creating an instance of a custom class, there is no restriction that the parameters must be constants. For example, the following use case:

```python
import numpy as np
import mindspore.nn as nn
import mindspore.common.dtype as mstype
from mindspore import set_context, GRAPH_MODE, Tensor, ms_class

@ms_class
class InnerNet:
    def __init__(self, val):
        self.number = val + 3

class Net(nn.Cell):
    def __init__(self, val):
        super(Net, self).__init__()
        self.inner = InnerNet(val)

    def construct(self):
        return self.inner.number

set_context(mode=GRAPH_MODE)
x = Tensor(2, dtype=mstype.int32)
net = Net(x)
out = net()
print(out)
```
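By contrast, creating the instance inside `construct` with a non-constant argument violates the constant requirement described above. The following is a hypothetical misuse sketch, not output from the original document: assuming that restriction holds, passing the network input x here can be expected to cause a compilation error.

```python
import mindspore.nn as nn
import mindspore.common.dtype as mstype
from mindspore import set_context, GRAPH_MODE, Tensor, ms_class

@ms_class
class InnerNet:
    def __init__(self, val):
        self.number = val + 3

class Net(nn.Cell):
    def construct(self, x):
        # x is a network input rather than a compile-time constant, so
        # instantiating InnerNet with it inside construct violates the
        # constant requirement and is expected to fail to compile.
        net = InnerNet(x)
        return net.number

set_context(mode=GRAPH_MODE)
x = Tensor(2, dtype=mstype.int32)
net = Net()
out = net(x)
```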
## Calling the Instance of the Custom Class

When calling an instance of a custom class, the `__call__` method of that class is called.

```python
import numpy as np
import mindspore.nn as nn
import mindspore.common.dtype as mstype
from mindspore import set_context, GRAPH_MODE, Tensor, ms_class

@ms_class
class InnerNet:
    def __init__(self, number):
        self.number = number

    def __call__(self, x, y):
        return self.number * (x + y)

class Net(nn.Cell):
    def construct(self, x, y):
        net = InnerNet(2)
        out = net(x, y)
        return out

set_context(mode=GRAPH_MODE)
x = Tensor(2, dtype=mstype.int32)
y = Tensor(3, dtype=mstype.int32)
net = Net()
out = net(x, y)
print(out)
```

If the class does not define the `__call__` method, an error message will be reported. If you execute the following use case, an error will appear.

```python
import numpy as np
import mindspore.nn as nn
import mindspore.common.dtype as mstype
from mindspore import set_context, GRAPH_MODE, Tensor, ms_class

@ms_class
class InnerNet:
    def __init__(self, number):
        self.number = number

class Net(nn.Cell):
    def construct(self, x, y):
        net = InnerNet(2)
        out = net(x, y)
        return out

set_context(mode=GRAPH_MODE)
x = Tensor(2, dtype=mstype.int32)
y = Tensor(3, dtype=mstype.int32)
net = Net()
out = net(x, y)
print(out)
```

The error information is as follows:

RuntimeError: MsClassObject: 'InnerNet' has no `__call__` function, please check the code.
\ No newline at end of file
diff --git a/tutorials/experts/source_en/debug/op_compilation.md b/tutorials/experts/source_en/debug/op_compilation.md
index 62612c468f..b49b85601c 100644
--- a/tutorials/experts/source_en/debug/op_compilation.md
+++ b/tutorials/experts/source_en/debug/op_compilation.md
@@ -43,26 +43,14 @@ if __name__ == "__main__":
 ```

-The network model consists of a single operator `Square`, and the output is a square value of the input.
+The network model consists of a single operator `Square`, and the output is the square of the input. The execution result is as follows:

 ```text
 x: [1. 4. 9.]
 output: [1. 16. 81.]
 ```

-The `rank_0/kernel_meta` folder is generated in the directory where the execution is performed, which contains the `.o`, `.json`, `.info` and other files.
-
-```text
-└─src
-    ├── test_square.py
-    └── rank_0
-        └──kernel_meta
-            ├── square_12484080525657478220_2.info
-            ├── square_12484080525657478220_2.json
-            └── square_12484080525657478220_2.o
-```
-
-For an operator:
+In the current execution directory, a `rank_0/kernel_meta` folder is generated containing the Square operator's `.o` file, `.json` file, `.info` file, and other files. For an operator:

 The `.o` file is an executable file generated by MindSpore for the operator during network model execution.

diff --git a/tutorials/experts/source_en/debug/pynative.md b/tutorials/experts/source_en/debug/pynative.md
new file mode 100644
index 0000000000..357b393d0a
--- /dev/null
+++ b/tutorials/experts/source_en/debug/pynative.md
@@ -0,0 +1,149 @@
# Applying PyNative Mode

In PyNative mode, MindSpore supports executing single operators, ordinary functions, and networks, as well as computing gradients separately. The following sample code introduces how to use these operations and what to pay attention to when using them.
## Executing Operations

First, we import the dependencies and set the run mode to PyNative mode:

```python
import numpy as np
import mindspore.ops as ops
import mindspore.nn as nn
from mindspore import Tensor, set_context, PYNATIVE_MODE, dtype as mstype

set_context(mode=PYNATIVE_MODE)
```

### Executing Single Operators

The following is sample code for executing the Add operator [mindspore.ops.Add](https://mindspore.cn/docs/zh-CN/master/api_python/ops/mindspore.ops.Add.html#mindspore.ops.Add):

```python
add = ops.Add()
x = Tensor(np.array([1, 2]).astype(np.float32))
y = Tensor(np.array([3, 5]).astype(np.float32))
z = add(x, y)
print("x:", x.asnumpy(), "\ny:", y.asnumpy(), "\nz:", z.asnumpy())
```

### Executing Functions

Execute the custom function `add_func`. The sample code is as follows:

```python
add = ops.Add()

def add_func(x, y):
    z = add(x, y)
    z = add(z, x)
    return z

x = Tensor(np.array([1, 2]).astype(np.float32))
y = Tensor(np.array([3, 5]).astype(np.float32))
z = add_func(x, y)
print("x:", x.asnumpy(), "\ny:", y.asnumpy(), "\nz:", z.asnumpy())
```

### Executing Networks

Execute the custom network `Net`, which defines the network structure in its `construct` method. The sample code is as follows:

```python
class Net(nn.Cell):
    def __init__(self):
        super(Net, self).__init__()
        self.mul = ops.Mul()

    def construct(self, x, y):
        return self.mul(x, y)

net = Net()
x = Tensor(np.array([1.0, 2.0, 3.0]).astype(np.float32))
y = Tensor(np.array([4.0, 5.0, 6.0]).astype(np.float32))
z = net(x, y)

print("x:", x.asnumpy(), "\ny:", y.asnumpy(), "\nz:", z.asnumpy())
```
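PyNative mode also supports computing gradients separately, as mentioned in the introduction. The following is a minimal sketch using `ops.GradOperation`, reusing the `add_func` function and the imports defined above; the gradient values in the comments are our own derivation, not output from the original document.

```python
# Compute the gradients of add_func with respect to all of its inputs.
grad_all = ops.GradOperation(get_all=True)

x = Tensor(np.array([1, 2]).astype(np.float32))
y = Tensor(np.array([3, 5]).astype(np.float32))
# add_func(x, y) = (x + y) + x = 2x + y, so the gradient with respect
# to x is expected to be 2 and the gradient with respect to y to be 1.
grads = grad_all(add_func)(x, y)
print("grads:", grads)
```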
## Customizing **bprop** Function

Users can customize the backpropagation (computation) function of an nn.Cell object, thereby controlling the gradient computation process of the nn.Cell object and locating gradient problems.

To use a custom bprop function, add a user-defined bprop function to the defined nn.Cell object. The training process uses the user-defined bprop function to generate the backward graph.

The sample code is as follows:

```python
set_context(mode=PYNATIVE_MODE)

class Net(nn.Cell):
    def construct(self, x, y):
        z = x * y
        z = z * y
        return z

    def bprop(self, x, y, out, dout):
        x_dout = x + y
        y_dout = x * y
        return x_dout, y_dout

grad_all = ops.GradOperation(get_all=True)
output = grad_all(Net())(Tensor(1, mstype.float32), Tensor(2, mstype.float32))
print(output)
```

## Synchronous Execution

In PyNative mode, operators are executed asynchronously on the device by default to improve performance, so when an operator fails, the error message may not be displayed until the program runs to the end. To handle this situation, MindSpore provides the pynative_synchronize setting to control whether operators are executed asynchronously on the device.

You can control whether operators are executed asynchronously by setting the context. When synchronous execution is enabled and an operator fails, you can conveniently locate the erroneous line of code through the complete call stack. The sample code is as follows:

```python
from mindspore import dtype as mstype

# Make operators execute synchronously by setting pynative_synchronize
set_context(mode=PYNATIVE_MODE, pynative_synchronize=True)

class Net(nn.Cell):
    def __init__(self):
        super(Net, self).__init__()
        self.get_next = ops.GetNext([mstype.float32], [(1, 1)], 1, "test")

    def construct(self, x1):
        x = self.get_next()
        x = x + x1
        return x

x1 = np.random.randn(1, 1).astype(np.float32)
net = Net()
output = net(Tensor(x1))
print(output.asnumpy())
```

Output: the operators are now executed synchronously. When an operator fails, you can see the complete call stack and find the erroneous line of code.

```text
Traceback (most recent call last):
  File "test.py", line 24, in <module>
    output = net(Tensor(x1))
  File ".../mindspore/nn/cell.py", line 602, in __call__
    raise err
  File ".../mindspore/nn/cell.py", line 599, in __call__
    output = self._run_construct(cast_inputs, kwargs)
  File ".../mindspore/nn/cell.py", line 429, in _run_construct
    output = self.construct(*cast_inputs, **kwargs)
  File "test.py", line 17, in construct
    x = self.get_next()
  File ".../mindspore/ops/primitive.py", line 294, in __call__
    return _run_op(self, self.name, args)
  File ".../mindspore/common/api.py", line 90, in wrapper
    results = fn(*arg, **kwargs)
  File ".../mindspore/ops/primitive.py", line 754, in _run_op
    output = real_run_op(obj, op_name, args)
RuntimeError: mindspore/ccsrc/plugin/device/gpu/kernel/data/dataset_iterator_kernel.cc:139 Launch] For 'GetNext', gpu Queue(test) Open Failed: 2
```

diff --git a/tutorials/experts/source_en/index.rst b/tutorials/experts/source_en/index.rst
index 6135dee697..96d7c709ce 100644
--- a/tutorials/experts/source_en/index.rst
+++ b/tutorials/experts/source_en/index.rst
@@ -47,9 +47,11 @@ For Experts
    debug/mindir
    debug/dump
    debug/custom_debug
+   debug/ms_class
    debug/op_compilation
    debug/auto_tune
    debug/dataset_autotune
+   debug/pynative

 .. toctree::
    :glob:
-- 
Gitee
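Note that the values returned by this example come from the user-defined bprop rather than from the true derivatives of z = x * y * y: with x=1 and y=2, the true gradients would be y * y = 4 with respect to x and 2 * x * y = 4 with respect to y, while the custom bprop returns x + y = 3 and x * y = 2, confirming that the custom function overrides the default gradient computation. The expected output is roughly as follows (the exact Tensor formatting may vary across versions):

```text
(Tensor(shape=[], dtype=Float32, value= 3), Tensor(shape=[], dtype=Float32, value= 2))
```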