# MindSpore 1.1.1 Release Notes

## MindSpore

### API Change

#### Backwards Incompatible Change

##### Python API

###### `ops.AvgPool`, `ops.MaxPool`, `ops.MaxPoolWithArgmax` change attr name from 'ksize', 'padding' to 'kernel_size', 'pad_mode' ([!11350](https://gitee.com/mindspore/mindspore/pulls/11350))

Previously, the kernel size and pad mode attributes of the pooling ops were named "ksize" and "padding", which was puzzling and inconsistent with the convolution ops, so they have been renamed to "kernel_size" and "pad_mode".
<table>
<tr>
<td> 1.1.0 </td> <td> 1.1.1 </td>
</tr>
<tr>
<td>

```python
>>> from mindspore.ops import operations as P
>>>
>>> avg_pool = P.AvgPool(ksize=2, padding='same')
>>> max_pool = P.MaxPool(ksize=2, padding='same')
>>> max_pool_with_argmax = P.MaxPoolWithArgmax(ksize=2, padding='same')
```

</td>
<td>

```python
>>> from mindspore.ops import operations as P
>>>
>>> avg_pool = P.AvgPool(kernel_size=2, pad_mode='same')
>>> max_pool = P.MaxPool(kernel_size=2, pad_mode='same')
>>> max_pool_with_argmax = P.MaxPoolWithArgmax(kernel_size=2, pad_mode='same')
```

</td>
</tr>
</table>
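As an aside not present in the original notes, the renamed attributes are used exactly like the old ones; a minimal sketch in PyNative mode, assuming a single-channel 4x4 NCHW input:

```python
>>> import numpy as np
>>> import mindspore
>>> from mindspore import Tensor, context
>>> from mindspore.ops import operations as P
>>>
>>> context.set_context(mode=context.PYNATIVE_MODE)
>>> # hypothetical input: one sample, one channel, 4x4 feature map (NCHW)
>>> x = Tensor(np.arange(16).reshape(1, 1, 4, 4), mindspore.float32)
>>> max_pool = P.MaxPool(kernel_size=2, strides=2, pad_mode='same')
>>> print(max_pool(x).shape)
(1, 1, 2, 2)
```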
###### `nn.LinSpace` is removed in favor of `ops.LinSpace`

The interface moves from `mindspore.nn` to `mindspore.ops`, and the start and stop values are now passed as tensors when the operator is called rather than as Python scalars at construction time.

<table>
<tr>
<td> 1.1.0 </td> <td> 1.1.1 </td>
</tr>
<tr>
<td>

```python
>>> from mindspore import nn
>>>
>>> start = 1
>>> stop = 10
>>> num = 5
>>> linspace = nn.LinSpace(start, stop, num)
>>> output = linspace()
```

</td>
<td>

```python
>>> import mindspore
>>> from mindspore import Tensor
>>> from mindspore import ops
>>>
>>> linspace = ops.LinSpace()
>>> start = Tensor(1, mindspore.float32)
>>> stop = Tensor(10, mindspore.float32)
>>> num = 5
>>> output = linspace(start, stop, num)
```

</td>
</tr>
</table>
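For reference (an illustration, not part of the original notes), the five evenly spaced values produced by the `ops.LinSpace` call above are 1.0, 3.25, 5.5, 7.75 and 10.0; the exact print formatting may vary by version:

```python
>>> print(output.shape)
(5,)
>>> print(output)
[ 1.    3.25  5.5   7.75 10.  ]
```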
# MindSpore 1.1.0 Release Notes

## MindSpore

### API Change

#### Backwards Incompatible Change

##### Python API

###### Optimizers add a `target` property to control where sparse operators run

Instead of setting the `primitive_target` attribute on the optimizer's `sparse_opt` primitive, assign the optimizer's `target` property.

<table>
<tr>
<td> 1.0.1 </td> <td> 1.1.0 </td>
</tr>
<tr>
<td>

```python
>>> from mindspore.nn import Adam
>>>
>>> net = LeNet5()
>>> optimizer = Adam(filter(lambda x: x.requires_grad, net.get_parameters()))
>>> optimizer.sparse_opt.add_prim_attr("primitive_target", "CPU")
```

</td>
<td>

```python
>>> from mindspore.nn import Adam
>>>
>>> net = LeNet5()
>>> optimizer = Adam(filter(lambda x: x.requires_grad, net.get_parameters()))
>>> optimizer.target = 'CPU'
```

</td>
</tr>
</table>
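As a small illustrative sketch (not from the original notes), `target` is an ordinary property on the optimizer, so it can be read back as well as assigned:

```python
>>> optimizer.target = 'CPU'   # run the sparse optimizer kernels on CPU
>>> print(optimizer.target)
CPU
```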
###### `mindspore.train.quant.quant.export` is removed; use `mindspore.export` with the `quant_mode` argument

Exporting a quantization aware training network now goes through the unified `export` interface; the file suffix is appended automatically according to `file_format`.

<table>
<tr>
<td> 1.0.1 </td> <td> 1.1.0 </td>
</tr>
<tr>
<td>

```python
>>> from mindspore.train.quant import quant
>>>
>>> network = LeNetQuant()
>>> inputs = Tensor(np.ones([1, 1, 32, 32]), mindspore.float32)
>>> quant.export(network, inputs, file_name="lenet_quant.mindir", file_format='MINDIR')
lenet_quant.mindir
```

</td>
<td>

```python
>>> from mindspore import export
>>>
>>> network = LeNetQuant()
>>> inputs = Tensor(np.ones([1, 1, 32, 32]), mindspore.float32)
>>> export(network, inputs, file_name="lenet_quant", file_format='MINDIR', quant_mode='AUTO')
lenet_quant.mindir
```

</td>
</tr>
</table>
###### `nn.Dense` no longer accepts a string for the `activation` argument

The `activation` argument now takes a `Cell` or `Primitive` instance (for example `nn.ReLU()` or `ops.ReLU()`) instead of a string name.

<table>
<tr>
<td> 1.0.1 </td> <td> 1.1.0 </td>
</tr>
<tr>
<td>

```python
>>> import mindspore.nn as nn
>>>
>>> dense = nn.Dense(1, 1, activation='relu')
```

</td>
<td>

```python
>>> import mindspore.nn as nn
>>> import mindspore.ops as ops
>>>
>>> dense = nn.Dense(1, 1, activation=nn.ReLU())
>>> dense = nn.Dense(1, 1, activation=ops.ReLU())
```

</td>
</tr>
</table>
###### `Tensor.size()` and `Tensor.dim()` methods become the `Tensor.size` and `Tensor.ndim` properties

<table>
<tr>
<td> 1.0.1 </td> <td> 1.1.0 </td>
</tr>
<tr>
<td>

```python
>>> from mindspore import Tensor
>>>
>>> Tensor((1,2,3)).size()
>>> Tensor((1,2,3)).dim()
```

</td>
<td>

```python
>>> from mindspore import Tensor
>>>
>>> Tensor((1,2,3)).size
>>> Tensor((1,2,3)).ndim
```

</td>
</tr>
</table>
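For concreteness (illustrative, not from the original notes), both properties on the example tensor evaluate to plain Python integers:

```python
>>> from mindspore import Tensor
>>>
>>> t = Tensor((1, 2, 3))
>>> t.size    # total number of elements, formerly Tensor.size()
3
>>> t.ndim    # number of dimensions, formerly Tensor.dim()
1
```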
###### `nn.EmbeddingLookup` adds a `sparse` option

<table>
<tr>
<td> 1.0.1 </td> <td> 1.1.0 </td>
</tr>
<tr>
<td>

```python
>>> from mindspore.nn import EmbeddingLookup
>>>
>>> input_indices = Tensor(np.array([[1, 0], [3, 2]]), mindspore.int32)
>>> result = EmbeddingLookup(4,2)(input_indices)
>>> print(result.shape)
(2, 2, 2)
```

</td>
<td>

```python
>>> from mindspore.nn import EmbeddingLookup
>>>
>>> input_indices = Tensor(np.array([[1, 0], [3, 2]]), mindspore.int32)
>>> result = EmbeddingLookup(4, 2, sparse=False)(input_indices)
>>> print(result.shape)
(2, 2, 2)
```

</td>
</tr>
</table>
###### `nn.probability.bijector.PowerTransform` expects a float `power` argument

The `power` argument is now expected to be a float rather than an int.

<table>
<tr>
<td> 1.0.1 </td> <td> 1.1.0 </td>
</tr>
<tr>
<td>

```python
>>> import mindspore.nn.probability.bijector as msb
>>>
>>> power = 2
>>> bijector = msb.PowerTransform(power=power)
```

</td>
<td>

```python
>>> import mindspore.nn.probability.bijector as msb
>>>
>>> power = 2.0
>>> bijector = msb.PowerTransform(power=power)
```

</td>
</tr>
</table>
###### `nn.probability.bijector.GumbelCDF` removes the `dtype` argument

<table>
<tr>
<td> 1.0.1 </td> <td> 1.1.0 </td>
</tr>
<tr>
<td>

```python
>>> import mindspore.nn.probability.bijector as msb
>>> from mindspore import dtype as mstype
>>>
>>> bijector = msb.GumbelCDF(loc=0.0, scale=1.0, dtype=mstype.float32)
```

</td>
<td>

```python
>>> import mindspore.nn.probability.bijector as msb
>>>
>>> bijector = msb.GumbelCDF(loc=0.0, scale=1.0)
```

</td>
</tr>
</table>
###### `Conv2dBnAct` and `DenseBnAct` are imported from `mindspore.nn` instead of `mindspore.nn.layer.quant`

<table>
<tr>
<td> 1.0.1 </td> <td> 1.1.0 </td>
</tr>
<tr>
<td>

```python
>>> from mindspore.nn.layer.quant import Conv2dBnAct, DenseBnAct
```

</td>
<td>

```python
>>> from mindspore.nn import Conv2dBnAct, DenseBnAct
```

</td>
</tr>
</table>
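A minimal construction sketch with the new import path (illustrative; the layer sizes and activation are assumptions, not from the original notes):

```python
>>> import mindspore.nn as nn
>>> from mindspore.nn import Conv2dBnAct, DenseBnAct
>>>
>>> # hypothetical LeNet-style layers combining conv/dense, batch norm and activation
>>> conv_bn_act = Conv2dBnAct(1, 6, kernel_size=5, has_bn=True, activation=nn.ReLU())
>>> dense_bn_act = DenseBnAct(120, 84, has_bn=True, activation=nn.ReLU())
```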