From a8eca5359699a23c8d4be49a25e6ff92aa1feae7 Mon Sep 17 00:00:00 2001
From: lixiaohui
Date: Mon, 30 Nov 2020 16:02:51 +0800
Subject: [PATCH] Fixbug: modify GuidedBackprop API to make example executable

---
 mindspore/explainer/_runner.py                    |  6 +++---
 .../benchmark/_attribution/faithfulness.py        |  6 ++++--
 .../benchmark/_attribution/localization.py        |  5 ++++-
 .../_attribution/_backprop/gradcam.py             |  5 ++++-
 .../_attribution/_backprop/gradient.py            |  6 +++++-
 .../_attribution/_backprop/modified_relu.py       | 16 +++++++++++-----
 .../_attribution/_perturbation/rise.py            |  9 +++++++--
 7 files changed, 38 insertions(+), 15 deletions(-)

diff --git a/mindspore/explainer/_runner.py b/mindspore/explainer/_runner.py
index abe7ba8a66..cdfd881d86 100644
--- a/mindspore/explainer/_runner.py
+++ b/mindspore/explainer/_runner.py
@@ -161,7 +161,7 @@ class ExplainRunner:
         Examples:
             >>> from mindspore.explainer import ExplainRunner
             >>> from mindspore.explainer.explanation import GuidedBackprop, Gradient
-            >>> from mindspore.nn import Sigmoid
+            >>> from mindspore.nn import Softmax
             >>> from mindspore.train.serialization import load_checkpoint, load_param_into_net
             >>> # Prepare the dataset for explaining and evaluation, e.g., Cifar10
             >>> dataset = get_dataset('/path/to/Cifar10_dataset')
@@ -172,9 +172,9 @@ class ExplainRunner:
             >>> load_param_into_net(net, param_dict)
             >>> gbp = GuidedBackprop(net)
             >>> gradient = Gradient(net)
-            >>> runner = ExplainRunner("./")
             >>> explainers = [gbp, gradient]
-            >>> runner.run((dataset, classes), explainers, activation_fn=Sigmoid())
+            >>> # runner is an ExplainRunner object
+            >>> runner.run((dataset, classes), explainers, activation_fn=Softmax())
         """

         check_value_type("dataset", dataset, tuple)
diff --git a/mindspore/explainer/benchmark/_attribution/faithfulness.py b/mindspore/explainer/benchmark/_attribution/faithfulness.py
index 460c962005..bcaf02a7d3 100644
--- a/mindspore/explainer/benchmark/_attribution/faithfulness.py
+++ b/mindspore/explainer/benchmark/_attribution/faithfulness.py
@@ -414,8 +414,10 @@ class Faithfulness(LabelSensitiveMetric):
             numpy.ndarray, 1D array of shape :math:`(N,)`, result of faithfulness evaluated on `explainer`.

         Examples:
-            >>> # init an explainer, the network should contain the output activation function.
-            >>> network = resnet50(20)
+            >>> import numpy as np
+            >>> import mindspore as ms
+            >>> from mindspore.explainer.explanation import Gradient
+            >>> # init an explainer with a trained network, e.g., resnet50
             >>> gradient = Gradient(network)
             >>> inputs = ms.Tensor(np.random.rand(1, 3, 224, 224), ms.float32)
             >>> targets = 5
diff --git a/mindspore/explainer/benchmark/_attribution/localization.py b/mindspore/explainer/benchmark/_attribution/localization.py
index 95b8c06af0..bc2776ec9d 100644
--- a/mindspore/explainer/benchmark/_attribution/localization.py
+++ b/mindspore/explainer/benchmark/_attribution/localization.py
@@ -110,7 +110,10 @@ class Localization(LabelSensitiveMetric):
             numpy.ndarray, 1D array of shape :math:`(N,)`, result of localization evaluated on `explainer`.

         Examples:
-            >>> # init an explainer with a trained network
+            >>> import numpy as np
+            >>> import mindspore as ms
+            >>> from mindspore.explainer.explanation import Gradient
+            >>> # init an explainer with a trained network, e.g., resnet50
             >>> gradient = Gradient(network)
             >>> inputs = ms.Tensor(np.random.rand(1, 3, 224, 224), ms.float32)
             >>> masks = np.zeros([1, 1, 224, 224])
diff --git a/mindspore/explainer/explanation/_attribution/_backprop/gradcam.py b/mindspore/explainer/explanation/_attribution/_backprop/gradcam.py
index 46f728db65..fb18189c6c 100644
--- a/mindspore/explainer/explanation/_attribution/_backprop/gradcam.py
+++ b/mindspore/explainer/explanation/_attribution/_backprop/gradcam.py
@@ -66,7 +66,8 @@ class GradCAM(IntermediateLayerAttribution):

     Examples:
         >>> from mindspore.explainer.explanation import GradCAM
-        >>> net = resnet50(10)
+        >>> from mindspore.train.serialization import load_checkpoint, load_param_into_net
+        >>> net = resnet50(10)  # please refer to model_zoo
         >>> param_dict = load_checkpoint("resnet50.ckpt")
         >>> load_param_into_net(net, param_dict)
         >>> # specify a layer name to generate explanation, usually the layer can be set as the last conv layer.
@@ -111,6 +112,8 @@ class GradCAM(IntermediateLayerAttribution):
             Tensor, a 4D tensor of shape :math:`(N, 1, H, W)`.

         Examples:
+            >>> import mindspore as ms
+            >>> import numpy as np
             >>> inputs = ms.Tensor(np.random.rand(1, 3, 224, 224), ms.float32)
             >>> label = 5
             >>> # gradcam is a GradCAM object, parse data and the target label to be explained and get the attribution
diff --git a/mindspore/explainer/explanation/_attribution/_backprop/gradient.py b/mindspore/explainer/explanation/_attribution/_backprop/gradient.py
index d09338f732..302c22348b 100644
--- a/mindspore/explainer/explanation/_attribution/_backprop/gradient.py
+++ b/mindspore/explainer/explanation/_attribution/_backprop/gradient.py
@@ -61,7 +61,9 @@ class Gradient(Attribution):

     Examples:
         >>> from mindspore.explainer.explanation import Gradient
-        >>> net = resnet50(10)
+        >>> from mindspore.train.serialization import load_checkpoint, load_param_into_net
+        >>> # init Gradient with a trained network
+        >>> net = resnet50(10)  # please refer to model_zoo
         >>> param_dict = load_checkpoint("resnet50.ckpt")
         >>> load_param_into_net(net, param_dict)
         >>> gradient = Gradient(net)
@@ -89,6 +91,8 @@ class Gradient(Attribution):
             Tensor, a 4D tensor of shape :math:`(N, 1, H, W)`.

         Examples:
+            >>> import mindspore as ms
+            >>> import numpy as np
             >>> inputs = ms.Tensor(np.random.rand(1, 3, 224, 224), ms.float32)
             >>> label = 5
             >>> # gradient is a Gradient object, parse data and the target label to be explained and get the attribution
diff --git a/mindspore/explainer/explanation/_attribution/_backprop/modified_relu.py b/mindspore/explainer/explanation/_attribution/_backprop/modified_relu.py
index d8c551b2a7..04df261e85 100644
--- a/mindspore/explainer/explanation/_attribution/_backprop/modified_relu.py
+++ b/mindspore/explainer/explanation/_attribution/_backprop/modified_relu.py
@@ -97,11 +97,14 @@ class Deconvolution(ModifiedReLU):
         network (Cell): The black-box model to be explained.

     Examples:
+        >>> import numpy as np
+        >>> import mindspore as ms
         >>> from mindspore.explainer.explanation import Deconvolution
-        >>> net = resnet50(10)
+        >>> from mindspore.train.serialization import load_checkpoint, load_param_into_net
+        >>> net = resnet50(10)  # please refer to model_zoo
         >>> param_dict = load_checkpoint("resnet50.ckpt")
         >>> load_param_into_net(net, param_dict)
-        >>> # init Gradient with a trained network.
+        >>> # init Deconvolution with a trained network.
         >>> deconvolution = Deconvolution(net)
         >>> # parse data and the target label to be explained and get the saliency map
         >>> inputs = ms.Tensor(np.random.rand(1, 3, 224, 224), ms.float32)
@@ -132,14 +135,17 @@ class GuidedBackprop(ModifiedReLU):
         network (Cell): The black-box model to be explained.

     Examples:
+        >>> import numpy as np
+        >>> import mindspore as ms
+        >>> from mindspore.train.serialization import load_checkpoint, load_param_into_net
         >>> from mindspore.explainer.explanation import GuidedBackprop
-        >>> net = resnet50(10)
+        >>> net = resnet50(10)  # please refer to model_zoo
         >>> param_dict = load_checkpoint("resnet50.ckpt")
         >>> load_param_into_net(net, param_dict)
-        >>> # init Gradient with a trained network.
+        >>> # init GuidedBackprop with a trained network.
        >>> gbp = GuidedBackprop(net)
         >>> # parse data and the target label to be explained and get the saliency map
-        >>> inputs = ms.Tensor(np.random.rand([1, 3, 224, 224]), ms.float32)
+        >>> inputs = ms.Tensor(np.random.rand(1, 3, 224, 224), ms.float32)
         >>> label = 5
         >>> saliency = gbp(inputs, label)
     """
diff --git a/mindspore/explainer/explanation/_attribution/_perturbation/rise.py b/mindspore/explainer/explanation/_attribution/_perturbation/rise.py
index 14cc396c5d..3e710e29a6 100644
--- a/mindspore/explainer/explanation/_attribution/_perturbation/rise.py
+++ b/mindspore/explainer/explanation/_attribution/_perturbation/rise.py
@@ -52,11 +52,14 @@ class RISE(PerturbationAttribution):

     Examples:
         >>> from mindspore.explainer.explanation import RISE
-        >>> net = resnet50(10)
+        >>> from mindspore.nn import Sigmoid
+        >>> from mindspore.train.serialization import load_checkpoint, load_param_into_net
+        >>> # init RISE with a trained network
+        >>> net = resnet50(10)  # please refer to model_zoo
         >>> param_dict = load_checkpoint("resnet50.ckpt")
         >>> load_param_into_net(net, param_dict)
         >>> # init RISE with specified activation function
-        >>> rise = RISE(net, activation_fn=nn.layer.Sigmoid())
+        >>> rise = RISE(net, activation_fn=Sigmoid())
     """

     def __init__(self,
@@ -113,6 +116,8 @@ class RISE(PerturbationAttribution):
             Tensor, a 4D tensor of shape :math:`(N, ?, H, W)` or :math:`(N, 1, H, W)`.

         Examples:
+            >>> import mindspore as ms
+            >>> import numpy as np
             >>> # given an instance of RISE, saliency map can be generate
             >>> inputs = ms.Tensor(np.random.rand(2, 3, 224, 224), ms.float32)
             >>> # when `targets` is an integer