Class Activation Map / Gradient Attention Map
分类/分割任务中可能会需要对训练过程中某些层的计算梯度进行操作,对于Keras来说我们可以通过使用Callback()实现返回梯度的目的,具体的例子如下所示,分为非eager模式和eager模式两部分。
1. 非eager模式
# Symbolic K.gradients / K.function require graph (non-eager) mode.
tf.compat.v1.disable_eager_execution()


def get_gradient_func(model):
    """Build backend functions returning d(output_i)/d(input) for a two-output model.

    Args:
        model: a compiled Keras model with (at least) two outputs.

    Returns:
        A pair of ``K.function`` objects. Each takes
        ``[inputs, targets, sample_weights]`` (in the model's feed order) and
        returns the gradient of the corresponding model output w.r.t. the
        model input.
    """
    grads_1 = K.gradients(model.outputs[0], model.inputs[0])
    grads_2 = K.gradients(model.outputs[1], model.inputs[0])
    # NOTE(review): _feed_* are private Keras attributes; their concatenation
    # defines the feed order the returned functions expect.
    inputs = model._feed_inputs + model._feed_targets + model._feed_sample_weights
    func_1 = K.function(inputs, grads_1)
    func_2 = K.function(inputs, grads_2)
    # BUG FIX: the original returned `func_lipid, func_calcium`, which are
    # undefined names (NameError at call time).
    return func_1, func_2
class CustomCallback(Callback):
    """Every 10 epochs, collect d(output)/d(input) gradients over the training set
    (non-eager / graph-mode version, using the functions from get_gradient_func)."""

    def __init__(self, model, training_generator, save_grad_path):
        self.model = model
        self.training_generator = training_generator
        # Destination path for the collected gradients (saving not shown here).
        self.save_grad_path = save_grad_path

    def on_epoch_end(self, epoch, logs=None):
        # Guard clause: only run the (expensive) gradient sweep every 10th epoch.
        if (epoch + 1) % 10 != 0:
            return
        epoch_gradient_1 = []
        epoch_gradient_2 = []
        get_gradient_1, get_gradient_2 = get_gradient_func(self.model)
        for step, batch in enumerate(self.training_generator):
            batch = tuple(t for t in batch)
            train_img = batch[0]
            train_label = batch[1]
            # NOTE(review): np.ones(16) assumes a fixed batch size of 16 for the
            # sample weights — use np.ones(len(train_img)) if the last batch can
            # be smaller. TODO confirm against the generator.
            grads_1 = get_gradient_1([train_img, train_label, np.ones(16)])
            grads_2 = get_gradient_2([train_img, train_label, np.ones(16)])
            # Keep only channel 3 of each gradient map.
            epoch_gradient_1.append(grads_1[0][:, :, :, 3])
            # BUG FIX: the original appended grads_1 here as well, so the
            # second output's gradients were never collected.
            epoch_gradient_2.append(grads_2[0][:, :, :, 3])
2. eager模式
class CustomCallback(Callback):
    """Every 10 epochs, collect d(output)/d(layer "data" output) gradients over
    the training set (eager-mode version, using tf.GradientTape)."""

    def __init__(self, model, training_generator, save_grad_path):
        self.model = model
        self.training_generator = training_generator
        # Destination path for the collected gradients (saving not shown here).
        self.save_grad_path = save_grad_path

    def on_epoch_end(self, epoch, logs=None):
        # Guard clause: only run the (expensive) gradient sweep every 10th epoch.
        if (epoch + 1) % 10 != 0:
            return
        epoch_gradient_1 = []
        epoch_gradient_2 = []
        input_layer = self.model.get_layer("data")
        # Auxiliary model exposing both the prediction and the "data" layer's
        # activation, so the tape can differentiate one w.r.t. the other.
        temp_model = Model([self.model.inputs], [self.model.output, input_layer.output])
        for step, batch in enumerate(self.training_generator):
            batch = tuple(t for t in batch)
            train_img = batch[0]
            train_label = batch[1]
            # IMPROVEMENT: one persistent tape yields both gradients from a
            # single forward pass (the original ran the model twice, once per
            # tape). `data` is produced inside the tape, so it is tracked
            # automatically — the original's watch() call came *after* the
            # forward pass and had no effect.
            with tf.GradientTape(persistent=True) as gtape:
                out, data = temp_model(train_img)
            grads_1 = gtape.gradient(out[0], data)
            grads_2 = gtape.gradient(out[1], data)
            del gtape  # release the persistent tape's resources promptly
            # Keep only channel 3 of each gradient map.
            epoch_gradient_1.append(grads_1[:, :, :, 3])
            epoch_gradient_2.append(grads_2[:, :, :, 3])
个人推荐使用eager模式。
References:
https://stackoverflow.com/questions/58322147/how-to-generate-cnn-heatmaps-using-built-in-keras-in-tf2-0-tf-keras
https://stackoverflow.com/questions/61568665/tf2-compute-gradients-in-keras-callback-in-non-eager-mode
https://discuss.pytorch.org/t/generating-the-class-activation-maps/42887
https://www.tensorflow.org/api_docs/python/tf/GradientTape#gradient