Hi, thank you for this fascinating work and for providing a demo of MLDG.
if not stop_gradient:
grad_weight = autograd.grad(meta_loss, weight, create_graph=True)[0]
if bias is not None:
grad_bias = autograd.grad(meta_loss, bias, create_graph=True)[0]
bias_adapt = bias - grad_bias * meta_step_size
else:
bias_adapt = bias
else:
grad_weight = Variable(autograd.grad(meta_loss, weight, create_graph=True)[0].data, requires_grad=False)
if bias is not None:
grad_bias = Variable(autograd.grad(meta_loss, bias, create_graph=True)[0].data, requires_grad=False)
bias_adapt = bias - grad_bias * meta_step_size
else:
bias_adapt = bias
return F.linear(inputs,
weight - grad_weight * meta_step_size,
bias_adapt)
else:
return F.linear(inputs, weight, bias)