GradientTape.gradient() returns `None`


Following is the code I'm trying to implement for meta-learning with the MAML algorithm on a specific dataset. The inner loop works fine; I don't know why the gradients in the outer loop are None.

import tensorflow as tf
from tensorflow.keras import losses, metrics, models

def maml(base_model, x_train, y_train, x_test, y_test, task_name,
         learning_rate=0.001, epochs=100, inner_iteration=20):
  inner_optimizer = tf.keras.optimizers.legacy.Adam()
  outer_optimizer = tf.keras.optimizers.legacy.Adam()
  loss_fn = losses.MeanSquaredError()
  rmse_metric = metrics.RootMeanSquaredError()

  # outer loop: one meta-update per epoch
  for epoch in range(epochs):

    cloned_model1 = models.clone_model(base_model)
    cloned_model1.compile(loss=loss_fn, optimizer=inner_optimizer)

    with tf.GradientTape() as test_tape:
      # inner loop: adapt the clone on the support set
      for _ in range(inner_iteration):
        with tf.GradientTape() as train_tape:
          support_predictions = cloned_model1(x_train)
          support_loss = loss_fn(y_train, support_predictions)
        gradients = train_tape.gradient(support_loss, cloned_model1.trainable_variables)
        inner_optimizer.apply_gradients(zip(gradients, cloned_model1.trainable_variables))

      # evaluate the adapted clone on the query set
      query_predictions = cloned_model1(x_test)
      query_loss = loss_fn(y_test, query_predictions)

    # meta-update: this is where every gradient comes back as None
    gradients = test_tape.gradient(query_loss, base_model.trainable_variables)
    print(gradients)
    outer_optimizer.apply_gradients(zip(gradients, base_model.trainable_variables))
    

Output:

[None, None, None, None, None, None, None, None, None, None, None, None, None, None]

When these gradients are then passed to outer_optimizer.apply_gradients(zip(gradients, base_model.trainable_variables)), I get the error:

ValueError: No gradients provided for any variable
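
My suspicion is that the tape never sees base_model's variables at all: models.clone_model creates brand-new variables, and only the clone's variables are read during the forward pass. The following minimal sketch (a hypothetical toy model with made-up shapes) reproduces the same None result:

import tensorflow as tf

inputs = tf.keras.Input(shape=(3,))
base = tf.keras.Model(inputs, tf.keras.layers.Dense(1)(inputs))
clone = tf.keras.models.clone_model(base)  # fresh, independent variables

x = tf.random.normal((4, 3))
with tf.GradientTape() as tape:
  loss = tf.reduce_mean(tf.square(clone(x)))  # only the clone's variables are read

# base's variables never entered the computation, so every gradient is None
print(tape.gradient(loss, base.trainable_variables))  # [None, None]

As far as I understand, even seeding the clone from base_model would not fix this on its own, because inner_optimizer.apply_gradients updates the variables in place, and the tape does not differentiate through such assignments.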

Please help me solve this problem.
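
For reference, the first-order variant below at least keeps the gradient path intact (a sketch only, assuming a FOMAML-style first-order approximation is acceptable): it differentiates with respect to the clone's variables, which the tape actually watched, and applies the update to base_model's one-to-one matching variables. It also seeds the clone from base_model, since clone_model copies the architecture but not the weights:

# first-order sketch of the outer loop (same names as above)
for epoch in range(epochs):
  cloned_model1 = models.clone_model(base_model)
  cloned_model1.set_weights(base_model.get_weights())  # clone_model does not copy weights

  with tf.GradientTape() as test_tape:
    # inner loop: adapt the clone on the support set
    for _ in range(inner_iteration):
      with tf.GradientTape() as train_tape:
        support_loss = loss_fn(y_train, cloned_model1(x_train))
      gradients = train_tape.gradient(support_loss, cloned_model1.trainable_variables)
      inner_optimizer.apply_gradients(zip(gradients, cloned_model1.trainable_variables))

    query_loss = loss_fn(y_test, cloned_model1(x_test))

  # differentiate w.r.t. the variables the tape actually watched (the clone's),
  # then apply the meta-update to base_model's matching variables
  gradients = test_tape.gradient(query_loss, cloned_model1.trainable_variables)
  outer_optimizer.apply_gradients(zip(gradients, base_model.trainable_variables))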
