Ask for help: "None" values keep appearing when using tf.GradientTape

Problem description: I built a neural network with three inputs and two outputs, named "neural_net", where [x, y, t] is the input and [psi, p] is the output. When I use automatic differentiation to compute the derivatives below, some of the gradients come back as "None", and I can't figure out why. I hope someone can help; the code is as follows.

def net_NS(self, x, y, t):
    lambda_1 = self.lambda_1
    lambda_2 = self.lambda_2
    # nested tapes: tape1 for first derivatives, tape2/tape3 for the second derivatives
    with tf.GradientTape(persistent=True) as tape3:
        tape3.watch([x, y])
        with tf.GradientTape(persistent=True) as tape2:
            tape2.watch([x, y, t])
            with tf.GradientTape(persistent=True) as tape1:
                tape1.watch([x, y])
                # forward pass: [x, y, t] -> [psi, p] (stream function and pressure)
                psi_and_p = self.neural_net(tf.concat([x, y, t], 1), self.weights, self.biases)
                psi = psi_and_p[:, 0:1]
                p = psi_and_p[:, 1:2]
            # first derivatives: velocities from the stream function, pressure gradients
            u = tape1.gradient(psi, y)[0]
            v = -tape1.gradient(psi, x)[0]
            p_x = tape1.gradient(p, x)[0]
            p_y = tape1.gradient(p, y)[0]
        # time and space derivatives of the velocity components
        u_t = tape2.gradient(u, t)[0]
        u_x = tape2.gradient(u, x)[0]
        u_y = tape2.gradient(u, y)[0]
        v_t = tape2.gradient(v, t)[0]
        v_x = tape2.gradient(v, x)[0]
        v_y = tape2.gradient(v, y)[0]
    # second derivatives for the viscous terms
    u_xx = tape3.gradient(u_x, x)[0]
    u_yy = tape3.gradient(u_y, y)[0]
    v_xx = tape3.gradient(v_x, x)[0]
    v_yy = tape3.gradient(v_y, y)[0]

    # Navier-Stokes momentum residuals f_u and f_v
    f_u = u_t + lambda_1 * (u * u_x + v * u_y) + p_x - lambda_2 * (u_xx + u_yy)
    f_v = v_t + lambda_1 * (u * v_x + v * v_y) + p_y - lambda_2 * (v_xx + v_yy)

    return u, v, p, f_u, f_v
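
To check my understanding of nested tapes, I also wrote a minimal sketch along the following lines (toy_net, w1/b1/w2/b2, and the sample inputs are placeholders, not my real model), and I would expect both printed gradients to be Tensors rather than None:

import tensorflow as tf

# Toy stand-in for neural_net: one tanh hidden layer, 3 inputs -> 2 outputs.
w1 = tf.Variable(tf.random.normal([3, 8]))
b1 = tf.Variable(tf.zeros([8]))
w2 = tf.Variable(tf.random.normal([8, 2]))
b2 = tf.Variable(tf.zeros([2]))

def toy_net(xyt):
    return tf.tanh(xyt @ w1 + b1) @ w2 + b2

# Inputs are tf.Tensors so that tape.watch can take effect.
x = tf.constant([[0.1], [0.2]])
y = tf.constant([[0.3], [0.4]])
t = tf.constant([[0.5], [0.6]])

with tf.GradientTape(persistent=True) as outer:
    outer.watch([x, y])
    with tf.GradientTape(persistent=True) as inner:
        inner.watch([x, y, t])
        psi = toy_net(tf.concat([x, y, t], 1))[:, 0:1]
    # with a single source, inner.gradient returns a Tensor, not a list
    psi_y = inner.gradient(psi, y)
psi_yx = outer.gradient(psi_y, x)
print(psi_y)   # first derivative: should be a (2, 1) Tensor
print(psi_yx)  # second derivative: should be a (2, 1) Tensor, not None

One detail I am unsure about: in TF2, tape.gradient(target, source) with a single source returns a Tensor rather than a list (unlike tf.gradients in TF1), so the trailing [0] in my code above slices the first row of each gradient instead of unpacking a list. Could that, or the inputs not being proper tf.Tensors when watched, be the reason for the "None" values?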