I'm trying to use 2 optimizers for different variables. I am using Keras. The parts that are commented out are what I am trying to do, but with a modern TensorFlow/Keras version.
# Old TF1 style: collect the discriminator/generator variables by name prefix
# and give each group its own Adam via Optimizer.minimize(loss, var_list=...).
# Fixed here: str has `startswith`, not `starts_with` (would raise
# AttributeError if uncommented). Also note `tf.global_variables()` is gone in
# TF2 — filter `model.trainable_variables` by `v.name` prefix instead.
# var_D = [v for v in tf.global_variables() if v.name.startswith('d')]
# var_G = [v for v in tf.global_variables() if v.name.startswith('g') or v.name.startswith('h')]

# Discriminator optimizer: higher LR (TTUR-style), GAN-typical betas.
optimizer_D = optimizers.Adam(learning_rate=0.0004, beta_1=0.5,
                              beta_2=0.9)  # .minimize(Loss_D, var_list=var_D)
# Generator optimizer: lower LR, same betas.
optimizer_G = optimizers.Adam(learning_rate=0.0001, beta_1=0.5,
                              beta_2=0.9)  # .minimize(Loss_G, var_list=var_G)

# NOTE(review): X, Y, MASK and result are assumed to be Keras tensors defined
# earlier in this file — confirm.
model = Model(inputs=[X, Y, MASK], outputs=result)
# NOTE(review): compile() with no loss/optimizer means fit() cannot train this
# model as-is. With two optimizers over disjoint variable sets, the modern
# pattern is to subclass Model and override train_step(): compute Loss_D and
# Loss_G under a tf.GradientTape, then apply optimizer_D to the D variables
# and optimizer_G to the G variables via apply_gradients().
model.compile()