can anyone explain me why sigma has shape 2 instead of one here?
tf.concat gives always error
def normal_sp(params):
return tfd.Normal(loc=params[:,0:1], scale=1e-3 + tf.math.softplus(0.005* params[:,1:2]))
class subq0(tf.keras.Model):
def __init__(self):
super().__init__()
self.mlp2 = tf.keras.Sequential()
self.mlp2.add(tf.keras.Input(shape=(1,)))
self.mlp2.add(tf.keras.layers.Dense(20))
self.mlp2.add(tf.keras.layers.Dense(20))
self.mlp2.add(tf.keras.layers.Dense(1))
self.distrlambda= tfp.layers.DistributionLambda(normal_sp, name='normal')
def call(self, inputs):
sigma= self.mlp2(inputs)
params_mc= tf.concat([sigma,1], axis=0)
dist_mc =self.distrlambda(params_mc)
return dist_mc
polynom = subq0()
optimizer = tf.optimizers.SGD(learning_rate=0.0001,momentum=0.9)
polynom.compile(optimizer=optimizer, loss= NLL )
polynom.build(input_shape=(1,))
tf.keras.utils.plot_model(polynom, "test.png", show_shapes=True)
history_polynom = polynom.fit(X_train, y_train , epochs= 100, verbose=0, batch_size=100 ,validation_data=(X_val,y_val) )
the error is
ValueError: Shape must be rank 2 but is rank 1 for ‘{{node concat}} = ConcatV2[N=2, T=DT_FLOAT, Tidx=DT_INT32](sequential_32/dense_74/BiasAdd, ones, concat/axis)’ with input shapes: [1,1], [1], .