I have the following code, which runs fine when the dataset has 80 columns/features, but it fails when I apply it to a dataset with a different number of features. For example, I received the following error when the CSV file had 50 columns:

Input 0 of layer "sequential_1" is incompatible with the layer: expected shape=(None, 50, 1), found shape=(None, 80, 1)
# cnn autoencoder architecture
import tensorflow as tf
from tensorflow.keras import Model
from tensorflow.keras.layers import (InputLayer, Conv1D, Conv1DTranspose, BatchNormalization,
                                     LeakyReLU, ReLU, Flatten, Dense, Reshape)
from tensorflow.keras.optimizers import Adam
from tensorflow.keras.callbacks import ReduceLROnPlateau, EarlyStopping, ModelCheckpoint

no_of_features = train_X.shape[1]

class AE_L2(Model):
    def __init__(self, latent_dim):
        super(AE_L2, self).__init__()
        self.latent_dim = latent_dim
        # encoder: three strided Conv1D blocks followed by a dense bottleneck
        self.encoder = tf.keras.Sequential(
            [
                InputLayer((no_of_features, 1)),
                Conv1D(16, 4, 2, 'same'), BatchNormalization(), LeakyReLU(),
                Conv1D(32, 4, 2, 'same'), BatchNormalization(), LeakyReLU(),
                Conv1D(64, 4, 2, 'same'), BatchNormalization(), LeakyReLU(),
                Flatten(),
                Dense(latent_dim),
            ]
        )
        # decoder: mirrors the encoder with Conv1DTranspose, sigmoid output
        self.decoder = tf.keras.Sequential(
            [
                Dense(640), Reshape((10, 64)), BatchNormalization(), ReLU(),
                Conv1DTranspose(32, 4, 2, 'same'), BatchNormalization(), ReLU(),
                Conv1DTranspose(16, 4, 2, 'same'), BatchNormalization(), LeakyReLU(),
                Conv1DTranspose(1, 4, 2, 'same', activation='sigmoid'),
            ]
        )

    def call(self, x):
        encoded = self.encoder(x)
        decoded = self.decoder(encoded)
        return decoded
#######
adam = Adam(0.01)
reduce_lr = ReduceLROnPlateau()
early_stopping = EarlyStopping(patience=2)
model_checkpoint = ModelCheckpoint("./models_AE_L2/checkpoint", save_weights_only=True, save_best_only=True)

def mse(y_true, y_pred):
    return tf.reduce_mean(tf.square(y_true - y_pred))

autoencoder = AE_L2(no_of_features)
autoencoder.compile(optimizer=adam, loss=mse)
autoencoder.build(input_shape=(None, no_of_features, 1))
autoencoder.summary(expand_nested=True)
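To double-check my understanding, here is my own bookkeeping of how the sequence length moves through the layers (so this may well be wrong): with padding='same' and stride 2, every encoder Conv1D halves the length and every decoder Conv1DTranspose doubles it.

```python
length = 80  # sequence length for the 80-feature case
for filters in (16, 32, 64):
    length = -(-length // 2)             # ceil division: Conv1D, 'same' padding, stride 2
    print("after Conv1D ->", length)     # 40, 20, 10
print("flattened size ->", length * 64)  # 640, which matches Dense(640)
print("decoder output ->", 10 * 2 ** 3)  # 80, regardless of no_of_features
```

If that arithmetic is right, the hard-coded Dense(640) / Reshape((10, 64)) pair seems to be what pins the decoder output to 80 steps, but I am not sure how to make that part depend on no_of_features.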
Any help or explanation on why it always expects 80 features, and how I could implement this so it works with any number of features, would be appreciated.
![Screenshot 2023-05-23 143432|541x500](upload://9Rs8cRJJSExnaqhY9PyM7aODgoj.png)
I have also attached part of the model summary here. How can I compute the parameter values, please? I have tried ((kernel_size * stride) + 1) * filters but end up with a different set of values.
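To make my attempt concrete, this is how I applied that formula to the three encoder convolutions (it is probably wrong, hence the question):

```python
# my attempted parameter count: ((kernel_size * stride) + 1) * filters,
# applied to Conv1D(16, 4, 2, 'same'), Conv1D(32, ...) and Conv1D(64, ...)
kernel_size, stride = 4, 2
for filters in (16, 32, 64):
    print(filters, "filters ->", ((kernel_size * stride) + 1) * filters)
# prints 144, 288, 576 -- not the values shown in the summary above
```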