Eureka — after mixing Google, ChatGPT, and trial and error like an alchemist, I arrived at the following solution.
The following code reads a CSV file with the header 'PACK0, PACK1, …, PACK23, RESULT'. Each row contains 192 binary (0/1) inputs encoded as 24 8-bit integers (so 1,1,1,1,1,1,1,1 is encoded as 255) and one float output. The code unpacks the inputs in batches before training:
def unpackbits_tf(features, labels):
    """Expand each packed 8-bit feature column into its 8 constituent bits.

    Args:
        features: integer tensor of shape (batch, n_cols); each value packs
            8 binary flags, most-significant bit first (255 -> 1,1,1,1,1,1,1,1).
        labels: passed through unchanged.

    Returns:
        Tuple of (bits, labels) where bits has shape (batch, n_cols * 8)
        and int32 entries in {0, 1}.
    """
    # Bit weights 128..1, i.e. MSB-first, in the same dtype as the input so
    # the bitwise AND is well-defined.
    bit_values = tf.constant([2 ** p for p in range(7, -1, -1)], dtype=features.dtype)
    # Broadcast each packed byte against all 8 masks, then test for non-zero.
    selected = tf.bitwise.bitwise_and(features[..., tf.newaxis], bit_values)
    bits = tf.cast(selected > 0, tf.int32)
    # Flatten the trailing (n_cols, 8) pair into a single n_cols * 8 axis.
    return tf.reshape(bits, [-1, features.shape[1] * 8]), labels
# Load the packed dataset: 24 uint8 'PACK*' columns plus a float 'RESULT' target.
# Passing dtype='uint8' for ALL columns together with a converter for RESULT
# makes pandas warn and ignore the dtype for that column; restricting the
# dtype to the PACK columns states the intent explicitly and avoids the warning.
features = pd.read_csv('demo.csv',
                       dtype={f'PACK{i}': 'uint8' for i in range(24)},
                       converters={'RESULT': float})
labels = features.pop('RESULT')
x_train, x_test, y_train, y_test = train_test_split(
    features.to_numpy(), labels.to_numpy(), test_size=0.2)

BATCH_SIZE = 128
with tf.device("CPU"):
    # Shuffle the training rows each epoch — without it the model sees the
    # file order every time. Unpacking is mapped AFTER batching so it runs
    # once per batch instead of once per row; prefetch overlaps input
    # preparation with training.
    train = (tf.data.Dataset.from_tensor_slices((x_train, y_train))
             .shuffle(len(x_train))
             .batch(4 * BATCH_SIZE)
             .map(unpackbits_tf)
             .prefetch(tf.data.AUTOTUNE))
    validate = (tf.data.Dataset.from_tensor_slices((x_test, y_test))
                .batch(BATCH_SIZE)
                .map(unpackbits_tf)
                .prefetch(tf.data.AUTOTUNE))

# Small MLP over the 192 unpacked bits; the sigmoid output assumes the float
# RESULT target lies in [0, 1] — TODO confirm against the data.
model = tf.keras.Sequential([
    layers.Dense(192, activation="relu"),
    layers.Dense(16, activation="relu"),
    layers.Dense(16, activation="relu"),
    layers.Dense(1, activation="sigmoid"),
])
model.compile(optimizer=tf.keras.optimizers.Adam(),
              loss=tf.keras.losses.MeanSquaredError())

# Stop once validation loss plateaus, and roll back to the best epoch's
# weights — without restore_best_weights the model keeps the LAST (already
# degraded) weights when early stopping fires.
callback = tf.keras.callbacks.EarlyStopping(monitor='val_loss', mode='auto',
                                            patience=10,
                                            restore_best_weights=True)
history = model.fit(train,
                    epochs=1000,
                    callbacks=[callback],
                    validation_data=validate)
Regards,
GW