Sorry for the long response time — I was busy with other things and couldn't work on this project. Here is my code, minimized, which still produces the same result as above on my machine even after reinstalling Python:
import os
from datetime import datetime as dt

import tensorflow as tf
from tensorflow.keras import callbacks
from tensorflow.keras.layers import Conv2D, MaxPooling2D, Flatten, Dense, Dropout, Input, BatchNormalization
from tensorflow.keras.models import Model
from tensorflow.keras.optimizers import Adam
from tensorflow.keras.preprocessing.image import ImageDataGenerator
name = "test"
batch_size = 32
name = f"{name}-{dt.now():%d_%H_%M}"
# Binary image classifier (functional API): two 3x3 conv layers, then three
# rounds of 2x2 max-pooling to shrink the feature map (keeps the Flatten->Dense
# parameter count small), then a small dense head ending in one sigmoid unit.
img_inputs = Input(shape=(256, 256, 3))

features = img_inputs
for _ in range(2):
    features = Conv2D(32, (3, 3), padding="same", activation='relu')(features)
features = BatchNormalization()(features)

# Repeated pooling stands in for a deeper conv stack: each pass halves the
# spatial size so the flattened vector stays manageable.
for _ in range(3):
    features = MaxPooling2D(pool_size=(2, 2))(features)

features = Flatten()(features)
features = BatchNormalization()(features)
features = Dropout(0.5)(features)
for _ in range(2):
    features = Dense(units=32, activation='relu')(features)
features = Dropout(0.2)(features)
features = BatchNormalization()(features)
output = Dense(units=1, activation='sigmoid')(features)

model = Model(inputs=img_inputs, outputs=output, name=name)
model.compile(optimizer=Adam(), loss='binary_crossentropy', metrics=['accuracy'])
# One generator serves both splits: pixel values rescaled to [0, 1] and an
# 80/20 train/validation split drawn from the same 'data' directory.
train_datagen = ImageDataGenerator(
    validation_split=0.2,
    rescale=1 / 255,
)

# Both flows share everything except the subset they draw from.
_flow_kwargs = dict(
    target_size=(256, 256),
    batch_size=batch_size,
    class_mode='binary',
)
training_set = train_datagen.flow_from_directory('data', subset='training', **_flow_kwargs)
testing_set = train_datagen.flow_from_directory('data', subset='validation', **_flow_kwargs)
# Keras does not create missing directories when writing HDF5 checkpoints, so
# make sure the target folder exists before training starts.
os.makedirs("checkpoint", exist_ok=True)

# Keep only the best-so-far weights (lowest validation loss), checked once per epoch.
mc = callbacks.ModelCheckpoint(f"checkpoint/{name}.h5", monitor='val_loss', save_best_only=True, save_freq='epoch')

# NOTE: `batch_size` must NOT be passed to fit() when the data comes from a
# generator — the generator already yields whole batches, so the argument is
# ignored at best and raises a ValueError in newer Keras versions.
model.fit(
    training_set,
    steps_per_epoch=training_set.samples // batch_size,
    epochs=200,
    callbacks=[mc],
    validation_data=testing_set,
    validation_steps=testing_set.samples // batch_size,
)
model.save("model.h5")