Hello everyone. I have very limited knowledge about training a model, let alone pruning one, so I don't know whether I'm doing this right or wrong.
After pruning the model and validating it, the pruned model gets a very high loss. The base model is ResNet152V2 with 98% accuracy and a loss of around 0.10. Please guide me; I feel very overwhelmed and it's been a couple of sleepless nights with this project.
Here's the code I trained my model with:
from tensorflow import keras
from tensorflow.keras.applications.resnet_v2 import ResNet152V2, preprocess_input
from tensorflow.keras.layers import Flatten, Dense
from tensorflow.keras.models import Model
from tensorflow.keras.preprocessing.image import ImageDataGenerator
from keras.callbacks import ModelCheckpoint, EarlyStopping

base_model = ResNet152V2(input_shape=(256, 256, 3), include_top=False)
for layer in base_model.layers:
    layer.trainable = False

X = Flatten()(base_model.output)
X = Dense(units=5, activation='softmax')(X)

# Final model
model = Model(base_model.input, X)

# Compile the model
model.compile(optimizer='adam', loss=keras.losses.binary_crossentropy, metrics=['accuracy'])
model.summary()

train_datagen = ImageDataGenerator(featurewise_center=True,
                                   preprocessing_function=preprocess_input)
train_data = train_datagen.flow_from_directory(directory="/kaggle/input/5class-weather/data",
                                               target_size=(256, 256),
                                               batch_size=64)

mc = ModelCheckpoint(filepath="./kaggle/working/best_model.h5",
                     monitor="accuracy",
                     verbose=1,
                     save_best_only=True)
es = EarlyStopping(monitor="accuracy",
                   min_delta=0.01,
                   patience=10,
                   verbose=1)
cb = [mc, es]

his = model.fit_generator(train_data,
                          steps_per_epoch=50,
                          epochs=10,
                          callbacks=cb)
Here's the code for pruning the model and validating it:
import tensorflow_model_optimization as tfmot
import numpy as np
import tensorflow as tf
from tensorflow.keras.preprocessing.image import ImageDataGenerator
from tensorflow.keras.applications.resnet_v2 import preprocess_input
from keras.callbacks import ModelCheckpoint, EarlyStopping

train_datagen = ImageDataGenerator(featurewise_center=True,
                                   preprocessing_function=preprocess_input)
train_data = train_datagen.flow_from_directory(directory="/kaggle/input/5class-weather/data",
                                               target_size=(256, 256),
                                               batch_size=64)
# Load the entire model (architecture + weights)
model_for_pruning = tf.keras.models.load_model('/kaggle/input/best-model/96.h5')

# Compile the loaded model
model_for_pruning.compile(
    optimizer='adam',
    loss=tf.keras.losses.binary_crossentropy,
    metrics=['accuracy']
)
# Compute end step to finish pruning after 2 epochs.
batch_size = 64
epochs = 2
prune_low_magnitude = tfmot.sparsity.keras.prune_low_magnitude
num_images = train_data.samples
end_step = np.ceil(num_images / batch_size).astype(np.int32) * epochs
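(For reference, the training log at the bottom shows 564 batches per epoch, so end_step here works out to 564 * 2 = 1128 steps.)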
# Define pruning parameters
pruning_params = {
    'pruning_schedule': tfmot.sparsity.keras.PolynomialDecay(
        initial_sparsity=0.50,
        final_sparsity=0.80,
        begin_step=0,
        end_step=end_step
    )
}
# Apply pruning to the model
model_for_pruning = prune_low_magnitude(model_for_pruning, **pruning_params)

# prune_low_magnitude requires a recompile.
model_for_pruning.compile(
    optimizer='adam',
    loss=tf.keras.losses.binary_crossentropy,
    metrics=['accuracy']
)
model_for_pruning.summary()
import tempfile

# Create a temporary directory for logs
logdir = tempfile.mkdtemp()

callbacks = [
    tfmot.sparsity.keras.UpdatePruningStep(),
    tfmot.sparsity.keras.PruningSummaries(log_dir=logdir),
]

# Assuming you have train_images and train_labels defined elsewhere
model_for_pruning.fit(
    train_data,
    batch_size=batch_size,
    epochs=epochs,
    callbacks=callbacks
)
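(Side note: once this run finishes, my plan is to strip the pruning wrappers before saving the final model. A rough sketch of that step, assuming tfmot's strip_pruning is the right call and using a placeholder output path; I haven't actually reached this point yet:)

# Remove the pruning wrappers so the saved model is a plain Keras model (sketch, not run yet)
model_for_export = tfmot.sparsity.keras.strip_pruning(model_for_pruning)
# Placeholder path for the exported model
model_for_export.save('/kaggle/working/pruned_model.h5')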
Here's the warning I get; also note how my loss shoots up and my accuracy drops compared to the base model:
/opt/conda/lib/python3.10/site-packages/keras/preprocessing/image.py:1861: UserWarning: This ImageDataGenerator specifies featurewise_center, but it hasn't been fit on any training data. Fit it first by calling .fit(numpy_data).
  warnings.warn(
Epoch 1/2
285/564 [==============>…] - ETA: 1:33:03 - loss: 1.3915 - accuracy: 0.8301
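If I understand that warning, featurewise_center needs the generator to be fit on some sample images before training so it has a mean to subtract. Would something like this be the right idea? (Rough, untested sketch: x_sample is just one batch pulled from the generator, and I'm not sure one batch is a big enough sample.)

# Fit the generator's statistics on a sample batch so featurewise_center has a dataset mean to use
# (sketch only: x_sample is a single batch; a larger, representative sample may be needed)
x_sample, _ = next(train_data)
train_datagen.fit(x_sample)

Or should I just drop featurewise_center=True, since preprocess_input already rescales the inputs?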