Hello, I’m working on malaria detection and diagnosis with TensorFlow and I’ve run into a ValueError with the code below. Even though I’m following this [tutorial](https://www.youtube.com/watch?v=IA3WxTTPXqQ) and doing exactly what the instructor does, I still get the error. I’ve searched for it and read through the message, but I can’t see what is wrong; the traceback says there is an error, yet I can’t find anything in my code that matches it. Here is my code:
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt
import tensorflow_datasets as tfds
from keras.layers import Conv2D, MaxPool2D, Dense, Flatten, InputLayer
from keras.optimizers import Adam
from keras.losses import BinaryCrossentropy
# split=['train'] makes tfds.load return a list containing a single tf.data.Dataset,
# which is why dataset[0] is used further down.
dataset, dataset_info = tfds.load('malaria',
                                  with_info=True,
                                  as_supervised=True,
                                  shuffle_files=True,
                                  split=['train'])
print(dataset)
print(dataset_info)
def splits(dataset, TRAIN_RATIO, VAL_RATIO, TEST_RATIO):
    DATASET_SIZE = len(dataset)
    train_dataset = dataset.take(int(TRAIN_RATIO * DATASET_SIZE))
    val_test_dataset = dataset.skip(int(TRAIN_RATIO * DATASET_SIZE))
    val_dataset = val_test_dataset.take(int(VAL_RATIO * DATASET_SIZE))
    test_dataset = val_test_dataset.skip(int(VAL_RATIO * DATASET_SIZE))
    return train_dataset, val_dataset, test_dataset
TRAIN_RATIO = 0.6
VAL_RATIO = 0.2
TEST_RATIO = 0.2
#dataset = tf.data.Dataset.range(10)
train_dataset, val_dataset, test_dataset = splits(dataset[0],
                                                  TRAIN_RATIO,
                                                  VAL_RATIO,
                                                  TEST_RATIO)
print(list(train_dataset.take(1).as_numpy_iterator()),
      list(val_dataset.take(1).as_numpy_iterator()),
      list(test_dataset.take(1).as_numpy_iterator()))
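# Sanity check of the split helper on a toy dataset (illustrative only, same idea as
# the commented-out range(10) line above; the toy_* names are just for this check):
toy_train, toy_val, toy_test = splits(tf.data.Dataset.range(10),
                                      TRAIN_RATIO, VAL_RATIO, TEST_RATIO)
print(list(toy_train.as_numpy_iterator()))  # [0, 1, 2, 3, 4, 5]
print(list(toy_val.as_numpy_iterator()))    # [6, 7]
print(list(toy_test.as_numpy_iterator()))   # [8, 9]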
for data in dataset[0].take(4):
    print(data)
for i, (image, label) in enumerate(train_dataset.take(16)):
    ax = plt.subplot(4, 4, i + 1)
    plt.imshow(image)
    plt.title(dataset_info.features['label'].int2str(label))
    plt.axis('off')

plt.show()
print(dataset_info.features['label'].int2str(0))
IM_SIZE = 224
def resize_rescale(image, label):
    return tf.image.resize(image, (IM_SIZE, IM_SIZE)) / 255.0, label
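# Illustrative check of the mapping (not part of the tutorial, just for clarity):
# a dummy image comes out resized to (224, 224, 3) with values scaled into [0, 1].
dummy_image, dummy_label = resize_rescale(tf.zeros([100, 120, 3], dtype=tf.uint8), 0)
print(dummy_image.shape, dummy_label)  # (224, 224, 3) 0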
train_dataset = train_dataset.map(resize_rescale)
for image, label in train_dataset.take(1):
    print(image, label)
train_dataset = (train_dataset
                 .shuffle(buffer_size=8, reshuffle_each_iteration=True)
                 .prefetch(tf.data.AUTOTUNE))
lenet_model = tf.keras.Sequential([
    InputLayer(input_shape=(IM_SIZE, IM_SIZE, 3)),

    Conv2D(filters=6, kernel_size=5, strides=1, padding='valid',
           activation='sigmoid'),
    MaxPool2D(pool_size=2, strides=2),

    Conv2D(filters=16, kernel_size=5, strides=1, padding='valid',
           activation='sigmoid'),
    MaxPool2D(pool_size=2, strides=2),

    Flatten(),

    Dense(100, activation='sigmoid'),
    Dense(10, activation='sigmoid'),
    Dense(1, activation='sigmoid'),
])
lenet_model.summary()
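# For reference, the output shapes I expect the summary to report (my own arithmetic,
# given 'valid' padding and 2x2 pooling):
# (224, 224, 3) -> Conv2D(6, 5): (220, 220, 6) -> MaxPool: (110, 110, 6)
# -> Conv2D(16, 5): (106, 106, 16) -> MaxPool: (53, 53, 16) -> Flatten: 44944
# -> Dense(100) -> Dense(10) -> Dense(1)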
lenet_model.compile(optimizer=Adam(learning_rate=0.01), loss=BinaryCrossentropy())
history = lenet_model.fit(train_dataset,
                          validation_data=val_dataset,
                          epochs=100,
                          verbose=1)