I am developing a CNN model using TensorFlow/Keras to predict the curvature of a road from traffic images. My dataset contains 5500 images, and the curvature values are continuous numbers ranging between -0.3 and 0.3. However, my model consistently outputs the same prediction for any input. I’ve experimented with altering the learning rate, changing the optimizer, and modifying dropout and augmentation layers, but none of these changes have resolved the issue.
Here is the architecture of my model:
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import layers, models
# Target spatial size (height, width) fed to layers.Resizing below.
img_height, img_width = 480, 680 # Example dimensions
class ConditionalDataAugmentation(layers.Layer):
    """Apply random zoom / translation / brightness only during training.

    At inference time (``training`` falsy) the input passes through
    unchanged.

    NOTE(review): the default factors of 0.5 are very aggressive for a
    curvature-regression task — a +/-50% translation (with reflect fill)
    can move most of the road out of frame while the curvature label
    stays the same, decoupling input from target and pushing the model
    toward predicting a constant. Consider much smaller factors
    (e.g. 0.05-0.1) — TODO confirm against validation loss.
    """

    def __init__(self, zoom_factor=0.5, translation_factor=0.5,
                 brightness_factor=0.5, **kwargs):
        # Forward **kwargs to Layer so standard arguments such as
        # name=/dtype= still work (the original dropped them).
        super().__init__(**kwargs)
        self.data_augmentation = tf.keras.Sequential([
            layers.RandomZoom(height_factor=zoom_factor,
                              width_factor=zoom_factor),
            layers.RandomTranslation(height_factor=translation_factor,
                                     width_factor=translation_factor,
                                     fill_mode='reflect'),
            layers.RandomBrightness(factor=brightness_factor),
        ])

    def call(self, inputs, training=None):
        # Forward the training flag so the inner random layers also see
        # training mode explicitly instead of falling back to their own
        # context resolution.
        if training:
            return self.data_augmentation(inputs, training=training)
        return inputs
# Regression CNN: traffic image -> scalar road curvature in [-0.3, 0.3].
# Five conv/pool stages halve the spatial size each time
# (480x680 -> 15x21 feature maps at 256 channels) before the dense head.
model = models.Sequential([
    layers.Resizing(img_height, img_width),
    ConditionalDataAugmentation(),          # active only when training=True
    layers.Rescaling(1./255),               # bring pixels into [0, 1]
    layers.Conv2D(16, 3, padding='same', activation='relu'),
    layers.BatchNormalization(),
    layers.MaxPooling2D(),
    layers.Conv2D(32, 3, padding='same', activation='relu'),
    layers.BatchNormalization(),
    layers.MaxPooling2D(),
    layers.Conv2D(64, 3, padding='same', activation='relu'),
    layers.BatchNormalization(),
    layers.MaxPooling2D(),
    layers.Conv2D(128, 3, padding='same', activation='relu'),
    layers.BatchNormalization(),
    layers.MaxPooling2D(),
    layers.Conv2D(256, 3, padding='same', activation='relu'),
    layers.BatchNormalization(),
    layers.MaxPooling2D(),
    layers.Flatten(),
    layers.Dense(512, activation='relu'),
    layers.Dropout(0.5),
    layers.Dense(1),                        # linear output for regression
])
model.build((None, img_height, img_width, 3))

initial_learning_rate = 0.001  # Example learning rate
optimizer = keras.optimizers.Adam(learning_rate=initial_learning_rate)

# Train with MSE rather than MAE: with targets confined to [-0.3, 0.3],
# MAE's gradient has constant magnitude regardless of error size, so
# predicting the median target (~0) for every input sits in a wide, flat
# optimum — a classic cause of a model that emits the same value for all
# inputs. MSE scales the gradient with the error and breaks that plateau;
# MAE is kept as a reporting metric.
model.compile(optimizer=optimizer,
              loss='mse',
              metrics=['mse', 'mae'])