I've got an error with the code below:

import pathlib

import tensorflow as tf
from tensorflow.keras.applications.efficientnet import preprocess_input
from tensorflow.keras.preprocessing.image import ImageDataGenerator

# Set your desired image dimensions

img_height, img_width = 224, 224

batch_size = 202

# Update the data pipeline

resized_train_ds = train_ds.map(lambda x, y: (preprocess_input(tf.image.resize(x, (img_height, img_width))), y))
resized_val_ds = val_ds.map(lambda x, y: (preprocess_input(tf.image.resize(x, (img_height, img_width))), y))

# Get a batch of samples and check the shapes

sample_batch = next(iter(resized_train_ds.batch(1)))
print("Sample Input Shape:", sample_batch[0].shape)
print("Sample Label Shape:", sample_batch[1].shape)

# Define the paths

train_data_dir = pathlib.Path('/Users/brown62/.keras/datasets/CUB_200_2011/images')
val_data_dir = pathlib.Path('/Users/brown62/.keras/datasets/CUB_200_2011/images')

# Set your desired image dimensions

img_height, img_width = 224, 224

# Use ImageDataGenerator for loading and augmenting the data

train_datagen = ImageDataGenerator(
    rescale=1./255,
    shear_range=0.2,
    zoom_range=0.2,
    horizontal_flip=True
)

val_datagen = ImageDataGenerator(rescale=1./255)

# Load the training data

train_generator = train_datagen.flow_from_directory(
    train_data_dir,
    target_size=(img_height, img_width),
    batch_size=32,
    class_mode='categorical'  # Assuming you have categorical labels
)

# Load the validation data

val_generator = val_datagen.flow_from_directory(
    val_data_dir,
    target_size=(img_height, img_width),
    batch_size=32,
    class_mode='categorical'
)

train_data = train_ds.batch(batch_size)
validation_data = val_ds.batch(batch_size)

# Display the shape of the data

print("Training data shape:", train_generator.image_shape)
print("Validation data shape:", val_generator.image_shape)
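
As a side note for debugging: image_shape only reports the image dimensions, not the label shape that the loss will actually receive. To inspect both, you can pull one batch directly from the generators defined above (a quick sketch; the shapes in the comments are what I would expect with the settings shown here, not verified output):

x_batch, y_batch = next(train_generator)
print("Image batch shape:", x_batch.shape)  # e.g. (32, 224, 224, 3)
print("Label batch shape:", y_batch.shape)  # (32, num_classes) with class_mode='categorical'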

I’ve got the error below:

Epoch 1/10

ValueError Traceback (most recent call last)
Cell In[91], line 2
1 epochs = 10
----> 2 history = model.fit(
3 resized_train_ds,
4 validation_data=resized_val_ds,
5 epochs=epochs
6 )

File /Library/Frameworks/Python.framework/Versions/3.11/lib/python3.11/site-packages/keras/src/utils/traceback_utils.py:70, in filter_traceback.<locals>.error_handler(*args, **kwargs)
     67 filtered_tb = _process_traceback_frames(e.__traceback__)
     68 # To get the full stack trace, call:
     69 # tf.debugging.disable_traceback_filtering()
---> 70 raise e.with_traceback(filtered_tb) from None
     71 finally:
     72     del filtered_tb

File /var/folders/5_/8vtxgjjd2sv9fnr9h0gx8x8w0000gs/T/__autograph_generated_filejerj9ee4.py:15, in outer_factory.<locals>.inner_factory.<locals>.tf__train_function(iterator)
     13 try:
     14     do_return = True
---> 15     retval_ = ag__.converted_call(ag__.ld(step_function), (ag__.ld(self), ag__.ld(iterator)), None, fscope)
     16 except:
     17     do_return = False

ValueError: in user code:

File "/Library/Frameworks/Python.framework/Versions/3.11/lib/python3.11/site-packages/keras/src/engine/training.py", line 1401, in train_function  *
    return step_function(self, iterator)
File "/Library/Frameworks/Python.framework/Versions/3.11/lib/python3.11/site-packages/keras/src/engine/training.py", line 1384, in step_function  **
    outputs = model.distribute_strategy.run(run_step, args=(data,))
File "/Library/Frameworks/Python.framework/Versions/3.11/lib/python3.11/site-packages/keras/src/engine/training.py", line 1373, in run_step  **
    outputs = model.train_step(data)
File "/Library/Frameworks/Python.framework/Versions/3.11/lib/python3.11/site-packages/keras/src/engine/training.py", line 1151, in train_step
    loss = self.compute_loss(x, y, y_pred, sample_weight)
File "/Library/Frameworks/Python.framework/Versions/3.11/lib/python3.11/site-packages/keras/src/engine/training.py", line 1209, in compute_loss
    return self.compiled_loss(
File "/Library/Frameworks/Python.framework/Versions/3.11/lib/python3.11/site-packages/keras/src/engine/compile_utils.py", line 277, in __call__
    loss_value = loss_obj(y_t, y_p, sample_weight=sw)
File "/Library/Frameworks/Python.framework/Versions/3.11/lib/python3.11/site-packages/keras/src/losses.py", line 143, in __call__
    losses = call_fn(y_true, y_pred)
File "/Library/Frameworks/Python.framework/Versions/3.11/lib/python3.11/site-packages/keras/src/losses.py", line 270, in call  **
    return ag_fn(y_true, y_pred, **self._fn_kwargs)
File "/Library/Frameworks/Python.framework/Versions/3.11/lib/python3.11/site-packages/keras/src/losses.py", line 2221, in categorical_crossentropy
    return backend.categorical_crossentropy(
File "/Library/Frameworks/Python.framework/Versions/3.11/lib/python3.11/site-packages/keras/src/backend.py", line 5573, in categorical_crossentropy
    target.shape.assert_is_compatible_with(output.shape)

ValueError: Shapes (None, 1) and (None, 202) are incompatible

Hi @Simon_Brown_II, could you please make sure that the number of neurons in the last layer is equal to the number of classes in your labels? Thank you.
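
For what it's worth, here is a minimal sketch of that check. It assumes the dataset has 200 classes (CUB_200_2011 contains 200 bird species), that EfficientNetB0 is the backbone, and that num_classes and base_model are placeholder names to adapt to your actual model. The (None, 1) target shape in the error also suggests the labels are integer-encoded, so you would either switch to a sparse loss or one-hot encode the labels to match categorical_crossentropy:

import tensorflow as tf

num_classes = 200  # assumption: CUB_200_2011 has 200 classes; use your real label count

# Hypothetical model head: the last Dense layer must have num_classes units
base_model = tf.keras.applications.EfficientNetB0(include_top=False, pooling='avg', input_shape=(224, 224, 3))
model = tf.keras.Sequential([
    base_model,
    tf.keras.layers.Dense(num_classes, activation='softmax'),
])

# Option 1: keep integer labels of shape (None, 1) and use the sparse loss
model.compile(optimizer='adam', loss='sparse_categorical_crossentropy', metrics=['accuracy'])

# Option 2: keep categorical_crossentropy and one-hot encode the labels instead
# resized_train_ds = resized_train_ds.map(lambda x, y: (x, tf.one_hot(tf.squeeze(y), num_classes)))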