Issue With Keras Functional API and Keras Tuner

Hello,

I am facing an error when using Keras Tuner with the Keras Functional API. Below is the full code:

```python
import os

import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
import tensorflow as tf

from sklearn.model_selection import train_test_split
from tensorflow import keras
from tensorflow.keras import layers
from tensorflow.keras.layers.experimental import preprocessing

!pip install -q -U keras-tuner

import keras_tuner as kt

df = pd.read_csv('/content/credit-approval_csv.csv', delimiter=',')

train, test = train_test_split(df, test_size=0.2)
train, val = train_test_split(train, test_size=0.2)
print(len(train), 'train examples')
print(len(val), 'validation examples')
print(len(test), 'test examples')

def df_to_dataset(dataframe, shuffle=True, batch_size=32):
    dataframe = dataframe.copy()
    labels = dataframe.pop('class')
    ds = tf.data.Dataset.from_tensor_slices((dict(dataframe), labels))
    if shuffle:
        ds = ds.shuffle(buffer_size=len(dataframe))
    ds = ds.batch(batch_size)
    ds = ds.prefetch(batch_size)
    return ds

def get_normalization_layer(name, dataset):
    # Create a Normalization layer for our feature.
    normalizer = preprocessing.Normalization(axis=None)

    # Prepare a Dataset that only yields our feature.
    feature_ds = dataset.map(lambda x, y: x[name])

    # Learn the statistics of the data.
    normalizer.adapt(feature_ds)

    return normalizer

def get_category_encoding_layer(name, dataset, dtype, max_tokens=None):
    # Create a StringLookup layer which will turn strings into integer indices.
    if dtype == 'string':
        index = preprocessing.StringLookup(max_tokens=max_tokens)
    else:
        index = preprocessing.IntegerLookup(max_tokens=max_tokens)

    # Prepare a Dataset that only yields our feature.
    feature_ds = dataset.map(lambda x, y: x[name])

    # Learn the set of possible values and assign them a fixed integer index.
    index.adapt(feature_ds)

    # Create a CategoryEncoding layer for our integer indices.
    encoder = preprocessing.CategoryEncoding(num_tokens=index.vocabulary_size())

    # Apply one-hot encoding to our indices. The lambda function captures the
    # layers so we can use them, or include them in the functional model later.
    return lambda feature: encoder(index(feature))

batch_size = 256
train_ds = df_to_dataset(train, batch_size=batch_size)
val_ds = df_to_dataset(val, shuffle=False, batch_size=batch_size)
test_ds = df_to_dataset(test, shuffle=False, batch_size=batch_size)

all_inputs = []
encoded_features = []

# Numeric features.
for header in ['A2', 'A3', 'A8', 'A11', 'A14', 'A15']:
    numeric_col = tf.keras.Input(shape=(15), name=header)
    normalization_layer = get_normalization_layer(header, train_ds)
    encoded_numeric_col = normalization_layer(numeric_col)
    all_inputs.append(numeric_col)
    encoded_features.append(encoded_numeric_col)

# Categorical features encoded as string.
categorical_cols = ['A13', 'A12', 'A10', 'A9',
                    'A7', 'A6', 'A5', 'A4', 'A1']
for header in categorical_cols:
    categorical_col = tf.keras.Input(shape=(15), name=header, dtype='string')
    encoding_layer = get_category_encoding_layer(header, train_ds, dtype='string',
                                                 max_tokens=5)
    encoded_categorical_col = encoding_layer(categorical_col)
    all_inputs.append(categorical_col)
    encoded_features.append(encoded_categorical_col)

def build_model(hp):
    hp_units = hp.Int('units', min_value=1, max_value=1512, step=32)
    all_features = tf.keras.layers.concatenate(encoded_features)
    dense = layers.Dense(units=hp_units, activation="relu")
    x = dense(all_features)
    x = layers.Dense(units=hp_units, activation="relu")(x)
    x = layers.Dense(units=hp_units, activation="relu")(x)
    x = layers.Dense(units=hp_units, activation="relu")(x)
    x = layers.Dropout(rate=0.5)(x)
    outputs = layers.Dense(units=hp_units)(x)

    model = tf.keras.Model(all_inputs, outputs)

    hp_learning_rate = hp.Choice('learning_rate', values=[1e-2, 1e-3, 1e-4])

    optimizer = hp.Choice("optimizer", ["adam", "sgd", "RMSprop"])
    loss = hp.Choice("loss", ["BinaryCrossentropy", "CategoricalCrossentropy", "SparseCategoricalCrossentropy"])

    model.compile(optimizer,
                  loss,
                  metrics=['accuracy'])

    return model

tuner = kt.Hyperband(build_model,
                     objective='val_accuracy',
                     max_epochs=10,
                     factor=3,
                     hyperband_iterations=1,
                     directory='my_dir',
                     project_name='intro_to_kt',
                     overwrite=True)

tuner.search(train_ds, epochs=50, validation_data=val_ds)
```

After running this, I get the error below:

```
Epoch 1/2
WARNING:tensorflow:Model was constructed with shape (None, 15) for input KerasTensor(type_spec=TensorSpec(shape=(None, 15), dtype=tf.float32, name='A2'), name='A2', description="created by layer 'A2'"), but it was called on an input with incompatible shape (None, 1).
WARNING:tensorflow:Model was constructed with shape (None, 15) for input KerasTensor(type_spec=TensorSpec(shape=(None, 15), dtype=tf.float32, name='A3'), name='A3', description="created by layer 'A3'"), but it was called on an input with incompatible shape (None, 1).
WARNING:tensorflow:Model was constructed with shape (None, 15) for input KerasTensor(type_spec=TensorSpec(shape=(None, 15), dtype=tf.float32, name='A8'), name='A8', description="created by layer 'A8'"), but it was called on an input with incompatible shape (None, 1).
WARNING:tensorflow:Model was constructed with shape (None, 15) for input KerasTensor(type_spec=TensorSpec(shape=(None, 15), dtype=tf.float32, name='A11'), name='A11', description="created by layer 'A11'"), but it was called on an input with incompatible shape (None, 1).
WARNING:tensorflow:Model was constructed with shape (None, 15) for input KerasTensor(type_spec=TensorSpec(shape=(None, 15), dtype=tf.float32, name='A14'), name='A14', description="created by layer 'A14'"), but it was called on an input with incompatible shape (None, 1).
WARNING:tensorflow:Model was constructed with shape (None, 15) for input KerasTensor(type_spec=TensorSpec(shape=(None, 15), dtype=tf.float32, name='A15'), name='A15', description="created by layer 'A15'"), but it was called on an input with incompatible shape (None, 1).
WARNING:tensorflow:Model was constructed with shape (None, 15) for input KerasTensor(type_spec=TensorSpec(shape=(None, 15), dtype=tf.string, name='A13'), name='A13', description="created by layer 'A13'"), but it was called on an input with incompatible shape (None, 1).
WARNING:tensorflow:Model was constructed with shape (None, 15) for input KerasTensor(type_spec=TensorSpec(shape=(None, 15), dtype=tf.string, name='A12'), name='A12', description="created by layer 'A12'"), but it was called on an input with incompatible shape (None, 1).
WARNING:tensorflow:Model was constructed with shape (None, 15) for input KerasTensor(type_spec=TensorSpec(shape=(None, 15), dtype=tf.string, name='A10'), name='A10', description="created by layer 'A10'"), but it was called on an input with incompatible shape (None, 1).
WARNING:tensorflow:Model was constructed with shape (None, 15) for input KerasTensor(type_spec=TensorSpec(shape=(None, 15), dtype=tf.string, name='A9'), name='A9', description="created by layer 'A9'"), but it was called on an input with incompatible shape (None, 1).
WARNING:tensorflow:Model was constructed with shape (None, 15) for input KerasTensor(type_spec=TensorSpec(shape=(None, 15), dtype=tf.string, name='A7'), name='A7', description="created by layer 'A7'"), but it was called on an input with incompatible shape (None, 1).
WARNING:tensorflow:Model was constructed with shape (None, 15) for input KerasTensor(type_spec=TensorSpec(shape=(None, 15), dtype=tf.string, name='A6'), name='A6', description="created by layer 'A6'"), but it was called on an input with incompatible shape (None, 1).
WARNING:tensorflow:Model was constructed with shape (None, 15) for input KerasTensor(type_spec=TensorSpec(shape=(None, 15), dtype=tf.string, name='A5'), name='A5', description="created by layer 'A5'"), but it was called on an input with incompatible shape (None, 1).
WARNING:tensorflow:Model was constructed with shape (None, 15) for input KerasTensor(type_spec=TensorSpec(shape=(None, 15), dtype=tf.string, name='A4'), name='A4', description="created by layer 'A4'"), but it was called on an input with incompatible shape (None, 1).
WARNING:tensorflow:Model was constructed with shape (None, 15) for input KerasTensor(type_spec=TensorSpec(shape=(None, 15), dtype=tf.string, name='A1'), name='A1', description="created by layer 'A1'"), but it was called on an input with incompatible shape (None, 1).

ValueError Traceback (most recent call last)
in ()
----> 1 tuner.search(train_ds, epochs=50, validation_data=val_ds)

13 frames
/usr/local/lib/python3.7/dist-packages/tensorflow/python/framework/func_graph.py in wrapper(*args, **kwargs)
    984     except Exception as e:  # pylint:disable=broad-except
    985       if hasattr(e, "ag_error_metadata"):
--> 986         raise e.ag_error_metadata.to_exception(e)
    987       else:
    988         raise

ValueError: in user code:

/usr/local/lib/python3.7/dist-packages/tensorflow/python/keras/engine/training.py:855 train_function  *
    return step_function(self, iterator)
/usr/local/lib/python3.7/dist-packages/tensorflow/python/keras/engine/training.py:845 step_function  **
    outputs = model.distribute_strategy.run(run_step, args=(data,))
/usr/local/lib/python3.7/dist-packages/tensorflow/python/distribute/distribute_lib.py:1285 run
    return self._extended.call_for_each_replica(fn, args=args, kwargs=kwargs)
/usr/local/lib/python3.7/dist-packages/tensorflow/python/distribute/distribute_lib.py:2833 call_for_each_replica
    return self._call_for_each_replica(fn, args, kwargs)
/usr/local/lib/python3.7/dist-packages/tensorflow/python/distribute/distribute_lib.py:3608 _call_for_each_replica
    return fn(*args, **kwargs)
/usr/local/lib/python3.7/dist-packages/tensorflow/python/keras/engine/training.py:838 run_step  **
    outputs = model.train_step(data)
/usr/local/lib/python3.7/dist-packages/tensorflow/python/keras/engine/training.py:795 train_step
    y_pred = self(x, training=True)
/usr/local/lib/python3.7/dist-packages/tensorflow/python/keras/engine/base_layer.py:1030 __call__
    outputs = call_fn(inputs, *args, **kwargs)
/usr/local/lib/python3.7/dist-packages/tensorflow/python/keras/engine/functional.py:421 call
    inputs, training=training, mask=mask)
/usr/local/lib/python3.7/dist-packages/tensorflow/python/keras/engine/functional.py:556 _run_internal_graph
    outputs = node.layer(*args, **kwargs)
/usr/local/lib/python3.7/dist-packages/tensorflow/python/keras/engine/base_layer.py:1013 __call__
    input_spec.assert_input_compatibility(self.input_spec, inputs, self.name)
/usr/local/lib/python3.7/dist-packages/tensorflow/python/keras/engine/input_spec.py:255 assert_input_compatibility
    ' but received input with shape ' + display_shape(x.shape))

ValueError: Input 0 of layer dense is incompatible with the layer: expected axis -1 of input shape to have value 131 but received input with shape (None, 47)
```

Hi,

If you remove the Keras Tuner code, does your model train without issues? The error seems to point to a discrepancy between your input layer shapes and your data format.
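
For instance, each feature column coming out of `df_to_dataset` is a single value per example (the warnings show batches of shape `(None, 1)`), so the `Input` layers would normally be declared with `shape=(1,)` rather than `shape=(15)`. A minimal sketch of that change, reusing the same feature names and helper functions from your code:

```python
# Sketch only: declare one scalar input per feature so the Input shapes
# match the (None, 1) batches produced by the tf.data pipeline.
all_inputs = []
encoded_features = []

for header in ['A2', 'A3', 'A8', 'A11', 'A14', 'A15']:
    numeric_col = tf.keras.Input(shape=(1,), name=header)
    normalization_layer = get_normalization_layer(header, train_ds)
    all_inputs.append(numeric_col)
    encoded_features.append(normalization_layer(numeric_col))

for header in ['A13', 'A12', 'A10', 'A9', 'A7', 'A6', 'A5', 'A4', 'A1']:
    categorical_col = tf.keras.Input(shape=(1,), name=header, dtype='string')
    encoding_layer = get_category_encoding_layer(header, train_ds,
                                                 dtype='string', max_tokens=5)
    all_inputs.append(categorical_col)
    encoded_features.append(encoding_layer(categorical_col))
```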

Hi, thank you for the prompt response. I can try, but I need the tuner to select the best model parameters.

Still, let me select some parameters myself and try running the model without the tuner code.

I know, but you usually use the tuner after your model architecture is set and working. Once you are sure that everything connects properly, you can move on to hyperparameter tuning.
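
For example, once the input shapes line up with the data, you could first compile and fit the model with hand-picked values, roughly like the sketch below. The numbers are just illustrative, and this assumes the `class` labels have already been encoded as numeric 0/1:

```python
# Rough sketch: train once with fixed hyperparameters, no tuner involved.
all_features = tf.keras.layers.concatenate(encoded_features)
x = layers.Dense(64, activation='relu')(all_features)
x = layers.Dropout(0.5)(x)
output = layers.Dense(1)(x)

model = tf.keras.Model(all_inputs, output)
model.compile(optimizer=tf.keras.optimizers.Adam(learning_rate=1e-3),
              loss=tf.keras.losses.BinaryCrossentropy(from_logits=True),
              metrics=['accuracy'])

model.fit(train_ds, epochs=10, validation_data=val_ds)
```

Once that runs end to end, you can reintroduce `build_model(hp)` and the tuner on top of the same architecture.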

Acknowledged. Thank you again for the response. Let me try this and get back to you.
