I’m working on a submission for a Kaggle competition and found a very interesting paper about CycleGANs. Now I’m trying to implement it using TensorFlow:
import tensorflow as tf
from functools import partial


def build_cyclegan(
    generator_g, generator_f, discriminator_x, discriminator_y, lambda_cycle=10
):
    real_x = tf.keras.layers.Input(shape=(256, 256, 3), name="x_real")
    real_y = tf.keras.layers.Input(shape=(256, 256, 3), name="y_real")

    # Forward cycle X -> Y -> X and backward cycle Y -> X -> Y
    fake_y = generator_g(real_x)
    cycled_x = generator_f(fake_y)
    fake_x = generator_f(real_y)
    cycled_y = generator_g(fake_x)

    # Discriminator outputs on real and generated images
    disc_real_x = discriminator_x(real_x)
    disc_real_y = discriminator_y(real_y)
    disc_fake_x = discriminator_x(fake_x)
    disc_fake_y = discriminator_y(fake_y)

    cycle_gan = tf.keras.Model(
        inputs=[real_x, real_y],
        outputs=[
            disc_real_x,
            disc_real_y,
            cycled_x,
            cycled_y,
            disc_fake_x,
            disc_fake_y,
        ],
    )

    cycle_loss_10 = partial(cycle_consistency_loss, LAMBDA=lambda_cycle)
    cycle_gan.compile(
        loss=[
            addversarial_loss_discriminator,
            addversarial_loss_discriminator,
            addversarial_loss_generator,
            addversarial_loss_generator,
            cycle_loss_10,
            cycle_loss_10,
        ],
        optimizer=tf.keras.optimizers.Adam(2e-4, beta_1=0.5),
    )
    return cycle_gan
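The loss helpers referenced in compile aren't shown here; they follow the usual CycleGAN formulation (least-squares adversarial losses plus an L1 cycle-consistency loss). A minimal sketch of what I mean, assuming Keras's standard (y_true, y_pred) loss signature (the exact bodies may differ):

def addversarial_loss_discriminator(y_true, y_pred):
    # LSGAN-style loss on the discriminator output (sketch)
    return tf.reduce_mean(tf.square(y_true - y_pred))


def addversarial_loss_generator(y_true, y_pred):
    # The generator tries to push the discriminator output towards "real" (sketch)
    return tf.reduce_mean(tf.square(tf.ones_like(y_pred) - y_pred))


def cycle_consistency_loss(y_true, y_pred, LAMBDA=10):
    # L1 loss between the original and the reconstructed (cycled) image
    return LAMBDA * tf.reduce_mean(tf.abs(y_true - y_pred))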
And I got a weird exception which says that my model expects 2 inputs but received only 1. This is strange, because I am trying to pass 2 images into the model.
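To be explicit about what I expect fit to feed the model: a pair of batched image tensors. A minimal sketch with dummy data (assuming the generators and discriminators accept 256x256x3 inputs):

dummy_photo = tf.zeros((1, 256, 256, 3))
dummy_monet = tf.zeros((1, 256, 256, 3))
# The functional model was built with inputs=[real_x, real_y],
# so calling it directly takes a list of two tensors
outputs = cycle_gan([dummy_photo, dummy_monet])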
Here is how I read the data from the tfrec files:
import tensorflow as tf


def _parse_image_function(example_proto):
    feature_description = {
        "image": tf.io.FixedLenFeature([], tf.string),
    }
    parsed_features = tf.io.parse_single_example(example_proto, feature_description)
    image = tf.io.decode_jpeg(parsed_features["image"], channels=3)
    image = tf.reshape(image, (256, 256, 3))
    return image


def create_dataset(filenames, repeat=True):
    dataset = tf.data.TFRecordDataset(filenames)
    dataset = dataset.map(_parse_image_function)
    if repeat:
        dataset = dataset.repeat()
    return dataset


photo_filenames = tf.data.Dataset.list_files(photos_path + "/*.tfrec")
monet_filenames = tf.data.Dataset.list_files(monet_path + "/*.tfrec")

photo_dataset = create_dataset(photo_filenames)
monet_dataset = create_dataset(monet_filenames)

# Pair a photo with a Monet painting at every step
combined_dataset = tf.data.Dataset.zip((photo_dataset, monet_dataset))
combined_dataset = combined_dataset.shuffle(buffer_size=1000)

batch_size = 1
dataset = combined_dataset.batch(batch_size).prefetch(tf.data.AUTOTUNE)
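To sanity-check what the pipeline actually yields, I can inspect the element spec of the batched dataset (a quick diagnostic sketch):

# Shows the structure fit() will iterate over; I expect a tuple of two
# (None, 256, 256, 3) uint8 tensor specs, one per source dataset
print(dataset.element_spec)

# Pull one element to look at the concrete shapes
example_photo, example_monet = next(iter(dataset))
print(example_photo.shape, example_monet.shape)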
The exception is raised when I try to fit the model on the created dataset:
cycle_gan.fit(dataset, epochs=1)
And the exception says:
ValueError Traceback (most recent call last)
Cell In[95], line 1
----> 1 cycle_gan.fit(dataset, epochs=1)
File ~/coding/python/kaggle/venv-kaggle/lib64/python3.10/site-packages/keras/src/utils/traceback_utils.py:122, in filter_traceback.<locals>.error_handler(*args, **kwargs)
119 filtered_tb = _process_traceback_frames(e.__traceback__)
120 # To get the full stack trace, call:
121 # `keras.config.disable_traceback_filtering()`
--> 122 raise e.with_traceback(filtered_tb) from None
123 finally:
124 del filtered_tb
File ~/coding/python/kaggle/venv-kaggle/lib64/python3.10/site-packages/keras/src/layers/input_spec.py:160, in assert_input_compatibility(input_spec, inputs, layer_name)
158 inputs = tree.flatten(inputs)
159 if len(inputs) != len(input_spec):
--> 160 raise ValueError(
161 f'Layer "{layer_name}" expects {len(input_spec)} input(s),'
162 f" but it received {len(inputs)} input tensors. "
163 f"Inputs received: {inputs}"
164 )
165 for input_index, (x, spec) in enumerate(zip(inputs, input_spec)):
166 if spec is None:
ValueError: Layer "functional_2" expects 2 input(s), but it received 1 input tensors. Inputs received: [<tf.Tensor 'data:0' shape=(None, 256, 256, 3) dtype=uint8>]
I don’t know what I’m doing wrong. I thought the issue was caused by the tf.data.Dataset.zip method, but every example I found online zips datasets the same way, so I don’t even know where to look.
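For reference, zip itself seems to pair elements the way I expect; a tiny toy example (sketch):

a = tf.data.Dataset.range(3)
b = tf.data.Dataset.range(10, 13)
for pair in tf.data.Dataset.zip((a, b)):
    print(pair)  # (0, 10), (1, 11), (2, 12) as tuples of scalar tensors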