Is there a way to attach the USE as an embedding layer to a Recurrent Neural Network (RNN) or a Bi-directional RNN? I got a value error trying to add it to a Sequential model. I am open to using the functional model construction syntax, e.g.
x = tf.keras.layers.Conv2D(filters, kernel_size, padding='same', data_format='channels_last')(input)
x = tf.keras.layers.LeakyReLU()(x)
I want to avoid using Lambda layers. I want to ensure I can save my model in the .h5
format, not just a TensorFlow-specific format.
This is a snippet of the code I tried.
# Frozen Universal Sentence Encoder v4 from TF Hub.
# Takes a 1-D batch of raw strings (scalar string per example, hence
# input_shape=[]) and produces a fixed-size embedding per sentence.
# NOTE(review): the ValueError "Only instances of keras.Layer can be added
# to a Sequential model" indicates tensorflow_hub.KerasLayer is being used
# with Keras 3, which no longer accepts it — confirm the installed
# TF/Keras versions (the tf_keras / Keras 2 package still accepts it).
sentence_encoder_layer = tf_hub.KerasLayer(
    "https://tfhub.dev/google/universal-sentence-encoder/4",
    trainable=False,
    dtype=tf.string,
    input_shape=[],
    name="USE",
)
def define_rnn_with_universal_encoder():
    """Build and compile a bidirectional RNN classifier on top of the
    Universal Sentence Encoder (USE).

    The model maps a batch of raw strings to a 5-way softmax distribution
    and is compiled with Adam + sparse categorical cross-entropy (labels
    are integer class ids, predictions are probabilities).

    Returns:
        A compiled ``tf.keras.Sequential`` model.
    """
    # NOTE: despite the layer name, this is a 5-unit *softmax* head, not a
    # sigmoid. The runtime name '1_vs_5_sigmoid' is kept unchanged so any
    # code that looks the layer up by name keeps working.
    output_layer = tf.keras.layers.Dense(
        units=5,
        activation=tf.keras.activations.softmax,
        use_bias=True,
        kernel_initializer='glorot_uniform',
        bias_initializer='zeros',
        name='1_vs_5_sigmoid',
    )

    model = tf.keras.Sequential([
        # Module-level frozen USE layer; presumably emits a (batch, 512)
        # embedding for USE/4 — TODO confirm against the hub module signature.
        sentence_encoder_layer,
        # Bug fix: USE yields ONE vector per sentence (rank 2, no time axis),
        # but Bidirectional(LSTM/GRU) requires rank-3 (batch, timesteps,
        # features) input, so the original model could not build. Treat the
        # embedding as a length-1 sequence.
        tf.keras.layers.Reshape((1, 512), name='add_time_axis'),
        tf.keras.layers.Bidirectional(
            layer=tf.keras.layers.LSTM(128, return_sequences=True)
        ),
        tf.keras.layers.Bidirectional(
            layer=tf.keras.layers.GRU(128, return_sequences=False)
        ),
        tf.keras.layers.Flatten(name='flatten'),
        tf.keras.layers.Dropout(rate=0.1),
        tf.keras.layers.Dense(
            units=128,
            activation=tf.keras.activations.relu,
            name='Dense_1_128'),
        tf.keras.layers.Dense(
            units=64,
            activation=tf.keras.activations.relu,
            name='Dense_2_64'),
        output_layer,
    ], name='model')

    optimizer_adam = tf.keras.optimizers.Adam(
        learning_rate=0.001,
        beta_1=0.9,
        beta_2=0.999,
        epsilon=1e-07,
        amsgrad=False,
        name='Adam',
    )

    model.compile(
        # from_logits=False matches the softmax head above.
        loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=False),
        optimizer=optimizer_adam,
        metrics=[tf.keras.metrics.SparseCategoricalAccuracy()],
    )

    # Bug fix: the original did print('Summary:n', model.summary()) — the
    # '\n' escape was missing its backslash, and summary() prints itself and
    # returns None, so the original also printed a stray "None".
    print('Summary:')
    model.summary()
    return model
This is the error I got:
ValueError: Only instances of keras.Layer
can be added to a Sequential model. Received: <tensorflow_hub.keras_layer.KerasLayer object at 0x7f62bb2c0310> (of type <class 'tensorflow_hub.keras_layer.KerasLayer'>)