<code># TF Hub handles: BERT text preprocessor and the matching Small BERT encoder
# (uncased English; L-2_H-128_A-2 = 2 layers, hidden size 128, 2 attention heads).
tfhub_preprocess = 'https://tfhub.dev/tensorflow/bert_en_uncased_preprocess/3'
tfhub_encoder = 'https://tfhub.dev/tensorflow/small_bert/bert_en_uncased_L-2_H-128_A-2/2'
</code>
<code># TF Hub handles: BERT text preprocessor and the matching Small BERT encoder
# (uncased English; L-2_H-128_A-2 = 2 layers, hidden size 128, 2 attention heads).
tfhub_preprocess = 'https://tfhub.dev/tensorflow/bert_en_uncased_preprocess/3'
tfhub_encoder = 'https://tfhub.dev/tensorflow/small_bert/bert_en_uncased_L-2_H-128_A-2/2'
</code>
# TF Hub handles: BERT text preprocessor and the matching Small BERT encoder
# (uncased English; L-2_H-128_A-2 = 2 layers, hidden size 128, 2 attention heads).
tfhub_preprocess = 'https://tfhub.dev/tensorflow/bert_en_uncased_preprocess/3'
tfhub_encoder = 'https://tfhub.dev/tensorflow/small_bert/bert_en_uncased_L-2_H-128_A-2/2'
<code>def build_smallBERT_CNN_classifier_model():
# Builds a Keras model: raw string input -> BERT preprocessing layer ->
# Small BERT encoder -> Dense(64)/Dropout(0.1) -> softmax over num_classes.
# (Indentation of this snippet was flattened by the page scrape.)
text_input = tf.keras.layers.Input(shape=(), dtype=tf.string, name='text')
preprocessing = hub.KerasLayer(tfhub_preprocess, trainable=True, name='preprocessing')
# This call is where the "A KerasTensor is symbolic" ValueError quoted
# below is raised.
encoder_inputs = preprocessing(text_input)
encoder = hub.KerasLayer(tfhub_encoder, trainable=True, name='BERT_encoder')
outputs = encoder(encoder_inputs)
# Takes the per-token "sequence_output"; the Dense head below is then
# applied token-wise, so the model output is 3-D
# (batch, seq_len, num_classes) rather than one prediction per text.
net = sequence_output = outputs["sequence_output"]
net = tf.keras.layers.Dense(64, activation="relu")(net)
net = tf.keras.layers.Dropout(0.1)(net)
net = tf.keras.layers.Dense(num_classes, activation="softmax", name='classifier')(net)
return tf.keras.Model(text_input, net)
</code>
<code>def build_smallBERT_CNN_classifier_model():
# Builds a Keras model: raw string input -> BERT preprocessing layer ->
# Small BERT encoder -> Dense(64)/Dropout(0.1) -> softmax over num_classes.
# (Indentation of this snippet was flattened by the page scrape.)
text_input = tf.keras.layers.Input(shape=(), dtype=tf.string, name='text')
preprocessing = hub.KerasLayer(tfhub_preprocess, trainable=True, name='preprocessing')
# This call is where the "A KerasTensor is symbolic" ValueError quoted
# below is raised.
encoder_inputs = preprocessing(text_input)
encoder = hub.KerasLayer(tfhub_encoder, trainable=True, name='BERT_encoder')
outputs = encoder(encoder_inputs)
# Takes the per-token "sequence_output"; the Dense head below is then
# applied token-wise, so the model output is 3-D
# (batch, seq_len, num_classes) rather than one prediction per text.
net = sequence_output = outputs["sequence_output"]
net = tf.keras.layers.Dense(64, activation="relu")(net)
net = tf.keras.layers.Dropout(0.1)(net)
net = tf.keras.layers.Dense(num_classes, activation="softmax", name='classifier')(net)
return tf.keras.Model(text_input, net)
</code>
def build_smallBERT_CNN_classifier_model():
    """Build a Small BERT text classifier.

    Returns a Keras model that maps a batch of raw strings
    (shape (None,), dtype tf.string) to softmax class probabilities of
    shape (None, num_classes). Uses the module-level `tfhub_preprocess`
    and `tfhub_encoder` handles and the module-level `num_classes`.

    NOTE(review): the quoted ValueError ("A KerasTensor is symbolic")
    happens because `hub.KerasLayer` only supports Keras 2 (tf_keras),
    while on TF 2.16+ `tf.keras` resolves to Keras 3, whose symbolic
    KerasTensor cannot be traced through a hub layer. Set the
    environment variable TF_USE_LEGACY_KERAS=1 *before* importing
    TensorFlow (or build the model with `import tf_keras` in place of
    `tf.keras`) so this function runs under Keras 2 — TODO confirm
    against the installed TF version.
    """
    text_input = tf.keras.layers.Input(shape=(), dtype=tf.string, name='text')
    preprocessing = hub.KerasLayer(tfhub_preprocess, trainable=True, name='preprocessing')
    encoder_inputs = preprocessing(text_input)
    encoder = hub.KerasLayer(tfhub_encoder, trainable=True, name='BERT_encoder')
    outputs = encoder(encoder_inputs)
    # Bug fix: the original fed "sequence_output" (batch, seq_len, 128)
    # into the Dense head, which Keras applies token-wise, yielding
    # per-token logits of shape (batch, seq_len, num_classes) instead of
    # one prediction per input text. "pooled_output" (batch, 128) is the
    # sentence-level embedding intended for classification heads.
    net = outputs["pooled_output"]
    net = tf.keras.layers.Dense(64, activation="relu")(net)
    net = tf.keras.layers.Dropout(0.1)(net)
    net = tf.keras.layers.Dense(num_classes, activation="softmax", name='classifier')(net)
    return tf.keras.Model(text_input, net)
<code># Instantiate the model — this is the call that raises the ValueError
# quoted in the traceback below.
intent_classifier_model = build_smallBERT_CNN_classifier_model()
</code>
<code># Instantiate the model — this is the call that raises the ValueError
# quoted in the traceback below.
intent_classifier_model = build_smallBERT_CNN_classifier_model()
</code>
intent_classifier_model = build_smallBERT_CNN_classifier_model()
While running the above code, I am facing the following error:
<code>---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
Cell In[27], line 1
----> 1 intent_classifier_model = build_smallBERT_CNN_classifier_model()
Cell In[26], line 4, in build_smallBERT_CNN_classifier_model()
2 text_input = tf.keras.layers.Input(shape=(), dtype=tf.string, name='text')
3 preprocessing = hub.KerasLayer(tfhub_preprocess, trainable=True, name='preprocessing')
----> 4 encoder_inputs = preprocessing(text_input)
5 encoder = hub.KerasLayer(tfhub_encoder, trainable=True, name='BERT_encoder')
6 outputs = encoder(encoder_inputs)
File /opt/conda/lib/python3.10/site-packages/tf_keras/src/utils/traceback_utils.py:70, in filter_traceback.<locals>.error_handler(*args, **kwargs)
67 filtered_tb = _process_traceback_frames(e.__traceback__)
68 # To get the full stack trace, call:
69 # `tf.debugging.disable_traceback_filtering()`
---> 70 raise e.with_traceback(filtered_tb) from None
71 finally:
72 del filtered_tb
File /opt/conda/lib/python3.10/site-packages/tensorflow_hub/keras_layer.py:250, in KerasLayer.call(self, inputs, training)
247 else:
248 # Behave like BatchNormalization. (Dropout is different, b/181839368.)
249 training = False
--> 250 result = smart_cond.smart_cond(training,
251 lambda: f(training=True),
252 lambda: f(training=False))
254 # Unwrap dicts returned by signatures.
255 if self._output_key:
File /opt/conda/lib/python3.10/site-packages/tensorflow_hub/keras_layer.py:252, in KerasLayer.call.<locals>.<lambda>()
247 else:
248 # Behave like BatchNormalization. (Dropout is different, b/181839368.)
249 training = False
250 result = smart_cond.smart_cond(training,
251 lambda: f(training=True),
--> 252 lambda: f(training=False))
254 # Unwrap dicts returned by signatures.
255 if self._output_key:
File /opt/conda/lib/python3.10/site-packages/tensorflow/core/function/polymorphism/function_type.py:583, in canonicalize_to_monomorphic(args, kwargs, default_values, capture_types, polymorphic_type)
577 parameters.append(
578 _make_validated_mono_param(kwarg_name, arg[kwarg_name],
579 Parameter.KEYWORD_ONLY, type_context,
580 poly_parameter.type_constraint))
581 else:
582 parameters.append(
--> 583 _make_validated_mono_param(name, arg, poly_parameter.kind,
584 type_context,
585 poly_parameter.type_constraint))
587 return FunctionType(parameters, capture_types), type_context
File /opt/conda/lib/python3.10/site-packages/tensorflow/core/function/polymorphism/function_type.py:522, in _make_validated_mono_param(name, value, kind, type_context, poly_type)
518 def _make_validated_mono_param(
519 name, value, kind, type_context, poly_type
520 ) -> Parameter:
521 """Generates and validates a parameter for Monomorphic FunctionType."""
--> 522 mono_type = trace_type.from_value(value, type_context)
524 if poly_type and not mono_type.is_subtype_of(poly_type):
525 raise TypeError(f"Parameter `{name}` was expected to be of type "
526 f"{poly_type} but is {mono_type}")
File /opt/conda/lib/python3.10/site-packages/tensorflow/core/function/trace_type/trace_type_builder.py:185, in from_value(value, context)
178 return default_types.Attrs.from_type_and_attributes(
179 type(value),
180 tuple(
181 from_value(getattr(value, a.name), context)
182 for a in value.__attrs_attrs__))
184 if util.is_np_ndarray(value):
--> 185 ndarray = value.__array__()
186 return default_types.TENSOR(ndarray.shape, ndarray.dtype)
188 if isinstance(value, custom_nest_protocol.CustomNestProtocol):
File /opt/conda/lib/python3.10/site-packages/keras/src/backend/common/keras_tensor.py:61, in KerasTensor.__array__(self)
60 def __array__(self):
---> 61 raise ValueError(
62 "A KerasTensor is symbolic: it's a placeholder for a shape "
63 "an a dtype. It doesn't have any actual numerical value. "
64 "You cannot convert it to a NumPy array."
65 )
ValueError: Exception encountered when calling layer 'preprocessing' (type KerasLayer).
A KerasTensor is symbolic: it's a placeholder for a shape an a dtype. It doesn't have any actual numerical value. You cannot convert it to a NumPy array.
Call arguments received by layer 'preprocessing' (type KerasLayer):
• inputs=<KerasTensor shape=(None,), dtype=string, sparse=None, name=text>
</code>
<code>---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
Cell In[27], line 1
----> 1 intent_classifier_model = build_smallBERT_CNN_classifier_model()
Cell In[26], line 4, in build_smallBERT_CNN_classifier_model()
2 text_input = tf.keras.layers.Input(shape=(), dtype=tf.string, name='text')
3 preprocessing = hub.KerasLayer(tfhub_preprocess, trainable=True, name='preprocessing')
----> 4 encoder_inputs = preprocessing(text_input)
5 encoder = hub.KerasLayer(tfhub_encoder, trainable=True, name='BERT_encoder')
6 outputs = encoder(encoder_inputs)
File /opt/conda/lib/python3.10/site-packages/tf_keras/src/utils/traceback_utils.py:70, in filter_traceback.<locals>.error_handler(*args, **kwargs)
67 filtered_tb = _process_traceback_frames(e.__traceback__)
68 # To get the full stack trace, call:
69 # `tf.debugging.disable_traceback_filtering()`
---> 70 raise e.with_traceback(filtered_tb) from None
71 finally:
72 del filtered_tb
File /opt/conda/lib/python3.10/site-packages/tensorflow_hub/keras_layer.py:250, in KerasLayer.call(self, inputs, training)
247 else:
248 # Behave like BatchNormalization. (Dropout is different, b/181839368.)
249 training = False
--> 250 result = smart_cond.smart_cond(training,
251 lambda: f(training=True),
252 lambda: f(training=False))
254 # Unwrap dicts returned by signatures.
255 if self._output_key:
File /opt/conda/lib/python3.10/site-packages/tensorflow_hub/keras_layer.py:252, in KerasLayer.call.<locals>.<lambda>()
247 else:
248 # Behave like BatchNormalization. (Dropout is different, b/181839368.)
249 training = False
250 result = smart_cond.smart_cond(training,
251 lambda: f(training=True),
--> 252 lambda: f(training=False))
254 # Unwrap dicts returned by signatures.
255 if self._output_key:
File /opt/conda/lib/python3.10/site-packages/tensorflow/core/function/polymorphism/function_type.py:583, in canonicalize_to_monomorphic(args, kwargs, default_values, capture_types, polymorphic_type)
577 parameters.append(
578 _make_validated_mono_param(kwarg_name, arg[kwarg_name],
579 Parameter.KEYWORD_ONLY, type_context,
580 poly_parameter.type_constraint))
581 else:
582 parameters.append(
--> 583 _make_validated_mono_param(name, arg, poly_parameter.kind,
584 type_context,
585 poly_parameter.type_constraint))
587 return FunctionType(parameters, capture_types), type_context
File /opt/conda/lib/python3.10/site-packages/tensorflow/core/function/polymorphism/function_type.py:522, in _make_validated_mono_param(name, value, kind, type_context, poly_type)
518 def _make_validated_mono_param(
519 name, value, kind, type_context, poly_type
520 ) -> Parameter:
521 """Generates and validates a parameter for Monomorphic FunctionType."""
--> 522 mono_type = trace_type.from_value(value, type_context)
524 if poly_type and not mono_type.is_subtype_of(poly_type):
525 raise TypeError(f"Parameter `{name}` was expected to be of type "
526 f"{poly_type} but is {mono_type}")
File /opt/conda/lib/python3.10/site-packages/tensorflow/core/function/trace_type/trace_type_builder.py:185, in from_value(value, context)
178 return default_types.Attrs.from_type_and_attributes(
179 type(value),
180 tuple(
181 from_value(getattr(value, a.name), context)
182 for a in value.__attrs_attrs__))
184 if util.is_np_ndarray(value):
--> 185 ndarray = value.__array__()
186 return default_types.TENSOR(ndarray.shape, ndarray.dtype)
188 if isinstance(value, custom_nest_protocol.CustomNestProtocol):
File /opt/conda/lib/python3.10/site-packages/keras/src/backend/common/keras_tensor.py:61, in KerasTensor.__array__(self)
60 def __array__(self):
---> 61 raise ValueError(
62 "A KerasTensor is symbolic: it's a placeholder for a shape "
63 "an a dtype. It doesn't have any actual numerical value. "
64 "You cannot convert it to a NumPy array."
65 )
ValueError: Exception encountered when calling layer 'preprocessing' (type KerasLayer).
A KerasTensor is symbolic: it's a placeholder for a shape an a dtype. It doesn't have any actual numerical value. You cannot convert it to a NumPy array.
Call arguments received by layer 'preprocessing' (type KerasLayer):
• inputs=<KerasTensor shape=(None,), dtype=string, sparse=None, name=text>
</code>
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
Cell In[27], line 1
----> 1 intent_classifier_model = build_smallBERT_CNN_classifier_model()
Cell In[26], line 4, in build_smallBERT_CNN_classifier_model()
2 text_input = tf.keras.layers.Input(shape=(), dtype=tf.string, name='text')
3 preprocessing = hub.KerasLayer(tfhub_preprocess, trainable=True, name='preprocessing')
----> 4 encoder_inputs = preprocessing(text_input)
5 encoder = hub.KerasLayer(tfhub_encoder, trainable=True, name='BERT_encoder')
6 outputs = encoder(encoder_inputs)
File /opt/conda/lib/python3.10/site-packages/tf_keras/src/utils/traceback_utils.py:70, in filter_traceback.<locals>.error_handler(*args, **kwargs)
67 filtered_tb = _process_traceback_frames(e.__traceback__)
68 # To get the full stack trace, call:
69 # `tf.debugging.disable_traceback_filtering()`
---> 70 raise e.with_traceback(filtered_tb) from None
71 finally:
72 del filtered_tb
File /opt/conda/lib/python3.10/site-packages/tensorflow_hub/keras_layer.py:250, in KerasLayer.call(self, inputs, training)
247 else:
248 # Behave like BatchNormalization. (Dropout is different, b/181839368.)
249 training = False
--> 250 result = smart_cond.smart_cond(training,
251 lambda: f(training=True),
252 lambda: f(training=False))
254 # Unwrap dicts returned by signatures.
255 if self._output_key:
File /opt/conda/lib/python3.10/site-packages/tensorflow_hub/keras_layer.py:252, in KerasLayer.call.<locals>.<lambda>()
247 else:
248 # Behave like BatchNormalization. (Dropout is different, b/181839368.)
249 training = False
250 result = smart_cond.smart_cond(training,
251 lambda: f(training=True),
--> 252 lambda: f(training=False))
254 # Unwrap dicts returned by signatures.
255 if self._output_key:
File /opt/conda/lib/python3.10/site-packages/tensorflow/core/function/polymorphism/function_type.py:583, in canonicalize_to_monomorphic(args, kwargs, default_values, capture_types, polymorphic_type)
577 parameters.append(
578 _make_validated_mono_param(kwarg_name, arg[kwarg_name],
579 Parameter.KEYWORD_ONLY, type_context,
580 poly_parameter.type_constraint))
581 else:
582 parameters.append(
--> 583 _make_validated_mono_param(name, arg, poly_parameter.kind,
584 type_context,
585 poly_parameter.type_constraint))
587 return FunctionType(parameters, capture_types), type_context
File /opt/conda/lib/python3.10/site-packages/tensorflow/core/function/polymorphism/function_type.py:522, in _make_validated_mono_param(name, value, kind, type_context, poly_type)
518 def _make_validated_mono_param(
519 name, value, kind, type_context, poly_type
520 ) -> Parameter:
521 """Generates and validates a parameter for Monomorphic FunctionType."""
--> 522 mono_type = trace_type.from_value(value, type_context)
524 if poly_type and not mono_type.is_subtype_of(poly_type):
525 raise TypeError(f"Parameter `{name}` was expected to be of type "
526 f"{poly_type} but is {mono_type}")
File /opt/conda/lib/python3.10/site-packages/tensorflow/core/function/trace_type/trace_type_builder.py:185, in from_value(value, context)
178 return default_types.Attrs.from_type_and_attributes(
179 type(value),
180 tuple(
181 from_value(getattr(value, a.name), context)
182 for a in value.__attrs_attrs__))
184 if util.is_np_ndarray(value):
--> 185 ndarray = value.__array__()
186 return default_types.TENSOR(ndarray.shape, ndarray.dtype)
188 if isinstance(value, custom_nest_protocol.CustomNestProtocol):
File /opt/conda/lib/python3.10/site-packages/keras/src/backend/common/keras_tensor.py:61, in KerasTensor.__array__(self)
60 def __array__(self):
---> 61 raise ValueError(
62 "A KerasTensor is symbolic: it's a placeholder for a shape "
63 "an a dtype. It doesn't have any actual numerical value. "
64 "You cannot convert it to a NumPy array."
65 )
ValueError: Exception encountered when calling layer 'preprocessing' (type KerasLayer).
A KerasTensor is symbolic: it's a placeholder for a shape an a dtype. It doesn't have any actual numerical value. You cannot convert it to a NumPy array.
Call arguments received by layer 'preprocessing' (type KerasLayer):
• inputs=<KerasTensor shape=(None,), dtype=string, sparse=None, name=text>
I am looking forward to a solution, or an explanation of why this error is occurring.
New contributor
Arpit Dhiman is a new contributor to this site. Take care in asking for clarification, commenting, and answering.
Check out our Code of Conduct.