I am new to machine learning and I need to detect certain commands from users in my React Native application. For this purpose, I have prepared the following ML example, and the test results meet my needs (I am shortening the variant lists to keep this from getting too long).
import tensorflow as tf
from tensorflow.keras.preprocessing.text import Tokenizer
from tensorflow.keras.preprocessing.sequence import pad_sequences
from tensorflow.keras.optimizers import Adam
from tensorflow.keras.callbacks import ReduceLROnPlateau, EarlyStopping
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import json
# Expanded dataset
commands = [
    "turn on camera",
    "open history",
    "other",
    "math"
]

# Synonymous sentences
command_variants = {
    "turn on camera": [
        "activate camera", "I want to open the camera", "I want to take a picture",
    ],
    "open history": [
        "show history", "display previous records", "I want to look at past records",
    ],
    "other": [
        "give a command", "What is the square root of 16?", "How do you solve quadratic equations?",
    ],
    "math": [
        "What is 2 + 2?", "Calculate 5 - 3.", "Solve 6 * 7.", "What is 8 / 4?",
    ]
}
sentences = []
labels = []
# Build the training sentences and their integer labels from the variants dict
for label, (command, variants) in enumerate(command_variants.items()):
    sentences.append(command)
    labels.append(label)
    for variant in variants:
        sentences.append(variant)
        labels.append(label)
tokenizer = Tokenizer(num_words=5000, oov_token="<OOV>")
tokenizer.fit_on_texts(sentences)
# Export the tokenizer so the app side can reproduce the same tokenization
tokenizer_config = {
    "word_index": tokenizer.word_index,
    "index_word": {v: k for k, v in tokenizer.word_index.items()},
    "num_words": tokenizer.num_words,
    "oov_token": tokenizer.oov_token
}
with open('m_exports/commands_tokenizer.json', 'w') as f:
    json.dump(tokenizer_config, f)
sequences = tokenizer.texts_to_sequences(sentences)
max_len = 250
padded_sequences = pad_sequences(sequences, maxlen=max_len, padding='post')
label_counts = pd.Series(labels).value_counts()
print("Label Distribution:")
print(label_counts)
model = tf.keras.models.Sequential([
    tf.keras.layers.Embedding(input_dim=5000, output_dim=128, input_length=max_len),
    tf.keras.layers.Bidirectional(tf.keras.layers.SimpleRNN(128)),
    tf.keras.layers.Dropout(0.5),
    tf.keras.layers.Dense(256, activation='relu'),
    tf.keras.layers.Dropout(0.5),
    tf.keras.layers.Dense(len(command_variants), activation='softmax')
])
optimizer = Adam(learning_rate=0.001)
model.compile(optimizer=optimizer, loss='sparse_categorical_crossentropy', metrics=['accuracy'])
reduce_lr = ReduceLROnPlateau(monitor='val_loss', factor=0.2, patience=3, min_lr=0.0001)
early_stopping = EarlyStopping(monitor='val_loss', patience=5, restore_best_weights=True)
history = model.fit(padded_sequences, np.array(labels), epochs=10, validation_split=0.2, callbacks=[reduce_lr, early_stopping])
plt.figure(figsize=(12, 6))
plt.subplot(1, 2, 1)
plt.plot(history.history['loss'])
plt.plot(history.history['val_loss'])
plt.title('Model Loss')
plt.xlabel('Epoch')
plt.ylabel('Loss')
plt.legend(['Train', 'Validation'])
plt.subplot(1, 2, 2)
plt.plot(history.history['accuracy'])
plt.plot(history.history['val_accuracy'])
plt.title('Model Accuracy')
plt.xlabel('Epoch')
plt.ylabel('Accuracy')
plt.legend(['Train', 'Validation'])
plt.show()
print(f'Final Training Loss: {history.history["loss"][-1]}')
print(f'Final Training Accuracy: {history.history["accuracy"][-1]}')
print(f'Final Validation Loss: {history.history["val_loss"][-1]}')
print(f'Final Validation Accuracy: {history.history["val_accuracy"][-1]}')
test_sentences = [
    "turn on camera",
    "activate microphone",
    "show history",
    "give a command",
    "stop camera",
    "pause recording",
    "resume recording",
    "start new recording",
    "play last recording"
]
test_sequences = tokenizer.texts_to_sequences(test_sentences)
padded_test_sequences = pad_sequences(test_sequences, maxlen=max_len, padding='post')
predictions = model.predict(padded_test_sequences)
for sentence, prediction in zip(test_sentences, predictions):
    predicted_label = np.argmax(prediction)
    predicted_probability = prediction[predicted_label]
    print(f'Sentence: "{sentence}"')
    print(f'Predicted label index: {predicted_label}')
    print(f'Predicted label: {list(command_variants.keys())[predicted_label]}')
    print(f'Probability: {predicted_probability}')
    print(f'Raw probabilities: {prediction}')
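Since the app cannot use the Keras Tokenizer directly, the React Native side will have to reproduce the tokenization from the exported commands_tokenizer.json. Here is a simplified sketch of that replay logic (mirroring the default Keras lowercasing/filter behavior with a regex is my assumption, not something the export itself guarantees):

import json
import re

# Sketch: reproduce Tokenizer.texts_to_sequences from the exported JSON.
# Assumes default Keras behavior: lowercase, strip filter characters, split on spaces.
with open('m_exports/commands_tokenizer.json') as f:
    cfg = json.load(f)

word_index = cfg['word_index']
oov_index = word_index[cfg['oov_token']]
max_len = 250  # must match the maxlen used with pad_sequences at training time

def encode(sentence):
    cleaned = re.sub(r'[!"#$%&()*+,\-./:;<=>?@\[\\\]^_`{|}~\t\n]', ' ', sentence.lower())
    ids = [word_index.get(w, oov_index) for w in cleaned.split()]
    # texts_to_sequences replaces indices >= num_words with the OOV token
    ids = [i if i < cfg['num_words'] else oov_index for i in ids]
    return (ids + [0] * max_len)[:max_len]  # 'post' padding, as in training

print(encode("turn on camera"))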
To run this model in my React Native app, I am using the https://github.com/mrousavy/react-native-fast-tflite package. A TFLite file I prepared earlier worked, but its detection rate was very low. To improve it, I got help from an AI assistant and the results improved, but now I am getting the following output when converting the model to TFLite:
W0000 00:00:1721746792.488252 2555976 tf_tfl_flatbuffer_helpers.cc:392] Ignored output_format.
W0000 00:00:1721746792.488261 2555976 tf_tfl_flatbuffer_helpers.cc:395] Ignored drop_control_dependency.
2024-07-23 17:59:52.556361: W tensorflow/compiler/mlir/lite/flatbuffer_export.cc:3463] TFLite interpreter needs to link Flex delegate in order to run the model since it contains the following Select TFop(s):
Flex ops: FlexTensorListReserve, FlexTensorListSetItem, FlexTensorListStack
Details:
tf.TensorListReserve(tensor<2xi32>, tensor<i32>) -> (tensor<!tf_type.variant<tensor<?x128xf32>>>) : {device = ""}
tf.TensorListSetItem(tensor<!tf_type.variant<tensor<?x128xf32>>>, tensor<i32>, tensor<?x128xf32>) -> (tensor<!tf_type.variant<tensor<?x128xf32>>>) : {device = "", resize_if_index_out_of_bounds = false}
tf.TensorListStack(tensor<!tf_type.variant<tensor<?x128xf32>>>, tensor<2xi32>) -> (tensor<1x?x128xf32>) : {device = "", num_elements = 1 : i64}
See instructions: https://www.tensorflow.org/lite/guide/ops_select
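If it helps diagnosis: my reading of the warning is that the Bidirectional(SimpleRNN) layer is what gets lowered to the TensorList (Flex) ops listed above. As a fallback I sketched a builtins-only variant (untested, and it would need retraining) that should convert without the Flex delegate:

# Hypothetical alternative: a pooling-based model avoids the dynamic RNN loop
# that produces the TensorList/Flex ops named in the warning.
builtin_model = tf.keras.models.Sequential([
    tf.keras.layers.Embedding(input_dim=5000, output_dim=128, input_length=max_len),
    tf.keras.layers.GlobalAveragePooling1D(),
    tf.keras.layers.Dense(256, activation='relu'),
    tf.keras.layers.Dropout(0.5),
    tf.keras.layers.Dense(len(command_variants), activation='softmax')
])
builtin_model.build(input_shape=(None, max_len))  # ensure the model is built before conversion
builtin_model.compile(optimizer='adam', loss='sparse_categorical_crossentropy', metrics=['accuracy'])
# (train with fit() as in the original script before exporting for real use)

converter = tf.lite.TFLiteConverter.from_keras_model(builtin_model)
converter.target_spec.supported_ops = [tf.lite.OpsSet.TFLITE_BUILTINS]
tflite_builtin = converter.convert()  # no SELECT_TF_OPS / Flex delegate required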
I am using the following code to export the TFLite file:
import tensorflow as tf

# Convert the model to TensorFlow Lite
converter = tf.lite.TFLiteConverter.from_keras_model(model)
converter.experimental_enable_resource_variables = True
converter.target_spec.supported_ops = [
    tf.lite.OpsSet.TFLITE_BUILTINS,
    tf.lite.OpsSet.SELECT_TF_OPS
]
converter._experimental_lower_tensor_list_ops = False

# Convert the model
tflite_model = converter.convert()

# Save the TFLite model to a file
with open('m_exports/commands_model.tflite', 'wb') as f:
    f.write(tflite_model)
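To check whether the exported file itself is valid, I can run it locally first, since the full TensorFlow pip package links the Flex delegate (unlike, apparently, the on-device runtime):

import numpy as np
import tensorflow as tf

# Local sanity check: the TensorFlow pip package includes Flex delegate support,
# so this should run even though the on-device runtime rejects the model.
interpreter = tf.lite.Interpreter(model_path='m_exports/commands_model.tflite')
interpreter.allocate_tensors()  # this is the step that fails on device

input_details = interpreter.get_input_details()
output_details = interpreter.get_output_details()

# Dummy input shaped and typed like one padded sequence
dummy = np.zeros(input_details[0]['shape'], dtype=input_details[0]['dtype'])
interpreter.set_tensor(input_details[0]['index'], dummy)
interpreter.invoke()
print(interpreter.get_tensor(output_details[0]['index']))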
I am also getting this error during the model-loading phase in my application:
Error: TFLite: Failed to allocate memory for input/output tensors! Status: unresolved-ops
What should I do to export this model as a TFLite file so that I can use it in my React Native application?
Thanks.