I am new to ML and TensorFlow, and this is my first project with it. I have trained a Siamese model in TensorFlow and saved it in .h5 format. Then I used tensorflowjs_converter to convert it into a format readable by JavaScript.
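For reference, the conversion command was roughly the following (the actual paths on my machine differ):

tensorflowjs_converter --input_format=keras /path/to/siamese_model.h5 /path/to/web_model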
The output web_model directory contains model.json plus 38 weight shards (group1-shard1of38.bin through group1-shard38of38.bin). Then I tried to load the model into my React app with the following code:
const tf = require('@tensorflow/tfjs-node');
const fs = require('fs');

// Define and register your custom layer
class L1Dist extends tf.layers.Layer {
  constructor(config) {
    super(config);
    this.supportsMasking = true;
  }

  call(inputs) {
    return tf.tidy(() => {
      if (!Array.isArray(inputs) || inputs.length !== 2) {
        throw new Error(`Expected inputs to be an array of two elements, but got: ${JSON.stringify(inputs)}`);
      }

      let inputEmbedding, validationEmbedding;
      try {
        inputEmbedding = inputs[0] instanceof tf.Tensor ? inputs[0] : tf.tensor(inputs[0]);
        validationEmbedding = inputs[1] instanceof tf.Tensor ? inputs[1] : tf.tensor(inputs[1]);
      } catch (error) {
        console.error('Error creating tensors:', error);
        console.error('Input shapes:',
          inputs[0] instanceof tf.Tensor ? inputs[0].shape : 'Not a tensor',
          inputs[1] instanceof tf.Tensor ? inputs[1].shape : 'Not a tensor'
        );
        throw error;
      }

      if (!inputEmbedding.shape.every((dim, i) => dim === validationEmbedding.shape[i])) {
        throw new Error(`Input shapes do not match: ${inputEmbedding.shape} vs ${validationEmbedding.shape}`);
      }

      return tf.abs(tf.sub(inputEmbedding, validationEmbedding));
    });
  }

  computeOutputShape(inputShape) {
    return inputShape[0];
  }

  static get className() {
    return 'L1Dist';
  }
}

tf.serialization.registerClass(L1Dist);
async function loadModel(modelPath) {
  try {
    // First, try loading the model normally
    const model = await tf.loadLayersModel(`file://${modelPath}`);
    console.log('Model loaded successfully');
    return model;
  } catch (error) {
    console.log('Error loading model:', error.message);
    console.log('Attempting to load and fix model configuration...');

    // Load the model JSON file
    const modelJSON = JSON.parse(fs.readFileSync(modelPath, 'utf8'));

    // Function to fix inboundNodes
    function fixInboundNodes(obj) {
      if (Array.isArray(obj)) {
        return obj.map(fixInboundNodes);
      } else if (typeof obj === 'object' && obj !== null) {
        const newObj = {};
        for (const [key, value] of Object.entries(obj)) {
          if (key === 'inboundNodes') {
            newObj[key] = value.map(node => {
              if (Array.isArray(node)) {
                return node;
              } else if (typeof node === 'object') {
                return [node.args, node.kwargs];
              } else {
                return [node];
              }
            });
          } else {
            newObj[key] = fixInboundNodes(value);
          }
        }
        return newObj;
      }
      return obj;
    }

    // Fix the entire model topology
    modelJSON.modelTopology = fixInboundNodes(modelJSON.modelTopology);
    console.log('Modified model configuration:');
    console.log(JSON.stringify(modelJSON.modelTopology, null, 2));

    // Load the model with the modified configuration
    try {
      const model = await tf.loadLayersModel(tf.io.fromMemory(modelJSON));
      console.log('Model loaded successfully with modified configuration');
      return model;
    } catch (loadError) {
      console.error('Error loading modified model:', loadError);
      throw loadError;
    }
  }
}
async function loadAndTestModel() {
  const modelPath = 'C://Program_mine/Facial_recognition/facial-recognition-app/facial-recognition-backend/src/models/web_model/model.json';
  try {
    const siameseModel = await loadModel(modelPath);

    // Test the model
    const testInput1 = tf.randomNormal([1, 105, 105, 3]);
    const testInput2 = tf.randomNormal([1, 105, 105, 3]);
    const prediction = siameseModel.predict([testInput1, testInput2]);
    console.log('Test prediction shape:', prediction.shape);
    prediction.print();

    tf.dispose([testInput1, testInput2, prediction]);
    return siameseModel;
  } catch (error) {
    console.error('Error during model loading or analysis:', error);
    if (error.stack) {
      console.error('Stack trace:', error.stack);
    }
    throw error;
  }
}

module.exports = { loadAndTestModel };
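For completeness, this is roughly how I call the module from my backend entry point (the ./loadModel path is just how I refer to the file above):

const { loadAndTestModel } = require('./loadModel');

loadAndTestModel()
  .then(() => console.log('Siamese model is ready'))
  .catch(err => console.error('Failed to load the siamese model:', err));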
The function fixInboundNodes() tries to turn the inbound-node objects in the model.json file into arrays. Yet the following error is still shown:

Error loading modified model: ValueError: Corrupted configuration, expected array for nodeData: [object Object]

I have checked the path to my model.json and the documentation for tensorflowjs_converter.
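To illustrate what fixInboundNodes() is targeting: judging from my own fix function and the error above, each inbound-node entry in model.json comes out as an object with args/kwargs fields rather than the nested array tfjs expects, roughly like this (abridged; the layer name is just a placeholder):

{ "args": [["input_1", 0, 0]], "kwargs": {} }

and fixInboundNodes() rewrites each such entry into the array form [args, kwargs].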
For context, I trained the model in a WSL2 environment through JupyterLab and also ran the conversion in WSL2, then copied the converted web_model folder over to Windows. I don't know whether that is relevant. I have also tried rebuilding the same model directly in JavaScript and assigning the weights manually, but the weights that get loaded are not correct. So the problem of loading my model from TensorFlow into TensorFlow.js still exists. 🙁