I have two services: a producer and a consumer.
The producer has a large JSON file on the server. I want to serve it over the network through a REST API, so I used the Node.js stream technique to load the bytes into memory with very little overhead and write them to the Express response object. I expect this large JSON file to be read in small pieces (i.e. chunks) and sent over the network.
The consumer hits that producer API to receive the data chunk by chunk and processes it with a Node.js stream.
I have written the code below to achieve this.
But when I print console.log('chunk received'), I expect it to be printed more than once, showing that the consumer is receiving the data chunk by chunk. In the console log, however, it is printed only once.
Why do Node.js streams behave this way? According to the documentation and references I found, my implementation is correct, and I am running everything locally.
My question is: why is the consumer not able to receive the data in chunks?
// Producer.js
const fs = require('fs');
const path = require('path');
var express = require('express');
var router = express.Router();

/* GET home page. */
router.get('/', function(req, res, next) {
  res.render('index', { title: 'Express' });
});

router.get('/transactions', function(req, res, next) {
  const stream = fs.createReadStream('transactions.json');
  res.setHeader('Content-Type', 'application/json');
  stream.on('data', (chunk) => {
    // Send each chunk as it's read
    res.write(chunk);
  });
  stream.on('end', () => {
    // Signal the end of the response
    res.end();
  });
});

module.exports = router;
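For comparison, my understanding is that the same producer technique is usually written with pipe; a minimal sketch of that variant (I have not deployed this, and the route name is just for illustration):

// Alternative producer route using pipe (sketch; should behave the same as the manual data/end handlers)
router.get('/transactions-pipe', function(req, res, next) {
  const stream = fs.createReadStream('transactions.json');
  res.setHeader('Content-Type', 'application/json');
  // pipe() forwards every chunk to the response and ends it when the file ends
  stream.pipe(res);
  // Forward read errors to Express so the response doesn't hang
  stream.on('error', next);
});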
// Consumer.js
const { Readable } = require('stream');
const http = require('http');

// Custom Readable Stream class
class JSONResponseStream extends Readable {
  constructor(url) {
    super({ objectMode: true });
    this.url = url;
  }

  _read() {
    http.get(this.url, (res) => {
      let data = '';
      res.on('data', (chunk) => {
        console.log('chunk received'); // It printed only once
        data += chunk;
      });
      res.on('end', () => {
        try {
          const jsonData = JSON.parse(data);
          console.log(typeof jsonData);
          this.push(jsonData); // Push the JSON data into the stream
        } catch (error) {
          this.emit('error', error);
        }
        this.push(null); // No more data to read
      });
    }).on('error', (error) => {
      this.emit('error', error);
    });
  }
}

module.exports = {
  JSONResponseStream
};
// Using the custom stream (the URL is a placeholder; the scheme is http to match the http module)
const jsonStream = new JSONResponseStream('http://api.example.com/data');

jsonStream.on('data', (data) => {
  console.log('Received data:', data);
});

jsonStream.on('end', () => {
  console.log('No more data.');
});

jsonStream.on('error', (error) => {
  console.error('Error:', error);
});
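To isolate whether the HTTP response itself arrives in more than one chunk, I can also hit the endpoint without the custom Readable; a bare-bones sketch (the localhost URL is an assumption about my local setup):

// Count the raw chunks of the HTTP response (diagnostic sketch)
const http = require('http');

http.get('http://localhost:3000/transactions', (res) => {
  let count = 0;
  res.on('data', (chunk) => {
    count += 1;
    console.log(`chunk ${count}: ${chunk.length} bytes`);
  });
  res.on('end', () => {
    console.log(`total chunks: ${count}`);
  });
}).on('error', (error) => {
  console.error('Error:', error);
});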
My sample JSON file is 64 KB in size.
I tried streaming in Node.js.
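For reference, this is the kind of local check I would use to see how many chunks fs.createReadStream emits for this file (a sketch; the 16 KB highWaterMark is just an example value, not what my producer uses):

// Count how many chunks fs.createReadStream emits for the 64 KB file (diagnostic sketch)
const fs = require('fs');

let count = 0;
const stream = fs.createReadStream('transactions.json', { highWaterMark: 16 * 1024 }); // read in 16 KB pieces

stream.on('data', (chunk) => {
  count += 1;
  console.log(`chunk ${count}: ${chunk.length} bytes`);
});
stream.on('end', () => {
  console.log(`total chunks: ${count}`);
});
stream.on('error', (error) => {
  console.error('Error:', error);
});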