const fs = require('fs');
// Import the stream APIs
const { Transform, pipeline } = require('stream');
// Create a readable stream
const readableStream = fs.createReadStream('input.txt');
// Create a writable stream
const writableStream = fs.createWriteStream('output.txt');
// Set the encoding to utf8
readableStream.setEncoding('utf8');
// Transform chunk into uppercase
const uppercaseWordProcessing = new Transform({
  transform(chunk, encoding, callback) {
    console.log(`Data to be transformed: ${chunk}`);
    callback(null, chunk.toString().toUpperCase());
  }
});
readableStream
  .pipe(uppercaseWordProcessing)
  .pipe(writableStream);
// Alternatively, we can use the pipeline API to pipe a series of streams
// together and get notified when the pipeline has fully completed. Note that
// a stream can only be consumed once, so use either pipe() (above) or
// pipeline(), not both:
//
// pipeline(readableStream, uppercaseWordProcessing, writableStream, (error) => {
//   if (error) {
//     console.error(`Error occurred while transforming stream: ${error}`);
//   } else {
//     console.log('Pipeline succeeded!');
//   }
// });
// Handle stream events
readableStream.on('end', () => {
  // pipe() ends the writable stream automatically, so there is no need to
  // call writableStream.end() here
  console.log('Read Stream Ended!');
});
readableStream.on('error', (error) => {
  console.error(`Read Stream Ended with an error: ${error}`);
});
writableStream.on('finish', () => {
  console.log('Write Stream Finished!');
});
writableStream.on('error', (error) => {
  console.error(`Write Stream error: ${error}`);
});
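If you are running Node.js 15 or later, the same flow can also be written with the promise-based pipeline exported from 'stream/promises', which works naturally with async/await. The sketch below is a minimal illustration that reuses the input.txt and output.txt file names from the example above; the run() wrapper is just an illustrative name:

const fs = require('fs');
const { Transform } = require('stream');
const { pipeline } = require('stream/promises');

// run() is just an illustrative wrapper name; any async function works
async function run() {
  const uppercase = new Transform({
    transform(chunk, encoding, callback) {
      callback(null, chunk.toString().toUpperCase());
    }
  });
  // Create fresh streams here: a stream that has already been consumed
  // cannot be piped a second time
  await pipeline(
    fs.createReadStream('input.txt'),
    uppercase,
    fs.createWriteStream('output.txt')
  );
  console.log('Pipeline succeeded!');
}

run().catch((error) => {
  console.error(`Error occurred while transforming stream: ${error}`);
});

Because the promise-based pipeline rejects on failure, a single catch handler replaces the per-stream error listeners used in the callback version.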
In Node streams, chaining is a way to connect multiple stream operations together using method chaining. It lets you build a pipeline of operations that is applied to a readable stream, transforming or processing the data as it flows through.
To chain stream operations together, you call methods such as pipe() on a readable stream; each call returns a stream object that can be piped further or connected to other Node streams. The operations are applied sequentially as the data flows through the pipeline.
Here’s an example of using chaining to create a pipeline of stream operations: