Streams in the fs Module
Node.js · Beginner · 11 min read

For large files, reading the entire contents into memory is inefficient. Streams process data in small chunks instead, keeping memory usage low. fs.createReadStream() and fs.createWriteStream() return stream objects that you can pipe together.
Example
const fs = require('fs');
const zlib = require('zlib');
// Copy a large file using streams (memory-efficient):
function copyFile(src, dest) {
  const readable = fs.createReadStream(src);
  const writable = fs.createWriteStream(dest);
  readable.pipe(writable);
  writable.on('finish', () => console.log('Copy complete.'));
  readable.on('error', (err) => console.error('Read error:', err));
  writable.on('error', (err) => console.error('Write error:', err));
}
copyFile('big-file.txt', 'big-file-copy.txt');
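One caveat: .pipe() does not forward errors between streams, which is why each stream above needs its own 'error' handler. A minimal alternative sketch using Node's built-in stream.pipeline (available since Node.js 10), which wires up error handling and cleanup for you; copyFileWithPipeline is just an illustrative name:

const { pipeline } = require('stream');

function copyFileWithPipeline(src, dest) {
  pipeline(
    fs.createReadStream(src),
    fs.createWriteStream(dest),
    (err) => {
      if (err) {
        console.error('Copy failed:', err);
      } else {
        console.log('Copy complete.');
      }
    }
  );
}

copyFileWithPipeline('big-file.txt', 'big-file-copy.txt');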
// Compress a file on-the-fly with gzip:
function compressFile(src, dest) {
  const readable = fs.createReadStream(src);
  const gzip = zlib.createGzip();
  const writable = fs.createWriteStream(dest);
  readable.pipe(gzip).pipe(writable);
  writable.on('finish', () => console.log('Compressed:', dest));
}
compressFile('big-file.txt', 'big-file.txt.gz');
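Decompression mirrors this: swap zlib.createGzip() for zlib.createGunzip(). A minimal sketch in the same style (decompressFile and the output filename are illustrative), this time with an 'error' handler on each stream:

function decompressFile(src, dest) {
  const readable = fs.createReadStream(src);
  const gunzip = zlib.createGunzip();
  const writable = fs.createWriteStream(dest);
  readable.pipe(gunzip).pipe(writable);
  writable.on('finish', () => console.log('Decompressed:', dest));
  readable.on('error', (err) => console.error('Read error:', err));
  gunzip.on('error', (err) => console.error('Gunzip error:', err));
  writable.on('error', (err) => console.error('Write error:', err));
}

decompressFile('big-file.txt.gz', 'big-file-restored.txt');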
// Process a CSV line-by-line using readline:
const readline = require('readline');
const rl = readline.createInterface({
  input: fs.createReadStream('data.csv'),
});
rl.on('line', (line) => {
  const columns = line.split(',');
  console.log(columns[0]); // Print first column
});
rl.on('close', () => console.log('Done reading CSV.'));
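To see the chunking behaviour directly, you can listen for 'data' events yourself. A minimal sketch; the 16 KB highWaterMark is an illustrative choice (fs read streams default to 64 KB chunks):

const stream = fs.createReadStream('big-file.txt', {
  highWaterMark: 16 * 1024, // deliver data in 16 KB chunks
});
let bytes = 0;
stream.on('data', (chunk) => {
  bytes += chunk.length;
  console.log('Received a chunk of', chunk.length, 'bytes');
});
stream.on('end', () => console.log('Total bytes read:', bytes));
stream.on('error', (err) => console.error('Read error:', err));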