The main module of the package implements a native Node.js transform stream, which is both readable and writable.
This is the recommended approach when you need maximum power and flexibility. It ensures
scalability by treating your data as a stream from the source to the destination.
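As a minimal sketch of that streaming model (the in-memory `source` records are illustrative; in practice they would come from a file parser, database cursor, or similar), records can be piped through the stringifier to any writable destination:

```js
import { Readable } from "node:stream";
import { pipeline } from "node:stream/promises";
import { stringify } from "csv-stringify";

// Illustrative source: any object-mode readable emitting one record per chunk
const source = Readable.from([
  ["root", "x", "0", "0"],
  ["someone", "x", "1022", "1022"],
]);

// Records flow from the source through the transform to stdout;
// backpressure is handled by the stream machinery
await pipeline(source, stringify(), process.stdout);
```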
The signature is `const stream = stringify([options])`.
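As a sketch of configuring the returned stream at creation time (`delimiter`, `header`, and `columns` are documented options; the values chosen here are only examples):

```js
import { stringify } from "csv-stringify";

// Returns a transform stream; options are fixed at creation time
const stream = stringify({
  delimiter: ":",
  header: true,
  columns: ["name", "shell"],
});
```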
The stream example below writes two records and registers multiple event handlers to read the generated CSV output and to be notified when the serialisation is finished.
```js
import { stringify } from "csv-stringify";
import assert from "node:assert";

const data = [];
// Initialize the stringifier
const stringifier = stringify({
  delimiter: ":",
});
// Use the readable stream api to consume CSV data
stringifier.on("readable", function () {
  let row;
  while ((row = stringifier.read()) !== null) {
    data.push(row);
  }
});
// Catch any error
stringifier.on("error", function (err) {
  console.error(err.message);
});
// When finished, validate the CSV output with the expected value
stringifier.on("finish", function () {
  assert.equal(
    data.join(""),
    "root:x:0:0:root:/root:/bin/bash\n" +
      "someone:x:1022:1022::/home/someone:/bin/bash\n",
  );
});
// Write records to the stream
stringifier.write(["root", "x", "0", "0", "root", "/root", "/bin/bash"]);
stringifier.write([
  "someone",
  "x",
  "1022",
  "1022",
  "",
  "/home/someone",
  "/bin/bash",
]);
// Close the writable stream
stringifier.end();
```
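The example above writes records as arrays. When the `columns` option is set, the stringifier also accepts plain objects; a small sketch (the property names here are made up for illustration):

```js
import { stringify } from "csv-stringify";

// With columns defined, records can be plain objects whose listed
// properties are serialized in order (field names are illustrative)
const stringifier = stringify({ delimiter: ":", columns: ["user", "shell"] });
stringifier.pipe(process.stdout);
stringifier.write({ user: "root", shell: "/bin/bash" });
stringifier.end();
```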