I'm running the following code on hip_main.dat (50+ mb) and sometimes the chunks are broken up into two 'data' events.
var fs = require('fs');
var chunking = require('chunking-streams');
var SizeChunker = chunking.SizeChunker;
var input = fs.createReadStream('hip_main.dat');
input.setEncoding('ascii');
var chunker = new SizeChunker({
chunkSize: 451,
flushTail: false
});
var output = fs.createWriteStream('hip.json');
var i = 0;
chunker.on('data', function( chunk ) {
// console.log( chunk.data.length );
if (chunk.data.length > 400) {
var RAdeg = chunk.data.toString( 'ascii', 51, 63 );
var DEdeg = chunk.data.toString( 'ascii', 64, 76 );
var Plx = chunk.data.toString( 'ascii', 79, 86 );
//output.write(Plx);
//output.write('\n');
//console.log( RAdeg + ', ' + DEdeg + ', ' + Plx );
} else {
console.log( chunk.id );
i++;
}
});
chunker.on('end', function() {
console.log( i );
});
//chunker.pipe( output );
input.pipe( chunker );
To restate the problem: when running this code on hip_main.dat (50+ MB), a single 451-byte chunk is sometimes delivered split across two separate 'data' events.