diff --git a/browser-version/lib/customUtils.js b/browser-version/lib/customUtils.js
index 13021df..0e23bc7 100755
--- a/browser-version/lib/customUtils.js
+++ b/browser-version/lib/customUtils.js
@@ -6,6 +6,8 @@
  * Taken from the crypto-browserify module
  * https://github.com/dominictarr/crypto-browserify
  * NOTE: Math.random() does not guarantee "cryptographic quality" but we actually don't need it
+ * @param {number} size in bytes
+ * @return {array}
  */
 const randomBytes = size => {
   const bytes = new Array(size)
@@ -21,6 +23,8 @@ const randomBytes = size => {
 /**
  * Taken from the base64-js module
  * https://github.com/beatgammit/base64-js/
+ * @param {array} uint8
+ * @return {string}
  */
 const byteArrayToBase64 = uint8 => {
   const lookup = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'
@@ -60,6 +64,8 @@ const byteArrayToBase64 = uint8 => {
  * that's not an issue here
  * The probability of a collision is extremely small (need 3*10^12 documents to have one chance in a million of a collision)
  * See http://en.wikipedia.org/wiki/Birthday_problem
+ * @param {number} len
+ * @return {string}
  */
 const uid = len => byteArrayToBase64(randomBytes(Math.ceil(Math.max(8, len * 2)))).replace(/[+/]/g, '').slice(0, len)
diff --git a/lib/byline.js b/lib/byline.js
index 19bff8b..21e2437 100644
--- a/lib/byline.js
+++ b/lib/byline.js
@@ -21,133 +21,93 @@
 // IN THE SOFTWARE.
 
 const stream = require('stream')
-const util = require('util')
 const timers = require('timers')
 
-// convinience API
-module.exports = function (readStream, options) {
-  return module.exports.createStream(readStream, options)
-}
-
-// basic API
-module.exports.createStream = function (readStream, options) {
-  if (readStream) {
-    return createLineStream(readStream, options)
-  } else {
-    return new LineStream(options)
-  }
-}
-
-// deprecated API
-module.exports.createLineStream = function (readStream) {
-  console.log('WARNING: byline#createLineStream is deprecated and will be removed soon')
-  return createLineStream(readStream)
-}
-
-function createLineStream (readStream, options) {
-  if (!readStream) {
-    throw new Error('expected readStream')
-  }
-  if (!readStream.readable) {
-    throw new Error('readStream must be readable')
-  }
+const createLineStream = (readStream, options) => {
+  if (!readStream) throw new Error('expected readStream')
+  if (!readStream.readable) throw new Error('readStream must be readable')
   const ls = new LineStream(options)
   readStream.pipe(ls)
   return ls
 }
 
-//
-// using the new node v0.10 "streams2" API
-//
+class LineStream extends stream.Transform {
+  constructor (options) {
+    super(options)
+    options = options || {}
+
+    // use objectMode to stop the output from being buffered
+    // which re-concatenates the lines, just without newlines.
+    this._readableState.objectMode = true
+    this._lineBuffer = []
+    this._keepEmptyLines = options.keepEmptyLines || false
+    this._lastChunkEndedWithCR = false
+
+    // take the source's encoding if we don't have one
+    this.once('pipe', src => {
+      if (!this.encoding && src instanceof stream.Readable) this.encoding = src._readableState.encoding // but we can't do this for old-style streams
+    })
+  }
 
-module.exports.LineStream = LineStream
+  _transform (chunk, encoding, done) {
+    // decode binary chunks as UTF-8
+    encoding = encoding || 'utf8'
 
-function LineStream (options) {
-  stream.Transform.call(this, options)
-  options = options || {}
-
-  // use objectMode to stop the output from being buffered
-  // which re-concatanates the lines, just without newlines.
-  this._readableState.objectMode = true
-  this._lineBuffer = []
-  this._keepEmptyLines = options.keepEmptyLines || false
-  this._lastChunkEndedWithCR = false
-
-  // take the source's encoding if we don't have one
-  const self = this
-  this.on('pipe', function (src) {
-    if (!self.encoding) {
-      // but we can't do this for old-style streams
-      if (src instanceof stream.Readable) {
-        self.encoding = src._readableState.encoding
-      }
+    if (Buffer.isBuffer(chunk)) {
+      if (encoding === 'buffer') {
+        chunk = chunk.toString() // utf8
+        encoding = 'utf8'
+      } else chunk = chunk.toString(encoding)
     }
-  })
-}
-util.inherits(LineStream, stream.Transform)
-
-LineStream.prototype._transform = function (chunk, encoding, done) {
-  // decode binary chunks as UTF-8
-  encoding = encoding || 'utf8'
-
-  if (Buffer.isBuffer(chunk)) {
-    if (encoding === 'buffer') {
-      chunk = chunk.toString() // utf8
-      encoding = 'utf8'
-    } else {
-      chunk = chunk.toString(encoding)
-    }
-  }
-  this._chunkEncoding = encoding
+    this._chunkEncoding = encoding
 
-  // see: http://www.unicode.org/reports/tr18/#Line_Boundaries
-  const lines = chunk.split(/\r\n|[\n\v\f\r\x85\u2028\u2029]/g)
+    // see: http://www.unicode.org/reports/tr18/#Line_Boundaries
+    const lines = chunk.split(/\r\n|[\n\v\f\r\x85\u2028\u2029]/g)
 
-  // don't split CRLF which spans chunks
-  if (this._lastChunkEndedWithCR && chunk[0] === '\n') {
-    lines.shift()
-  }
+    // don't split CRLF which spans chunks
+    if (this._lastChunkEndedWithCR && chunk[0] === '\n') lines.shift()
 
-  if (this._lineBuffer.length > 0) {
-    this._lineBuffer[this._lineBuffer.length - 1] += lines[0]
-    lines.shift()
-  }
+    if (this._lineBuffer.length > 0) {
+      this._lineBuffer[this._lineBuffer.length - 1] += lines[0]
+      lines.shift()
+    }
 
-  this._lastChunkEndedWithCR = chunk[chunk.length - 1] === '\r'
-  this._lineBuffer = this._lineBuffer.concat(lines)
-  this._pushBuffer(encoding, 1, done)
-}
+    this._lastChunkEndedWithCR = chunk[chunk.length - 1] === '\r'
+    this._lineBuffer = this._lineBuffer.concat(lines)
+    this._pushBuffer(encoding, 1, done)
+  }
 
-LineStream.prototype._pushBuffer = function (encoding, keep, done) {
-  // always buffer the last (possibly partial) line
-  while (this._lineBuffer.length > keep) {
-    const line = this._lineBuffer.shift()
-    // skip empty lines
-    if (this._keepEmptyLines || line.length > 0) {
-      if (!this.push(this._reencode(line, encoding))) {
-        // when the high-water mark is reached, defer pushes until the next tick
-        timers.setImmediate(() => {
-          this._pushBuffer(encoding, keep, done)
-        })
-        return
+  _pushBuffer (encoding, keep, done) {
+    // always buffer the last (possibly partial) line
+    while (this._lineBuffer.length > keep) {
+      const line = this._lineBuffer.shift()
+      // skip empty lines
+      if (this._keepEmptyLines || line.length > 0) {
+        if (!this.push(this._reencode(line, encoding))) {
+          // when the high-water mark is reached, defer pushes until the next tick
+          timers.setImmediate(() => { this._pushBuffer(encoding, keep, done) })
+          return
+        }
       }
     }
+    done()
   }
-  done()
-}
 
-LineStream.prototype._flush = function (done) {
-  this._pushBuffer(this._chunkEncoding, 0, done)
-}
+  _flush (done) {
+    this._pushBuffer(this._chunkEncoding, 0, done)
+  }
 
-// see Readable::push
-LineStream.prototype._reencode = function (line, chunkEncoding) {
-  if (this.encoding && this.encoding !== chunkEncoding) {
-    return Buffer.from(line, chunkEncoding).toString(this.encoding)
-  } else if (this.encoding) {
-    // this should be the most common case, i.e. we're using an encoded source stream
-    return line
-  } else {
-    return Buffer.from(line, chunkEncoding)
+  // see Readable::push
+  _reencode (line, chunkEncoding) {
+    if (this.encoding && this.encoding !== chunkEncoding) return Buffer.from(line, chunkEncoding).toString(this.encoding)
+    else if (this.encoding) return line // this should be the most common case, i.e. we're using an encoded source stream
+    else return Buffer.from(line, chunkEncoding)
   }
 }
+
+// convenience API
+module.exports = (readStream, options) => module.exports.createStream(readStream, options)
+
+// basic API
+module.exports.createStream = (readStream, options) => readStream ? createLineStream(readStream, options) : new LineStream(options)
+module.exports.LineStream = LineStream
diff --git a/lib/storage.js b/lib/storage.js
index 43bf3de..54e4e0e 100755
--- a/lib/storage.js
+++ b/lib/storage.js
@@ -13,9 +13,22 @@ const { callbackify, promisify } = require('util')
 const storage = {}
 const { Readable } = require('stream')
 
+/**
+ * @callback Storage~existsCallback
+ * @param {boolean} exists
+ */
+
+/**
+ * @param {string} file
+ * @param {Storage~existsCallback} cb
+ */
 // eslint-disable-next-line node/no-callback-literal
-storage.exists = (path, cb) => fs.access(path, fs.constants.F_OK, (err) => { cb(!err) })
-storage.existsAsync = path => fsPromises.access(path, fs.constants.F_OK).then(() => true, () => false)
+storage.exists = (file, cb) => fs.access(file, fs.constants.F_OK, (err) => { cb(!err) })
+/**
+ * @param {string} file
+ * @return {Promise}
+ */
+storage.existsAsync = file => fsPromises.access(file, fs.constants.F_OK).then(() => true, () => false)
 storage.rename = fs.rename
 storage.renameAsync = fsPromises.rename
 storage.writeFile = fs.writeFile
@@ -32,22 +45,40 @@ storage.mkdir = fs.mkdir
 storage.mkdirAsync = fsPromises.mkdir
 
 /**
- * Explicit name ...
+ * @param {string} file
+ * @return {Promise}
  */
 storage.ensureFileDoesntExistAsync = async file => {
   if (await storage.existsAsync(file)) await storage.unlinkAsync(file)
 }
 
+/**
+ * @callback Storage~errorCallback
+ * @param {?Error} err
+ */
+
+/**
+ * @param {string} file
+ * @param {Storage~errorCallback} callback
+ */
 storage.ensureFileDoesntExist = (file, callback) => callbackify(storage.ensureFileDoesntExistAsync)(file, err => callback(err))
 
 /**
  * Flush data in OS buffer to storage if corresponding option is set
- * @param {String} options.filename
- * @param {Boolean} options.isDir Optional, defaults to false
- * If options is a string, it is assumed that the flush of the file (not dir) called options was requested
+ * @param {object|string} options If options is a string, it is assumed that the flush of the file (not dir) called options was requested
+ * @param {string} [options.filename]
+ * @param {boolean} [options.isDir = false] Optional, defaults to false
+ * @param {Storage~errorCallback} callback
  */
 storage.flushToStorage = (options, callback) => callbackify(storage.flushToStorageAsync)(options, callback)
 
+/**
+ * Flush data in OS buffer to storage if corresponding option is set
+ * @param {object|string} options If options is a string, it is assumed that the flush of the file (not dir) called options was requested
+ * @param {string} [options.filename]
+ * @param {boolean} [options.isDir = false] Optional, defaults to false
+ * @return {Promise}
+ */
 storage.flushToStorageAsync = async (options) => {
   let filename
   let flags
@@ -98,9 +129,9 @@ storage.flushToStorageAsync = async (options) => {
 
 /**
  * Fully write or rewrite the datafile
- * @param {String} filename
- * @param {String[]} lines
- * @param {Function} callback
+ * @param {string} filename
+ * @param {string[]} lines
+ * @param {Storage~errorCallback} callback
  */
 storage.writeFileLines = (filename, lines, callback = () => {}) => {
   try {
@@ -122,19 +153,30 @@ storage.writeFileLines = (filename, lines, callback = () => {}) => {
     callback(err)
   }
 }
-
+/**
+ * Fully write or rewrite the datafile
+ * @param {string} filename
+ * @param {string[]} lines
+ * @return {Promise}
+ * @async
+ */
 storage.writeFileLinesAsync = (filename, lines) => promisify(storage.writeFileLines)(filename, lines)
 
 /**
  * Fully write or rewrite the datafile, immune to crashes during the write operation (data will not be lost)
- * @param {String} filename
- * @param {String[]} lines
- * @param {Function} callback Optional callback, signature: err
+ * @param {string} filename
+ * @param {string[]} lines
+ * @param {Storage~errorCallback} callback Optional callback, signature: err
  */
 storage.crashSafeWriteFileLines = (filename, lines, callback = () => {}) => {
   callbackify(storage.crashSafeWriteFileLinesAsync)(filename, lines, callback)
 }
-
+/**
+ * Fully write or rewrite the datafile, immune to crashes during the write operation (data will not be lost)
+ * @param {string} filename
+ * @param {string[]} lines
+ * @return {Promise}
+ */
 storage.crashSafeWriteFileLinesAsync = async (filename, lines) => {
   const tempFilename = filename + '~'
 
@@ -154,11 +196,16 @@ storage.crashSafeWriteFileLinesAsync = async (filename, lines) => {
 
 /**
  * Ensure the datafile contains all the data, even if there was a crash during a full file write
- * @param {String} filename
- * @param {Function} callback signature: err
+ * @param {string} filename
+ * @param {Storage~errorCallback} callback signature: err
  */
 storage.ensureDatafileIntegrity = (filename, callback) => callbackify(storage.ensureDatafileIntegrityAsync)(filename, callback)
+/**
+ * Ensure the datafile contains all the data, even if there was a crash during a full file write
+ * @param {string} filename
+ * @return {Promise}
+ */
 storage.ensureDatafileIntegrityAsync = async filename => {
   const tempFilename = filename + '~'
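
A minimal usage sketch of the APIs touched above, not part of the patch itself: it assumes the two modules are required from this repository's lib/ directory, and the 'example.db' filename and data lines are only illustrative.

const fs = require('fs')
const byline = require('./lib/byline')
const storage = require('./lib/storage')

const main = async () => {
  // Make sure the datafile survived any earlier crash during a full-file write
  // (per the patch, this helper works with the same '~' temporary-file convention).
  await storage.ensureDatafileIntegrityAsync('example.db')

  // Fully rewrite the datafile with the crash-safe variant, which stages the data
  // in the '~' temporary file so an interrupted write cannot lose the previous copy.
  await storage.crashSafeWriteFileLinesAsync('example.db', ['{"hello":"world"}', '{"answer":42}'])

  // Read it back line by line: createStream pipes the readable source through the
  // LineStream Transform, which emits one string per line (objectMode output).
  const lines = byline.createStream(fs.createReadStream('example.db', { encoding: 'utf8' }))
  lines.on('data', line => console.log('line:', line))
}

main().catch(console.error)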