document storage and customUtils, and rewrite byline with ES6

pull/11/head
Timothée Rebours 3 years ago
parent 0b8aab7346
commit 6d43867c20
  1. browser-version/lib/customUtils.js (6 changed lines)
  2. lib/byline.js (90 changed lines)
  3. lib/storage.js (79 changed lines)

@@ -6,6 +6,8 @@
  * Taken from the crypto-browserify module
  * https://github.com/dominictarr/crypto-browserify
  * NOTE: Math.random() does not guarantee "cryptographic quality" but we actually don't need it
+ * @param {number} size in bytes
+ * @return {array<number>}
  */
 const randomBytes = size => {
   const bytes = new Array(size)
@@ -21,6 +23,8 @@ const randomBytes = size => {
 /**
  * Taken from the base64-js module
  * https://github.com/beatgammit/base64-js/
+ * @param {array} uint8
+ * @return {string}
  */
 const byteArrayToBase64 = uint8 => {
   const lookup = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'
@@ -60,6 +64,8 @@ const byteArrayToBase64 = uint8 => {
  * that's not an issue here
  * The probability of a collision is extremely small (need 3*10^12 documents to have one chance in a million of a collision)
  * See http://en.wikipedia.org/wiki/Birthday_problem
+ * @param {number} len
+ * @return {string}
  */
 const uid = len => byteArrayToBase64(randomBytes(Math.ceil(Math.max(8, len * 2)))).replace(/[+/]/g, '').slice(0, len)
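
For context, the three helpers documented above compose into the id generator: randomBytes feeds byteArrayToBase64, and uid strips the '+' and '/' characters and trims the result to the requested length. A minimal usage sketch, not part of the commit, assuming the module keeps exporting uid as in upstream NeDB (the sample output is illustrative only):

const { uid } = require('./browser-version/lib/customUtils')

const id = uid(16)      // e.g. 'aZ3kQ9xLw2PbT7Rc' — base64 alphabet without '+' or '/'
console.log(id.length)  // 16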

@@ -21,49 +21,19 @@
 // IN THE SOFTWARE.
 const stream = require('stream')
-const util = require('util')
 const timers = require('timers')
-// convinience API
-module.exports = function (readStream, options) {
-  return module.exports.createStream(readStream, options)
-}
-// basic API
-module.exports.createStream = function (readStream, options) {
-  if (readStream) {
-    return createLineStream(readStream, options)
-  } else {
-    return new LineStream(options)
-  }
-}
-// deprecated API
-module.exports.createLineStream = function (readStream) {
-  console.log('WARNING: byline#createLineStream is deprecated and will be removed soon')
-  return createLineStream(readStream)
-}
-function createLineStream (readStream, options) {
-  if (!readStream) {
-    throw new Error('expected readStream')
-  }
-  if (!readStream.readable) {
-    throw new Error('readStream must be readable')
-  }
+const createLineStream = (readStream, options) => {
+  if (!readStream) throw new Error('expected readStream')
+  if (!readStream.readable) throw new Error('readStream must be readable')
   const ls = new LineStream(options)
   readStream.pipe(ls)
   return ls
 }
-//
-// using the new node v0.10 "streams2" API
-//
-module.exports.LineStream = LineStream
-function LineStream (options) {
-  stream.Transform.call(this, options)
+class LineStream extends stream.Transform {
+  constructor (options) {
+    super(options)
     options = options || {}
     // use objectMode to stop the output from being buffered
@@ -74,19 +44,12 @@ function LineStream (options)
     this._lastChunkEndedWithCR = false
     // take the source's encoding if we don't have one
-  const self = this
-  this.on('pipe', function (src) {
-    if (!self.encoding) {
-      // but we can't do this for old-style streams
-      if (src instanceof stream.Readable) {
-        self.encoding = src._readableState.encoding
-      }
-    }
-  })
+    this.once('pipe', src => {
+      if (!this.encoding && src instanceof stream.Readable) this.encoding = src._readableState.encoding // but we can't do this for old-style streams
     })
   }
-util.inherits(LineStream, stream.Transform)
-LineStream.prototype._transform = function (chunk, encoding, done) {
+  _transform (chunk, encoding, done) {
     // decode binary chunks as UTF-8
     encoding = encoding || 'utf8'
@@ -94,9 +57,7 @@ LineStream.prototype._transform = function (chunk, encoding, done) {
       if (encoding === 'buffer') {
        chunk = chunk.toString() // utf8
        encoding = 'utf8'
-      } else {
-        chunk = chunk.toString(encoding)
-      }
+      } else chunk = chunk.toString(encoding)
     }
     this._chunkEncoding = encoding
@@ -104,9 +65,7 @@ LineStream.prototype._transform = function (chunk, encoding, done) {
     const lines = chunk.split(/\r\n|[\n\v\f\r\x85\u2028\u2029]/g)
     // don't split CRLF which spans chunks
-    if (this._lastChunkEndedWithCR && chunk[0] === '\n') {
-      lines.shift()
-    }
+    if (this._lastChunkEndedWithCR && chunk[0] === '\n') lines.shift()
     if (this._lineBuffer.length > 0) {
       this._lineBuffer[this._lineBuffer.length - 1] += lines[0]
@@ -118,7 +77,7 @@ LineStream.prototype._transform = function (chunk, encoding, done) {
     this._pushBuffer(encoding, 1, done)
   }
-LineStream.prototype._pushBuffer = function (encoding, keep, done) {
+  _pushBuffer (encoding, keep, done) {
     // always buffer the last (possibly partial) line
     while (this._lineBuffer.length > keep) {
       const line = this._lineBuffer.shift()
@@ -126,9 +85,7 @@ LineStream.prototype._pushBuffer = function (encoding, keep, done) {
       if (this._keepEmptyLines || line.length > 0) {
         if (!this.push(this._reencode(line, encoding))) {
           // when the high-water mark is reached, defer pushes until the next tick
-          timers.setImmediate(() => {
-            this._pushBuffer(encoding, keep, done)
-          })
+          timers.setImmediate(() => { this._pushBuffer(encoding, keep, done) })
           return
         }
       }
@@ -136,18 +93,21 @@ LineStream.prototype._pushBuffer = function (encoding, keep, done) {
     done()
   }
-LineStream.prototype._flush = function (done) {
+  _flush (done) {
     this._pushBuffer(this._chunkEncoding, 0, done)
   }
   // see Readable::push
-LineStream.prototype._reencode = function (line, chunkEncoding) {
-  if (this.encoding && this.encoding !== chunkEncoding) {
-    return Buffer.from(line, chunkEncoding).toString(this.encoding)
-  } else if (this.encoding) {
-    // this should be the most common case, i.e. we're using an encoded source stream
-    return line
-  } else {
-    return Buffer.from(line, chunkEncoding)
-  }
-}
+  _reencode (line, chunkEncoding) {
+    if (this.encoding && this.encoding !== chunkEncoding) return Buffer.from(line, chunkEncoding).toString(this.encoding)
+    else if (this.encoding) return line // this should be the most common case, i.e. we're using an encoded source stream
+    else return Buffer.from(line, chunkEncoding)
+  }
+}
+// convenience API
+module.exports = (readStream, options) => module.exports.createStream(readStream, options)
+// basic API
+module.exports.createStream = (readStream, options) => readStream ? createLineStream(readStream, options) : new LineStream(options)
+module.exports.LineStream = LineStream
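
The rewrite keeps byline's main public API (the default export, createStream and LineStream) while moving the implementation to an ES6 class and dropping the deprecated createLineStream export. A minimal usage sketch, not part of the commit, assuming the module is consumed the same way as before the rewrite ('datafile.db' is a placeholder):

const fs = require('fs')
const byline = require('./lib/byline')

// wrap any readable stream; each 'data' event is one line with the newline stripped
const lineStream = byline(fs.createReadStream('datafile.db', { encoding: 'utf8' }))
lineStream.on('data', line => console.log(line))
lineStream.on('end', () => console.log('done'))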

@@ -13,9 +13,22 @@ const { callbackify, promisify } = require('util')
 const storage = {}
 const { Readable } = require('stream')
+/**
+ * @callback Storage~existsCallback
+ * @param {boolean} exists
+ */
+/**
+ * @param {string} file
+ * @param {Storage~existsCallback} cb
+ */
 // eslint-disable-next-line node/no-callback-literal
-storage.exists = (path, cb) => fs.access(path, fs.constants.F_OK, (err) => { cb(!err) })
-storage.existsAsync = path => fsPromises.access(path, fs.constants.F_OK).then(() => true, () => false)
+storage.exists = (file, cb) => fs.access(file, fs.constants.F_OK, (err) => { cb(!err) })
+/**
+ * @param {string} file
+ * @return {Promise<boolean>}
+ */
+storage.existsAsync = file => fsPromises.access(file, fs.constants.F_OK).then(() => true, () => false)
 storage.rename = fs.rename
 storage.renameAsync = fsPromises.rename
 storage.writeFile = fs.writeFile
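
The two access checks documented above differ in shape: storage.exists keeps the legacy fs.exists-style callback (a bare boolean, no error argument, hence the eslint exception), while storage.existsAsync resolves to a boolean. A short sketch of both call styles, not part of the commit ('datafile.db' is a placeholder):

storage.exists('datafile.db', exists => {
  if (!exists) console.log('no datafile yet')
})

storage.existsAsync('datafile.db')
  .then(exists => console.log(exists ? 'datafile found' : 'no datafile yet'))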
@@ -32,22 +45,40 @@ storage.mkdir = fs.mkdir
 storage.mkdirAsync = fsPromises.mkdir
 /**
- * Explicit name ...
+ * @param {string} file
+ * @return {Promise<void>}
  */
 storage.ensureFileDoesntExistAsync = async file => {
   if (await storage.existsAsync(file)) await storage.unlinkAsync(file)
 }
+/**
+ * @callback Storage~errorCallback
+ * @param {?Error} err
+ */
+/**
+ * @param {string} file
+ * @param {Storage~errorCallback} callback
+ */
 storage.ensureFileDoesntExist = (file, callback) => callbackify(storage.ensureFileDoesntExistAsync)(file, err => callback(err))
 /**
  * Flush data in OS buffer to storage if corresponding option is set
- * @param {String} options.filename
- * @param {Boolean} options.isDir Optional, defaults to false
- * If options is a string, it is assumed that the flush of the file (not dir) called options was requested
+ * @param {object|string} options If options is a string, it is assumed that the flush of the file (not dir) called options was requested
+ * @param {string} [options.filename]
+ * @param {boolean} [options.isDir = false] Optional, defaults to false
+ * @param {Storage~errorCallback} callback
  */
 storage.flushToStorage = (options, callback) => callbackify(storage.flushToStorageAsync)(options, callback)
+/**
+ * Flush data in OS buffer to storage if corresponding option is set
+ * @param {object|string} options If options is a string, it is assumed that the flush of the file (not dir) called options was requested
+ * @param {string} [options.filename]
+ * @param {boolean} [options.isDir = false] Optional, defaults to false
+ * @return {Promise<void>}
+ */
 storage.flushToStorageAsync = async (options) => {
   let filename
   let flags
@@ -98,9 +129,9 @@ storage.flushToStorageAsync = async (options) => {
 /**
  * Fully write or rewrite the datafile
- * @param {String} filename
- * @param {String[]} lines
- * @param {Function} callback
+ * @param {string} filename
+ * @param {string[]} lines
+ * @param {Storage~errorCallback} callback
  */
 storage.writeFileLines = (filename, lines, callback = () => {}) => {
   try {
@@ -122,19 +153,30 @@ storage.writeFileLines = (filename, lines, callback = () => {}) => {
     callback(err)
   }
 }
+/**
+ * Fully write or rewrite the datafile
+ * @param {string} filename
+ * @param {string[]} lines
+ * @return {Promise<void>}
+ * @async
+ */
 storage.writeFileLinesAsync = (filename, lines) => promisify(storage.writeFileLines)(filename, lines)
 /**
  * Fully write or rewrite the datafile, immune to crashes during the write operation (data will not be lost)
- * @param {String} filename
- * @param {String[]} lines
- * @param {Function} callback Optional callback, signature: err
+ * @param {string} filename
+ * @param {string[]} lines
+ * @param {Storage~errorCallback} callback Optional callback, signature: err
  */
 storage.crashSafeWriteFileLines = (filename, lines, callback = () => {}) => {
   callbackify(storage.crashSafeWriteFileLinesAsync)(filename, lines, callback)
 }
+/**
+ * Fully write or rewrite the datafile, immune to crashes during the write operation (data will not be lost)
+ * @param {string} filename
+ * @param {string[]} lines
+ * @return {Promise<void>}
+ */
 storage.crashSafeWriteFileLinesAsync = async (filename, lines) => {
   const tempFilename = filename + '~'
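
Both flavours of the crash-safe write share the same promise-based core, with the callback variant produced through callbackify. An illustrative sketch of the two signatures documented above, not part of the commit (the filename and document line are placeholders):

const lines = [JSON.stringify({ _id: 'id1', hello: 'world' })]

storage.crashSafeWriteFileLines('datafile.db', lines, err => {
  if (err) console.error(err)
})

storage.crashSafeWriteFileLinesAsync('datafile.db', lines)
  .then(() => console.log('datafile persisted'))
  .catch(err => console.error(err))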
@@ -154,11 +196,16 @@ storage.crashSafeWriteFileLinesAsync = async (filename, lines) => {
 /**
  * Ensure the datafile contains all the data, even if there was a crash during a full file write
- * @param {String} filename
- * @param {Function} callback signature: err
+ * @param {string} filename
+ * @param {Storage~errorCallback} callback signature: err
  */
 storage.ensureDatafileIntegrity = (filename, callback) => callbackify(storage.ensureDatafileIntegrityAsync)(filename, callback)
+/**
+ * Ensure the datafile contains all the data, even if there was a crash during a full file write
+ * @param {string} filename
+ * @return {Promise<void>}
+ */
 storage.ensureDatafileIntegrityAsync = async filename => {
   const tempFilename = filename + '~'
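
The integrity check pairs with the crash-safe write above: it is meant to be run before the datafile is read, so that a crash during a previous write cannot leave callers with a missing or half-written file. A hedged usage sketch, not part of the commit (where the persistence layer actually invokes it is not shown in this diff):

storage.ensureDatafileIntegrity('datafile.db', err => {
  if (err) return console.error(err)
  // datafile.db is now safe to read
})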
