document storage and customUtils, and rewrite byline with ES6

pull/11/head
Timothée Rebours 3 years ago
parent 0b8aab7346
commit 6d43867c20
  1. 6
      browser-version/lib/customUtils.js
  2. 100
      lib/byline.js
  3. 79
      lib/storage.js

@ -6,6 +6,8 @@
* Taken from the crypto-browserify module
* https://github.com/dominictarr/crypto-browserify
* NOTE: Math.random() does not guarantee "cryptographic quality" but we actually don't need it
* @param {number} size in bytes
* @return {array<number>}
*/
const randomBytes = size => {
const bytes = new Array(size)
@ -21,6 +23,8 @@ const randomBytes = size => {
/**
* Taken from the base64-js module
* https://github.com/beatgammit/base64-js/
* @param {array} uint8
* @return {string}
*/
const byteArrayToBase64 = uint8 => {
const lookup = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'
@ -60,6 +64,8 @@ const byteArrayToBase64 = uint8 => {
* that's not an issue here
* The probability of a collision is extremely small (need 3*10^12 documents to have one chance in a million of a collision)
* See http://en.wikipedia.org/wiki/Birthday_problem
* @param {number} len
* @return {string}
*/
const uid = len => {
  // At least 8 random bytes; 2 bytes per requested character gives plenty of base64 output to trim from
  const byteCount = Math.ceil(Math.max(8, len * 2))
  const base64 = byteArrayToBase64(randomBytes(byteCount))
  // Drop '+' and '/' (not URL-safe), then cut down to the requested length
  return base64.replace(/[+/]/g, '').slice(0, len)
}

@ -21,49 +21,19 @@
// IN THE SOFTWARE.
const stream = require('stream')
const util = require('util')
const timers = require('timers')
// convenience API
module.exports = function (readStream, options) {
return module.exports.createStream(readStream, options)
}
// basic API
module.exports.createStream = function (readStream, options) {
if (readStream) {
return createLineStream(readStream, options)
} else {
return new LineStream(options)
}
}
// deprecated API
module.exports.createLineStream = function (readStream) {
console.log('WARNING: byline#createLineStream is deprecated and will be removed soon')
return createLineStream(readStream)
}
function createLineStream (readStream, options) {
if (!readStream) {
throw new Error('expected readStream')
}
if (!readStream.readable) {
throw new Error('readStream must be readable')
}
const createLineStream = (readStream, options) => {
if (!readStream) throw new Error('expected readStream')
if (!readStream.readable) throw new Error('readStream must be readable')
const ls = new LineStream(options)
readStream.pipe(ls)
return ls
}
//
// using the new node v0.10 "streams2" API
//
module.exports.LineStream = LineStream
function LineStream (options) {
stream.Transform.call(this, options)
class LineStream extends stream.Transform {
constructor (options) {
super(options)
options = options || {}
// use objectMode to stop the output from being buffered
@ -74,19 +44,12 @@ function LineStream (options) {
this._lastChunkEndedWithCR = false
// take the source's encoding if we don't have one
const self = this
this.on('pipe', function (src) {
if (!self.encoding) {
// but we can't do this for old-style streams
if (src instanceof stream.Readable) {
self.encoding = src._readableState.encoding
}
}
this.once('pipe', src => {
if (!this.encoding && src instanceof stream.Readable) this.encoding = src._readableState.encoding // but we can't do this for old-style streams
})
}
util.inherits(LineStream, stream.Transform)
}
LineStream.prototype._transform = function (chunk, encoding, done) {
_transform (chunk, encoding, done) {
// decode binary chunks as UTF-8
encoding = encoding || 'utf8'
@ -94,9 +57,7 @@ LineStream.prototype._transform = function (chunk, encoding, done) {
if (encoding === 'buffer') {
chunk = chunk.toString() // utf8
encoding = 'utf8'
} else {
chunk = chunk.toString(encoding)
}
} else chunk = chunk.toString(encoding)
}
this._chunkEncoding = encoding
@ -104,9 +65,7 @@ LineStream.prototype._transform = function (chunk, encoding, done) {
const lines = chunk.split(/\r\n|[\n\v\f\r\x85\u2028\u2029]/g)
// don't split CRLF which spans chunks
if (this._lastChunkEndedWithCR && chunk[0] === '\n') {
lines.shift()
}
if (this._lastChunkEndedWithCR && chunk[0] === '\n') lines.shift()
if (this._lineBuffer.length > 0) {
this._lineBuffer[this._lineBuffer.length - 1] += lines[0]
@ -116,9 +75,9 @@ LineStream.prototype._transform = function (chunk, encoding, done) {
this._lastChunkEndedWithCR = chunk[chunk.length - 1] === '\r'
this._lineBuffer = this._lineBuffer.concat(lines)
this._pushBuffer(encoding, 1, done)
}
}
LineStream.prototype._pushBuffer = function (encoding, keep, done) {
_pushBuffer (encoding, keep, done) {
// always buffer the last (possibly partial) line
while (this._lineBuffer.length > keep) {
const line = this._lineBuffer.shift()
@ -126,28 +85,29 @@ LineStream.prototype._pushBuffer = function (encoding, keep, done) {
if (this._keepEmptyLines || line.length > 0) {
if (!this.push(this._reencode(line, encoding))) {
// when the high-water mark is reached, defer pushes until the next tick
timers.setImmediate(() => {
this._pushBuffer(encoding, keep, done)
})
timers.setImmediate(() => { this._pushBuffer(encoding, keep, done) })
return
}
}
}
done()
}
}
LineStream.prototype._flush = function (done) {
_flush (done) {
this._pushBuffer(this._chunkEncoding, 0, done)
}
}
// see Readable::push
LineStream.prototype._reencode = function (line, chunkEncoding) {
if (this.encoding && this.encoding !== chunkEncoding) {
return Buffer.from(line, chunkEncoding).toString(this.encoding)
} else if (this.encoding) {
// this should be the most common case, i.e. we're using an encoded source stream
return line
} else {
return Buffer.from(line, chunkEncoding)
// see Readable::push
_reencode (line, chunkEncoding) {
if (this.encoding && this.encoding !== chunkEncoding) return Buffer.from(line, chunkEncoding).toString(this.encoding)
else if (this.encoding) return line // this should be the most common case, i.e. we're using an encoded source stream
else return Buffer.from(line, chunkEncoding)
}
}
// convenience API
module.exports = (readStream, options) => module.exports.createStream(readStream, options)
// basic API
module.exports.createStream = (readStream, options) => readStream ? createLineStream(readStream, options) : new LineStream(options)
module.exports.LineStream = LineStream

@ -13,9 +13,22 @@ const { callbackify, promisify } = require('util')
const storage = {}
const { Readable } = require('stream')
/**
* @callback Storage~existsCallback
* @param {boolean} exists
*/
/**
* @param {string} file
* @param {Storage~existsCallback} cb
*/
// eslint-disable-next-line node/no-callback-literal
storage.exists = (path, cb) => fs.access(path, fs.constants.F_OK, (err) => { cb(!err) })
storage.existsAsync = path => fsPromises.access(path, fs.constants.F_OK).then(() => true, () => false)
storage.exists = (file, cb) => {
  // F_OK: existence check only, no read/write permission check
  fs.access(file, fs.constants.F_OK, err => {
    cb(!err)
  })
}
/**
* @param {string} file
* @return {Promise<boolean>}
*/
storage.existsAsync = async file => {
  try {
    await fsPromises.access(file, fs.constants.F_OK)
    return true
  } catch (error) {
    // access rejects when the file is missing (or inaccessible): report non-existence
    return false
  }
}
storage.rename = fs.rename
storage.renameAsync = fsPromises.rename
storage.writeFile = fs.writeFile
@ -32,22 +45,40 @@ storage.mkdir = fs.mkdir
storage.mkdirAsync = fsPromises.mkdir
/**
* Removes the file if it exists — the explicit name says it all
* @param {string} file
* @return {Promise<void>}
*/
storage.ensureFileDoesntExistAsync = async file => {
  const fileExists = await storage.existsAsync(file)
  if (fileExists) await storage.unlinkAsync(file)
}
/**
* @callback Storage~errorCallback
* @param {?Error} err
*/
/**
* @param {string} file
* @param {Storage~errorCallback} callback
*/
storage.ensureFileDoesntExist = (file, callback) => {
  const callbackVersion = callbackify(storage.ensureFileDoesntExistAsync)
  // Forward only the error: callbackify would also pass a (useless) undefined result argument
  callbackVersion(file, err => { callback(err) })
}
/**
* Flush data in OS buffer to storage if corresponding option is set
* @param {String} options.filename
* @param {Boolean} options.isDir Optional, defaults to false
* If options is a string, it is assumed that the flush of the file (not dir) called options was requested
* @param {object|string} options If options is a string, it is assumed that the flush of the file (not dir) called options was requested
* @param {string} [options.filename]
* @param {boolean} [options.isDir = false] Optional, defaults to false
* @param {Storage~errorCallback} callback
*/
storage.flushToStorage = (options, callback) => {
  const flushCallbackStyle = callbackify(storage.flushToStorageAsync)
  flushCallbackStyle(options, callback)
}
/**
* Flush data in OS buffer to storage if corresponding option is set
* @param {object|string} options If options is a string, it is assumed that the flush of the file (not dir) called options was requested
* @param {string} [options.filename]
* @param {boolean} [options.isDir = false] Optional, defaults to false
* @return {Promise<void>}
*/
storage.flushToStorageAsync = async (options) => {
let filename
let flags
@ -98,9 +129,9 @@ storage.flushToStorageAsync = async (options) => {
/**
* Fully write or rewrite the datafile
* @param {String} filename
* @param {String[]} lines
* @param {Function} callback
* @param {string} filename
* @param {string[]} lines
* @param {Storage~errorCallback} callback
*/
storage.writeFileLines = (filename, lines, callback = () => {}) => {
try {
@ -122,19 +153,30 @@ storage.writeFileLines = (filename, lines, callback = () => {}) => {
callback(err)
}
}
/**
* Fully write or rewrite the datafile
* @param {string} filename
* @param {string[]} lines
* @return {Promise<void>}
* @async
*/
storage.writeFileLinesAsync = (filename, lines) => {
  const writeFileLinesPromise = promisify(storage.writeFileLines)
  return writeFileLinesPromise(filename, lines)
}
/**
* Fully write or rewrite the datafile, immune to crashes during the write operation (data will not be lost)
* @param {String} filename
* @param {String[]} lines
* @param {Function} callback Optional callback, signature: err
* @param {string} filename
* @param {string[]} lines
* @param {Storage~errorCallback} callback Optional callback, signature: err
*/
storage.crashSafeWriteFileLines = (filename, lines, callback = () => {}) =>
  callbackify(storage.crashSafeWriteFileLinesAsync)(filename, lines, callback)
/**
* Fully write or rewrite the datafile, immune to crashes during the write operation (data will not be lost)
* @param {string} filename
* @param {string[]} lines
* @return {Promise<void>}
*/
storage.crashSafeWriteFileLinesAsync = async (filename, lines) => {
const tempFilename = filename + '~'
@ -154,11 +196,16 @@ storage.crashSafeWriteFileLinesAsync = async (filename, lines) => {
/**
* Ensure the datafile contains all the data, even if there was a crash during a full file write
* @param {String} filename
* @param {Function} callback signature: err
* @param {string} filename
* @param {Storage~errorCallback} callback signature: err
*/
storage.ensureDatafileIntegrity = (filename, callback) => {
  const integrityCheck = callbackify(storage.ensureDatafileIntegrityAsync)
  integrityCheck(filename, callback)
}
/**
* Ensure the datafile contains all the data, even if there was a crash during a full file write
* @param {string} filename
* @return {Promise<void>}
*/
storage.ensureDatafileIntegrityAsync = async filename => {
const tempFilename = filename + '~'

Loading…
Cancel
Save