diff --git a/CHANGELOG.md b/CHANGELOG.md index e222e6c..0764b67 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -13,14 +13,13 @@ to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). - Added markdown documentation generated from the JSDoc ### Changed -- All the functions are now async at the core, and a fully retro-compatible callback-ified version is exposed. -- The executor is now much simpler and Promise-based. A retro-compatible shim is still exposed, with the exception that it no longer handles [`arguments`](https://developer.mozilla.org/fr/docs/Web/JavaScript/Reference/Functions/arguments) as the arguments Array. If you use the executor directly, you'll need to convert it to a proper Array beforehand. +- All the functions are now async at the core, and a fully retro-compatible callback-ified version is exposed for the exposed functions. +- The executor is now much simpler and Promise-based. A mostly retro-compatible shim is still exposed, with the exception that it no longer handles [`arguments`](https://developer.mozilla.org/fr/docs/Web/JavaScript/Reference/Functions/arguments) as the arguments Array. If you use the executor directly, you'll need to convert it to a proper Array beforehand. However [follicle@1.x](https://github.com/seald/follicle) is not compatible, please update to v2. - As a result, the `async` dependency has been removed completely. To avoid rewriting the tests, shims of some functions of `async` are defined in an utilities file used exclusively in the tests. - The `Datastore#update`'s callback has its signature slightly changed. The `upsert` flag is always defined either at `true` or `false` but not `null` nor `undefined`, and `affectedDocuments` is `null` when none is given rather than `undefined` (except when there is an error of course). 
- +- ### Deprecated - Formally deprecate giving a string as argument to the `Datastore` constructor -- Formally deprecate using `Persistence.getNWAppFilename()` and `options.nodeWebkitAppName` ## [2.2.0] - 2021-10-29 ### Added diff --git a/README.md b/README.md index 3a8f65c..dbd6331 100755 --- a/README.md +++ b/README.md @@ -8,9 +8,9 @@ written by Louis Chatriot. Since the original maintainer doesn't support this package anymore, we forked it and maintain it for the needs of [Seald](https://www.seald.io). -**Embedded persistent or in memory database for Node.js, nw.js, Electron and -browsers, 100% JavaScript, no binary dependency**. API is a subset of MongoDB's -and it's [plenty fast](#speed). +**Embedded persistent or in memory database for Node.js, Electron and browsers, +100% JavaScript, no binary dependency**. API is a subset of MongoDB's and it's +[plenty fast](#speed). ## Installation diff --git a/benchmarks/commonUtilities.js b/benchmarks/commonUtilities.js index ec708ed..36bd4e0 100755 --- a/benchmarks/commonUtilities.js +++ b/benchmarks/commonUtilities.js @@ -5,6 +5,7 @@ const fs = require('fs') const path = require('path') const Datastore = require('../lib/datastore') const Persistence = require('../lib/persistence') +const { callbackify } = require('util') let executeAsap try { @@ -45,7 +46,7 @@ module.exports.getConfiguration = function (benchDb) { * Ensure the workspace stat and the db datafile is empty */ module.exports.prepareDb = function (filename, cb) { - Persistence.ensureDirectoryExists(path.dirname(filename), function () { + callbackify((dirname) => Persistence.ensureDirectoryExistsAsync(dirname))(path.dirname(filename), function () { fs.access(filename, fs.constants.FS_OK, function (err) { if (!err) { fs.unlink(filename, cb) diff --git a/browser-version/lib/customUtils.js b/browser-version/lib/customUtils.js index 61d4ae5..eb40b27 100755 --- a/browser-version/lib/customUtils.js +++ b/browser-version/lib/customUtils.js @@ -9,7 +9,7 @@ * 
https://github.com/dominictarr/crypto-browserify * NOTE: Math.random() does not guarantee "cryptographic quality" but we actually don't need it * @param {number} size in bytes - * @return {array} + * @return {Array} */ const randomBytes = size => { const bytes = new Array(size) @@ -25,7 +25,7 @@ const randomBytes = size => { /** * Taken from the base64-js module * https://github.com/beatgammit/base64-js/ - * @param {array} uint8 + * @param {Array} uint8 * @return {string} */ const byteArrayToBase64 = uint8 => { diff --git a/browser-version/lib/storage.browser.js b/browser-version/lib/storage.browser.js index b713a7d..a69c0b4 100755 --- a/browser-version/lib/storage.browser.js +++ b/browser-version/lib/storage.browser.js @@ -5,10 +5,10 @@ * @module storageBrowser * @see module:storage * @see module:storageReactNative + * @private */ const localforage = require('localforage') -const { callbackify } = require('util') // TODO: util is not a dependency, this would fail if util is not polyfilled // Configure localforage to display NeDB name for now. Would be a good idea to let user use his own app name const store = localforage.createInstance({ @@ -19,12 +19,10 @@ const store = localforage.createInstance({ /** * Returns Promise if file exists. * - * Async version of {@link module:storageBrowser.exists}. * @param {string} file * @return {Promise} * @async * @alias module:storageBrowser.existsAsync - * @see module:storageBrowser.exists */ const existsAsync = async file => { try { @@ -37,27 +35,12 @@ const existsAsync = async file => { } /** - * @callback module:storageBrowser~existsCallback - * @param {boolean} exists - */ - -/** - * Callback returns true if file exists. - * @function - * @param {string} file - * @param {module:storageBrowser~existsCallback} cb - * @alias module:storageBrowser.exists - */ -const exists = callbackify(existsAsync) - -/** - * Async version of {@link module:storageBrowser.rename}. + * Moves the item from one path to another. 
* @param {string} oldPath * @param {string} newPath * @return {Promise} * @alias module:storageBrowser.renameAsync * @async - * @see module:storageBrowser.rename */ const renameAsync = async (oldPath, newPath) => { try { @@ -73,25 +56,13 @@ const renameAsync = async (oldPath, newPath) => { } /** - * Moves the item from one path to another - * @function - * @param {string} oldPath - * @param {string} newPath - * @param {NoParamCallback} c - * @return {void} - * @alias module:storageBrowser.rename - */ -const rename = callbackify(renameAsync) - -/** - * Async version of {@link module:storageBrowser.writeFile}. + * Saves the item at given path. * @param {string} file * @param {string} data * @param {object} [options] * @return {Promise} * @alias module:storageBrowser.writeFileAsync * @async - * @see module:storageBrowser.writeFile */ const writeFileAsync = async (file, data, options) => { // Options do not matter in browser setup @@ -103,18 +74,7 @@ const writeFileAsync = async (file, data, options) => { } /** - * Saves the item at given path - * @function - * @param {string} path - * @param {string} data - * @param {object} options - * @param {function} callback - * @alias module:storageBrowser.writeFile - */ -const writeFile = callbackify(writeFileAsync) - -/** - * Async version of {@link module:storageBrowser.appendFile}. + * Append to the item at given path. 
* @function * @param {string} filename * @param {string} toAppend @@ -122,7 +82,6 @@ const writeFile = callbackify(writeFileAsync) * @return {Promise} * @alias module:storageBrowser.appendFileAsync * @async - * @see module:storageBrowser.appendFile */ const appendFileAsync = async (filename, toAppend, options) => { // Options do not matter in browser setup @@ -135,25 +94,13 @@ const appendFileAsync = async (filename, toAppend, options) => { } /** - * Append to the item at given path - * @function - * @param {string} filename - * @param {string} toAppend - * @param {object} [options] - * @param {function} callback - * @alias module:storageBrowser.appendFile - */ -const appendFile = callbackify(appendFileAsync) - -/** - * Async version of {@link module:storageBrowser.readFile}. + * Read data at given path. * @function * @param {string} filename * @param {object} [options] * @return {Promise} * @alias module:storageBrowser.readFileAsync * @async - * @see module:storageBrowser.readFile */ const readFileAsync = async (filename, options) => { try { @@ -163,15 +110,6 @@ const readFileAsync = async (filename, options) => { return '' } } -/** - * Read data at given path - * @function - * @param {string} filename - * @param {object} options - * @param {function} callback - * @alias module:storageBrowser.readFile - */ -const readFile = callbackify(readFileAsync) /** * Async version of {@link module:storageBrowser.unlink}. 
@@ -179,8 +117,7 @@ const readFile = callbackify(readFileAsync) * @param {string} filename * @return {Promise} * @async - * @alias module:storageBrowser.unlinkAsync - * @see module:storageBrowser.unlink + * @alias module:storageBrowser */ const unlinkAsync = async filename => { try { @@ -190,15 +127,6 @@ const unlinkAsync = async filename => { } } -/** - * Remove the data at given path - * @function - * @param {string} path - * @param {function} callback - * @alias module:storageBrowser.unlink - */ -const unlink = callbackify(unlinkAsync) - /** * Shim for {@link module:storage.mkdirAsync}, nothing to do, no directories will be used on the browser. * @function @@ -209,15 +137,6 @@ const unlink = callbackify(unlinkAsync) * @async */ const mkdirAsync = (path, options) => Promise.resolve() -/** - * Shim for {@link module:storage.mkdir}, nothing to do, no directories will be used on the browser. - * @function - * @param {string} path - * @param {object} options - * @param {function} callback - * @alias module:storageBrowser.mkdir - */ -const mkdir = callbackify(mkdirAsync) /** * Shim for {@link module:storage.ensureDatafileIntegrityAsync}, nothing to do, no data corruption possible in the browser. @@ -228,61 +147,32 @@ const mkdir = callbackify(mkdirAsync) const ensureDatafileIntegrityAsync = (filename) => Promise.resolve() /** - * Shim for {@link module:storage.ensureDatafileIntegrity}, nothing to do, no data corruption possible in the browser. - * @function - * @param {string} filename - * @param {NoParamCallback} callback signature: err - * @alias module:storageBrowser.ensureDatafileIntegrity - */ -const ensureDatafileIntegrity = callbackify(ensureDatafileIntegrityAsync) - -/** - * Async version of {@link module:storageBrowser.crashSafeWriteFileLines}. 
- * @param {string} filename + * Fully write or rewrite the datafile, immune to crashes during the write operation (data will not be lost) + * * @param {string} filename * @param {string[]} lines * @return {Promise} * @alias module:storageBrowser.crashSafeWriteFileLinesAsync - * @see module:storageBrowser.crashSafeWriteFileLines */ const crashSafeWriteFileLinesAsync = async (filename, lines) => { lines.push('') // Add final new line await writeFileAsync(filename, lines.join('\n')) } -/** - * Fully write or rewrite the datafile, immune to crashes during the write operation (data will not be lost) - * @function - * @param {string} filename - * @param {string[]} lines - * @param {NoParamCallback} [callback] Optional callback, signature: err - * @alias module:storageBrowser.crashSafeWriteFileLines - */ -const crashSafeWriteFileLines = callbackify(crashSafeWriteFileLinesAsync) - // Interface -module.exports.exists = exists module.exports.existsAsync = existsAsync -module.exports.rename = rename module.exports.renameAsync = renameAsync -module.exports.writeFile = writeFile module.exports.writeFileAsync = writeFileAsync -module.exports.crashSafeWriteFileLines = crashSafeWriteFileLines module.exports.crashSafeWriteFileLinesAsync = crashSafeWriteFileLinesAsync -module.exports.appendFile = appendFile module.exports.appendFileAsync = appendFileAsync -module.exports.readFile = readFile module.exports.readFileAsync = readFileAsync -module.exports.unlink = unlink module.exports.unlinkAsync = unlinkAsync -module.exports.mkdir = mkdir module.exports.mkdirAsync = mkdirAsync -module.exports.ensureDatafileIntegrity = ensureDatafileIntegrity module.exports.ensureDatafileIntegrityAsync = ensureDatafileIntegrityAsync diff --git a/browser-version/lib/storage.react-native.js b/browser-version/lib/storage.react-native.js index ca2c8d2..ad7de93 100755 --- a/browser-version/lib/storage.react-native.js +++ b/browser-version/lib/storage.react-native.js @@ -3,12 +3,13 @@ * * This version is the 
React-Native version and uses [@react-native-async-storage/async-storage]{@link https://github.com/react-native-async-storage/async-storage}. * @module storageReactNative - * @see module:storageBrowser - * @see module:storageReactNative + * @see module:storagereact-native + * @see module:storage + * @private */ const AsyncStorage = require('@react-native-async-storage/async-storage').default -const { callbackify } = require('util') // TODO: util is not a dependency, this would fail if util is not polyfilled +const { callbackify } = require('util') /** * Async version of {@link module:storageReactNative.exists}. @@ -21,7 +22,7 @@ const { callbackify } = require('util') // TODO: util is not a dependency, this const existsAsync = async file => { try { const value = await AsyncStorage.getItem(file) - if (value !== null) return true // Even if value is undefined, localforage returns null + if (value !== null) return true // Even if value is undefined, AsyncStorage returns null return false } catch (error) { return false @@ -85,7 +86,7 @@ const rename = callbackify(renameAsync) * @see module:storageReactNative.writeFile */ const writeFileAsync = async (file, data, options) => { - // Options do not matter in browser setup + // Options do not matter in react-native setup try { await AsyncStorage.setItem(file, data) } catch (error) { @@ -116,7 +117,7 @@ const writeFile = callbackify(writeFileAsync) * @see module:storageReactNative.appendFile */ const appendFileAsync = async (filename, toAppend, options) => { - // Options do not matter in browser setup + // Options do not matter in react-native setup try { const contents = (await AsyncStorage.getItem(filename)) || '' await AsyncStorage.setItem(filename, contents + toAppend) @@ -192,7 +193,7 @@ const unlinkAsync = async filename => { const unlink = callbackify(unlinkAsync) /** - * Shim for {@link module:storage.mkdirAsync}, nothing to do, no directories will be used on the browser. 
+ * Shim for {@link module:storage.mkdirAsync}, nothing to do, no directories will be used on the react-native. * @function * @param {string} dir * @param {object} [options] @@ -203,7 +204,7 @@ const unlink = callbackify(unlinkAsync) const mkdirAsync = (dir, options) => Promise.resolve() /** - * Shim for {@link module:storage.mkdir}, nothing to do, no directories will be used on the browser. + * Shim for {@link module:storage.mkdir}, nothing to do, no directories will be used on the react-native. * @function * @param {string} path * @param {object} options @@ -213,7 +214,7 @@ const mkdirAsync = (dir, options) => Promise.resolve() const mkdir = callbackify(mkdirAsync) /** - * Shim for {@link module:storage.ensureDatafileIntegrityAsync}, nothing to do, no data corruption possible in the browser. + * Shim for {@link module:storage.ensureDatafileIntegrityAsync}, nothing to do, no data corruption possible in the react-native. * @param {string} filename * @return {Promise} * @alias module:storageReactNative.ensureDatafileIntegrityAsync @@ -221,7 +222,7 @@ const mkdir = callbackify(mkdirAsync) const ensureDatafileIntegrityAsync = (filename) => Promise.resolve() /** - * Shim for {@link module:storage.ensureDatafileIntegrity}, nothing to do, no data corruption possible in the browser. + * Shim for {@link module:storage.ensureDatafileIntegrity}, nothing to do, no data corruption possible in the react-native. 
* @function * @param {string} filename * @param {NoParamCallback} callback signature: err diff --git a/index.d.ts b/index.d.ts index 5a65f75..ae85d47 100644 --- a/index.d.ts +++ b/index.d.ts @@ -26,8 +26,6 @@ declare class Nedb extends EventEmitter { getAllData(): T[]; - resetIndexes(newData?: any): void; - ensureIndex(options: Nedb.EnsureIndexOptions, callback?: (err: Error | null) => void): void; ensureIndexAsync(options: Nedb.EnsureIndexOptions): Promise; @@ -36,17 +34,6 @@ declare class Nedb extends EventEmitter { removeIndexAsync(fieldName: string): Promise; - addToIndexes(doc: T | T[]): void; - - removeFromIndexes(doc: T | T[]): void; - - updateIndexes(oldDoc: T, newDoc: T): void; - updateIndexes(updates: Array<{ oldDoc: T; newDoc: T }>): void; - - getCandidates(query: any, dontExpireStaleDocs: boolean, callback?: (err: Error | null, candidates: T[]) => void): void; - - getCandidatesAsync(query: any, dontExpireStaleDocs: boolean): Promise; - insert(newDoc: T, callback?: (err: Error | null, document: T) => void): void; insert(newDocs: T[], callback?: (err: Error | null, documents: T[]) => void): void; diff --git a/jsdoc2md.js b/jsdoc2md.js index 08de194..0cf2085 100644 --- a/jsdoc2md.js +++ b/jsdoc2md.js @@ -9,9 +9,9 @@ const jsdocConf = './jsdoc.conf.js' const outputDir = './docs' const getJsdocDataOptions = { - /* same inpout path as jsdoc */ + /* same input path as jsdoc */ files: require(jsdocConf).source.include, - configure: './jsdoc.conf.js', + configure: jsdocConf, 'no-cache': true } diff --git a/lib/byline.js b/lib/byline.js index f9a752d..00a3ced 100644 --- a/lib/byline.js +++ b/lib/byline.js @@ -21,6 +21,7 @@ // IN THE SOFTWARE. /** * @module byline + * @private */ const stream = require('stream') const timers = require('timers') @@ -37,6 +38,7 @@ const createLineStream = (readStream, options) => { * Fork from {@link https://github.com/jahewson/node-byline}. 
* @see https://github.com/jahewson/node-byline * @alias module:byline.LineStream + * @private */ class LineStream extends stream.Transform { constructor (options) { @@ -112,9 +114,4 @@ class LineStream extends stream.Transform { } } -// convenience API -module.exports = (readStream, options) => module.exports.createStream(readStream, options) - -// basic API -module.exports.createStream = (readStream, options) => readStream ? createLineStream(readStream, options) : new LineStream(options) -module.exports.LineStream = LineStream +module.exports = createLineStream diff --git a/lib/cursor.js b/lib/cursor.js index e5f4f72..87a583c 100755 --- a/lib/cursor.js +++ b/lib/cursor.js @@ -1,18 +1,11 @@ const model = require('./model.js') -const { callbackify, promisify } = require('util') +const { callbackify } = require('util') /** * Has a callback - * @callback Cursor~execFnWithCallback - * @param {?Error} err - * @param {?document[]|?document} res - */ - -/** - * Does not have a callback, may return a Promise. - * @callback Cursor~execFnWithoutCallback - * @param {?document[]|?document} res - * @return {Promise|*} + * @callback Cursor~mapFn + * @param {document[]} res + * @return {*|Promise<*>} */ /** @@ -26,10 +19,9 @@ class Cursor { * Create a new cursor for this collection * @param {Datastore} db - The datastore this cursor is bound to * @param {query} query - The query this cursor will operate on - * @param {Cursor~execFnWithoutCallback|Cursor~execFnWithCallback} [execFn] - Handler to be executed after cursor has found the results and before the callback passed to find/findOne/update/remove - * @param {boolean} [hasCallback = true] If false, specifies that the `execFn` is of type {@link Cursor~execFnWithoutCallback} rather than {@link Cursor~execFnWithCallback}. 
+ * @param {Cursor~mapFn} [mapFn] - Handler to be executed after cursor has found the results and before the callback passed to find/findOne/update/remove */ - constructor (db, query, execFn, hasCallback = true) { + constructor (db, query, mapFn) { /** * @protected * @type {Datastore} @@ -42,16 +34,10 @@ class Cursor { this.query = query || {} /** * The handler to be executed after cursor has found the results. - * @type {Cursor~execFnWithoutCallback|Cursor~execFnWithCallback|undefined} + * @type {Cursor~mapFn} * @protected */ - if (execFn) this.execFn = execFn - /** - * Determines if the {@link Cursor#execFn} is an {@link Cursor~execFnWithoutCallback} or not. - * @protected - * @type {boolean} - */ - this.hasCallback = hasCallback + if (mapFn) this.mapFn = mapFn /** * @see Cursor#limit * @type {undefined|number} @@ -125,9 +111,9 @@ class Cursor { * This is an internal function. You should use {@link Cursor#execAsync} or {@link Cursor#exec}. * @param {document[]} candidates * @return {document[]} - * @protected + * @private */ - project (candidates) { + _project (candidates) { const res = [] let action @@ -174,91 +160,67 @@ class Cursor { * Will return pointers to matched elements (shallow copies), returning full copies is the role of find or findOne * This is an internal function, use execAsync which uses the executor * @return {document[]|Promise<*>} + * @private */ async _execAsync () { let res = [] let added = 0 let skipped = 0 - let error = null - - try { - const candidates = await this.db.getCandidatesAsync(this.query) - for (const candidate of candidates) { - if (model.match(candidate, this.query)) { - // If a sort is defined, wait for the results to be sorted before applying limit and skip - if (!this._sort) { - if (this._skip && this._skip > skipped) skipped += 1 - else { - res.push(candidate) - added += 1 - if (this._limit && this._limit <= added) break - } - } else res.push(candidate) - } + const candidates = await this.db._getCandidatesAsync(this.query) 
+ + for (const candidate of candidates) { + if (model.match(candidate, this.query)) { + // If a sort is defined, wait for the results to be sorted before applying limit and skip + if (!this._sort) { + if (this._skip && this._skip > skipped) skipped += 1 + else { + res.push(candidate) + added += 1 + if (this._limit && this._limit <= added) break + } + } else res.push(candidate) } + } - // Apply all sorts - if (this._sort) { - // Sorting - const criteria = Object.entries(this._sort).map(([key, direction]) => ({ key, direction })) - res.sort((a, b) => { - for (const criterion of criteria) { - const compare = criterion.direction * model.compareThings(model.getDotValue(a, criterion.key), model.getDotValue(b, criterion.key), this.db.compareStrings) - if (compare !== 0) return compare - } - return 0 - }) + // Apply all sorts + if (this._sort) { + // Sorting + const criteria = Object.entries(this._sort).map(([key, direction]) => ({ key, direction })) + res.sort((a, b) => { + for (const criterion of criteria) { + const compare = criterion.direction * model.compareThings(model.getDotValue(a, criterion.key), model.getDotValue(b, criterion.key), this.db.compareStrings) + if (compare !== 0) return compare + } + return 0 + }) - // Applying limit and skip - const limit = this._limit || res.length - const skip = this._skip || 0 + // Applying limit and skip + const limit = this._limit || res.length + const skip = this._skip || 0 - res = res.slice(skip, skip + limit) - } - - // Apply projection - try { - res = this.project(res) - } catch (e) { - error = e - res = undefined - } - } catch (e) { - error = e + res = res.slice(skip, skip + limit) } - if (this.execFn && this.hasCallback) return promisify(this.execFn)(error, res) - else if (error) throw error - else if (this.execFn) return this.execFn(res) - else return res + + // Apply projection + res = this._project(res) + if (this.mapFn) return this.mapFn(res) + return res } /** * @callback Cursor~execCallback * @param {Error} err - * 
@param {document[]|*} res If an execFn was given to the Cursor, then the type of this parameter is the one returned by the execFn. + * @param {document[]|*} res If a mapFn was given to the Cursor, then the type of this parameter is the one returned by the mapFn. */ - /** - * Get all matching elements - * Will return pointers to matched elements (shallow copies), returning full copies is the role of find or findOne - * - * This is an internal function, use {@link Cursor#exec} which uses the [executor]{@link Datastore#executor}. - * @param {Cursor~execCallback} _callback - * @protected - * @see Cursor#exec - */ - _exec (_callback) { - callbackify(this._execAsync.bind(this))(_callback) - } - /** * Get all matching elements * Will return pointers to matched elements (shallow copies), returning full copies is the role of find or findOne * @param {Cursor~execCallback} _callback */ exec (_callback) { - this.db.executor.push({ this: this, fn: this._exec, arguments: [_callback] }) + callbackify(() => this.execAsync())(_callback) } /** diff --git a/lib/customUtils.js b/lib/customUtils.js index e94c741..34bfd94 100755 --- a/lib/customUtils.js +++ b/lib/customUtils.js @@ -2,6 +2,7 @@ * Utility functions that need to be reimplemented for each environment.
* This is the version for Node.js * @module customUtilsNode + * @private */ const crypto = require('crypto') diff --git a/lib/datastore.js b/lib/datastore.js index 6107997..36e9f81 100755 --- a/lib/datastore.js +++ b/lib/datastore.js @@ -8,6 +8,12 @@ const model = require('./model.js') const Persistence = require('./persistence.js') const { isDate } = require('./utils.js') +// TODO: have one version of the documentation for each function +// TODO: remove jsdoc2md file that generates a docs/ directory, and replace it with something that generates the README +// TODO: check the classes and modules which need to be included in the documentation +// TODO: replace examples of the Readme with @example JSDoc tags +// TODO: update changelog + /** * Callback with no parameter * @callback NoParamCallback @@ -48,6 +54,13 @@ const { isDate } = require('./utils.js') * @return {Promise<*>} */ +/** + * Callback with generic parameters + * @callback GenericCallback + * @param {?Error} err + * @param {...*} args + */ + /** * Compaction event. Happens when the Datastore's Persistence has been compacted. * It happens when calling `datastore.persistence.compactDatafile`, which is called periodically if you have called @@ -118,7 +131,9 @@ const { isDate } = require('./utils.js') */ /** - * The `beforeDeserialization`and `afterDeserialization` callbacks should + * The `beforeDeserialization` and `afterDeserialization` callbacks are hooks which are executed respectively before + * parsing each document and after stringifying them. They can be used for example to encrypt the Datastore. + * The `beforeDeserialization` should revert what `afterDeserialization` has done. * @callback serializationHook * @param {string} x * @return {string} @@ -145,22 +160,23 @@ class Datastore extends EventEmitter { * next major version.** * @param {string} [options.filename = null] Path to the file where the data is persisted. If left blank, the datastore is * automatically considered in-memory only.
It cannot end with a `~` which is used in the temporary files NeDB uses to - * perform crash-safe writes. - * @param {boolean} [options.inMemoryOnly = false] If set to true, no data will be written in storage. + * perform crash-safe writes. Not used if `options.inMemoryOnly` is `true`. + * @param {boolean} [options.inMemoryOnly = false] If set to true, no data will be written in storage. This option has + * priority over `options.filename`. * @param {boolean} [options.timestampData = false] If set to true, createdAt and updatedAt will be created and * populated automatically (if not specified by user) * @param {boolean} [options.autoload = false] If used, the database will automatically be loaded from the datafile * upon creation (you don't need to call `loadDatabase`). Any command issued before load is finished is buffered and * will be executed when load is done. When autoloading is done, you can either use the `onload` callback, or you can * use `this.autoloadPromise` which resolves (or rejects) when autloading is done. - * @param {function} [options.onload] If you use autoloading, this is the handler called after the `loadDatabase`. It + * @param {NoParamCallback} [options.onload] If you use autoloading, this is the handler called after the `loadDatabase`. It * takes one `error` argument. If you use autoloading without specifying this handler, and an error happens during * load, an error will be thrown. - * @param {function} [options.beforeDeserialization] Hook you can use to transform data after it was serialized and + * @param {serializationHook} [options.beforeDeserialization] Hook you can use to transform data after it was serialized and * before it is written to disk. Can be used for example to encrypt data before writing database to disk. This * function takes a string as parameter (one line of an NeDB data file) and outputs the transformed string, **which * must absolutely not contain a `\n` character** (or data will be lost). 
- * @param {function} [options.afterSerialization] Inverse of `afterSerialization`. Make sure to include both and not + * @param {serializationHook} [options.afterSerialization] Inverse of `afterSerialization`. Make sure to include both and not * just one, or you risk data loss. For the same reason, make sure both functions are inverses of one another. Some * failsafe mechanisms are in place to prevent data loss if you misuse the serialization hooks: NeDB checks that never * one is declared without the other, and checks that they are reverse of one another by testing on random strings of @@ -172,11 +188,6 @@ class Datastore extends EventEmitter { * @param {compareStrings} [options.compareStrings] If specified, it overrides default string comparison which is not * well adapted to non-US characters in particular accented letters. Native `localCompare` will most of the time be * the right choice. - * @param {string} [options.nodeWebkitAppName] **Deprecated:** if you are using NeDB from whithin a Node Webkit app, - * specify its name (the same one you use in the `package.json`) in this field and the `filename` will be relative to - * the directory Node Webkit uses to store the rest of the application's data (local storage etc.). It works on Linux, - * OS X and Windows. Now that you can use `require('nw.gui').App.dataPath` in Node Webkit to get the path to the data - * directory for your application, you should not use this option anymore and it will be removed. * * @fires Datastore#event:"compaction.done" */ @@ -189,7 +200,7 @@ class Datastore extends EventEmitter { deprecate(() => { filename = options this.inMemoryOnly = false // Default - }, 'Giving a string to the Datastore constructor is deprecated and will be removed in the next version. Please use an options object with an argument \'filename\'.')() + }, '@seald-io/nedb: Giving a string to the Datastore constructor is deprecated and will be removed in the next major version. 
Please use an options object with an argument \'filename\'.')() } else { options = options || {} filename = options.filename @@ -245,7 +256,6 @@ class Datastore extends EventEmitter { */ this.persistence = new Persistence({ db: this, - nodeWebkitAppName: options.nodeWebkitAppName, afterSerialization: options.afterSerialization, beforeDeserialization: options.beforeDeserialization, corruptAlertThreshold: options.corruptAlertThreshold @@ -296,15 +306,16 @@ class Datastore extends EventEmitter { if (options.onload) options.onload(err) else throw err }) - } + } else this.autoloadPromise = null } /** * Load the database from the datafile, and trigger the execution of buffered commands if any. - * @param {function} callback + * @param {NoParamCallback} callback */ loadDatabase (callback) { - this.executor.push({ this: this.persistence, fn: this.persistence.loadDatabase, arguments: [callback] }, true) + if (typeof callback !== 'function') callback = () => {} + callbackify(() => this.loadDatabaseAsync())(callback) } /** @@ -327,9 +338,10 @@ class Datastore extends EventEmitter { /** * Reset all currently defined indexes. - * @param {?document|?document[]} newData + * @param {?document|?document[]} [newData] + * @private */ - resetIndexes (newData) { + _resetIndexes (newData) { for (const index of Object.values(this.indexes)) { index.reset(newData) } @@ -347,9 +359,9 @@ class Datastore extends EventEmitter { * @param {number} [options.expireAfterSeconds] - if set, the created index is a TTL (time to live) index, that will automatically remove documents when the system date becomes larger than the date on the indexed field plus `expireAfterSeconds`. 
Documents where the indexed field is not specified or not a `Date` object are ignored * @param {NoParamCallback} callback Callback, signature: err */ - // TODO: contrary to what is said in the JSDoc, this function should probably be called through the executor, it persists a new state ensureIndex (options = {}, callback = () => {}) { - callbackify(this.ensureIndexAsync.bind(this))(options, callback) + const promise = this.ensureIndexAsync(options) // to make sure the synchronous part of ensureIndexAsync is executed synchronously + callbackify(() => promise)(callback) } /** @@ -362,7 +374,6 @@ class Datastore extends EventEmitter { * @return {Promise} * @see Datastore#ensureIndex */ - // TODO: contrary to what is said in the JSDoc, this function should probably be called through the executor, it persists a new state async ensureIndexAsync (options = {}) { if (!options.fieldName) { const err = new Error('Cannot create an index without a fieldName') @@ -382,7 +393,7 @@ class Datastore extends EventEmitter { } // We may want to force all options to be persisted including defaults, not just the ones passed the index creation function - await this.persistence.persistNewStateAsync([{ $$indexCreated: options }]) + await this.executor.pushAsync(() => this.persistence.persistNewStateAsync([{ $$indexCreated: options }]), true) } /** @@ -392,9 +403,9 @@ class Datastore extends EventEmitter { * field in a nested document. 
* @param {NoParamCallback} callback Optional callback, signature: err */ - // TODO: contrary to what is said in the JSDoc, this function should probably be called through the executor, it persists a new state removeIndex (fieldName, callback = () => {}) { - callbackify(this.removeIndexAsync.bind(this))(fieldName, callback) + const promise = this.removeIndexAsync(fieldName) + callbackify(() => promise)(callback) } /** @@ -404,11 +415,10 @@ class Datastore extends EventEmitter { * @return {Promise} * @see Datastore#removeIndex */ - // TODO: contrary to what is said in the JSDoc, this function should probably be called through the executor, it persists a new state async removeIndexAsync (fieldName) { delete this.indexes[fieldName] - await this.persistence.persistNewStateAsync([{ $$indexRemoved: fieldName }]) + await this.executor.pushAsync(() => this.persistence.persistNewStateAsync([{ $$indexRemoved: fieldName }]), true) } /** @@ -416,9 +426,9 @@ class Datastore extends EventEmitter { * * This is an internal function. * @param {document} doc - * @protected + * @private */ - addToIndexes (doc) { + _addToIndexes (doc) { let failingIndex let error const keys = Object.keys(this.indexes) @@ -448,9 +458,9 @@ class Datastore extends EventEmitter { * * This is an internal function. * @param {document} doc - * @protected + * @private */ - removeFromIndexes (doc) { + _removeFromIndexes (doc) { for (const index of Object.values(this.indexes)) { index.remove(doc) } @@ -468,8 +478,9 @@ class Datastore extends EventEmitter { * `{oldDoc, newDoc}` pairs. * @param {document} [newDoc] Document to replace the oldDoc with. If the first argument is an `Array` of * `{oldDoc, newDoc}` pairs, this second argument is ignored. 
+ * @private */ - updateIndexes (oldDoc, newDoc) { + _updateIndexes (oldDoc, newDoc) { let failingIndex let error const keys = Object.keys(this.indexes) @@ -501,7 +512,7 @@ class Datastore extends EventEmitter { * * @private */ - _getCandidates (query) { + _getRawCandidates (query) { const indexNames = Object.keys(this.indexes) // STEP 1: get candidates list by checking indexes from most to least frequent usecase // For a basic match @@ -544,37 +555,16 @@ class Datastore extends EventEmitter { * * This is an internal function. * @param {query} query - * @param {boolean|function} [dontExpireStaleDocs = false] If true don't remove stale docs. Useful for the remove - * function which shouldn't be impacted by expirations. If argument is not given, it is used as the callback. - * @param {MultipleDocumentsCallback} callback Signature err, candidates - * - * @protected - */ - getCandidates (query, dontExpireStaleDocs, callback) { - if (typeof dontExpireStaleDocs === 'function') { - callback = dontExpireStaleDocs - dontExpireStaleDocs = false - } - - callbackify(this.getCandidatesAsync.bind(this))(query, dontExpireStaleDocs, callback) - } - - /** - * Async version of {@link Datastore#getCandidates}. - * - * This is an internal function. - * @param {query} query * @param {boolean} [dontExpireStaleDocs = false] If true don't remove stale docs. Useful for the remove function * which shouldn't be impacted by expirations. 
* @return {Promise} candidates - * @see Datastore#getCandidates - * @protected + * @private */ - async getCandidatesAsync (query, dontExpireStaleDocs = false) { + async _getCandidatesAsync (query, dontExpireStaleDocs = false) { const validDocs = [] // STEP 1: get candidates list by checking indexes from most to least frequent usecase - const docs = this._getCandidates(query) + const docs = this._getRawCandidates(query) // STEP 2: remove all expired documents if (!dontExpireStaleDocs) { const expiredDocsIds = [] @@ -593,22 +583,10 @@ class Datastore extends EventEmitter { /** * Insert a new document - * This is an internal function, use {@link Datastore#insert} which has the same signature. - * @param {document|document[]} newDoc - * @param {SingleDocumentCallback} callback - * - * @private - */ - _insert (newDoc, callback) { - return callbackify(this._insertAsync.bind(this))(newDoc, callback) - } - - /** - * Async version of {@link Datastore#_insert}. + * This is an internal function, use {@link Datastore#insertAsync} which has the same signature. 
* @param {document|document[]} newDoc * @return {Promise} * @private - * @see Datastore#_insert */ async _insertAsync (newDoc) { const preparedDoc = this._prepareDocumentForInsertion(newDoc) @@ -662,7 +640,7 @@ class Datastore extends EventEmitter { */ _insertInCache (preparedDoc) { if (Array.isArray(preparedDoc)) this._insertMultipleDocsInCache(preparedDoc) - else this.addToIndexes(preparedDoc) + else this._addToIndexes(preparedDoc) } /** @@ -677,7 +655,7 @@ class Datastore extends EventEmitter { for (let i = 0; i < preparedDocs.length; i += 1) { try { - this.addToIndexes(preparedDocs[i]) + this._addToIndexes(preparedDocs[i]) } catch (e) { error = e failingIndex = i @@ -687,7 +665,7 @@ class Datastore extends EventEmitter { if (error) { for (let i = 0; i < failingIndex; i += 1) { - this.removeFromIndexes(preparedDocs[i]) + this._removeFromIndexes(preparedDocs[i]) } throw error @@ -701,8 +679,9 @@ class Datastore extends EventEmitter { * * @private */ - insert (newDoc, callback = () => {}) { - this.executor.push({ this: this, fn: this._insert, arguments: [newDoc, callback] }) + insert (newDoc, callback) { + if (typeof callback !== 'function') callback = () => {} + callbackify(doc => this.insertAsync(doc))(newDoc, callback) } /** @@ -741,7 +720,7 @@ class Datastore extends EventEmitter { * @async */ countAsync (query) { - return new Cursor(this, query, async docs => docs.length, false) + return new Cursor(this, query, docs => docs.length) } /** @@ -778,7 +757,7 @@ class Datastore extends EventEmitter { * @async */ findAsync (query, projection = {}) { - const cursor = new Cursor(this, query, docs => docs.map(doc => model.deepCopy(doc)), false) + const cursor = new Cursor(this, query, docs => docs.map(doc => model.deepCopy(doc))) cursor.projection(projection) return cursor @@ -822,7 +801,7 @@ class Datastore extends EventEmitter { * @see Datastore#findOne */ findOneAsync (query, projection = {}) { - const cursor = new Cursor(this, query, docs => docs.length === 1 ? 
model.deepCopy(docs[0]) : null, false) + const cursor = new Cursor(this, query, docs => docs.length === 1 ? model.deepCopy(docs[0]) : null) cursor.projection(projection).limit(1) return cursor @@ -852,38 +831,6 @@ class Datastore extends EventEmitter { /** * Update all docs matching query. * - * Use {@link Datastore#update} which has the same signature. - * @param {query} query is the same kind of finding query you use with `find` and `findOne` - * @param {document|update} update specifies how the documents should be modified. It is either a new document or a - * set of modifiers (you cannot use both together, it doesn't make sense!). Using a new document will replace the - * matched docs. Using a set of modifiers will create the fields they need to modify if they don't exist, and you can - * apply them to subdocs. Available field modifiers are `$set` to change a field's value, `$unset` to delete a field, - * `$inc` to increment a field's value and `$min`/`$max` to change field's value, only if provided value is - * less/greater than current value. To work on arrays, you have `$push`, `$pop`, `$addToSet`, `$pull`, and the special - * `$each` and `$slice`. - * @param {object} [options] Optional options. If not given, is interpreted as the callback. - * @param {boolean} [options.multi = false] If true, can update multiple documents - * @param {boolean} [options.upsert = false] If true, can insert a new document corresponding to the `update` rules if - * your `query` doesn't match anything. If your `update` is a simple object with no modifiers, it is the inserted - * document. In the other case, the `query` is stripped from all operator recursively, and the `update` is applied to - * it. - * @param {boolean} [options.returnUpdatedDocs = false] (not Mongo-DB compatible) If true and update is not an upsert, - * will return the array of documents matched by the find query and updated. Updated documents will be returned even - * if the update did not actually modify them. 
- * @param {Datastore~updateCallback} callback - * - * @private - */ - _update (query, update, options, callback) { - const _callback = (err, res = {}) => { - callback(err, res.numAffected, res.affectedDocuments, res.upsert) - } - callbackify(this._updateAsync.bind(this))(query, update, options, _callback) - } - - /** - * Async version of {@link Datastore#_update}. - * * Use {@link Datastore#updateAsync} which has the same signature. * @param {query} query is the same kind of finding query you use with `find` and `findOne` * @param {document|update} update specifies how the documents should be modified. It is either a new document or a @@ -904,7 +851,6 @@ class Datastore extends EventEmitter { * if the update did not actually modify them. * * @return {Promise<{numAffected: number, affectedDocuments: document[]|document|null, upsert: boolean}>} - * @see Datastore#_update * @private */ async _updateAsync (query, update, options) { @@ -913,7 +859,7 @@ class Datastore extends EventEmitter { // If upsert option is set, check whether we need to insert the doc if (upsert) { - const cursor = new Cursor(this, query, x => x, false) + const cursor = new Cursor(this, query) // Need to use an internal function not tied to the executor to avoid deadlock const docs = await cursor.limit(1)._execAsync() @@ -940,7 +886,7 @@ class Datastore extends EventEmitter { const modifications = [] let createdAt - const candidates = await this.getCandidatesAsync(query) + const candidates = await this._getCandidatesAsync(query) // Preparing update (if an error is thrown here neither the datafile nor // the in-memory indexes are affected) for (const candidate of candidates) { @@ -957,7 +903,7 @@ class Datastore extends EventEmitter { } // Change the docs in memory - this.updateIndexes(modifications) + this._updateIndexes(modifications) // Update the datafile const updatedDocs = modifications.map(x => x.newDoc) @@ -999,7 +945,10 @@ class Datastore extends EventEmitter { options = {} } const 
callback = cb || (() => {}) - this.executor.push({ this: this, fn: this._update, arguments: [query, update, options, callback] }) + const _callback = (err, res = {}) => { + callback(err, res.numAffected, res.affectedDocuments, res.upsert) + } + callbackify((query, update, options) => this.updateAsync(query, update, options))(query, update, options, _callback) } /** @@ -1038,23 +987,6 @@ class Datastore extends EventEmitter { /** * Remove all docs matching the query. * - * Use {@link Datastore#remove} which has the same signature. - * - * For now very naive implementation (similar to update). - * @param {query} query - * @param {object} options options - * @param {boolean} [options.multi = false] If true, can update multiple documents - * @param {Datastore~removeCallback} callback - * @see Datastore#remove - * @private - */ - _remove (query, options, callback) { - callbackify(this._removeAsync.bind(this))(query, options, callback) - } - - /** - * Async version of {@link Datastore#_remove}. - * * Use {@link Datastore#removeAsync} which has the same signature. * @param {query} query * @param {object} [options] Optional options @@ -1066,7 +998,7 @@ class Datastore extends EventEmitter { async _removeAsync (query, options = {}) { const multi = options.multi !== undefined ? 
options.multi : false - const candidates = await this.getCandidatesAsync(query, true) + const candidates = await this._getCandidatesAsync(query, true) const removedDocs = [] let numRemoved = 0 @@ -1074,7 +1006,7 @@ class Datastore extends EventEmitter { if (model.match(d, query) && (multi || numRemoved === 0)) { numRemoved += 1 removedDocs.push({ $$deleted: true, _id: d._id }) - this.removeFromIndexes(d) + this._removeFromIndexes(d) } }) @@ -1095,7 +1027,7 @@ class Datastore extends EventEmitter { options = {} } const callback = cb || (() => {}) - this.executor.push({ this: this, fn: this._remove, arguments: [query, options, callback] }) + callbackify((query, options) => this.removeAsync(query, options))(query, options, callback) } /** diff --git a/lib/executor.js b/lib/executor.js index 531ced7..b7767f1 100755 --- a/lib/executor.js +++ b/lib/executor.js @@ -3,6 +3,7 @@ const Waterfall = require('./waterfall') /** * Executes operations sequentially. * Has an option for a buffer that can be triggered afterwards. + * @private */ class Executor { /** @@ -26,64 +27,23 @@ class Executor { * @type {Waterfall} * @private */ - this.buffer = new Waterfall() - this.buffer.chain(new Promise(resolve => { - /** - * Method to trigger the buffer processing. - * - * Do not be use directly, use `this.processBuffer` instead. - * @function - * @private - */ - this._triggerBuffer = resolve - })) + this.buffer = null + /** + * Method to trigger the buffer processing. + * + * Do not be use directly, use `this.processBuffer` instead. 
+ * @function + * @private + */ + this._triggerBuffer = null + this.resetBuffer() } /** * If executor is ready, queue task (and process it immediately if executor was idle) * If not, buffer task for later processing - * @param {Object} task - * @param {Object} task.this - Object to use as this - * @param {function} task.fn - Function to execute - * @param {Array} task.arguments - Array of arguments, IMPORTANT: only the last argument may be a function - * (the callback) and the last argument cannot be false/undefined/null - * @param {Boolean} [forceQueuing = false] Optional (defaults to false) force executor to queue task even if it is not ready - */ - push (task, forceQueuing) { - const func = async () => { - const lastArg = task.arguments[task.arguments.length - 1] - await new Promise(resolve => { - if (typeof lastArg === 'function') { - // We got a callback - task.arguments.pop() // remove original callback - task.fn.apply(task.this, [...task.arguments, function () { - resolve() // triggers next task after next tick - lastArg.apply(null, arguments) // call original callback - }]) - } else if (!lastArg && task.arguments.length !== 0) { - // We got a falsy callback - task.arguments.pop() // remove original callback - task.fn.apply(task.this, [...task.arguments, () => { - resolve() - }]) - } else { - // We don't have a callback - task.fn.apply(task.this, [...task.arguments, () => { - resolve() - }]) - } - }) - } - this.pushAsync(func, forceQueuing) - } - - /** - * Async version of {@link Executor#push}. - * This version is way simpler than its callbackEquivalent: you give it an async function `task`, it is executed when - * all the previous tasks are done, and then resolves or rejects and when it is finished with its original result or - * error. 
- * @param {AsyncFunction} task - * @param {boolean} [forceQueuing = false] + * @param {AsyncFunction} task Function to execute + * @param {boolean} [forceQueuing = false] Optional (defaults to false) force executor to queue task even if it is not ready * @return {Promise<*>} * @async * @see Executor#push @@ -102,6 +62,17 @@ class Executor { this._triggerBuffer() this.queue.waterfall(() => this.buffer.guardian) } + + /** + * Removes all tasks queued up in the buffer + */ + resetBuffer () { + this.buffer = new Waterfall() + this.buffer.chain(new Promise(resolve => { + this._triggerBuffer = resolve + })) + if (this.ready) this._triggerBuffer() + } } // Interface diff --git a/lib/indexes.js b/lib/indexes.js index df53228..fee3ccc 100755 --- a/lib/indexes.js +++ b/lib/indexes.js @@ -30,6 +30,7 @@ const projectForUnique = elt => { /** * Indexes on field names, with atomic operations and which can optionally enforce a unique constraint or allow indexed * fields to be undefined + * @private */ class Index { /** diff --git a/lib/model.js b/lib/model.js index 6d97830..b310f07 100755 --- a/lib/model.js +++ b/lib/model.js @@ -4,6 +4,7 @@ * Copying * Querying, update * @module model + * @private */ const { uniq, isDate, isRegExp } = require('./utils.js') @@ -260,7 +261,7 @@ const compareThings = (a, b, _compareStrings) => { /** * Create the complete modifier function - * @param {function} lastStepModifierFunction a lastStepModifierFunction + * @param {modifierFunction} lastStepModifierFunction a lastStepModifierFunction * @param {boolean} [unset = false] Bad looking specific fix, needs to be generalized modifiers that behave like $unset are implemented * @return {modifierFunction} * @private diff --git a/lib/persistence.js b/lib/persistence.js index 22e4bb0..b888637 100755 --- a/lib/persistence.js +++ b/lib/persistence.js @@ -1,5 +1,5 @@ const path = require('path') -const { callbackify, promisify, deprecate } = require('util') +const { callbackify } = require('util') const 
byline = require('./byline') const customUtils = require('./customUtils.js') const Index = require('./indexes.js') @@ -47,7 +47,6 @@ class Persistence { * Create a new Persistence object for database options.db * @param {Datastore} options.db * @param {Number} [options.corruptAlertThreshold] Optional, threshold after which an alert is thrown if too much data is corrupt - * @param {string} [options.nodeWebkitAppName] Optional, specify the name of your NW app if you want options.filename to be relative to the directory where Node Webkit stores application data such as cookies and local storage (the best place to store data in my opinion) * @param {serializationHook} [options.beforeDeserialization] Hook you can use to transform data after it was serialized and before it is written to disk. * @param {serializationHook} [options.afterSerialization] Inverse of `afterSerialization`. */ @@ -84,13 +83,6 @@ class Persistence { } } } - - // For NW apps, store data in the same directory where NW stores application data - if (this.filename && options.nodeWebkitAppName) { - deprecate(() => { - this.filename = Persistence.getNWAppFilename(options.nodeWebkitAppName, this.filename) - }, 'The nodeWebkitAppName option is deprecated and will be removed in the next version. To get the path to the directory where Node Webkit stores the data for your app, use the internal nw.gui module like this require(\'nw.gui\').App.dataPath See https://github.com/rogerwang/node-webkit/issues/500')() - } } /** @@ -98,21 +90,9 @@ class Persistence { * This serves as a compaction function since the cache always contains only the number of documents in the collection * while the data file is append-only so it may grow larger * - * This is an internal function, use {@link Persistence#compactDatafile} which uses the [executor]{@link Datastore#executor}. 
- * @param {NoParamCallback} [callback = () => {}] - * @protected - */ - persistCachedDatabase (callback = () => {}) { - return callbackify(this.persistCachedDatabaseAsync.bind(this))(callback) - } - - /** - * Async version of {@link Persistence#persistCachedDatabase}. - * * This is an internal function, use {@link Persistence#compactDatafileAsync} which uses the [executor]{@link Datastore#executor}. * @return {Promise} * @protected - * @see Persistence#persistCachedDatabase */ async persistCachedDatabaseAsync () { const lines = [] @@ -141,10 +121,11 @@ class Persistence { /** * Queue a rewrite of the datafile * @param {NoParamCallback} [callback = () => {}] - * @see Persistence#persistCachedDatabase + * @see Persistence#persistCachedDatabaseAsync */ - compactDatafile (callback = () => {}) { - this.db.executor.push({ this: this, fn: this.persistCachedDatabase, arguments: [callback] }) + compactDatafile (callback) { + if (typeof callback !== 'function') callback = () => {} + callbackify(() => this.compactDatafileAsync())(callback) } /** @@ -183,21 +164,8 @@ class Persistence { * Use an append-only format * * Do not use directly, it should only used by a {@link Datastore} instance. - * @param {string[]} newDocs Can be empty if no doc was updated/removed - * @param {NoParamCallback} [callback = () => {}] - * @protected - */ - persistNewState (newDocs, callback = () => {}) { - callbackify(this.persistNewStateAsync.bind(this))(newDocs, err => callback(err)) - } - - /** - * Async version of {@link Persistence#persistNewState} - * - * Do not use directly, it should only used by a {@link Datastore} instance. 
* @param {document[]} newDocs Can be empty if no doc was updated/removed * @return {Promise} - * @see Persistence#persistNewState */ async persistNewStateAsync (newDocs) { let toPersist = '' @@ -261,14 +229,6 @@ class Persistence { return { data: tdata, indexes: indexes } } - /** - * @callback Persistence~treatRawStreamCallback - * @param {?Error} err - * @param {?object} data - * @param {document[]} data.data - * @param {Object.} data.indexes - */ - /** * From a database's raw data stream, return the corresponding machine understandable collection * Is only used by a {@link Datastore} instance. @@ -279,66 +239,56 @@ class Persistence { * * Do not use directly, it should only used by a {@link Datastore} instance. * @param {Readable} rawStream - * @param {Persistence~treatRawStreamCallback} cb + * @return {Promise<{data: document[], indexes: Object.}>} + * @async * @protected */ - treatRawStream (rawStream, cb) { - const dataById = {} - const indexes = {} - - // Last line of every data file is usually blank so not really corrupt - let corruptItems = -1 - - const lineStream = byline(rawStream, { keepEmptyLines: true }) - let length = 0 - - lineStream.on('data', (line) => { - try { - const doc = model.deserialize(this.beforeDeserialization(line)) - if (doc._id) { - if (doc.$$deleted === true) delete dataById[doc._id] - else dataById[doc._id] = doc - } else if (doc.$$indexCreated && doc.$$indexCreated.fieldName != null) indexes[doc.$$indexCreated.fieldName] = doc.$$indexCreated - else if (typeof doc.$$indexRemoved === 'string') delete indexes[doc.$$indexRemoved] - } catch (e) { - corruptItems += 1 - } + treatRawStreamAsync (rawStream) { + return new Promise((resolve, reject) => { + const dataById = {} + + const indexes = {} + + // Last line of every data file is usually blank so not really corrupt + let corruptItems = -1 + + const lineStream = byline(rawStream, { keepEmptyLines: true }) + let length = 0 + + lineStream.on('data', (line) => { + try { + const doc = 
model.deserialize(this.beforeDeserialization(line)) + if (doc._id) { + if (doc.$$deleted === true) delete dataById[doc._id] + else dataById[doc._id] = doc + } else if (doc.$$indexCreated && doc.$$indexCreated.fieldName != null) indexes[doc.$$indexCreated.fieldName] = doc.$$indexCreated + else if (typeof doc.$$indexRemoved === 'string') delete indexes[doc.$$indexRemoved] + } catch (e) { + corruptItems += 1 + } - length++ - }) + length++ + }) - lineStream.on('end', () => { - // A bit lenient on corruption - if (length > 0 && corruptItems / length > this.corruptAlertThreshold) { - const err = new Error(`More than ${Math.floor(100 * this.corruptAlertThreshold)}% of the data file is corrupt, the wrong beforeDeserialization hook may be used. Cautiously refusing to start NeDB to prevent dataloss`) - cb(err, null) - return - } + lineStream.on('end', () => { + // A bit lenient on corruption + if (length > 0 && corruptItems / length > this.corruptAlertThreshold) { + const err = new Error(`More than ${Math.floor(100 * this.corruptAlertThreshold)}% of the data file is corrupt, the wrong beforeDeserialization hook may be used. Cautiously refusing to start NeDB to prevent dataloss`) + reject(err, null) + return + } - const data = Object.values(dataById) + const data = Object.values(dataById) - cb(null, { data, indexes: indexes }) - }) + resolve({ data, indexes: indexes }) + }) - lineStream.on('error', function (err) { - cb(err) + lineStream.on('error', function (err) { + reject(err, null) + }) }) } - /** - * Async version of {@link Persistence#treatRawStream}. - * - * Do not use directly, it should only used by a {@link Datastore} instance. 
- * @param {Readable} rawStream - * @return {Promise<{data: document[], indexes: Object.}>} - * @async - * @protected - * @see Persistence#treatRawStream - */ - treatRawStreamAsync (rawStream) { - return promisify(this.treatRawStream.bind(this))(rawStream) - } - /** * Load the database * 1) Create all indexes @@ -363,12 +313,12 @@ class Persistence { * @see Persistence#loadDatabase */ async loadDatabaseAsync () { - this.db.resetIndexes() + this.db._resetIndexes() // In-memory only datastore if (this.inMemoryOnly) return - await Persistence.ensureDirectoryExistsAsync(path.dirname(this.filename)) // TODO: maybe ignore error - await storage.ensureDatafileIntegrityAsync(this.filename) // TODO: maybe ignore error + await Persistence.ensureDirectoryExistsAsync(path.dirname(this.filename)) + await storage.ensureDatafileIntegrityAsync(this.filename) let treatedData if (storage.readFileStream) { @@ -387,9 +337,9 @@ class Persistence { // Fill cached database (i.e. all indexes) with data try { - this.db.resetIndexes(treatedData.data) + this.db._resetIndexes(treatedData.data) } catch (e) { - this.db.resetIndexes() // Rollback any index which didn't fail + this.db._resetIndexes() // Rollback any index which didn't fail throw e } @@ -397,54 +347,30 @@ class Persistence { this.db.executor.processBuffer() } - /** - * Check if a directory stat and create it on the fly if it is not the case. 
- * @param {string} dir - * @param {NoParamCallback} [callback = () => {}] - */ - static ensureDirectoryExists (dir, callback = () => {}) { - storage.mkdir(dir, { recursive: true }, err => { callback(err) }) + async dropDatabaseAsync () { + this.stopAutocompaction() // stop autocompaction + this.db.executor.ready = false // prevent queuing new tasks + this.db.executor.resetBuffer() // remove pending buffered tasks + await this.db.executor.queue.guardian // wait for the ongoing tasks to end + // remove indexes (which means remove data from memory) + this.db.indexes = {} + // add back _id index, otherwise it will fail + this.db.indexes._id = new Index({ fieldName: '_id', unique: true }) + // reset TTL on indexes + this.db.ttlIndexes = {} + + // remove datastore file + await this.db.executor(() => storage.unlinkAsync(this.filename), true) } /** - * Async version of {@link Persistence.ensureDirectoryExists}. + * Check if a directory stat and create it on the fly if it is not the case. * @param {string} dir * @return {Promise} - * @see Persistence.ensureDirectoryExists */ static async ensureDirectoryExistsAsync (dir) { await storage.mkdirAsync(dir, { recursive: true }) } - - /** - * Return the path the datafile if the given filename is relative to the directory where Node Webkit stores - * data for this application. 
Probably the best place to store data - * @param {string} appName - * @param {string} relativeFilename - * @return {string} - * @deprecated - */ - static getNWAppFilename (appName, relativeFilename) { - return deprecate(() => { - let home - - if (process.platform === 'win32' || process.platform === 'win64') { - home = process.env.LOCALAPPDATA || process.env.APPDATA - if (!home) throw new Error('Couldn\'t find the base application data folder') - home = path.join(home, appName) - } else if (process.platform === 'darwin') { - home = process.env.HOME - if (!home) throw new Error('Couldn\'t find the base application data directory') - home = path.join(home, 'Library', 'Application Support', appName) - } else if (process.platform === 'linux') { - home = process.env.HOME - if (!home) throw new Error('Couldn\'t find the base application data directory') - home = path.join(home, '.config', appName) - } else throw new Error(`Can't use the Node Webkit relative path for platform ${process.platform}`) - - return path.join(home, 'nedb-data', relativeFilename) - }, 'The getNWAppFilename static method is deprecated and will be removed in the next version. To get the path to the directory where Node Webkit stores the data for your app, use the internal nw.gui module like this require(\'nw.gui\').App.dataPath See https://github.com/rogerwang/node-webkit/issues/500')() - } } // Interface diff --git a/lib/storage.js b/lib/storage.js index c561e50..3dab3dc 100755 --- a/lib/storage.js +++ b/lib/storage.js @@ -6,29 +6,15 @@ * @see module:storageBrowser * @see module:storageReactNative * @module storage + * @private */ const fs = require('fs') const fsPromises = fs.promises const path = require('path') -const { callbackify, promisify } = require('util') const { Readable } = require('stream') /** - * @callback module:storage~existsCallback - * @param {boolean} exists - */ - -/** - * Callback returns true if file exists. 
- * @param {string} file - * @param {module:storage~existsCallback} cb - * @alias module:storage.exists - */ -// eslint-disable-next-line node/no-callback-literal -const exists = (file, cb) => fs.access(file, fs.constants.F_OK, (err) => { cb(!err) }) - -/** - * Async version of {@link module:storage.exists}. + * Returns true if file exists. * @param {string} file * @return {Promise} * @async @@ -38,41 +24,18 @@ const exists = (file, cb) => fs.access(file, fs.constants.F_OK, (err) => { cb(!e const existsAsync = file => fsPromises.access(file, fs.constants.F_OK).then(() => true, () => false) /** - * Node.js' [fs.rename]{@link https://nodejs.org/api/fs.html#fsrenameoldpath-newpath-callback}. - * @function - * @param {string} oldPath - * @param {string} newPath - * @param {NoParamCallback} c - * @return {void} - * @alias module:storage.rename - */ -const rename = fs.rename - -/** - * Async version of {@link module:storage.rename}. + * Node.js' [fsPromises.rename]{@link https://nodejs.org/api/fs.html#fspromisesrenameoldpath-newpath} * @function * @param {string} oldPath * @param {string} newPath * @return {Promise} * @alias module:storage.renameAsync * @async - * @see module:storage.rename */ const renameAsync = fsPromises.rename /** - * Node.js' [fs.writeFile]{@link https://nodejs.org/api/fs.html#fswritefilefile-data-options-callback}. - * @function - * @param {string} path - * @param {string} data - * @param {object} options - * @param {function} callback - * @alias module:storage.writeFile - */ -const writeFile = fs.writeFile - -/** - * Async version of {@link module:storage.writeFile}. + * Node.js' [fsPromises.writeFile]{@link https://nodejs.org/api/fs.html#fspromiseswritefilefile-data-options}. 
* @function * @param {string} path * @param {string} data @@ -80,7 +43,6 @@ const writeFile = fs.writeFile * @return {Promise} * @alias module:storage.writeFileAsync * @async - * @see module:storage.writeFile */ const writeFileAsync = fsPromises.writeFile @@ -95,38 +57,17 @@ const writeFileAsync = fsPromises.writeFile const writeFileStream = fs.createWriteStream /** - * Node.js' [fs.unlink]{@link https://nodejs.org/api/fs.html#fsunlinkpath-callback}. - * @function - * @param {string} path - * @param {function} callback - * @alias module:storage.unlink - */ -const unlink = fs.unlink - -/** - * Async version of {@link module:storage.unlink}. + * Node.js' [fsPromises.unlink]{@link https://nodejs.org/api/fs.html#fspromisesunlinkpath}. * @function * @param {string} path * @return {Promise} * @async * @alias module:storage.unlinkAsync - * @see module:storage.unlink */ const unlinkAsync = fsPromises.unlink /** - * Node.js' [fs.appendFile]{@link https://nodejs.org/api/fs.html#fsappendfilepath-data-options-callback}. - * @function - * @param {string} path - * @param {string} data - * @param {object} options - * @param {function} callback - * @alias module:storage.appendFile - */ -const appendFile = fs.appendFile - -/** - * Async version of {@link module:storage.appendFile}. + * Node.js' [fsPromises.appendFile]{@link https://nodejs.org/api/fs.html#fspromisesappendfilepath-data-options}. * @function * @param {string} path * @param {string} data @@ -134,29 +75,17 @@ const appendFile = fs.appendFile * @return {Promise} * @alias module:storage.appendFileAsync * @async - * @see module:storage.appendFile */ const appendFileAsync = fsPromises.appendFile /** - * Node.js' [fs.readFile]{@link https://nodejs.org/api/fs.html#fsreadfilepath-options-callback} - * @function - * @param {string} path - * @param {object} options - * @param {function} callback - * @alias module:storage.readFile - */ -const readFile = fs.readFile - -/** - * Async version of {@link module:storage.readFile}. 
+ * Node.js' [fsPromises.readFile]{@link https://nodejs.org/api/fs.html#fspromisesreadfilepath-options}. * @function * @param {string} path * @param {object} [options] * @return {Promise} * @alias module:storage.readFileAsync * @async - * @see module:storage.readFile */ const readFileAsync = fsPromises.readFile @@ -171,66 +100,35 @@ const readFileAsync = fsPromises.readFile const readFileStream = fs.createReadStream /** - * Node.js' [fs.mkdir]{@link https://nodejs.org/api/fs.html#fsmkdirpath-options-callback}. - * @function - * @param {string} path - * @param {object} options - * @param {function} callback - * @alias module:storage.mkdir - */ -const mkdir = fs.mkdir - -/** - * Async version of {@link module:storage.mkdir}. + * Node.js' [fsPromises.mkdir]{@link https://nodejs.org/api/fs.html#fspromisesmkdirpath-options}. * @function * @param {string} path * @param {object} options * @return {Promise} * @alias module:storage.mkdirAsync * @async - * @see module:storage.mkdir */ const mkdirAsync = fsPromises.mkdir /** - * Async version of {@link module:storage.ensureFileDoesntExist} + * Removes file if it exists. * @param {string} file * @return {Promise} * @alias module:storage.ensureFileDoesntExistAsync * @async - * @see module:storage.ensureFileDoesntExist */ const ensureFileDoesntExistAsync = async file => { if (await existsAsync(file)) await unlinkAsync(file) } -/** - * Removes file if it exists. - * @param {string} file - * @param {NoParamCallback} callback - * @alias module:storage.ensureFileDoesntExist - */ -const ensureFileDoesntExist = (file, callback) => callbackify(ensureFileDoesntExistAsync)(file, err => callback(err)) - /** * Flush data in OS buffer to storage if corresponding option is set. 
* @param {object|string} options If options is a string, it is assumed that the flush of the file (not dir) called options was requested * @param {string} [options.filename] * @param {boolean} [options.isDir = false] Optional, defaults to false - * @param {NoParamCallback} callback - * @alias module:storage.flushToStorage - */ -const flushToStorage = (options, callback) => callbackify(flushToStorageAsync)(options, callback) - -/** - * Async version of {@link module:storage.flushToStorage}. - * @param {object|string} options - * @param {string} [options.filename] - * @param {boolean} [options.isDir = false] * @return {Promise} * @alias module:storage.flushToStorageAsync * @async - * @see module:storage.flushToStorage */ const flushToStorageAsync = async (options) => { let filename @@ -247,17 +145,17 @@ const flushToStorageAsync = async (options) => { // except in the very rare event of the first time database is loaded and a crash happens if (flags === 'r' && (process.platform === 'win32' || process.platform === 'win64')) return - let fd, errorOnFsync, errorOnClose // TODO: sometimes it leaves some file descriptors open + let filehandle, errorOnFsync, errorOnClose try { - fd = await fsPromises.open(filename, flags) + filehandle = await fsPromises.open(filename, flags) try { - await fd.sync() + await filehandle.sync() } catch (errFS) { errorOnFsync = errFS } } finally { try { - await fd.close() + await filehandle.close() } catch (errC) { errorOnClose = errC } @@ -274,10 +172,11 @@ const flushToStorageAsync = async (options) => { * Fully write or rewrite the datafile. 
* @param {string} filename * @param {string[]} lines - * @param {NoParamCallback} [callback = () => {}] - * @alias module:storage.writeFileLines + * @return {Promise} + * @alias module:storage.writeFileLinesAsync + * @async */ -const writeFileLines = (filename, lines, callback = () => {}) => { +const writeFileLinesAsync = (filename, lines) => new Promise((resolve, reject) => { try { const stream = writeFileStream(filename) const readable = Readable.from(lines) @@ -285,46 +184,36 @@ const writeFileLines = (filename, lines, callback = () => {}) => { try { stream.write(line + '\n') } catch (err) { - callback(err) + reject(err) } }) readable.on('end', () => { - stream.close(callback) + stream.close(err => { + if (err) reject(err) + else resolve() + }) + }) + + readable.on('error', err => { + if (err) reject(err) + else resolve() + }) + + stream.on('error', err => { + if (err) reject(err) + else resolve() }) - readable.on('error', callback) - stream.on('error', callback) } catch (err) { - callback(err) + reject(err) } -} -/** - * Async version of {@link module:storage.writeFileLines}. - * @param {string} filename - * @param {string[]} lines - * @return {Promise} - * @alias module:storage.writeFileLinesAsync - * @async - * @see module:storage.writeFileLines - */ -const writeFileLinesAsync = (filename, lines) => promisify(writeFileLines)(filename, lines) +}) /** * Fully write or rewrite the datafile, immune to crashes during the write operation (data will not be lost). * @param {string} filename * @param {string[]} lines - * @param {NoParamCallback} [callback] Optional callback, signature: err - * @alias module:storage.crashSafeWriteFileLines - */ -const crashSafeWriteFileLines = (filename, lines, callback = () => {}) => { - callbackify(crashSafeWriteFileLinesAsync)(filename, lines, callback) -} -/** - * Async version of {@link module:storage.crashSafeWriteFileLines}. 
- * @param {string} filename - * @param {string[]} lines * @return {Promise} * @alias module:storage.crashSafeWriteFileLinesAsync - * @see module:storage.crashSafeWriteFileLines */ const crashSafeWriteFileLinesAsync = async (filename, lines) => { const tempFilename = filename + '~' @@ -346,17 +235,8 @@ const crashSafeWriteFileLinesAsync = async (filename, lines) => { /** * Ensure the datafile contains all the data, even if there was a crash during a full file write. * @param {string} filename - * @param {NoParamCallback} callback signature: err - * @alias module:storage.ensureDatafileIntegrity - */ -const ensureDatafileIntegrity = (filename, callback) => callbackify(ensureDatafileIntegrityAsync)(filename, callback) - -/** - * Async version of {@link module:storage.ensureDatafileIntegrity}. - * @param {string} filename * @return {Promise} * @alias module:storage.ensureDatafileIntegrityAsync - * @see module:storage.ensureDatafileIntegrity */ const ensureDatafileIntegrityAsync = async filename => { const tempFilename = filename + '~' @@ -373,41 +253,28 @@ const ensureDatafileIntegrityAsync = async filename => { } // Interface -module.exports.exists = exists module.exports.existsAsync = existsAsync -module.exports.rename = rename module.exports.renameAsync = renameAsync -module.exports.writeFile = writeFile module.exports.writeFileAsync = writeFileAsync -module.exports.writeFileLines = writeFileLines module.exports.writeFileLinesAsync = writeFileLinesAsync -module.exports.crashSafeWriteFileLines = crashSafeWriteFileLines module.exports.crashSafeWriteFileLinesAsync = crashSafeWriteFileLinesAsync -module.exports.appendFile = appendFile module.exports.appendFileAsync = appendFileAsync -module.exports.readFile = readFile module.exports.readFileAsync = readFileAsync -module.exports.unlink = unlink module.exports.unlinkAsync = unlinkAsync -module.exports.mkdir = mkdir module.exports.mkdirAsync = mkdirAsync -module.exports.readFileStream = writeFileStream 
module.exports.readFileStream = readFileStream -module.exports.flushToStorage = flushToStorage module.exports.flushToStorageAsync = flushToStorageAsync -module.exports.ensureDatafileIntegrity = ensureDatafileIntegrity module.exports.ensureDatafileIntegrityAsync = ensureDatafileIntegrityAsync -module.exports.ensureFileDoesntExist = ensureFileDoesntExist module.exports.ensureFileDoesntExistAsync = ensureFileDoesntExistAsync diff --git a/lib/utils.js b/lib/utils.js index ac64bf6..951df8f 100644 --- a/lib/utils.js +++ b/lib/utils.js @@ -3,6 +3,13 @@ * This replaces the underscore dependency. * * @module utils + * @private + */ + +/** + * @callback IterateeFunction + * @param {*} arg + * @return {*} */ /** @@ -12,7 +19,7 @@ * * Heavily inspired by {@link https://underscorejs.org/#uniq}. * @param {Array} array - * @param {function} [iteratee] transformation applied to every element before checking for duplicates. This will not + * @param {IterateeFunction} [iteratee] transformation applied to every element before checking for duplicates. This will not * transform the items in the result. * @return {Array} * @alias module:utils.uniq diff --git a/lib/waterfall.js b/lib/waterfall.js index fab25b4..bf2895d 100644 --- a/lib/waterfall.js +++ b/lib/waterfall.js @@ -1,5 +1,6 @@ /** * Responsible for sequentially executing actions on the database + * @private */ class Waterfall { /** @@ -11,21 +12,9 @@ class Waterfall { * * It will change any time `this.waterfall` is called. * - * Use {@link Waterfall#guardian} instead which retrievethe latest version of the guardian. * @type {Promise} - * @private */ - this._guardian = Promise.resolve() - } - - /** - * Getter that gives a Promise which resolves when all tasks up to when this function is called are done. - * - * This Promise cannot reject. 
- * @return {Promise} - */ - get guardian () { - return this._guardian + this.guardian = Promise.resolve() } /** @@ -35,7 +24,7 @@ class Waterfall { */ waterfall (func) { return (...args) => { - this._guardian = this.guardian.then(() => { + this.guardian = this.guardian.then(() => { return func(...args) .then(result => ({ error: false, result }), result => ({ error: true, result })) }) diff --git a/test/browser/nedb-browser.spec.js b/test/browser/nedb-browser.spec.js index 25743d7..24917a6 100755 --- a/test/browser/nedb-browser.spec.js +++ b/test/browser/nedb-browser.spec.js @@ -1,5 +1,5 @@ /* eslint-env mocha */ -/* global chai, Nedb */ +/* global chai, Nedb, testUtils */ /** * Testing the browser version of NeDB @@ -265,7 +265,7 @@ describe('Indexing', function () { db.insert({ a: 6 }, function () { db.insert({ a: 7 }, function () { // eslint-disable-next-line node/handle-callback-err - db.getCandidates({ a: 6 }, function (err, candidates) { + testUtils.callbackify(query => db._getCandidatesAsync(query))({ a: 6 }, function (err, candidates) { assert.strictEqual(candidates.length, 3) assert.isDefined(candidates.find(function (doc) { return doc.a === 4 })) assert.isDefined(candidates.find(function (doc) { return doc.a === 6 })) @@ -274,7 +274,7 @@ describe('Indexing', function () { db.ensureIndex({ fieldName: 'a' }) // eslint-disable-next-line node/handle-callback-err - db.getCandidates({ a: 6 }, function (err, candidates) { + testUtils.callbackify(query => db._getCandidatesAsync(query))({ a: 6 }, function (err, candidates) { assert.strictEqual(candidates.length, 1) assert.isDefined(candidates.find(function (doc) { return doc.a === 6 })) diff --git a/test/byline.test.js b/test/byline.test.js index fa07524..29edb05 100644 --- a/test/byline.test.js +++ b/test/byline.test.js @@ -46,7 +46,7 @@ describe('byline', function () { it('should work with streams2 API', function (done) { let stream = fs.createReadStream(localPath('empty.txt')) - stream = 
byline.createStream(stream) + stream = byline(stream) stream.on('readable', function () { while (stream.read() !== null) { diff --git a/test/cursor.async.test.js b/test/cursor.async.test.js index f4c3b4c..9178255 100755 --- a/test/cursor.async.test.js +++ b/test/cursor.async.test.js @@ -120,9 +120,9 @@ describe('Cursor Async', function () { compareStrings: function (a, b) { return a.length - b.length } }) - db.insertAsync({ name: 'alpha' }) // TODO was not awaited - db.insertAsync({ name: 'charlie' }) // TODO was not awaited - db.insertAsync({ name: 'zulu' }) // TODO was not awaited + await db.insertAsync({ name: 'alpha' }) + await db.insertAsync({ name: 'charlie' }) + await db.insertAsync({ name: 'zulu' }) const docs = await db.findAsync({}).sort({ name: 1 }) assert.equal(docs.map(x => x.name)[0], 'zulu') @@ -130,7 +130,7 @@ describe('Cursor Async', function () { assert.equal(docs.map(x => x.name)[2], 'charlie') delete db.compareStrings - const docs2 = await db.find({}).sort({ name: 1 }) + const docs2 = await db.findAsync({}).sort({ name: 1 }) assert.equal(docs2.map(x => x.name)[0], 'alpha') assert.equal(docs2.map(x => x.name)[1], 'charlie') assert.equal(docs2.map(x => x.name)[2], 'zulu') diff --git a/test/cursor.test.js b/test/cursor.test.js index fbce0c4..64a837c 100755 --- a/test/cursor.test.js +++ b/test/cursor.test.js @@ -7,6 +7,7 @@ const { each, waterfall } = require('./utils.test.js') const Datastore = require('../lib/datastore') const Persistence = require('../lib/persistence') const Cursor = require('../lib/cursor') +const { callbackify } = require('util') const { assert } = chai chai.should() @@ -21,7 +22,7 @@ describe('Cursor', function () { waterfall([ function (cb) { - Persistence.ensureDirectoryExists(path.dirname(testDb), function () { + callbackify((dirname) => Persistence.ensureDirectoryExistsAsync(dirname))(path.dirname(testDb), function () { fs.access(testDb, fs.constants.F_OK, function (err) { if (!err) { fs.unlink(testDb, cb) diff --git 
a/test/db.async.test.js b/test/db.async.test.js index 0625fc9..6ae55e4 100644 --- a/test/db.async.test.js +++ b/test/db.async.test.js @@ -53,7 +53,7 @@ describe('Database async', function () { await fs.writeFile(autoDb, fileStr, 'utf8') const db = new Datastore({ filename: autoDb, autoload: true }) - const docs = await db.find({}) + const docs = await db.findAsync({}) assert.equal(docs.length, 2) }) @@ -300,7 +300,7 @@ describe('Database async', function () { await d.insertAsync({ tf: 6 }) const _doc2 = await d.insertAsync({ tf: 4, an: 'other' }) await d.insertAsync({ tf: 9 }) - const data = await d.getCandidatesAsync({ r: 6, tf: 4 }) + const data = await d._getCandidatesAsync({ r: 6, tf: 4 }) const doc1 = data.find(d => d._id === _doc1._id) const doc2 = data.find(d => d._id === _doc2._id) @@ -310,12 +310,12 @@ describe('Database async', function () { }) it('Can use an index to get docs with a $in match', async () => { - await d.ensureIndex({ fieldName: 'tf' }) + await d.ensureIndexAsync({ fieldName: 'tf' }) await d.insertAsync({ tf: 4 }) const _doc1 = await d.insertAsync({ tf: 6 }) await d.insertAsync({ tf: 4, an: 'other' }) const _doc2 = await d.insertAsync({ tf: 9 }) - const data = await d.getCandidatesAsync({ r: 6, tf: { $in: [6, 9, 5] } }) + const data = await d._getCandidatesAsync({ r: 6, tf: { $in: [6, 9, 5] } }) const doc1 = data.find(d => d._id === _doc1._id) const doc2 = data.find(d => d._id === _doc2._id) @@ -330,7 +330,7 @@ describe('Database async', function () { const _doc2 = await d.insertAsync({ tf: 6 }) const _doc3 = await d.insertAsync({ tf: 4, an: 'other' }) const _doc4 = await d.insertAsync({ tf: 9 }) - const data = await d.getCandidatesAsync({ r: 6, notf: { $in: [6, 9, 5] } }) + const data = await d._getCandidatesAsync({ r: 6, notf: { $in: [6, 9, 5] } }) const doc1 = data.find(d => d._id === _doc1._id) const doc2 = data.find(d => d._id === _doc2._id) const doc3 = data.find(d => d._id === _doc3._id) @@ -349,7 +349,7 @@ describe('Database async', 
function () { const _doc2 = await d.insertAsync({ tf: 6 }) await d.insertAsync({ tf: 4, an: 'other' }) const _doc4 = await d.insertAsync({ tf: 9 }) - const data = await d.getCandidatesAsync({ r: 6, tf: { $lte: 9, $gte: 6 } }) + const data = await d._getCandidatesAsync({ r: 6, tf: { $lte: 9, $gte: 6 } }) const doc2 = data.find(d => d._id === _doc2._id) const doc4 = data.find(d => d._id === _doc4._id) @@ -448,12 +448,12 @@ describe('Database async', function () { await d.insertAsync({ somedata: 'again', plus: 'additional data' }) await d.insertAsync({ somedata: 'again' }) // Test with query that will return docs - const doc = await d.findOne({ somedata: 'ok' }) + const doc = await d.findOneAsync({ somedata: 'ok' }) assert.equal(Object.keys(doc).length, 2) assert.equal(doc.somedata, 'ok') assert.notEqual(doc._id, undefined) // Test with query that doesn't match anything - const doc2 = await d.findOne({ somedata: 'nope' }) + const doc2 = await d.findOneAsync({ somedata: 'nope' }) assert.equal(doc2, null) }) @@ -462,28 +462,28 @@ describe('Database async', function () { const date2 = new Date(9999) await d.insertAsync({ now: date1, sth: { name: 'nedb' } }) - const doc1 = await d.findOne({ now: date1 }) + const doc1 = await d.findOneAsync({ now: date1 }) assert.equal(doc1.sth.name, 'nedb') - const doc2 = await d.findOne({ now: date2 }) + const doc2 = await d.findOneAsync({ now: date2 }) assert.equal(doc2, null) - const doc3 = await d.findOne({ sth: { name: 'nedb' } }) + const doc3 = await d.findOneAsync({ sth: { name: 'nedb' } }) assert.equal(doc3.sth.name, 'nedb') - const doc4 = await d.findOne({ sth: { name: 'other' } }) + const doc4 = await d.findOneAsync({ sth: { name: 'other' } }) assert.equal(doc4, null) }) it('Can use dot-notation to query subfields', async () => { await d.insertAsync({ greeting: { english: 'hello' } }) - const doc1 = await d.findOne({ 'greeting.english': 'hello' }) + const doc1 = await d.findOneAsync({ 'greeting.english': 'hello' }) 
assert.equal(doc1.greeting.english, 'hello') - const doc2 = await d.findOne({ 'greeting.english': 'hellooo' }) + const doc2 = await d.findOneAsync({ 'greeting.english': 'hellooo' }) assert.equal(doc2, null) - const doc3 = await d.findOne({ 'greeting.englis': 'hello' }) + const doc3 = await d.findOneAsync({ 'greeting.englis': 'hello' }) assert.equal(doc3, null) }) @@ -551,11 +551,11 @@ describe('Database async', function () { assert.equal(doc2.hello, 'home') // And a skip - const doc3 = await d.findOne({ a: { $gt: 14 } }).sort({ a: 1 }).skip(1) + const doc3 = await d.findOneAsync({ a: { $gt: 14 } }).sort({ a: 1 }).skip(1) assert.equal(doc3.hello, 'earth') // No result - const doc4 = await d.findOne({ a: { $gt: 14 } }).sort({ a: 1 }).skip(2) + const doc4 = await d.findOneAsync({ a: { $gt: 14 } }).sort({ a: 1 }).skip(2) assert.equal(doc4, null) }) @@ -795,7 +795,7 @@ describe('Database async', function () { }) it('If the update query contains modifiers, it is applied to the object resulting from removing all operators from the find query 1', async () => { - await d.update({ $or: [{ a: 4 }, { a: 5 }] }, { + await d.updateAsync({ $or: [{ a: 4 }, { a: 5 }] }, { $set: { hello: 'world' }, $inc: { bloup: 3 } // eslint-disable-next-line node/handle-callback-err @@ -809,7 +809,7 @@ describe('Database async', function () { }) it('If the update query contains modifiers, it is applied to the object resulting from removing all operators from the find query 2', async () => { - await d.update({ $or: [{ a: 4 }, { a: 5 }], cac: 'rrr' }, { + await d.updateAsync({ $or: [{ a: 4 }, { a: 5 }], cac: 'rrr' }, { $set: { hello: 'world' }, $inc: { bloup: 3 } // eslint-disable-next-line node/handle-callback-err @@ -845,7 +845,7 @@ describe('Database async', function () { }, { multi: false }) assert.equal(numAffected, 1) - const doc = await d.findOne({ _id: id }) + const doc = await d.findOneAsync({ _id: id }) assert.equal(Object.keys(doc).length, 3) assert.equal(doc._id, id) 
assert.equal(doc.something, 'changed') @@ -874,13 +874,13 @@ describe('Database async', function () { await d.insertAsync({ bloup: { blip: 'blap', other: true } }) // Correct methos await d.updateAsync({}, { $set: { 'bloup.blip': 'hello' } }, {}) - const doc = await d.findOne({}) + const doc = await d.findOneAsync({}) assert.equal(doc.bloup.blip, 'hello') assert.equal(doc.bloup.other, true) // Wrong await d.updateAsync({}, { $set: { bloup: { blip: 'ola' } } }, {}) - const doc2 = await d.findOne({}) + const doc2 = await d.findOneAsync({}) assert.equal(doc2.bloup.blip, 'ola') assert.equal(doc2.bloup.other, undefined) // This information was lost }) @@ -977,7 +977,7 @@ describe('Database async', function () { }) it('If a multi update fails on one document, previous updates should be rolled back', async () => { - d.ensureIndexAsync({ fieldName: 'a' }) // TODO should be awaited, but was not in original tests + await d.ensureIndexAsync({ fieldName: 'a' }) const doc1 = await d.insertAsync({ a: 4 }) const doc2 = await d.insertAsync({ a: 5 }) const doc3 = await d.insertAsync({ a: 'abc' }) @@ -1002,7 +1002,7 @@ describe('Database async', function () { }) it('If an index constraint is violated by an update, all changes should be rolled back', async () => { - d.ensureIndex({ fieldName: 'a', unique: true }) // TODO should be awaited, but was not in original tests + await d.ensureIndexAsync({ fieldName: 'a', unique: true }) const doc1 = await d.insertAsync({ a: 4 }) const doc2 = await d.insertAsync({ a: 5 }) // With this query, candidates are always returned in the order 4, 5, 'abc' so it's always the last one which fails @@ -1067,20 +1067,20 @@ describe('Database async', function () { it('createdAt property is unchanged and updatedAt correct after an update, even a complete document replacement', async () => { const d2 = new Datastore({ inMemoryOnly: true, timestampData: true }) - d2.insertAsync({ a: 1 }) // TODO probably should await but was not awaited in original tests + 
await d2.insertAsync({ a: 1 }) const doc = await d2.findOneAsync({ a: 1 }) const createdAt = doc.createdAt.getTime() // Modifying update await wait(20) - d2.updateAsync({ a: 1 }, { $set: { b: 2 } }, {}) // TODO probably should await but was not awaited in original tests + await d2.updateAsync({ a: 1 }, { $set: { b: 2 } }, {}) const doc2 = await d2.findOneAsync({ a: 1 }) assert.equal(doc2.createdAt.getTime(), createdAt) assert.ok(Date.now() - doc2.updatedAt.getTime() < 5) // Complete replacement await wait(20) - d2.update({ a: 1 }, { c: 3 }, {}) // TODO probably should await but was not awaited in original tests + await d2.updateAsync({ a: 1 }, { c: 3 }, {}) const doc3 = await d2.findOneAsync({ c: 3 }) assert.equal(doc3.createdAt.getTime(), createdAt) assert.ok(Date.now() - doc3.updatedAt.getTime() < 5) @@ -1089,8 +1089,8 @@ describe('Database async', function () { describe('Callback signature', function () { it('Regular update, multi false', async () => { - d.insertAsync({ a: 1 }) // TODO probably should await but was not awaited in original tests - d.insertAsync({ a: 2 }) // TODO probably should await but was not awaited in original tests + await d.insertAsync({ a: 1 }) + await d.insertAsync({ a: 2 }) // returnUpdatedDocs set to false const { numAffected, affectedDocuments, upsert } = await d.updateAsync({ a: 1 }, { $set: { b: 20 } }, {}) @@ -1111,8 +1111,8 @@ describe('Database async', function () { }) it('Regular update, multi true', async () => { - d.insertAsync({ a: 1 }) // TODO probably should await but was not awaited in original tests - d.insertAsync({ a: 2 }) // TODO probably should await but was not awaited in original tests + await d.insertAsync({ a: 1 }) + await d.insertAsync({ a: 2 }) // returnUpdatedDocs set to false const { @@ -1139,8 +1139,8 @@ describe('Database async', function () { }) it('Upsert', async () => { - d.insertAsync({ a: 1 }) // TODO probably should await but was not awaited in original tests - d.insertAsync({ a: 2 }) // TODO probably 
should await but was not awaited in original tests + await d.insertAsync({ a: 1 }) + await d.insertAsync({ a: 2 }) // Upsert flag not set const { numAffected, affectedDocuments, upsert } = await d.updateAsync({ a: 3 }, { $set: { b: 20 } }, {}) @@ -1280,7 +1280,7 @@ describe('Database async', function () { assert.deepEqual(Object.keys(d.indexes), ['_id']) - d.ensureIndexAsync({ fieldName: 'z' }) // TODO: was not async + await d.ensureIndexAsync({ fieldName: 'z' }) assert.equal(d.indexes.z.fieldName, 'z') assert.equal(d.indexes.z.unique, false) assert.equal(d.indexes.z.sparse, false) @@ -1335,7 +1335,7 @@ describe('Database async', function () { await d.updateAsync({ z: '1' }, { $set: { yes: 'yep' } }, {}) assert.deepEqual(Object.keys(d.indexes), ['_id']) - d.ensureIndexAsync({ fieldName: 'z' }) // TODO was not awaited + await d.ensureIndexAsync({ fieldName: 'z' }) assert.equal(d.indexes.z.fieldName, 'z') assert.equal(d.indexes.z.unique, false) assert.equal(d.indexes.z.sparse, false) @@ -1367,7 +1367,7 @@ describe('Database async', function () { assert.equal(d.getAllData().length, 0) - d.ensureIndexAsync({ fieldName: 'z' }) // TODO was not awaited + await d.ensureIndexAsync({ fieldName: 'z' }) assert.equal(d.indexes.z.fieldName, 'z') assert.equal(d.indexes.z.unique, false) assert.equal(d.indexes.z.sparse, false) @@ -1426,17 +1426,17 @@ describe('Database async', function () { assert.equal(d.getAllData().length, 0) - d.ensureIndexAsync({ fieldName: 'z', unique: true }) // TODO was not awaited + await d.ensureIndexAsync({ fieldName: 'z', unique: true }) assert.equal(d.indexes.z.tree.getNumberOfKeys(), 0) await fs.writeFile(testDb, rawData, 'utf8') await assert.rejects(() => d.loadDatabaseAsync(), err => { assert.equal(err.errorType, 'uniqueViolated') assert.equal(err.key, '1') - assert.equal(d.getAllData().length, 0) // TODO wtf ? - assert.equal(d.indexes.z.tree.getNumberOfKeys(), 0) // TODO wtf ? 
return true }) + assert.equal(d.getAllData().length, 0) + assert.equal(d.indexes.z.tree.getNumberOfKeys(), 0) }) it('If a unique constraint is not respected, ensureIndex will return an error and not create an index', async () => { @@ -1465,7 +1465,7 @@ describe('Database async', function () { describe('Indexing newly inserted documents', function () { it('Newly inserted documents are indexed', async () => { - d.ensureIndexAsync({ fieldName: 'z' }) // TODO was not awaited + await d.ensureIndexAsync({ fieldName: 'z' }) assert.equal(d.indexes.z.tree.getNumberOfKeys(), 0) const newDoc = await d.insertAsync({ a: 2, z: 'yes' }) @@ -1478,8 +1478,8 @@ describe('Database async', function () { }) it('If multiple indexes are defined, the document is inserted in all of them', async () => { - d.ensureIndexAsync({ fieldName: 'z' }) // TODO was not awaited - d.ensureIndexAsync({ fieldName: 'ya' }) // TODO was not awaited + await d.ensureIndexAsync({ fieldName: 'z' }) + await d.ensureIndexAsync({ fieldName: 'ya' }) assert.equal(d.indexes.z.tree.getNumberOfKeys(), 0) const newDoc = await d.insertAsync({ a: 2, z: 'yes', ya: 'indeed' }) @@ -1496,7 +1496,7 @@ describe('Database async', function () { }) it('Can insert two docs at the same key for a non unique index', async () => { - d.ensureIndexAsync({ fieldName: 'z' }) // TODO was not awaited + await d.ensureIndexAsync({ fieldName: 'z' }) assert.equal(d.indexes.z.tree.getNumberOfKeys(), 0) const newDoc = await d.insertAsync({ a: 2, z: 'yes' }) @@ -1509,7 +1509,7 @@ describe('Database async', function () { }) it('If the index has a unique constraint, an error is thrown if it is violated and the data is not modified', async () => { - d.ensureIndexAsync({ fieldName: 'z', unique: true }) // TODO was not awaited + await d.ensureIndexAsync({ fieldName: 'z', unique: true }) assert.equal(d.indexes.z.tree.getNumberOfKeys(), 0) const newDoc = await d.insertAsync({ a: 2, z: 'yes' }) @@ -1533,9 +1533,9 @@ describe('Database async', function () { 
}) it('If an index has a unique constraint, other indexes cannot be modified when it raises an error', async () => { - d.ensureIndexAsync({ fieldName: 'nonu1' }) // TODO was not awaited - d.ensureIndexAsync({ fieldName: 'uni', unique: true }) // TODO was not awaited - d.ensureIndexAsync({ fieldName: 'nonu2' }) // TODO was not awaited + await d.ensureIndexAsync({ fieldName: 'nonu1' }) + await d.ensureIndexAsync({ fieldName: 'uni', unique: true }) + await d.ensureIndexAsync({ fieldName: 'nonu2' }) const newDoc = await d.insertAsync({ nonu1: 'yes', nonu2: 'yes2', uni: 'willfail' }) assert.equal(d.indexes.nonu1.tree.getNumberOfKeys(), 1) @@ -1558,7 +1558,7 @@ describe('Database async', function () { }) it('Unique indexes prevent you from inserting two docs where the field is undefined except if theyre sparse', async () => { - d.ensureIndexAsync({ fieldName: 'zzz', unique: true }) // TODO was not awaited + await d.ensureIndexAsync({ fieldName: 'zzz', unique: true }) assert.equal(d.indexes.zzz.tree.getNumberOfKeys(), 0) const newDoc = await d.insertAsync({ a: 2, z: 'yes' }) @@ -1571,7 +1571,7 @@ describe('Database async', function () { return true }) - d.ensureIndexAsync({ fieldName: 'yyy', unique: true, sparse: true }) // TODO was not awaited + await d.ensureIndexAsync({ fieldName: 'yyy', unique: true, sparse: true }) await d.insertAsync({ a: 5, z: 'other', zzz: 'set' }) assert.equal(d.indexes.yyy.getAll().length, 0) // Nothing indexed @@ -1579,8 +1579,8 @@ describe('Database async', function () { }) it('Insertion still works as before with indexing', async () => { - d.ensureIndexAsync({ fieldName: 'a' }) // TODO was not awaited - d.ensureIndexAsync({ fieldName: 'b' }) // TODO was not awaited + await d.ensureIndexAsync({ fieldName: 'a' }) + await d.ensureIndexAsync({ fieldName: 'b' }) const doc1 = await d.insertAsync({ a: 1, b: 'hello' }) const doc2 = await d.insertAsync({ a: 2, b: 'si' }) @@ -1590,7 +1590,7 @@ describe('Database async', function () { }) it('All indexes 
point to the same data as the main index on _id', async () => { - d.ensureIndexAsync({ fieldName: 'a' }) // TODO was not awaited + await d.ensureIndexAsync({ fieldName: 'a' }) const doc1 = await d.insertAsync({ a: 1, b: 'hello' }) const doc2 = await d.insertAsync({ a: 2, b: 'si' }) @@ -1608,7 +1608,7 @@ describe('Database async', function () { }) it('If a unique constraint is violated, no index is changed, including the main one', async () => { - d.ensureIndexAsync({ fieldName: 'a', unique: true }) // TODO was not awaited + await d.ensureIndexAsync({ fieldName: 'a', unique: true }) const doc1 = await d.insertAsync({ a: 1, b: 'hello' }) await assert.rejects(() => d.insertAsync({ a: 1, b: 'si' })) @@ -1627,7 +1627,7 @@ describe('Database async', function () { describe('Updating indexes upon document update', function () { it('Updating docs still works as before with indexing', async () => { - d.ensureIndexAsync({ fieldName: 'a' }) // TODO: not awaited + await d.ensureIndexAsync({ fieldName: 'a' }) const _doc1 = await d.insertAsync({ a: 1, b: 'hello' }) const _doc2 = await d.insertAsync({ a: 2, b: 'si' }) @@ -1658,8 +1658,8 @@ describe('Database async', function () { }) it('Indexes get updated when a document (or multiple documents) is updated', async () => { - d.ensureIndexAsync({ fieldName: 'a' }) // TODO: not awaited - d.ensureIndexAsync({ fieldName: 'b' }) // TODO: not awaited + await d.ensureIndexAsync({ fieldName: 'a' }) + await d.ensureIndexAsync({ fieldName: 'b' }) const doc1 = await d.insertAsync({ a: 1, b: 'hello' }) const doc2 = await d.insertAsync({ a: 2, b: 'si' }) @@ -1711,9 +1711,9 @@ describe('Database async', function () { }) it('If a simple update violates a contraint, all changes are rolled back and an error is thrown', async () => { - d.ensureIndexAsync({ fieldName: 'a', unique: true }) // TODO: not awaited - d.ensureIndexAsync({ fieldName: 'b', unique: true }) // TODO: not awaited - d.ensureIndexAsync({ fieldName: 'c', unique: true }) // TODO: not 
awaited + await d.ensureIndexAsync({ fieldName: 'a', unique: true }) + await d.ensureIndexAsync({ fieldName: 'b', unique: true }) + await d.ensureIndexAsync({ fieldName: 'c', unique: true }) const _doc1 = await d.insertAsync({ a: 1, b: 10, c: 100 }) const _doc2 = await d.insertAsync({ a: 2, b: 20, c: 200 }) @@ -1752,9 +1752,9 @@ describe('Database async', function () { }) it('If a multi update violates a contraint, all changes are rolled back and an error is thrown', async () => { - d.ensureIndexAsync({ fieldName: 'a', unique: true }) // TODO: was not awaited - d.ensureIndexAsync({ fieldName: 'b', unique: true }) // TODO: was not awaited - d.ensureIndexAsync({ fieldName: 'c', unique: true }) // TODO: was not awaited + await d.ensureIndexAsync({ fieldName: 'a', unique: true }) + await d.ensureIndexAsync({ fieldName: 'b', unique: true }) + await d.ensureIndexAsync({ fieldName: 'c', unique: true }) const _doc1 = await d.insertAsync({ a: 1, b: 10, c: 100 }) const _doc2 = await d.insertAsync({ a: 2, b: 20, c: 200 }) @@ -1798,7 +1798,7 @@ describe('Database async', function () { describe('Updating indexes upon document remove', function () { it('Removing docs still works as before with indexing', async () => { - d.ensureIndexAsync({ fieldName: 'a' }) // TODO: was not awaited + await d.ensureIndexAsync({ fieldName: 'a' }) await d.insertAsync({ a: 1, b: 'hello' }) const _doc2 = await d.insertAsync({ a: 2, b: 'si' }) @@ -1822,8 +1822,8 @@ describe('Database async', function () { }) it('Indexes get updated when a document (or multiple documents) is removed', async () => { - d.ensureIndexAsync({ fieldName: 'a' }) // TODO: was not awaited - d.ensureIndexAsync({ fieldName: 'b' }) // TODO: was not awaited + await d.ensureIndexAsync({ fieldName: 'a' }) + await d.ensureIndexAsync({ fieldName: 'b' }) await d.insertAsync({ a: 1, b: 'hello' }) const doc2 = await d.insertAsync({ a: 2, b: 'si' }) @@ -2027,9 +2027,9 @@ describe('Database async', function () { }) // ==== End of 
'Persisting indexes' ==== it('Results of getMatching should never contain duplicates', async () => { - d.ensureIndexAsync({ fieldName: 'bad' }) // TODO: was not awaited + await d.ensureIndexAsync({ fieldName: 'bad' }) await d.insertAsync({ bad: ['a', 'b'] }) - const res = await d.getCandidatesAsync({ bad: { $in: ['a', 'b'] } }) + const res = await d._getCandidatesAsync({ bad: { $in: ['a', 'b'] } }) assert.equal(res.length, 1) }) }) // ==== End of 'Using indexes' ==== // diff --git a/test/db.test.js b/test/db.test.js index fbac065..675f5d3 100755 --- a/test/db.test.js +++ b/test/db.test.js @@ -7,6 +7,7 @@ const { apply, each, waterfall } = require('./utils.test.js') const model = require('../lib/model') const Datastore = require('../lib/datastore') const Persistence = require('../lib/persistence') +const { callbackify } = require('util') const reloadTimeUpperBound = 60 // In ms, an upper bound for the reload time used to check createdAt and updatedAt const { assert } = chai @@ -22,7 +23,7 @@ describe('Database', function () { waterfall([ function (cb) { - Persistence.ensureDirectoryExists(path.dirname(testDb), function () { + callbackify((dirname) => Persistence.ensureDirectoryExistsAsync(dirname))(path.dirname(testDb), function () { fs.access(testDb, fs.constants.FS_OK, function (err) { if (!err) { fs.unlink(testDb, cb) @@ -472,7 +473,7 @@ describe('Database', function () { d.insert({ tf: 4, an: 'other' }, function (err, _doc2) { d.insert({ tf: 9 }, function () { // eslint-disable-next-line node/handle-callback-err - d.getCandidates({ r: 6, tf: 4 }, function (err, data) { + callbackify(query => d._getCandidatesAsync(query))({ r: 6, tf: 4 }, function (err, data) { const doc1 = data.find(function (d) { return d._id === _doc1._id }) const doc2 = data.find(function (d) { return d._id === _doc2._id }) @@ -501,7 +502,7 @@ describe('Database', function () { // eslint-disable-next-line node/handle-callback-err d.insert({ tf: 9 }, function (err, _doc2) { // 
eslint-disable-next-line node/handle-callback-err - d.getCandidates({ r: 6, tf: { $in: [6, 9, 5] } }, function (err, data) { + callbackify(query => d._getCandidatesAsync(query))({ r: 6, tf: { $in: [6, 9, 5] } }, function (err, data) { const doc1 = data.find(function (d) { return d._id === _doc1._id }) const doc2 = data.find(function (d) { return d._id === _doc2._id }) @@ -530,7 +531,7 @@ describe('Database', function () { // eslint-disable-next-line node/handle-callback-err d.insert({ tf: 9 }, function (err, _doc4) { // eslint-disable-next-line node/handle-callback-err - d.getCandidates({ r: 6, notf: { $in: [6, 9, 5] } }, function (err, data) { + callbackify(query => d._getCandidatesAsync(query))({ r: 6, notf: { $in: [6, 9, 5] } }, function (err, data) { const doc1 = data.find(function (d) { return d._id === _doc1._id }) const doc2 = data.find(function (d) { return d._id === _doc2._id }) const doc3 = data.find(function (d) { return d._id === _doc3._id }) @@ -563,7 +564,7 @@ describe('Database', function () { // eslint-disable-next-line node/handle-callback-err d.insert({ tf: 9 }, function (err, _doc4) { // eslint-disable-next-line node/handle-callback-err - d.getCandidates({ r: 6, tf: { $lte: 9, $gte: 6 } }, function (err, data) { + callbackify(query => d._getCandidatesAsync(query))({ r: 6, tf: { $lte: 9, $gte: 6 } }, function (err, data) { const doc2 = data.find(function (d) { return d._id === _doc2._id }) const doc4 = data.find(function (d) { return d._id === _doc4._id }) @@ -2178,26 +2179,27 @@ describe('Database', function () { d.getAllData().length.should.equal(0) - d.ensureIndex({ fieldName: 'z' }) - d.indexes.z.fieldName.should.equal('z') - d.indexes.z.unique.should.equal(false) - d.indexes.z.sparse.should.equal(false) - d.indexes.z.tree.getNumberOfKeys().should.equal(0) + d.ensureIndex({ fieldName: 'z' }, function () { + d.indexes.z.fieldName.should.equal('z') + d.indexes.z.unique.should.equal(false) + d.indexes.z.sparse.should.equal(false) + 
d.indexes.z.tree.getNumberOfKeys().should.equal(0) - fs.writeFile(testDb, rawData, 'utf8', function () { - d.loadDatabase(function () { - const doc1 = d.getAllData().find(function (doc) { return doc.z === '1' }) - const doc2 = d.getAllData().find(function (doc) { return doc.z === '2' }) - const doc3 = d.getAllData().find(function (doc) { return doc.z === '3' }) + fs.writeFile(testDb, rawData, 'utf8', function () { + d.loadDatabase(function () { + const doc1 = d.getAllData().find(function (doc) { return doc.z === '1' }) + const doc2 = d.getAllData().find(function (doc) { return doc.z === '2' }) + const doc3 = d.getAllData().find(function (doc) { return doc.z === '3' }) - d.getAllData().length.should.equal(3) + d.getAllData().length.should.equal(3) - d.indexes.z.tree.getNumberOfKeys().should.equal(3) - d.indexes.z.tree.search('1')[0].should.equal(doc1) - d.indexes.z.tree.search('2')[0].should.equal(doc2) - d.indexes.z.tree.search('3')[0].should.equal(doc3) + d.indexes.z.tree.getNumberOfKeys().should.equal(3) + d.indexes.z.tree.search('1')[0].should.equal(doc1) + d.indexes.z.tree.search('2')[0].should.equal(doc2) + d.indexes.z.tree.search('3')[0].should.equal(doc3) - done() + done() + }) }) }) }) @@ -2248,18 +2250,19 @@ describe('Database', function () { d.getAllData().length.should.equal(0) - d.ensureIndex({ fieldName: 'z', unique: true }) - d.indexes.z.tree.getNumberOfKeys().should.equal(0) + d.ensureIndex({ fieldName: 'z', unique: true }, function () { + d.indexes.z.tree.getNumberOfKeys().should.equal(0) - fs.writeFile(testDb, rawData, 'utf8', function () { - d.loadDatabase(function (err) { - err.should.not.equal(null) - err.errorType.should.equal('uniqueViolated') - err.key.should.equal('1') - d.getAllData().length.should.equal(0) - d.indexes.z.tree.getNumberOfKeys().should.equal(0) + fs.writeFile(testDb, rawData, 'utf8', function () { + d.loadDatabase(function (err) { + assert.isNotNull(err) + err.errorType.should.equal('uniqueViolated') + 
err.key.should.equal('1') + d.getAllData().length.should.equal(0) + d.indexes.z.tree.getNumberOfKeys().should.equal(0) - done() + done() + }) }) }) }) @@ -3022,7 +3025,7 @@ describe('Database', function () { d.ensureIndex({ fieldName: 'bad' }) d.insert({ bad: ['a', 'b'] }, function () { // eslint-disable-next-line node/handle-callback-err - d.getCandidates({ bad: { $in: ['a', 'b'] } }, function (err, res) { + callbackify(query => d._getCandidatesAsync(query))({ bad: { $in: ['a', 'b'] } }, function (err, res) { res.length.should.equal(1) done() }) diff --git a/test/executor.test.js b/test/executor.test.js index a9d2de9..0b537b8 100755 --- a/test/executor.test.js +++ b/test/executor.test.js @@ -6,6 +6,7 @@ const path = require('path') const { waterfall } = require('./utils.test.js') const Datastore = require('../lib/datastore') const Persistence = require('../lib/persistence') +const { callbackify } = require('util') const { assert } = chai chai.should() @@ -153,7 +154,7 @@ describe('Executor', function () { waterfall([ function (cb) { - Persistence.ensureDirectoryExists(path.dirname(testDb), function () { + callbackify((dirname) => Persistence.ensureDirectoryExistsAsync(dirname))(path.dirname(testDb), function () { fs.access(testDb, fs.constants.F_OK, function (err) { if (!err) { fs.unlink(testDb, cb) diff --git a/test/persistence.async.test.js b/test/persistence.async.test.js index 3ff9e0e..32e6399 100755 --- a/test/persistence.async.test.js +++ b/test/persistence.async.test.js @@ -334,6 +334,29 @@ describe('Persistence async', function () { assert.equal(doc2Reloaded, undefined) }) + it('Calling loadDatabase after the datafile was modified loads the new data', async () => { + await d.loadDatabaseAsync() + await d.insertAsync({ a: 1 }) + await d.insertAsync({ a: 2 }) + const data = d.getAllData() + const doc1 = data.find(doc => doc.a === 1) + const doc2 = data.find(doc => doc.a === 2) + assert.equal(data.length, 2) + assert.equal(doc1.a, 1) + assert.equal(doc2.a, 2)
+ + await fs.writeFile(testDb, '{"a":3,"_id":"aaa"}', 'utf8') + await d.loadDatabaseAsync() + const dataReloaded = d.getAllData() + const doc1Reloaded = dataReloaded.find(function (doc) { return doc.a === 1 }) + const doc2Reloaded = dataReloaded.find(function (doc) { return doc.a === 2 }) + const doc3Reloaded = dataReloaded.find(function (doc) { return doc.a === 3 }) + assert.equal(dataReloaded.length, 1) + assert.equal(doc3Reloaded.a, 3) + assert.equal(doc1Reloaded, undefined) + assert.equal(doc2Reloaded, undefined) + }) + it('When treating raw data, refuse to proceed if too much data is corrupt, to avoid data loss', async () => { const corruptTestFilename = 'workspace/corruptTest.db' const fakeData = '{"_id":"one","hello":"world"}\n' + 'Some corrupt data\n' + '{"_id":"two","hello":"earth"}\n' + '{"_id":"three","hello":"you"}\n' @@ -707,7 +730,7 @@ describe('Persistence async', function () { it('persistCachedDatabase should update the contents of the datafile and leave a clean state even if there is a temp datafile', async () => { await d.insertAsync({ hello: 'world' }) - const docs = await d.find({}) + const docs = await d.findAsync({}) assert.equal(docs.length, 1) if (await exists(testDb)) { await fs.unlink(testDb) } @@ -749,7 +772,7 @@ describe('Persistence async', function () { const theDb = new Datastore({ filename: dbFile }) await theDb.loadDatabaseAsync() - const docs = await theDb.find({}) + const docs = await theDb.findAsync({}) assert.equal(docs.length, 0) const doc1 = await theDb.insertAsync({ a: 'hello' }) diff --git a/test/persistence.test.js b/test/persistence.test.js index a19ae04..aa965b4 100755 --- a/test/persistence.test.js +++ b/test/persistence.test.js @@ -9,6 +9,7 @@ const Datastore = require('../lib/datastore') const Persistence = require('../lib/persistence') const storage = require('../lib/storage') const { execFile, fork } = require('child_process') +const { callbackify } = require('util') const Readable = require('stream').Readable const 
{ assert } = chai @@ -24,7 +25,7 @@ describe('Persistence', function () { waterfall([ function (cb) { - Persistence.ensureDirectoryExists(path.dirname(testDb), function () { + callbackify((dirname) => Persistence.ensureDirectoryExistsAsync(dirname))(path.dirname(testDb), function () { fs.access(testDb, fs.constants.FS_OK, function (err) { if (!err) { fs.unlink(testDb, cb) @@ -66,7 +67,7 @@ describe('Persistence', function () { stream.push(rawData) stream.push(null) - d.persistence.treatRawStream(stream, function (err, result) { + callbackify(rawStream => d.persistence.treatRawStreamAsync(rawStream))(stream, function (err, result) { assert.isNull(err) const treatedData = result.data treatedData.sort(function (a, b) { return a._id - b._id }) @@ -101,7 +102,7 @@ describe('Persistence', function () { stream.push(rawData) stream.push(null) - d.persistence.treatRawStream(stream, function (err, result) { + callbackify(rawStream => d.persistence.treatRawStreamAsync(rawStream))(stream, function (err, result) { assert.isNull(err) const treatedData = result.data treatedData.sort(function (a, b) { return a._id - b._id }) @@ -135,7 +136,7 @@ describe('Persistence', function () { stream.push(rawData) stream.push(null) - d.persistence.treatRawStream(stream, function (err, result) { + callbackify(rawStream => d.persistence.treatRawStreamAsync(rawStream))(stream, function (err, result) { assert.isNull(err) const treatedData = result.data treatedData.sort(function (a, b) { return a._id - b._id }) @@ -169,7 +170,7 @@ describe('Persistence', function () { stream.push(rawData) stream.push(null) - d.persistence.treatRawStream(stream, function (err, result) { + callbackify(rawStream => d.persistence.treatRawStreamAsync(rawStream))(stream, function (err, result) { assert.isNull(err) const treatedData = result.data treatedData.sort(function (a, b) { return a._id - b._id }) @@ -205,7 +206,7 @@ describe('Persistence', function () { stream.push(rawData) stream.push(null) - 
d.persistence.treatRawStream(stream, function (err, result) { + callbackify(rawStream => d.persistence.treatRawStreamAsync(rawStream))(stream, function (err, result) { assert.isNull(err) const treatedData = result.data treatedData.sort(function (a, b) { return a._id - b._id }) @@ -239,7 +240,7 @@ describe('Persistence', function () { stream.push(rawData) stream.push(null) - d.persistence.treatRawStream(stream, function (err, result) { + callbackify(rawStream => d.persistence.treatRawStreamAsync(rawStream))(stream, function (err, result) { assert.isNull(err) const treatedData = result.data treatedData.sort(function (a, b) { return a._id - b._id }) @@ -277,7 +278,7 @@ describe('Persistence', function () { stream.push(rawData) stream.push(null) - d.persistence.treatRawStream(stream, function (err, result) { + callbackify(rawStream => d.persistence.treatRawStreamAsync(rawStream))(stream, function (err, result) { assert.isNull(err) const treatedData = result.data const indexes = result.indexes @@ -454,7 +455,7 @@ describe('Persistence', function () { it('Declaring only one hook will throw an exception to prevent data loss', function (done) { const hookTestFilename = 'workspace/hookTest.db' - storage.ensureFileDoesntExist(hookTestFilename, function () { + callbackify(storage.ensureFileDoesntExistAsync)(hookTestFilename, function () { fs.writeFileSync(hookTestFilename, 'Some content', 'utf8'); (function () { @@ -487,7 +488,7 @@ describe('Persistence', function () { it('Declaring two hooks that are not reverse of one another will cause an exception to prevent data loss', function (done) { const hookTestFilename = 'workspace/hookTest.db' - storage.ensureFileDoesntExist(hookTestFilename, function () { + callbackify(storage.ensureFileDoesntExistAsync)(hookTestFilename, function () { fs.writeFileSync(hookTestFilename, 'Some content', 'utf8'); (function () { @@ -509,7 +510,7 @@ describe('Persistence', function () { it('A serialization hook can be used to transform data before 
writing new state to disk', function (done) { const hookTestFilename = 'workspace/hookTest.db' - storage.ensureFileDoesntExist(hookTestFilename, function () { + callbackify(storage.ensureFileDoesntExistAsync)(hookTestFilename, function () { const d = new Datastore({ filename: hookTestFilename, autoload: true, @@ -586,7 +587,7 @@ describe('Persistence', function () { it('Use serialization hook when persisting cached database or compacting', function (done) { const hookTestFilename = 'workspace/hookTest.db' - storage.ensureFileDoesntExist(hookTestFilename, function () { + callbackify(storage.ensureFileDoesntExistAsync)(hookTestFilename, function () { const d = new Datastore({ filename: hookTestFilename, autoload: true, @@ -619,7 +620,7 @@ describe('Persistence', function () { idx = model.deserialize(idx) assert.deepStrictEqual(idx, { $$indexCreated: { fieldName: 'idefix' } }) - d.persistence.persistCachedDatabase(function () { + callbackify(() => d.persistence.persistCachedDatabaseAsync())(function () { const _data = fs.readFileSync(hookTestFilename, 'utf8') const data = _data.split('\n') let doc0 = bd(data[0]) @@ -646,7 +647,7 @@ describe('Persistence', function () { it('Deserialization hook is correctly used when loading data', function (done) { const hookTestFilename = 'workspace/hookTest.db' - storage.ensureFileDoesntExist(hookTestFilename, function () { + callbackify(storage.ensureFileDoesntExistAsync)(hookTestFilename, function () { const d = new Datastore({ filename: hookTestFilename, autoload: true, @@ -714,7 +715,7 @@ describe('Persistence', function () { fs.existsSync('workspace/it.db').should.equal(false) fs.existsSync('workspace/it.db~').should.equal(false) - storage.ensureDatafileIntegrity(p.filename, function (err) { + callbackify(storage.ensureDatafileIntegrityAsync)(p.filename, function (err) { assert.isNull(err) fs.existsSync('workspace/it.db').should.equal(true) @@ -737,7 +738,7 @@ describe('Persistence', function () { 
fs.existsSync('workspace/it.db').should.equal(true) fs.existsSync('workspace/it.db~').should.equal(false) - storage.ensureDatafileIntegrity(p.filename, function (err) { + callbackify(storage.ensureDatafileIntegrityAsync)(p.filename, function (err) { assert.isNull(err) fs.existsSync('workspace/it.db').should.equal(true) @@ -760,7 +761,7 @@ describe('Persistence', function () { fs.existsSync('workspace/it.db').should.equal(false) fs.existsSync('workspace/it.db~').should.equal(true) - storage.ensureDatafileIntegrity(p.filename, function (err) { + callbackify(storage.ensureDatafileIntegrityAsync)(p.filename, function (err) { assert.isNull(err) fs.existsSync('workspace/it.db').should.equal(true) @@ -785,7 +786,7 @@ describe('Persistence', function () { fs.existsSync('workspace/it.db').should.equal(true) fs.existsSync('workspace/it.db~').should.equal(true) - storage.ensureDatafileIntegrity(theDb.persistence.filename, function (err) { + callbackify(storage.ensureDatafileIntegrityAsync)(theDb.persistence.filename, function (err) { assert.isNull(err) fs.existsSync('workspace/it.db').should.equal(true) @@ -820,7 +821,7 @@ describe('Persistence', function () { fs.writeFileSync(testDb + '~', 'something', 'utf8') fs.existsSync(testDb + '~').should.equal(true) - d.persistence.persistCachedDatabase(function (err) { + callbackify(() => d.persistence.persistCachedDatabaseAsync())(function (err) { const contents = fs.readFileSync(testDb, 'utf8') assert.isNull(err) fs.existsSync(testDb).should.equal(true) @@ -848,7 +849,7 @@ describe('Persistence', function () { fs.writeFileSync(testDb + '~', 'bloup', 'utf8') fs.existsSync(testDb + '~').should.equal(true) - d.persistence.persistCachedDatabase(function (err) { + callbackify(() => d.persistence.persistCachedDatabaseAsync())(function (err) { const contents = fs.readFileSync(testDb, 'utf8') assert.isNull(err) fs.existsSync(testDb).should.equal(true) @@ -873,7 +874,7 @@ describe('Persistence', function () { 
fs.existsSync(testDb).should.equal(false) fs.existsSync(testDb + '~').should.equal(true) - d.persistence.persistCachedDatabase(function (err) { + callbackify(() => d.persistence.persistCachedDatabaseAsync())(function (err) { const contents = fs.readFileSync(testDb, 'utf8') assert.isNull(err) fs.existsSync(testDb).should.equal(true) @@ -912,8 +913,8 @@ describe('Persistence', function () { let theDb, theDb2, doc1, doc2 waterfall([ - apply(storage.ensureFileDoesntExist, dbFile), - apply(storage.ensureFileDoesntExist, dbFile + '~'), + apply(callbackify(storage.ensureFileDoesntExistAsync), dbFile), + apply(callbackify(storage.ensureFileDoesntExistAsync), dbFile + '~'), function (cb) { theDb = new Datastore({ filename: dbFile }) theDb.loadDatabase(cb) @@ -1051,25 +1052,4 @@ describe('Persistence', function () { }) }) }) // ==== End of 'Prevent dataloss when persisting data' ==== - - describe('ensureFileDoesntExist', function () { - it('Doesnt do anything if file already doesnt exist', function (done) { - storage.ensureFileDoesntExist('workspace/nonexisting', function (err) { - assert.isNull(err) - fs.existsSync('workspace/nonexisting').should.equal(false) - done() - }) - }) - - it('Deletes file if it stat', function (done) { - fs.writeFileSync('workspace/existing', 'hello world', 'utf8') - fs.existsSync('workspace/existing').should.equal(true) - - storage.ensureFileDoesntExist('workspace/existing', function (err) { - assert.isNull(err) - fs.existsSync('workspace/existing').should.equal(false) - done() - }) - }) - }) // ==== End of 'ensureFileDoesntExist' ==== }) diff --git a/test/utils.test.js b/test/utils.test.js index 8b0497d..d7272ee 100644 --- a/test/utils.test.js +++ b/test/utils.test.js @@ -39,3 +39,4 @@ module.exports.waterfall = waterfall module.exports.each = each module.exports.wait = wait module.exports.exists = exists +module.exports.callbackify = callbackify diff --git a/test_lac/openFds.test.js b/test_lac/openFds.test.js index 4512087..da449d6 100644 --- 
a/test_lac/openFds.test.js +++ b/test_lac/openFds.test.js @@ -1,6 +1,7 @@ const fs = require('fs') const { waterfall, whilst } = require('../test/utils.test.js') const Nedb = require('../lib/datastore') +const { callbackify } = require('util') const db = new Nedb({ filename: './workspace/openfds.db', autoload: true }) const N = 64 let i @@ -48,7 +49,7 @@ waterfall([ i = 0 whilst(function () { return i < 2 * N + 1 } , function (cb) { - db.persistence.persistCachedDatabase(function (err) { + callbackify(() => db.persistence.persistCachedDatabaseAsync())(function (err) { if (err) { return cb(err) } i += 1 return cb() diff --git a/typings-tests.ts b/typings-tests.ts index 3144ec0..ed98aa9 100644 --- a/typings-tests.ts +++ b/typings-tests.ts @@ -20,10 +20,6 @@ db.loadDatabase() db = new Datastore({ filename: 'path/to/datafile_2', autoload: true }) // You can issue commands right away -// Type 4: Persistent datastore for a Node Webkit app called 'nwtest' -// For example on Linux, the datafile will be ~/.config/nwtest/nedb-data/something.db -db = new Datastore({ filename: 'something.db' }) - // Of course you can create multiple datastores if you need several // collections. In this case it's usually a good idea to use autoload for all collections. const dbContainer: any = {}