mirror of https://github.com/seald/nedb
commit 6708541c72
@@ -1,86 +1,282 @@
/**
 * Way data is stored for this database
 * For a Node.js/Node Webkit database it's the file system
 * For a browser-side database it's localforage, which uses the best backend available (IndexedDB then WebSQL then localStorage)
 * For a react-native database, we use @react-native-async-storage/async-storage
 *
 * This version is the React-Native version and uses [@react-native-async-storage/async-storage]{@link https://github.com/react-native-async-storage/async-storage}.
 * @module storageReactNative
 * @see module:storageBrowser
 * @see module:storage
 * @private
 */

const AsyncStorage = require('@react-native-async-storage/async-storage').default
const { callbackify } = require('util')

/**
 * Async version of {@link module:storageReactNative.exists}.
 * @param {string} file
 * @return {Promise<boolean>}
 * @async
 * @alias module:storageReactNative.existsAsync
 * @see module:storageReactNative.exists
 */
const existsAsync = async file => {
  try {
    const value = await AsyncStorage.getItem(file)
    if (value !== null) return true // Even if value is undefined, AsyncStorage returns null
    return false
  } catch (error) {
    return false
  }
}

/**
 * @callback module:storageReactNative~existsCallback
 * @param {boolean} exists
 */

/**
 * Callback returns true if the file exists.
 * @function
 * @param {string} file
 * @param {module:storageReactNative~existsCallback} cb
 * @alias module:storageReactNative.exists
 */
const exists = callbackify(existsAsync)

/**
 * Async version of {@link module:storageReactNative.rename}.
 * @param {string} oldPath
 * @param {string} newPath
 * @return {Promise<void>}
 * @alias module:storageReactNative.renameAsync
 * @async
 * @see module:storageReactNative.rename
 */
const renameAsync = async (oldPath, newPath) => {
  try {
    const value = await AsyncStorage.getItem(oldPath)
    if (value === null) await AsyncStorage.removeItem(newPath)
    else {
      await AsyncStorage.setItem(newPath, value)
      await AsyncStorage.removeItem(oldPath)
    }
  } catch (err) {
    console.warn('An error happened while renaming, skip')
  }
}

/**
 * Moves the item from one path to another.
 * @function
 * @param {string} oldPath
 * @param {string} newPath
 * @param {NoParamCallback} c
 * @return {void}
 * @alias module:storageReactNative.rename
 */
const rename = callbackify(renameAsync)

/**
 * Async version of {@link module:storageReactNative.writeFile}.
 * @param {string} file
 * @param {string} data
 * @param {object} [options]
 * @return {Promise<void>}
 * @alias module:storageReactNative.writeFileAsync
 * @async
 * @see module:storageReactNative.writeFile
 */
const writeFileAsync = async (file, data, options) => {
  // Options do not matter in a react-native setup
  try {
    await AsyncStorage.setItem(file, data)
  } catch (error) {
    console.warn('An error happened while writing, skip')
  }
}

/**
 * Saves the item at the given path.
 * @function
 * @param {string} path
 * @param {string} data
 * @param {object} options
 * @param {function} callback
 * @alias module:storageReactNative.writeFile
 */
const writeFile = callbackify(writeFileAsync)

/**
 * Async version of {@link module:storageReactNative.appendFile}.
 * @function
 * @param {string} filename
 * @param {string} toAppend
 * @param {object} [options]
 * @return {Promise<void>}
 * @alias module:storageReactNative.appendFileAsync
 * @async
 * @see module:storageReactNative.appendFile
 */
const appendFileAsync = async (filename, toAppend, options) => {
  // Options do not matter in a react-native setup
  try {
    const contents = (await AsyncStorage.getItem(filename)) || ''
    await AsyncStorage.setItem(filename, contents + toAppend)
  } catch (error) {
    console.warn('An error happened while appending to file, skip')
  }
}

/**
 * Appends to the item at the given path.
 * @function
 * @param {string} filename
 * @param {string} toAppend
 * @param {object} [options]
 * @param {function} callback
 * @alias module:storageReactNative.appendFile
 */
const appendFile = callbackify(appendFileAsync)

/**
 * Async version of {@link module:storageReactNative.readFile}.
 * @function
 * @param {string} filename
 * @param {object} [options]
 * @return {Promise<string>}
 * @alias module:storageReactNative.readFileAsync
 * @async
 * @see module:storageReactNative.readFile
 */
const readFileAsync = async (filename, options) => {
  try {
    return (await AsyncStorage.getItem(filename)) || ''
  } catch (error) {
    console.warn('An error happened while reading, skip')
    return ''
  }
}

/**
 * Reads data at the given path.
 * @function
 * @param {string} filename
 * @param {object} options
 * @param {function} callback
 * @alias module:storageReactNative.readFile
 */
const readFile = callbackify(readFileAsync)

/**
 * Async version of {@link module:storageReactNative.unlink}.
 * @function
 * @param {string} filename
 * @return {Promise<void>}
 * @async
 * @alias module:storageReactNative.unlinkAsync
 * @see module:storageReactNative.unlink
 */
const unlinkAsync = async filename => {
  try {
    await AsyncStorage.removeItem(filename)
  } catch (error) {
    console.warn('An error happened while unlinking, skip')
  }
}

/**
 * Removes the data at the given path.
 * @function
 * @param {string} path
 * @param {function} callback
 * @alias module:storageReactNative.unlink
 */
const unlink = callbackify(unlinkAsync)

/**
 * Shim for {@link module:storage.mkdirAsync}, nothing to do, no directories will be used on react-native.
 * @function
 * @param {string} dir
 * @param {object} [options]
 * @return {Promise<void|string>}
 * @alias module:storageReactNative.mkdirAsync
 * @async
 */
const mkdirAsync = (dir, options) => Promise.resolve()

/**
 * Shim for {@link module:storage.mkdir}, nothing to do, no directories will be used on react-native.
 * @function
 * @param {string} path
 * @param {object} options
 * @param {function} callback
 * @alias module:storageReactNative.mkdir
 */
const mkdir = callbackify(mkdirAsync)

/**
 * Shim for {@link module:storage.ensureDatafileIntegrityAsync}, nothing to do, no data corruption possible on react-native.
 * @param {string} filename
 * @return {Promise<void>}
 * @alias module:storageReactNative.ensureDatafileIntegrityAsync
 */
const ensureDatafileIntegrityAsync = (filename) => Promise.resolve()

/**
 * Shim for {@link module:storage.ensureDatafileIntegrity}, nothing to do, no data corruption possible on react-native.
 * @function
 * @param {string} filename
 * @param {NoParamCallback} callback signature: err
 * @alias module:storageReactNative.ensureDatafileIntegrity
 */
const ensureDatafileIntegrity = callbackify(ensureDatafileIntegrityAsync)

/**
 * Async version of {@link module:storageReactNative.crashSafeWriteFileLines}.
 * @param {string} filename
 * @param {string[]} lines
 * @return {Promise<void>}
 * @alias module:storageReactNative.crashSafeWriteFileLinesAsync
 * @see module:storageReactNative.crashSafeWriteFileLines
 */
const crashSafeWriteFileLinesAsync = async (filename, lines) => {
  lines.push('') // Add final new line
  await writeFileAsync(filename, lines.join('\n'))
}

/**
 * Fully write or rewrite the datafile, immune to crashes during the write operation (data will not be lost).
 * @function
 * @param {string} filename
 * @param {string[]} lines
 * @param {NoParamCallback} [callback] Optional callback, signature: err
 * @alias module:storageReactNative.crashSafeWriteFileLines
 */
const crashSafeWriteFileLines = callbackify(crashSafeWriteFileLinesAsync)

// Interface
module.exports.exists = exists
module.exports.existsAsync = existsAsync

module.exports.rename = rename
module.exports.renameAsync = renameAsync

module.exports.writeFile = writeFile
module.exports.writeFileAsync = writeFileAsync

module.exports.crashSafeWriteFileLines = crashSafeWriteFileLines
module.exports.crashSafeWriteFileLinesAsync = crashSafeWriteFileLinesAsync

module.exports.appendFile = appendFile
module.exports.appendFileAsync = appendFileAsync

module.exports.readFile = readFile
module.exports.readFileAsync = readFileAsync

module.exports.unlink = unlink
module.exports.unlinkAsync = unlinkAsync

module.exports.mkdir = mkdir
module.exports.mkdirAsync = mkdirAsync

module.exports.ensureDatafileIntegrity = ensureDatafileIntegrity
module.exports.ensureDatafileIntegrityAsync = ensureDatafileIntegrityAsync
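For orientation, here is a minimal usage sketch of the module above. It is not part of the commit: the relative require path and the 'todos.db' key name are assumptions, and the snippet only makes sense inside a React Native app where AsyncStorage is available.

// Hypothetical consumer of the react-native storage module (path and key name are assumptions)
const storage = require('./storage.react-native.js')

const demo = async () => {
  // Each "file" is simply one AsyncStorage key holding the whole datafile as a string
  await storage.writeFileAsync('todos.db', '{"task":"write docs"}\n')
  await storage.appendFileAsync('todos.db', '{"task":"ship release"}\n')

  console.log(await storage.existsAsync('todos.db')) // true
  console.log(await storage.readFileAsync('todos.db')) // both appended lines

  await storage.unlinkAsync('todos.db')
  console.log(await storage.existsAsync('todos.db')) // false
}

demo().catch(console.error)

The callback-style exports (exists, rename, writeFile, ...) are the same functions passed through util.callbackify, so callers get Node's usual (err, value) callback signature.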
@@ -0,0 +1,5 @@
'use strict'

module.exports = {
  plugins: ['plugins/markdown']
}
@@ -0,0 +1,48 @@
/**
 * Responsible for sequentially executing actions on the database
 * @private
 */
class Waterfall {
  /**
   * Instantiate a new Waterfall.
   */
  constructor () {
    /**
     * This is the internal Promise object which resolves when all the tasks of the `Waterfall` are done.
     *
     * It will change any time `this.waterfall` is called.
     *
     * @type {Promise}
     */
    this.guardian = Promise.resolve()
  }

  /**
   *
   * @param {AsyncFunction} func
   * @return {AsyncFunction}
   */
  waterfall (func) {
    return (...args) => {
      this.guardian = this.guardian.then(() => {
        return func(...args)
          .then(result => ({ error: false, result }), result => ({ error: true, result }))
      })
      return this.guardian.then(({ error, result }) => {
        if (error) return Promise.reject(result)
        else return Promise.resolve(result)
      })
    }
  }

  /**
   * Shorthand for chaining a promise to the Waterfall
   * @param {Promise} promise
   * @return {Promise}
   */
  chain (promise) {
    return this.waterfall(() => promise)()
  }
}

module.exports = Waterfall
@@ -0,0 +1,519 @@
/* eslint-env mocha */
const testDb = 'workspace/test.db'
const { promises: fs } = require('fs')
const assert = require('assert').strict
const path = require('path')
const Datastore = require('../lib/datastore')
const Persistence = require('../lib/persistence')
const Cursor = require('../lib/cursor')
const { exists } = require('./utils.test.js')

describe('Cursor Async', function () {
  let d

  beforeEach(async () => {
    d = new Datastore({ filename: testDb })
    assert.equal(d.filename, testDb)
    assert.equal(d.inMemoryOnly, false)
    await Persistence.ensureDirectoryExistsAsync(path.dirname(testDb))
    if (await exists(testDb)) await fs.unlink(testDb)
    await d.loadDatabaseAsync()
    assert.equal(d.getAllData().length, 0)
  })

  describe('Without sorting', function () {
    beforeEach(async () => {
      await d.insertAsync({ age: 5 })
      await d.insertAsync({ age: 57 })
      await d.insertAsync({ age: 52 })
      await d.insertAsync({ age: 23 })
      await d.insertAsync({ age: 89 })
    })

    it('Without query, an empty query or a simple query and no skip or limit', async () => {
      const cursor = new Cursor(d)
      const docs = await cursor
      assert.equal(docs.length, 5)
      assert.equal(docs.filter(function (doc) { return doc.age === 5 })[0].age, 5)
      assert.equal(docs.filter(function (doc) { return doc.age === 57 })[0].age, 57)
      assert.equal(docs.filter(function (doc) { return doc.age === 52 })[0].age, 52)
      assert.equal(docs.filter(function (doc) { return doc.age === 23 })[0].age, 23)
      assert.equal(docs.filter(function (doc) { return doc.age === 89 })[0].age, 89)
      const cursor2 = new Cursor(d, {})
      const docs2 = await cursor2
      assert.equal(docs2.length, 5)
      assert.equal(docs2.filter(function (doc) { return doc.age === 5 })[0].age, 5)
      assert.equal(docs2.filter(function (doc) { return doc.age === 57 })[0].age, 57)
      assert.equal(docs2.filter(function (doc) { return doc.age === 52 })[0].age, 52)
      assert.equal(docs2.filter(function (doc) { return doc.age === 23 })[0].age, 23)
      assert.equal(docs2.filter(function (doc) { return doc.age === 89 })[0].age, 89)
      const cursor3 = new Cursor(d, { age: { $gt: 23 } })
      const docs3 = await cursor3
      assert.equal(docs3.length, 3)
      assert.equal(docs3.filter(function (doc) { return doc.age === 57 })[0].age, 57)
      assert.equal(docs3.filter(function (doc) { return doc.age === 52 })[0].age, 52)
      assert.equal(docs3.filter(function (doc) { return doc.age === 89 })[0].age, 89)
    })

    it('With an empty collection', async () => {
      await d.removeAsync({}, { multi: true })
      const cursor = new Cursor(d)
      const docs = await cursor
      assert.equal(docs.length, 0)
    })

    it('With a limit', async () => {
      const cursor = new Cursor(d)
      cursor.limit(3)
      const docs = await cursor
      assert.equal(docs.length, 3)
      // No way to predict which results are returned of course ...
    })

    it('With a skip', async () => {
      const cursor = new Cursor(d)
      const docs = await cursor.skip(2)
      assert.equal(docs.length, 3)
      // No way to predict which results are returned of course ...
    })

    it('With a limit and a skip and method chaining', async () => {
      const cursor = new Cursor(d)
      cursor.limit(4).skip(3) // Only way to know that the right number of results was skipped is if limit + skip > number of results
      const docs = await cursor
      assert.equal(docs.length, 2)
      // No way to predict which results are returned of course ...
    })
  }) // ===== End of 'Without sorting' =====

  describe('Sorting of the results', function () {
    beforeEach(async () => {
      // We don't know the order in which docs will be inserted but we ensure correctness by testing both sort orders
      await d.insertAsync({ age: 5 })
      await d.insertAsync({ age: 57 })
      await d.insertAsync({ age: 52 })
      await d.insertAsync({ age: 23 })
      await d.insertAsync({ age: 89 })
    })

    it('Using one sort', async () => {
      const cursor = new Cursor(d, {})
      cursor.sort({ age: 1 })
      const docs = await cursor
      // Results are in ascending order
      for (let i = 0; i < docs.length - 1; i += 1) {
        assert(docs[i].age < docs[i + 1].age)
      }

      cursor.sort({ age: -1 })
      const docs2 = await cursor
      // Results are in descending order
      for (let i = 0; i < docs2.length - 1; i += 1) {
        assert(docs2[i].age > docs2[i + 1].age)
      }
    })

    it('Sorting strings with custom string comparison function', async () => {
      const db = new Datastore({
        inMemoryOnly: true,
        autoload: true,
        compareStrings: function (a, b) { return a.length - b.length }
      })

      await db.insertAsync({ name: 'alpha' })
      await db.insertAsync({ name: 'charlie' })
      await db.insertAsync({ name: 'zulu' })

      const docs = await db.findAsync({}).sort({ name: 1 })
      assert.equal(docs.map(x => x.name)[0], 'zulu')
      assert.equal(docs.map(x => x.name)[1], 'alpha')
      assert.equal(docs.map(x => x.name)[2], 'charlie')

      delete db.compareStrings
      const docs2 = await db.findAsync({}).sort({ name: 1 })
      assert.equal(docs2.map(x => x.name)[0], 'alpha')
      assert.equal(docs2.map(x => x.name)[1], 'charlie')
      assert.equal(docs2.map(x => x.name)[2], 'zulu')
    })

    it('With an empty collection', async () => {
      await d.removeAsync({}, { multi: true })
      const cursor = new Cursor(d)
      cursor.sort({ age: 1 })
      const docs = await cursor
      assert.equal(docs.length, 0)
    })

    it('Ability to chain sorting and exec', async () => {
      const cursor = new Cursor(d)
      const docs = await cursor.sort({ age: 1 })
      // Results are in ascending order
      for (let i = 0; i < docs.length - 1; i += 1) {
        assert.ok(docs[i].age < docs[i + 1].age)
      }

      const cursor2 = new Cursor(d)
      const docs2 = await cursor2.sort({ age: -1 })
      // Results are in descending order
      for (let i = 0; i < docs2.length - 1; i += 1) {
        assert(docs2[i].age > docs2[i + 1].age)
      }
    })

    it('Using limit and sort', async () => {
      const cursor = new Cursor(d)
      const docs = await cursor.sort({ age: 1 }).limit(3)
      assert.equal(docs.length, 3)
      assert.equal(docs[0].age, 5)
      assert.equal(docs[1].age, 23)
      assert.equal(docs[2].age, 52)
      const cursor2 = new Cursor(d)
      const docs2 = await cursor2.sort({ age: -1 }).limit(2)
      assert.equal(docs2.length, 2)
      assert.equal(docs2[0].age, 89)
      assert.equal(docs2[1].age, 57)
    })

    it('Using a limit higher than total number of docs shouldn\'t cause an error', async () => {
      const cursor = new Cursor(d)
      const docs = await cursor.sort({ age: 1 }).limit(7)
      assert.equal(docs.length, 5)
      assert.equal(docs[0].age, 5)
      assert.equal(docs[1].age, 23)
      assert.equal(docs[2].age, 52)
      assert.equal(docs[3].age, 57)
      assert.equal(docs[4].age, 89)
    })

    it('Using limit and skip with sort', async () => {
      const cursor = new Cursor(d)
      const docs = await cursor.sort({ age: 1 }).limit(1).skip(2)
      assert.equal(docs.length, 1)
      assert.equal(docs[0].age, 52)
      const cursor2 = new Cursor(d)
      const docs2 = await cursor2.sort({ age: 1 }).limit(3).skip(1)
      assert.equal(docs2.length, 3)
      assert.equal(docs2[0].age, 23)
      assert.equal(docs2[1].age, 52)
      assert.equal(docs2[2].age, 57)
      const cursor3 = new Cursor(d)
      const docs3 = await cursor3.sort({ age: -1 }).limit(2).skip(2)
      assert.equal(docs3.length, 2)
      assert.equal(docs3[0].age, 52)
      assert.equal(docs3[1].age, 23)
    })

    it('Using too big a limit and a skip with sort', async () => {
      const cursor = new Cursor(d)
      const docs = await cursor.sort({ age: 1 }).limit(8).skip(2)
      assert.equal(docs.length, 3)
      assert.equal(docs[0].age, 52)
      assert.equal(docs[1].age, 57)
      assert.equal(docs[2].age, 89)
    })

    it('Using too big a skip with sort should return no result', async () => {
      const cursor = new Cursor(d)
      const docs = await cursor.sort({ age: 1 }).skip(5)
      assert.equal(docs.length, 0)
      const cursor2 = new Cursor(d)
      const docs2 = await cursor2.sort({ age: 1 }).skip(7)
      assert.equal(docs2.length, 0)

      const cursor3 = new Cursor(d)
      const docs3 = await cursor3.sort({ age: 1 }).limit(3).skip(7)
      assert.equal(docs3.length, 0)
      const cursor4 = new Cursor(d)
      const docs4 = await cursor4.sort({ age: 1 }).limit(6).skip(7)
      assert.equal(docs4.length, 0)
    })

    it('Sorting strings', async () => {
      await d.removeAsync({}, { multi: true })
      await d.insertAsync({ name: 'jako' })
      await d.insertAsync({ name: 'jakeb' })
      await d.insertAsync({ name: 'sue' })

      const cursor = new Cursor(d, {})
      const docs = await cursor.sort({ name: 1 })
      assert.equal(docs.length, 3)
      assert.equal(docs[0].name, 'jakeb')
      assert.equal(docs[1].name, 'jako')
      assert.equal(docs[2].name, 'sue')
      const cursor2 = new Cursor(d, {})
      const docs2 = await cursor2.sort({ name: -1 })
      assert.equal(docs2.length, 3)
      assert.equal(docs2[0].name, 'sue')
      assert.equal(docs2[1].name, 'jako')
      assert.equal(docs2[2].name, 'jakeb')
    })

    it('Sorting nested fields with dates', async () => {
      await d.removeAsync({}, { multi: true })
      const doc1 = await d.insertAsync({ event: { recorded: new Date(400) } })
      const doc2 = await d.insertAsync({ event: { recorded: new Date(60000) } })
      const doc3 = await d.insertAsync({ event: { recorded: new Date(32) } })
      const cursor = new Cursor(d, {})
      const docs = await cursor.sort({ 'event.recorded': 1 })
      assert.equal(docs.length, 3)
      assert.equal(docs[0]._id, doc3._id)
      assert.equal(docs[1]._id, doc1._id)
      assert.equal(docs[2]._id, doc2._id)

      const cursor2 = new Cursor(d, {})
      const docs2 = await cursor2.sort({ 'event.recorded': -1 })
      assert.equal(docs2.length, 3)
      assert.equal(docs2[0]._id, doc2._id)
      assert.equal(docs2[1]._id, doc1._id)
      assert.equal(docs2[2]._id, doc3._id)
    })

    it('Sorting when some fields are undefined', async () => {
      await d.removeAsync({}, { multi: true })

      await d.insertAsync({ name: 'jako', other: 2 })
      await d.insertAsync({ name: 'jakeb', other: 3 })
      await d.insertAsync({ name: 'sue' })
      await d.insertAsync({ name: 'henry', other: 4 })

      const cursor = new Cursor(d, {})
      // eslint-disable-next-line node/handle-callback-err
      const docs = await cursor.sort({ other: 1 })
      assert.equal(docs.length, 4)
      assert.equal(docs[0].name, 'sue')
      assert.equal(docs[0].other, undefined)
      assert.equal(docs[1].name, 'jako')
      assert.equal(docs[1].other, 2)
      assert.equal(docs[2].name, 'jakeb')
      assert.equal(docs[2].other, 3)
      assert.equal(docs[3].name, 'henry')
      assert.equal(docs[3].other, 4)
      const cursor2 = new Cursor(d, { name: { $in: ['suzy', 'jakeb', 'jako'] } })
      const docs2 = await cursor2.sort({ other: -1 })
      assert.equal(docs2.length, 2)
      assert.equal(docs2[0].name, 'jakeb')
      assert.equal(docs2[0].other, 3)
      assert.equal(docs2[1].name, 'jako')
      assert.equal(docs2[1].other, 2)
    })

    it('Sorting when all fields are undefined', async () => {
      await d.removeAsync({}, { multi: true })
      await d.insertAsync({ name: 'jako' })
      await d.insertAsync({ name: 'jakeb' })
      await d.insertAsync({ name: 'sue' })
      const cursor = new Cursor(d, {})
      const docs = await cursor.sort({ other: 1 })
      assert.equal(docs.length, 3)

      const cursor2 = new Cursor(d, { name: { $in: ['sue', 'jakeb', 'jakob'] } })
      const docs2 = await cursor2.sort({ other: -1 })
      assert.equal(docs2.length, 2)
    })

    it('Multiple consecutive sorts', async () => {
      await d.removeAsync({}, { multi: true })

      await d.insertAsync({ name: 'jako', age: 43, nid: 1 })
      await d.insertAsync({ name: 'jakeb', age: 43, nid: 2 })
      await d.insertAsync({ name: 'sue', age: 12, nid: 3 })
      await d.insertAsync({ name: 'zoe', age: 23, nid: 4 })
      await d.insertAsync({ name: 'jako', age: 35, nid: 5 })
      const cursor = new Cursor(d, {})
      // eslint-disable-next-line node/handle-callback-err
      const docs = await cursor.sort({ name: 1, age: -1 })
      assert.equal(docs.length, 5)

      assert.equal(docs[0].nid, 2)
      assert.equal(docs[1].nid, 1)
      assert.equal(docs[2].nid, 5)
      assert.equal(docs[3].nid, 3)
      assert.equal(docs[4].nid, 4)
      const cursor2 = new Cursor(d, {})
      const docs2 = await cursor2.sort({ name: 1, age: 1 })
      assert.equal(docs2.length, 5)

      assert.equal(docs2[0].nid, 2)
      assert.equal(docs2[1].nid, 5)
      assert.equal(docs2[2].nid, 1)
      assert.equal(docs2[3].nid, 3)
      assert.equal(docs2[4].nid, 4)
      const cursor3 = new Cursor(d, {})
      const docs3 = await cursor3.sort({ age: 1, name: 1 })
      assert.equal(docs3.length, 5)

      assert.equal(docs3[0].nid, 3)
      assert.equal(docs3[1].nid, 4)
      assert.equal(docs3[2].nid, 5)
      assert.equal(docs3[3].nid, 2)
      assert.equal(docs3[4].nid, 1)

      const cursor4 = new Cursor(d, {})
      const docs4 = await cursor4.sort({ age: 1, name: -1 })
      assert.equal(docs4.length, 5)

      assert.equal(docs4[0].nid, 3)
      assert.equal(docs4[1].nid, 4)
      assert.equal(docs4[2].nid, 5)
      assert.equal(docs4[3].nid, 1)
      assert.equal(docs4[4].nid, 2)
    })

    it('Similar data, multiple consecutive sorts', async () => {
      let id
      const companies = ['acme', 'milkman', 'zoinks']
      const entities = []
      await d.removeAsync({}, { multi: true })
      id = 1
      for (let i = 0; i < companies.length; i++) {
        for (let j = 5; j <= 100; j += 5) {
          entities.push({
            company: companies[i],
            cost: j,
            nid: id
          })
          id++
        }
      }
      await Promise.all(entities.map(entity => d.insertAsync(entity)))
      const cursor = new Cursor(d, {})
      const docs = await cursor.sort({ company: 1, cost: 1 })
      assert.equal(docs.length, 60)

      for (let i = 0; i < docs.length; i++) {
        assert.equal(docs[i].nid, i + 1)
      }
    })
  }) // ===== End of 'Sorting' =====

  describe('Projections', function () {
    let doc1
    let doc2
    let doc3
    let doc4
    let doc0

    beforeEach(async () => {
      // We don't know the order in which docs will be inserted but we ensure correctness by testing both sort orders
      doc0 = await d.insertAsync({ age: 5, name: 'Jo', planet: 'B', toys: { bebe: true, ballon: 'much' } })
      doc1 = await d.insertAsync({ age: 57, name: 'Louis', planet: 'R', toys: { ballon: 'yeah', bebe: false } })
      doc2 = await d.insertAsync({ age: 52, name: 'Grafitti', planet: 'C', toys: { bebe: 'kind of' } })
      doc3 = await d.insertAsync({ age: 23, name: 'LM', planet: 'S' })
      doc4 = await d.insertAsync({ age: 89, planet: 'Earth' })
    })

    it('Takes all results if no projection or empty object given', async () => {
      const cursor = new Cursor(d, {})
      cursor.sort({ age: 1 }) // For easier finding
      const docs = await cursor
      assert.equal(docs.length, 5)
      assert.deepStrictEqual(docs[0], doc0)
      assert.deepStrictEqual(docs[1], doc3)
      assert.deepStrictEqual(docs[2], doc2)
      assert.deepStrictEqual(docs[3], doc1)
      assert.deepStrictEqual(docs[4], doc4)

      cursor.projection({})
      const docs2 = await cursor
      assert.equal(docs2.length, 5)
      assert.deepStrictEqual(docs2[0], doc0)
      assert.deepStrictEqual(docs2[1], doc3)
      assert.deepStrictEqual(docs2[2], doc2)
      assert.deepStrictEqual(docs2[3], doc1)
      assert.deepStrictEqual(docs2[4], doc4)
    })

    it('Can take only the expected fields', async () => {
      const cursor = new Cursor(d, {})
      cursor.sort({ age: 1 }) // For easier finding
      cursor.projection({ age: 1, name: 1 })
      const docs = await cursor
      assert.equal(docs.length, 5)
      // Takes the _id by default
      assert.deepStrictEqual(docs[0], { age: 5, name: 'Jo', _id: doc0._id })
      assert.deepStrictEqual(docs[1], { age: 23, name: 'LM', _id: doc3._id })
      assert.deepStrictEqual(docs[2], { age: 52, name: 'Grafitti', _id: doc2._id })
      assert.deepStrictEqual(docs[3], { age: 57, name: 'Louis', _id: doc1._id })
      assert.deepStrictEqual(docs[4], { age: 89, _id: doc4._id }) // No problems if one field to take doesn't exist

      cursor.projection({ age: 1, name: 1, _id: 0 })
      const docs2 = await cursor
      assert.equal(docs2.length, 5)
      assert.deepStrictEqual(docs2[0], { age: 5, name: 'Jo' })
      assert.deepStrictEqual(docs2[1], { age: 23, name: 'LM' })
      assert.deepStrictEqual(docs2[2], { age: 52, name: 'Grafitti' })
      assert.deepStrictEqual(docs2[3], { age: 57, name: 'Louis' })
      assert.deepStrictEqual(docs2[4], { age: 89 }) // No problems if one field to take doesn't exist
    })

    it('Can omit only the expected fields', async () => {
      const cursor = new Cursor(d, {})
      cursor.sort({ age: 1 }) // For easier finding
      cursor.projection({ age: 0, name: 0 })
      const docs = await cursor
      assert.equal(docs.length, 5)
      // Takes the _id by default
      assert.deepStrictEqual(docs[0], { planet: 'B', _id: doc0._id, toys: { bebe: true, ballon: 'much' } })
      assert.deepStrictEqual(docs[1], { planet: 'S', _id: doc3._id })
      assert.deepStrictEqual(docs[2], { planet: 'C', _id: doc2._id, toys: { bebe: 'kind of' } })
      assert.deepStrictEqual(docs[3], { planet: 'R', _id: doc1._id, toys: { bebe: false, ballon: 'yeah' } })
      assert.deepStrictEqual(docs[4], { planet: 'Earth', _id: doc4._id })

      cursor.projection({ age: 0, name: 0, _id: 0 })
      const docs2 = await cursor
      assert.equal(docs2.length, 5)
      assert.deepStrictEqual(docs2[0], { planet: 'B', toys: { bebe: true, ballon: 'much' } })
      assert.deepStrictEqual(docs2[1], { planet: 'S' })
      assert.deepStrictEqual(docs2[2], { planet: 'C', toys: { bebe: 'kind of' } })
      assert.deepStrictEqual(docs2[3], { planet: 'R', toys: { bebe: false, ballon: 'yeah' } })
      assert.deepStrictEqual(docs2[4], { planet: 'Earth' })
    })

    it('Cannot use both modes except for _id', async () => {
      const cursor = new Cursor(d, {})
      cursor.sort({ age: 1 }) // For easier finding
      cursor.projection({ age: 1, name: 0 })
      await assert.rejects(() => cursor)

      cursor.projection({ age: 1, _id: 0 })
      const docs = await cursor
      assert.deepStrictEqual(docs[0], { age: 5 })
      assert.deepStrictEqual(docs[1], { age: 23 })
      assert.deepStrictEqual(docs[2], { age: 52 })
      assert.deepStrictEqual(docs[3], { age: 57 })
      assert.deepStrictEqual(docs[4], { age: 89 })

      cursor.projection({ age: 0, toys: 0, planet: 0, _id: 1 })
      const docs2 = await cursor
      assert.deepStrictEqual(docs2[0], { name: 'Jo', _id: doc0._id })
      assert.deepStrictEqual(docs2[1], { name: 'LM', _id: doc3._id })
      assert.deepStrictEqual(docs2[2], { name: 'Grafitti', _id: doc2._id })
      assert.deepStrictEqual(docs2[3], { name: 'Louis', _id: doc1._id })
      assert.deepStrictEqual(docs2[4], { _id: doc4._id })
    })

    it('Projections on embedded documents - omit type', async () => {
      const cursor = new Cursor(d, {})
      cursor.sort({ age: 1 }) // For easier finding
      cursor.projection({ name: 0, planet: 0, 'toys.bebe': 0, _id: 0 })
      const docs = await cursor
      assert.deepStrictEqual(docs[0], { age: 5, toys: { ballon: 'much' } })
      assert.deepStrictEqual(docs[1], { age: 23 })
      assert.deepStrictEqual(docs[2], { age: 52, toys: {} })
      assert.deepStrictEqual(docs[3], { age: 57, toys: { ballon: 'yeah' } })
      assert.deepStrictEqual(docs[4], { age: 89 })
    })

    it('Projections on embedded documents - pick type', async () => {
      const cursor = new Cursor(d, {})
      cursor.sort({ age: 1 }) // For easier finding
      cursor.projection({ name: 1, 'toys.ballon': 1, _id: 0 })
      const docs = await cursor
      assert.deepStrictEqual(docs[0], { name: 'Jo', toys: { ballon: 'much' } })
      assert.deepStrictEqual(docs[1], { name: 'LM' })
      assert.deepStrictEqual(docs[2], { name: 'Grafitti' })
      assert.deepStrictEqual(docs[3], { name: 'Louis', toys: { ballon: 'yeah' } })
      assert.deepStrictEqual(docs[4], {})
    })
  }) // ==== End of 'Projections' ====
})
@@ -0,0 +1,83 @@
/* eslint-env mocha */
const testDb = 'workspace/test.db'
const { promises: fs } = require('fs')
const assert = require('assert').strict
const path = require('path')
const Datastore = require('../lib/datastore')
const Persistence = require('../lib/persistence')
const { exists } = require('./utils.test.js')

// Test that operations are executed in the right order
// We prevent Mocha from catching the exception we throw on purpose by remembering all current handlers, remove them and register them back after test ends
const testRightOrder = async d => {
  const docs = await d.findAsync({})
  assert.equal(docs.length, 0)

  await d.insertAsync({ a: 1 })
  await d.updateAsync({ a: 1 }, { a: 2 }, {})
  const docs2 = await d.findAsync({})
  assert.equal(docs2[0].a, 2)
  d.updateAsync({ a: 2 }, { a: 3 }, {}) // not awaiting
  d.executor.pushAsync(async () => { throw new Error('Some error') }) // not awaiting
  const docs3 = await d.findAsync({})
  assert.equal(docs3[0].a, 3)
}

// Note: The following test does not have any assertion because it
// is meant to address the deprecation warning:
// (node) warning: Recursive process.nextTick detected. This will break in the next version of node. Please use setImmediate for recursive deferral.
// see
const testEventLoopStarvation = async d => {
  const times = 1001
  let i = 0
  while (i < times) {
    i++
    d.findAsync({ bogus: 'search' })
  }
  await d.findAsync({ bogus: 'search' })
}

// Test that operations are executed in the right order even with no callback
const testExecutorWorksWithoutCallback = async d => {
  d.insertAsync({ a: 1 })
  d.insertAsync({ a: 2 })
  const docs = await d.findAsync({})
  assert.equal(docs.length, 2)
}

describe('Executor async', function () {
  describe('With persistent database', async () => {
    let d

    beforeEach(async () => {
      d = new Datastore({ filename: testDb })
      assert.equal(d.filename, testDb)
      assert.equal(d.inMemoryOnly, false)
      await Persistence.ensureDirectoryExistsAsync(path.dirname(testDb))
      if (await exists(testDb)) await fs.unlink(testDb)
      await d.loadDatabaseAsync()
      assert.equal(d.getAllData().length, 0)
    })

    it('Operations are executed in the right order', () => testRightOrder(d))

    it('Does not starve event loop and raise warning when more than 1000 callbacks are in queue', () => testEventLoopStarvation(d))

    it('Works in the right order even with no supplied callback', () => testExecutorWorksWithoutCallback(d))
  })
}) // ==== End of 'With persistent database' ====

describe('With non persistent database', function () {
  let d

  beforeEach(async () => {
    d = new Datastore({ inMemoryOnly: true })
    assert.equal(d.inMemoryOnly, true)
    await d.loadDatabaseAsync()
    assert.equal(d.getAllData().length, 0)
  })

  it('Operations are executed in the right order', () => testRightOrder(d))

  it('Works in the right order even with no supplied callback', () => testExecutorWorksWithoutCallback(d))
}) // ==== End of 'With non persistent database' ====
@@ -0,0 +1,46 @@
const { callbackify, promisify } = require('util')
const { promises: fs, constants: fsConstants } = require('fs')

const waterfallAsync = async tasks => {
  for (const task of tasks) {
    await promisify(task)()
  }
}

const waterfall = callbackify(waterfallAsync)

const eachAsync = async (arr, iterator) => Promise.all(arr.map(el => promisify(iterator)(el)))

const each = callbackify(eachAsync)

const apply = function (fn) {
  const args = Array.prototype.slice.call(arguments, 1)
  return function () {
    return fn.apply(
      null, args.concat(Array.prototype.slice.call(arguments))
    )
  }
}

const whilstAsync = async (test, fn) => {
  while (test()) await promisify(fn)()
}

const whilst = callbackify(whilstAsync)

const wait = delay => new Promise(resolve => {
  setTimeout(resolve, delay)
})

const exists = path => fs.access(path, fsConstants.F_OK).then(() => true, () => false)

// eslint-disable-next-line node/no-callback-literal
const existsCallback = (path, callback) => fs.access(path, fsConstants.F_OK).then(() => callback(true), () => callback(false))

module.exports.whilst = whilst
module.exports.apply = apply
module.exports.waterfall = waterfall
module.exports.each = each
module.exports.wait = wait
module.exports.exists = exists
module.exports.existsCallback = existsCallback
module.exports.callbackify = callbackify
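A short sketch, not part of the commit, of how these helpers can be combined in a test; the file name and log messages are illustrative.

// Illustrative only: chain two callback-style tasks, then use the promise-based helpers
const { apply, waterfall, wait, exists } = require('./utils.test.js')

const logLater = (message, done) => {
  setTimeout(() => {
    console.log(message)
    done()
  }, 10)
}

// waterfall runs callback-style tasks one after another; apply pre-binds their arguments
waterfall([
  apply(logLater, 'step one'),
  apply(logLater, 'step two')
], async err => {
  if (err) throw err
  await wait(20) // promise-based pause
  console.log(await exists('package.json')) // true when run from a directory containing package.json
})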
@@ -1,133 +1,59 @@
/* eslint-env mocha */
/**
 * Load and modify part of fs to ensure writeFile will crash after writing 5000 bytes
 */
const fs = require('fs')
const { Writable } = require('stream')
const { callbackify } = require('util')

fs.promises.writeFile = async function (path, data) {
  let onePassDone = false
  const options = { encoding: 'utf8', mode: 0o666, flag: 'w' } // we don't care about the actual options passed

  const filehandle = await fs.promises.open(path, options.flag, options.mode)
  const buffer = (data instanceof Buffer) ? data : Buffer.from('' + data, options.encoding || 'utf8')
  let length = buffer.length
  let offset = 0

  try {
    while (length > 0) {
      if (onePassDone) { process.exit(1) } // Crash on purpose before rewrite done
      const { bytesWritten } = await filehandle.write(buffer, offset, Math.min(5000, length)) // Force write by chunks of 5000 bytes to ensure data will be incomplete on crash
      onePassDone = true
      offset += bytesWritten
      length -= bytesWritten
    }
  } finally {
    await filehandle.close()
  }
}

class FakeFsWriteStream extends Writable {
  constructor (filename) {
    super()
    this.filename = filename
    this._content = Buffer.alloc(0)
  }

  _write (chunk, encoding, callback) {
    this._content = Buffer.concat([this._content, Buffer.from(chunk, encoding)])
    callback()
  }

  _end (chunk, encoding, callback) {
    this._content = Buffer.concat([this._content, Buffer.from(chunk, encoding)])
    callback()
  }

  close (callback) {
    callbackify(fs.promises.writeFile)(this.filename, this._content, 'utf8', callback)
  }
}

fs.createWriteStream = path => new FakeFsWriteStream(path)

// End of fs monkey patching
const Nedb = require('../lib/datastore.js')
const db = new Nedb({ filename: 'workspace/lac.db' })

db.loadDatabaseAsync() // no need to await
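The script above is meant to run in a separate process that crashes on purpose partway through a write. As a hedged sketch only, not part of the commit, this is how such a script could be driven from a test using Node's child_process; the script path and the expected exit code of 1 are assumptions taken from the process.exit(1) call above.

// Hypothetical driver: run the crash script in a child process and treat exit code 1 as success
const { execFile } = require('child_process')

execFile('node', ['test_lac/loadAndCrash.test.js'], (err, stdout, stderr) => {
  // The monkey-patched fs makes the script call process.exit(1) mid-write,
  // so a non-zero exit code here is the expected outcome, not a failure.
  if (err && err.code === 1) console.log('child crashed mid-write as intended')
  else console.log('unexpected result', err, stdout, stderr)
})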
@@ -1,64 +1,61 @@
const fs = require('fs')
const fsPromises = fs.promises
const Nedb = require('../lib/datastore')

const N = 64

// A console.error triggers an error of the parent test
const test = async () => {
  let filehandles = []
  try {
    for (let i = 0; i < 2 * N + 1; i++) {
      const filehandle = await fsPromises.open('./test_lac/openFdsTestFile', 'r')
      filehandles.push(filehandle)
    }
    console.error('No error occurred while opening a file too many times')
    process.exit(1)
  } catch (error) {
    if (error.code !== 'EMFILE') {
      console.error(error)
      process.exit(1)
    }
  } finally {
    for (const filehandle of filehandles) {
      await filehandle.close()
    }
    filehandles = []
  }

  try {
    for (let i = 0; i < N; i++) {
      const filehandle = await fsPromises.open('./test_lac/openFdsTestFile2', 'r')
      filehandles.push(filehandle)
    }
  } catch (error) {
    console.error(`An unexpected error occurred when opening file not too many times with error: ${error}`)
    process.exit(1)
  } finally {
    for (const filehandle of filehandles) {
      await filehandle.close()
    }
  }

  try {
    const db = new Nedb({ filename: './workspace/openfds.db' })
    await db.loadDatabaseAsync()
    await db.removeAsync({}, { multi: true })
    await db.insertAsync({ hello: 'world' })

    for (let i = 0; i < 2 * N + 1; i++) {
      await db.persistence.persistCachedDatabaseAsync()
    }
  } catch (error) {
    console.error(`Got unexpected error during one persistence operation with error: ${error}`)
  }
}

try {
  test()
} catch (error) {
  console.error(error)
  process.exit(1)
}