standard everything except browser-version

pull/2/head
Timothée Rebours 4 years ago
parent aa3302e5dc
commit 185ae680b8
  1. 22
      LICENSE
  2. 19
      LICENSE.md
  3. 12
      README.md
  4. 292
      benchmarks/commonUtilities.js
  5. 65
      benchmarks/ensureIndex.js
  6. 46
      benchmarks/find.js
  7. 48
      benchmarks/findOne.js
  8. 46
      benchmarks/findWithIn.js
  9. 44
      benchmarks/insert.js
  10. 52
      benchmarks/loadDatabase.js
  11. 56
      benchmarks/remove.js
  12. 57
      benchmarks/update.js
  13. 4
      index.js
  14. 189
      lib/cursor.js
  15. 11
      lib/customUtils.js
  16. 640
      lib/datastore.js
  17. 63
      lib/executor.js
  18. 245
      lib/indexes.js
  19. 610
      lib/model.js
  20. 342
      lib/persistence.js
  21. 125
      lib/storage.js
  22. 31
      package.json
  23. 1254
      test/cursor.test.js
  24. 32
      test/customUtil.test.js
  25. 3937
      test/db.test.js
  26. 238
      test/executor.test.js
  27. 1231
      test/indexes.test.js
  28. 2
      test/mocha.opts
  29. 2140
      test/model.test.js
  30. 1311
      test/persistence.test.js
  31. 94
      test_lac/loadAndCrash.test.js
  32. 83
      test_lac/openFds.test.js

@ -1,22 +0,0 @@
(The MIT License)
Copyright (c) 2013 Louis Chatriot <louis.chatriot@gmail.com>
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
'Software'), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

@ -0,0 +1,19 @@
Copyright (c) 2013 Louis Chatriot <louis.chatriot@gmail.com>
Copyright (c) 2021 Seald [contact@seald.io](mailto:contact@seald.io);
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the
'Software'), to deal in the Software without restriction, including without
limitation the rights to use, copy, modify, merge, publish, distribute,
sublicense, and/or sell copies of the Software, and to permit persons to whom
the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

@ -34,7 +34,7 @@ It is a subset of MongoDB's API (the most used operations).
* <a href="#inserting-documents">Inserting documents</a> * <a href="#inserting-documents">Inserting documents</a>
* <a href="#finding-documents">Finding documents</a> * <a href="#finding-documents">Finding documents</a>
* <a href="#basic-querying">Basic Querying</a> * <a href="#basic-querying">Basic Querying</a>
* <a href="#operators-lt-lte-gt-gte-in-nin-ne-exists-regex">Operators ($lt, $lte, $gt, $gte, $in, $nin, $ne, $exists, $regex)</a> * <a href="#operators-lt-lte-gt-gte-in-nin-ne-exists-regex">Operators ($lt, $lte, $gt, $gte, $in, $nin, $ne, $exists, $regex)</a>
* <a href="#array-fields">Array fields</a> * <a href="#array-fields">Array fields</a>
* <a href="#logical-operators-or-and-not-where">Logical operators $or, $and, $not, $where</a> * <a href="#logical-operators-or-and-not-where">Logical operators $or, $and, $not, $where</a>
* <a href="#sorting-and-paginating">Sorting and paginating</a> * <a href="#sorting-and-paginating">Sorting and paginating</a>
@ -236,14 +236,14 @@ db.findOne({ _id: 'id1' }, function (err, doc) {
}); });
``` ```
#### Operators ($lt, $lte, $gt, $gte, $in, $nin, $ne, $exists, $regex) #### Operators ($lt, $lte, $gt, $gte, $in, $nin, $ne, $exists, $regex)
The syntax is `{ field: { $op: value } }` where `$op` is any comparison operator: The syntax is `{ field: { $op: value } }` where `$op` is any comparison operator:
* `$lt`, `$lte`: less than, less than or equal * `$lt`, `$lte`: less than, less than or equal
* `$gt`, `$gte`: greater than, greater than or equal * `$gt`, `$gte`: greater than, greater than or equal
* `$in`: member of. `value` must be an array of values * `$in`: member of. `value` must be an array of values
* `$ne`, `$nin`: not equal, not a member of * `$ne`, `$nin`: not equal, not a member of
* `$exists`: checks whether the document possesses the property `field`. `value` should be true or false * `$exists`: checks whether the document possesses the property `field`. `value` should be true or false
* `$regex`: checks whether a string is matched by the regular expression. Contrary to MongoDB, the use of `$options` with `$regex` is not supported, because it doesn't give you more power than regex flags. Basic queries are more readable so only use the `$regex` operator when you need to use another operator with it (see example below) * `$regex`: checks whether a string is matched by the regular expression. Contrary to MongoDB, the use of `$options` with `$regex` is not supported, because it doesn't give you more power than regex flags. Basic queries are more readable so only use the `$regex` operator when you need to use another operator with it (see example below)
```javascript ```javascript
@ -262,8 +262,8 @@ db.find({ planet: { $in: ['Earth', 'Jupiter'] }}, function (err, docs) {
// docs contains Earth and Jupiter // docs contains Earth and Jupiter
}); });
// Using $exists // Using $exists
db.find({ satellites: { $exists: true } }, function (err, docs) { db.find({ satellites: { $exists: true } }, function (err, docs) {
// docs contains only Mars // docs contains only Mars
}); });
@ -724,4 +724,4 @@ You don't have time? You can support NeDB by sending bitcoins to this address: 1
## License ## License
See [License](LICENSE) See [License](LICENSE.md)

@ -1,65 +1,58 @@
/** /**
* Functions that are used in several benchmark tests * Functions that are used in several benchmark tests
*/ */
const fs = require('fs')
var customUtils = require('../lib/customUtils') const path = require('path')
, fs = require('fs') const Datastore = require('../lib/datastore')
, path = require('path') const Persistence = require('../lib/persistence')
, Datastore = require('../lib/datastore') let executeAsap
, Persistence = require('../lib/persistence')
, executeAsap // process.nextTick or setImmediate depending on your Node version
;
try { try {
executeAsap = setImmediate; executeAsap = setImmediate
} catch (e) { } catch (e) {
executeAsap = process.nextTick; executeAsap = process.nextTick
} }
/** /**
* Configure the benchmark * Configure the benchmark
*/ */
module.exports.getConfiguration = function (benchDb) { module.exports.getConfiguration = function (benchDb) {
var d, n const program = require('commander')
, program = require('commander')
;
program program
.option('-n --number [number]', 'Size of the collection to test on', parseInt) .option('-n --number [number]', 'Size of the collection to test on', parseInt)
.option('-i --with-index', 'Use an index') .option('-i --with-index', 'Use an index')
.option('-m --in-memory', 'Test with an in-memory only store') .option('-m --in-memory', 'Test with an in-memory only store')
.parse(process.argv); .parse(process.argv)
n = program.number || 10000; const n = program.number || 10000
console.log("----------------------------"); console.log('----------------------------')
console.log("Test with " + n + " documents"); console.log('Test with ' + n + ' documents')
console.log(program.withIndex ? "Use an index" : "Don't use an index"); console.log(program.withIndex ? 'Use an index' : "Don't use an index")
console.log(program.inMemory ? "Use an in-memory datastore" : "Use a persistent datastore"); console.log(program.inMemory ? 'Use an in-memory datastore' : 'Use a persistent datastore')
console.log("----------------------------"); console.log('----------------------------')
d = new Datastore({ filename: benchDb const d = new Datastore({
, inMemoryOnly: program.inMemory filename: benchDb,
}); inMemoryOnly: program.inMemory
})
return { n: n, d: d, program: program };
};
return { n: n, d: d, program: program }
}
/** /**
* Ensure the workspace exists and the db datafile is empty * Ensure the workspace exists and the db datafile is empty
*/ */
module.exports.prepareDb = function (filename, cb) { module.exports.prepareDb = function (filename, cb) {
Persistence.ensureDirectoryExists(path.dirname(filename), function () { Persistence.ensureDirectoryExists(path.dirname(filename), function () {
fs.exists(filename, function (exists) { fs.access(filename, fs.constants.F_OK, function (err) {
if (exists) { if (!err) {
fs.unlink(filename, cb); fs.unlink(filename, cb)
} else { return cb(); } } else { return cb() }
}); })
}); })
}; }
/** /**
* Return an array with the numbers from 0 to n-1, in a random order * Return an array with the numbers from 0 to n-1, in a random order
@ -67,242 +60,221 @@ module.exports.prepareDb = function (filename, cb) {
* Useful to get fair tests * Useful to get fair tests
*/ */
function getRandomArray (n) { function getRandomArray (n) {
var res = [] const res = []
, i, j, temp let i
; let j
let temp
for (i = 0; i < n; i += 1) { res[i] = i; } for (i = 0; i < n; i += 1) { res[i] = i }
for (i = n - 1; i >= 1; i -= 1) { for (i = n - 1; i >= 1; i -= 1) {
j = Math.floor((i + 1) * Math.random()); j = Math.floor((i + 1) * Math.random())
temp = res[i]; temp = res[i]
res[i] = res[j]; res[i] = res[j]
res[j] = temp; res[j] = temp
} }
return res; return res
}; };
module.exports.getRandomArray = getRandomArray; module.exports.getRandomArray = getRandomArray
/** /**
* Insert a certain number of documents for testing * Insert a certain number of documents for testing
*/ */
module.exports.insertDocs = function (d, n, profiler, cb) { module.exports.insertDocs = function (d, n, profiler, cb) {
var beg = new Date() const order = getRandomArray(n)
, order = getRandomArray(n)
;
profiler.step('Begin inserting ' + n + ' docs'); profiler.step('Begin inserting ' + n + ' docs')
function runFrom (i) { function runFrom (i) {
if (i === n) { // Finished if (i === n) { // Finished
var opsPerSecond = Math.floor(1000* n / profiler.elapsedSinceLastStep()); const opsPerSecond = Math.floor(1000 * n / profiler.elapsedSinceLastStep())
console.log("===== RESULT (insert) ===== " + opsPerSecond + " ops/s"); console.log('===== RESULT (insert) ===== ' + opsPerSecond + ' ops/s')
profiler.step('Finished inserting ' + n + ' docs'); profiler.step('Finished inserting ' + n + ' docs')
profiler.insertOpsPerSecond = opsPerSecond; profiler.insertOpsPerSecond = opsPerSecond
return cb(); return cb()
} }
// eslint-disable-next-line node/handle-callback-err
d.insert({ docNumber: order[i] }, function (err) { d.insert({ docNumber: order[i] }, function (err) {
executeAsap(function () { executeAsap(function () {
runFrom(i + 1); runFrom(i + 1)
}); })
}); })
}
runFrom(0)
} }
runFrom(0);
};
/** /**
* Find documents with find * Find documents with find
*/ */
module.exports.findDocs = function (d, n, profiler, cb) { module.exports.findDocs = function (d, n, profiler, cb) {
var beg = new Date() const order = getRandomArray(n)
, order = getRandomArray(n)
;
profiler.step("Finding " + n + " documents"); profiler.step('Finding ' + n + ' documents')
function runFrom (i) { function runFrom (i) {
if (i === n) { // Finished if (i === n) { // Finished
console.log("===== RESULT (find) ===== " + Math.floor(1000* n / profiler.elapsedSinceLastStep()) + " ops/s"); console.log('===== RESULT (find) ===== ' + Math.floor(1000 * n / profiler.elapsedSinceLastStep()) + ' ops/s')
profiler.step('Finished finding ' + n + ' docs'); profiler.step('Finished finding ' + n + ' docs')
return cb(); return cb()
} }
// eslint-disable-next-line node/handle-callback-err
d.find({ docNumber: order[i] }, function (err, docs) { d.find({ docNumber: order[i] }, function (err, docs) {
if (docs.length !== 1 || docs[0].docNumber !== order[i]) { return cb('One find didnt work'); } if (docs.length !== 1 || docs[0].docNumber !== order[i]) { return cb(new Error('One find didnt work')) }
executeAsap(function () { executeAsap(function () {
runFrom(i + 1); runFrom(i + 1)
}); })
}); })
}
runFrom(0)
} }
runFrom(0);
};
/** /**
* Find documents with find and the $in operator * Find documents with find and the $in operator
*/ */
module.exports.findDocsWithIn = function (d, n, profiler, cb) { module.exports.findDocsWithIn = function (d, n, profiler, cb) {
var beg = new Date() const ins = []
, order = getRandomArray(n) const arraySize = Math.min(10, n)
, ins = [], i, j
, arraySize = Math.min(10, n) // The array for $in needs to be smaller than n (inclusive)
;
// Preparing all the $in arrays, will take some time // Preparing all the $in arrays, will take some time
for (i = 0; i < n; i += 1) { for (let i = 0; i < n; i += 1) {
ins[i] = []; ins[i] = []
for (j = 0; j < arraySize; j += 1) { for (let j = 0; j < arraySize; j += 1) {
ins[i].push((i + j) % n); ins[i].push((i + j) % n)
} }
} }
profiler.step("Finding " + n + " documents WITH $IN OPERATOR"); profiler.step('Finding ' + n + ' documents WITH $IN OPERATOR')
function runFrom (i) { function runFrom (i) {
if (i === n) { // Finished if (i === n) { // Finished
console.log("===== RESULT (find with in selector) ===== " + Math.floor(1000* n / profiler.elapsedSinceLastStep()) + " ops/s"); console.log('===== RESULT (find with in selector) ===== ' + Math.floor(1000 * n / profiler.elapsedSinceLastStep()) + ' ops/s')
profiler.step('Finished finding ' + n + ' docs'); profiler.step('Finished finding ' + n + ' docs')
return cb(); return cb()
} }
// eslint-disable-next-line node/handle-callback-err
d.find({ docNumber: { $in: ins[i] } }, function (err, docs) { d.find({ docNumber: { $in: ins[i] } }, function (err, docs) {
if (docs.length !== arraySize) { return cb('One find didnt work'); } if (docs.length !== arraySize) { return cb(new Error('One find didnt work')) }
executeAsap(function () { executeAsap(function () {
runFrom(i + 1); runFrom(i + 1)
}); })
}); })
}
runFrom(0)
} }
runFrom(0);
};
/** /**
* Find documents with findOne * Find documents with findOne
*/ */
module.exports.findOneDocs = function (d, n, profiler, cb) { module.exports.findOneDocs = function (d, n, profiler, cb) {
var beg = new Date() const order = getRandomArray(n)
, order = getRandomArray(n)
;
profiler.step("FindingOne " + n + " documents"); profiler.step('FindingOne ' + n + ' documents')
function runFrom (i) { function runFrom (i) {
if (i === n) { // Finished if (i === n) { // Finished
console.log("===== RESULT (findOne) ===== " + Math.floor(1000* n / profiler.elapsedSinceLastStep()) + " ops/s"); console.log('===== RESULT (findOne) ===== ' + Math.floor(1000 * n / profiler.elapsedSinceLastStep()) + ' ops/s')
profiler.step('Finished finding ' + n + ' docs'); profiler.step('Finished finding ' + n + ' docs')
return cb(); return cb()
} }
// eslint-disable-next-line node/handle-callback-err
d.findOne({ docNumber: order[i] }, function (err, doc) { d.findOne({ docNumber: order[i] }, function (err, doc) {
if (!doc || doc.docNumber !== order[i]) { return cb('One find didnt work'); } if (!doc || doc.docNumber !== order[i]) { return cb(new Error('One find didnt work')) }
executeAsap(function () { executeAsap(function () {
runFrom(i + 1); runFrom(i + 1)
}); })
}); })
}
runFrom(0)
} }
runFrom(0);
};
/** /**
* Update documents * Update documents
* options is the same as the options object for update * options is the same as the options object for update
*/ */
module.exports.updateDocs = function (options, d, n, profiler, cb) { module.exports.updateDocs = function (options, d, n, profiler, cb) {
var beg = new Date() const order = getRandomArray(n)
, order = getRandomArray(n)
;
profiler.step("Updating " + n + " documents"); profiler.step('Updating ' + n + ' documents')
function runFrom (i) { function runFrom (i) {
if (i === n) { // Finished if (i === n) { // Finished
console.log("===== RESULT (update) ===== " + Math.floor(1000* n / profiler.elapsedSinceLastStep()) + " ops/s"); console.log('===== RESULT (update) ===== ' + Math.floor(1000 * n / profiler.elapsedSinceLastStep()) + ' ops/s')
profiler.step('Finished updating ' + n + ' docs'); profiler.step('Finished updating ' + n + ' docs')
return cb(); return cb()
} }
// Will not actually modify the document but will take the same time // Will not actually modify the document but will take the same time
d.update({ docNumber: order[i] }, { docNumber: order[i] }, options, function (err, nr) { d.update({ docNumber: order[i] }, { docNumber: order[i] }, options, function (err, nr) {
if (err) { return cb(err); } if (err) { return cb(err) }
if (nr !== 1) { return cb('One update didnt work'); } if (nr !== 1) { return cb(new Error('One update didnt work')) }
executeAsap(function () { executeAsap(function () {
runFrom(i + 1); runFrom(i + 1)
}); })
}); })
}
runFrom(0)
} }
runFrom(0);
};
/** /**
* Remove documents * Remove documents
* options is the same as the options object for update * options is the same as the options object for update
*/ */
module.exports.removeDocs = function (options, d, n, profiler, cb) { module.exports.removeDocs = function (options, d, n, profiler, cb) {
var beg = new Date() const order = getRandomArray(n)
, order = getRandomArray(n)
;
profiler.step("Removing " + n + " documents"); profiler.step('Removing ' + n + ' documents')
function runFrom (i) { function runFrom (i) {
if (i === n) { // Finished if (i === n) { // Finished
// opsPerSecond corresponds to 1 insert + 1 remove, needed to keep collection size at 10,000 // opsPerSecond corresponds to 1 insert + 1 remove, needed to keep collection size at 10,000
// We need to subtract the time taken by one insert to get the time actually taken by one remove // We need to subtract the time taken by one insert to get the time actually taken by one remove
var opsPerSecond = Math.floor(1000 * n / profiler.elapsedSinceLastStep()); const opsPerSecond = Math.floor(1000 * n / profiler.elapsedSinceLastStep())
var removeOpsPerSecond = Math.floor(1 / ((1 / opsPerSecond) - (1 / profiler.insertOpsPerSecond))) const removeOpsPerSecond = Math.floor(1 / ((1 / opsPerSecond) - (1 / profiler.insertOpsPerSecond)))
console.log("===== RESULT (remove) ===== " + removeOpsPerSecond + " ops/s"); console.log('===== RESULT (remove) ===== ' + removeOpsPerSecond + ' ops/s')
profiler.step('Finished removing ' + n + ' docs'); profiler.step('Finished removing ' + n + ' docs')
return cb(); return cb()
} }
d.remove({ docNumber: order[i] }, options, function (err, nr) { d.remove({ docNumber: order[i] }, options, function (err, nr) {
if (err) { return cb(err); } if (err) { return cb(err) }
if (nr !== 1) { return cb('One remove didnt work'); } if (nr !== 1) { return cb(new Error('One remove didnt work')) }
// eslint-disable-next-line node/handle-callback-err
d.insert({ docNumber: order[i] }, function (err) { // We need to reinsert the doc so that we keep the collection's size at n d.insert({ docNumber: order[i] }, function (err) { // We need to reinsert the doc so that we keep the collection's size at n
// So actually we're calculating the average time taken by one insert + one remove // So actually we're calculating the average time taken by one insert + one remove
executeAsap(function () { executeAsap(function () {
runFrom(i + 1); runFrom(i + 1)
}); })
}); })
}); })
}
runFrom(0)
} }
runFrom(0);
};
/** /**
* Load database * Load database
*/ */
module.exports.loadDatabase = function (d, n, profiler, cb) { module.exports.loadDatabase = function (d, n, profiler, cb) {
var beg = new Date() profiler.step('Loading the database ' + n + ' times')
, order = getRandomArray(n)
;
profiler.step("Loading the database " + n + " times");
function runFrom (i) { function runFrom (i) {
if (i === n) { // Finished if (i === n) { // Finished
console.log("===== RESULT ===== " + Math.floor(1000* n / profiler.elapsedSinceLastStep()) + " ops/s"); console.log('===== RESULT ===== ' + Math.floor(1000 * n / profiler.elapsedSinceLastStep()) + ' ops/s')
profiler.step('Finished loading a database' + n + ' times'); profiler.step('Finished loading a database' + n + ' times')
return cb(); return cb()
} }
// eslint-disable-next-line node/handle-callback-err
d.loadDatabase(function (err) { d.loadDatabase(function (err) {
executeAsap(function () { executeAsap(function () {
runFrom(i + 1); runFrom(i + 1)
}); })
}); })
}
runFrom(0)
} }
runFrom(0);
};

@ -1,51 +1,48 @@
var Datastore = require('../lib/datastore') const Datastore = require('../lib/datastore')
, benchDb = 'workspace/insert.bench.db' const benchDb = 'workspace/insert.bench.db'
, async = require('async') const async = require('async')
, commonUtilities = require('./commonUtilities') const commonUtilities = require('./commonUtilities')
, execTime = require('exec-time') const ExecTime = require('exec-time')
, profiler = new execTime('INSERT BENCH') const profiler = new ExecTime('INSERT BENCH')
, d = new Datastore(benchDb) const d = new Datastore(benchDb)
, program = require('commander') const program = require('commander')
, n
;
program program
.option('-n --number [number]', 'Size of the collection to test on', parseInt) .option('-n --number [number]', 'Size of the collection to test on', parseInt)
.option('-i --with-index', 'Test with an index') .option('-i --with-index', 'Test with an index')
.parse(process.argv); .parse(process.argv)
n = program.number || 10000; const n = program.number || 10000
console.log("----------------------------"); console.log('----------------------------')
console.log("Test with " + n + " documents"); console.log('Test with ' + n + ' documents')
console.log("----------------------------"); console.log('----------------------------')
async.waterfall([ async.waterfall([
async.apply(commonUtilities.prepareDb, benchDb) async.apply(commonUtilities.prepareDb, benchDb),
, function (cb) { function (cb) {
d.loadDatabase(function (err) { d.loadDatabase(function (err) {
if (err) { return cb(err); } if (err) { return cb(err) }
cb(); cb()
}); })
} },
, function (cb) { profiler.beginProfiling(); return cb(); } function (cb) { profiler.beginProfiling(); return cb() },
, async.apply(commonUtilities.insertDocs, d, n, profiler) async.apply(commonUtilities.insertDocs, d, n, profiler),
, function (cb) { function (cb) {
var i; let i
profiler.step('Begin calling ensureIndex ' + n + ' times'); profiler.step('Begin calling ensureIndex ' + n + ' times')
for (i = 0; i < n; i += 1) { for (i = 0; i < n; i += 1) {
d.ensureIndex({ fieldName: 'docNumber' }); d.ensureIndex({ fieldName: 'docNumber' })
delete d.indexes.docNumber; delete d.indexes.docNumber
} }
console.log("Average time for one ensureIndex: " + (profiler.elapsedSinceLastStep() / n) + "ms"); console.log('Average time for one ensureIndex: ' + (profiler.elapsedSinceLastStep() / n) + 'ms')
profiler.step('Finished calling ensureIndex ' + n + ' times'); profiler.step('Finished calling ensureIndex ' + n + ' times')
} }
], function (err) { ], function (err) {
profiler.step("Benchmark finished"); profiler.step('Benchmark finished')
if (err) { return console.log("An error was encountered: ", err); }
});
if (err) { return console.log('An error was encountered: ', err) }
})

@ -1,30 +1,26 @@
var Datastore = require('../lib/datastore') const benchDb = 'workspace/find.bench.db'
, benchDb = 'workspace/find.bench.db' const async = require('async')
, fs = require('fs') const ExecTime = require('exec-time')
, path = require('path') const profiler = new ExecTime('FIND BENCH')
, async = require('async') const commonUtilities = require('./commonUtilities')
, execTime = require('exec-time') const config = commonUtilities.getConfiguration(benchDb)
, profiler = new execTime('FIND BENCH') const d = config.d
, commonUtilities = require('./commonUtilities') const n = config.n
, config = commonUtilities.getConfiguration(benchDb)
, d = config.d
, n = config.n
;
async.waterfall([ async.waterfall([
async.apply(commonUtilities.prepareDb, benchDb) async.apply(commonUtilities.prepareDb, benchDb),
, function (cb) { function (cb) {
d.loadDatabase(function (err) { d.loadDatabase(function (err) {
if (err) { return cb(err); } if (err) { return cb(err) }
if (config.program.withIndex) { d.ensureIndex({ fieldName: 'docNumber' }); } if (config.program.withIndex) { d.ensureIndex({ fieldName: 'docNumber' }) }
cb(); cb()
}); })
} },
, function (cb) { profiler.beginProfiling(); return cb(); } function (cb) { profiler.beginProfiling(); return cb() },
, async.apply(commonUtilities.insertDocs, d, n, profiler) async.apply(commonUtilities.insertDocs, d, n, profiler),
, async.apply(commonUtilities.findDocs, d, n, profiler) async.apply(commonUtilities.findDocs, d, n, profiler)
], function (err) { ], function (err) {
profiler.step("Benchmark finished"); profiler.step('Benchmark finished')
if (err) { return console.log("An error was encountered: ", err); } if (err) { return console.log('An error was encountered: ', err) }
}); })

@ -1,31 +1,27 @@
var Datastore = require('../lib/datastore') const benchDb = 'workspace/findOne.bench.db'
, benchDb = 'workspace/findOne.bench.db' const async = require('async')
, fs = require('fs') const ExecTime = require('exec-time')
, path = require('path') const profiler = new ExecTime('FINDONE BENCH')
, async = require('async') const commonUtilities = require('./commonUtilities')
, execTime = require('exec-time') const config = commonUtilities.getConfiguration(benchDb)
, profiler = new execTime('FINDONE BENCH') const d = config.d
, commonUtilities = require('./commonUtilities') const n = config.n
, config = commonUtilities.getConfiguration(benchDb)
, d = config.d
, n = config.n
;
async.waterfall([ async.waterfall([
async.apply(commonUtilities.prepareDb, benchDb) async.apply(commonUtilities.prepareDb, benchDb),
, function (cb) { function (cb) {
d.loadDatabase(function (err) { d.loadDatabase(function (err) {
if (err) { return cb(err); } if (err) { return cb(err) }
if (config.program.withIndex) { d.ensureIndex({ fieldName: 'docNumber' }); } if (config.program.withIndex) { d.ensureIndex({ fieldName: 'docNumber' }) }
cb(); cb()
}); })
} },
, function (cb) { profiler.beginProfiling(); return cb(); } function (cb) { profiler.beginProfiling(); return cb() },
, async.apply(commonUtilities.insertDocs, d, n, profiler) async.apply(commonUtilities.insertDocs, d, n, profiler),
, function (cb) { setTimeout(function () {cb();}, 500); } function (cb) { setTimeout(function () { cb() }, 500) },
, async.apply(commonUtilities.findOneDocs, d, n, profiler) async.apply(commonUtilities.findOneDocs, d, n, profiler)
], function (err) { ], function (err) {
profiler.step("Benchmark finished"); profiler.step('Benchmark finished')
if (err) { return console.log("An error was encountered: ", err); } if (err) { return console.log('An error was encountered: ', err) }
}); })

@ -1,30 +1,26 @@
var Datastore = require('../lib/datastore') const benchDb = 'workspace/find.bench.db'
, benchDb = 'workspace/find.bench.db' const async = require('async')
, fs = require('fs') const ExecTime = require('exec-time')
, path = require('path') const profiler = new ExecTime('FIND BENCH')
, async = require('async') const commonUtilities = require('./commonUtilities')
, execTime = require('exec-time') const config = commonUtilities.getConfiguration(benchDb)
, profiler = new execTime('FIND BENCH') const d = config.d
, commonUtilities = require('./commonUtilities') const n = config.n
, config = commonUtilities.getConfiguration(benchDb)
, d = config.d
, n = config.n
;
async.waterfall([ async.waterfall([
async.apply(commonUtilities.prepareDb, benchDb) async.apply(commonUtilities.prepareDb, benchDb),
, function (cb) { function (cb) {
d.loadDatabase(function (err) { d.loadDatabase(function (err) {
if (err) { return cb(err); } if (err) { return cb(err) }
if (config.program.withIndex) { d.ensureIndex({ fieldName: 'docNumber' }); } if (config.program.withIndex) { d.ensureIndex({ fieldName: 'docNumber' }) }
cb(); cb()
}); })
} },
, function (cb) { profiler.beginProfiling(); return cb(); } function (cb) { profiler.beginProfiling(); return cb() },
, async.apply(commonUtilities.insertDocs, d, n, profiler) async.apply(commonUtilities.insertDocs, d, n, profiler),
, async.apply(commonUtilities.findDocsWithIn, d, n, profiler) async.apply(commonUtilities.findDocsWithIn, d, n, profiler)
], function (err) { ], function (err) {
profiler.step("Benchmark finished"); profiler.step('Benchmark finished')
if (err) { return console.log("An error was encountered: ", err); } if (err) { return console.log('An error was encountered: ', err) }
}); })

@ -1,33 +1,31 @@
var Datastore = require('../lib/datastore') const benchDb = 'workspace/insert.bench.db'
, benchDb = 'workspace/insert.bench.db' const async = require('async')
, async = require('async') const ExecTime = require('exec-time')
, execTime = require('exec-time') const profiler = new ExecTime('INSERT BENCH')
, profiler = new execTime('INSERT BENCH') const commonUtilities = require('./commonUtilities')
, commonUtilities = require('./commonUtilities') const config = commonUtilities.getConfiguration(benchDb)
, config = commonUtilities.getConfiguration(benchDb) const d = config.d
, d = config.d let n = config.n
, n = config.n
;
async.waterfall([ async.waterfall([
async.apply(commonUtilities.prepareDb, benchDb) async.apply(commonUtilities.prepareDb, benchDb),
, function (cb) { function (cb) {
d.loadDatabase(function (err) { d.loadDatabase(function (err) {
if (err) { return cb(err); } if (err) { return cb(err) }
if (config.program.withIndex) { if (config.program.withIndex) {
d.ensureIndex({ fieldName: 'docNumber' }); d.ensureIndex({ fieldName: 'docNumber' })
n = 2 * n; // We will actually insert twice as many documents n = 2 * n // We will actually insert twice as many documents
// because the index is slower when the collection is already // because the index is slower when the collection is already
// big. So the result given by the algorithm will be a bit worse than // big. So the result given by the algorithm will be a bit worse than
// actual performance // actual performance
} }
cb(); cb()
}); })
} },
, function (cb) { profiler.beginProfiling(); return cb(); } function (cb) { profiler.beginProfiling(); return cb() },
, async.apply(commonUtilities.insertDocs, d, n, profiler) async.apply(commonUtilities.insertDocs, d, n, profiler)
], function (err) { ], function (err) {
profiler.step("Benchmark finished"); profiler.step('Benchmark finished')
if (err) { return console.log("An error was encountered: ", err); } if (err) { return console.log('An error was encountered: ', err) }
}); })

@ -1,38 +1,34 @@
var Datastore = require('../lib/datastore') const Datastore = require('../lib/datastore')
, benchDb = 'workspace/loaddb.bench.db' const benchDb = 'workspace/loaddb.bench.db'
, fs = require('fs') const async = require('async')
, path = require('path') const commonUtilities = require('./commonUtilities')
, async = require('async') const ExecTime = require('exec-time')
, commonUtilities = require('./commonUtilities') const profiler = new ExecTime('LOADDB BENCH')
, execTime = require('exec-time') const d = new Datastore(benchDb)
, profiler = new execTime('LOADDB BENCH') const program = require('commander')
, d = new Datastore(benchDb)
, program = require('commander')
, n
;
program program
.option('-n --number [number]', 'Size of the collection to test on', parseInt) .option('-n --number [number]', 'Size of the collection to test on', parseInt)
.option('-i --with-index', 'Test with an index') .option('-i --with-index', 'Test with an index')
.parse(process.argv); .parse(process.argv)
n = program.number || 10000; const n = program.number || 10000
console.log("----------------------------"); console.log('----------------------------')
console.log("Test with " + n + " documents"); console.log('Test with ' + n + ' documents')
console.log(program.withIndex ? "Use an index" : "Don't use an index"); console.log(program.withIndex ? 'Use an index' : "Don't use an index")
console.log("----------------------------"); console.log('----------------------------')
async.waterfall([ async.waterfall([
async.apply(commonUtilities.prepareDb, benchDb) async.apply(commonUtilities.prepareDb, benchDb),
, function (cb) { function (cb) {
d.loadDatabase(cb); d.loadDatabase(cb)
} },
, function (cb) { profiler.beginProfiling(); return cb(); } function (cb) { profiler.beginProfiling(); return cb() },
, async.apply(commonUtilities.insertDocs, d, n, profiler) async.apply(commonUtilities.insertDocs, d, n, profiler),
, async.apply(commonUtilities.loadDatabase, d, n, profiler) async.apply(commonUtilities.loadDatabase, d, n, profiler)
], function (err) { ], function (err) {
profiler.step("Benchmark finished"); profiler.step('Benchmark finished')
if (err) { return console.log("An error was encountered: ", err); } if (err) { return console.log('An error was encountered: ', err) }
}); })

@ -1,38 +1,34 @@
var Datastore = require('../lib/datastore') const benchDb = 'workspace/remove.bench.db'
, benchDb = 'workspace/remove.bench.db' const async = require('async')
, fs = require('fs') const ExecTime = require('exec-time')
, path = require('path') const profiler = new ExecTime('REMOVE BENCH')
, async = require('async') const commonUtilities = require('./commonUtilities')
, execTime = require('exec-time') const config = commonUtilities.getConfiguration(benchDb)
, profiler = new execTime('REMOVE BENCH') const d = config.d
, commonUtilities = require('./commonUtilities') const n = config.n
, config = commonUtilities.getConfiguration(benchDb)
, d = config.d
, n = config.n
;
async.waterfall([ async.waterfall([
async.apply(commonUtilities.prepareDb, benchDb) async.apply(commonUtilities.prepareDb, benchDb),
, function (cb) { function (cb) {
d.loadDatabase(function (err) { d.loadDatabase(function (err) {
if (err) { return cb(err); } if (err) { return cb(err) }
if (config.program.withIndex) { d.ensureIndex({ fieldName: 'docNumber' }); } if (config.program.withIndex) { d.ensureIndex({ fieldName: 'docNumber' }) }
cb(); cb()
}); })
} },
, function (cb) { profiler.beginProfiling(); return cb(); } function (cb) { profiler.beginProfiling(); return cb() },
, async.apply(commonUtilities.insertDocs, d, n, profiler) async.apply(commonUtilities.insertDocs, d, n, profiler),
// Test with remove only one document // Test with remove only one document
, function (cb) { profiler.step('MULTI: FALSE'); return cb(); } function (cb) { profiler.step('MULTI: FALSE'); return cb() },
, async.apply(commonUtilities.removeDocs, { multi: false }, d, n, profiler) async.apply(commonUtilities.removeDocs, { multi: false }, d, n, profiler),
// Test with multiple documents // Test with multiple documents
, function (cb) { d.remove({}, { multi: true }, function () { return cb(); }); } function (cb) { d.remove({}, { multi: true }, function () { return cb() }) },
, async.apply(commonUtilities.insertDocs, d, n, profiler) async.apply(commonUtilities.insertDocs, d, n, profiler),
, function (cb) { profiler.step('MULTI: TRUE'); return cb(); } function (cb) { profiler.step('MULTI: TRUE'); return cb() },
, async.apply(commonUtilities.removeDocs, { multi: true }, d, n, profiler) async.apply(commonUtilities.removeDocs, { multi: true }, d, n, profiler)
], function (err) { ], function (err) {
profiler.step("Benchmark finished"); profiler.step('Benchmark finished')
if (err) { return console.log("An error was encountered: ", err); } if (err) { return console.log('An error was encountered: ', err) }
}); })

@ -1,39 +1,36 @@
var Datastore = require('../lib/datastore') const benchDb = 'workspace/update.bench.db'
, benchDb = 'workspace/update.bench.db' const async = require('async')
, fs = require('fs') const ExecTime = require('exec-time')
, path = require('path') const profiler = new ExecTime('UPDATE BENCH')
, async = require('async') const commonUtilities = require('./commonUtilities')
, execTime = require('exec-time') const config = commonUtilities.getConfiguration(benchDb)
, profiler = new execTime('UPDATE BENCH') const d = config.d
, commonUtilities = require('./commonUtilities') const n = config.n
, config = commonUtilities.getConfiguration(benchDb)
, d = config.d
, n = config.n
;
async.waterfall([ async.waterfall([
async.apply(commonUtilities.prepareDb, benchDb) async.apply(commonUtilities.prepareDb, benchDb),
, function (cb) { function (cb) {
d.loadDatabase(function (err) { d.loadDatabase(function (err) {
if (err) { return cb(err); } if (err) { return cb(err) }
if (config.program.withIndex) { d.ensureIndex({ fieldName: 'docNumber' }); } if (config.program.withIndex) { d.ensureIndex({ fieldName: 'docNumber' }) }
cb(); cb()
}); })
} },
, function (cb) { profiler.beginProfiling(); return cb(); } function (cb) { profiler.beginProfiling(); return cb() },
, async.apply(commonUtilities.insertDocs, d, n, profiler) async.apply(commonUtilities.insertDocs, d, n, profiler),
// Test with update only one document // Test with update only one document
, function (cb) { profiler.step('MULTI: FALSE'); return cb(); } function (cb) { profiler.step('MULTI: FALSE'); return cb() },
, async.apply(commonUtilities.updateDocs, { multi: false }, d, n, profiler) async.apply(commonUtilities.updateDocs, { multi: false }, d, n, profiler),
// Test with multiple documents // Test with multiple documents
, function (cb) { d.remove({}, { multi: true }, function (err) { return cb(); }); } // eslint-disable-next-line node/handle-callback-err
, async.apply(commonUtilities.insertDocs, d, n, profiler) function (cb) { d.remove({}, { multi: true }, function (err) { return cb() }) },
, function (cb) { profiler.step('MULTI: TRUE'); return cb(); } async.apply(commonUtilities.insertDocs, d, n, profiler),
, async.apply(commonUtilities.updateDocs, { multi: true }, d, n, profiler) function (cb) { profiler.step('MULTI: TRUE'); return cb() },
async.apply(commonUtilities.updateDocs, { multi: true }, d, n, profiler)
], function (err) { ], function (err) {
profiler.step("Benchmark finished"); profiler.step('Benchmark finished')
if (err) { return console.log("An error was encountered: ", err); } if (err) { return console.log('An error was encountered: ', err) }
}); })

@ -1,3 +1,3 @@
var Datastore = require('./lib/datastore'); const Datastore = require('./lib/datastore')
module.exports = Datastore; module.exports = Datastore

@ -1,112 +1,104 @@
/** /**
* Manage access to data, be it to find, update or remove it * Manage access to data, be it to find, update or remove it
*/ */
var model = require('./model') const model = require('./model')
, _ = require('underscore') const _ = require('underscore')
;
class Cursor {
/** /**
* Create a new cursor for this collection * Create a new cursor for this collection
* @param {Datastore} db - The datastore this cursor is bound to * @param {Datastore} db - The datastore this cursor is bound to
* @param {Query} query - The query this cursor will operate on * @param {Query} query - The query this cursor will operate on
* @param {Function} execFn - Handler to be executed after cursor has found the results and before the callback passed to find/findOne/update/remove * @param {Function} execFn - Handler to be executed after cursor has found the results and before the callback passed to find/findOne/update/remove
*/ */
function Cursor (db, query, execFn) { constructor (db, query, execFn) {
this.db = db; this.db = db
this.query = query || {}; this.query = query || {}
if (execFn) { this.execFn = execFn; } if (execFn) { this.execFn = execFn }
} }
/** /**
* Set a limit to the number of results * Set a limit to the number of results
*/ */
Cursor.prototype.limit = function(limit) { limit (limit) {
this._limit = limit; this._limit = limit
return this; return this
}; }
/** /**
* Skip a the number of results * Skip a the number of results
*/ */
Cursor.prototype.skip = function(skip) { skip (skip) {
this._skip = skip; this._skip = skip
return this; return this
}; }
/** /**
* Sort results of the query * Sort results of the query
* @param {SortQuery} sortQuery - SortQuery is { field: order }, field can use the dot-notation, order is 1 for ascending and -1 for descending * @param {SortQuery} sortQuery - SortQuery is { field: order }, field can use the dot-notation, order is 1 for ascending and -1 for descending
*/ */
Cursor.prototype.sort = function(sortQuery) { sort (sortQuery) {
this._sort = sortQuery; this._sort = sortQuery
return this; return this
}; }
/** /**
* Add the use of a projection * Add the use of a projection
* @param {Object} projection - MongoDB-style projection. {} means take all fields. Then it's { key1: 1, key2: 1 } to take only key1 and key2 * @param {Object} projection - MongoDB-style projection. {} means take all fields. Then it's { key1: 1, key2: 1 } to take only key1 and key2
* { key1: 0, key2: 0 } to omit only key1 and key2. Except _id, you can't mix takes and omits * { key1: 0, key2: 0 } to omit only key1 and key2. Except _id, you can't mix takes and omits
*/ */
Cursor.prototype.projection = function(projection) { projection (projection) {
this._projection = projection; this._projection = projection
return this; return this
}; }
/** /**
* Apply the projection * Apply the projection
*/ */
Cursor.prototype.project = function (candidates) { project (candidates) {
var res = [], self = this const res = []
, keepId, action, keys const self = this
; let action
if (this._projection === undefined || Object.keys(this._projection).length === 0) { if (this._projection === undefined || Object.keys(this._projection).length === 0) {
return candidates; return candidates
} }
keepId = this._projection._id === 0 ? false : true; const keepId = this._projection._id !== 0
this._projection = _.omit(this._projection, '_id'); this._projection = _.omit(this._projection, '_id')
// Check for consistency // Check for consistency
keys = Object.keys(this._projection); const keys = Object.keys(this._projection)
keys.forEach(function (k) { keys.forEach(function (k) {
if (action !== undefined && self._projection[k] !== action) { throw new Error("Can't both keep and omit fields except for _id"); } if (action !== undefined && self._projection[k] !== action) { throw new Error('Can\'t both keep and omit fields except for _id') }
action = self._projection[k]; action = self._projection[k]
}); })
// Do the actual projection // Do the actual projection
candidates.forEach(function (candidate) { candidates.forEach(function (candidate) {
var toPush; let toPush
if (action === 1) { // pick-type projection if (action === 1) { // pick-type projection
toPush = { $set: {} }; toPush = { $set: {} }
keys.forEach(function (k) { keys.forEach(function (k) {
toPush.$set[k] = model.getDotValue(candidate, k); toPush.$set[k] = model.getDotValue(candidate, k)
if (toPush.$set[k] === undefined) { delete toPush.$set[k]; } if (toPush.$set[k] === undefined) { delete toPush.$set[k] }
}); })
toPush = model.modify({}, toPush); toPush = model.modify({}, toPush)
} else { // omit-type projection } else { // omit-type projection
toPush = { $unset: {} }; toPush = { $unset: {} }
keys.forEach(function (k) { toPush.$unset[k] = true }); keys.forEach(function (k) { toPush.$unset[k] = true })
toPush = model.modify(candidate, toPush); toPush = model.modify(candidate, toPush)
} }
if (keepId) { if (keepId) {
toPush._id = candidate._id; toPush._id = candidate._id
} else { } else {
delete toPush._id; delete toPush._id
} }
res.push(toPush); res.push(toPush)
}); })
return res;
};
return res
}
/** /**
* Get all matching elements * Get all matching elements
@ -115,22 +107,26 @@ Cursor.prototype.project = function (candidates) {
* *
* @param {Function} callback - Signature: err, results * @param {Function} callback - Signature: err, results
*/ */
Cursor.prototype._exec = function(_callback) { _exec (_callback) {
var res = [], added = 0, skipped = 0, self = this let res = []
, error = null let added = 0
, i, keys, key let skipped = 0
; const self = this
let error = null
let i
let keys
let key
function callback (error, res) { function callback (error, res) {
if (self.execFn) { if (self.execFn) {
return self.execFn(error, res, _callback); return self.execFn(error, res, _callback)
} else { } else {
return _callback(error, res); return _callback(error, res)
} }
} }
this.db.getCandidates(this.query, function (err, candidates) { this.db.getCandidates(this.query, function (err, candidates) {
if (err) { return callback(err); } if (err) { return callback(err) }
try { try {
for (i = 0; i < candidates.length; i += 1) { for (i = 0; i < candidates.length; i += 1) {
@ -138,67 +134,68 @@ Cursor.prototype._exec = function(_callback) {
// If a sort is defined, wait for the results to be sorted before applying limit and skip // If a sort is defined, wait for the results to be sorted before applying limit and skip
if (!self._sort) { if (!self._sort) {
if (self._skip && self._skip > skipped) { if (self._skip && self._skip > skipped) {
skipped += 1; skipped += 1
} else { } else {
res.push(candidates[i]); res.push(candidates[i])
added += 1; added += 1
if (self._limit && self._limit <= added) { break; } if (self._limit && self._limit <= added) { break }
} }
} else { } else {
res.push(candidates[i]); res.push(candidates[i])
} }
} }
} }
} catch (err) { } catch (err) {
return callback(err); return callback(err)
} }
// Apply all sorts // Apply all sorts
if (self._sort) { if (self._sort) {
keys = Object.keys(self._sort); keys = Object.keys(self._sort)
// Sorting // Sorting
var criteria = []; const criteria = []
for (i = 0; i < keys.length; i++) { for (i = 0; i < keys.length; i++) {
key = keys[i]; key = keys[i]
criteria.push({ key: key, direction: self._sort[key] }); criteria.push({ key: key, direction: self._sort[key] })
} }
res.sort(function (a, b) { res.sort(function (a, b) {
var criterion, compare, i; let criterion
let compare
let i
for (i = 0; i < criteria.length; i++) { for (i = 0; i < criteria.length; i++) {
criterion = criteria[i]; criterion = criteria[i]
compare = criterion.direction * model.compareThings(model.getDotValue(a, criterion.key), model.getDotValue(b, criterion.key), self.db.compareStrings); compare = criterion.direction * model.compareThings(model.getDotValue(a, criterion.key), model.getDotValue(b, criterion.key), self.db.compareStrings)
if (compare !== 0) { if (compare !== 0) {
return compare; return compare
} }
} }
return 0; return 0
}); })
// Applying limit and skip // Applying limit and skip
var limit = self._limit || res.length const limit = self._limit || res.length
, skip = self._skip || 0; const skip = self._skip || 0
res = res.slice(skip, skip + limit); res = res.slice(skip, skip + limit)
} }
// Apply projection // Apply projection
try { try {
res = self.project(res); res = self.project(res)
} catch (e) { } catch (e) {
error = e; error = e
res = undefined; res = undefined
} }
return callback(error, res); return callback(error, res)
}); })
}; }
Cursor.prototype.exec = function () {
this.db.executor.push({ this: this, fn: this._exec, arguments: arguments });
};
exec () {
this.db.executor.push({ this: this, fn: this._exec, arguments: arguments })
}
}
// Interface // Interface
module.exports = Cursor; module.exports = Cursor

@ -1,5 +1,4 @@
var crypto = require('crypto') const crypto = require('crypto')
;
/** /**
* Return a random alphanumerical string of length len * Return a random alphanumerical string of length len
@ -12,11 +11,9 @@ var crypto = require('crypto')
function uid (len) { function uid (len) {
return crypto.randomBytes(Math.ceil(Math.max(8, len * 2))) return crypto.randomBytes(Math.ceil(Math.max(8, len * 2)))
.toString('base64') .toString('base64')
.replace(/[+\/]/g, '') .replace(/[+/]/g, '')
.slice(0, len); .slice(0, len)
} }
// Interface // Interface
module.exports.uid = uid; module.exports.uid = uid

@ -1,15 +1,14 @@
var customUtils = require('./customUtils') const customUtils = require('./customUtils')
, model = require('./model') const model = require('./model')
, async = require('async') const async = require('async')
, Executor = require('./executor') const Executor = require('./executor')
, Index = require('./indexes') const Index = require('./indexes')
, util = require('util') const util = require('util')
, _ = require('underscore') const _ = require('underscore')
, Persistence = require('./persistence') const Persistence = require('./persistence')
, Cursor = require('./cursor') const Cursor = require('./cursor')
;
class Datastore {
/** /**
* Create a new collection * Create a new collection
* @param {String} options.filename Optional, datastore will be in-memory only if not provided * @param {String} options.filename Optional, datastore will be in-memory only if not provided
@ -26,88 +25,86 @@ var customUtils = require('./customUtils')
* Event Emitter - Events * Event Emitter - Events
* * compaction.done - Fired whenever a compaction operation was finished * * compaction.done - Fired whenever a compaction operation was finished
*/ */
function Datastore (options) { constructor (options) {
var filename; let filename
// Retrocompatibility with v0.6 and before // Retrocompatibility with v0.6 and before
if (typeof options === 'string') { if (typeof options === 'string') {
filename = options; filename = options
this.inMemoryOnly = false; // Default this.inMemoryOnly = false // Default
} else { } else {
options = options || {}; options = options || {}
filename = options.filename; filename = options.filename
this.inMemoryOnly = options.inMemoryOnly || false; this.inMemoryOnly = options.inMemoryOnly || false
this.autoload = options.autoload || false; this.autoload = options.autoload || false
this.timestampData = options.timestampData || false; this.timestampData = options.timestampData || false
} }
// Determine whether in memory or persistent // Determine whether in memory or persistent
if (!filename || typeof filename !== 'string' || filename.length === 0) { if (!filename || typeof filename !== 'string' || filename.length === 0) {
this.filename = null; this.filename = null
this.inMemoryOnly = true; this.inMemoryOnly = true
} else { } else {
this.filename = filename; this.filename = filename
} }
// String comparison function // String comparison function
this.compareStrings = options.compareStrings; this.compareStrings = options.compareStrings
// Persistence handling // Persistence handling
this.persistence = new Persistence({ db: this, nodeWebkitAppName: options.nodeWebkitAppName this.persistence = new Persistence({
, afterSerialization: options.afterSerialization db: this,
, beforeDeserialization: options.beforeDeserialization nodeWebkitAppName: options.nodeWebkitAppName,
, corruptAlertThreshold: options.corruptAlertThreshold afterSerialization: options.afterSerialization,
}); beforeDeserialization: options.beforeDeserialization,
corruptAlertThreshold: options.corruptAlertThreshold
})
// This new executor is ready if we don't use persistence // This new executor is ready if we don't use persistence
// If we do, it will only be ready once loadDatabase is called // If we do, it will only be ready once loadDatabase is called
this.executor = new Executor(); this.executor = new Executor()
if (this.inMemoryOnly) { this.executor.ready = true; } if (this.inMemoryOnly) { this.executor.ready = true }
// Indexed by field name, dot notation can be used // Indexed by field name, dot notation can be used
// _id is always indexed and since _ids are generated randomly the underlying // _id is always indexed and since _ids are generated randomly the underlying
// binary is always well-balanced // binary is always well-balanced
this.indexes = {}; this.indexes = {}
this.indexes._id = new Index({ fieldName: '_id', unique: true }); this.indexes._id = new Index({ fieldName: '_id', unique: true })
this.ttlIndexes = {}; this.ttlIndexes = {}
// Queue a load of the database right away and call the onload handler // Queue a load of the database right away and call the onload handler
// By default (no onload handler), if there is an error there, no operation will be possible so warn the user by throwing an exception // By default (no onload handler), if there is an error there, no operation will be possible so warn the user by throwing an exception
if (this.autoload) { this.loadDatabase(options.onload || function (err) { if (this.autoload) {
if (err) { throw err; } this.loadDatabase(options.onload || function (err) {
}); } if (err) { throw err }
})
}
} }
util.inherits(Datastore, require('events').EventEmitter);
/** /**
* Load the database from the datafile, and trigger the execution of buffered commands if any * Load the database from the datafile, and trigger the execution of buffered commands if any
*/ */
Datastore.prototype.loadDatabase = function () { loadDatabase () {
this.executor.push({ this: this.persistence, fn: this.persistence.loadDatabase, arguments: arguments }, true); this.executor.push({ this: this.persistence, fn: this.persistence.loadDatabase, arguments: arguments }, true)
}; }
/** /**
* Get an array of all the data in the database * Get an array of all the data in the database
*/ */
Datastore.prototype.getAllData = function () { getAllData () {
return this.indexes._id.getAll(); return this.indexes._id.getAll()
}; }
/** /**
* Reset all currently defined indexes * Reset all currently defined indexes
*/ */
Datastore.prototype.resetIndexes = function (newData) { resetIndexes (newData) {
var self = this; const self = this
Object.keys(this.indexes).forEach(function (i) { Object.keys(this.indexes).forEach(function (i) {
self.indexes[i].reset(newData); self.indexes[i].reset(newData)
}); })
}; }
/** /**
* Ensure an index is kept for this field. Same parameters as lib/indexes * Ensure an index is kept for this field. Same parameters as lib/indexes
@ -119,125 +116,122 @@ Datastore.prototype.resetIndexes = function (newData) {
* @param {Number} options.expireAfterSeconds - Optional, if set this index becomes a TTL index (only works on Date fields, not arrays of Date) * @param {Number} options.expireAfterSeconds - Optional, if set this index becomes a TTL index (only works on Date fields, not arrays of Date)
* @param {Function} cb Optional callback, signature: err * @param {Function} cb Optional callback, signature: err
*/ */
Datastore.prototype.ensureIndex = function (options, cb) { ensureIndex (options, cb) {
var err let err
, callback = cb || function () {}; const callback = cb || function () {}
options = options || {}; options = options || {}
if (!options.fieldName) { if (!options.fieldName) {
err = new Error("Cannot create an index without a fieldName"); err = new Error('Cannot create an index without a fieldName')
err.missingFieldName = true; err.missingFieldName = true
return callback(err); return callback(err)
} }
if (this.indexes[options.fieldName]) { return callback(null); } if (this.indexes[options.fieldName]) { return callback(null) }
this.indexes[options.fieldName] = new Index(options); this.indexes[options.fieldName] = new Index(options)
if (options.expireAfterSeconds !== undefined) { this.ttlIndexes[options.fieldName] = options.expireAfterSeconds; } // With this implementation index creation is not necessary to ensure TTL but we stick with MongoDB's API here if (options.expireAfterSeconds !== undefined) { this.ttlIndexes[options.fieldName] = options.expireAfterSeconds } // With this implementation index creation is not necessary to ensure TTL but we stick with MongoDB's API here
try { try {
this.indexes[options.fieldName].insert(this.getAllData()); this.indexes[options.fieldName].insert(this.getAllData())
} catch (e) { } catch (e) {
delete this.indexes[options.fieldName]; delete this.indexes[options.fieldName]
return callback(e); return callback(e)
} }
// We may want to force all options to be persisted including defaults, not just the ones passed the index creation function // We may want to force all options to be persisted including defaults, not just the ones passed the index creation function
this.persistence.persistNewState([{ $$indexCreated: options }], function (err) { this.persistence.persistNewState([{ $$indexCreated: options }], function (err) {
if (err) { return callback(err); } if (err) { return callback(err) }
return callback(null); return callback(null)
}); })
}; }
/** /**
* Remove an index * Remove an index
* @param {String} fieldName * @param {String} fieldName
* @param {Function} cb Optional callback, signature: err * @param {Function} cb Optional callback, signature: err
*/ */
Datastore.prototype.removeIndex = function (fieldName, cb) { removeIndex (fieldName, cb) {
var callback = cb || function () {}; const callback = cb || function () {}
delete this.indexes[fieldName]; delete this.indexes[fieldName]
this.persistence.persistNewState([{ $$indexRemoved: fieldName }], function (err) { this.persistence.persistNewState([{ $$indexRemoved: fieldName }], function (err) {
if (err) { return callback(err); } if (err) { return callback(err) }
return callback(null); return callback(null)
}); })
}; }
/** /**
* Add one or several document(s) to all indexes * Add one or several document(s) to all indexes
*/ */
Datastore.prototype.addToIndexes = function (doc) { addToIndexes (doc) {
var i, failingIndex, error let i
, keys = Object.keys(this.indexes) let failingIndex
; let error
const keys = Object.keys(this.indexes)
for (i = 0; i < keys.length; i += 1) { for (i = 0; i < keys.length; i += 1) {
try { try {
this.indexes[keys[i]].insert(doc); this.indexes[keys[i]].insert(doc)
} catch (e) { } catch (e) {
failingIndex = i; failingIndex = i
error = e; error = e
break; break
} }
} }
// If an error happened, we need to rollback the insert on all other indexes // If an error happened, we need to rollback the insert on all other indexes
if (error) { if (error) {
for (i = 0; i < failingIndex; i += 1) { for (i = 0; i < failingIndex; i += 1) {
this.indexes[keys[i]].remove(doc); this.indexes[keys[i]].remove(doc)
} }
throw error; throw error
}
} }
};
/** /**
* Remove one or several document(s) from all indexes * Remove one or several document(s) from all indexes
*/ */
Datastore.prototype.removeFromIndexes = function (doc) { removeFromIndexes (doc) {
var self = this; const self = this
Object.keys(this.indexes).forEach(function (i) { Object.keys(this.indexes).forEach(function (i) {
self.indexes[i].remove(doc); self.indexes[i].remove(doc)
}); })
}; }
/** /**
* Update one or several documents in all indexes * Update one or several documents in all indexes
* To update multiple documents, oldDoc must be an array of { oldDoc, newDoc } pairs * To update multiple documents, oldDoc must be an array of { oldDoc, newDoc } pairs
* If one update violates a constraint, all changes are rolled back * If one update violates a constraint, all changes are rolled back
*/ */
Datastore.prototype.updateIndexes = function (oldDoc, newDoc) { updateIndexes (oldDoc, newDoc) {
var i, failingIndex, error let i
, keys = Object.keys(this.indexes) let failingIndex
; let error
const keys = Object.keys(this.indexes)
for (i = 0; i < keys.length; i += 1) { for (i = 0; i < keys.length; i += 1) {
try { try {
this.indexes[keys[i]].update(oldDoc, newDoc); this.indexes[keys[i]].update(oldDoc, newDoc)
} catch (e) { } catch (e) {
failingIndex = i; failingIndex = i
error = e; error = e
break; break
} }
} }
// If an error happened, we need to rollback the update on all other indexes // If an error happened, we need to rollback the update on all other indexes
if (error) { if (error) {
for (i = 0; i < failingIndex; i += 1) { for (i = 0; i < failingIndex; i += 1) {
this.indexes[keys[i]].revertUpdate(oldDoc, newDoc); this.indexes[keys[i]].revertUpdate(oldDoc, newDoc)
} }
throw error; throw error
}
} }
};
/** /**
* Return the list of candidates for a given query * Return the list of candidates for a given query
@ -252,86 +246,88 @@ Datastore.prototype.updateIndexes = function (oldDoc, newDoc) {
* @param {Boolean} dontExpireStaleDocs Optional, defaults to false, if true don't remove stale docs. Useful for the remove function which shouldn't be impacted by expirations * @param {Boolean} dontExpireStaleDocs Optional, defaults to false, if true don't remove stale docs. Useful for the remove function which shouldn't be impacted by expirations
* @param {Function} callback Signature err, candidates * @param {Function} callback Signature err, candidates
*/ */
Datastore.prototype.getCandidates = function (query, dontExpireStaleDocs, callback) { getCandidates (query, dontExpireStaleDocs, callback) {
var indexNames = Object.keys(this.indexes) const indexNames = Object.keys(this.indexes)
, self = this const self = this
, usableQueryKeys; let usableQueryKeys
if (typeof dontExpireStaleDocs === 'function') { if (typeof dontExpireStaleDocs === 'function') {
callback = dontExpireStaleDocs; callback = dontExpireStaleDocs
dontExpireStaleDocs = false; dontExpireStaleDocs = false
} }
async.waterfall([ async.waterfall([
// STEP 1: get candidates list by checking indexes from most to least frequent usecase // STEP 1: get candidates list by checking indexes from most to least frequent usecase
function (cb) { function (cb) {
// For a basic match // For a basic match
usableQueryKeys = []; usableQueryKeys = []
Object.keys(query).forEach(function (k) { Object.keys(query).forEach(function (k) {
if (typeof query[k] === 'string' || typeof query[k] === 'number' || typeof query[k] === 'boolean' || util.isDate(query[k]) || query[k] === null) { if (typeof query[k] === 'string' || typeof query[k] === 'number' || typeof query[k] === 'boolean' || util.types.isDate(query[k]) || query[k] === null) {
usableQueryKeys.push(k); usableQueryKeys.push(k)
} }
}); })
usableQueryKeys = _.intersection(usableQueryKeys, indexNames); usableQueryKeys = _.intersection(usableQueryKeys, indexNames)
if (usableQueryKeys.length > 0) { if (usableQueryKeys.length > 0) {
return cb(null, self.indexes[usableQueryKeys[0]].getMatching(query[usableQueryKeys[0]])); return cb(null, self.indexes[usableQueryKeys[0]].getMatching(query[usableQueryKeys[0]]))
} }
// For a $in match // For a $in match
usableQueryKeys = []; usableQueryKeys = []
Object.keys(query).forEach(function (k) { Object.keys(query).forEach(function (k) {
if (query[k] && query[k].hasOwnProperty('$in')) { if (query[k] && Object.prototype.hasOwnProperty.call(query[k], '$in')) {
usableQueryKeys.push(k); usableQueryKeys.push(k)
} }
}); })
usableQueryKeys = _.intersection(usableQueryKeys, indexNames); usableQueryKeys = _.intersection(usableQueryKeys, indexNames)
if (usableQueryKeys.length > 0) { if (usableQueryKeys.length > 0) {
return cb(null, self.indexes[usableQueryKeys[0]].getMatching(query[usableQueryKeys[0]].$in)); return cb(null, self.indexes[usableQueryKeys[0]].getMatching(query[usableQueryKeys[0]].$in))
} }
// For a comparison match // For a comparison match
usableQueryKeys = []; usableQueryKeys = []
Object.keys(query).forEach(function (k) { Object.keys(query).forEach(function (k) {
if (query[k] && (query[k].hasOwnProperty('$lt') || query[k].hasOwnProperty('$lte') || query[k].hasOwnProperty('$gt') || query[k].hasOwnProperty('$gte'))) { if (query[k] && (Object.prototype.hasOwnProperty.call(query[k], '$lt') || Object.prototype.hasOwnProperty.call(query[k], '$lte') || Object.prototype.hasOwnProperty.call(query[k], '$gt') || Object.prototype.hasOwnProperty.call(query[k], '$gte'))) {
usableQueryKeys.push(k); usableQueryKeys.push(k)
} }
}); })
usableQueryKeys = _.intersection(usableQueryKeys, indexNames); usableQueryKeys = _.intersection(usableQueryKeys, indexNames)
if (usableQueryKeys.length > 0) { if (usableQueryKeys.length > 0) {
return cb(null, self.indexes[usableQueryKeys[0]].getBetweenBounds(query[usableQueryKeys[0]])); return cb(null, self.indexes[usableQueryKeys[0]].getBetweenBounds(query[usableQueryKeys[0]]))
} }
// By default, return all the DB data // By default, return all the DB data
return cb(null, self.getAllData()); return cb(null, self.getAllData())
} },
// STEP 2: remove all expired documents // STEP 2: remove all expired documents
, function (docs) { function (docs) {
if (dontExpireStaleDocs) { return callback(null, docs); } if (dontExpireStaleDocs) { return callback(null, docs) }
var expiredDocsIds = [], validDocs = [], ttlIndexesFieldNames = Object.keys(self.ttlIndexes); const expiredDocsIds = []
const validDocs = []
const ttlIndexesFieldNames = Object.keys(self.ttlIndexes)
docs.forEach(function (doc) { docs.forEach(function (doc) {
var valid = true; let valid = true
ttlIndexesFieldNames.forEach(function (i) { ttlIndexesFieldNames.forEach(function (i) {
if (doc[i] !== undefined && util.isDate(doc[i]) && Date.now() > doc[i].getTime() + self.ttlIndexes[i] * 1000) { if (doc[i] !== undefined && util.types.isDate(doc[i]) && Date.now() > doc[i].getTime() + self.ttlIndexes[i] * 1000) {
valid = false; valid = false
} }
}); })
if (valid) { validDocs.push(doc); } else { expiredDocsIds.push(doc._id); } if (valid) { validDocs.push(doc) } else { expiredDocsIds.push(doc._id) }
}); })
async.eachSeries(expiredDocsIds, function (_id, cb) { async.eachSeries(expiredDocsIds, function (_id, cb) {
self._remove({ _id: _id }, {}, function (err) { self._remove({ _id: _id }, {}, function (err) {
if (err) { return callback(err); } if (err) { return callback(err) }
return cb(); return cb()
}); })
// eslint-disable-next-line node/handle-callback-err
}, function (err) { }, function (err) {
return callback(null, validDocs); // TODO: handle error
}); return callback(null, validDocs)
}]); })
}; }])
}
/** /**
* Insert a new document * Insert a new document
@ -339,120 +335,120 @@ Datastore.prototype.getCandidates = function (query, dontExpireStaleDocs, callba
* *
* @api private Use Datastore.insert which has the same signature * @api private Use Datastore.insert which has the same signature
*/ */
Datastore.prototype._insert = function (newDoc, cb) { _insert (newDoc, cb) {
var callback = cb || function () {} const callback = cb || function () {}
, preparedDoc let preparedDoc
;
try { try {
preparedDoc = this.prepareDocumentForInsertion(newDoc) preparedDoc = this.prepareDocumentForInsertion(newDoc)
this._insertInCache(preparedDoc); this._insertInCache(preparedDoc)
} catch (e) { } catch (e) {
return callback(e); return callback(e)
} }
this.persistence.persistNewState(util.isArray(preparedDoc) ? preparedDoc : [preparedDoc], function (err) { this.persistence.persistNewState(Array.isArray(preparedDoc) ? preparedDoc : [preparedDoc], function (err) {
if (err) { return callback(err); } if (err) { return callback(err) }
return callback(null, model.deepCopy(preparedDoc)); return callback(null, model.deepCopy(preparedDoc))
}); })
}; }
/** /**
* Create a new _id that's not already in use * Create a new _id that's not already in use
*/ */
Datastore.prototype.createNewId = function () { createNewId () {
var tentativeId = customUtils.uid(16); let tentativeId = customUtils.uid(16)
// Try as many times as needed to get an unused _id. As explained in customUtils, the probability of this ever happening is extremely small, so this is O(1) // Try as many times as needed to get an unused _id. As explained in customUtils, the probability of this ever happening is extremely small, so this is O(1)
if (this.indexes._id.getMatching(tentativeId).length > 0) { if (this.indexes._id.getMatching(tentativeId).length > 0) {
tentativeId = this.createNewId(); tentativeId = this.createNewId()
}
return tentativeId
} }
return tentativeId;
};
/** /**
* Prepare a document (or array of documents) to be inserted in a database * Prepare a document (or array of documents) to be inserted in a database
* Meaning adds _id and timestamps if necessary on a copy of newDoc to avoid any side effect on user input * Meaning adds _id and timestamps if necessary on a copy of newDoc to avoid any side effect on user input
* @api private * @api private
*/ */
Datastore.prototype.prepareDocumentForInsertion = function (newDoc) { prepareDocumentForInsertion (newDoc) {
var preparedDoc, self = this; let preparedDoc
const self = this
if (util.isArray(newDoc)) { if (Array.isArray(newDoc)) {
preparedDoc = []; preparedDoc = []
newDoc.forEach(function (doc) { preparedDoc.push(self.prepareDocumentForInsertion(doc)); }); newDoc.forEach(function (doc) { preparedDoc.push(self.prepareDocumentForInsertion(doc)) })
} else { } else {
preparedDoc = model.deepCopy(newDoc); preparedDoc = model.deepCopy(newDoc)
if (preparedDoc._id === undefined) { preparedDoc._id = this.createNewId(); } if (preparedDoc._id === undefined) { preparedDoc._id = this.createNewId() }
var now = new Date(); const now = new Date()
if (this.timestampData && preparedDoc.createdAt === undefined) { preparedDoc.createdAt = now; } if (this.timestampData && preparedDoc.createdAt === undefined) { preparedDoc.createdAt = now }
if (this.timestampData && preparedDoc.updatedAt === undefined) { preparedDoc.updatedAt = now; } if (this.timestampData && preparedDoc.updatedAt === undefined) { preparedDoc.updatedAt = now }
model.checkObject(preparedDoc); model.checkObject(preparedDoc)
} }
return preparedDoc; return preparedDoc
}; }
/** /**
* If newDoc is an array of documents, this will insert all documents in the cache * If newDoc is an array of documents, this will insert all documents in the cache
* @api private * @api private
*/ */
Datastore.prototype._insertInCache = function (preparedDoc) { _insertInCache (preparedDoc) {
if (util.isArray(preparedDoc)) { if (Array.isArray(preparedDoc)) {
this._insertMultipleDocsInCache(preparedDoc); this._insertMultipleDocsInCache(preparedDoc)
} else { } else {
this.addToIndexes(preparedDoc); this.addToIndexes(preparedDoc)
}
} }
};
/** /**
* If one insertion fails (e.g. because of a unique constraint), roll back all previous * If one insertion fails (e.g. because of a unique constraint), roll back all previous
* inserts and throws the error * inserts and throws the error
* @api private * @api private
*/ */
Datastore.prototype._insertMultipleDocsInCache = function (preparedDocs) { _insertMultipleDocsInCache (preparedDocs) {
var i, failingI, error; let i
let failingI
let error
for (i = 0; i < preparedDocs.length; i += 1) { for (i = 0; i < preparedDocs.length; i += 1) {
try { try {
this.addToIndexes(preparedDocs[i]); this.addToIndexes(preparedDocs[i])
} catch (e) { } catch (e) {
error = e; error = e
failingI = i; failingI = i
break; break
} }
} }
if (error) { if (error) {
for (i = 0; i < failingI; i += 1) { for (i = 0; i < failingI; i += 1) {
this.removeFromIndexes(preparedDocs[i]); this.removeFromIndexes(preparedDocs[i])
} }
throw error; throw error
}
} }
};
Datastore.prototype.insert = function () {
this.executor.push({ this: this, fn: this._insert, arguments: arguments });
};
insert () {
this.executor.push({ this: this, fn: this._insert, arguments: arguments })
}
/** /**
* Count all documents matching the query * Count all documents matching the query
* @param {Object} query MongoDB-style query * @param {Object} query MongoDB-style query
*/ */
Datastore.prototype.count = function(query, callback) { count (query, callback) {
var cursor = new Cursor(this, query, function(err, docs, callback) { const cursor = new Cursor(this, query, function (err, docs, callback) {
if (err) { return callback(err); } if (err) { return callback(err) }
return callback(null, docs.length); return callback(null, docs.length)
}); })
if (typeof callback === 'function') { if (typeof callback === 'function') {
cursor.exec(callback); cursor.exec(callback)
} else { } else {
return cursor; return cursor
}
} }
};
/** /**
* Find all documents matching the query * Find all documents matching the query
@ -460,76 +456,75 @@ Datastore.prototype.count = function(query, callback) {
* @param {Object} query MongoDB-style query * @param {Object} query MongoDB-style query
* @param {Object} projection MongoDB-style projection * @param {Object} projection MongoDB-style projection
*/ */
Datastore.prototype.find = function (query, projection, callback) { find (query, projection, callback) {
switch (arguments.length) { switch (arguments.length) {
case 1: case 1:
projection = {}; projection = {}
// callback is undefined, will return a cursor // callback is undefined, will return a cursor
break; break
case 2: case 2:
if (typeof projection === 'function') { if (typeof projection === 'function') {
callback = projection; callback = projection
projection = {}; projection = {}
} // If not assume projection is an object and callback undefined } // If not assume projection is an object and callback undefined
break; break
} }
var cursor = new Cursor(this, query, function(err, docs, callback) { const cursor = new Cursor(this, query, function (err, docs, callback) {
var res = [], i; const res = []
let i
if (err) { return callback(err); } if (err) { return callback(err) }
for (i = 0; i < docs.length; i += 1) { for (i = 0; i < docs.length; i += 1) {
res.push(model.deepCopy(docs[i])); res.push(model.deepCopy(docs[i]))
} }
return callback(null, res); return callback(null, res)
}); })
cursor.projection(projection); cursor.projection(projection)
if (typeof callback === 'function') { if (typeof callback === 'function') {
cursor.exec(callback); cursor.exec(callback)
} else { } else {
return cursor; return cursor
}
} }
};
/** /**
* Find one document matching the query * Find one document matching the query
* @param {Object} query MongoDB-style query * @param {Object} query MongoDB-style query
* @param {Object} projection MongoDB-style projection * @param {Object} projection MongoDB-style projection
*/ */
Datastore.prototype.findOne = function (query, projection, callback) { findOne (query, projection, callback) {
switch (arguments.length) { switch (arguments.length) {
case 1: case 1:
projection = {}; projection = {}
// callback is undefined, will return a cursor // callback is undefined, will return a cursor
break; break
case 2: case 2:
if (typeof projection === 'function') { if (typeof projection === 'function') {
callback = projection; callback = projection
projection = {}; projection = {}
} // If not assume projection is an object and callback undefined } // If not assume projection is an object and callback undefined
break; break
} }
var cursor = new Cursor(this, query, function(err, docs, callback) { const cursor = new Cursor(this, query, function (err, docs, callback) {
if (err) { return callback(err); } if (err) { return callback(err) }
if (docs.length === 1) { if (docs.length === 1) {
return callback(null, model.deepCopy(docs[0])); return callback(null, model.deepCopy(docs[0]))
} else { } else {
return callback(null, null); return callback(null, null)
} }
}); })
cursor.projection(projection).limit(1); cursor.projection(projection).limit(1)
if (typeof callback === 'function') { if (typeof callback === 'function') {
cursor.exec(callback); cursor.exec(callback)
} else { } else {
return cursor; return cursor
}
} }
};
/** /**
* Update all docs matching query * Update all docs matching query
@ -556,106 +551,107 @@ Datastore.prototype.findOne = function (query, projection, callback) {
* *
* @api private Use Datastore.update which has the same signature * @api private Use Datastore.update which has the same signature
*/ */
Datastore.prototype._update = function (query, updateQuery, options, cb) { _update (query, updateQuery, options, cb) {
var callback const self = this
, self = this let numReplaced = 0
, numReplaced = 0 let i
, multi, upsert
, i if (typeof options === 'function') {
; cb = options
options = {}
if (typeof options === 'function') { cb = options; options = {}; } }
callback = cb || function () {}; const callback = cb || function () {}
multi = options.multi !== undefined ? options.multi : false; const multi = options.multi !== undefined ? options.multi : false
upsert = options.upsert !== undefined ? options.upsert : false; const upsert = options.upsert !== undefined ? options.upsert : false
async.waterfall([ async.waterfall([
function (cb) { // If upsert option is set, check whether we need to insert the doc function (cb) { // If upsert option is set, check whether we need to insert the doc
if (!upsert) { return cb(); } if (!upsert) { return cb() }
// Need to use an internal function not tied to the executor to avoid deadlock // Need to use an internal function not tied to the executor to avoid deadlock
var cursor = new Cursor(self, query); const cursor = new Cursor(self, query)
cursor.limit(1)._exec(function (err, docs) { cursor.limit(1)._exec(function (err, docs) {
if (err) { return callback(err); } if (err) { return callback(err) }
if (docs.length === 1) { if (docs.length === 1) {
return cb(); return cb()
} else { } else {
var toBeInserted; let toBeInserted
try { try {
model.checkObject(updateQuery); model.checkObject(updateQuery)
// updateQuery is a simple object with no modifier, use it as the document to insert // updateQuery is a simple object with no modifier, use it as the document to insert
toBeInserted = updateQuery; toBeInserted = updateQuery
} catch (e) { } catch (e) {
// updateQuery contains modifiers, use the find query as the base, // updateQuery contains modifiers, use the find query as the base,
// strip it from all operators and update it according to updateQuery // strip it from all operators and update it according to updateQuery
try { try {
toBeInserted = model.modify(model.deepCopy(query, true), updateQuery); toBeInserted = model.modify(model.deepCopy(query, true), updateQuery)
} catch (err) { } catch (err) {
return callback(err); return callback(err)
} }
} }
return self._insert(toBeInserted, function (err, newDoc) { return self._insert(toBeInserted, function (err, newDoc) {
if (err) { return callback(err); } if (err) { return callback(err) }
return callback(null, 1, newDoc, true); return callback(null, 1, newDoc, true)
}); })
} }
}); })
} },
, function () { // Perform the update function () { // Perform the update
var modifiedDoc , modifications = [], createdAt; let modifiedDoc
const modifications = []
let createdAt
self.getCandidates(query, function (err, candidates) { self.getCandidates(query, function (err, candidates) {
if (err) { return callback(err); } if (err) { return callback(err) }
// Preparing update (if an error is thrown here neither the datafile nor // Preparing update (if an error is thrown here neither the datafile nor
// the in-memory indexes are affected) // the in-memory indexes are affected)
try { try {
for (i = 0; i < candidates.length; i += 1) { for (i = 0; i < candidates.length; i += 1) {
if (model.match(candidates[i], query) && (multi || numReplaced === 0)) { if (model.match(candidates[i], query) && (multi || numReplaced === 0)) {
numReplaced += 1; numReplaced += 1
if (self.timestampData) { createdAt = candidates[i].createdAt; } if (self.timestampData) { createdAt = candidates[i].createdAt }
modifiedDoc = model.modify(candidates[i], updateQuery); modifiedDoc = model.modify(candidates[i], updateQuery)
if (self.timestampData) { if (self.timestampData) {
modifiedDoc.createdAt = createdAt; modifiedDoc.createdAt = createdAt
modifiedDoc.updatedAt = new Date(); modifiedDoc.updatedAt = new Date()
} }
modifications.push({ oldDoc: candidates[i], newDoc: modifiedDoc }); modifications.push({ oldDoc: candidates[i], newDoc: modifiedDoc })
} }
} }
} catch (err) { } catch (err) {
return callback(err); return callback(err)
} }
// Change the docs in memory // Change the docs in memory
try { try {
self.updateIndexes(modifications); self.updateIndexes(modifications)
} catch (err) { } catch (err) {
return callback(err); return callback(err)
} }
// Update the datafile // Update the datafile
var updatedDocs = _.pluck(modifications, 'newDoc'); const updatedDocs = _.pluck(modifications, 'newDoc')
self.persistence.persistNewState(updatedDocs, function (err) { self.persistence.persistNewState(updatedDocs, function (err) {
if (err) { return callback(err); } if (err) { return callback(err) }
if (!options.returnUpdatedDocs) { if (!options.returnUpdatedDocs) {
return callback(null, numReplaced); return callback(null, numReplaced)
} else { } else {
var updatedDocsDC = []; let updatedDocsDC = []
updatedDocs.forEach(function (doc) { updatedDocsDC.push(model.deepCopy(doc)); }); updatedDocs.forEach(function (doc) { updatedDocsDC.push(model.deepCopy(doc)) })
if (! multi) { updatedDocsDC = updatedDocsDC[0]; } if (!multi) { updatedDocsDC = updatedDocsDC[0] }
return callback(null, numReplaced, updatedDocsDC); return callback(null, numReplaced, updatedDocsDC)
}
})
})
}])
} }
});
});
}]);
};
Datastore.prototype.update = function () {
this.executor.push({ this: this, fn: this._update, arguments: arguments });
};
update () {
this.executor.push({ this: this, fn: this._update, arguments: arguments })
}
/** /**
* Remove all docs matching the query * Remove all docs matching the query
@ -667,39 +663,43 @@ Datastore.prototype.update = function () {
* *
* @api private Use Datastore.remove which has the same signature * @api private Use Datastore.remove which has the same signature
*/ */
Datastore.prototype._remove = function (query, options, cb) { _remove (query, options, cb) {
var callback const self = this
, self = this, numRemoved = 0, removedDocs = [], multi let numRemoved = 0
; const removedDocs = []
if (typeof options === 'function') { cb = options; options = {}; } if (typeof options === 'function') {
callback = cb || function () {}; cb = options
multi = options.multi !== undefined ? options.multi : false; options = {}
}
const callback = cb || function () {}
const multi = options.multi !== undefined ? options.multi : false
this.getCandidates(query, true, function (err, candidates) { this.getCandidates(query, true, function (err, candidates) {
if (err) { return callback(err); } if (err) { return callback(err) }
try { try {
candidates.forEach(function (d) { candidates.forEach(function (d) {
if (model.match(d, query) && (multi || numRemoved === 0)) { if (model.match(d, query) && (multi || numRemoved === 0)) {
numRemoved += 1; numRemoved += 1
removedDocs.push({ $$deleted: true, _id: d._id }); removedDocs.push({ $$deleted: true, _id: d._id })
self.removeFromIndexes(d); self.removeFromIndexes(d)
} }
}); })
} catch (err) { return callback(err); } } catch (err) { return callback(err) }
self.persistence.persistNewState(removedDocs, function (err) { self.persistence.persistNewState(removedDocs, function (err) {
if (err) { return callback(err); } if (err) { return callback(err) }
return callback(null, numRemoved); return callback(null, numRemoved)
}); })
}); })
}; }
Datastore.prototype.remove = function () {
this.executor.push({ this: this, fn: this._remove, arguments: arguments });
};
remove () {
this.executor.push({ this: this, fn: this._remove, arguments: arguments })
}
}
util.inherits(Datastore, require('events').EventEmitter)
module.exports = Datastore; module.exports = Datastore

@ -1,47 +1,44 @@
/**
 * Responsible for sequentially executing actions on the database
 */
const async = require('async')

class Executor {
  constructor () {
    this.buffer = []
    this.ready = false

    // async queue with concurrency 1: executes all commands one by one, in order
    this.queue = async.queue(function (task, done) {
      // task.arguments is an array-like object on which adding a new field
      // doesn't work, so copy it into a real array first
      const newArguments = Array.prototype.slice.call(task.arguments)
      const lastArg = task.arguments[task.arguments.length - 1]

      // Always tell the queue the task is complete; execute the user callback if any was given
      if (typeof lastArg === 'function') {
        // Callback was supplied: wrap it so the queue is released first
        newArguments[newArguments.length - 1] = function () {
          if (typeof setImmediate === 'function') setImmediate(done)
          else process.nextTick(done)
          lastArg.apply(null, arguments)
        }
      } else if (!lastArg && task.arguments.length !== 0) {
        // false/undefined/null supplied as callback
        newArguments[newArguments.length - 1] = function () { done() }
      } else {
        // Nothing supplied as callback
        newArguments.push(function () { done() })
      }

      task.fn.apply(task.this, newArguments)
    }, 1)
  }

  /**
   * If executor is ready, queue task (and process it immediately if executor was idle)
   * If not, buffer task for later processing
   * @param {Object} task { this, fn, arguments }; the last item of arguments
   *                 cannot be false/undefined/null unless it is the callback slot
   * @param {Boolean} forceQueuing Optional (defaults to false) queue the task even if not ready
   */
  push (task, forceQueuing) {
    if (this.ready || forceQueuing) this.queue.push(task)
    else this.buffer.push(task)
  }

  /**
   * Queue all tasks in buffer (in the same order they came in)
   * Automatically sets executor as ready
   */
  processBuffer () {
    this.ready = true
    this.buffer.forEach(task => { this.queue.push(task) })
    this.buffer = []
  }
}
// Interface // Interface
module.exports = Executor; module.exports = Executor

@ -1,30 +1,28 @@
var BinarySearchTree = require('binary-search-tree').AVLTree const BinarySearchTree = require('@seald-io/binary-search-tree').BinarySearchTree
, model = require('./model') const model = require('./model')
, _ = require('underscore') const _ = require('underscore')
, util = require('util')
;
/**
 * Two indexed pointers are equal iff they point to the same place:
 * primitives compare by value, objects/arrays by reference.
 */
function checkValueEquality (a, b) {
  // Strict equality only — no coercion
  return a === b
}
/**
 * Type-aware projection used to deduplicate index keys.
 * Maps primitives and dates to tagged strings so that values of different
 * types never collide in the unique check (e.g. the number 5 vs the string "5").
 * Arrays and plain objects are returned as-is and compared by reference.
 * @param {*} elt value to project
 * @return {string|*} tagged string for null/string/boolean/number/Date, the value itself otherwise
 */
function projectForUnique (elt) {
  if (elt === null) { return '$null' }
  if (typeof elt === 'string') { return '$string' + elt }
  if (typeof elt === 'boolean') { return '$boolean' + elt }
  if (typeof elt === 'number') { return '$number' + elt }
  // Bug fix: this branch previously tested Array.isArray but called getTime(),
  // which throws on arrays and never fires for Dates. The '$date' tag makes
  // clear a Date check was intended.
  if (elt instanceof Date) { return '$date' + elt.getTime() }

  return elt // Arrays and objects, will check for pointer equality
}
class Index {
/** /**
* Create a new index * Create a new index
* All methods on an index guarantee that either the whole operation was successful and the index changed * All methods on an index guarantee that either the whole operation was successful and the index changed
@ -33,72 +31,73 @@ function projectForUnique (elt) {
* @param {Boolean} options.unique Optional, enforce a unique constraint (default: false) * @param {Boolean} options.unique Optional, enforce a unique constraint (default: false)
* @param {Boolean} options.sparse Optional, allow a sparse index (we can have documents for which fieldName is undefined) (default: false) * @param {Boolean} options.sparse Optional, allow a sparse index (we can have documents for which fieldName is undefined) (default: false)
*/ */
function Index (options) { constructor (options) {
this.fieldName = options.fieldName; this.fieldName = options.fieldName
this.unique = options.unique || false; this.unique = options.unique || false
this.sparse = options.sparse || false; this.sparse = options.sparse || false
this.treeOptions = { unique: this.unique, compareKeys: model.compareThings, checkValueEquality: checkValueEquality }; this.treeOptions = { unique: this.unique, compareKeys: model.compareThings, checkValueEquality: checkValueEquality }
this.reset(); // No data in the beginning this.reset() // No data in the beginning
} }
/** /**
* Reset an index * Reset an index
* @param {Document or Array of documents} newData Optional, data to initialize the index with * @param {Document or Array of documents} newData Optional, data to initialize the index with
* If an error is thrown during insertion, the index is not modified * If an error is thrown during insertion, the index is not modified
*/ */
Index.prototype.reset = function (newData) { reset (newData) {
this.tree = new BinarySearchTree(this.treeOptions); this.tree = new BinarySearchTree(this.treeOptions)
if (newData) { this.insert(newData); }
};
if (newData) { this.insert(newData) }
}
/** /**
* Insert a new document in the index * Insert a new document in the index
* If an array is passed, we insert all its elements (if one insertion fails the index is not modified) * If an array is passed, we insert all its elements (if one insertion fails the index is not modified)
* O(log(n)) * O(log(n))
*/ */
Index.prototype.insert = function (doc) { insert (doc) {
var key, self = this let keys
, keys, i, failingI, error let i
; let failingI
let error
if (util.isArray(doc)) { this.insertMultipleDocs(doc); return; }
if (Array.isArray(doc)) {
this.insertMultipleDocs(doc)
return
}
key = model.getDotValue(doc, this.fieldName); const key = model.getDotValue(doc, this.fieldName)
// We don't index documents that don't contain the field if the index is sparse // We don't index documents that don't contain the field if the index is sparse
if (key === undefined && this.sparse) { return; } if (key === undefined && this.sparse) { return }
if (!util.isArray(key)) { if (!Array.isArray(key)) {
this.tree.insert(key, doc); this.tree.insert(key, doc)
} else { } else {
// If an insert fails due to a unique constraint, roll back all inserts before it // If an insert fails due to a unique constraint, roll back all inserts before it
keys = _.uniq(key, projectForUnique); keys = _.uniq(key, projectForUnique)
for (i = 0; i < keys.length; i += 1) { for (i = 0; i < keys.length; i += 1) {
try { try {
this.tree.insert(keys[i], doc); this.tree.insert(keys[i], doc)
} catch (e) { } catch (e) {
error = e; error = e
failingI = i; failingI = i
break; break
} }
} }
if (error) { if (error) {
for (i = 0; i < failingI; i += 1) { for (i = 0; i < failingI; i += 1) {
this.tree.delete(keys[i], doc); this.tree.delete(keys[i], doc)
} }
throw error; throw error
}
} }
} }
};
/** /**
* Insert an array of documents in the index * Insert an array of documents in the index
@ -106,28 +105,29 @@ Index.prototype.insert = function (doc) {
* *
* @API private * @API private
*/ */
Index.prototype.insertMultipleDocs = function (docs) { insertMultipleDocs (docs) {
var i, error, failingI; let i
let error
let failingI
for (i = 0; i < docs.length; i += 1) { for (i = 0; i < docs.length; i += 1) {
try { try {
this.insert(docs[i]); this.insert(docs[i])
} catch (e) { } catch (e) {
error = e; error = e
failingI = i; failingI = i
break; break
} }
} }
if (error) { if (error) {
for (i = 0; i < failingI; i += 1) { for (i = 0; i < failingI; i += 1) {
this.remove(docs[i]); this.remove(docs[i])
} }
throw error; throw error
}
} }
};
/** /**
* Remove a document from the index * Remove a document from the index
@ -135,43 +135,47 @@ Index.prototype.insertMultipleDocs = function (docs) {
* The remove operation is safe with regards to the 'unique' constraint * The remove operation is safe with regards to the 'unique' constraint
* O(log(n)) * O(log(n))
*/ */
Index.prototype.remove = function (doc) { remove (doc) {
var key, self = this; const self = this
if (util.isArray(doc)) { doc.forEach(function (d) { self.remove(d); }); return; } if (Array.isArray(doc)) {
doc.forEach(function (d) { self.remove(d) })
return
}
key = model.getDotValue(doc, this.fieldName); const key = model.getDotValue(doc, this.fieldName)
if (key === undefined && this.sparse) { return; } if (key === undefined && this.sparse) { return }
if (!util.isArray(key)) { if (!Array.isArray(key)) {
this.tree.delete(key, doc); this.tree.delete(key, doc)
} else { } else {
_.uniq(key, projectForUnique).forEach(function (_key) { _.uniq(key, projectForUnique).forEach(function (_key) {
self.tree.delete(_key, doc); self.tree.delete(_key, doc)
}); })
}
} }
};
/** /**
* Update a document in the index * Update a document in the index
* If a constraint is violated, changes are rolled back and an error thrown * If a constraint is violated, changes are rolled back and an error thrown
* Naive implementation, still in O(log(n)) * Naive implementation, still in O(log(n))
*/ */
Index.prototype.update = function (oldDoc, newDoc) { update (oldDoc, newDoc) {
if (util.isArray(oldDoc)) { this.updateMultipleDocs(oldDoc); return; } if (Array.isArray(oldDoc)) {
this.updateMultipleDocs(oldDoc)
return
}
this.remove(oldDoc); this.remove(oldDoc)
try { try {
this.insert(newDoc); this.insert(newDoc)
} catch (e) { } catch (e) {
this.insert(oldDoc); this.insert(oldDoc)
throw e; throw e
}
} }
};
/** /**
* Update multiple documents in the index * Update multiple documents in the index
@ -181,82 +185,82 @@ Index.prototype.update = function (oldDoc, newDoc) {
* *
* @API private * @API private
*/ */
Index.prototype.updateMultipleDocs = function (pairs) { updateMultipleDocs (pairs) {
var i, failingI, error; let i
let failingI
let error
for (i = 0; i < pairs.length; i += 1) { for (i = 0; i < pairs.length; i += 1) {
this.remove(pairs[i].oldDoc); this.remove(pairs[i].oldDoc)
} }
for (i = 0; i < pairs.length; i += 1) { for (i = 0; i < pairs.length; i += 1) {
try { try {
this.insert(pairs[i].newDoc); this.insert(pairs[i].newDoc)
} catch (e) { } catch (e) {
error = e; error = e
failingI = i; failingI = i
break; break
} }
} }
// If an error was raised, roll back changes in the inverse order // If an error was raised, roll back changes in the inverse order
if (error) { if (error) {
for (i = 0; i < failingI; i += 1) { for (i = 0; i < failingI; i += 1) {
this.remove(pairs[i].newDoc); this.remove(pairs[i].newDoc)
} }
for (i = 0; i < pairs.length; i += 1) { for (i = 0; i < pairs.length; i += 1) {
this.insert(pairs[i].oldDoc); this.insert(pairs[i].oldDoc)
} }
throw error; throw error
}
} }
};
/** /**
* Revert an update * Revert an update
*/ */
Index.prototype.revertUpdate = function (oldDoc, newDoc) { revertUpdate (oldDoc, newDoc) {
var revert = []; const revert = []
if (!util.isArray(oldDoc)) { if (!Array.isArray(oldDoc)) {
this.update(newDoc, oldDoc); this.update(newDoc, oldDoc)
} else { } else {
oldDoc.forEach(function (pair) { oldDoc.forEach(function (pair) {
revert.push({ oldDoc: pair.newDoc, newDoc: pair.oldDoc }); revert.push({ oldDoc: pair.newDoc, newDoc: pair.oldDoc })
}); })
this.update(revert); this.update(revert)
}
} }
};
/** /**
* Get all documents in index whose key match value (if it is a Thing) or one of the elements of value (if it is an array of Things) * Get all documents in index whose key match value (if it is a Thing) or one of the elements of value (if it is an array of Things)
* @param {Thing} value Value to match the key against * @param {Thing} value Value to match the key against
* @return {Array of documents} * @return {Array of documents}
*/ */
Index.prototype.getMatching = function (value) { getMatching (value) {
var self = this; const self = this
if (!util.isArray(value)) { if (!Array.isArray(value)) {
return self.tree.search(value); return self.tree.search(value)
} else { } else {
var _res = {}, res = []; const _res = {}
const res = []
value.forEach(function (v) { value.forEach(function (v) {
self.getMatching(v).forEach(function (doc) { self.getMatching(v).forEach(function (doc) {
_res[doc._id] = doc; _res[doc._id] = doc
}); })
}); })
Object.keys(_res).forEach(function (_id) { Object.keys(_res).forEach(function (_id) {
res.push(_res[_id]); res.push(_res[_id])
}); })
return res; return res
}
} }
};
/** /**
* Get all documents in index whose key is between bounds are they are defined by query * Get all documents in index whose key is between bounds are they are defined by query
@ -264,31 +268,28 @@ Index.prototype.getMatching = function (value) {
* @param {Query} query * @param {Query} query
* @return {Array of documents} * @return {Array of documents}
*/ */
Index.prototype.getBetweenBounds = function (query) { getBetweenBounds (query) {
return this.tree.betweenBounds(query); return this.tree.betweenBounds(query)
}; }
/** /**
* Get all elements in the index * Get all elements in the index
* @return {Array of documents} * @return {Array of documents}
*/ */
Index.prototype.getAll = function () { getAll () {
var res = []; const res = []
this.tree.executeOnEveryNode(function (node) { this.tree.executeOnEveryNode(function (node) {
var i; let i
for (i = 0; i < node.data.length; i += 1) { for (i = 0; i < node.data.length; i += 1) {
res.push(node.data[i]); res.push(node.data[i])
} }
}); })
return res;
};
return res
}
}
// Interface // Interface
module.exports = Index; module.exports = Index

File diff suppressed because it is too large Load Diff

@ -4,177 +4,129 @@
* * Persistence.loadDatabase(callback) and callback has signature err * * Persistence.loadDatabase(callback) and callback has signature err
* * Persistence.persistNewState(newDocs, callback) where newDocs is an array of documents and callback has signature err * * Persistence.persistNewState(newDocs, callback) where newDocs is an array of documents and callback has signature err
*/ */
const storage = require('./storage')
var storage = require('./storage') const path = require('path')
, path = require('path') const model = require('./model')
, model = require('./model') const async = require('async')
, async = require('async') const customUtils = require('./customUtils')
, customUtils = require('./customUtils') const Index = require('./indexes')
, Index = require('./indexes')
; class Persistence {
/** /**
* Create a new Persistence object for database options.db * Create a new Persistence object for database options.db
* @param {Datastore} options.db * @param {Datastore} options.db
* @param {Boolean} options.nodeWebkitAppName Optional, specify the name of your NW app if you want options.filename to be relative to the directory where * @param {Boolean} options.nodeWebkitAppName Optional, specify the name of your NW app if you want options.filename to be relative to the directory where
* Node Webkit stores application data such as cookies and local storage (the best place to store data in my opinion) * Node Webkit stores application data such as cookies and local storage (the best place to store data in my opinion)
*/ */
function Persistence (options) { constructor (options) {
var i, j, randomString; let i
let j
let randomString
this.db = options.db; this.db = options.db
this.inMemoryOnly = this.db.inMemoryOnly; this.inMemoryOnly = this.db.inMemoryOnly
this.filename = this.db.filename; this.filename = this.db.filename
this.corruptAlertThreshold = options.corruptAlertThreshold !== undefined ? options.corruptAlertThreshold : 0.1; this.corruptAlertThreshold = options.corruptAlertThreshold !== undefined ? options.corruptAlertThreshold : 0.1
if (!this.inMemoryOnly && this.filename && this.filename.charAt(this.filename.length - 1) === '~') { if (!this.inMemoryOnly && this.filename && this.filename.charAt(this.filename.length - 1) === '~') {
throw new Error("The datafile name can't end with a ~, which is reserved for crash safe backup files"); throw new Error('The datafile name can\'t end with a ~, which is reserved for crash safe backup files')
} }
// After serialization and before deserialization hooks with some basic sanity checks // After serialization and before deserialization hooks with some basic sanity checks
if (options.afterSerialization && !options.beforeDeserialization) { if (options.afterSerialization && !options.beforeDeserialization) {
throw new Error("Serialization hook defined but deserialization hook undefined, cautiously refusing to start NeDB to prevent dataloss"); throw new Error('Serialization hook defined but deserialization hook undefined, cautiously refusing to start NeDB to prevent dataloss')
} }
if (!options.afterSerialization && options.beforeDeserialization) { if (!options.afterSerialization && options.beforeDeserialization) {
throw new Error("Serialization hook undefined but deserialization hook defined, cautiously refusing to start NeDB to prevent dataloss"); throw new Error('Serialization hook undefined but deserialization hook defined, cautiously refusing to start NeDB to prevent dataloss')
} }
this.afterSerialization = options.afterSerialization || function (s) { return s; }; this.afterSerialization = options.afterSerialization || function (s) { return s }
this.beforeDeserialization = options.beforeDeserialization || function (s) { return s; }; this.beforeDeserialization = options.beforeDeserialization || function (s) { return s }
for (i = 1; i < 30; i += 1) { for (i = 1; i < 30; i += 1) {
for (j = 0; j < 10; j += 1) { for (j = 0; j < 10; j += 1) {
randomString = customUtils.uid(i); randomString = customUtils.uid(i)
if (this.beforeDeserialization(this.afterSerialization(randomString)) !== randomString) { if (this.beforeDeserialization(this.afterSerialization(randomString)) !== randomString) {
throw new Error("beforeDeserialization is not the reverse of afterSerialization, cautiously refusing to start NeDB to prevent dataloss"); throw new Error('beforeDeserialization is not the reverse of afterSerialization, cautiously refusing to start NeDB to prevent dataloss')
} }
} }
} }
// For NW apps, store data in the same directory where NW stores application data // For NW apps, store data in the same directory where NW stores application data
if (this.filename && options.nodeWebkitAppName) { if (this.filename && options.nodeWebkitAppName) {
console.log("=================================================================="); console.log('==================================================================')
console.log("WARNING: The nodeWebkitAppName option is deprecated"); console.log('WARNING: The nodeWebkitAppName option is deprecated')
console.log("To get the path to the directory where Node Webkit stores the data"); console.log('To get the path to the directory where Node Webkit stores the data')
console.log("for your app, use the internal nw.gui module like this"); console.log('for your app, use the internal nw.gui module like this')
console.log("require('nw.gui').App.dataPath"); console.log('require(\'nw.gui\').App.dataPath')
console.log("See https://github.com/rogerwang/node-webkit/issues/500"); console.log('See https://github.com/rogerwang/node-webkit/issues/500')
console.log("=================================================================="); console.log('==================================================================')
this.filename = Persistence.getNWAppFilename(options.nodeWebkitAppName, this.filename); this.filename = Persistence.getNWAppFilename(options.nodeWebkitAppName, this.filename)
}
};
/**
* Check if a directory exists and create it on the fly if it is not the case
* cb is optional, signature: err
*/
Persistence.ensureDirectoryExists = function (dir, cb) {
var callback = cb || function () {}
;
storage.mkdirp(dir, function (err) { return callback(err); });
};
/**
* Return the path the datafile if the given filename is relative to the directory where Node Webkit stores
* data for this application. Probably the best place to store data
*/
Persistence.getNWAppFilename = function (appName, relativeFilename) {
var home;
switch (process.platform) {
case 'win32':
case 'win64':
home = process.env.LOCALAPPDATA || process.env.APPDATA;
if (!home) { throw new Error("Couldn't find the base application data folder"); }
home = path.join(home, appName);
break;
case 'darwin':
home = process.env.HOME;
if (!home) { throw new Error("Couldn't find the base application data directory"); }
home = path.join(home, 'Library', 'Application Support', appName);
break;
case 'linux':
home = process.env.HOME;
if (!home) { throw new Error("Couldn't find the base application data directory"); }
home = path.join(home, '.config', appName);
break;
default:
throw new Error("Can't use the Node Webkit relative path for platform " + process.platform);
break;
} }
return path.join(home, 'nedb-data', relativeFilename);
} }
/** /**
* Persist cached database * Persist cached database
* This serves as a compaction function since the cache always contains only the number of documents in the collection * This serves as a compaction function since the cache always contains only the number of documents in the collection
* while the data file is append-only so it may grow larger * while the data file is append-only so it may grow larger
* @param {Function} cb Optional callback, signature: err * @param {Function} cb Optional callback, signature: err
*/ */
Persistence.prototype.persistCachedDatabase = function (cb) { persistCachedDatabase (cb) {
var callback = cb || function () {} const callback = cb || function () {}
, toPersist = '' let toPersist = ''
, self = this const self = this
;
if (this.inMemoryOnly) { return callback(null); } if (this.inMemoryOnly) { return callback(null) }
this.db.getAllData().forEach(function (doc) { this.db.getAllData().forEach(function (doc) {
toPersist += self.afterSerialization(model.serialize(doc)) + '\n'; toPersist += self.afterSerialization(model.serialize(doc)) + '\n'
}); })
Object.keys(this.db.indexes).forEach(function (fieldName) { Object.keys(this.db.indexes).forEach(function (fieldName) {
if (fieldName != "_id") { // The special _id index is managed by datastore.js, the others need to be persisted if (fieldName !== '_id') { // The special _id index is managed by datastore.js, the others need to be persisted
toPersist += self.afterSerialization(model.serialize({ $$indexCreated: { fieldName: fieldName, unique: self.db.indexes[fieldName].unique, sparse: self.db.indexes[fieldName].sparse }})) + '\n'; toPersist += self.afterSerialization(model.serialize({
$$indexCreated: {
fieldName: fieldName,
unique: self.db.indexes[fieldName].unique,
sparse: self.db.indexes[fieldName].sparse
}
})) + '\n'
} }
}); })
storage.crashSafeWriteFile(this.filename, toPersist, function (err) { storage.crashSafeWriteFile(this.filename, toPersist, function (err) {
if (err) { return callback(err); } if (err) { return callback(err) }
self.db.emit('compaction.done'); self.db.emit('compaction.done')
return callback(null); return callback(null)
}); })
}; }
/** /**
* Queue a rewrite of the datafile * Queue a rewrite of the datafile
*/ */
Persistence.prototype.compactDatafile = function () { compactDatafile () {
this.db.executor.push({ this: this, fn: this.persistCachedDatabase, arguments: [] }); this.db.executor.push({ this: this, fn: this.persistCachedDatabase, arguments: [] })
}; }
/** /**
* Set automatic compaction every interval ms * Set automatic compaction every interval ms
* @param {Number} interval in milliseconds, with an enforced minimum of 5 seconds * @param {Number} interval in milliseconds, with an enforced minimum of 5 seconds
*/ */
Persistence.prototype.setAutocompactionInterval = function (interval) { setAutocompactionInterval (interval) {
var self = this const self = this
, minInterval = 5000 const minInterval = 5000
, realInterval = Math.max(interval || 0, minInterval) const realInterval = Math.max(interval || 0, minInterval)
;
this.stopAutocompaction(); this.stopAutocompaction()
this.autocompactionIntervalId = setInterval(function () { this.autocompactionIntervalId = setInterval(function () {
self.compactDatafile(); self.compactDatafile()
}, realInterval); }, realInterval)
}; }
/** /**
* Stop autocompaction (do nothing if autocompaction was not running) * Stop autocompaction (do nothing if autocompaction was not running)
*/ */
Persistence.prototype.stopAutocompaction = function () { stopAutocompaction () {
if (this.autocompactionIntervalId) { clearInterval(this.autocompactionIntervalId); } if (this.autocompactionIntervalId) { clearInterval(this.autocompactionIntervalId) }
}; }
/** /**
* Persist new state for the given newDocs (can be insertion, update or removal) * Persist new state for the given newDocs (can be insertion, update or removal)
@ -182,73 +134,69 @@ Persistence.prototype.stopAutocompaction = function () {
* @param {Array} newDocs Can be empty if no doc was updated/removed * @param {Array} newDocs Can be empty if no doc was updated/removed
* @param {Function} cb Optional, signature: err * @param {Function} cb Optional, signature: err
*/ */
Persistence.prototype.persistNewState = function (newDocs, cb) { persistNewState (newDocs, cb) {
var self = this const self = this
, toPersist = '' let toPersist = ''
, callback = cb || function () {} const callback = cb || function () {}
;
// In-memory only datastore // In-memory only datastore
if (self.inMemoryOnly) { return callback(null); } if (self.inMemoryOnly) { return callback(null) }
newDocs.forEach(function (doc) { newDocs.forEach(function (doc) {
toPersist += self.afterSerialization(model.serialize(doc)) + '\n'; toPersist += self.afterSerialization(model.serialize(doc)) + '\n'
}); })
if (toPersist.length === 0) { return callback(null); } if (toPersist.length === 0) { return callback(null) }
storage.appendFile(self.filename, toPersist, 'utf8', function (err) { storage.appendFile(self.filename, toPersist, 'utf8', function (err) {
return callback(err); return callback(err)
}); })
}; }
/** /**
* From a database's raw data, return the corresponding * From a database's raw data, return the corresponding
* machine understandable collection * machine understandable collection
*/ */
Persistence.prototype.treatRawData = function (rawData) { treatRawData (rawData) {
var data = rawData.split('\n') const data = rawData.split('\n')
, dataById = {} const dataById = {}
, tdata = [] const tdata = []
, i let i
, indexes = {} const indexes = {}
, corruptItems = -1 // Last line of every data file is usually blank so not really corrupt let corruptItems = -1
;
for (i = 0; i < data.length; i += 1) { for (i = 0; i < data.length; i += 1) {
var doc; let doc
try { try {
doc = model.deserialize(this.beforeDeserialization(data[i])); doc = model.deserialize(this.beforeDeserialization(data[i]))
if (doc._id) { if (doc._id) {
if (doc.$$deleted === true) { if (doc.$$deleted === true) {
delete dataById[doc._id]; delete dataById[doc._id]
} else { } else {
dataById[doc._id] = doc; dataById[doc._id] = doc
} }
} else if (doc.$$indexCreated && doc.$$indexCreated.fieldName != undefined) { } else if (doc.$$indexCreated && doc.$$indexCreated.fieldName != null) {
indexes[doc.$$indexCreated.fieldName] = doc.$$indexCreated; indexes[doc.$$indexCreated.fieldName] = doc.$$indexCreated
} else if (typeof doc.$$indexRemoved === "string") { } else if (typeof doc.$$indexRemoved === 'string') {
delete indexes[doc.$$indexRemoved]; delete indexes[doc.$$indexRemoved]
} }
} catch (e) { } catch (e) {
corruptItems += 1; corruptItems += 1
} }
} }
// A bit lenient on corruption // A bit lenient on corruption
if (data.length > 0 && corruptItems / data.length > this.corruptAlertThreshold) { if (data.length > 0 && corruptItems / data.length > this.corruptAlertThreshold) {
throw new Error("More than " + Math.floor(100 * this.corruptAlertThreshold) + "% of the data file is corrupt, the wrong beforeDeserialization hook may be used. Cautiously refusing to start NeDB to prevent dataloss"); throw new Error('More than ' + Math.floor(100 * this.corruptAlertThreshold) + '% of the data file is corrupt, the wrong beforeDeserialization hook may be used. Cautiously refusing to start NeDB to prevent dataloss')
} }
Object.keys(dataById).forEach(function (k) { Object.keys(dataById).forEach(function (k) {
tdata.push(dataById[k]); tdata.push(dataById[k])
}); })
return { data: tdata, indexes: indexes };
};
return { data: tdata, indexes: indexes }
}
/** /**
* Load the database * Load the database
@ -260,55 +208,99 @@ Persistence.prototype.treatRawData = function (rawData) {
* This operation is very quick at startup for a big collection (60ms for ~10k docs) * This operation is very quick at startup for a big collection (60ms for ~10k docs)
* @param {Function} cb Optional callback, signature: err * @param {Function} cb Optional callback, signature: err
*/ */
Persistence.prototype.loadDatabase = function (cb) { loadDatabase (cb) {
var callback = cb || function () {} const callback = cb || function () {}
, self = this const self = this
;
self.db.resetIndexes(); self.db.resetIndexes()
// In-memory only datastore // In-memory only datastore
if (self.inMemoryOnly) { return callback(null); } if (self.inMemoryOnly) { return callback(null) }
async.waterfall([ async.waterfall([
function (cb) { function (cb) {
// eslint-disable-next-line node/handle-callback-err
Persistence.ensureDirectoryExists(path.dirname(self.filename), function (err) { Persistence.ensureDirectoryExists(path.dirname(self.filename), function (err) {
// TODO: handle error
// eslint-disable-next-line node/handle-callback-err
storage.ensureDatafileIntegrity(self.filename, function (err) { storage.ensureDatafileIntegrity(self.filename, function (err) {
// TODO: handle error
storage.readFile(self.filename, 'utf8', function (err, rawData) { storage.readFile(self.filename, 'utf8', function (err, rawData) {
if (err) { return cb(err); } if (err) { return cb(err) }
let treatedData
try { try {
var treatedData = self.treatRawData(rawData); treatedData = self.treatRawData(rawData)
} catch (e) { } catch (e) {
return cb(e); return cb(e)
} }
// Recreate all indexes in the datafile // Recreate all indexes in the datafile
Object.keys(treatedData.indexes).forEach(function (key) { Object.keys(treatedData.indexes).forEach(function (key) {
self.db.indexes[key] = new Index(treatedData.indexes[key]); self.db.indexes[key] = new Index(treatedData.indexes[key])
}); })
// Fill cached database (i.e. all indexes) with data // Fill cached database (i.e. all indexes) with data
try { try {
self.db.resetIndexes(treatedData.data); self.db.resetIndexes(treatedData.data)
} catch (e) { } catch (e) {
self.db.resetIndexes(); // Rollback any index which didn't fail self.db.resetIndexes() // Rollback any index which didn't fail
return cb(e); return cb(e)
} }
self.db.persistence.persistCachedDatabase(cb); self.db.persistence.persistCachedDatabase(cb)
}); })
}); })
}); })
} }
], function (err) { ], function (err) {
if (err) { return callback(err); } if (err) { return callback(err) }
self.db.executor.processBuffer()
return callback(null)
})
}
/**
* Check if a directory stat and create it on the fly if it is not the case
* cb is optional, signature: err
*/
static ensureDirectoryExists (dir, cb) {
const callback = cb || function () {}
self.db.executor.processBuffer(); storage.mkdir(dir, { recursive: true }, err => { callback(err) })
return callback(null); }
});
};
/**
* Return the path the datafile if the given filename is relative to the directory where Node Webkit stores
* data for this application. Probably the best place to store data
*/
static getNWAppFilename (appName, relativeFilename) {
let home
switch (process.platform) {
case 'win32':
case 'win64':
home = process.env.LOCALAPPDATA || process.env.APPDATA
if (!home) { throw new Error('Couldn\'t find the base application data folder') }
home = path.join(home, appName)
break
case 'darwin':
home = process.env.HOME
if (!home) { throw new Error('Couldn\'t find the base application data directory') }
home = path.join(home, 'Library', 'Application Support', appName)
break
case 'linux':
home = process.env.HOME
if (!home) { throw new Error('Couldn\'t find the base application data directory') }
home = path.join(home, '.config', appName)
break
default:
throw new Error('Can\'t use the Node Webkit relative path for platform ' + process.platform)
}
return path.join(home, 'nedb-data', relativeFilename)
}
}
// Interface // Interface
module.exports = Persistence; module.exports = Persistence

@ -6,34 +6,30 @@
* This version is the Node.js/Node Webkit version * This version is the Node.js/Node Webkit version
* It's essentially fs, mkdirp and crash safe write and read functions * It's essentially fs, mkdirp and crash safe write and read functions
*/ */
const fs = require('fs')
var fs = require('fs') const async = require('async')
, mkdirp = require('mkdirp') const path = require('path')
, async = require('async') const storage = {}
, path = require('path')
, storage = {} // eslint-disable-next-line node/no-callback-literal
; storage.exists = (path, cb) => fs.access(path, fs.constants.F_OK, (err) => { cb(!err) })
storage.rename = fs.rename
storage.exists = fs.exists; storage.writeFile = fs.writeFile
storage.rename = fs.rename; storage.unlink = fs.unlink
storage.writeFile = fs.writeFile; storage.appendFile = fs.appendFile
storage.unlink = fs.unlink; storage.readFile = fs.readFile
storage.appendFile = fs.appendFile; storage.mkdir = fs.mkdir
storage.readFile = fs.readFile;
storage.mkdirp = mkdirp;
/** /**
* Explicit name ... * Explicit name ...
*/ */
storage.ensureFileDoesntExist = function (file, callback) { storage.ensureFileDoesntExist = function (file, callback) {
storage.exists(file, function (exists) { storage.exists(file, function (exists) {
if (!exists) { return callback(null); } if (!exists) { return callback(null) }
storage.unlink(file, function (err) { return callback(err); });
});
};
storage.unlink(file, function (err) { return callback(err) })
})
}
/** /**
* Flush data in OS buffer to storage if corresponding option is set * Flush data in OS buffer to storage if corresponding option is set
@ -42,36 +38,36 @@ storage.ensureFileDoesntExist = function (file, callback) {
* If options is a string, it is assumed that the flush of the file (not dir) called options was requested * If options is a string, it is assumed that the flush of the file (not dir) called options was requested
*/ */
storage.flushToStorage = function (options, callback) { storage.flushToStorage = function (options, callback) {
var filename, flags; let filename
let flags
if (typeof options === 'string') { if (typeof options === 'string') {
filename = options; filename = options
flags = 'r+'; flags = 'r+'
} else { } else {
filename = options.filename; filename = options.filename
flags = options.isDir ? 'r' : 'r+'; flags = options.isDir ? 'r' : 'r+'
} }
// Windows can't fsync (FlushFileBuffers) directories. We can live with this as it cannot cause 100% dataloss // Windows can't fsync (FlushFileBuffers) directories. We can live with this as it cannot cause 100% dataloss
// except in the very rare event of the first time database is loaded and a crash happens // except in the very rare event of the first time database is loaded and a crash happens
if (flags === 'r' && (process.platform === 'win32' || process.platform === 'win64')) { return callback(null); } if (flags === 'r' && (process.platform === 'win32' || process.platform === 'win64')) { return callback(null) }
fs.open(filename, flags, function (err, fd) { fs.open(filename, flags, function (err, fd) {
if (err) { return callback(err); } if (err) { return callback(err) }
fs.fsync(fd, function (errFS) { fs.fsync(fd, function (errFS) {
fs.close(fd, function (errC) { fs.close(fd, function (errC) {
if (errFS || errC) { if (errFS || errC) {
var e = new Error('Failed to flush to storage'); const e = new Error('Failed to flush to storage')
e.errorOnFsync = errFS; e.errorOnFsync = errFS
e.errorOnClose = errC; e.errorOnClose = errC
return callback(e); return callback(e)
} else { } else {
return callback(null); return callback(null)
}
})
})
})
} }
});
});
});
};
/** /**
* Fully write or rewrite the datafile, immune to crashes during the write operation (data will not be lost) * Fully write or rewrite the datafile, immune to crashes during the write operation (data will not be lost)
@ -80,31 +76,30 @@ storage.flushToStorage = function (options, callback) {
* @param {Function} cb Optional callback, signature: err * @param {Function} cb Optional callback, signature: err
*/ */
storage.crashSafeWriteFile = function (filename, data, cb) { storage.crashSafeWriteFile = function (filename, data, cb) {
var callback = cb || function () {} const callback = cb || function () {}
, tempFilename = filename + '~'; const tempFilename = filename + '~'
async.waterfall([ async.waterfall([
async.apply(storage.flushToStorage, { filename: path.dirname(filename), isDir: true }) async.apply(storage.flushToStorage, { filename: path.dirname(filename), isDir: true }),
, function (cb) { function (cb) {
storage.exists(filename, function (exists) { storage.exists(filename, function (exists) {
if (exists) { if (exists) {
storage.flushToStorage(filename, function (err) { return cb(err); }); storage.flushToStorage(filename, function (err) { return cb(err) })
} else { } else {
return cb(); return cb()
} }
}); })
} },
, function (cb) { function (cb) {
storage.writeFile(tempFilename, data, function (err) { return cb(err); }); storage.writeFile(tempFilename, data, function (err) { return cb(err) })
} },
, async.apply(storage.flushToStorage, tempFilename) async.apply(storage.flushToStorage, tempFilename),
, function (cb) { function (cb) {
storage.rename(tempFilename, filename, function (err) { return cb(err); }); storage.rename(tempFilename, filename, function (err) { return cb(err) })
},
async.apply(storage.flushToStorage, { filename: path.dirname(filename), isDir: true })
], function (err) { return callback(err) })
} }
, async.apply(storage.flushToStorage, { filename: path.dirname(filename), isDir: true })
], function (err) { return callback(err); })
};
/** /**
* Ensure the datafile contains all the data, even if there was a crash during a full file write * Ensure the datafile contains all the data, even if there was a crash during a full file write
@ -112,25 +107,23 @@ storage.crashSafeWriteFile = function (filename, data, cb) {
* @param {Function} callback signature: err * @param {Function} callback signature: err
*/ */
storage.ensureDatafileIntegrity = function (filename, callback) { storage.ensureDatafileIntegrity = function (filename, callback) {
var tempFilename = filename + '~'; const tempFilename = filename + '~'
storage.exists(filename, function (filenameExists) { storage.exists(filename, function (filenameExists) {
// Write was successful // Write was successful
if (filenameExists) { return callback(null); } if (filenameExists) { return callback(null) }
storage.exists(tempFilename, function (oldFilenameExists) { storage.exists(tempFilename, function (oldFilenameExists) {
// New database // New database
if (!oldFilenameExists) { if (!oldFilenameExists) {
return storage.writeFile(filename, '', 'utf8', function (err) { callback(err); }); return storage.writeFile(filename, '', 'utf8', function (err) { callback(err) })
} }
// Write failed, use old version // Write failed, use old version
storage.rename(tempFilename, filename, function (err) { return callback(err); }); storage.rename(tempFilename, filename, function (err) { return callback(err) })
}); })
}); })
}; }
// Interface // Interface
module.exports = storage; module.exports = storage

@ -20,27 +20,36 @@
"url": "git@github.com:louischatriot/nedb.git" "url": "git@github.com:louischatriot/nedb.git"
}, },
"dependencies": { "dependencies": {
"@seald-io/binary-search-tree": "^1.0.0",
"async": "0.2.10", "async": "0.2.10",
"binary-search-tree": "0.2.5", "localforage": "^1.9.0",
"localforage": "^1.3.0", "underscore": "^1.13.1"
"mkdirp": "~0.5.1",
"underscore": "~1.4.4"
}, },
"devDependencies": { "devDependencies": {
"chai": "^3.2.0", "chai": "^4.3.4",
"mocha": "1.4.x", "commander": "1.1.1",
"exec-time": "0.0.2",
"mocha": "^8.4.0",
"request": "2.9.x", "request": "2.9.x",
"semver": "^7.3.5",
"sinon": "1.3.x", "sinon": "1.3.x",
"exec-time": "0.0.2", "standard": "^16.0.3"
"commander": "1.1.1"
}, },
"scripts": { "scripts": {
"test": "./node_modules/.bin/mocha --reporter spec --timeout 10000" "test": "mocha --reporter spec --timeout 10000"
}, },
"main": "index", "main": "index.js",
"browser": { "browser": {
"./lib/customUtils.js": "./browser-version/browser-specific/lib/customUtils.js", "./lib/customUtils.js": "./browser-version/browser-specific/lib/customUtils.js",
"./lib/storage.js": "./browser-version/browser-specific/lib/storage.js" "./lib/storage.js": "./browser-version/browser-specific/lib/storage.js"
}, },
"license": "SEE LICENSE IN LICENSE" "license": "MIT",
"publishConfig": {
"access": "public"
},
"standard": {
"ignore": [
"browser-version"
]
}
} }

File diff suppressed because it is too large Load Diff

@ -1,26 +1,20 @@
var should = require('chai').should() /* eslint-env mocha */
, assert = require('chai').assert const chai = require('chai')
, customUtils = require('../lib/customUtils') const customUtils = require('../lib/customUtils')
, fs = require('fs')
;
chai.should()
describe('customUtils', function () { describe('customUtils', function () {
describe('uid', function () { describe('uid', function () {
it('Generates a string of the expected length', function () { it('Generates a string of the expected length', function () {
customUtils.uid(3).length.should.equal(3); customUtils.uid(3).length.should.equal(3)
customUtils.uid(16).length.should.equal(16); customUtils.uid(16).length.should.equal(16)
customUtils.uid(42).length.should.equal(42); customUtils.uid(42).length.should.equal(42)
customUtils.uid(1000).length.should.equal(1000); customUtils.uid(1000).length.should.equal(1000)
}); })
// Very small probability of conflict // Very small probability of conflict
it('Generated uids should not be the same', function () { it('Generated uids should not be the same', function () {
customUtils.uid(56).should.not.equal(customUtils.uid(56)); customUtils.uid(56).should.not.equal(customUtils.uid(56))
}); })
})
}); })
});

File diff suppressed because it is too large Load Diff

@ -1,213 +1,215 @@
var should = require('chai').should() /* eslint-env mocha */
, assert = require('chai').assert const chai = require('chai')
, testDb = 'workspace/test.db' const testDb = 'workspace/test.db'
, fs = require('fs') const fs = require('fs')
, path = require('path') const path = require('path')
, _ = require('underscore') const async = require('async')
, async = require('async') const Datastore = require('../lib/datastore')
, model = require('../lib/model') const Persistence = require('../lib/persistence')
, Datastore = require('../lib/datastore')
, Persistence = require('../lib/persistence') const { assert } = chai
; chai.should()
// Test that even if a callback throws an exception, the next DB operations will still be executed // Test that even if a callback throws an exception, the next DB operations will still be executed
// We prevent Mocha from catching the exception we throw on purpose by remembering all current handlers, remove them and register them back after test ends // We prevent Mocha from catching the exception we throw on purpose by remembering all current handlers, remove them and register them back after test ends
function testThrowInCallback (d, done) { function testThrowInCallback (d, done) {
var currentUncaughtExceptionHandlers = process.listeners('uncaughtException'); const currentUncaughtExceptionHandlers = process.listeners('uncaughtException')
process.removeAllListeners('uncaughtException'); process.removeAllListeners('uncaughtException')
// eslint-disable-next-line node/handle-callback-err
process.on('uncaughtException', function (err) { process.on('uncaughtException', function (err) {
// Do nothing with the error which is only there to test we stay on track // Do nothing with the error which is only there to test we stay on track
}); })
// eslint-disable-next-line node/handle-callback-err
d.find({}, function (err) { d.find({}, function (err) {
process.nextTick(function () { process.nextTick(function () {
// eslint-disable-next-line node/handle-callback-err
d.insert({ bar: 1 }, function (err) { d.insert({ bar: 1 }, function (err) {
process.removeAllListeners('uncaughtException'); process.removeAllListeners('uncaughtException')
for (var i = 0; i < currentUncaughtExceptionHandlers.length; i += 1) { for (let i = 0; i < currentUncaughtExceptionHandlers.length; i += 1) {
process.on('uncaughtException', currentUncaughtExceptionHandlers[i]); process.on('uncaughtException', currentUncaughtExceptionHandlers[i])
} }
done(); done()
}); })
}); })
throw new Error('Some error'); throw new Error('Some error')
}); })
} }
// Test that if the callback is falsy, the next DB operations will still be executed // Test that if the callback is falsy, the next DB operations will still be executed
function testFalsyCallback (d, done) { function testFalsyCallback (d, done) {
d.insert({ a: 1 }, null); d.insert({ a: 1 }, null)
process.nextTick(function () { process.nextTick(function () {
d.update({ a: 1 }, { a: 2 }, {}, null); d.update({ a: 1 }, { a: 2 }, {}, null)
process.nextTick(function () { process.nextTick(function () {
d.update({ a: 2 }, { a: 1 }, null); d.update({ a: 2 }, { a: 1 }, null)
process.nextTick(function () { process.nextTick(function () {
d.remove({ a: 2 }, {}, null); d.remove({ a: 2 }, {}, null)
process.nextTick(function () { process.nextTick(function () {
d.remove({ a: 2 }, null); d.remove({ a: 2 }, null)
process.nextTick(function () { process.nextTick(function () {
d.find({}, done); d.find({}, done)
}); })
}); })
}); })
}); })
}); })
} }
// Test that operations are executed in the right order // Test that operations are executed in the right order
// We prevent Mocha from catching the exception we throw on purpose by remembering all current handlers, remove them and register them back after test ends // We prevent Mocha from catching the exception we throw on purpose by remembering all current handlers, remove them and register them back after test ends
function testRightOrder (d, done) { function testRightOrder (d, done) {
var currentUncaughtExceptionHandlers = process.listeners('uncaughtException'); const currentUncaughtExceptionHandlers = process.listeners('uncaughtException')
process.removeAllListeners('uncaughtException'); process.removeAllListeners('uncaughtException')
// eslint-disable-next-line node/handle-callback-err
process.on('uncaughtException', function (err) { process.on('uncaughtException', function (err) {
// Do nothing with the error which is only there to test we stay on track // Do nothing with the error which is only there to test we stay on track
}); })
// eslint-disable-next-line node/handle-callback-err
d.find({}, function (err, docs) { d.find({}, function (err, docs) {
docs.length.should.equal(0); docs.length.should.equal(0)
d.insert({ a: 1 }, function () { d.insert({ a: 1 }, function () {
d.update({ a: 1 }, { a: 2 }, {}, function () { d.update({ a: 1 }, { a: 2 }, {}, function () {
// eslint-disable-next-line node/handle-callback-err
d.find({}, function (err, docs) { d.find({}, function (err, docs) {
docs[0].a.should.equal(2); docs[0].a.should.equal(2)
process.nextTick(function () { process.nextTick(function () {
d.update({ a: 2 }, { a: 3 }, {}, function () { d.update({ a: 2 }, { a: 3 }, {}, function () {
// eslint-disable-next-line node/handle-callback-err
d.find({}, function (err, docs) { d.find({}, function (err, docs) {
docs[0].a.should.equal(3); docs[0].a.should.equal(3)
process.removeAllListeners('uncaughtException'); process.removeAllListeners('uncaughtException')
for (var i = 0; i < currentUncaughtExceptionHandlers.length; i += 1) { for (let i = 0; i < currentUncaughtExceptionHandlers.length; i += 1) {
process.on('uncaughtException', currentUncaughtExceptionHandlers[i]); process.on('uncaughtException', currentUncaughtExceptionHandlers[i])
} }
done(); done()
}); })
}); })
}); })
throw new Error('Some error'); throw new Error('Some error')
}); })
}); })
}); })
}); })
} }
// Note: The following test does not have any assertion because it // Note: The following test does not have any assertion because it
// is meant to address the deprecation warning: // is meant to address the deprecation warning:
// (node) warning: Recursive process.nextTick detected. This will break in the next version of node. Please use setImmediate for recursive deferral. // (node) warning: Recursive process.nextTick detected. This will break in the next version of node. Please use setImmediate for recursive deferral.
// see // see
var testEventLoopStarvation = function(d, done){ const testEventLoopStarvation = function (d, done) {
var times = 1001; const times = 1001
var i = 0; let i = 0
while (i < times) { while (i < times) {
i++; i++
d.find({"bogus": "search"}, function (err, docs) { // eslint-disable-next-line node/handle-callback-err
}); d.find({ bogus: 'search' }, function (err, docs) {
})
}
done()
} }
done();
};
// Test that operations are executed in the right order even with no callback // Test that operations are executed in the right order even with no callback
function testExecutorWorksWithoutCallback (d, done) { function testExecutorWorksWithoutCallback (d, done) {
d.insert({ a: 1 }); d.insert({ a: 1 })
d.insert({ a: 2 }, false); d.insert({ a: 2 }, false)
// eslint-disable-next-line node/handle-callback-err
d.find({}, function (err, docs) { d.find({}, function (err, docs) {
docs.length.should.equal(2); docs.length.should.equal(2)
done(); done()
}); })
} }
describe('Executor', function () { describe('Executor', function () {
describe('With persistent database', function () { describe('With persistent database', function () {
var d; let d
beforeEach(function (done) { beforeEach(function (done) {
d = new Datastore({ filename: testDb }); d = new Datastore({ filename: testDb })
d.filename.should.equal(testDb); d.filename.should.equal(testDb)
d.inMemoryOnly.should.equal(false); d.inMemoryOnly.should.equal(false)
async.waterfall([ async.waterfall([
function (cb) { function (cb) {
Persistence.ensureDirectoryExists(path.dirname(testDb), function () { Persistence.ensureDirectoryExists(path.dirname(testDb), function () {
fs.exists(testDb, function (exists) { fs.access(testDb, fs.constants.F_OK, function (err) {
if (exists) { if (!err) {
fs.unlink(testDb, cb); fs.unlink(testDb, cb)
} else { return cb(); } } else { return cb() }
}); })
}); })
} },
, function (cb) { function (cb) {
d.loadDatabase(function (err) { d.loadDatabase(function (err) {
assert.isNull(err); assert.isNull(err)
d.getAllData().length.should.equal(0); d.getAllData().length.should.equal(0)
return cb(); return cb()
}); })
} }
], done); ], done)
}); })
it('A throw in a callback doesnt prevent execution of next operations', function (done) { it('A throw in a callback doesnt prevent execution of next operations', function (done) {
testThrowInCallback(d, done); testThrowInCallback(d, done)
}); })
it('A falsy callback doesnt prevent execution of next operations', function (done) { it('A falsy callback doesnt prevent execution of next operations', function (done) {
testFalsyCallback(d, done); testFalsyCallback(d, done)
}); })
it('Operations are executed in the right order', function (done) { it('Operations are executed in the right order', function (done) {
testRightOrder(d, done); testRightOrder(d, done)
}); })
it('Does not starve event loop and raise warning when more than 1000 callbacks are in queue', function (done) { it('Does not starve event loop and raise warning when more than 1000 callbacks are in queue', function (done) {
testEventLoopStarvation(d, done); testEventLoopStarvation(d, done)
}); })
it('Works in the right order even with no supplied callback', function (done) { it('Works in the right order even with no supplied callback', function (done) {
testExecutorWorksWithoutCallback(d, done); testExecutorWorksWithoutCallback(d, done)
}); })
}) // ==== End of 'With persistent database' ====
}); // ==== End of 'With persistent database' ====
describe('With non persistent database', function () { describe('With non persistent database', function () {
var d; let d
beforeEach(function (done) { beforeEach(function (done) {
d = new Datastore({ inMemoryOnly: true }); d = new Datastore({ inMemoryOnly: true })
d.inMemoryOnly.should.equal(true); d.inMemoryOnly.should.equal(true)
d.loadDatabase(function (err) { d.loadDatabase(function (err) {
assert.isNull(err); assert.isNull(err)
d.getAllData().length.should.equal(0); d.getAllData().length.should.equal(0)
return done(); return done()
}); })
}); })
it('A throw in a callback doesnt prevent execution of next operations', function (done) { it('A throw in a callback doesnt prevent execution of next operations', function (done) {
testThrowInCallback(d, done); testThrowInCallback(d, done)
}); })
it('A falsy callback doesnt prevent execution of next operations', function (done) { it('A falsy callback doesnt prevent execution of next operations', function (done) {
testFalsyCallback(d, done); testFalsyCallback(d, done)
}); })
it('Operations are executed in the right order', function (done) { it('Operations are executed in the right order', function (done) {
testRightOrder(d, done); testRightOrder(d, done)
}); })
it('Works in the right order even with no supplied callback', function (done) { it('Works in the right order even with no supplied callback', function (done) {
testExecutorWorksWithoutCallback(d, done); testExecutorWorksWithoutCallback(d, done)
}); })
}) // ==== End of 'With non persistent database' ====
}); // ==== End of 'With non persistent database' ==== })
});

File diff suppressed because it is too large Load Diff

@ -1,2 +0,0 @@
--reporter spec
--timeout 30000

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

@ -1,123 +1,121 @@
/* eslint-env mocha */
/* global DEBUG */
/** /**
* Load and modify part of fs to ensure writeFile will crash after writing 5000 bytes * Load and modify part of fs to ensure writeFile will crash after writing 5000 bytes
*/ */
var fs = require('fs'); const fs = require('fs')
function rethrow () { function rethrow () {
// Only enable in debug mode. A backtrace uses ~1000 bytes of heap space and // Only enable in debug mode. A backtrace uses ~1000 bytes of heap space and
// is fairly slow to generate. // is fairly slow to generate.
if (DEBUG) { if (DEBUG) {
var backtrace = new Error(); const backtrace = new Error()
return function (err) { return function (err) {
if (err) { if (err) {
backtrace.stack = err.name + ': ' + err.message + backtrace.stack = err.name + ': ' + err.message +
backtrace.stack.substr(backtrace.name.length); backtrace.stack.substr(backtrace.name.length)
throw backtrace; throw backtrace
}
} }
};
} }
return function (err) { return function (err) {
if (err) { if (err) {
throw err; // Forgot a callback but don't know where? Use NODE_DEBUG=fs throw err // Forgot a callback but don't know where? Use NODE_DEBUG=fs
}
} }
};
} }
function maybeCallback (cb) { function maybeCallback (cb) {
return typeof cb === 'function' ? cb : rethrow(); return typeof cb === 'function' ? cb : rethrow()
} }
function isFd (path) { function isFd (path) {
return (path >>> 0) === path; return (path >>> 0) === path
} }
function assertEncoding (encoding) { function assertEncoding (encoding) {
if (encoding && !Buffer.isEncoding(encoding)) { if (encoding && !Buffer.isEncoding(encoding)) {
throw new Error('Unknown encoding: ' + encoding); throw new Error('Unknown encoding: ' + encoding)
} }
} }
var onePassDone = false; let onePassDone = false
function writeAll (fd, isUserFd, buffer, offset, length, position, callback_) { function writeAll (fd, isUserFd, buffer, offset, length, position, callback_) {
var callback = maybeCallback(arguments[arguments.length - 1]); const callback = maybeCallback(arguments[arguments.length - 1])
if (onePassDone) { process.exit(1); } // Crash on purpose before rewrite done if (onePassDone) { process.exit(1) } // Crash on purpose before rewrite done
var l = Math.min(5000, length); // Force write by chunks of 5000 bytes to ensure data will be incomplete on crash const l = Math.min(5000, length) // Force write by chunks of 5000 bytes to ensure data will be incomplete on crash
// write(fd, buffer, offset, length, position, callback) // write(fd, buffer, offset, length, position, callback)
fs.write(fd, buffer, offset, l, position, function (writeErr, written) { fs.write(fd, buffer, offset, l, position, function (writeErr, written) {
if (writeErr) { if (writeErr) {
if (isUserFd) { if (isUserFd) {
if (callback) callback(writeErr); if (callback) callback(writeErr)
} else { } else {
fs.close(fd, function () { fs.close(fd, function () {
if (callback) callback(writeErr); if (callback) callback(writeErr)
}); })
} }
} else { } else {
onePassDone = true; onePassDone = true
if (written === length) { if (written === length) {
if (isUserFd) { if (isUserFd) {
if (callback) callback(null); if (callback) callback(null)
} else { } else {
fs.close(fd, callback); fs.close(fd, callback)
} }
} else { } else {
offset += written; offset += written
length -= written; length -= written
if (position !== null) { if (position !== null) {
position += written; position += written
} }
writeAll(fd, isUserFd, buffer, offset, length, position, callback); writeAll(fd, isUserFd, buffer, offset, length, position, callback)
} }
} }
}); })
} }
fs.writeFile = function (path, data, options, callback_) { fs.writeFile = function (path, data, options, callback_) {
var callback = maybeCallback(arguments[arguments.length - 1]); const callback = maybeCallback(arguments[arguments.length - 1])
if (!options || typeof options === 'function') { if (!options || typeof options === 'function') {
options = { encoding: 'utf8', mode: 438, flag: 'w' }; // Mode 438 == 0o666 (compatibility with older Node releases) options = { encoding: 'utf8', mode: 438, flag: 'w' } // Mode 438 == 0o666 (compatibility with older Node releases)
} else if (typeof options === 'string') { } else if (typeof options === 'string') {
options = { encoding: options, mode: 438, flag: 'w' }; // Mode 438 == 0o666 (compatibility with older Node releases) options = { encoding: options, mode: 438, flag: 'w' } // Mode 438 == 0o666 (compatibility with older Node releases)
} else if (typeof options !== 'object') { } else if (typeof options !== 'object') {
throwOptionsError(options); throw new Error(`throwOptionsError${options}`)
} }
assertEncoding(options.encoding); assertEncoding(options.encoding)
var flag = options.flag || 'w'; const flag = options.flag || 'w'
if (isFd(path)) { if (isFd(path)) {
writeFd(path, true); writeFd(path, true)
return; return
} }
fs.open(path, flag, options.mode, function (openErr, fd) { fs.open(path, flag, options.mode, function (openErr, fd) {
if (openErr) { if (openErr) {
if (callback) callback(openErr); if (callback) callback(openErr)
} else { } else {
writeFd(fd, false); writeFd(fd, false)
} }
}); })
function writeFd (fd, isUserFd) { function writeFd (fd, isUserFd) {
var buffer = (data instanceof Buffer) ? data : new Buffer('' + data, const buffer = (data instanceof Buffer) ? data : Buffer.from('' + data, options.encoding || 'utf8')
options.encoding || 'utf8'); const position = /a/.test(flag) ? null : 0
var position = /a/.test(flag) ? null : 0;
writeAll(fd, isUserFd, buffer, 0, buffer.length, position, callback); writeAll(fd, isUserFd, buffer, 0, buffer.length, position, callback)
}
} }
};
// End of fs modification // End of fs modification
var Nedb = require('../lib/datastore.js') const Nedb = require('../lib/datastore.js')
, db = new Nedb({ filename: 'workspace/lac.db' }) const db = new Nedb({ filename: 'workspace/lac.db' })
;
db.loadDatabase(); db.loadDatabase()

@ -1,67 +1,64 @@
var fs = require('fs') const fs = require('fs')
, child_process = require('child_process') const async = require('async')
, async = require('async') const Nedb = require('../lib/datastore')
, Nedb = require('../lib/datastore') const db = new Nedb({ filename: './workspace/openfds.db', autoload: true })
, db = new Nedb({ filename: './workspace/openfds.db', autoload: true }) const N = 64
, N = 64 // Half the allowed file descriptors let i
, i, fds let fds
;
function multipleOpen (filename, N, callback) { function multipleOpen (filename, N, callback) {
async.whilst( function () { return i < N; } async.whilst(function () { return i < N }
, function (cb) { , function (cb) {
fs.open(filename, 'r', function (err, fd) { fs.open(filename, 'r', function (err, fd) {
i += 1; i += 1
if (fd) { fds.push(fd); } if (fd) { fds.push(fd) }
return cb(err); return cb(err)
}); })
} }
, callback); , callback)
} }
async.waterfall([ async.waterfall([
// Check that ulimit has been set to the correct value // Check that ulimit has been set to the correct value
function (cb) { function (cb) {
i = 0; i = 0
fds = []; fds = []
multipleOpen('./test_lac/openFdsTestFile', 2 * N + 1, function (err) { multipleOpen('./test_lac/openFdsTestFile', 2 * N + 1, function (err) {
if (!err) { console.log("No error occured while opening a file too many times"); } if (!err) { console.log('No error occured while opening a file too many times') }
fds.forEach(function (fd) { fs.closeSync(fd); }); fds.forEach(function (fd) { fs.closeSync(fd) })
return cb(); return cb()
}) })
} },
, function (cb) { function (cb) {
i = 0; i = 0
fds = []; fds = []
multipleOpen('./test_lac/openFdsTestFile2', N, function (err) { multipleOpen('./test_lac/openFdsTestFile2', N, function (err) {
if (err) { console.log('An unexpected error occured when opening file not too many times: ' + err); } if (err) { console.log('An unexpected error occured when opening file not too many times: ' + err) }
fds.forEach(function (fd) { fs.closeSync(fd); }); fds.forEach(function (fd) { fs.closeSync(fd) })
return cb(); return cb()
}) })
} },
// Then actually test NeDB persistence // Then actually test NeDB persistence
, function () { function () {
db.remove({}, { multi: true }, function (err) { db.remove({}, { multi: true }, function (err) {
if (err) { console.log(err); } if (err) { console.log(err) }
db.insert({ hello: 'world' }, function (err) { db.insert({ hello: 'world' }, function (err) {
if (err) { console.log(err); } if (err) { console.log(err) }
i = 0; i = 0
async.whilst( function () { return i < 2 * N + 1; } async.whilst(function () { return i < 2 * N + 1 }
, function (cb) { , function (cb) {
db.persistence.persistCachedDatabase(function (err) { db.persistence.persistCachedDatabase(function (err) {
if (err) { return cb(err); } if (err) { return cb(err) }
i += 1; i += 1
return cb(); return cb()
}); })
} }
, function (err) { , function (err) {
if (err) { console.log("Got unexpected error during one peresistence operation: " + err); } if (err) { console.log('Got unexpected error during one peresistence operation: ' + err) }
} }
); )
})
}); })
});
} }
]); ])

Loading…
Cancel
Save