re-make the build process with webpack, switch to Karma for the browser tests, bump all dependencies (with one exception), remove bower.json, remove browser build from repo (will be published on npm though)

pull/2/head
Timothée Rebours 4 years ago
parent 1b16d2f541
commit a4cd69f5c6
  1. 5
      .gitignore
  2. 11
      benchmarks/commonUtilities.js
  3. 11
      benchmarks/ensureIndex.js
  4. 7
      benchmarks/find.js
  5. 7
      benchmarks/findOne.js
  6. 7
      benchmarks/findWithIn.js
  7. 7
      benchmarks/insert.js
  8. 11
      benchmarks/loadDatabase.js
  9. 92
      benchmarks/profiler.js
  10. 7
      benchmarks/remove.js
  11. 7
      benchmarks/update.js
  12. 6
      bower.json
  13. 95
      browser-version/browser-specific/lib/storage.js
  14. 101
      browser-version/build.js
  15. 61
      browser-version/lib/customUtils.js
  16. 84
      browser-version/lib/storage.js
  17. 9419
      browser-version/out/nedb.js
  18. 4
      browser-version/out/nedb.min.js
  19. 8
      browser-version/package.json
  20. 2
      browser-version/test/async.js
  21. 5332
      browser-version/test/chai.js
  22. 24
      browser-version/test/index.html
  23. 4
      browser-version/test/jquery.min.js
  24. 2758
      browser-version/test/localforage.js
  25. 199
      browser-version/test/mocha.css
  26. 4859
      browser-version/test/mocha.js
  27. 309
      browser-version/test/nedb-browser.js
  28. 11
      browser-version/test/playground.html
  29. 16
      browser-version/test/testLoad.html
  30. 111
      browser-version/test/testLoad.js
  31. 13
      browser-version/test/testPersistence.html
  32. 20
      browser-version/test/testPersistence.js
  33. 14
      browser-version/test/testPersistence2.html
  34. 39
      browser-version/test/testPersistence2.js
  35. 6
      browser-version/test/underscore.min.js
  36. 23
      karma.conf.local.js
  37. 62
      karma.conf.template.js
  38. 2
      lib/cursor.js
  39. 22
      lib/datastore.js
  40. 4
      lib/indexes.js
  41. 8
      lib/persistence.js
  42. 2
      lib/storage.js
  43. 6
      mochaReportConfig.json
  44. 4033
      package-lock.json
  45. 37
      package.json
  46. 125
      test/browser/load.spec.js
  47. 342
      test/browser/nedb-browser.spec.js
  48. 48
      webpack.config.js

5
.gitignore vendored

@ -11,6 +11,8 @@ pids
logs
results
.DS_Store
npm-debug.log
workspace
node_modules
@ -21,3 +23,6 @@ browser-version/node_modules
*.swp
*~
*.swo
browser-version/out
test-results

@ -29,7 +29,7 @@ module.exports.getConfiguration = function (benchDb) {
console.log('----------------------------')
console.log('Test with ' + n + ' documents')
console.log(program.withIndex ? 'Use an index' : "Don't use an index")
console.log(program.withIndex ? 'Use an index' : 'Don\'t use an index')
console.log(program.inMemory ? 'Use an in-memory datastore' : 'Use a persistent datastore')
console.log('----------------------------')
@ -75,7 +75,7 @@ function getRandomArray (n) {
}
return res
};
}
module.exports.getRandomArray = getRandomArray
/**
@ -102,6 +102,7 @@ module.exports.insertDocs = function (d, n, profiler, cb) {
})
})
}
runFrom(0)
}
@ -128,6 +129,7 @@ module.exports.findDocs = function (d, n, profiler, cb) {
})
})
}
runFrom(0)
}
@ -164,6 +166,7 @@ module.exports.findDocsWithIn = function (d, n, profiler, cb) {
})
})
}
runFrom(0)
}
@ -190,6 +193,7 @@ module.exports.findOneDocs = function (d, n, profiler, cb) {
})
})
}
runFrom(0)
}
@ -218,6 +222,7 @@ module.exports.updateDocs = function (options, d, n, profiler, cb) {
})
})
}
runFrom(0)
}
@ -253,6 +258,7 @@ module.exports.removeDocs = function (options, d, n, profiler, cb) {
})
})
}
runFrom(0)
}
@ -276,5 +282,6 @@ module.exports.loadDatabase = function (d, n, profiler, cb) {
})
})
}
runFrom(0)
}

@ -1,11 +1,12 @@
const Datastore = require('../lib/datastore')
const benchDb = 'workspace/insert.bench.db'
const async = require('async')
const program = require('commander')
const Datastore = require('../lib/datastore')
const commonUtilities = require('./commonUtilities')
const ExecTime = require('exec-time')
const profiler = new ExecTime('INSERT BENCH')
const Profiler = require('./profiler')
const profiler = new Profiler('INSERT BENCH')
const benchDb = 'workspace/insert.bench.db'
const d = new Datastore(benchDb)
const program = require('commander')
program
.option('-n --number [number]', 'Size of the collection to test on', parseInt)

@ -1,8 +1,9 @@
const benchDb = 'workspace/find.bench.db'
const async = require('async')
const ExecTime = require('exec-time')
const profiler = new ExecTime('FIND BENCH')
const commonUtilities = require('./commonUtilities')
const Profiler = require('./profiler')
const profiler = new Profiler('FIND BENCH')
const benchDb = 'workspace/find.bench.db'
const config = commonUtilities.getConfiguration(benchDb)
const d = config.d
const n = config.n

@ -1,8 +1,9 @@
const benchDb = 'workspace/findOne.bench.db'
const async = require('async')
const ExecTime = require('exec-time')
const profiler = new ExecTime('FINDONE BENCH')
const commonUtilities = require('./commonUtilities')
const Profiler = require('./profiler')
const benchDb = 'workspace/findOne.bench.db'
const profiler = new Profiler('FINDONE BENCH')
const config = commonUtilities.getConfiguration(benchDb)
const d = config.d
const n = config.n

@ -1,8 +1,9 @@
const benchDb = 'workspace/find.bench.db'
const async = require('async')
const ExecTime = require('exec-time')
const profiler = new ExecTime('FIND BENCH')
const commonUtilities = require('./commonUtilities')
const Profiler = require('./profiler')
const benchDb = 'workspace/find.bench.db'
const profiler = new Profiler('FIND BENCH')
const config = commonUtilities.getConfiguration(benchDb)
const d = config.d
const n = config.n

@ -1,8 +1,9 @@
const benchDb = 'workspace/insert.bench.db'
const async = require('async')
const ExecTime = require('exec-time')
const profiler = new ExecTime('INSERT BENCH')
const commonUtilities = require('./commonUtilities')
const Profiler = require('./profiler')
const benchDb = 'workspace/insert.bench.db'
const profiler = new Profiler('INSERT BENCH')
const config = commonUtilities.getConfiguration(benchDb)
const d = config.d
let n = config.n

@ -1,11 +1,12 @@
const Datastore = require('../lib/datastore')
const benchDb = 'workspace/loaddb.bench.db'
const async = require('async')
const program = require('commander')
const Datastore = require('../lib/datastore')
const commonUtilities = require('./commonUtilities')
const ExecTime = require('exec-time')
const profiler = new ExecTime('LOADDB BENCH')
const Profiler = require('./profiler')
const benchDb = 'workspace/loaddb.bench.db'
const profiler = new Profiler('LOADDB BENCH')
const d = new Datastore(benchDb)
const program = require('commander')
program
.option('-n --number [number]', 'Size of the collection to test on', parseInt)

@ -0,0 +1,92 @@
const util = require('util')
/**
 * Render a duration as a human-readable string.
 * Millisecond values strictly above one second are printed in seconds with a
 * single decimal (truncated, not rounded); everything else keeps three decimals
 * followed by the precision label.
 * @param {number} time duration expressed in `precision` units
 * @param {string} precision unit label ('ms' or 'ns')
 * @return {string}
 */
function formatTime (time, precision) {
  const showAsSeconds = precision === 'ms' && time > 1000
  if (showAsSeconds) return (Math.floor(time / 100) / 10) + ' s'
  return time.toFixed(3) + ' ' + precision
}
// Current monotonic timestamp from process.hrtime, expressed in nanoseconds
function getTime () {
  const [seconds, nanos] = process.hrtime()
  return seconds * 1e9 + nanos
}
/**
 * Profiler monitoring the execution time of a benchmark run.
 *
 * Usage: call beginProfiling() once, then step(msg) after each phase. Each
 * step records the elapsed time since the previous one and since the latest
 * reset.
 *
 * @param {string} name label printed in front of every log line
 * @param {boolean} [logToConsole=true] when true, steps are printed to the
 *   console; otherwise they can be read back from the `steps` array
 * @param {string} [precision='ms'] unit used for display ('ms' or 'ns')
 */
function Profiler (name, logToConsole, precision) {
  this.name = name
  this.steps = [] // [msg, raw ns timestamp] pairs, starting with BEGIN_TIMER
  this.sinceBeginning = null // raw ns timestamp of the latest reset
  this.lastStep = null // raw ns timestamp of the latest recorded step
  this.logToConsole = logToConsole === undefined ? true : logToConsole
  this.precision = precision === undefined ? 'ms' : precision
  // Raw timestamps are nanoseconds; dividing by this yields `precision` units
  this.divisor = this.precision === 'ms' ? 1e6 : 1
}

// Announce the profiling session and start the timers
Profiler.prototype.beginProfiling = function () {
  if (this.logToConsole) console.log(`${this.name} - Begin profiling`)
  this.resetTimers()
}

// (Re)initialize both timers and record the BEGIN_TIMER step
Profiler.prototype.resetTimers = function () {
  this.sinceBeginning = getTime()
  this.lastStep = getTime()
  this.steps.push(['BEGIN_TIMER', this.lastStep])
}

// Elapsed time since the latest reset, in `precision` units
Profiler.prototype.elapsedSinceBeginning = function () {
  return (getTime() - this.sinceBeginning) / this.divisor
}

// Elapsed time since the latest step, in `precision` units
Profiler.prototype.elapsedSinceLastStep = function () {
  return (getTime() - this.lastStep) / this.divisor
}

// Return the deltas between consecutive steps, in `precision` units
Profiler.prototype.getSteps = function () {
  const div = this.divisor
  return this.steps
    .map((step, i, all) => {
      if (i === 0) return undefined
      return [step[0], (step[1] - all[i - 1][1]) / div]
    })
    .slice(1)
}

/**
 * Record (and optionally log) a named step.
 * Warns and does nothing when beginProfiling has not been called yet.
 * @param {string} msg step label
 */
Profiler.prototype.step = function (msg) {
  if (!this.sinceBeginning || !this.lastStep) {
    console.log(util.format(
      '%s - %s - You must call beginProfiling before registering steps',
      this.name,
      msg
    ))
    return
  }

  if (this.logToConsole) {
    // Elapsed times are computed before lastStep is refreshed below
    console.log(util.format('%s - %s - %s (total: %s)',
      this.name,
      msg,
      formatTime(this.elapsedSinceLastStep(), this.precision),
      formatTime(this.elapsedSinceBeginning(), this.precision)
    ))
  }

  this.lastStep = getTime()
  this.steps.push([msg, this.lastStep])
}
module.exports = Profiler

@ -1,8 +1,9 @@
const benchDb = 'workspace/remove.bench.db'
const async = require('async')
const ExecTime = require('exec-time')
const profiler = new ExecTime('REMOVE BENCH')
const commonUtilities = require('./commonUtilities')
const Profiler = require('./profiler')
const benchDb = 'workspace/remove.bench.db'
const profiler = new Profiler('REMOVE BENCH')
const config = commonUtilities.getConfiguration(benchDb)
const d = config.d
const n = config.n

@ -1,8 +1,9 @@
const benchDb = 'workspace/update.bench.db'
const async = require('async')
const ExecTime = require('exec-time')
const profiler = new ExecTime('UPDATE BENCH')
const commonUtilities = require('./commonUtilities')
const Profiler = require('./profiler')
const benchDb = 'workspace/update.bench.db'
const profiler = new Profiler('UPDATE BENCH')
const config = commonUtilities.getConfiguration(benchDb)
const d = config.d
const n = config.n

@ -1,6 +0,0 @@
{
"name": "nedb",
"description": "The Javascript Database for Node, nwjs, Electron and the browser",
"ignore": ["benchmarks", "lib", "test", "test_lac"],
"main": ["browser-version/nedb.js", "browser-version/nedb.min.js"]
}

@ -1,95 +0,0 @@
/**
* Way data is stored for this database
* For a Node.js/Node Webkit database it's the file system
* For a browser-side database it's localforage, which uses the best backend available (IndexedDB then WebSQL then localStorage)
*
* This version is the browser version
*/
var localforage = require('localforage')
// Configure localforage to display NeDB name for now. Would be a good idea to let user use his own app name
localforage.config({
name: 'NeDB'
, storeName: 'nedbdata'
});
function exists (filename, callback) {
localforage.getItem(filename, function (err, value) {
if (value !== null) { // Even if value is undefined, localforage returns null
return callback(true);
} else {
return callback(false);
}
});
}
function rename (filename, newFilename, callback) {
localforage.getItem(filename, function (err, value) {
if (value === null) {
localforage.removeItem(newFilename, function () { return callback(); });
} else {
localforage.setItem(newFilename, value, function () {
localforage.removeItem(filename, function () { return callback(); });
});
}
});
}
function writeFile (filename, contents, options, callback) {
// Options do not matter in browser setup
if (typeof options === 'function') { callback = options; }
localforage.setItem(filename, contents, function () { return callback(); });
}
function appendFile (filename, toAppend, options, callback) {
// Options do not matter in browser setup
if (typeof options === 'function') { callback = options; }
localforage.getItem(filename, function (err, contents) {
contents = contents || '';
contents += toAppend;
localforage.setItem(filename, contents, function () { return callback(); });
});
}
function readFile (filename, options, callback) {
// Options do not matter in browser setup
if (typeof options === 'function') { callback = options; }
localforage.getItem(filename, function (err, contents) { return callback(null, contents || ''); });
}
function unlink (filename, callback) {
localforage.removeItem(filename, function () { return callback(); });
}
// Nothing to do, no directories will be used on the browser
function mkdirp (dir, callback) {
return callback();
}
// Nothing to do, no data corruption possible in the brower
function ensureDatafileIntegrity (filename, callback) {
return callback(null);
}
// Interface
module.exports.exists = exists;
module.exports.rename = rename;
module.exports.writeFile = writeFile;
module.exports.crashSafeWriteFile = writeFile; // No need for a crash safe function in the browser
module.exports.appendFile = appendFile;
module.exports.readFile = readFile;
module.exports.unlink = unlink;
module.exports.mkdirp = mkdirp;
module.exports.ensureDatafileIntegrity = ensureDatafileIntegrity;

@ -1,101 +0,0 @@
/**
* Build the browser version of nedb
*/
var fs = require('fs')
, path = require('path')
, child_process = require('child_process')
, toCopy = ['lib', 'node_modules']
, async, browserify, uglify
;
// Ensuring both node_modules (the source one and build one), src and out directories exist
function ensureDirExists (name) {
try {
fs.mkdirSync(path.join(__dirname, name));
} catch (e) {
if (e.code !== 'EEXIST') {
console.log("Error ensuring that node_modules exists");
process.exit(1);
}
}
}
ensureDirExists('../node_modules');
ensureDirExists('node_modules');
ensureDirExists('out');
ensureDirExists('src');
// Installing build dependencies and require them
console.log("Installing build dependencies");
child_process.exec('npm install', { cwd: __dirname }, function (err, stdout, stderr) {
if (err) { console.log("Error reinstalling dependencies"); process.exit(1); }
fs = require('fs-extra');
async = require('async');
browserify = require('browserify');
uglify = require('uglify-js');
async.waterfall([
function (cb) {
console.log("Installing source dependencies if needed");
child_process.exec('npm install', { cwd: path.join(__dirname, '..') }, function (err) { return cb(err); });
}
, function (cb) {
console.log("Removing contents of the src directory");
async.eachSeries(fs.readdirSync(path.join(__dirname, 'src')), function (item, _cb) {
fs.remove(path.join(__dirname, 'src', item), _cb);
}, cb);
}
, function (cb) {
console.log("Copying source files");
async.eachSeries(toCopy, function (item, _cb) {
fs.copy(path.join(__dirname, '..', item), path.join(__dirname, 'src', item), _cb);
}, cb);
}
, function (cb) {
console.log("Copying browser specific files to replace their server-specific counterparts");
async.eachSeries(fs.readdirSync(path.join(__dirname, 'browser-specific')), function (item, _cb) {
fs.copy(path.join(__dirname, 'browser-specific', item), path.join(__dirname, 'src', item), _cb);
}, cb);
}
, function (cb) {
console.log("Browserifying the code");
var b = browserify()
, srcPath = path.join(__dirname, 'src/lib/datastore.js');
b.add(srcPath);
b.bundle({ standalone: 'Nedb' }, function (err, out) {
if (err) { return cb(err); }
fs.writeFile(path.join(__dirname, 'out/nedb.js'), out, 'utf8', function (err) {
if (err) {
return cb(err);
} else {
return cb(null, out);
}
});
});
}
, function (out, cb) {
console.log("Creating the minified version");
var compressedCode = uglify.minify(out, { fromString: true });
fs.writeFile(path.join(__dirname, 'out/nedb.min.js'), compressedCode.code, 'utf8', cb);
}
], function (err) {
if (err) {
console.log("Error during build");
console.log(err);
} else {
console.log("Build finished with success");
}
});
});

@ -8,59 +8,58 @@
* NOTE: Math.random() does not guarantee "cryptographic quality" but we actually don't need it
*/
function randomBytes (size) {
var bytes = new Array(size);
var r;
const bytes = new Array(size)
for (var i = 0, r; i < size; i++) {
if ((i & 0x03) == 0) r = Math.random() * 0x100000000;
bytes[i] = r >>> ((i & 0x03) << 3) & 0xff;
for (let i = 0, r; i < size; i++) {
if ((i & 0x03) === 0) r = Math.random() * 0x100000000
bytes[i] = r >>> ((i & 0x03) << 3) & 0xff
}
return bytes;
return bytes
}
/**
* Taken from the base64-js module
* https://github.com/beatgammit/base64-js/
*/
function byteArrayToBase64 (uint8) {
var lookup = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'
, extraBytes = uint8.length % 3 // if we have 1 byte left, pad 2 bytes
, output = ""
, temp, length, i;
const lookup = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'
const extraBytes = uint8.length % 3 // if we have 1 byte left, pad 2 bytes
let output = ''
let temp
let length
let i
function tripletToBase64 (num) {
return lookup[num >> 18 & 0x3F] + lookup[num >> 12 & 0x3F] + lookup[num >> 6 & 0x3F] + lookup[num & 0x3F];
};
return lookup[num >> 18 & 0x3F] + lookup[num >> 12 & 0x3F] + lookup[num >> 6 & 0x3F] + lookup[num & 0x3F]
}
// go through the array every three bytes, we'll deal with trailing stuff later
for (i = 0, length = uint8.length - extraBytes; i < length; i += 3) {
temp = (uint8[i] << 16) + (uint8[i + 1] << 8) + (uint8[i + 2]);
output += tripletToBase64(temp);
temp = (uint8[i] << 16) + (uint8[i + 1] << 8) + (uint8[i + 2])
output += tripletToBase64(temp)
}
// pad the end with zeros, but make sure to not forget the extra bytes
switch (extraBytes) {
case 1:
temp = uint8[uint8.length - 1];
output += lookup[temp >> 2];
output += lookup[(temp << 4) & 0x3F];
output += '==';
break;
temp = uint8[uint8.length - 1]
output += lookup[temp >> 2]
output += lookup[(temp << 4) & 0x3F]
output += '=='
break
case 2:
temp = (uint8[uint8.length - 2] << 8) + (uint8[uint8.length - 1]);
output += lookup[temp >> 10];
output += lookup[(temp >> 4) & 0x3F];
output += lookup[(temp << 2) & 0x3F];
output += '=';
break;
temp = (uint8[uint8.length - 2] << 8) + (uint8[uint8.length - 1])
output += lookup[temp >> 10]
output += lookup[(temp >> 4) & 0x3F]
output += lookup[(temp << 2) & 0x3F]
output += '='
break
}
return output;
return output
}
/**
* Return a random alphanumerical string of length len
* There is a very small probability (less than 1/1,000,000) for the length to be less than len
@ -70,9 +69,7 @@ function byteArrayToBase64 (uint8) {
* See http://en.wikipedia.org/wiki/Birthday_problem
*/
function uid (len) {
return byteArrayToBase64(randomBytes(Math.ceil(Math.max(8, len * 2)))).replace(/[+\/]/g, '').slice(0, len);
return byteArrayToBase64(randomBytes(Math.ceil(Math.max(8, len * 2)))).replace(/[+/]/g, '').slice(0, len)
}
module.exports.uid = uid;
module.exports.uid = uid

@ -0,0 +1,84 @@
/**
 * Way data is stored for this database
 * For a Node.js/Node Webkit database it's the file system
 * For a browser-side database it's localforage, which uses the best backend available (IndexedDB then WebSQL then localStorage)
 *
 * This version is the browser version
 */
const localforage = require('localforage')

// Configure localforage to display NeDB name for now. Would be a good idea to let user use his own app name
// NOTE(review): name/storeName are fixed, so every NeDB database on the same origin shares this one store — confirm this is intended
const store = localforage.createInstance({
  name: 'NeDB',
  storeName: 'nedbdata'
})
/**
 * Check whether a given key holds a value in the store.
 * @param {string} filename key to look up
 * @param {function(boolean)} cback called with a single boolean (no error argument)
 */
function exists (filename, cback) {
  // eslint-disable-next-line node/handle-callback-err
  store.getItem(filename, (err, value) => {
    // localforage returns null for missing keys (even if the stored value was undefined)
    return cback(value !== null)
  })
}
/**
 * Move the value stored under filename to newFilename.
 * When the source key is empty, the target key is cleared instead,
 * so the end state matches a rename either way.
 */
function rename (filename, newFilename, callback) {
  // eslint-disable-next-line node/handle-callback-err
  store.getItem(filename, (err, value) => {
    if (value !== null) {
      store.setItem(newFilename, value, () => {
        store.removeItem(filename, () => callback())
      })
    } else {
      store.removeItem(newFilename, () => callback())
    }
  })
}
/**
 * Overwrite the value stored under the filename key.
 * @param {object|function} options ignored in the browser; may be the callback itself
 */
function writeFile (filename, contents, options, callback) {
  // Options do not matter in browser setup
  const cb = typeof options === 'function' ? options : callback
  store.setItem(filename, contents, () => cb())
}
/**
 * Append toAppend to the value stored under the filename key,
 * treating a missing key as the empty string.
 * @param {object|function} options ignored in the browser; may be the callback itself
 */
function appendFile (filename, toAppend, options, callback) {
  // Options do not matter in browser setup
  const cb = typeof options === 'function' ? options : callback
  // eslint-disable-next-line node/handle-callback-err
  store.getItem(filename, (err, contents) => {
    const newContents = (contents || '') + toAppend
    store.setItem(filename, newContents, () => cb())
  })
}
/**
 * Read the value stored under the filename key; a missing key yields ''.
 * @param {object|function} options ignored in the browser; may be the callback itself
 */
function readFile (filename, options, callback) {
  // Options do not matter in browser setup
  const cb = typeof options === 'function' ? options : callback
  // eslint-disable-next-line node/handle-callback-err
  store.getItem(filename, (err, contents) => cb(null, contents || ''))
}
/** Delete the key from the store; removing a missing key also succeeds. */
function unlink (filename, callback) {
  store.removeItem(filename, () => {
    callback()
  })
}
/**
 * No-op: the browser storage layer is flat, there are no directories to create.
 * Invokes the callback immediately with no arguments.
 */
function mkdir (dir, options, callback) {
  return callback()
}
/**
 * No-op in the browser: the storage backend cannot leave a half-written
 * datafile behind, so there is never anything to repair.
 * Always reports success by calling back with a null error.
 */
function ensureDatafileIntegrity (filename, callback) {
  return callback(null)
}
// Interface — presumably the same names the server-side storage module exposes,
// so the persistence layer can use either implementation interchangeably (TODO confirm against lib/persistence.js)
module.exports.exists = exists
module.exports.rename = rename
module.exports.writeFile = writeFile
module.exports.crashSafeWriteFile = writeFile // No need for a crash safe function in the browser
module.exports.appendFile = appendFile
module.exports.readFile = readFile
module.exports.unlink = unlink
module.exports.mkdir = mkdir
module.exports.ensureDatafileIntegrity = ensureDatafileIntegrity

File diff suppressed because it is too large Load Diff

File diff suppressed because one or more lines are too long

@ -1,8 +0,0 @@
{
"dependencies": {
"async": "~0.2.9",
"fs-extra": "~0.6.3",
"uglify-js": "~2.3.6",
"browserify": "~2.25.0"
}
}

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large Load Diff

@ -1,24 +0,0 @@
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<title>Mocha tests for NeDB</title>
<link rel="stylesheet" href="mocha.css">
</head>
<body>
<div id="mocha"></div>
<script src="jquery.min.js"></script>
<script src="chai.js"></script>
<script src="underscore.min.js"></script>
<script src="mocha.js"></script>
<script>mocha.setup('bdd')</script>
<script src="../out/nedb.min.js"></script>
<script src="localforage.js"></script>
<script src="nedb-browser.js"></script>
<script>
mocha.checkLeaks();
mocha.globals(['jQuery']);
mocha.run();
</script>
</body>
</html>

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large Load Diff

@ -1,199 +0,0 @@
@charset "UTF-8";
body {
font: 20px/1.5 "Helvetica Neue", Helvetica, Arial, sans-serif;
padding: 60px 50px;
}
#mocha ul, #mocha li {
margin: 0;
padding: 0;
}
#mocha ul {
list-style: none;
}
#mocha h1, #mocha h2 {
margin: 0;
}
#mocha h1 {
margin-top: 15px;
font-size: 1em;
font-weight: 200;
}
#mocha h1 a {
text-decoration: none;
color: inherit;
}
#mocha h1 a:hover {
text-decoration: underline;
}
#mocha .suite .suite h1 {
margin-top: 0;
font-size: .8em;
}
#mocha h2 {
font-size: 12px;
font-weight: normal;
cursor: pointer;
}
#mocha .suite {
margin-left: 15px;
}
#mocha .test {
margin-left: 15px;
}
#mocha .test:hover h2::after {
position: relative;
top: 0;
right: -10px;
content: '(view source)';
font-size: 12px;
font-family: arial;
color: #888;
}
#mocha .test.pending:hover h2::after {
content: '(pending)';
font-family: arial;
}
#mocha .test.pass.medium .duration {
background: #C09853;
}
#mocha .test.pass.slow .duration {
background: #B94A48;
}
#mocha .test.pass::before {
content: '✓';
font-size: 12px;
display: block;
float: left;
margin-right: 5px;
color: #00d6b2;
}
#mocha .test.pass .duration {
font-size: 9px;
margin-left: 5px;
padding: 2px 5px;
color: white;
-webkit-box-shadow: inset 0 1px 1px rgba(0,0,0,.2);
-moz-box-shadow: inset 0 1px 1px rgba(0,0,0,.2);
box-shadow: inset 0 1px 1px rgba(0,0,0,.2);
-webkit-border-radius: 5px;
-moz-border-radius: 5px;
-ms-border-radius: 5px;
-o-border-radius: 5px;
border-radius: 5px;
}
#mocha .test.pass.fast .duration {
display: none;
}
#mocha .test.pending {
color: #0b97c4;
}
#mocha .test.pending::before {
content: '◦';
color: #0b97c4;
}
#mocha .test.fail {
color: #c00;
}
#mocha .test.fail pre {
color: black;
}
#mocha .test.fail::before {
content: '✖';
font-size: 12px;
display: block;
float: left;
margin-right: 5px;
color: #c00;
}
#mocha .test pre.error {
color: #c00;
}
#mocha .test pre {
display: inline-block;
font: 12px/1.5 monaco, monospace;
margin: 5px;
padding: 15px;
border: 1px solid #eee;
border-bottom-color: #ddd;
-webkit-border-radius: 3px;
-webkit-box-shadow: 0 1px 3px #eee;
}
#report.pass .test.fail {
display: none;
}
#report.fail .test.pass {
display: none;
}
#error {
color: #c00;
font-size: 1.5 em;
font-weight: 100;
letter-spacing: 1px;
}
#stats {
position: fixed;
top: 15px;
right: 10px;
font-size: 12px;
margin: 0;
color: #888;
}
#stats .progress {
float: right;
padding-top: 0;
}
#stats em {
color: black;
}
#stats a {
text-decoration: none;
color: inherit;
}
#stats a:hover {
border-bottom: 1px solid #eee;
}
#stats li {
display: inline-block;
margin: 0 5px;
list-style: none;
padding-top: 11px;
}
code .comment { color: #ddd }
code .init { color: #2F6FAD }
code .string { color: #5890AD }
code .keyword { color: #8A6343 }
code .number { color: #2F6FAD }

File diff suppressed because it is too large Load Diff

@ -1,309 +0,0 @@
/**
* Testing the browser version of NeDB
* The goal of these tests is not to be exhaustive, we have the server-side NeDB tests for that
* This is more of a sanity check which executes most of the code at least once and checks
* it behaves as the server version does
*/
var assert = chai.assert;
/**
* Given a docs array and an id, return the document whose id matches, or null if none is found
*/
function findById (docs, id) {
return _.find(docs, function (doc) { return doc._id === id; }) || null;
}
describe('Basic CRUD functionality', function () {
it('Able to create a database object in the browser', function () {
var db = new Nedb();
assert.equal(db.inMemoryOnly, true);
assert.equal(db.persistence.inMemoryOnly, true);
});
it('Insertion and querying', function (done) {
var db = new Nedb();
db.insert({ a: 4 }, function (err, newDoc1) {
assert.isNull(err);
db.insert({ a: 40 }, function (err, newDoc2) {
assert.isNull(err);
db.insert({ a: 400 }, function (err, newDoc3) {
assert.isNull(err);
db.find({ a: { $gt: 36 } }, function (err, docs) {
var doc2 = _.find(docs, function (doc) { return doc._id === newDoc2._id; })
, doc3 = _.find(docs, function (doc) { return doc._id === newDoc3._id; })
;
assert.isNull(err);
assert.equal(docs.length, 2);
assert.equal(doc2.a, 40);
assert.equal(doc3.a, 400);
db.find({ a: { $lt: 36 } }, function (err, docs) {
assert.isNull(err);
assert.equal(docs.length, 1);
assert.equal(docs[0].a, 4);
done();
});
});
});
});
});
});
it('Querying with regular expressions', function (done) {
var db = new Nedb();
db.insert({ planet: 'Earth' }, function (err, newDoc1) {
assert.isNull(err);
db.insert({ planet: 'Mars' }, function (err, newDoc2) {
assert.isNull(err);
db.insert({ planet: 'Jupiter' }, function (err, newDoc3) {
assert.isNull(err);
db.insert({ planet: 'Eaaaaaarth' }, function (err, newDoc4) {
assert.isNull(err);
db.insert({ planet: 'Maaaars' }, function (err, newDoc5) {
assert.isNull(err);
db.find({ planet: /ar/ }, function (err, docs) {
assert.isNull(err);
assert.equal(docs.length, 4);
assert.equal(_.find(docs, function (doc) { return doc._id === newDoc1._id; }).planet, 'Earth');
assert.equal(_.find(docs, function (doc) { return doc._id === newDoc2._id; }).planet, 'Mars');
assert.equal(_.find(docs, function (doc) { return doc._id === newDoc4._id; }).planet, 'Eaaaaaarth');
assert.equal(_.find(docs, function (doc) { return doc._id === newDoc5._id; }).planet, 'Maaaars');
db.find({ planet: /aa+r/ }, function (err, docs) {
assert.isNull(err);
assert.equal(docs.length, 2);
assert.equal(_.find(docs, function (doc) { return doc._id === newDoc4._id; }).planet, 'Eaaaaaarth');
assert.equal(_.find(docs, function (doc) { return doc._id === newDoc5._id; }).planet, 'Maaaars');
done();
});
});
});
});
});
});
});
});
it('Updating documents', function (done) {
var db = new Nedb();
db.insert({ planet: 'Eaaaaarth' }, function (err, newDoc1) {
db.insert({ planet: 'Maaaaars' }, function (err, newDoc2) {
// Simple update
db.update({ _id: newDoc2._id }, { $set: { planet: 'Saturn' } }, {}, function (err, nr) {
assert.isNull(err);
assert.equal(nr, 1);
db.find({}, function (err, docs) {
assert.equal(docs.length, 2);
assert.equal(findById(docs, newDoc1._id).planet, 'Eaaaaarth');
assert.equal(findById(docs, newDoc2._id).planet, 'Saturn');
// Failing update
db.update({ _id: 'unknown' }, { $inc: { count: 1 } }, {}, function (err, nr) {
assert.isNull(err);
assert.equal(nr, 0);
db.find({}, function (err, docs) {
assert.equal(docs.length, 2);
assert.equal(findById(docs, newDoc1._id).planet, 'Eaaaaarth');
assert.equal(findById(docs, newDoc2._id).planet, 'Saturn');
// Document replacement
db.update({ planet: 'Eaaaaarth' }, { planet: 'Uranus' }, { multi: false }, function (err, nr) {
assert.isNull(err);
assert.equal(nr, 1);
db.find({}, function (err, docs) {
assert.equal(docs.length, 2);
assert.equal(findById(docs, newDoc1._id).planet, 'Uranus');
assert.equal(findById(docs, newDoc2._id).planet, 'Saturn');
// Multi update
db.update({}, { $inc: { count: 3 } }, { multi: true }, function (err, nr) {
assert.isNull(err);
assert.equal(nr, 2);
db.find({}, function (err, docs) {
assert.equal(docs.length, 2);
assert.equal(findById(docs, newDoc1._id).planet, 'Uranus');
assert.equal(findById(docs, newDoc1._id).count, 3);
assert.equal(findById(docs, newDoc2._id).planet, 'Saturn');
assert.equal(findById(docs, newDoc2._id).count, 3);
done();
});
});
});
});
});
});
});
});
});
});
});
it('Updating documents: special modifiers', function (done) {
var db = new Nedb();
db.insert({ planet: 'Earth' }, function (err, newDoc1) {
// Pushing to an array
db.update({}, { $push: { satellites: 'Phobos' } }, {}, function (err, nr) {
assert.isNull(err);
assert.equal(nr, 1);
db.findOne({}, function (err, doc) {
assert.deepEqual(doc, { planet: 'Earth', _id: newDoc1._id, satellites: ['Phobos'] });
db.update({}, { $push: { satellites: 'Deimos' } }, {}, function (err, nr) {
assert.isNull(err);
assert.equal(nr, 1);
db.findOne({}, function (err, doc) {
assert.deepEqual(doc, { planet: 'Earth', _id: newDoc1._id, satellites: ['Phobos', 'Deimos'] });
done();
});
});
});
});
});
});
it('Upserts', function (done) {
var db = new Nedb();
db.update({ a: 4 }, { $inc: { b: 1 } }, { upsert: true }, function (err, nr, upsert) {
assert.isNull(err);
// Return upserted document
assert.equal(upsert.a, 4);
assert.equal(upsert.b, 1);
assert.equal(nr, 1);
db.find({}, function (err, docs) {
assert.equal(docs.length, 1);
assert.equal(docs[0].a, 4);
assert.equal(docs[0].b, 1);
done();
});
});
});
it('Removing documents', function (done) {
var db = new Nedb();
db.insert({ a: 2 });
db.insert({ a: 5 });
db.insert({ a: 7 });
// Multi remove
db.remove({ a: { $in: [ 5, 7 ] } }, { multi: true }, function (err, nr) {
assert.isNull(err);
assert.equal(nr, 2);
db.find({}, function (err, docs) {
assert.equal(docs.length, 1);
assert.equal(docs[0].a, 2);
// Remove with no match
db.remove({ b: { $exists: true } }, { multi: true }, function (err, nr) {
assert.isNull(err);
assert.equal(nr, 0);
db.find({}, function (err, docs) {
assert.equal(docs.length, 1);
assert.equal(docs[0].a, 2);
// Simple remove
db.remove({ a: { $exists: true } }, { multi: true }, function (err, nr) {
assert.isNull(err);
assert.equal(nr, 1);
db.find({}, function (err, docs) {
assert.equal(docs.length, 0);
done();
});
});
});
});
});
});
});
}); // ==== End of 'Basic CRUD functionality' ==== //
describe('Indexing', function () {
  // Verifies that getCandidates returns the whole collection without an index,
  // and narrows to matching documents once an index on the field exists.
  it('getCandidates works as expected', function (done) {
    var db = new Nedb();
    db.insert({ a: 4 }, function () {
      db.insert({ a: 6 }, function () {
        db.insert({ a: 7 }, function () {
          db.getCandidates({ a: 6 }, function (err, candidates) {
            // No index yet: every document is a candidate.
            // (Removed a leftover debug console.log of the candidates array.)
            assert.equal(candidates.length, 3);
            assert.isDefined(_.find(candidates, function (doc) { return doc.a === 4; }));
            assert.isDefined(_.find(candidates, function (doc) { return doc.a === 6; }));
            assert.isDefined(_.find(candidates, function (doc) { return doc.a === 7; }));
            db.ensureIndex({ fieldName: 'a' });
            db.getCandidates({ a: 6 }, function (err, candidates) {
              // Indexed lookup: only the matching document remains.
              assert.equal(candidates.length, 1);
              assert.isDefined(_.find(candidates, function (doc) { return doc.a === 6; }));
              done();
            });
          });
        });
      });
    });
  });
  // A unique index must reject a second insert with the same key value.
  it('Can use indexes to enforce a unique constraint', function (done) {
    var db = new Nedb();
    db.ensureIndex({ fieldName: 'u', unique: true });
    db.insert({ u: 5 }, function (err) {
      assert.isNull(err);
      db.insert({ u: 98 }, function (err) {
        assert.isNull(err);
        db.insert({ u: 5 }, function (err) {
          // Duplicate key on the unique index 'u'
          assert.equal(err.errorType, 'uniqueViolated');
          done();
        });
      });
    });
  });
}); // ==== End of 'Indexing' ==== //
// Placeholder suite: persistence cannot be exercised here, it lives in a separate manual page.
describe("Don't forget to launch persistence tests!", function () {
it("See file testPersistence.html", function (done) {
done();
});
}); // ===== End of 'persistent in-browser database' =====

@ -1,11 +0,0 @@
<!-- Empty scratch page: loads only the minified browser bundle so NeDB can be
     exercised by hand from the devtools console. -->
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<title>Playground for NeDB</title>
</head>
<body>
<script src="../out/nedb.min.js"></script>
</body>
</html>

@ -1,16 +0,0 @@
<!-- Harness for the storage load benchmarks: pulls in localforage, async and the
     non-minified NeDB bundle before running testLoad.js. -->
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<title>Test NeDB persistence load in the browser</title>
<link rel="stylesheet" href="mocha.css">
</head>
<body>
<div id="results"></div>
<script src="./localforage.js"></script>
<script src="./async.js"></script>
<script src="../out/nedb.js"></script>
<script src="./testLoad.js"></script>
</body>
</html>

@ -1,111 +0,0 @@
console.log('BEGINNING');
// Shared fixtures for the benchmarks below.
// NOTE(review): the trailing lone ';' on the last line is a stray empty statement.
var N = 50000
, db = new Nedb({ filename: 'loadTest', autoload: true })
, t, i
, sample = JSON.stringify({ data: Math.random(), _id: Math.random() });
;
// Some inserts in sequence, using the default storage mechanism (IndexedDB in my case)
function someInserts (sn, N, callback) {
var i = 0, beg = Date.now();
async.whilst( function () { return i < N; }
, function (_cb) {
db.insert({ data: Math.random() }, function (err) { i += 1; return _cb(err); });
}
, function (err) {
console.log("Inserts, series " + sn + " " + (Date.now() - beg));
return callback(err);
});
}
// Manually updating the localStorage on the same variable
function someLS (sn, N, callback) {
var i = 0, beg = Date.now();
for (i = 0; i < N; i += 1) {
localStorage.setItem('loadTestLS', getItem('loadTestLS') + sample);
}
console.log("localStorage, series " + sn + " " + (Date.now() - beg));
return callback();
}
// Manually updating the localStorage on different variables
function someLSDiff (sn, N, callback) {
var i = 0, beg = Date.now();
for (i = 0; i < N; i += 1) {
localStorage.setItem('loadTestLS-' + i, sample);
}
console.log("localStorage, series " + sn + " " + (Date.now() - beg));
return callback();
}
// Manually updating the localforage default on the same variable (IndexedDB on my machine)
function someLF (sn, N, callback) {
var i = 0, beg = Date.now();
async.whilst( function () { return i < N; }
, function (_cb) {
localforage.getItem('loadTestLF', function (err, value) {
if (err) { return _cb(err); }
localforage.setItem('loadTestLF', value + sample, function (err) { i += 1; return _cb(err); });
});
}
, function (err) {
console.log("localForage/IDB, series " + sn + " " + (Date.now() - beg));
return callback(err);
});
}
// Manually updating the localforage default on the different variables (IndexedDB on my machine)
function someLFDiff (sn, N, callback) {
var i = 0, beg = Date.now();
async.whilst( function () { return i < N; }
, function (_cb) {
localforage.setItem('loadTestLF-' + i, sample, function (err) { i += 1; return _cb(err); });
}
, function (err) {
console.log("localForage/IDB, series " + sn + " " + (Date.now() - beg));
return callback(err);
});
}
// Reset the single-key localStorage slot before any benchmark runs.
localStorage.setItem('loadTestLS', '');
// Benchmark driver: as committed, every benchmark step is commented out, so the
// waterfall only performs the database cleanup. Uncomment steps (one group at a
// time) to reproduce the timings noted on each line.
async.waterfall([
function (cb) { db.remove({}, { multi: true }, function (err) { return cb(err); }); }
// Slow and gets slower with database size
//, async.apply(someInserts, "#1", N) // N=5000, 141s
//, async.apply(someInserts, "#2", N) // N=5000, 208s
//, async.apply(someInserts, "#3", N) // N=5000, 281s
//, async.apply(someInserts, "#4", N) // N=5000, 350s
// Slow and gets slower really fast with database size, then outright crashes
//, async.apply(someLS, "#1", N) // N=4000, 2.5s
//, async.apply(someLS, "#2", N) // N=4000, 8.0s
//, async.apply(someLS, "#3", N) // N=4000, 26.5s
//, async.apply(someLS, "#4", N) // N=4000, 47.8s then crash, can't get string (with N=5000 crash happens on second pass)
// Much faster and more consistent
//, async.apply(someLSDiff, "#1", N) // N=50000, 0.7s
//, async.apply(someLSDiff, "#2", N) // N=50000, 0.5s
//, async.apply(someLSDiff, "#3", N) // N=50000, 0.5s
//, async.apply(someLSDiff, "#4", N) // N=50000, 0.5s
// Slow and gets slower with database size
//, function (cb) { localforage.setItem('loadTestLF', '', function (err) { return cb(err) }) }
//, async.apply(someLF, "#1", N) // N=5000, 69s
//, async.apply(someLF, "#2", N) // N=5000, 108s
//, async.apply(someLF, "#3", N) // N=5000, 137s
//, async.apply(someLF, "#4", N) // N=5000, 169s
// Quite fast and speed doesn't change with database size (tested with N=10000 and N=50000, still no slow-down)
//, async.apply(someLFDiff, "#1", N) // N=5000, 18s
//, async.apply(someLFDiff, "#2", N) // N=5000, 18s
//, async.apply(someLFDiff, "#3", N) // N=5000, 18s
//, async.apply(someLFDiff, "#4", N) // N=5000, 18s
]);

@ -1,13 +0,0 @@
<!-- Stage 1 of the manual persistence test: writes a document, then navigates to
     testPersistence2.html which verifies it survived. -->
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<title>Test NeDB persistence in the browser</title>
<link rel="stylesheet" href="mocha.css">
</head>
<body>
<div id="results"></div>
<script src="../out/nedb.js"></script>
<script src="./testPersistence.js"></script>
</body>
</html>

@ -1,20 +0,0 @@
// Stage 1 of the manual persistence check: seed the datastore with a single
// known document, then hand off to testPersistence2.html for verification.
console.log("Beginning tests");
console.log("Please note these tests work on Chrome latest, might not work on other browsers due to discrepancies in how local storage works for the file:// protocol");
function testsFailed () {
document.getElementById("results").innerHTML = "TESTS FAILED";
}
var filename = 'test';
var db = new Nedb({ filename: filename, autoload: true });
// Empty the datastore so the verification page sees exactly one document
db.remove({}, { multi: true }, function () {
db.insert({ hello: 'world' }, function (err) {
if (err) {
testsFailed();
return;
}
// Navigate to the verification page; persistence is checked after the reload
window.location = './testPersistence2.html';
});
});

@ -1,14 +0,0 @@
<!-- Stage 2 of the manual persistence test: re-opens the datastore written by
     testPersistence.html and reports pass/fail in the page body. -->
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<title>Test NeDB persistence in the browser - Results</title>
<link rel="stylesheet" href="mocha.css">
</head>
<body>
<div id="results"></div>
<script src="jquery.min.js"></script>
<script src="../out/nedb.js"></script>
<script src="./testPersistence2.js"></script>
</body>
</html>

@ -1,39 +0,0 @@
// Capture F5 to reload the base page testPersistence.html not this one
$(document).on('keydown', function (e) {
if (e.keyCode === 116) {
e.preventDefault();
window.location = 'testPersistence.html';
}
});
// Stage 2: reload the datastore seeded by testPersistence.js and verify its contents.
console.log("Checking tests results");
console.log("Please note these tests work on Chrome latest, might not work on other browsers due to discrepancies in how local storage works for the file:// protocol");
function testsFailed () {
document.getElementById("results").innerHTML = "TESTS FAILED";
}
var filename = 'test';
var db = new Nedb({ filename: filename, autoload: true });
db.find({}, function (err, docs) {
// Exactly one document must have survived the page reload
if (docs.length !== 1) {
console.log(docs);
console.log("Unexpected length of document database");
return testsFailed();
}
// Expected keys: 'hello' plus the generated _id
if (Object.keys(docs[0]).length !== 2) {
console.log("Unexpected length insert document in database");
return testsFailed();
}
if (docs[0].hello !== 'world') {
console.log("Unexpected document");
return testsFailed();
}
document.getElementById("results").innerHTML = "BROWSER PERSISTENCE TEST PASSED";
});

File diff suppressed because one or more lines are too long

@ -0,0 +1,23 @@
'use strict'
/* eslint-disable @typescript-eslint/no-var-requires */
const template = require('./karma.conf.template.js')
module.exports = function (config) {
const localBrowser = {
ChromeHeadlessNoSandbox: {
base: 'ChromeHeadless',
flags: ['--no-sandbox']
}
}
config.set(Object.assign({}, template(config), {
customLaunchers: localBrowser,
browsers: ['ChromeHeadlessNoSandbox']
// browsers: ['FirefoxHeadless'],
// browsers: ['Safari'],
// browsers: ['ChromeHeadlessNoSandbox', 'FirefoxHeadless', 'Safari'],
// concurrency: 3
}))
}

@ -0,0 +1,62 @@
'use strict'
const path = require('path')
// Shared Karma settings factory, merged/overridden by karma.conf.local.js.
module.exports = (config) => ({
  // Increase timeout in case connection in CI is slow
  captureTimeout: 120000,
  browserNoActivityTimeout: 300000,
  browserDisconnectTimeout: 300000,
  browserDisconnectTolerance: 3,

  // frameworks to use
  // available frameworks: https://npmjs.org/browse/keyword/karma-adapter
  frameworks: ['mocha', 'chai', 'source-map-support'],

  // list of files / patterns to load in the browser
  // Order matters: globals (underscore, localforage, async) and the built
  // bundle must load before the spec files that reference them.
  files: [
    'node_modules/underscore/underscore-min.js',
    'node_modules/localforage/dist/localforage.min.js',
    'node_modules/async/lib/async.js',
    'browser-version/out/nedb.min.js',
    'test/browser/nedb-browser.spec.js',
    'test/browser/load.spec.js'
  ],

  // test results reporter to use
  // possible values: 'dots', 'progress'
  // available reporters: https://npmjs.org/browse/keyword/karma-reporter
  reporters: ['progress', 'junit'],

  junitReporter: {
    outputDir: 'test-results', // results will be saved as $outputDir/$browserName.xml
    useBrowserName: true // add browser name to report and classes names
  },

  // Continuous Integration mode
  // if true, Karma captures browsers, runs the tests and exits
  singleRun: true,

  // web server port
  port: 9876,

  // enable / disable colors in the output (reporters and logs)
  colors: true,

  // level of logging
  // possible values: config.LOG_DISABLE || config.LOG_ERROR || config.LOG_WARN || config.LOG_INFO || config.LOG_DEBUG
  logLevel: config.LOG_INFO,

  // enable / disable watching file and executing tests whenever any file changes
  autoWatch: false,

  // Concurrency level
  // how many browser should be started simultaneous
  concurrency: 1,

  // base path that will be used to resolve all patterns (eg. files, exclude)
  basePath: '',

  // list of files to exclude
  exclude: []
})

@ -1,8 +1,8 @@
/**
* Manage access to data, be it to find, update or remove it
*/
const model = require('./model')
const _ = require('underscore')
const model = require('./model.js')
class Cursor {
/**

@ -1,14 +1,15 @@
const customUtils = require('./customUtils')
const model = require('./model')
const async = require('async')
const Executor = require('./executor')
const Index = require('./indexes')
const { EventEmitter } = require('events')
const util = require('util')
const async = require('async')
const _ = require('underscore')
const Persistence = require('./persistence')
const Cursor = require('./cursor')
class Datastore {
const Cursor = require('./cursor.js')
const customUtils = require('./customUtils.js')
const Executor = require('./executor.js')
const Index = require('./indexes.js')
const model = require('./model.js')
const Persistence = require('./persistence.js')
class Datastore extends EventEmitter {
/**
* Create a new collection
* @param {String} options.filename Optional, datastore will be in-memory only if not provided
@ -26,6 +27,7 @@ class Datastore {
* * compaction.done - Fired whenever a compaction operation was finished
*/
constructor (options) {
super()
let filename
// Retrocompatibility with v0.6 and before
@ -700,6 +702,4 @@ class Datastore {
}
}
util.inherits(Datastore, require('events').EventEmitter)
module.exports = Datastore

@ -1,6 +1,6 @@
const BinarySearchTree = require('@seald-io/binary-search-tree').BinarySearchTree
const model = require('./model')
const _ = require('underscore')
const BinarySearchTree = require('@seald-io/binary-search-tree').BinarySearchTree
const model = require('./model.js')
/**
* Two indexed pointers are equal iif they point to the same place

@ -4,12 +4,12 @@
* * Persistence.loadDatabase(callback) and callback has signature err
* * Persistence.persistNewState(newDocs, callback) where newDocs is an array of documents and callback has signature err
*/
const storage = require('./storage')
const path = require('path')
const model = require('./model')
const async = require('async')
const customUtils = require('./customUtils')
const Index = require('./indexes')
const customUtils = require('./customUtils.js')
const Index = require('./indexes.js')
const model = require('./model.js')
const storage = require('./storage.js')
class Persistence {
/**

@ -7,8 +7,8 @@
* It's essentially fs, mkdirp and crash safe write and read functions
*/
const fs = require('fs')
const async = require('async')
const path = require('path')
const async = require('async')
const storage = {}
// eslint-disable-next-line node/no-callback-literal

@ -0,0 +1,6 @@
{
"reporterEnabled": "mocha-junit-reporter, spec",
"mochaJunitReporterReporterOptions": {
"mochaFile": "test-results/report.xml"
}
}

4033
package-lock.json generated

File diff suppressed because it is too large Load Diff

@ -27,29 +27,44 @@
},
"devDependencies": {
"chai": "^4.3.4",
"commander": "1.1.1",
"exec-time": "0.0.2",
"commander": "^7.2.0",
"events": "^3.3.0",
"jquery": "^3.6.0",
"karma": "^6.3.2",
"karma-chai": "^0.1.0",
"karma-chrome-launcher": "^3.1.0",
"karma-junit-reporter": "^2.0.1",
"karma-mocha": "^2.0.1",
"karma-source-map-support": "^1.4.0",
"mocha": "^8.4.0",
"request": "2.9.x",
"mocha-junit-reporter": "^2.0.0",
"path-browserify": "^1.0.1",
"process": "^0.11.10",
"semver": "^7.3.5",
"sinon": "1.3.x",
"standard": "^16.0.3"
"source-map-loader": "^2.0.2",
"standard": "^16.0.3",
"terser-webpack-plugin": "^5.1.2",
"timers-browserify": "^2.0.12",
"util": "^0.12.3",
"webpack": "^5.37.0",
"webpack-cli": "^4.7.0",
"xvfb-maybe": "^0.2.1"
},
"scripts": {
"test": "mocha --reporter spec --timeout 10000"
"test": "mocha --reporter spec --timeout 10000",
"build:browser": "webpack && webpack --optimization-minimize",
"pretest:browser": "npm run build:browser",
"test:browser": "xvfb-maybe karma start karma.conf.local.js"
},
"main": "index.js",
"browser": {
"./lib/customUtils.js": "./browser-version/browser-specific/lib/customUtils.js",
"./lib/storage.js": "./browser-version/browser-specific/lib/storage.js"
},
"browser": "browser-version/out/nedb.min.js",
"license": "MIT",
"publishConfig": {
"access": "public"
},
"standard": {
"ignore": [
"browser-version"
"browser-version/out"
]
}
}

@ -0,0 +1,125 @@
/* eslint-env mocha, browser */
/* global async, Nedb, localforage */
// Shared fixtures for the load benchmarks below: run size, datastore, and a
// fixed JSON payload appended/written by each benchmark.
const N = 5000
const db = new Nedb({ filename: 'loadTest', autoload: true })
const sample = JSON.stringify({ data: Math.random(), _id: Math.random() })
// Some inserts in sequence, using the default storage mechanism (IndexedDB in my case)
const someInserts = (sn, N, callback) => {
const beg = Date.now()
let i = 0
async.whilst(() => i < N, _cb => {
db.insert({ data: Math.random() }, err => { i += 1; return _cb(err) })
}, err => {
console.log('Inserts, series ' + sn + ' ' + (Date.now() - beg))
return callback(err)
})
}
// Manually updating the localStorage on the same variable
const someLS = (sn, N, callback) => {
const beg = Date.now()
for (let i = 0; i < N; i += 1) {
localStorage.setItem('loadTestLS', localStorage.getItem('loadTestLS') + sample)
}
console.log('localStorage, series ' + sn + ' ' + (Date.now() - beg))
return callback()
}
// Manually updating the localStorage on different variables
const someLSDiff = (sn, N, callback) => {
const beg = Date.now()
for (let i = 0; i < N; i += 1) {
localStorage.setItem('loadTestLS-' + i, sample)
}
console.log('localStorage, series ' + sn + ' ' + (Date.now() - beg))
return callback()
}
// Manually updating the localforage default on the same variable (IndexedDB on my machine)
function someLF (sn, N, callback) {
const beg = Date.now()
let i = 0
async.whilst(() => i < N, _cb => {
localforage.getItem('loadTestLF', (err, value) => {
if (err) return _cb(err)
localforage.setItem('loadTestLF', value + sample, err => { i += 1; return _cb(err) })
})
}, err => {
console.log('localForage/IDB, series ' + sn + ' ' + (Date.now() - beg))
return callback(err)
})
}
// Manually updating the localforage default on the different variables (IndexedDB on my machine)
const someLFDiff = (sn, N, callback) => {
const beg = Date.now()
let i = 0
async.whilst(() => i < N, _cb => {
localforage.setItem('loadTestLF-' + i, sample, err => { i += 1; return _cb(err) })
}, err => {
console.log('localForage/IDB, series ' + sn + ' ' + (Date.now() - beg))
return callback(err)
})
}
// These tests benchmark various key/value storage methods, we skip them by default
describe.skip('Load tests', function () {
this.timeout(60000)
before('Cleanup', function (done) {
localStorage.setItem('loadTestLS', '')
db.remove({}, { multi: true }, err => done(err))
})
it.skip('Inserts', function (done) {
async.waterfall([
// Slow and gets slower with database size
async.apply(someInserts, '#1', N), // N=5000, 141s
async.apply(someInserts, '#2', N), // N=5000, 208s
async.apply(someInserts, '#3', N), // N=5000, 281s
async.apply(someInserts, '#4', N) // N=5000, 350s
], done)
})
it.skip('Localstorage', function (done) {
async.waterfall([
// Slow and gets slower really fast with database size, then outright crashes
async.apply(someLS, '#1', N), // N=4000, 2.5s
async.apply(someLS, '#2', N), // N=4000, 8.0s
async.apply(someLS, '#3', N), // N=4000, 26.5s
async.apply(someLS, '#4', N) // N=4000, 47.8s then crash, can't get string (with N=5000 crash happens on second pass)
], done)
})
it.skip('Localstorage Diff', function (done) {
async.waterfall([
// Much faster and more consistent
async.apply(someLSDiff, '#1', N), // N=50000, 0.7s
async.apply(someLSDiff, '#2', N), // N=50000, 0.5s
async.apply(someLSDiff, '#3', N), // N=50000, 0.5s
async.apply(someLSDiff, '#4', N) // N=50000, 0.5s
], done)
})
it.skip('LocalForage', function (done) {
async.waterfall([
// Slow and gets slower with database size
cb => { localforage.setItem('loadTestLF', '', err => cb(err)) },
async.apply(someLF, '#1', N), // N=5000, 69s
async.apply(someLF, '#2', N), // N=5000, 108s
async.apply(someLF, '#3', N), // N=5000, 137s
async.apply(someLF, '#4', N) // N=5000, 169s
], done)
})
it.skip('LocalForage diff', function (done) {
async.waterfall([
// Quite fast and speed doesn't change with database size (tested with N=10000 and N=50000, still no slow-down)
async.apply(someLFDiff, '#1', N), // N=5000, 18s
async.apply(someLFDiff, '#2', N), // N=5000, 18s
async.apply(someLFDiff, '#3', N), // N=5000, 18s
async.apply(someLFDiff, '#4', N) // N=5000, 18s
], done)
})
})

@ -0,0 +1,342 @@
/* eslint-env mocha */
/* global chai, _, Nedb */
/**
* Testing the browser version of NeDB
* The goal of these tests is not to be exhaustive, we have the server-side NeDB tests for that
* This is more of a sanity check which executes most of the code at least once and checks
* it behaves as the server version does
*/
const assert = chai.assert
/**
 * Given a docs array and an id, return the document whose id matches, or null if none is found
 */
function findById (docs, id) {
  // Native Array.prototype.find is equivalent to _.find on arrays here
  return docs.find(function (doc) { return doc._id === id }) || null
}
// Sanity-check suite for the browser build: insert, query, update and remove,
// mirroring (not replacing) the exhaustive server-side test suite.
describe('Basic CRUD functionality', function () {
it('Able to create a database object in the browser', function () {
const db = new Nedb()
// With no filename, the browser datastore must be memory-only
assert.equal(db.inMemoryOnly, true)
assert.equal(db.persistence.inMemoryOnly, true)
})
it('Insertion and querying', function (done) {
const db = new Nedb()
db.insert({ a: 4 }, function (err, newDoc1) {
assert.isNull(err)
db.insert({ a: 40 }, function (err, newDoc2) {
assert.isNull(err)
db.insert({ a: 400 }, function (err, newDoc3) {
assert.isNull(err)
db.find({ a: { $gt: 36 } }, function (err, docs) {
const doc2 = _.find(docs, function (doc) { return doc._id === newDoc2._id })
const doc3 = _.find(docs, function (doc) { return doc._id === newDoc3._id })
assert.isNull(err)
// $gt matches the 40 and 400 documents only
assert.equal(docs.length, 2)
assert.equal(doc2.a, 40)
assert.equal(doc3.a, 400)
db.find({ a: { $lt: 36 } }, function (err, docs) {
assert.isNull(err)
assert.equal(docs.length, 1)
assert.equal(docs[0].a, 4)
done()
})
})
})
})
})
})
it('Querying with regular expressions', function (done) {
const db = new Nedb()
db.insert({ planet: 'Earth' }, function (err, newDoc1) {
assert.isNull(err)
db.insert({ planet: 'Mars' }, function (err, newDoc2) {
assert.isNull(err)
db.insert({ planet: 'Jupiter' }, function (err, newDoc3) {
assert.isNull(err)
db.insert({ planet: 'Eaaaaaarth' }, function (err, newDoc4) {
assert.isNull(err)
db.insert({ planet: 'Maaaars' }, function (err, newDoc5) {
assert.isNull(err)
db.find({ planet: /ar/ }, function (err, docs) {
assert.isNull(err)
// /ar/ matches everything except Jupiter
assert.equal(docs.length, 4)
assert.equal(_.find(docs, function (doc) { return doc._id === newDoc1._id }).planet, 'Earth')
assert.equal(_.find(docs, function (doc) { return doc._id === newDoc2._id }).planet, 'Mars')
assert.equal(_.find(docs, function (doc) { return doc._id === newDoc4._id }).planet, 'Eaaaaaarth')
assert.equal(_.find(docs, function (doc) { return doc._id === newDoc5._id }).planet, 'Maaaars')
db.find({ planet: /aa+r/ }, function (err, docs) {
assert.isNull(err)
assert.equal(docs.length, 2)
assert.equal(_.find(docs, function (doc) { return doc._id === newDoc4._id }).planet, 'Eaaaaaarth')
assert.equal(_.find(docs, function (doc) { return doc._id === newDoc5._id }).planet, 'Maaaars')
done()
})
})
})
})
})
})
})
})
it('Updating documents', function (done) {
const db = new Nedb()
// eslint-disable-next-line node/handle-callback-err
db.insert({ planet: 'Eaaaaarth' }, function (err, newDoc1) {
// eslint-disable-next-line node/handle-callback-err
db.insert({ planet: 'Maaaaars' }, function (err, newDoc2) {
// Simple update
db.update({ _id: newDoc2._id }, { $set: { planet: 'Saturn' } }, {}, function (err, nr) {
assert.isNull(err)
assert.equal(nr, 1)
// eslint-disable-next-line node/handle-callback-err
db.find({}, function (err, docs) {
assert.equal(docs.length, 2)
assert.equal(findById(docs, newDoc1._id).planet, 'Eaaaaarth')
assert.equal(findById(docs, newDoc2._id).planet, 'Saturn')
// Failing update
db.update({ _id: 'unknown' }, { $inc: { count: 1 } }, {}, function (err, nr) {
assert.isNull(err)
// No match: zero documents updated, nothing changed
assert.equal(nr, 0)
// eslint-disable-next-line node/handle-callback-err
db.find({}, function (err, docs) {
assert.equal(docs.length, 2)
assert.equal(findById(docs, newDoc1._id).planet, 'Eaaaaarth')
assert.equal(findById(docs, newDoc2._id).planet, 'Saturn')
// Document replacement
db.update({ planet: 'Eaaaaarth' }, { planet: 'Uranus' }, { multi: false }, function (err, nr) {
assert.isNull(err)
assert.equal(nr, 1)
// eslint-disable-next-line node/handle-callback-err
db.find({}, function (err, docs) {
assert.equal(docs.length, 2)
assert.equal(findById(docs, newDoc1._id).planet, 'Uranus')
assert.equal(findById(docs, newDoc2._id).planet, 'Saturn')
// Multi update
db.update({}, { $inc: { count: 3 } }, { multi: true }, function (err, nr) {
assert.isNull(err)
assert.equal(nr, 2)
// eslint-disable-next-line node/handle-callback-err
db.find({}, function (err, docs) {
assert.equal(docs.length, 2)
assert.equal(findById(docs, newDoc1._id).planet, 'Uranus')
assert.equal(findById(docs, newDoc1._id).count, 3)
assert.equal(findById(docs, newDoc2._id).planet, 'Saturn')
assert.equal(findById(docs, newDoc2._id).count, 3)
done()
})
})
})
})
})
})
})
})
})
})
})
it('Updating documents: special modifiers', function (done) {
const db = new Nedb()
// eslint-disable-next-line node/handle-callback-err
db.insert({ planet: 'Earth' }, function (err, newDoc1) {
// Pushing to an array
db.update({}, { $push: { satellites: 'Phobos' } }, {}, function (err, nr) {
assert.isNull(err)
assert.equal(nr, 1)
// eslint-disable-next-line node/handle-callback-err
db.findOne({}, function (err, doc) {
// $push on a missing field must create a one-element array
assert.deepEqual(doc, { planet: 'Earth', _id: newDoc1._id, satellites: ['Phobos'] })
db.update({}, { $push: { satellites: 'Deimos' } }, {}, function (err, nr) {
assert.isNull(err)
assert.equal(nr, 1)
// eslint-disable-next-line node/handle-callback-err
db.findOne({}, function (err, doc) {
assert.deepEqual(doc, { planet: 'Earth', _id: newDoc1._id, satellites: ['Phobos', 'Deimos'] })
done()
})
})
})
})
})
})
it('Upserts', function (done) {
const db = new Nedb()
db.update({ a: 4 }, { $inc: { b: 1 } }, { upsert: true }, function (err, nr, upsert) {
assert.isNull(err)
// Return upserted document
assert.equal(upsert.a, 4)
assert.equal(upsert.b, 1)
assert.equal(nr, 1)
// eslint-disable-next-line node/handle-callback-err
db.find({}, function (err, docs) {
assert.equal(docs.length, 1)
assert.equal(docs[0].a, 4)
assert.equal(docs[0].b, 1)
done()
})
})
})
it('Removing documents', function (done) {
const db = new Nedb()
db.insert({ a: 2 })
db.insert({ a: 5 })
db.insert({ a: 7 })
// Multi remove
db.remove({ a: { $in: [5, 7] } }, { multi: true }, function (err, nr) {
assert.isNull(err)
assert.equal(nr, 2)
// eslint-disable-next-line node/handle-callback-err
db.find({}, function (err, docs) {
assert.equal(docs.length, 1)
assert.equal(docs[0].a, 2)
// Remove with no match
db.remove({ b: { $exists: true } }, { multi: true }, function (err, nr) {
assert.isNull(err)
assert.equal(nr, 0)
// eslint-disable-next-line node/handle-callback-err
db.find({}, function (err, docs) {
// A non-matching remove must leave the collection untouched
assert.equal(docs.length, 1)
assert.equal(docs[0].a, 2)
// Simple remove
db.remove({ a: { $exists: true } }, { multi: true }, function (err, nr) {
assert.isNull(err)
assert.equal(nr, 1)
// eslint-disable-next-line node/handle-callback-err
db.find({}, function (err, docs) {
assert.equal(docs.length, 0)
done()
})
})
})
})
})
})
})
}) // ==== End of 'Basic CRUD functionality' ==== //
// Index behavior: candidate narrowing via getCandidates and unique-constraint enforcement.
describe('Indexing', function () {
it('getCandidates works as expected', function (done) {
const db = new Nedb()
db.insert({ a: 4 }, function () {
db.insert({ a: 6 }, function () {
db.insert({ a: 7 }, function () {
// eslint-disable-next-line node/handle-callback-err
db.getCandidates({ a: 6 }, function (err, candidates) {
// No index yet: every document is a candidate
assert.equal(candidates.length, 3)
assert.isDefined(_.find(candidates, function (doc) { return doc.a === 4 }))
assert.isDefined(_.find(candidates, function (doc) { return doc.a === 6 }))
assert.isDefined(_.find(candidates, function (doc) { return doc.a === 7 }))
db.ensureIndex({ fieldName: 'a' })
// eslint-disable-next-line node/handle-callback-err
db.getCandidates({ a: 6 }, function (err, candidates) {
// Indexed lookup narrows candidates to the matching document
assert.equal(candidates.length, 1)
assert.isDefined(_.find(candidates, function (doc) { return doc.a === 6 }))
done()
})
})
})
})
})
})
it('Can use indexes to enforce a unique constraint', function (done) {
const db = new Nedb()
db.ensureIndex({ fieldName: 'u', unique: true })
db.insert({ u: 5 }, function (err) {
assert.isNull(err)
db.insert({ u: 98 }, function (err) {
assert.isNull(err)
db.insert({ u: 5 }, function (err) {
// Duplicate key on the unique index 'u'
assert.equal(err.errorType, 'uniqueViolated')
done()
})
})
})
})
}) // ==== End of 'Indexing' ==== //
// Persistence check run inside Karma: write in before(), read back in the test.
describe("Don't forget to launch persistence tests!", function () {
const filename = 'test'
before('Clean & write', function (done) {
const db = new Nedb({ filename: filename, autoload: true })
// Start from an empty datastore so the read phase sees exactly one document
db.remove({}, { multi: true }, function () {
db.insert({ hello: 'world' }, function (err) {
assert.isNull(err)
done()
})
})
})
it('Read & check', function (done) {
// Re-open the same datastore to force a load from the persistence layer
const db = new Nedb({ filename: filename, autoload: true })
db.find({}, (err, docs) => {
assert.isNull(err)
if (docs.length !== 1) {
return done(new Error('Unexpected length of document database'))
}
// Expected keys: 'hello' plus the generated _id
if (Object.keys(docs[0]).length !== 2) {
return done(new Error('Unexpected length insert document in database'))
}
if (docs[0].hello !== 'world') {
return done(new Error('Unexpected document'))
}
done()
})
})
}) // ===== End of 'persistent in-browser database' =====

@ -0,0 +1,48 @@
'use strict'
const path = require('path')
const webpack = require('webpack')
// Webpack factory for the browser bundle. Run twice by "build:browser":
// once plain (nedb.js) and once with --optimization-minimize (nedb.min.js).
module.exports = (env, argv) => {
const minimize = argv.optimizationMinimize || false
return {
mode: 'production',
cache: false,
watch: false,
target: 'web',
node: {
global: true
},
optimization: {
minimize: minimize
},
resolve: {
// Browser stand-ins for Node core modules; fs/crypto are stubbed out
// entirely (false) since the browser storage layer does not use them.
fallback: {
fs: false,
path: require.resolve('path-browserify'),
util: require.resolve('util/'),
events: require.resolve('events/'),
crypto: false
}
},
plugins: [
// Swap the Node storage/customUtils modules for their browser counterparts
new webpack.NormalModuleReplacementPlugin(new RegExp(path.resolve(__dirname, 'lib/storage.js')), path.resolve(__dirname, 'browser-version/lib/storage.js')),
new webpack.NormalModuleReplacementPlugin(new RegExp(path.resolve(__dirname, 'lib/customUtils.js')), path.resolve(__dirname, 'browser-version/lib/customUtils.js')),
// Inject shims for Node globals referenced by the bundled code
new webpack.ProvidePlugin({
process: 'process/browser',
Buffer: ['buffer', 'Buffer'],
setImmediate: ['timers-browserify', 'setImmediate'],
clearImmediate: ['timers-browserify', 'clearImmediate']
})
],
entry: {
Nedb: path.join(__dirname, 'lib', 'datastore.js')
},
output: {
path: path.join(__dirname, 'browser-version/out'),
filename: minimize ? 'nedb.min.js' : 'nedb.js',
// Expose the bundle as window.Nedb (entry key becomes the library name)
libraryTarget: 'window',
library: '[name]'
}
}
}
Loading…
Cancel
Save