Timothée Rebours 3 years ago
parent c2b6fafadd
commit 201400f610
  1. .gitignore (1 changed line)
  2. CHANGELOG.md (15 changed lines)
  3. README.md (4 changed lines)
  4. jsdoc.conf.js (11 changed lines)
  5. lib/cursor.js (21 changed lines)
  6. lib/datastore.js (611 changed lines)
  7. lib/indexes.js (70 changed lines)
  8. lib/persistence.js (13 changed lines)
  9. lib/storage.js (3 changed lines)
  10. package-lock.json (11564 changed lines)
  11. package.json (6 changed lines)
  12. test/db.async.test.js (18 changed lines)
  13. test/db.test.js (20 changed lines)
  14. test/persistence.async.test.js (2 changed lines)

.gitignore (vendored): 1 changed line

@ -26,3 +26,4 @@ browser-version/node_modules
browser-version/out
test-results
docs

@ -6,6 +6,21 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres
to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## [3.0.0] - Unreleased
### Added
- Added an async interface for all functions
- The JSDoc is now much more exhaustive
### Changed
- All the functions are now async at the core, and a fully retro-compatible callback-ified version is exposed.
- The executor is now much simpler and Promise-based. A retro-compatible shim is still exposed, with the exception that it no longer handles [`arguments`](https://developer.mozilla.org/fr/docs/Web/JavaScript/Reference/Functions/arguments) as the arguments Array. If you use the executor directly, you'll need to convert it to a proper Array beforehand.
- As a result, the `async` dependency has been removed completely. To avoid rewriting the tests, shims of some `async` functions are defined in a utilities file used exclusively in the tests.
- The signature of the `Datastore#update` callback has changed slightly: the `upsert` flag is now always `true` or `false` (never `null` nor `undefined`), and `affectedDocuments` is `null` rather than `undefined` when there are none (except, of course, when there is an error).
### Deprecated
- Formally deprecate giving a string as argument to the `Datastore` constructor
- Formally deprecate using `Persistence.getNWAppFilename()` and `options.nodeWebkitAppName`
## [2.2.0] - 2021-10-29
### Added
- Include a `"react-native"` version (heavily inspired by [react-native-local-mongodb](https://github.com/antoniopresto/react-native-local-mongodb)).
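
For readers upgrading, here is a minimal sketch of the two interfaces described in this changelog: the new Promise-based methods and the retro-compatible callback methods. The surrounding async wrapper is only there so `await` can be used; everything else follows the public API documented in this commit.

```js
const Datastore = require('@seald-io/nedb')
const db = new Datastore({ filename: 'example.db', autoload: true })

async function demo () {
  // New Promise-based interface
  const earth = await db.insertAsync({ planet: 'Earth' })
  console.log('inserted', earth._id)

  // Retro-compatible callback interface, unchanged for existing code
  db.insert({ planet: 'Mars' }, (err, mars) => {
    if (err) throw err
    console.log('inserted', mars._id)
  })
}

demo()
```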

@ -12,9 +12,9 @@ and maintain it for the needs of [Seald](https://www.seald.io).
browsers, 100% JavaScript, no binary dependency**. API is a subset of MongoDB's
and it's [plenty fast](#speed).
## Installation, tests
## Installation
Module name on npm is `@seald-io/nedb`.
Module name on npm is [`@seald-io/nedb`](https://www.npmjs.com/package/@seald-io/nedb).
```
npm install @seald-io/nedb

@ -0,0 +1,11 @@
'use strict'
module.exports = {
plugins: ['plugins/markdown'],
source: {
include: ['./lib']
},
opts: {
destination: './docs'
}
}

@ -4,12 +4,29 @@
const model = require('./model.js')
const { callbackify, promisify } = require('util')
/**
* @callback Cursor~execFn
* @param {?Error} err
* @param {?document[]|?document} res
*/
/**
* @callback Cursor~execFnAsync
* @param {?document[]|?document} res
* @return {Promise}
*/
/**
* @extends Promise
*/
class Cursor {
/**
* Create a new cursor for this collection
* @param {Datastore} db - The datastore this cursor is bound to
* @param {Query} query - The query this cursor will operate on
* @param {Function} execFn - Handler to be executed after cursor has found the results and before the callback passed to find/findOne/update/remove
* @param {query} query - The query this cursor will operate on
* @param {Cursor~execFn|Cursor~execFnAsync} [execFn] - Handler to be executed after cursor has found the results and before the callback passed to find/findOne/update/remove
* @param {boolean} [async = false] If true, specifies that the `execFn` is of type {@link Cursor~execFnAsync} rather than {@link Cursor~execFn}.
*
*/
constructor (db, query, execFn, async = false) {
this.db = db

@ -1,5 +1,5 @@
const { EventEmitter } = require('events')
const { callbackify } = require('util')
const { callbackify, deprecate } = require('util')
const Cursor = require('./cursor.js')
const customUtils = require('./customUtils.js')
const Executor = require('./executor.js')
@ -8,22 +8,147 @@ const model = require('./model.js')
const Persistence = require('./persistence.js')
const { isDate } = require('./utils.js')
/**
* Compaction event. Emitted when the Datastore's Persistence has compacted its datafile.
* It happens when calling `datastore.persistence.compactDatafile`, which is called periodically if you have set up
* auto-compaction with `datastore.persistence.setAutocompactionInterval`.
*
* @event Datastore#event:"compaction.done"
* @type {undefined}
*/
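
For reference, a minimal sketch of reacting to this event, assuming the `compactDatafile` and `setAutocompactionInterval` methods exposed on `datastore.persistence` as described above:

```js
const Datastore = require('@seald-io/nedb')
const db = new Datastore({ filename: 'example.db', autoload: true })

db.on('compaction.done', () => {
  console.log('datafile compacted')
})

// Compact every 10 minutes, and once right away.
db.persistence.setAutocompactionInterval(10 * 60 * 1000)
db.persistence.compactDatafile()
```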
/**
* String comparison function.
* ```
* if (a < b) return -1
* if (a > b) return 1
* return 0
* ```
* @callback compareStrings
* @param {string} a
* @param {string} b
* @return {number}
*/
/**
* Generic document in NeDB.
* It consists of an Object with anything you want inside.
* @typedef document
* @property {?string} _id Internal `_id` of the document, which can be `null` at some points (when not inserted yet
* for example).
* @type {object.<string, *>}
*/
/**
* Nedb query.
*
* Each key of a query references a field name, which can use the dot-notation to reference subfields inside nested
* documents, arrays, arrays of subdocuments and to match a specific element of an array.
*
* Each value of a query can be one of the following:
* - `string`: matches all documents which have this string as value for the referenced field name
* - `number`: matches all documents which have this number as value for the referenced field name
* - `Regexp`: matches all documents which have a value that matches the given `Regexp` for the referenced field name
* - `object`: matches all documents which have this object as deep-value for the referenced field name
* - Comparison operators: the syntax is `{ field: { $op: value } }` where `$op` is any comparison operator:
* - `$lt`, `$lte`: less than, less than or equal
* - `$gt`, `$gte`: greater than, greater than or equal
* - `$in`: member of. `value` must be an array of values
* - `$ne`, `$nin`: not equal, not a member of
* - `$exists`: checks whether the document possesses the property `field`. `value` should be true or false
* - `$regex`: checks whether a string is matched by the regular expression. Contrary to MongoDB, the use of
* `$options` with `$regex` is not supported, because it doesn't give you more power than regex flags. Basic
* queries are more readable so only use the `$regex` operator when you need to use another operator with it
* - `$size`: if the referenced field is an Array, matches on the size of the array
* - `$elemMatch`: matches if at least one array element matches the sub-query entirely
* - Logical operators: You can combine queries using logical operators:
* - For `$or` and `$and`, the syntax is `{ $op: [query1, query2, ...] }`.
* - For `$not`, the syntax is `{ $not: query }`
* - For `$where`, the syntax is:
* ```
* { $where: function () {
* // object is 'this'
* // return a boolean
* } }
* ```
* @typedef query
* @type {object.<string, *>}
*/
/**
* Nedb projection.
*
* You can give `find` and `findOne` an optional second argument, `projections`.
* The syntax is the same as MongoDB: `{ a: 1, b: 1 }` to return only the `a`
* and `b` fields, `{ a: 0, b: 0 }` to omit these two fields. You cannot use both
* modes at the same time, except for `_id` which is by default always returned and
* which you can choose to omit. You can project on nested documents.
*
* To reference subfields, you can use the dot-notation.
*
* @typedef projection
* @type {object.<string, 0|1>}
*/
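
To illustrate the `query` and `projection` shapes documented above, a small sketch, assuming `db` is an already-loaded `Datastore` and the code runs inside an async function:

```js
// Planets named Earth or Mars, with at least two satellites
// (dot-notation reaches into the nested `data` subdocument).
const query = {
  $or: [{ planet: 'Earth' }, { planet: 'Mars' }],
  'data.satellites': { $gte: 2 }
}

// Keep only `planet` and `system` (plus `_id`, returned by default).
const projection = { planet: 1, system: 1 }

const docs = await db.findAsync(query, projection)
```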
/**
* The `beforeDeserialization` and `afterSerialization` callbacks should be inverse transformations of one another:
* each takes one line of the serialized datafile as a string and returns the transformed string.
* @callback serializationHook
* @param {string} x
* @return {string}
*/
/**
* The `Datastore` class is the main class of NeDB.
* @extends EventEmitter
*/
class Datastore extends EventEmitter {
/**
* Create a new collection
* @param {String} [options.filename] Optional, datastore will be in-memory only if not provided
* @param {Boolean} [options.timestampData] Optional, defaults to false. If set to true, createdAt and updatedAt will be created and populated automatically (if not specified by user)
* @param {Boolean} [options.inMemoryOnly] Optional, defaults to false
* @param {String} [options.nodeWebkitAppName] Optional, specify the name of your NW app if you want options.filename to be relative to the directory where Node Webkit stores application data such as cookies and local storage (the best place to store data in my opinion)
* @param {Boolean} [options.autoload] Optional, defaults to false
* @param {Function} [options.onload] Optional, if autoload is used this will be called after the load database with the error object as parameter. If you don't pass it the error will be thrown
* @param {Function} [options.beforeDeserialization] Optional, serialization hooks
* @param {Function} [options.afterSerialization] Optional, serialization hooks
* @param {Number} [options.corruptAlertThreshold] Optional, threshold after which an alert is thrown if too much data is corrupt
* @param {Function} [options.compareStrings] Optional, string comparison function that overrides default for sorting
* Create a new collection, either persistent or in-memory.
*
* If you use a persistent datastore without the `autoload` option, you need to call `loadDatabase` manually. This
* function fetches the data from datafile and prepares the database. **Don't forget it!** If you use a persistent
* datastore, no command (insert, find, update, remove) will be executed before `loadDatabase` is called, so make sure
* to call it yourself or use the `autoload` option.
*
* Event Emitter - Events
* * compaction.done - Fired whenever a compaction operation was finished
* @param {object|string} options Can be an object or a string. If options is a string, the behavior is the same as in
* v0.6: it will be interpreted as `options.filename`. **Giving a string is deprecated, and will be removed in the
* next major version.**
* @param {string} [options.filename = null] Path to the file where the data is persisted. If left blank, the datastore is
* automatically considered in-memory only. It cannot end with a `~` which is used in the temporary files NeDB uses to
* perform crash-safe writes.
* @param {boolean} [options.inMemoryOnly = false] If set to true, no data will be written in storage.
* @param {boolean} [options.timestampData = false] If set to true, createdAt and updatedAt will be created and
* populated automatically (if not specified by user)
* @param {boolean} [options.autoload = false] If used, the database will automatically be loaded from the datafile
* upon creation (you don't need to call `loadDatabase`). Any command issued before load is finished is buffered and
* will be executed when load is done. When autoloading is done, you can either use the `onload` callback, or you can
* use `this.autoloadPromise` which resolves (or rejects) when autoloading is done.
* @param {function} [options.onload] If you use autoloading, this is the handler called after the `loadDatabase`. It
* takes one `error` argument. If you use autoloading without specifying this handler, and an error happens during
* load, an error will be thrown.
* @param {function} [options.afterSerialization] Hook you can use to transform data after it was serialized and
* before it is written to disk. Can be used for example to encrypt data before writing database to disk. This
* function takes a string as parameter (one line of an NeDB data file) and outputs the transformed string, **which
* must absolutely not contain a `\n` character** (or data will be lost).
* @param {function} [options.beforeDeserialization] Inverse of `afterSerialization`. Make sure to include both and not
* just one, or you risk data loss. For the same reason, make sure both functions are inverses of one another. Some
* failsafe mechanisms are in place to prevent data loss if you misuse the serialization hooks: NeDB checks that neither
* hook is declared without the other, and checks that they are inverses of one another by testing on random strings of
* various lengths. In addition, if too much data is detected as corrupt, NeDB will refuse to start as it could mean
* you're not using the deserialization hook corresponding to the serialization hook used before.
* @param {number} [options.corruptAlertThreshold = 0.1] Between 0 and 1, defaults to 10%. NeDB will refuse to start
* if more than this percentage of the datafile is corrupt. 0 means you don't tolerate any corruption, 1 means you
* don't care.
* @param {compareStrings} [options.compareStrings] If specified, it overrides default string comparison which is not
* well adapted to non-US characters, in particular accented letters. Native `localeCompare` will most of the time be
* the right choice.
* @param {string} [options.nodeWebkitAppName] **Deprecated:** if you are using NeDB from within a Node Webkit app,
* specify its name (the same one you use in the `package.json`) in this field and the `filename` will be relative to
* the directory Node Webkit uses to store the rest of the application's data (local storage etc.). It works on Linux,
* OS X and Windows. Now that you can use `require('nw.gui').App.dataPath` in Node Webkit to get the path to the data
* directory for your application, you should not use this option anymore and it will be removed.
*
* @fires Datastore#event:"compaction.done"
*/
constructor (options) {
super()
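
A minimal sketch of the constructor options documented above, with an illustrative pair of inverse serialization hooks (base64 here, purely as an example):

```js
const Datastore = require('@seald-io/nedb')

const db = new Datastore({
  filename: 'example.db',
  autoload: true,
  timestampData: true,
  // Inverse hooks: the encoded line contains no '\n', as required.
  afterSerialization: line => Buffer.from(line, 'utf8').toString('base64'),
  beforeDeserialization: line => Buffer.from(line, 'base64').toString('utf8')
})

db.autoloadPromise
  .then(() => console.log('database loaded'))
  .catch(err => console.error('load failed', err))
```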
@ -31,18 +156,42 @@ class Datastore extends EventEmitter {
// Retrocompatibility with v0.6 and before
if (typeof options === 'string') {
deprecate(() => {
filename = options
this.inMemoryOnly = false // Default
}, 'Giving a string to the Datastore constructor is deprecated and will be removed in the next version. Please use an options object with an argument \'filename\'.')()
} else {
options = options || {}
filename = options.filename
/**
* Determines if the `Datastore` keeps data in-memory, or if it saves it in storage. Is not read after
* instantiation.
* @type {boolean}
* @private
*/
this.inMemoryOnly = options.inMemoryOnly || false
/**
* Determines if the `Datastore` should autoload the database upon instantiation. Is not read after instantiation.
* @type {boolean}
* @private
*/
this.autoload = options.autoload || false
/**
* Determines if the `Datastore` should add `createdAt` and `updatedAt` fields automatically if not set by the user.
* @type {boolean}
* @private
*/
this.timestampData = options.timestampData || false
}
// Determine whether in memory or persistent
if (!filename || typeof filename !== 'string' || filename.length === 0) {
/**
* If null, it means `inMemoryOnly` is `true`. The `filename` is the name given to the storage module. Is not read
* after instantiation.
* @type {?string}
* @private
*/
this.filename = null
this.inMemoryOnly = true
} else {
@ -50,9 +199,19 @@ class Datastore extends EventEmitter {
}
// String comparison function
/**
* Overrides default string comparison which is not well adapted to non-US characters in particular accented
* letters. Native `localeCompare` will most of the time be the right choice
* @type {compareStrings}
* @private
*/
this.compareStrings = options.compareStrings
// Persistence handling
/**
* The `Persistence` instance for this `Datastore`.
* @type {Persistence}
*/
this.persistence = new Persistence({
db: this,
nodeWebkitAppName: options.nodeWebkitAppName,
@ -63,19 +222,40 @@ class Datastore extends EventEmitter {
// This new executor is ready if we don't use persistence
// If we do, it will only be ready once loadDatabase is called
/**
* The `Executor` instance for this `Datastore`. It is used in all methods exposed by the `Datastore`, any `Cursor`
* produced by the `Datastore` and by `this.persistence.compactDatafile` & `this.persistence.compactDatafileAsync`
* to ensure operations are performed sequentially in the database.
* @type {Executor}
*/
this.executor = new Executor()
if (this.inMemoryOnly) this.executor.ready = true
// Indexed by field name, dot notation can be used
// _id is always indexed and since _ids are generated randomly the underlying
// binary is always well-balanced
/**
* Indexed by field name, dot notation can be used.
* _id is always indexed and since _ids are generated randomly the underlying binary search tree is always well-balanced
* @type {Object.<string, Index>}
* @private
*/
this.indexes = {}
this.indexes._id = new Index({ fieldName: '_id', unique: true })
/**
* Stores the time to live (TTL) of the indexes created. The key represents the field name, the value the number of
* seconds after which data with this index field should be removed.
* @type {Object.<string, number>}
* @private
*/
this.ttlIndexes = {}
// Queue a load of the database right away and call the onload handler
// By default (no onload handler), if there is an error there, no operation will be possible so warn the user by throwing an exception
if (this.autoload) {
/**
* A Promise that resolves when the autoload has finished.
*
* The `onload` callback is not awaited by this Promise; it is started immediately once loading is done.
* @type {Promise}
*/
this.autoloadPromise = this.loadDatabaseAsync()
this.autoloadPromise
.then(() => {
@ -88,18 +268,25 @@ class Datastore extends EventEmitter {
}
/**
* Load the database from the datafile, and trigger the execution of buffered commands if any
* Load the database from the datafile, and trigger the execution of buffered commands if any.
* @param {function} callback
*/
loadDatabase (...args) {
this.executor.push({ this: this.persistence, fn: this.persistence.loadDatabase, arguments: args }, true)
loadDatabase (callback) {
this.executor.push({ this: this.persistence, fn: this.persistence.loadDatabase, arguments: [callback] }, true)
}
loadDatabaseAsync (...args) {
return this.executor.pushAsync(() => this.persistence.loadDatabaseAsync(args), true)
/**
* Load the database from the datafile, and trigger the execution of buffered commands if any.
* @async
* @return {Promise}
*/
loadDatabaseAsync () {
return this.executor.pushAsync(() => this.persistence.loadDatabaseAsync(), true)
}
/**
* Get an array of all the data in the database
* @return {document[]}
*/
getAllData () {
return this.indexes._id.getAll()
@ -114,21 +301,41 @@ class Datastore extends EventEmitter {
}
}
/**
* @callback Datastore~ensureIndexCallback
* @param {?Error} err
*/
/**
* Ensure an index is kept for this field. Same parameters as lib/indexes
* For now this function is synchronous, we need to test how much time it takes
* We use an async API for consistency with the rest of the code
* @param {Object} options
* @param {String} options.fieldName
* @param {Boolean} [options.unique]
* @param {Boolean} [options.sparse]
* @param {Number} [options.expireAfterSeconds] - Optional, if set this index becomes a TTL index (only works on Date fields, not arrays of Date)
* @param {Function} callback Optional callback, signature: err
* This function acts synchronously on the indexes; however, persisting the indexes is deferred through the
* executor.
* Previous versions explicitly stated that the callback was optional; it is now recommended to provide one.
* @param {object} options
* @param {string} options.fieldName Name of the field to index. Use the dot notation to index a field in a nested document.
* @param {boolean} [options.unique = false] Enforce field uniqueness. Note that a unique index will raise an error if you try to index two documents for which the field is not defined.
* @param {boolean} [options.sparse = false] don't index documents for which the field is not defined. Use this option along with "unique" if you want to accept multiple documents for which it is not defined.
* @param {number} [options.expireAfterSeconds] - if set, the created index is a TTL (time to live) index, that will automatically remove documents when the system date becomes larger than the date on the indexed field plus `expireAfterSeconds`. Documents where the indexed field is not specified or not a `Date` object are ignored
* @param {Datastore~ensureIndexCallback} callback Callback, signature: err
*/
// TODO: contrary to what is said in the JSDoc, this function should probably be called through the executor, it persists a new state
ensureIndex (options = {}, callback = () => {}) {
callbackify(this.ensureIndexAsync.bind(this))(options, callback)
}
/**
* Ensure an index is kept for this field. Same parameters as lib/indexes
* This function acts synchronously on the indexes; however, persisting the indexes is deferred through the
* executor.
* Previous versions explicitly stated that the callback was optional; it is now recommended to provide one.
* @param {object} options
* @param {string} options.fieldName Name of the field to index. Use the dot notation to index a field in a nested document.
* @param {boolean} [options.unique = false] Enforce field uniqueness. Note that a unique index will raise an error if you try to index two documents for which the field is not defined.
* @param {boolean} [options.sparse = false] Don't index documents for which the field is not defined. Use this option along with "unique" if you want to accept multiple documents for which it is not defined.
* @param {number} [options.expireAfterSeconds] - If set, the created index is a TTL (time to live) index, that will automatically remove documents when the system date becomes larger than the date on the indexed field plus `expireAfterSeconds`. Documents where the indexed field is not specified or not a `Date` object are ignored
* @return {Promise<void>}
*/
// TODO: contrary to what is said in the JSDoc, this function should probably be called through the executor, it persists a new state
async ensureIndexAsync (options = {}) {
if (!options.fieldName) {
const err = new Error('Cannot create an index without a fieldName')
@ -151,16 +358,31 @@ class Datastore extends EventEmitter {
await this.persistence.persistNewStateAsync([{ $$indexCreated: options }])
}
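
A short sketch of the index options described above, assuming an already-loaded `db` inside an async function; the TTL index removes documents an hour after their `createdAt` date:

```js
// Unique, sparse index on email: documents without an email are accepted,
// but two documents cannot share the same email.
await db.ensureIndexAsync({ fieldName: 'email', unique: true, sparse: true })

// TTL index: documents whose createdAt is older than 3600 seconds are
// removed automatically the next time they are matched by a query.
await db.ensureIndexAsync({ fieldName: 'createdAt', expireAfterSeconds: 3600 })

// Callback flavour, with a callback provided as recommended above.
db.ensureIndex({ fieldName: 'planet' }, err => {
  if (err) console.error(err)
})
```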
/**
* @callback Datastore~removeIndexCallback
* @param {?Error} err
*/
/**
* Remove an index
* @param {String} fieldName
* @param {Function} callback Optional callback, signature: err
* Previous versions explicitly stated that the callback was optional; it is now recommended to provide one.
* @param {string} fieldName Field name of the index to remove. Use the dot notation to remove an index referring to a
* field in a nested document.
* @param {Datastore~removeIndexCallback} callback Optional callback, signature: err
*/
// TODO: contrary to what is said in the JSDoc, this function should probably be called through the executor, it persists a new state
removeIndex (fieldName, callback = () => {}) {
callbackify(this.removeIndexAsync.bind(this))(fieldName, callback)
}
/**
* Remove an index
* Previous versions explicitly stated that the callback was optional; it is now recommended to provide one.
* @param {string} fieldName Field name of the index to remove. Use the dot notation to remove an index referring to a
* field in a nested document.
* @return {Promise<void>}
*/
// TODO: contrary to what is said in the JSDoc, this function should probably be called through the executor, it persists a new state
async removeIndexAsync (fieldName) {
delete this.indexes[fieldName]
@ -169,6 +391,8 @@ class Datastore extends EventEmitter {
/**
* Add one or several document(s) to all indexes
* @param {document} doc
* @private
*/
addToIndexes (doc) {
let failingIndex
@ -197,6 +421,7 @@ class Datastore extends EventEmitter {
/**
* Remove one or several document(s) from all indexes
* @param {document} doc
*/
removeFromIndexes (doc) {
for (const index of Object.values(this.indexes)) {
@ -208,6 +433,10 @@ class Datastore extends EventEmitter {
* Update one or several documents in all indexes
* To update multiple documents, oldDoc must be an array of { oldDoc, newDoc } pairs
* If one update violates a constraint, all changes are rolled back
* @param {document|Array.<{oldDoc: document, newDoc: document}>} oldDoc Document to update, or an `Array` of
* `{oldDoc, newDoc}` pairs.
* @param {document} [newDoc] Document to replace the oldDoc with. If the first argument is an `Array` of
* `{oldDoc, newDoc}` pairs, this second argument is ignored.
*/
updateIndexes (oldDoc, newDoc) {
let failingIndex
@ -234,6 +463,13 @@ class Datastore extends EventEmitter {
}
}
/**
* Get all candidate documents matching the query, regardless of their expiry status.
* @param {query} query
* @return {document[]}
*
* @private
*/
_getCandidates (query) {
const indexNames = Object.keys(this.indexes)
// STEP 1: get candidates list by checking indexes from most to least frequent usecase
@ -266,6 +502,12 @@ class Datastore extends EventEmitter {
return this.getAllData()
}
/**
* @callback Datastore~getCandidatesCallback
* @param {?Error} err
* @param {?document[]} candidates
*/
/**
* Return the list of candidates for a given query
* Crude implementation for now, we return the candidates given by the first usable index if any
@ -275,9 +517,12 @@ class Datastore extends EventEmitter {
*
* Returned candidates will be scanned to find and remove all expired documents
*
* @param {Query} query
* @param {Boolean} dontExpireStaleDocs Optional, defaults to false, if true don't remove stale docs. Useful for the remove function which shouldn't be impacted by expirations
* @param {Function} callback Signature err, candidates
* @param {query} query
* @param {boolean|function} [dontExpireStaleDocs = false] If true don't remove stale docs. Useful for the remove
* function which shouldn't be impacted by expirations. If a function is passed here instead, it is used as the callback.
* @param {Datastore~getCandidatesCallback} callback Signature err, candidates
*
* @private
*/
getCandidates (query, dontExpireStaleDocs, callback) {
if (typeof dontExpireStaleDocs === 'function') {
@ -288,6 +533,22 @@ class Datastore extends EventEmitter {
callbackify(this.getCandidatesAsync.bind(this))(query, dontExpireStaleDocs, callback)
}
/**
* Return the list of candidates for a given query
* Crude implementation for now, we return the candidates given by the first usable index if any
* We try the following query types, in this order: basic match, $in match, comparison match
* One way to make it better would be to enable the use of multiple indexes if the first usable index
* returns too much data. I may do it in the future.
*
* Returned candidates will be scanned to find and remove all expired documents
*
* @param {query} query
* @param {boolean} [dontExpireStaleDocs = false] If true don't remove stale docs. Useful for the remove function
* which shouldn't be impacted by expirations.
* @return {Promise<document[]>} candidates
*
* @private
*/
async getCandidatesAsync (query, dontExpireStaleDocs = false) {
const validDocs = []
@ -309,11 +570,17 @@ class Datastore extends EventEmitter {
return validDocs
}
/**
* @callback Datastore~insertCallback
* @param {?Error} err
* @param {?document} insertedDoc
*/
/**
* Insert a new document
* Private. Use `Datastore.insert`, which has the same signature.
* @param {Document} newDoc
* @param {Function} callback Optional callback, signature: err, insertedDoc
* @param {?document} newDoc
* @param {Datastore~insertCallback} callback Optional callback, signature: err, insertedDoc
*
* @private
*/
@ -321,6 +588,13 @@ class Datastore extends EventEmitter {
return callbackify(this._insertAsync.bind(this))(newDoc, callback)
}
/**
* Insert a new document
* Private. Use `Datastore.insertAsync`, which has the same signature.
* @param {document} newDoc
* @return {Promise<document>}
* @private
*/
async _insertAsync (newDoc) {
const preparedDoc = this._prepareDocumentForInsertion(newDoc)
this._insertInCache(preparedDoc)
@ -331,6 +605,7 @@ class Datastore extends EventEmitter {
/**
* Create a new _id that's not already in use
* @return {string} id
* @private
*/
_createNewId () {
@ -343,6 +618,8 @@ class Datastore extends EventEmitter {
/**
* Prepare a document (or array of documents) to be inserted in a database
* Meaning adds _id and timestamps if necessary on a copy of newDoc to avoid any side effect on user input
* @param {document|document[]} newDoc document, or Array of documents, to prepare
* @return {document|document[]} prepared document, or Array of prepared documents
* @private
*/
_prepareDocumentForInsertion (newDoc) {
@ -365,6 +642,7 @@ class Datastore extends EventEmitter {
/**
* If newDoc is an array of documents, this will insert all documents in the cache
* @param {document|document[]} preparedDoc
* @private
*/
_insertInCache (preparedDoc) {
@ -375,6 +653,7 @@ class Datastore extends EventEmitter {
/**
* If one insertion fails (e.g. because of a unique constraint), roll back all previous
* inserts and throws the error
* @param {document[]} preparedDocs
* @private
*/
_insertMultipleDocsInCache (preparedDocs) {
@ -400,18 +679,40 @@ class Datastore extends EventEmitter {
}
}
/**
* Insert a new document
* Queues {@link Datastore#_insert} (which has the same signature) in the executor.
* @param {document} newDoc
* @param {Datastore~insertCallback} callback Optional callback, signature: err, insertedDoc
*
* @private
*/
insert (...args) {
this.executor.push({ this: this, fn: this._insert, arguments: args })
}
/**
* Insert a new document
* Queues {@link Datastore#_insertAsync} (which has the same signature) in the executor.
* @param {document} newDoc
* @return {Promise<document>}
* @async
*/
insertAsync (...args) {
return this.executor.pushAsync(() => this._insertAsync(...args))
}
/**
* @callback Datastore~countCallback
* @param {?Error} err
* @param {?number} count
*/
/**
* Count all documents matching the query
* @param {Query} query MongoDB-style query
* @param {Function} callback Optional callback, signature: err, count
* @param {query} query MongoDB-style query
* @param {Datastore~countCallback} [callback] If given, the function will return undefined, otherwise it will return the Cursor.
* @return {Cursor<number>|undefined}
*/
count (query, callback) {
const cursor = this.countAsync(query)
@ -420,16 +721,30 @@ class Datastore extends EventEmitter {
else return cursor
}
/**
* Count all documents matching the query
* @param {query} query MongoDB-style query
* @return {Cursor<number>} count
* @async
*/
countAsync (query) {
return new Cursor(this, query, async docs => docs.length, true) // this is a trick: Cursor itself is a thenable, which allows awaiting it
}
/**
* @callback Datastore~findCallback
* @param {?Error} err
* @param {document[]} docs
*/
/**
* Find all documents matching the query
* If no callback is passed, we return the cursor so that the user can limit, skip and finally exec
* @param {Object} query MongoDB-style query
* @param {Object} projection MongoDB-style projection
* @param {Function} callback Optional callback, signature: err, docs
* @param {query} query MongoDB-style query
* @param {projection|Datastore~findCallback} [projection = {}] MongoDB-style projection. If a function is passed
* here instead, it is interpreted as the callback.
* @param {Datastore~findCallback} [callback] Optional callback, signature: err, docs
* @return {Cursor<document[]>|undefined}
*/
find (query, projection, callback) {
if (arguments.length === 1) {
@ -448,6 +763,14 @@ class Datastore extends EventEmitter {
else return cursor
}
/**
* Find all documents matching the query
* If no callback is passed, we return the cursor so that the user can limit, skip and finally exec
* @param {query} query MongoDB-style query
* @param {projection} [projection = {}] MongoDB-style projection
* @return {Cursor<document[]>}
* @async
*/
findAsync (query, projection = {}) {
const cursor = new Cursor(this, query, docs => docs.map(doc => model.deepCopy(doc)), true)
@ -455,11 +778,18 @@ class Datastore extends EventEmitter {
return cursor
}
/**
* @callback Datastore~findOneCallback
* @param {?Error} err
* @param {document} doc
*/
/**
* Find one document matching the query
* @param {Object} query MongoDB-style query
* @param {Object} projection MongoDB-style projection
* @param {Function} callback Optional callback, signature: err, doc
* @param {query} query MongoDB-style query
* @param {projection} projection MongoDB-style projection
* @param {Datastore~findOneCallback} callback Optional callback, signature: err, doc
* @return {Cursor<document>|undefined}
*/
findOne (query, projection, callback) {
if (arguments.length === 1) {
@ -478,6 +808,12 @@ class Datastore extends EventEmitter {
else return cursor
}
/**
* Find one document matching the query
* @param {query} query MongoDB-style query
* @param {projection} projection MongoDB-style projection
* @return {Cursor<document>}
*/
findOneAsync (query, projection = {}) {
const cursor = new Cursor(this, query, docs => docs.length === 1 ? model.deepCopy(docs[0]) : null, true)
@ -485,33 +821,52 @@ class Datastore extends EventEmitter {
return cursor
}
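
Putting the find, findOne and count signatures above together, a minimal sketch (assuming an already-loaded `db` inside an async function); since the returned `Cursor` is thenable, it can be chained and then awaited:

```js
// Two documents from the solar system, sorted by planet name,
// projected down to the planet field.
const docs = await db.findAsync({ system: 'solar' }, { planet: 1 })
  .sort({ planet: 1 })
  .limit(2)

// Single document, or null if nothing matches.
const earth = await db.findOneAsync({ planet: 'Earth' })

// Number of matching documents.
const total = await db.countAsync({})
```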
/**
* If update was an upsert, `upsert` flag is set to true, `affectedDocuments` can be one of the following:
* - For an upsert, the upserted document
* - For an update with returnUpdatedDocs option false, null
* - For an update with returnUpdatedDocs true and multi false, the updated document
* - For an update with returnUpdatedDocs true and multi true, the array of updated documents
*
* **WARNING:** The API was changed between v1.7.4 and v1.8, for consistency and readability reasons. Prior and
* including to v1.7.4, the callback signature was (err, numAffected, updated) where updated was the updated document
* in case of an upsert or the array of updated documents for an update if the returnUpdatedDocs option was true. That
* meant that the type of affectedDocuments in a non multi update depended on whether there was an upsert or not,
* leaving only two ways for the user to check whether an upsert had occurred: checking the type of affectedDocuments
* or running another find query on the whole dataset to check its size. Both options being ugly, the breaking change
* was necessary.
* @callback Datastore~updateCallback
* @param {?Error} err
* @param {?number} numAffected
* @param {?document[]|?document} affectedDocuments
* @param {?boolean} upsert
*/
/**
* Update all docs matching query.
* Use Datastore.update which has the same signature
* @param {Object} query
* @param {Object} updateQuery
* @param {Object} options Optional options
* options.multi If true, can update multiple documents (defaults to false)
* options.upsert If true, document is inserted if the query doesn't match anything
* options.returnUpdatedDocs Defaults to false, if true return as third argument the array of updated matched documents (even if no change actually took place)
* @param {Function} cb Optional callback, signature: (err, numAffected, affectedDocuments, upsert)
* If update was an upsert, upsert flag is set to true
* affectedDocuments can be one of the following:
* * For an upsert, the upserted document
* * For an update with returnUpdatedDocs option false, null
* * For an update with returnUpdatedDocs true and multi false, the updated document
* * For an update with returnUpdatedDocs true and multi true, the array of updated documents
*
* WARNING: The API was changed between v1.7.4 and v1.8, for consistency and readability reasons. Prior and including to v1.7.4,
* the callback signature was (err, numAffected, updated) where updated was the updated document in case of an upsert
* or the array of updated documents for an update if the returnUpdatedDocs option was true. That meant that the type of
* affectedDocuments in a non multi update depended on whether there was an upsert or not, leaving only two ways for the
* user to check whether an upsert had occurred: checking the type of affectedDocuments or running another find query on
* the whole dataset to check its size. Both options being ugly, the breaking change was necessary.
* @param {query} query is the same kind of finding query you use with `find` and `findOne`
* @param {document|update} update specifies how the documents should be modified. It is either a new document or a
* set of modifiers (you cannot use both together, it doesn't make sense!):
* - A new document will replace the matched docs
* - The modifiers create the fields they need to modify if they don't exist, and you can apply them to subdocs.
* Available field modifiers are `$set` to change a field's value, `$unset` to delete a field, `$inc` to increment a
* field's value and `$min`/`$max` to change field's value, only if provided value is less/greater than current
* value. To work on arrays, you have `$push`, `$pop`, `$addToSet`, `$pull`, and the special `$each` and `$slice`.
* @param {object|Datastore~updateCallback} [options] Optional options. If a function is passed here instead, it is interpreted as the callback.
* @param {boolean} [options.multi = false] If true, can update multiple documents
* @param {boolean} [options.upsert = false] If true, can insert a new document corresponding to the `update` rules if
* your `query` doesn't match anything. If your `update` is a simple object with no modifiers, it is the inserted
* document. In the other case, the `query` is stripped of all operators recursively, and the `update` is applied to
* it.
* @param {boolean} [options.returnUpdatedDocs = false] (not MongoDB-compatible) If true and update is not an upsert,
* will return the array of documents matched by the find query and updated. Updated documents will be returned even
* if the update did not actually modify them.
* @param {Datastore~updateCallback} [cb] Optional callback
*
* @private
*/
_update (query, updateQuery, options, cb) {
_update (query, update, options, cb) {
if (typeof options === 'function') {
cb = options
options = {}
@ -521,10 +876,35 @@ class Datastore extends EventEmitter {
const _callback = (err, res = {}) => {
callback(err, res.numAffected, res.affectedDocuments, res.upsert)
}
callbackify(this._updateAsync.bind(this))(query, updateQuery, options, _callback)
callbackify(this._updateAsync.bind(this))(query, update, options, _callback)
}
async _updateAsync (query, updateQuery, options = {}) {
/**
* Update all docs matching query.
* Use Datastore.updateAsync which has the same signature
* @param {query} query is the same kind of finding query you use with `find` and `findOne`
* @param {document|update} update specifies how the documents should be modified. It is either a new document or a
* set of modifiers (you cannot use both together, it doesn't make sense!):
* - A new document will replace the matched docs
* - The modifiers create the fields they need to modify if they don't exist, and you can apply them to subdocs.
* Available field modifiers are `$set` to change a field's value, `$unset` to delete a field, `$inc` to increment a
* field's value and `$min`/`$max` to change field's value, only if provided value is less/greater than current
* value. To work on arrays, you have `$push`, `$pop`, `$addToSet`, `$pull`, and the special `$each` and `$slice`.
* @param {Object} [options] Optional options
* @param {boolean} [options.multi = false] If true, can update multiple documents
* @param {boolean} [options.upsert = false] If true, can insert a new document corresponding to the `update` rules if
* your `query` doesn't match anything. If your `update` is a simple object with no modifiers, it is the inserted
* document. In the other case, the `query` is stripped of all operators recursively, and the `update` is applied to
* it.
* @param {boolean} [options.returnUpdatedDocs = false] (not MongoDB-compatible) If true and update is not an upsert,
* will return the array of documents matched by the find query and updated. Updated documents will be returned even
* if the update did not actually modify them.
*
* @return {Promise<{numAffected: number, affectedDocuments: document[]|document, upsert: boolean}>}
*
* @private
*/
async _updateAsync (query, update, options = {}) {
const multi = options.multi !== undefined ? options.multi : false
const upsert = options.upsert !== undefined ? options.upsert : false
@ -539,13 +919,13 @@ class Datastore extends EventEmitter {
let toBeInserted
try {
model.checkObject(updateQuery)
model.checkObject(update)
// updateQuery is a simple object with no modifier, use it as the document to insert
toBeInserted = updateQuery
toBeInserted = update
} catch (e) {
// updateQuery contains modifiers, use the find query as the base,
// strip it from all operators and update it according to updateQuery
toBeInserted = model.modify(model.deepCopy(query, true), updateQuery)
toBeInserted = model.modify(model.deepCopy(query, true), update)
}
const newDoc = await this._insertAsync(toBeInserted)
return { numAffected: 1, affectedDocuments: newDoc, upsert: true }
@ -564,7 +944,7 @@ class Datastore extends EventEmitter {
if (model.match(candidate, query) && (multi || numReplaced === 0)) {
numReplaced += 1
if (this.timestampData) { createdAt = candidate.createdAt }
modifiedDoc = model.modify(candidate, updateQuery)
modifiedDoc = model.modify(candidate, update)
if (this.timestampData) {
modifiedDoc.createdAt = createdAt
modifiedDoc.updatedAt = new Date()
@ -579,31 +959,81 @@ class Datastore extends EventEmitter {
// Update the datafile
const updatedDocs = modifications.map(x => x.newDoc)
await this.persistence.persistNewStateAsync(updatedDocs)
if (!options.returnUpdatedDocs) return { numAffected: numReplaced }
if (!options.returnUpdatedDocs) return { numAffected: numReplaced, upsert: false, affectedDocuments: null }
else {
let updatedDocsDC = []
updatedDocs.forEach(doc => { updatedDocsDC.push(model.deepCopy(doc)) })
if (!multi) updatedDocsDC = updatedDocsDC[0]
return { numAffected: numReplaced, affectedDocuments: updatedDocsDC }
return { numAffected: numReplaced, affectedDocuments: updatedDocsDC, upsert: false }
}
}
/**
* Update all docs matching query.
* @param {query} query is the same kind of finding query you use with `find` and `findOne`
* @param {document|update} update specifies how the documents should be modified. It is either a new document or a
* set of modifiers (you cannot use both together, it doesn't make sense!):
* - A new document will replace the matched docs
* - The modifiers create the fields they need to modify if they don't exist, and you can apply them to subdocs.
* Available field modifiers are `$set` to change a field's value, `$unset` to delete a field, `$inc` to increment a
* field's value and `$min`/`$max` to change field's value, only if provided value is less/greater than current
* value. To work on arrays, you have `$push`, `$pop`, `$addToSet`, `$pull`, and the special `$each` and `$slice`.
* @param {Object} [options] Optional options
* @param {boolean} [options.multi = false] If true, can update multiple documents
* @param {boolean} [options.upsert = false] If true, can insert a new document corresponding to the `update` rules if
* your `query` doesn't match anything. If your `update` is a simple object with no modifiers, it is the inserted
* document. In the other case, the `query` is stripped of all operators recursively, and the `update` is applied to
* it.
* @param {boolean} [options.returnUpdatedDocs = false] (not MongoDB-compatible) If true and update is not an upsert,
* will return the array of documents matched by the find query and updated. Updated documents will be returned even
* if the update did not actually modify them.
* @param {Datastore~updateCallback} [cb] Optional callback
*
*/
update (...args) {
this.executor.push({ this: this, fn: this._update, arguments: args })
}
/**
* Update all docs matching query.
* @param {query} query is the same kind of finding query you use with `find` and `findOne`
* @param {document|update} update specifies how the documents should be modified. It is either a new document or a
* set of modifiers (you cannot use both together, it doesn't make sense!):
* - A new document will replace the matched docs
* - The modifiers create the fields they need to modify if they don't exist, and you can apply them to subdocs.
* Available field modifiers are `$set` to change a field's value, `$unset` to delete a field, `$inc` to increment a
* field's value and `$min`/`$max` to change field's value, only if provided value is less/greater than current
* value. To work on arrays, you have `$push`, `$pop`, `$addToSet`, `$pull`, and the special `$each` and `$slice`.
* @param {Object} [options] Optional options
* @param {boolean} [options.multi = false] If true, can update multiple documents
* @param {boolean} [options.upsert = false] If true, can insert a new document corresponding to the `update` rules if
* your `query` doesn't match anything. If your `update` is a simple object with no modifiers, it is the inserted
* document. In the other case, the `query` is stripped of all operators recursively, and the `update` is applied to
* it.
* @param {boolean} [options.returnUpdatedDocs = false] (not MongoDB-compatible) If true and update is not an upsert,
* will return the array of documents matched by the find query and updated. Updated documents will be returned even
* if the update did not actually modify them.
* @async
* @return {Promise<{numAffected: number, affectedDocuments: document[]|document, upsert: boolean}>}
*/
updateAsync (...args) {
return this.executor.pushAsync(() => this._updateAsync(...args))
}
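
A sketch of the update semantics documented above, assuming an already-loaded `db` inside an async function; note the always-defined `upsert` flag and the `null` `affectedDocuments` described in the changelog:

```js
// Upsert: nothing matches { planet: 'Pluto' }, so a document is built from
// the query plus the modifiers and inserted.
const upserted = await db.updateAsync(
  { planet: 'Pluto' },
  { $set: { inhabited: false } },
  { upsert: true }
)
// upserted.upsert === true, upserted.affectedDocuments is the inserted document

// Multi update returning the updated documents.
const updated = await db.updateAsync(
  { system: 'solar' },
  { $set: { checked: true } },
  { multi: true, returnUpdatedDocs: true }
)
// updated.upsert === false, updated.affectedDocuments is an array
```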
/**
* @callback Datastore~removeCallback
* @param {?Error} err
* @param {?number} numRemoved
*/
/**
* Remove all docs matching the query.
* Use Datastore.remove which has the same signature
* For now very naive implementation (similar to update)
* @param {Object} query
* @param {Object} options Optional options
* options.multi If true, can update multiple documents (defaults to false)
* @param {Function} cb Optional callback, signature: err, numRemoved
* @param {query} query
* @param {object} [options] Optional options
* @param {boolean} [options.multi = false] If true, can remove multiple documents
* @param {Datastore~removeCallback} [cb]
*
* @private
*/
@ -617,6 +1047,15 @@ class Datastore extends EventEmitter {
callbackify(this._removeAsync.bind(this))(query, options, callback)
}
/**
* Remove all docs matching the query.
* Use Datastore.removeAsync which has the same signature
* @param {query} query
* @param {object} [options] Optional options
* @param {boolean} [options.multi = false] If true, can remove multiple documents
* @return {Promise<number>} How many documents were removed
* @private
*/
async _removeAsync (query, options = {}) {
const multi = options.multi !== undefined ? options.multi : false
@ -636,10 +1075,26 @@ class Datastore extends EventEmitter {
return numRemoved
}
/**
* Remove all docs matching the query.
* @param {query} query
* @param {object} [options] Optional options
* @param {boolean} [options.multi = false] If true, can remove multiple documents
* @param {Datastore~removeCallback} [cb] Optional callback, signature: err, numRemoved
*/
remove (...args) {
this.executor.push({ this: this, fn: this._remove, arguments: args })
}
/**
* Remove all docs matching the query.
* Queues {@link Datastore#_removeAsync} (which has the same signature) in the executor.
* @param {query} query
* @param {object} [options] Optional options
* @param {boolean} [options.multi = false] If true, can remove multiple documents
* @return {Promise<number>} How many documents were removed
* @async
*/
removeAsync (...args) {
return this.executor.pushAsync(() => this._removeAsync(...args))
}
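
And the corresponding sketch for remove, in both flavours (same assumptions as above):

```js
// Promise flavour: resolves with the number of removed documents.
const numRemoved = await db.removeAsync({ system: 'solar' }, { multi: true })

// Callback flavour, unchanged from previous versions.
db.remove({ planet: 'Mars' }, {}, (err, n) => {
  if (err) throw err
  console.log(`${n} document(s) removed`)
})
```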

@ -3,12 +3,17 @@ const model = require('./model.js')
const { uniq, isDate } = require('./utils.js')
/**
* Two indexed pointers are equal iif they point to the same place
* Two indexed pointers are equal if they point to the same place
* @param {*} a
* @param {*} b
* @return {boolean}
*/
const checkValueEquality = (a, b) => a === b
/**
* Type-aware projection
* @param {*} elt
* @return {string|*}
*/
function projectForUnique (elt) {
if (elt === null) return '$null'
@ -25,24 +30,45 @@ class Index {
* Create a new index
* All methods on an index guarantee that either the whole operation was successful and the index changed
* or the operation was unsuccessful and an error is thrown while the index is unchanged
* @param {String} options.fieldName On which field should the index apply (can use dot notation to index on sub fields)
* @param {Boolean} options.unique Optional, enforce a unique constraint (default: false)
* @param {Boolean} options.sparse Optional, allow a sparse index (we can have documents for which fieldName is undefined) (default: false)
* @param {object} options
* @param {string} options.fieldName On which field should the index apply (can use dot notation to index on sub fields)
* @param {boolean} [options.unique = false] Enforces a unique constraint
* @param {boolean} [options.sparse = false] Allows a sparse index (we can have documents for which fieldName is `undefined`)
*/
constructor (options) {
/**
* The field the index applies to (dot notation may be used to index on sub fields).
* @type {string}
*/
this.fieldName = options.fieldName
/**
* Defines whether the index enforces a unique constraint.
* @type {boolean}
*/
this.unique = options.unique || false
/**
* Defines if we can have documents for which fieldName is `undefined`
* @type {boolean}
*/
this.sparse = options.sparse || false
/**
* Options object given to the underlying BinarySearchTree.
* @type {{unique: boolean, checkValueEquality: (function(*, *): boolean), compareKeys: ((function(*, *, compareStrings): (number|number))|*)}}
*/
this.treeOptions = { unique: this.unique, compareKeys: model.compareThings, checkValueEquality: checkValueEquality }
this.reset() // No data in the beginning
/**
* Underlying BinarySearchTree for this index. Uses an AVLTree for optimization.
* @type {AVLTree}
*/
this.tree = new BinarySearchTree(this.treeOptions)
}
/**
* Reset an index
* @param {Document or Array of documents} newData Optional, data to initialize the index with
* If an error is thrown during insertion, the index is not modified
* @param {?document|?document[]} [newData] Data to initialize the index with. If an error is thrown during
* insertion, the index is not modified.
*/
reset (newData) {
this.tree = new BinarySearchTree(this.treeOptions)
@ -54,6 +80,7 @@ class Index {
* Insert a new document in the index
* If an array is passed, we insert all its elements (if one insertion fails the index is not modified)
* O(log(n))
* @param {document|document[]} doc The document, or array of documents, to insert.
*/
insert (doc) {
let keys
@ -98,8 +125,8 @@ class Index {
/**
* Insert an array of documents in the index
* If a constraint is violated, the changes should be rolled back and an error thrown
*
* @API private
* @param {document[]} docs Array of documents to insert.
* @private
*/
insertMultipleDocs (docs) {
let error
@ -125,10 +152,11 @@ class Index {
}
/**
* Remove a document from the index
* Removes a document from the index.
* If an array is passed, we remove all its elements
* The remove operation is safe with regards to the 'unique' constraint
* O(log(n))
* @param {document[]|document} doc The document, or Array of documents, to remove.
*/
remove (doc) {
if (Array.isArray(doc)) {
@ -153,6 +181,10 @@ class Index {
* Update a document in the index
* If a constraint is violated, changes are rolled back and an error thrown
* Naive implementation, still in O(log(n))
* @param {document|Array.<{oldDoc: document, newDoc: document}>} oldDoc Document to update, or an `Array` of
* `{oldDoc, newDoc}` pairs.
* @param {document} [newDoc] Document to replace the oldDoc with. If the first argument is an `Array` of
* `{oldDoc, newDoc}` pairs, this second argument is ignored.
*/
update (oldDoc, newDoc) {
if (Array.isArray(oldDoc)) {
@ -174,7 +206,7 @@ class Index {
* Update multiple documents in the index
* If a constraint is violated, the changes need to be rolled back
* and an error thrown
* @param {Array<{ oldDoc: T, newDoc: T }>} pairs
* @param {Array.<{oldDoc: document, newDoc: document}>} pairs
*
* @private
*/
@ -212,6 +244,8 @@ class Index {
/**
* Revert an update
* @param {document|Array.<{oldDoc: document, newDoc: document}>} oldDoc Document to revert to, or an `Array` of `{oldDoc, newDoc}` pairs.
* @param {document} [newDoc] Document to revert from. If the first argument is an Array of {oldDoc, newDoc}, this second argument is ignored.
*/
revertUpdate (oldDoc, newDoc) {
const revert = []
@ -227,8 +261,8 @@ class Index {
/**
* Get all documents in the index whose key matches value (if it is a Thing) or one of the elements of value (if it is an array of Things)
* @param {Thing} value Value to match the key against
* @return {Array of documents}
* @param {Array.<*>|*} value Value to match the key against
* @return {document[]}
*/
getMatching (value) {
if (!Array.isArray(value)) return this.tree.search(value)
@ -253,8 +287,12 @@ class Index {
/**
* Get all documents in the index whose key is between the bounds defined by the query
* Documents are sorted by key
* @param {Query} query
* @return {Array of documents}
* @param {object} query An object with at least one matcher among $gt, $gte, $lt, $lte.
* @param {*} [query.$gt] Greater than matcher.
* @param {*} [query.$gte] Greater than or equal matcher.
* @param {*} [query.$lt] Lower than matcher.
* @param {*} [query.$lte] Lower than or equal matcher.
* @return {document[]}
*/
getBetweenBounds (query) {
return this.tree.betweenBounds(query)
@ -262,7 +300,7 @@ class Index {
/**
* Get all elements in the index
* @return {Array of documents}
* @return {document[]}
*/
getAll () {
const res = []
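
For completeness, a sketch of the `Index` API documented in this file; it is an internal class, so the example assumes it is loaded from within `lib/`, the way `Datastore` does:

```js
const Index = require('./indexes.js')

const idx = new Index({ fieldName: 'ts' })
idx.insert([
  { _id: '1', ts: 1 },
  { _id: '2', ts: 5 },
  { _id: '3', ts: 12 }
])

idx.getMatching(5)                          // [{ _id: '2', ts: 5 }]
idx.getBetweenBounds({ $gte: 2, $lt: 12 })  // documents with 2 <= ts < 12, sorted by key
idx.getAll()                                // every indexed document
```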

@ -5,7 +5,7 @@
* * Persistence.persistNewState(newDocs, callback) where newDocs is an array of documents and callback has signature err
*/
const path = require('path')
const { callbackify, promisify } = require('util')
const { callbackify, promisify, deprecate } = require('util')
const byline = require('./byline')
const customUtils = require('./customUtils.js')
const Index = require('./indexes.js')
@ -55,14 +55,9 @@ class Persistence {
// For NW apps, store data in the same directory where NW stores application data
if (this.filename && options.nodeWebkitAppName) {
console.log('==================================================================')
console.log('WARNING: The nodeWebkitAppName option is deprecated')
console.log('To get the path to the directory where Node Webkit stores the data')
console.log('for your app, use the internal nw.gui module like this')
console.log('require(\'nw.gui\').App.dataPath')
console.log('See https://github.com/rogerwang/node-webkit/issues/500')
console.log('==================================================================')
deprecate(() => {
this.filename = Persistence.getNWAppFilename(options.nodeWebkitAppName, this.filename)
}, 'The nodeWebkitAppName option is deprecated and will be removed in the next version. To get the path to the directory where Node Webkit stores the data for your app, use the internal nw.gui module: require(\'nw.gui\').App.dataPath (see https://github.com/rogerwang/node-webkit/issues/500).')()
}
}
@ -311,6 +306,7 @@ class Persistence {
* data for this application. Probably the best place to store data
*/
static getNWAppFilename (appName, relativeFilename) {
return deprecate(() => {
let home
if (process.platform === 'win32' || process.platform === 'win64') {
@ -328,6 +324,7 @@ class Persistence {
} else throw new Error(`Can't use the Node Webkit relative path for platform ${process.platform}`)
return path.join(home, 'nedb-data', relativeFilename)
}, 'The getNWAppFilename static method is deprecated and will be removed in the next version. To get the path to the directory where Node Webkit stores the data for your app, use the internal nw.gui module: require(\'nw.gui\').App.dataPath (see https://github.com/rogerwang/node-webkit/issues/500).')()
}
}

@ -98,8 +98,7 @@ storage.writeFileLines = (filename, lines, callback = () => {}) => {
const readable = Readable.from(lines)
readable.on('data', (line) => {
try {
stream.write(line)
stream.write('\n')
stream.write(line + '\n')
} catch (err) {
callback(err)
}

package-lock.json (generated): 11564 changed lines

File diff suppressed because it is too large.

@ -53,13 +53,14 @@
"events": "^3.3.0",
"jest": "^27.3.1",
"jquery": "^3.6.0",
"jsdoc": "^3.6.7",
"karma": "^6.3.2",
"karma-chai": "^0.1.0",
"karma-chrome-launcher": "^3.1.0",
"karma-junit-reporter": "^2.0.1",
"karma-mocha": "^2.0.1",
"karma-source-map-support": "^1.4.0",
"mocha": "^8.4.0",
"mocha": "^9.1.3",
"mocha-junit-reporter": "^2.0.0",
"path-browserify": "^1.0.1",
"process": "^0.11.10",
@ -84,7 +85,8 @@
"test:browser": "xvfb-maybe karma start karma.conf.local.js",
"test:react-native": "jest test/react-native",
"test:typings": "ts-node ./typings-tests.ts",
"prepublishOnly": "npm run build:browser"
"prepublishOnly": "npm run build:browser",
"generateDocs": "jsdoc -c jsdoc.conf.js"
},
"main": "index.js",
"browser": {

@ -760,7 +760,7 @@ describe('Database async', function () {
affectedDocuments: affectedDocumentsEmpty
} = await d.updateAsync({ impossible: 'db is empty anyway' }, { newDoc: true }, {})
assert.equal(numAffectedEmpty, 0)
assert.equal(affectedDocumentsEmpty, undefined)
assert.equal(affectedDocumentsEmpty, null)
const docsEmpty = await d.findAsync({})
assert.equal(docsEmpty.length, 0) // Default option for upsert is false
@ -1095,8 +1095,8 @@ describe('Database async', function () {
// returnUpdatedDocs set to false
const { numAffected, affectedDocuments, upsert } = await d.updateAsync({ a: 1 }, { $set: { b: 20 } }, {})
assert.equal(numAffected, 1)
assert.equal(affectedDocuments, undefined)
assert.equal(upsert, undefined)
assert.equal(affectedDocuments, null)
assert.equal(upsert, false)
// returnUpdatedDocs set to true
const {
@ -1107,7 +1107,7 @@ describe('Database async', function () {
assert.equal(numAffected2, 1)
assert.equal(affectedDocuments2.a, 1)
assert.equal(affectedDocuments2.b, 21)
assert.equal(upsert2, undefined)
assert.equal(upsert2, false)
})
it('Regular update, multi true', async () => {
@ -1121,8 +1121,8 @@ describe('Database async', function () {
upsert
} = await d.updateAsync({}, { $set: { b: 20 } }, { multi: true })
assert.equal(numAffected, 2)
assert.equal(affectedDocuments, undefined)
assert.equal(upsert, undefined)
assert.equal(affectedDocuments, null)
assert.equal(upsert, false)
// returnUpdatedDocs set to true
const {
@ -1135,7 +1135,7 @@ describe('Database async', function () {
})
assert.equal(numAffected2, 2)
assert.equal(affectedDocuments2.length, 2)
assert.equal(upsert2, undefined)
assert.equal(upsert2, false)
})
it('Upsert', async () => {
@ -1145,8 +1145,8 @@ describe('Database async', function () {
// Upsert flag not set
const { numAffected, affectedDocuments, upsert } = await d.updateAsync({ a: 3 }, { $set: { b: 20 } }, {})
assert.equal(numAffected, 0)
assert.equal(affectedDocuments, undefined)
assert.equal(upsert, undefined)
assert.equal(affectedDocuments, null)
assert.equal(upsert, false)
// Upsert flag set
const {

@ -1293,10 +1293,10 @@ describe('Database', function () {
describe('Upserts', function () {
it('Can perform upserts if needed', function (done) {
d.update({ impossible: 'db is empty anyway' }, { newDoc: true }, {}, function (err, nr, upsert) {
d.update({ impossible: 'db is empty anyway' }, { newDoc: true }, {}, function (err, nr, affectedDocuments) {
assert.isNull(err)
nr.should.equal(0)
assert.isUndefined(upsert)
assert.isNull(affectedDocuments)
// eslint-disable-next-line node/handle-callback-err
d.find({}, function (err, docs) {
@ -1791,8 +1791,8 @@ describe('Database', function () {
d.update({ a: 1 }, { $set: { b: 20 } }, {}, function (err, numAffected, affectedDocuments, upsert) {
assert.isNull(err)
numAffected.should.equal(1)
assert.isUndefined(affectedDocuments)
assert.isUndefined(upsert)
assert.isNull(affectedDocuments)
assert.isFalse(upsert)
// returnUpdatedDocs set to true
d.update({ a: 1 }, { $set: { b: 21 } }, { returnUpdatedDocs: true }, function (err, numAffected, affectedDocuments, upsert) {
@ -1800,7 +1800,7 @@ describe('Database', function () {
numAffected.should.equal(1)
affectedDocuments.a.should.equal(1)
affectedDocuments.b.should.equal(21)
assert.isUndefined(upsert)
assert.isFalse(upsert)
done()
})
@ -1815,8 +1815,8 @@ describe('Database', function () {
d.update({}, { $set: { b: 20 } }, { multi: true }, function (err, numAffected, affectedDocuments, upsert) {
assert.isNull(err)
numAffected.should.equal(2)
assert.isUndefined(affectedDocuments)
assert.isUndefined(upsert)
assert.isNull(affectedDocuments)
assert.isFalse(upsert)
// returnUpdatedDocs set to true
d.update({}, { $set: { b: 21 } }, {
@ -1826,7 +1826,7 @@ describe('Database', function () {
assert.isNull(err)
numAffected.should.equal(2)
affectedDocuments.length.should.equal(2)
assert.isUndefined(upsert)
assert.isFalse(upsert)
done()
})
@ -1841,8 +1841,8 @@ describe('Database', function () {
d.update({ a: 3 }, { $set: { b: 20 } }, {}, function (err, numAffected, affectedDocuments, upsert) {
assert.isNull(err)
numAffected.should.equal(0)
assert.isUndefined(affectedDocuments)
assert.isUndefined(upsert)
assert.isNull(affectedDocuments)
assert.isFalse(upsert)
// Upsert flag set
d.update({ a: 3 }, { $set: { b: 21 } }, { upsert: true }, function (err, numAffected, affectedDocuments, upsert) {

@ -65,7 +65,7 @@ describe('Persistence async', function () {
model.serialize({ _id: '3', nested: { today: now } })
const treatedData = d.persistence.treatRawData(rawData).data
treatedData.sort(function (a, b) { return a._id - b._id })
treatedData.sort((a, b) => a._id - b._id)
assert.equal(treatedData.length, 2)
assert.deepEqual(treatedData[0], { _id: '1', a: 2, ages: [1, 5, 12] })
assert.deepEqual(treatedData[1], { _id: '3', nested: { today: now } })
