massive update of the JSDoc, rewrite of the model & storage modules to clarify

pull/11/head
Timothée Rebours 3 years ago
parent 6d43867c20
commit 0e9e5c7f4d
  1. browser-version/lib/customUtils.js (5)
  2. browser-version/lib/storage.browser.js (183)
  3. browser-version/lib/storage.react-native.js (174)
  4. index.d.ts (33)
  5. jsdoc.conf.js (11)
  6. lib/cursor.js (6)
  7. lib/customUtils.js (6)
  8. lib/datastore.js (119)
  9. lib/executor.js (15)
  10. lib/indexes.js (8)
  11. lib/model.js (563)
  12. lib/persistence.js (48)
  13. lib/storage.js (289)
  14. lib/utils.js (10)
  15. package-lock.json (326)
  16. package.json (4)

@ -1,5 +1,7 @@
/**
* Specific customUtils for the browser, where we don't have access to the Crypto and Buffer modules
* Utility functions that need to be reimplemented for each environment.
* This is the version for the browser & React-Native
* @module customUtilsBrowser
*/
/**
@ -66,6 +68,7 @@ const byteArrayToBase64 = uint8 => {
* See http://en.wikipedia.org/wiki/Birthday_problem
* @param {number} len
* @return {string}
* @alias module:customUtilsBrowser.uid
*/
const uid = len => byteArrayToBase64(randomBytes(Math.ceil(Math.max(8, len * 2)))).replace(/[+/]/g, '').slice(0, len)

@ -1,13 +1,14 @@
/**
* Way data is stored for this database
* For a Node.js/Node Webkit database it's the file system
* For a browser-side database it's localforage, which uses the best backend available (IndexedDB then WebSQL then localStorage)
* For a react-native database, we use @react-native-async-storage/async-storage
* For a browser-side database it's localforage which chooses the best option depending on user browser (IndexedDB then WebSQL then localStorage)
*
* This version is the browser version
* @module storageBrowser
*/
const localforage = require('localforage')
const { callbackify } = require('util')
const { callbackify } = require('util') // TODO: util is not a dependency, this would fail if util is not polyfilled
// Configure localforage to display the NeDB name for now. Would be a good idea to let the user use their own app name
const store = localforage.createInstance({
@ -15,9 +16,16 @@ const store = localforage.createInstance({
storeName: 'nedbdata'
})
const existsAsync = async filename => {
/**
* Returns Promise<true> if file exists
* @param {string} file
* @return {Promise<boolean>}
* @async
* @alias module:storageBrowser.existsAsync
*/
const existsAsync = async file => {
try {
const value = await store.getItem(filename)
const value = await store.getItem(file)
if (value !== null) return true // Even if value is undefined, localforage returns null
return false
} catch (error) {
@ -25,34 +33,91 @@ const existsAsync = async filename => {
}
}
/**
* @callback module:storageBrowser~existsCallback
* @param {boolean} exists
*/
/**
* Callback returns true if file exists
* @function
* @param {string} file
* @param {module:storageBrowser~existsCallback} cb
* @alias module:storageBrowser.exists
*/
const exists = callbackify(existsAsync)
const renameAsync = async (filename, newFilename) => {
/**
* Moves the item from one path to another
* @param {string} oldPath
* @param {string} newPath
* @return {Promise<void>}
* @alias module:storageBrowser.renameAsync
* @async
*/
const renameAsync = async (oldPath, newPath) => {
try {
const value = await store.getItem(filename)
if (value === null) await store.removeItem(newFilename)
const value = await store.getItem(oldPath)
if (value === null) await store.removeItem(newPath)
else {
await store.setItem(newFilename, value)
await store.removeItem(filename)
await store.setItem(newPath, value)
await store.removeItem(oldPath)
}
} catch (err) {
console.warn('An error happened while renaming, skip')
}
}
/**
* Moves the item from one path to another
* @function
* @param {string} oldPath
* @param {string} newPath
* @param {NoParamCallback} callback
* @return {void}
* @alias module:storageBrowser.rename
*/
const rename = callbackify(renameAsync)
const writeFileAsync = async (filename, contents, options) => {
/**
* Saves the item at given path
* @param {string} file
* @param {string} data
* @param {object} [options]
* @return {Promise<void>}
* @alias module:storageBrowser.writeFileAsync
* @async
*/
const writeFileAsync = async (file, data, options) => {
// Options do not matter in browser setup
try {
await store.setItem(filename, contents)
await store.setItem(file, data)
} catch (error) {
console.warn('An error happened while writing, skip')
}
}
/**
* Saves the item at given path
* @function
* @param {string} path
* @param {string} data
* @param {object} options
* @param {function} callback
* @alias module:storageBrowser.writeFile
*/
const writeFile = callbackify(writeFileAsync)
/**
* Append to the item at given path
* @function
* @param {string} filename
* @param {string} toAppend
* @param {object} [options]
* @return {Promise<void>}
* @alias module:storageBrowser.appendFileAsync
* @async
*/
const appendFileAsync = async (filename, toAppend, options) => {
// Options do not matter in browser setup
try {
@ -63,8 +128,26 @@ const appendFileAsync = async (filename, toAppend, options) => {
}
}
/**
* Append to the item at given path
* @function
* @param {string} filename
* @param {string} toAppend
* @param {object} [options]
* @param {function} callback
* @alias module:storageBrowser.appendFile
*/
const appendFile = callbackify(appendFileAsync)
/**
* Read data at given path
* @function
* @param {string} filename
* @param {object} [options]
* @return {Promise<Buffer>}
* @alias module:storageBrowser.readFileAsync
* @async
*/
const readFileAsync = async (filename, options) => {
try {
return (await store.getItem(filename)) || ''
@ -73,9 +156,24 @@ const readFileAsync = async (filename, options) => {
return ''
}
}
/**
* Read data at given path
* @function
* @param {string} filename
* @param {object} options
* @param {function} callback
* @alias module:storageBrowser.readFile
*/
const readFile = callbackify(readFileAsync)
/**
* Remove the data at given path
* @function
* @param {string} filename
* @return {Promise<void>}
* @async
* @alias module:storageBrowser.unlinkAsync
*/
const unlinkAsync = async filename => {
try {
await store.removeItem(filename)
@ -84,23 +182,74 @@ const unlinkAsync = async filename => {
}
}
/**
* Remove the data at given path
* @function
* @param {string} path
* @param {function} callback
* @alias module:storageBrowser.unlink
*/
const unlink = callbackify(unlinkAsync)
// Nothing to do, no directories will be used on the browser
const mkdirAsync = (dir, options) => Promise.resolve()
/**
* Shim for storage.mkdirAsync, nothing to do, no directories will be used on the browser
* @function
* @param {string} path
* @param {object} [options]
* @return {Promise<void|string>}
* @alias module:storageBrowser.mkdirAsync
* @async
*/
const mkdirAsync = (path, options) => Promise.resolve()
/**
* Shim for storage.mkdir, nothing to do, no directories will be used on the browser
* @function
* @param {string} path
* @param {object} options
* @param {function} callback
* @alias module:storageBrowser.mkdir
*/
const mkdir = callbackify(mkdirAsync)
// Nothing to do, no data corruption possible in the browser
/**
* Ensure the datafile contains all the data, even if there was a crash during a full file write
* Nothing to do, no data corruption possible in the browser
* @param {string} filename
* @return {Promise<void>}
* @alias module:storageBrowser.ensureDatafileIntegrityAsync
*/
const ensureDatafileIntegrityAsync = (filename) => Promise.resolve()
/**
* Ensure the datafile contains all the data, even if there was a crash during a full file write
* Nothing to do, no data corruption possible in the browser
* @function
* @param {string} filename
* @param {NoParamCallback} callback signature: err
* @alias module:storageBrowser.ensureDatafileIntegrity
*/
const ensureDatafileIntegrity = callbackify(ensureDatafileIntegrityAsync)
/**
* Fully write or rewrite the datafile, immune to crashes during the write operation (data will not be lost)
* @param {string} filename
* @param {string[]} lines
* @return {Promise<void>}
* @alias module:storageBrowser.crashSafeWriteFileLinesAsync
*/
const crashSafeWriteFileLinesAsync = async (filename, lines) => {
lines.push('') // Add final new line
await writeFileAsync(filename, lines.join('\n'))
}
/**
* Fully write or rewrite the datafile, immune to crashes during the write operation (data will not be lost)
* @function
* @param {string} filename
* @param {string[]} lines
* @param {NoParamCallback} [callback] Optional callback, signature: err
* @alias module:storageBrowser.crashSafeWriteFileLines
*/
const crashSafeWriteFileLines = callbackify(crashSafeWriteFileLinesAsync)
// Interface

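For illustration (not part of the diff): each storage primitive above is written once as an async function, and its callback twin is generated with util.callbackify, so the two call styles below behave the same. The file name and contents are invented; the require path is simply taken from the changed-files list.

const storage = require('./browser-version/lib/storage.browser.js')

// Promise form
storage.writeFileAsync('example.db', 'line1\nline2\n', {})
  .then(() => storage.readFileAsync('example.db', {}))
  .then(data => console.log(data))

// Callback form produced by util.callbackify: the callback always receives (err, result)
storage.readFile('example.db', {}, (err, data) => {
  if (err) console.warn(err)
  else console.log(data)
})
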
@ -5,48 +5,112 @@
* For a react-native database, we use @react-native-async-storage/async-storage
*
* This version is the react-native version
* @module storageReactNative
*/
const AsyncStorage = require('@react-native-async-storage/async-storage').default
const { callbackify } = require('util')
const { callbackify } = require('util') // TODO: util is not a dependency, this would fail if util is not polyfilled
const existsAsync = async filename => {
/**
* Returns Promise<true> if file exists
* @param {string} file
* @return {Promise<boolean>}
* @async
* @alias module:storageReactNative.existsAsync
*/
const existsAsync = async file => {
try {
const value = await AsyncStorage.getItem(filename)
const value = await AsyncStorage.getItem(file)
if (value !== null) return true // Even if value is undefined, AsyncStorage returns null
return false
} catch (error) {
return false
}
}
/**
* @callback module:storageReactNative~existsCallback
* @param {boolean} exists
*/
/**
* Callback returns true if file exists
* @function
* @param {string} file
* @param {module:storageReactNative~existsCallback} cb
* @alias module:storageReactNative.exists
*/
const exists = callbackify(existsAsync)
const renameAsync = async (filename, newFilename) => {
/**
* Moves the item from one path to another
* @param {string} oldPath
* @param {string} newPath
* @return {Promise<void>}
* @alias module:storageReactNative.renameAsync
* @async
*/
const renameAsync = async (oldPath, newPath) => {
try {
const value = await AsyncStorage.getItem(filename)
if (value === null) await AsyncStorage.removeItem(newFilename)
const value = await AsyncStorage.getItem(oldPath)
if (value === null) await AsyncStorage.removeItem(newPath)
else {
await AsyncStorage.setItem(newFilename, value)
await AsyncStorage.removeItem(filename)
await AsyncStorage.setItem(newPath, value)
await AsyncStorage.removeItem(oldPath)
}
} catch (err) {
console.warn('An error happened while renaming, skip')
}
}
/**
* Moves the item from one path to another
* @function
* @param {string} oldPath
* @param {string} newPath
* @param {NoParamCallback} callback
* @return {void}
* @alias module:storageReactNative.rename
*/
const rename = callbackify(renameAsync)
const writeFileAsync = async (filename, contents, options) => {
/**
* Saves the item at given path
* @param {string} file
* @param {string} data
* @param {object} [options]
* @return {Promise<void>}
* @alias module:storageReactNative.writeFileAsync
* @async
*/
const writeFileAsync = async (file, data, options) => {
// Options do not matter in browser setup
try {
await AsyncStorage.setItem(filename, contents)
await AsyncStorage.setItem(file, data)
} catch (error) {
console.warn('An error happened while writing, skip')
}
}
/**
* Saves the item at given path
* @function
* @param {string} path
* @param {string} data
* @param {object} options
* @param {function} callback
* @alias module:storageReactNative.writeFile
*/
const writeFile = callbackify(writeFileAsync)
/**
* Append to the item at given path
* @function
* @param {string} filename
* @param {string} toAppend
* @param {object} [options]
* @return {Promise<void>}
* @alias module:storageReactNative.appendFileAsync
* @async
*/
const appendFileAsync = async (filename, toAppend, options) => {
// Options do not matter in browser setup
try {
@ -57,8 +121,26 @@ const appendFileAsync = async (filename, toAppend, options) => {
}
}
/**
* Append to the item at given path
* @function
* @param {string} filename
* @param {string} toAppend
* @param {object} [options]
* @param {function} callback
* @alias module:storageReactNative.appendFile
*/
const appendFile = callbackify(appendFileAsync)
/**
* Read data at given path
* @function
* @param {string} filename
* @param {object} [options]
* @return {Promise<string>}
* @alias module:storageReactNative.readFileAsync
* @async
*/
const readFileAsync = async (filename, options) => {
try {
return (await AsyncStorage.getItem(filename)) || ''
@ -68,8 +150,24 @@ const readFileAsync = async (filename, options) => {
}
}
/**
* Read data at given path
* @function
* @param {string} filename
* @param {object} options
* @param {function} callback
* @alias module:storageReactNative.readFile
*/
const readFile = callbackify(readFileAsync)
/**
* Remove the data at given path
* @function
* @param {string} filename
* @return {Promise<void>}
* @async
* @alias module:storageReactNative.unlinkAsync
*/
const unlinkAsync = async filename => {
try {
await AsyncStorage.removeItem(filename)
@ -78,23 +176,75 @@ const unlinkAsync = async filename => {
}
}
/**
* Remove the data at given path
* @function
* @param {string} path
* @param {function} callback
* @alias module:storageReactNative.unlink
*/
const unlink = callbackify(unlinkAsync)
// Nothing to do, no directories will be used on react-native
/**
* Shim for storage.mkdirAsync, nothing to do, no directories will be used on react-native
* @function
* @param {string} dir
* @param {object} [options]
* @return {Promise<void|string>}
* @alias module:storageReactNative.mkdirAsync
* @async
*/
const mkdirAsync = (dir, options) => Promise.resolve()
/**
* Shim for storage.mkdir, nothing to do, no directories will be used on react-native
* @function
* @param {string} path
* @param {object} options
* @param {function} callback
* @alias module:storageReactNative.mkdir
*/
const mkdir = callbackify(mkdirAsync)
// Nothing to do, no data corruption possible in the browser
/**
* Ensure the datafile contains all the data, even if there was a crash during a full file write
* Nothing to do, no data corruption possible on react-native
* @param {string} filename
* @return {Promise<void>}
* @alias module:storageReactNative.ensureDatafileIntegrityAsync
*/
const ensureDatafileIntegrityAsync = (filename) => Promise.resolve()
/**
* Ensure the datafile contains all the data, even if there was a crash during a full file write
* Nothing to do, no data corruption possible on react-native
* @function
* @param {string} filename
* @param {NoParamCallback} callback signature: err
* @alias module:storageReactNative.ensureDatafileIntegrity
*/
const ensureDatafileIntegrity = callbackify(ensureDatafileIntegrityAsync)
/**
* Fully write or rewrite the datafile, immune to crashes during the write operation (data will not be lost)
* @param {string} filename
* @param {string[]} lines
* @return {Promise<void>}
* @alias module:storageReactNative.crashSafeWriteFileLinesAsync
*/
const crashSafeWriteFileLinesAsync = async (filename, lines) => {
lines.push('') // Add final new line
await writeFileAsync(filename, lines.join('\n'))
}
/**
* Fully write or rewrite the datafile, immune to crashes during the write operation (data will not be lost)
* @function
* @param {string} filename
* @param {string[]} lines
* @param {NoParamCallback} [callback] Optional callback, signature: err
* @alias module:storageReactNative.crashSafeWriteFileLines
*/
const crashSafeWriteFileLines = callbackify(crashSafeWriteFileLinesAsync)
// Interface

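Illustrative sketch, not part of the diff: because AsyncStorage (like localforage) writes a whole value atomically, crashSafeWriteFileLinesAsync needs no temporary file and reduces to joining the lines and doing a single setItem, roughly as below.

const AsyncStorage = require('@react-native-async-storage/async-storage').default

// Rough equivalent of crashSafeWriteFileLinesAsync in the key-value backends
const writeLinesAtomically = async (filename, lines) => {
  const data = lines.concat('').join('\n') // keep the trailing newline, as the real function does
  await AsyncStorage.setItem(filename, data) // one atomic write, nothing to recover after a crash
}
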
index.d.ts (33, vendored)

@ -1,6 +1,7 @@
// Type definitions for @seald-io/nedb 2.1.0
// Project: https://github.com/seald/nedb forked from https://github.com/louischatriot/nedb
// Definitions by: Mehdi Kouhen <https://github.com/arantes555>
// Definitions by: Timothée Rebours <https://github.com/tex0l>
// Mehdi Kouhen <https://github.com/arantes555>
// Stefan Steinhart <https://github.com/reppners>
// Anthony Nichols <https://github.com/anthonynichols>
// Alejandro Fernandez Haro <https://github.com/afharo>
@ -17,16 +18,24 @@ declare class Nedb<G = any> extends EventEmitter {
persistence: Nedb.Persistence;
autoloadPromise: Promise<void>|null;
loadDatabase(): void;
loadDatabaseAsync(): Promise<void>;
getAllData<T extends G>(): T[];
resetIndexes(newData?: any): void;
ensureIndex(options: Nedb.EnsureIndexOptions, callback?: (err: Error | null) => void): void;
ensureIndexAsync(options: Nedb.EnsureIndexOptions): Promise<void>;
removeIndex(fieldName: string, callback?: (err: Error | null) => void): void;
removeIndexAsync(fieldName: string): Promise<void>;
addToIndexes<T extends G>(doc: T | T[]): void;
removeFromIndexes<T extends G>(doc: T | T[]): void;
@ -36,24 +45,39 @@ declare class Nedb<G = any> extends EventEmitter {
getCandidates<T extends G>(query: any, dontExpireStaleDocs: boolean, callback?: (err: Error | null, candidates: T[]) => void): void;
getCandidatesAsync<T extends G>(query: any, dontExpireStaleDocs: boolean): Promise<T[]>;
insert<T extends G>(newDoc: T, callback?: (err: Error | null, document: T) => void): void;
insert<T extends G>(newDocs: T[], callback?: (err: Error | null, documents: T[]) => void): void;
insertAsync<T extends G>(newDoc: T): Promise<T>;
insertAsync<T extends G>(newDocs: T[]): Promise<T[]>;
count(query: any, callback: (err: Error | null, n: number) => void): void;
count(query: any): Nedb.CursorCount;
countAsync(query: any): Nedb.Cursor<number>;
find<T extends G>(query: any, projection: any, callback?: (err: Error | null, documents: T[]) => void): void;
find<T extends G>(query: any, projection?: any): Nedb.Cursor<T>;
find<T extends G>(query: any, callback: (err: Error | null, documents: T[]) => void): void;
findAsync<T extends G>(query: any, projection?: any): Nedb.Cursor<T[]>;
findOne<T extends G>(query: any, projection: any, callback: (err: Error | null, document: T) => void): void;
findOne<T extends G>(query: any, callback: (err: Error | null, document: T) => void): void;
findOneAsync<T extends G>(query: any, projection?: any): Nedb.Cursor<T>;
update<T extends G>(query: any, updateQuery: any, options?: Nedb.UpdateOptions, callback?: (err: Error | null, numberOfUpdated: number, affectedDocuments: T | T[] | null, upsert: boolean | null) => void): void;
updateAsync<T extends G>(query: any, updateQuery: any, options?: Nedb.UpdateOptions): Promise<{numAffected: number, affectedDocuments: T|T[]|null, upsert: boolean}>;
remove(query: any, options: Nedb.RemoveOptions, callback?: (err: Error | null, n: number) => void): void;
remove(query: any, callback?: (err: Error | null, n: number) => void): void;
removeAsync(query: any, options: Nedb.RemoveOptions): Promise<number>;
addListener(event: 'compaction.done', listener: () => void): this;
on(event: 'compaction.done', listener: () => void): this;
once(event: 'compaction.done', listener: () => void): this;
@ -67,12 +91,13 @@ declare class Nedb<G = any> extends EventEmitter {
}
declare namespace Nedb {
interface Cursor<T> {
interface Cursor<T> extends Promise<T> {
sort(query: any): Cursor<T>;
skip(n: number): Cursor<T>;
limit(n: number): Cursor<T>;
projection(query: any): Cursor<T>;
exec(callback: (err: Error | null, documents: T[]) => void): void;
execAsync(): Promise<T>;
}
interface CursorCount {
@ -83,13 +108,14 @@ declare namespace Nedb {
filename?: string;
timestampData?: boolean;
inMemoryOnly?: boolean;
nodeWebkitAppName?: string;
autoload?: boolean;
onload?(error: Error | null): any;
beforeDeserialization?(line: string): string;
afterSerialization?(line: string): string;
corruptAlertThreshold?: number;
compareStrings?(a: string, b: string): number;
/** @deprecated */
nodeWebkitAppName?: string;
}
interface UpdateOptions {
@ -111,6 +137,7 @@ declare namespace Nedb {
interface Persistence {
compactDatafile(): void;
compactDatafileAsync(): Promise<void>;
setAutocompactionInterval(interval: number): void;
stopAutocompaction(): void;
}

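Not part of the commit: a small usage sketch of the promise-based surface these typings now describe (loadDatabaseAsync, insertAsync, findAsync returning a Cursor that extends Promise). The collection fields are invented.

const Datastore = require('@seald-io/nedb')
const db = new Datastore({ filename: 'example.db' })

const demo = async () => {
  await db.loadDatabaseAsync()
  await db.insertAsync({ planet: 'Earth', inhabited: true })
  // findAsync returns a Cursor; since Cursor extends Promise it can be chained
  // with sort/skip/limit and awaited directly
  return db.findAsync({ inhabited: true }).sort({ planet: 1 }).limit(5)
}

demo().then(docs => console.log(docs)).catch(console.error)
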
@ -1,11 +0,0 @@
'use strict'
module.exports = {
plugins: ['plugins/markdown'],
source: {
include: ['./lib']
},
opts: {
destination: './docs'
}
}

@ -1,6 +1,3 @@
/**
* Manage access to data, be it to find, update or remove it
*/
const model = require('./model.js')
const { callbackify, promisify } = require('util')
@ -17,6 +14,9 @@ const { callbackify, promisify } = require('util')
*/
/**
* Manage access to data, be it to find, update or remove it.
*
* It extends Promise so that its methods are chainable & awaitable.
* @extends Promise
*/
class Cursor {

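Illustration only, not the actual Cursor code: the chainable-and-awaitable behaviour boils down to the cursor being a thenable, i.e. an object whose then method triggers execution, roughly like this minimal sketch.

// Minimal thenable: awaiting the object runs the query lazily
class MiniCursor {
  constructor (execAsync) { this._execAsync = execAsync }
  limit (n) { this._limit = n; return this } // chainable setter, like the real Cursor
  then (onFulfilled, onRejected) {
    return this._execAsync(this).then(onFulfilled, onRejected)
  }
}

// usage: await new MiniCursor(async c => [1, 2, 3].slice(0, c._limit)).limit(2) resolves to [1, 2]
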
@ -1,3 +1,8 @@
/**
* Utility functions that need to be reimplemented for each environment.
* This is the version for Node.js
* @module customUtilsNode
*/
const crypto = require('crypto')
/**
@ -9,6 +14,7 @@ const crypto = require('crypto')
* See http://en.wikipedia.org/wiki/Birthday_problem
* @param {number} len
* @return {string}
* @alias module:customUtilsNode.uid
*/
const uid = len => crypto.randomBytes(Math.ceil(Math.max(8, len * 2)))
.toString('base64')

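For reference, not in the diff: both uid implementations (Node and browser) draw at least 8 random bytes, base64-encode them, strip '+' and '/', and keep len characters. Assuming the module still exports uid as before:

const { uid } = require('./lib/customUtils.js')

console.log(uid(16)) // e.g. 'hJ3k9QzPbv2LmWx1': 16 base64 characters without '+' or '/'
console.log(uid(8))  // at least 8 bytes of entropy are used even for short ids
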
@ -9,12 +9,9 @@ const Persistence = require('./persistence.js')
const { isDate } = require('./utils.js')
/**
* Compaction event. Happens when the Datastore's Persistence has been compacted.
* It happens when calling `datastore.persistence.compactDatafile`, which is called periodically if you have called
* `datastore.persistence.setAutocompactionInterval`.
*
* @event Datastore#event:"compaction.done"
* @type {undefined}
* Callback with no parameter
* @callback NoParamCallback
* @param {?Error} err
*/
/**
@ -30,13 +27,43 @@ const { isDate } = require('./utils.js')
* @return {number}
*/
/**
* Callback that returns an Array of documents
* @callback MultipleDocumentsCallback
* @param {?Error} err
* @param {?document[]} docs
*/
/**
* Callback that returns a single document
* @callback SingleDocumentCallback
* @param {?Error} err
* @param {?document} doc
*/
/**
* Generic async function
* @callback AsyncFunction
* @param {...*} args
* @return {Promise<*>}
*/
/**
* Compaction event. Happens when the Datastore's Persistence has been compacted.
* It happens when calling `datastore.persistence.compactDatafile`, which is called periodically if you have called
* `datastore.persistence.setAutocompactionInterval`.
*
* @event Datastore.event:"compaction.done"
* @type {undefined}
*/
/**
* Generic document in NeDB.
* It consists of an Object with anything you want inside.
* @typedef document
* @property {?string} _id Internal `_id` of the document, which can be `null` at some points (when not inserted yet
* for example).
* @type {object.<string, *>}
* @property {?string} [_id] Internal `_id` of the document, which can be `null` or undefined at some points (when not
* inserted yet for example).
* @type {Object.<string, *>}
*/
/**
@ -72,7 +99,7 @@ const { isDate } = require('./utils.js')
* } }
* ```
* @typedef query
* @type {object.<string, *>}
* @type {Object.<string, *>}
*/
/**
@ -87,7 +114,7 @@ const { isDate } = require('./utils.js')
* To reference subfields, you can use the dot-notation.
*
* @typedef projection
* @type {object.<string, 0|1>}
* @type {Object.<string, 0|1>}
*/
/**
@ -148,7 +175,7 @@ class Datastore extends EventEmitter {
* OS X and Windows. Now that you can use `require('nw.gui').App.dataPath` in Node Webkit to get the path to the data
* directory for your application, you should not use this option anymore and it will be removed.
*
* @fires Datastore#event:"compaction.done"
* @fires Datastore.event:"compaction.done"
*/
constructor (options) {
super()
@ -254,7 +281,7 @@ class Datastore extends EventEmitter {
* A Promise that resolves when the autoload has finished.
*
* The onload callback is not awaited by this Promise, it is started immediately after that.
* @type {Promise}
* @type {?Promise}
*/
this.autoloadPromise = this.loadDatabaseAsync()
this.autoloadPromise
@ -301,11 +328,6 @@ class Datastore extends EventEmitter {
}
}
/**
* @callback Datastore~ensureIndexCallback
* @param {?Error} err
*/
/**
* Ensure an index is kept for this field. Same parameters as lib/indexes
* This function acts synchronously on the indexes, however the persistence of the indexes is deferred with the
@ -316,7 +338,7 @@ class Datastore extends EventEmitter {
* @param {boolean} [options.unique = false] Enforce field uniqueness. Note that a unique index will raise an error if you try to index two documents for which the field is not defined.
* @param {boolean} [options.sparse = false] don't index documents for which the field is not defined. Use this option along with "unique" if you want to accept multiple documents for which it is not defined.
* @param {number} [options.expireAfterSeconds] - if set, the created index is a TTL (time to live) index, that will automatically remove documents when the system date becomes larger than the date on the indexed field plus `expireAfterSeconds`. Documents where the indexed field is not specified or not a `Date` object are ignored
* @param {Datastore~ensureIndexCallback} callback Callback, signature: err
* @param {NoParamCallback} callback Callback, signature: err
*/
// TODO: contrary to what is said in the JSDoc, this function should probably be called through the executor, it persists a new state
ensureIndex (options = {}, callback = () => {}) {
@ -358,17 +380,12 @@ class Datastore extends EventEmitter {
await this.persistence.persistNewStateAsync([{ $$indexCreated: options }])
}
/**
* @callback Datastore~removeIndexCallback
* @param {?Error} err
*/
/**
* Remove an index
* Previous versions stated explicitly that the callback was optional; it is now recommended to set one.
* @param {string} fieldName Field name of the index to remove. Use the dot notation to remove an index referring to a
* field in a nested document.
* @param {Datastore~removeIndexCallback} callback Optional callback, signature: err
* @param {NoParamCallback} callback Optional callback, signature: err
*/
// TODO: contrary to what is said in the JSDoc, this function should probably be called through the executor, it persists a new state
removeIndex (fieldName, callback = () => {}) {
@ -502,12 +519,6 @@ class Datastore extends EventEmitter {
return this.getAllData()
}
/**
* @callback Datastore~getCandidatesCallback
* @param {?Error} err
* @param {?document[]} candidates
*/
/**
* Return the list of candidates for a given query
* Crude implementation for now, we return the candidates given by the first usable index if any
@ -520,7 +531,7 @@ class Datastore extends EventEmitter {
* @param {query} query
* @param {boolean|function} [dontExpireStaleDocs = false] If true don't remove stale docs. Useful for the remove
* function which shouldn't be impacted by expirations. If argument is not given, it is used as the callback.
* @param {Datastore~getCandidatesCallback} callback Signature err, candidates
* @param {MultipleDocumentsCallback} callback Signature err, candidates
*
* @private
*/
@ -570,17 +581,11 @@ class Datastore extends EventEmitter {
return validDocs
}
/**
* @callback Datastore~insertCallback
* @param {?Error} err
* @param {?document} insertedDoc
*/
/**
* Insert a new document
* Private. Use Datastore.insert which has the same signature
* @param {?document} newDoc
* @param {Datastore~insertCallback} callback Optional callback, signature: err, insertedDoc
* @param {document|document[]} newDoc
* @param {SingleDocumentCallback} [callback = () => {}] Optional callback, signature: err, insertedDoc
*
* @private
*/
@ -591,8 +596,8 @@ class Datastore extends EventEmitter {
/**
* Insert a new document
* Private. Use Datastore.insertAsync which has the same signature
* @param {document} newDoc
* @return {Promise<document>}
* @param {document|document[]} newDoc
* @return {Promise<document|document[]>}
* @private
*/
async _insertAsync (newDoc) {
@ -682,8 +687,8 @@ class Datastore extends EventEmitter {
/**
* Insert a new document
* Private. Use Datastore.insert which has the same signature
* @param {document} newDoc
* @param {Datastore~insertCallback} callback Optional callback, signature: err, insertedDoc
* @param {document|document[]} newDoc
* @param {SingleDocumentCallback} [callback = () => {}] Optional callback, signature: err, insertedDoc
*
* @private
*/
@ -694,7 +699,7 @@ class Datastore extends EventEmitter {
/**
* Insert a new document
* Private. Use Datastore.insertAsync which has the same signature
* @param {document} newDoc
* @param {document|document[]} newDoc
* @return {Promise<document>}
* @async
*/
@ -731,19 +736,13 @@ class Datastore extends EventEmitter {
return new Cursor(this, query, async docs => docs.length, true) // this is a trick, Cursor itself is a thenable, which allows it to be awaited
}
/**
* @callback Datastore~findCallback
* @param {?Error} err
* @param {document[]} docs
*/
/**
* Find all documents matching the query
* If no callback is passed, we return the cursor so that user can limit, skip and finally exec
* @param {query} query MongoDB-style query
* @param {projection|Datastore~findCallback} [projection = {}] MongoDB-style projection. If not given, will be
* @param {projection|MultipleDocumentsCallback} [projection = {}] MongoDB-style projection. If not given, will be
* interpreted as the callback.
* @param {Datastore~findCallback} [callback] Optional callback, signature: err, docs
* @param {MultipleDocumentsCallback} [callback] Optional callback, signature: err, docs
* @return {Cursor<document[]>|undefined}
*/
find (query, projection, callback) {
@ -788,7 +787,7 @@ class Datastore extends EventEmitter {
* Find one document matching the query
* @param {query} query MongoDB-style query
* @param {projection} projection MongoDB-style projection
* @param {Datastore~findOneCallback} callback Optional callback, signature: err, doc
* @param {SingleDocumentCallback} callback Optional callback, signature: err, doc
* @return {Cursor<document>|undefined}
*/
findOne (query, projection, callback) {
@ -862,7 +861,7 @@ class Datastore extends EventEmitter {
* @param {boolean} [options.returnUpdatedDocs = false] (not Mongo-DB compatible) If true and update is not an upsert,
* will return the array of documents matched by the find query and updated. Updated documents will be returned even
* if the update did not actually modify them.
* @param {Datastore~updateCallback} [cb] Optional callback
* @param {Datastore~updateCallback} [cb = () => {}] Optional callback
*
* @private
*/
@ -900,7 +899,7 @@ class Datastore extends EventEmitter {
* will return the array of documents matched by the find query and updated. Updated documents will be returned even
* if the update did not actually modify them.
*
* @return {Promise<{numAffected: number, affectedDocuments: document[]|document, upsert: boolean}>}
* @return {Promise<{numAffected: number, affectedDocuments: document[]|document|null, upsert: boolean}>}
*
* @private
*/
@ -987,7 +986,7 @@ class Datastore extends EventEmitter {
* @param {boolean} [options.returnUpdatedDocs = false] (not Mongo-DB compatible) If true and update is not an upsert,
* will return the array of documents matched by the find query and updated. Updated documents will be returned even
* if the update did not actually modify them.
* @param {Datastore~updateCallback} [cb] Optional callback
* @param {Datastore~updateCallback} [cb = () => {}] Optional callback
*
*/
update (...args) {
@ -1014,7 +1013,7 @@ class Datastore extends EventEmitter {
* will return the array of documents matched by the find query and updated. Updated documents will be returned even
* if the update did not actually modify them.
* @async
* @return {Promise<{numAffected: number, affectedDocuments: document[]|document, upsert: boolean}>}
* @return {Promise<{numAffected: number, affectedDocuments: document[]|document|null, upsert: boolean}>}
*/
updateAsync (...args) {
return this.executor.pushAsync(() => this._updateAsync(...args))
@ -1033,7 +1032,7 @@ class Datastore extends EventEmitter {
* @param {query} query
* @param {object} [options] Optional options
* @param {boolean} [options.multi = false] If true, can update multiple documents
* @param {Datastore~removeCallback} [cb]
* @param {Datastore~removeCallback} [cb = () => {}]
*
* @private
*/
@ -1080,7 +1079,7 @@ class Datastore extends EventEmitter {
* @param {query} query
* @param {object} [options] Optional options
* @param {boolean} [options.multi = false] If true, can update multiple documents
* @param {Datastore~removeCallback} [cb] Optional callback, signature: err, numRemoved
* @param {Datastore~removeCallback} [cb = () => {}] Optional callback, signature: err, numRemoved
*/
remove (...args) {
this.executor.push({ this: this, fn: this._remove, arguments: args })

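Not part of the commit, just to ground the document/query/projection typedefs introduced above; the datastore and its fields are invented for the example.

const Datastore = require('@seald-io/nedb')
const db = new Datastore({ autoload: true }) // in-memory datastore

// document: any plain object, NeDB adds _id on insert
db.insert({ planet: 'Mars', humans: 0, moons: ['Phobos', 'Deimos'] })

// query: MongoDB-style, mixing exact matches and operators
const query = { humans: { $lt: 1 }, moons: { $size: 2 } }

// projection: 0|1 per field, dot-notation allowed for nested fields
const projection = { planet: 1, _id: 0 }

// MultipleDocumentsCallback signature: (err, docs)
db.find(query, projection, (err, docs) => {
  if (err) console.error(err)
  else console.log(docs) // [ { planet: 'Mars' } ]
})
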
@ -1,9 +1,3 @@
/**
* @callback AsyncFunction
* @param {...[*]} args
* @return {Promise<*>}
*/
/**
* Responsible for sequentially executing actions on the database
* @private
@ -59,7 +53,14 @@ class Waterfall {
}
}
/**
* Executes operations sequentially.
* Has an optional buffer that stores operations queued before the executor is ready; they are processed once it is.
*/
class Executor {
/**
* Instantiates a new Executor.
*/
constructor () {
this.ready = false
this.queue = new Waterfall()
@ -110,7 +111,7 @@ class Executor {
/**
* If executor is ready, queue task (and process it immediately if executor was idle)
* If not, buffer task for later processing
* @param {AsyncFunction} task
* @param {function(...*):Promise<*>} task
* @param {boolean} [forceQueuing = false]
* @return {Promise<*>}
* @async

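Illustrative sketch only, not the real Waterfall/Executor code: the core idea of sequential execution is to chain every task onto the promise of the previous one so tasks never overlap.

// Minimal sequential queue in the spirit of Waterfall
class TinyQueue {
  constructor () { this._tail = Promise.resolve() }

  // task is a function returning a Promise; its result is forwarded to the caller
  push (task) {
    const result = this._tail.then(() => task())
    this._tail = result.catch(() => {}) // keep the chain alive even if a task rejects
    return result
  }
}

const queue = new TinyQueue()
queue.push(async () => 'first')
queue.push(async () => 'second').then(console.log) // logs 'second', and only after 'first' settled
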
@ -7,6 +7,7 @@ const { uniq, isDate } = require('./utils.js')
* @param {*} a
* @param {*} b
* @return {boolean}
* @private
*/
const checkValueEquality = (a, b) => a === b
@ -14,8 +15,9 @@ const checkValueEquality = (a, b) => a === b
* Type-aware projection
* @param {*} elt
* @return {string|*}
* @private
*/
function projectForUnique (elt) {
const projectForUnique = elt => {
if (elt === null) return '$null'
if (typeof elt === 'string') return '$string' + elt
if (typeof elt === 'boolean') return '$boolean' + elt
@ -25,6 +27,10 @@ function projectForUnique (elt) {
return elt // Arrays and objects, will check for pointer equality
}
/**
* Indexes documents on a given field name, with atomic operations. It can optionally enforce a unique constraint or
* allow the indexed field to be undefined (sparse index).
*/
class Index {
/**
* Create a new index

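Not in the diff: projectForUnique, shown above, prefixes primitive values with their type so that, for example, the string '5' and a number cannot collide in a unique index. Restating the branches visible in this hunk for a quick check (the elided branches handle numbers and dates with analogous prefixes):

const projectForUniqueSketch = elt => {
  if (elt === null) return '$null'
  if (typeof elt === 'string') return '$string' + elt
  if (typeof elt === 'boolean') return '$boolean' + elt
  return elt // arrays and objects: compared by reference
}

console.log(projectForUniqueSketch('5'))  // '$string5'
console.log(projectForUniqueSketch(true)) // '$booleantrue'
console.log(projectForUniqueSketch([1]))  // [ 1 ]
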
@ -3,19 +3,9 @@
* Serialization/deserialization
* Copying
* Querying, update
* @module model
*/
const { uniq, isDate, isRegExp } = require('./utils.js')
/**
* @type {Object.<string, Model~modifierFunction>}
*/
const modifierFunctions = {}
/**
* @type {Object.<string, Model~modifierFunction>}
*/
const lastStepModifierFunctions = {}
const comparisonFunctions = {}
const logicalOperators = {}
const arrayComparisonFunctions = {}
/**
* Check a key, throw an error if the key is non valid
@ -24,6 +14,7 @@ const arrayComparisonFunctions = {}
* Non-treatable edge case here: if part of the object is of the form { $$date: number } or { $$deleted: true },
* its serialized-then-deserialized version will be transformed into a Date object.
* But you really need to want to trigger such behaviour, even when warned not to use '$' at the beginning of field names...
* @private
*/
const checkKey = (k, v) => {
if (typeof k === 'number') k = k.toString()
@ -43,6 +34,7 @@ const checkKey = (k, v) => {
* Check a DB object and throw an error if it's not valid
* Works by applying the above checkKey function to all fields recursively
* @param {document|document[]} obj
* @alias module:model.checkObject
*/
const checkObject = obj => {
if (Array.isArray(obj)) {
@ -70,6 +62,7 @@ const checkObject = obj => {
* Accepted secondary types: Objects, Arrays
* @param {document} obj
* @return {string}
* @alias module:model.serialize
*/
const serialize = obj => {
return JSON.stringify(obj, function (k, v) {
@ -91,6 +84,7 @@ const serialize = obj => {
* Return the object itself
* @param {string} rawData
* @return {document}
* @alias module:model.deserialize
*/
const deserialize = rawData => JSON.parse(rawData, function (k, v) {
if (k === '$$date') return new Date(v)
@ -112,6 +106,7 @@ const deserialize = rawData => JSON.parse(rawData, function (k, v) {
* @param {?document} obj
* @param {boolean} [strictKeys=false]
* @return {?document}
* @alias module:model.deepCopy
*/
function deepCopy (obj, strictKeys) {
if (
@ -145,6 +140,7 @@ function deepCopy (obj, strictKeys) {
* Arrays are considered primitive
* @param {*} obj
* @return {boolean}
* @alias module:model.isPrimitiveType
*/
const isPrimitiveType = obj => (
typeof obj === 'boolean' ||
@ -162,6 +158,7 @@ const isPrimitiveType = obj => (
* @param {number|string|boolean} a
* @param {number|string|boolean} b
* @return {number} 0 if a == b, 1 if a > b, -1 if a < b
* @private
*/
const compareNSB = (a, b) => {
if (a < b) return -1
@ -176,6 +173,7 @@ const compareNSB = (a, b) => {
* @param {Array} a
* @param {Array} b
* @return {number} 0 if arrays have the same length and all elements equal one another. Else either 1 or -1.
* @private
*/
const compareArrays = (a, b) => {
const minLength = Math.min(a.length, b.length)
@ -198,8 +196,9 @@ const compareArrays = (a, b) => {
* Return -1 if a < b, 1 if a > b and 0 if a = b (note that equality here is NOT the same as defined in areThingsEqual!)
* @param {*} a
* @param {*} b
* @param {Function} [_compareStrings] String comparing function, returning -1, 0 or 1, overriding default string comparison (useful for languages with accented letters)
* @param {compareStrings} [_compareStrings] String comparing function, returning -1, 0 or 1, overriding default string comparison (useful for languages with accented letters)
* @return {number}
* @alias module:model.compareThings
*/
const compareThings = (a, b, _compareStrings) => {
const compareStrings = _compareStrings || compareNSB
@ -250,7 +249,7 @@ const compareThings = (a, b, _compareStrings) => {
// ==============================================================
/**
* @callback Model~modifierFunction
* @callback modifierFunction
* The signature of modifier functions is as follows
* Their structure is always the same: recursively follow the dot notation while creating
* the nested documents if needed, then apply the "last step modifier"
@ -260,80 +259,29 @@ const compareThings = (a, b, _compareStrings) => {
*/
/**
* Set a field to a new value
* @type Model~modifierFunction
*/
lastStepModifierFunctions.$set = (obj, field, value) => {
obj[field] = value
}
/**
* Unset a field
* @type Model~modifierFunction
*/
lastStepModifierFunctions.$unset = (obj, field, value) => {
delete obj[field]
}
/**
* Push an element to the end of an array field
* Optional modifier $each instead of value to push several values
* Optional modifier $slice to slice the resulting array, see https://docs.mongodb.org/manual/reference/operator/update/slice/
* Différeence with MongoDB: if $slice is specified and not $each, we act as if value is an empty array
* @type Model~modifierFunction
* Create the complete modifier function
* @param {function} lastStepModifierFunction a lastStepModifierFunction
* @param {boolean} [unset = false] Bad-looking specific fix, needs to be generalized when modifiers that behave like $unset are implemented
* @return {modifierFunction}
* @private
*/
lastStepModifierFunctions.$push = (obj, field, value) => {
// Create the array if it doesn't exist
if (!Object.prototype.hasOwnProperty.call(obj, field)) obj[field] = []
const createModifierFunction = (lastStepModifierFunction, unset = false) => (obj, field, value) => {
const func = (obj, field, value) => {
const fieldParts = typeof field === 'string' ? field.split('.') : field
if (!Array.isArray(obj[field])) throw new Error('Can\'t $push an element on non-array values')
if (
value !== null &&
typeof value === 'object' &&
value.$slice &&
value.$each === undefined
) value.$each = []
if (value !== null && typeof value === 'object' && value.$each) {
if (
Object.keys(value).length >= 3 ||
(Object.keys(value).length === 2 && value.$slice === undefined)
) throw new Error('Can only use $slice in cunjunction with $each when $push to array')
if (!Array.isArray(value.$each)) throw new Error('$each requires an array value')
value.$each.forEach(v => {
obj[field].push(v)
})
if (value.$slice === undefined || typeof value.$slice !== 'number') return
if (value.$slice === 0) obj[field] = []
if (fieldParts.length === 1) lastStepModifierFunction(obj, field, value)
else {
let start
let end
const n = obj[field].length
if (value.$slice < 0) {
start = Math.max(0, n + value.$slice)
end = n
} else if (value.$slice > 0) {
start = 0
end = Math.min(n, value.$slice)
if (obj[fieldParts[0]] === undefined) {
if (unset) return
obj[fieldParts[0]] = {}
}
obj[field] = obj[field].slice(start, end)
func(obj[fieldParts[0]], fieldParts.slice(1), value)
}
} else {
obj[field].push(value)
}
return func(obj, field, value)
}
/**
* Add an element to an array field only if it is not already in it
* No modification if the element is already in the array
* Note that it doesn't check whether the original array contains duplicates
* @type Model~modifierFunction
*/
lastStepModifierFunctions.$addToSet = (obj, field, value) => {
const $addToSetPartial = (obj, field, value) => {
// Create the array if it doesn't exist
if (!Object.prototype.hasOwnProperty.call(obj, field)) { obj[field] = [] }
@ -344,7 +292,7 @@ lastStepModifierFunctions.$addToSet = (obj, field, value) => {
if (!Array.isArray(value.$each)) throw new Error('$each requires an array value')
value.$each.forEach(v => {
lastStepModifierFunctions.$addToSet(obj, field, v)
$addToSetPartial(obj, field, v)
})
} else {
let addToSet = true
@ -356,90 +304,133 @@ lastStepModifierFunctions.$addToSet = (obj, field, value) => {
}
/**
* Remove the first or last element of an array
* @type Model~modifierFunction
*/
lastStepModifierFunctions.$pop = (obj, field, value) => {
if (!Array.isArray(obj[field])) throw new Error('Can\'t $pop an element from non-array values')
if (typeof value !== 'number') throw new Error(`${value} isn't an integer, can't use it with $pop`)
if (value === 0) return
if (value > 0) obj[field] = obj[field].slice(0, obj[field].length - 1)
else obj[field] = obj[field].slice(1)
}
/**
* Removes all instances of a value from an existing array
* @type Model~modifierFunction
*/
lastStepModifierFunctions.$pull = (obj, field, value) => {
if (!Array.isArray(obj[field])) throw new Error('Can\'t $pull an element from non-array values')
const arr = obj[field]
for (let i = arr.length - 1; i >= 0; i -= 1) {
if (match(arr[i], value)) arr.splice(i, 1)
}
}
/**
* Increment a numeric field's value
* @type Model~modifierFunction
*/
lastStepModifierFunctions.$inc = (obj, field, value) => {
if (typeof value !== 'number') throw new Error(`${value} must be a number`)
if (typeof obj[field] !== 'number') {
if (!Object.prototype.hasOwnProperty.call(obj, field)) obj[field] = value
else throw new Error('Don\'t use the $inc modifier on non-number fields')
} else obj[field] += value
}
* @enum {modifierFunction}
*/
const modifierFunctions = {
/**
* Set a field to a new value
*/
$set: createModifierFunction((obj, field, value) => {
obj[field] = value
}),
/**
* Unset a field
*/
$unset: createModifierFunction((obj, field, value) => {
delete obj[field]
}, true),
/**
* Updates the value of the field, only if the specified value is smaller than the current value of the field
*/
$min: createModifierFunction((obj, field, value) => {
if (typeof obj[field] === 'undefined') obj[field] = value
else if (value < obj[field]) obj[field] = value
}),
/**
* Updates the value of the field, only if the specified value is greater than the current value of the field
*/
$max: createModifierFunction((obj, field, value) => {
if (typeof obj[field] === 'undefined') obj[field] = value
else if (value > obj[field]) obj[field] = value
}),
/**
* Increment a numeric field's value
*/
$inc: createModifierFunction((obj, field, value) => {
if (typeof value !== 'number') throw new Error(`${value} must be a number`)
if (typeof obj[field] !== 'number') {
if (!Object.prototype.hasOwnProperty.call(obj, field)) obj[field] = value
else throw new Error('Don\'t use the $inc modifier on non-number fields')
} else obj[field] += value
}),
/**
* Removes all instances of a value from an existing array
*/
$pull: createModifierFunction((obj, field, value) => {
if (!Array.isArray(obj[field])) throw new Error('Can\'t $pull an element from non-array values')
const arr = obj[field]
for (let i = arr.length - 1; i >= 0; i -= 1) {
if (match(arr[i], value)) arr.splice(i, 1)
}
}),
/**
* Remove the first or last element of an array
*/
$pop: createModifierFunction((obj, field, value) => {
if (!Array.isArray(obj[field])) throw new Error('Can\'t $pop an element from non-array values')
if (typeof value !== 'number') throw new Error(`${value} isn't an integer, can't use it with $pop`)
if (value === 0) return
if (value > 0) obj[field] = obj[field].slice(0, obj[field].length - 1)
else obj[field] = obj[field].slice(1)
}),
/**
* Add an element to an array field only if it is not already in it
* No modification if the element is already in the array
* Note that it doesn't check whether the original array contains duplicates
*/
$addToSet: createModifierFunction($addToSetPartial),
/**
* Push an element to the end of an array field
* Optional modifier $each instead of value to push several values
* Optional modifier $slice to slice the resulting array, see https://docs.mongodb.org/manual/reference/operator/update/slice/
* Difference with MongoDB: if $slice is specified and not $each, we act as if value is an empty array
*/
$push: createModifierFunction((obj, field, value) => {
// Create the array if it doesn't exist
if (!Object.prototype.hasOwnProperty.call(obj, field)) obj[field] = []
if (!Array.isArray(obj[field])) throw new Error('Can\'t $push an element on non-array values')
/**
* Updates the value of the field, only if specified field is greater than the current value of the field
* @type Model~modifierFunction
*/
lastStepModifierFunctions.$max = (obj, field, value) => {
if (typeof obj[field] === 'undefined') obj[field] = value
else if (value > obj[field]) obj[field] = value
}
if (
value !== null &&
typeof value === 'object' &&
value.$slice &&
value.$each === undefined
) value.$each = []
/**
* Updates the value of the field, only if specified field is smaller than the current value of the field
* @type Model~modifierFunction
*/
lastStepModifierFunctions.$min = (obj, field, value) => {
if (typeof obj[field] === 'undefined') obj[field] = value
else if (value < obj[field]) obj[field] = value
}
if (value !== null && typeof value === 'object' && value.$each) {
if (
Object.keys(value).length >= 3 ||
(Object.keys(value).length === 2 && value.$slice === undefined)
) throw new Error('Can only use $slice in conjunction with $each when $push to array')
if (!Array.isArray(value.$each)) throw new Error('$each requires an array value')
/**
* Create the complete modifier function
* @param {string} modifier one of lastStepModifierFunctions keys
* @return {Model~modifierFunction}
*/
const createModifierFunction = modifier => (obj, field, value) => {
const fieldParts = typeof field === 'string' ? field.split('.') : field
value.$each.forEach(v => {
obj[field].push(v)
})
if (fieldParts.length === 1) lastStepModifierFunctions[modifier](obj, field, value)
else {
if (obj[fieldParts[0]] === undefined) {
if (modifier === '$unset') return // Bad looking specific fix, needs to be generalized modifiers that behave like $unset are implemented
obj[fieldParts[0]] = {}
if (value.$slice === undefined || typeof value.$slice !== 'number') return
if (value.$slice === 0) obj[field] = []
else {
let start
let end
const n = obj[field].length
if (value.$slice < 0) {
start = Math.max(0, n + value.$slice)
end = n
} else if (value.$slice > 0) {
start = 0
end = Math.min(n, value.$slice)
}
obj[field] = obj[field].slice(start, end)
}
} else {
obj[field].push(value)
}
modifierFunctions[modifier](obj[fieldParts[0]], fieldParts.slice(1), value)
}
}
})
// Actually create all modifier functions
Object.keys(lastStepModifierFunctions).forEach(modifier => {
modifierFunctions[modifier] = createModifierFunction(modifier)
})
}
/**
* Modify a DB object according to an update query
* @param {document} obj
* @param {query} updateQuery
* @return {document}
* @alias module:model.modify
*/
const modify = (obj, updateQuery) => {
const keys = Object.keys(updateQuery)
@ -490,6 +481,7 @@ const modify = (obj, updateQuery) => {
* @param {object} obj
* @param {string} field
* @return {*}
* @alias module:model.getDotValue
*/
const getDotValue = (obj, field) => {
const fieldParts = typeof field === 'string' ? field.split('.') : field
@ -518,6 +510,7 @@ const getDotValue = (obj, field) => {
* @param {*} a
* @param {*} b
* @return {boolean}
* @alias module:model.areThingsEqual
*/
const areThingsEqual = (a, b) => {
// Strings, booleans, numbers, null
@ -566,6 +559,7 @@ const areThingsEqual = (a, b) => {
* @param {*} a
* @param {*} b
* @return {boolean}
* @private
*/
const areComparable = (a, b) => {
if (
@ -583,7 +577,7 @@ const areComparable = (a, b) => {
}
/**
* @callback Model~comparisonOperator
* @callback comparisonOperator
* Arithmetic and comparison operators
* @param {*} a Value in the object
* @param {*} b Value in the query
@ -591,165 +585,130 @@ const areComparable = (a, b) => {
*/
/**
* Lower than
* @type Model~comparisonOperator
*/
comparisonFunctions.$lt = (a, b) => areComparable(a, b) && a < b
/**
* Lower than or equals
* @type Model~comparisonOperator
*/
comparisonFunctions.$lte = (a, b) => areComparable(a, b) && a <= b
/**
* Greater than
* @type Model~comparisonOperator
*/
comparisonFunctions.$gt = (a, b) => areComparable(a, b) && a > b
/**
* Greater than or equals
* @type Model~comparisonOperator
*/
comparisonFunctions.$gte = (a, b) => areComparable(a, b) && a >= b
/**
* Does not equal
* @type Model~comparisonOperator
* @enum {comparisonOperator}
*/
comparisonFunctions.$ne = (a, b) => a === undefined || !areThingsEqual(a, b)
const comparisonFunctions = {
/** Lower than */
$lt: (a, b) => areComparable(a, b) && a < b,
/** Lower than or equals */
$lte: (a, b) => areComparable(a, b) && a <= b,
/** Greater than */
$gt: (a, b) => areComparable(a, b) && a > b,
/** Greater than or equals */
$gte: (a, b) => areComparable(a, b) && a >= b,
/** Does not equal */
$ne: (a, b) => a === undefined || !areThingsEqual(a, b),
/** Is in Array */
$in: (a, b) => {
if (!Array.isArray(b)) throw new Error('$in operator called with a non-array')
/**
* Is in Array
* @type Model~comparisonOperator
*/
comparisonFunctions.$in = (a, b) => {
if (!Array.isArray(b)) throw new Error('$in operator called with a non-array')
for (const el of b) {
if (areThingsEqual(a, el)) return true
}
for (const el of b) {
if (areThingsEqual(a, el)) return true
return false
},
/** Is not in Array */
$nin: (a, b) => {
if (!Array.isArray(b)) throw new Error('$nin operator called with a non-array')
return !comparisonFunctions.$in(a, b)
},
/** Matches Regexp */
$regex: (a, b) => {
if (!isRegExp(b)) throw new Error('$regex operator called with non regular expression')
if (typeof a !== 'string') return false
else return b.test(a)
},
/** Returns true if field exists */
$exists: (a, b) => {
// This will be true for all values of b except false, null, undefined and 0
// That's strange behaviour (we should only use true/false) but that's the way Mongo does it...
if (b || b === '') b = true
else b = false
if (a === undefined) return !b
else return b
},
/** Specific to Arrays, returns true if a length equals b */
$size: (a, b) => {
if (!Array.isArray(a)) return false
if (b % 1 !== 0) throw new Error('$size operator called without an integer')
return a.length === b
},
/** Specific to Arrays, returns true if some elements of a match the query b */
$elemMatch: (a, b) => {
if (!Array.isArray(a)) return false
return a.some(el => match(el, b))
}
return false
}
/**
* Is not in Array
* @type Model~comparisonOperator
*/
comparisonFunctions.$nin = (a, b) => {
if (!Array.isArray(b)) throw new Error('$nin operator called with a non-array')
return !comparisonFunctions.$in(a, b)
}
const arrayComparisonFunctions = { $size: true, $elemMatch: true }
/**
* Matches Regexp
* @type Model~comparisonOperator
* @enum
*/
comparisonFunctions.$regex = (a, b) => {
if (!isRegExp(b)) throw new Error('$regex operator called with non regular expression')
const logicalOperators = {
/**
* Match any of the subqueries
* @param {document} obj
* @param {query[]} query
* @return {boolean}
*/
$or: (obj, query) => {
if (!Array.isArray(query)) throw new Error('$or operator used without an array')
if (typeof a !== 'string') return false
else return b.test(a)
}
/**
* Returns true if field exists
* @type Model~comparisonOperator
*/
comparisonFunctions.$exists = (a, b) => {
// This will be true for all values of stat except false, null, undefined and 0
// That's strange behaviour (we should only use true/false) but that's the way Mongo does it...
if (b || b === '') b = true
else b = false
if (a === undefined) return !b
else return b
}
/**
* Specific to Arrays, returns true if a length equals b
* @type Model~comparisonOperator
*/
comparisonFunctions.$size = (a, b) => {
if (!Array.isArray(a)) return false
if (b % 1 !== 0) throw new Error('$size operator called without an integer')
return a.length === b
}
/**
* Specific to Arrays, returns true if some elements of a match the query b
* @type Model~comparisonOperator
*/
comparisonFunctions.$elemMatch = (a, b) => {
if (!Array.isArray(a)) return false
return a.some(el => match(el, b))
}
arrayComparisonFunctions.$size = true
arrayComparisonFunctions.$elemMatch = true
/**
* Match any of the subqueries
* @param {document} obj
* @param {query[]} query
* @return {boolean}
*/
logicalOperators.$or = (obj, query) => {
if (!Array.isArray(query)) throw new Error('$or operator used without an array')
for (let i = 0; i < query.length; i += 1) {
if (match(obj, query[i])) return true
}
return false
}
for (let i = 0; i < query.length; i += 1) {
if (match(obj, query[i])) return true
}
/**
* Match all of the subqueries
* @param {document} obj
* @param {query[]} query
* @return {boolean}
*/
logicalOperators.$and = (obj, query) => {
if (!Array.isArray(query)) throw new Error('$and operator used without an array')
return false
},
/**
* Match all of the subqueries
* @param {document} obj
* @param {query[]} query
* @return {boolean}
*/
$and: (obj, query) => {
if (!Array.isArray(query)) throw new Error('$and operator used without an array')
for (let i = 0; i < query.length; i += 1) {
if (!match(obj, query[i])) return false
}
for (let i = 0; i < query.length; i += 1) {
if (!match(obj, query[i])) return false
return true
},
/**
* Inverted match of the query
* @param {document} obj
* @param {query} query
* @return {boolean}
*/
$not: (obj, query) => !match(obj, query),
/**
* @callback whereCallback
* @param {document} obj
* @return {boolean}
*/
/**
* Use a function to match
* @param {document} obj
* @param {whereCallback} fn
* @return {boolean}
*/
$where: (obj, fn) => {
if (typeof fn !== 'function') throw new Error('$where operator used without a function')
const result = fn.call(obj)
if (typeof result !== 'boolean') throw new Error('$where function must return boolean')
return result
}
return true
}
/**
* Inverted match of the query
* @param {document} obj
* @param {query} query
* @return {boolean}
*/
logicalOperators.$not = (obj, query) => !match(obj, query)
/**
* @callback Model~whereCallback
* @param {document} obj
* @return {boolean}
*/
/**
* Use a function to match
* @param {document} obj
* @param {Model~whereCallback} fn
* @return {boolean}
*/
logicalOperators.$where = (obj, fn) => {
if (typeof fn !== 'function') throw new Error('$where operator used without a function')
const result = fn.call(obj)
if (typeof result !== 'boolean') throw new Error('$where function must return boolean')
return result
}
/**
@ -757,6 +716,7 @@ logicalOperators.$where = (obj, fn) => {
* @param {document} obj Document to check
* @param {query} query
* @return {boolean}
* @alias module:model.match
*/
const match = (obj, query) => {
// Primitive query against a primitive type
@ -778,10 +738,6 @@ const match = (obj, query) => {
return true
}
/**
* Match an object against a specific { key: value } part of a query
* if the treatObjAsValue flag is set, don't try to match every part separately, but the array as a whole
*/
/**
* Match an object against a specific { key: value } part of a query
* if the treatObjAsValue flag is set, don't try to match every part separately, but the array as a whole
@ -790,6 +746,7 @@ const match = (obj, query) => {
* @param {*} queryValue
* @param {boolean} [treatObjAsValue=false]
* @return {boolean}
* @private
*/
function matchQueryPart (obj, queryKey, queryValue, treatObjAsValue) {
const objValue = getDotValue(obj, queryKey)

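Not part of the commit: the rewritten createModifierFunction keeps the previous behaviour, walking the dot-notation path and creating intermediate objects as needed, except for $unset which stops early when the path does not exist. For instance:

const model = require('./lib/model.js')

model.modify({ _id: 1, a: { b: 2 } }, { $set: { 'a.c': 3 } })
// => { _id: 1, a: { b: 2, c: 3 } }

model.modify({ _id: 1 }, { $unset: { 'x.y': true } })
// => { _id: 1 }  (no empty intermediate object is created for $unset)

model.modify({ _id: 1, tags: ['a'] }, { $push: { tags: { $each: ['b', 'c'], $slice: 2 } } })
// => { _id: 1, tags: [ 'a', 'b' ] }
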
@ -1,9 +1,3 @@
/**
* Handle every persistence-related task
* The interface Datastore expects to be implemented is
* * Persistence.loadDatabase(callback) and callback has signature err
* * Persistence.persistNewState(newDocs, callback) where newDocs is an array of documents and callback has signature err
*/
const path = require('path')
const { callbackify, promisify, deprecate } = require('util')
const byline = require('./byline')
@ -12,12 +6,17 @@ const Index = require('./indexes.js')
const model = require('./model.js')
const storage = require('./storage.js')
/**
* Handle every persistence-related task
*/
class Persistence {
/**
* Create a new Persistence object for database options.db
* @param {Datastore} options.db
   * @param {number} [options.corruptAlertThreshold] Optional, threshold after which an alert is thrown if too much data is corrupt
   * @param {string} [options.nodeWebkitAppName] Optional, specify the name of your NW app if you want options.filename to be relative to the directory where Node Webkit stores application data such as cookies and local storage (the best place to store data in my opinion)
   * @param {function} [options.afterSerialization] Hook you can use to transform data after it was serialized and before it is written to disk.
   * @param {function} [options.beforeDeserialization] Inverse of `afterSerialization`.
*/
constructor (options) {
this.db = options.db
@ -61,17 +60,12 @@ class Persistence {
}
}
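// Illustrative sketch, not part of this commit: these options are normally passed through the
// Datastore constructor, which forwards them here; the base64 hook pair is only an example.
//   const db = new Datastore({
//     filename: 'data.db',
//     corruptAlertThreshold: 0.1,
//     afterSerialization: line => Buffer.from(line, 'utf8').toString('base64'),
//     beforeDeserialization: line => Buffer.from(line, 'base64').toString('utf8')
//   })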
/**
* @callback Persistence~persistCachedDatabaseCallback
* @param {?Error} err
*/
/**
* Persist cached database
* This serves as a compaction function since the cache always contains only the number of documents in the collection
* while the data file is append-only so it may grow larger
* This is an internal function, use compactDataFile which uses the executor
* @param {Persistence~persistCachedDatabaseCallback} callback Optional callback, signature: err
* @param {NoParamCallback} callback Optional callback, signature: err
*/
persistCachedDatabase (callback = () => {}) {
return callbackify(this.persistCachedDatabaseAsync.bind(this))(callback)
@ -108,14 +102,9 @@ class Persistence {
this.db.emit('compaction.done')
}
/**
* @callback Persistence~compactDataFileCallback
* @param {?Error} err
*/
/**
* Queue a rewrite of the datafile
* @param {Persistence~compactDataFileCallback} [callback = () => {}] Optional callback, signature: err
* @param {NoParamCallback} [callback = () => {}] Optional callback, signature: err
*/
compactDatafile (callback = () => {}) {
this.db.executor.push({ this: this, fn: this.persistCachedDatabase, arguments: [callback] })
@ -151,16 +140,11 @@ class Persistence {
if (this.autocompactionIntervalId) clearInterval(this.autocompactionIntervalId)
}
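// Illustrative usage, not part of this commit, assuming `db` is a Datastore instance:
//   db.persistence.compactDatafile()                  // queue a one-off compaction
//   db.persistence.setAutocompactionInterval(3600000) // compact roughly every hour
//   db.persistence.stopAutocompaction()               // cancel automatic compaction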
/**
* @callback Persistence~persistNewStateCallback
* @param {?Error} err
*/
/**
* Persist new state for the given newDocs (can be insertion, update or removal)
* Use an append-only format
* @param {string[]} newDocs Can be empty if no doc was updated/removed
* @param {Persistence~persistNewStateCallback} [callback = () => {}] Optional, signature: err
* @param {NoParamCallback} [callback = () => {}] Optional, signature: err
*/
persistNewState (newDocs, callback = () => {}) {
callbackify(this.persistNewStateAsync.bind(this))(newDocs, err => callback(err))
@ -169,7 +153,7 @@ class Persistence {
/**
* Persist new state for the given newDocs (can be insertion, update or removal)
* Use an append-only format
* @param {string[]} newDocs Can be empty if no doc was updated/removed
* @param {document[]} newDocs Can be empty if no doc was updated/removed
* @return {Promise}
*/
async persistNewStateAsync (newDocs) {
@ -295,11 +279,6 @@ class Persistence {
return promisify(this.treatRawStream.bind(this))(rawStream)
}
/**
* @callback Persistence~loadDatabaseCallback
* @param {?Error} err
*/
/**
* Load the database
* 1) Create all indexes
@ -308,7 +287,7 @@ class Persistence {
* This means pulling data out of the data file or creating it if it doesn't exist
* Also, all data is persisted right away, which has the effect of compacting the database file
* This operation is very quick at startup for a big collection (60ms for ~10k docs)
* @param {Persistence~loadDatabaseCallback} callback Optional callback, signature: err
* @param {NoParamCallback} callback Optional callback, signature: err
*/
loadDatabase (callback = () => {}) {
callbackify(this.loadDatabaseAsync.bind(this))(err => callback(err))
@ -359,15 +338,10 @@ class Persistence {
this.db.executor.processBuffer()
}
/**
* @callback Persistence~ensureDirectoryExistsCallback
* @param {?Error} err
*/
/**
* Check if a directory stat and create it on the fly if it is not the case
* @param {string} dir
* @param {Persistence~ensureDirectoryExistsCallback} [callback = () => {}] optional callback, signature: err
* @param {NoParamCallback} [callback = () => {}] optional callback, signature: err
*/
static ensureDirectoryExists (dir, callback = () => {}) {
storage.mkdir(dir, { recursive: true }, err => { callback(err) })

@ -1,76 +1,212 @@
/**
* Way data is stored for this database
* For a Node.js/Node Webkit database it's the file system
* For a browser-side database it's localforage which chooses the best option depending on user browser (IndexedDB then WebSQL then localStorage)
* For a browser-side database it's localforage, which uses the best backend available (IndexedDB then WebSQL then localStorage)
* For a react-native database, we use @react-native-async-storage/async-storage
*
* This version is the Node.js/Node Webkit version
* It's essentially fs, mkdirp and crash safe write and read functions
* @module storage
*/
const fs = require('fs')
const fsPromises = fs.promises
const path = require('path')
const { callbackify, promisify } = require('util')
const storage = {}
const { Readable } = require('stream')
/**
* @callback Storage~existsCallback
* @callback module:storage~existsCallback
* @param {boolean} exists
*/
/**
* Callback returns true if file exists
* @param {string} file
* @param {Storage~existsCallback} cb
* @param {module:storage~existsCallback} cb
* @alias module:storage.exists
*/
// eslint-disable-next-line node/no-callback-literal
storage.exists = (file, cb) => fs.access(file, fs.constants.F_OK, (err) => { cb(!err) })
const exists = (file, cb) => fs.access(file, fs.constants.F_OK, (err) => { cb(!err) })
/**
* Returns Promise<true> if file exists
* @param {string} file
* @return {Promise<boolean>}
* @async
* @alias module:storage.existsAsync
*/
storage.existsAsync = file => fsPromises.access(file, fs.constants.F_OK).then(() => true, () => false)
storage.rename = fs.rename
storage.renameAsync = fsPromises.rename
storage.writeFile = fs.writeFile
storage.writeFileAsync = fsPromises.writeFile
storage.writeFileStream = fs.createWriteStream
storage.unlink = fs.unlink
storage.unlinkAsync = fsPromises.unlink
storage.appendFile = fs.appendFile
storage.appendFileAsync = fsPromises.appendFile
storage.readFile = fs.readFile
storage.readFileAsync = fsPromises.readFile
storage.readFileStream = fs.createReadStream
storage.mkdir = fs.mkdir
storage.mkdirAsync = fsPromises.mkdir
const existsAsync = file => fsPromises.access(file, fs.constants.F_OK).then(() => true, () => false)
/**
* Node.js' fs.rename
* @function
* @param {string} oldPath
* @param {string} newPath
* @param {NoParamCallback} c
* @return {void}
* @alias module:storage.rename
*/
const rename = fs.rename
/**
* Node.js' fs.promises.rename
* @function
* @param {string} oldPath
* @param {string} newPath
* @return {Promise<void>}
* @alias module:storage.renameAsync
* @async
*/
const renameAsync = fsPromises.rename
/**
* Node.js' fs.writeFile
* @function
* @param {string} path
* @param {string} data
* @param {object} options
* @param {function} callback
* @alias module:storage.writeFile
*/
const writeFile = fs.writeFile
/**
* Node.js' fs.promises.writeFile
* @function
* @param {string} path
* @param {string} data
* @param {object} [options]
* @return {Promise<void>}
* @alias module:storage.writeFileAsync
* @async
*/
const writeFileAsync = fsPromises.writeFile
/**
* Node.js' fs.createWriteStream
* @function
* @param {string} path
* @param {Object} [options]
* @return {fs.WriteStream}
* @alias module:storage.writeFileStream
*/
const writeFileStream = fs.createWriteStream
/**
* Node.js' fs.unlink
* @function
* @param {string} path
* @param {function} callback
* @alias module:storage.unlink
*/
const unlink = fs.unlink
/**
* Node.js' fs.promises.unlink
* @function
* @param {string} path
* @return {Promise<void>}
* @async
* @alias module:storage.unlinkAsync
*/
const unlinkAsync = fsPromises.unlink
/**
* Node.js' fs.appendFile
* @function
* @param {string} path
* @param {string} data
* @param {object} options
* @param {function} callback
* @alias module:storage.appendFile
*/
const appendFile = fs.appendFile
/**
* Node.js' fs.promises.appendFile
* @function
* @param {string} path
* @param {string} data
* @param {object} [options]
* @return {Promise<void>}
* @alias module:storage.appendFileAsync
* @async
*/
const appendFileAsync = fsPromises.appendFile
/**
* Node.js' fs.readFile
* @function
* @param {string} path
* @param {object} options
* @param {function} callback
* @alias module:storage.readFile
*/
const readFile = fs.readFile
/**
* Node.js' fs.promises.readFile
* @function
* @param {string} path
* @param {object} [options]
* @return {Promise<Buffer>}
* @alias module:storage.readFileAsync
* @async
*/
const readFileAsync = fsPromises.readFile
/**
* Node.js' fs.createReadStream
* @function
* @param {string} path
* @param {Object} [options]
* @return {fs.ReadStream}
* @alias module:storage.readFileStream
*/
const readFileStream = fs.createReadStream
/**
* Node.js' fs.mkdir
* @function
* @param {string} path
* @param {object} options
* @param {function} callback
* @alias module:storage.mkdir
*/
const mkdir = fs.mkdir
/**
* Node.js' fs.promises.mkdir
* @function
* @param {string} path
* @param {object} options
* @return {Promise<void|string>}
* @alias module:storage.mkdirAsync
* @async
*/
const mkdirAsync = fsPromises.mkdir
/**
 * @param {string} file
 * @return {Promise<void>}
 * @alias module:storage.ensureFileDoesntExistAsync
 * @async
 */
const ensureFileDoesntExistAsync = async file => {
if (await existsAsync(file)) await unlinkAsync(file)
}
/**
* @param {string} file
* @param {Storage~errorCallback} callback
* @param {NoParamCallback} callback
* @alias module:storage.ensureFileDoesntExist
*/
storage.ensureFileDoesntExist = (file, callback) => callbackify(storage.ensureFileDoesntExistAsync)(file, err => callback(err))
const ensureFileDoesntExist = (file, callback) => callbackify(ensureFileDoesntExistAsync)(file, err => callback(err))
/**
* Flush data in OS buffer to storage if corresponding option is set
* @param {object|string} options If options is a string, it is assumed that the flush of the file (not dir) called options was requested
* @param {string} [options.filename]
* @param {boolean} [options.isDir = false] Optional, defaults to false
* @param {Storage~errorCallback} callback
* @param {NoParamCallback} callback
* @alias module:storage.flushToStorage
*/
storage.flushToStorage = (options, callback) => callbackify(storage.flushToStorageAsync)(options, callback)
const flushToStorage = (options, callback) => callbackify(flushToStorageAsync)(options, callback)
/**
* Flush data in OS buffer to storage if corresponding option is set
@ -78,8 +214,10 @@ storage.flushToStorage = (options, callback) => callbackify(storage.flushToStora
* @param {string} [options.filename]
* @param {boolean} [options.isDir = false] Optional, defaults to false
* @return {Promise<void>}
* @alias module:storage.flushToStorageAsync
* @async
*/
storage.flushToStorageAsync = async (options) => {
const flushToStorageAsync = async (options) => {
let filename
let flags
if (typeof options === 'string') {
@ -131,11 +269,12 @@ storage.flushToStorageAsync = async (options) => {
* Fully write or rewrite the datafile
* @param {string} filename
* @param {string[]} lines
* @param {Storage~errorCallback} callback
* @param {NoParamCallback} callback
* @alias module:storage.writeFileLines
*/
storage.writeFileLines = (filename, lines, callback = () => {}) => {
const writeFileLines = (filename, lines, callback = () => {}) => {
try {
const stream = storage.writeFileStream(filename)
const stream = writeFileStream(filename)
const readable = Readable.from(lines)
readable.on('data', (line) => {
try {
@ -158,67 +297,109 @@ storage.writeFileLines = (filename, lines, callback = () => {}) => {
* @param {string} filename
* @param {string[]} lines
* @return {Promise<void>}
* @alias module:storage.writeFileLinesAsync
* @async
*/
storage.writeFileLinesAsync = (filename, lines) => promisify(storage.writeFileLines)(filename, lines)
const writeFileLinesAsync = (filename, lines) => promisify(writeFileLines)(filename, lines)
/**
* Fully write or rewrite the datafile, immune to crashes during the write operation (data will not be lost)
* @param {string} filename
* @param {string[]} lines
* @param {Storage~errorCallback} callback Optional callback, signature: err
* @param {NoParamCallback} [callback] Optional callback, signature: err
* @alias module:storage.crashSafeWriteFileLines
*/
storage.crashSafeWriteFileLines = (filename, lines, callback = () => {}) => {
callbackify(storage.crashSafeWriteFileLinesAsync)(filename, lines, callback)
const crashSafeWriteFileLines = (filename, lines, callback = () => {}) => {
callbackify(crashSafeWriteFileLinesAsync)(filename, lines, callback)
}
/**
* Fully write or rewrite the datafile, immune to crashes during the write operation (data will not be lost)
* @param {string} filename
* @param {string[]} lines
* @return {Promise<void>}
* @alias module:storage.crashSafeWriteFileLinesAsync
*/
storage.crashSafeWriteFileLinesAsync = async (filename, lines) => {
const crashSafeWriteFileLinesAsync = async (filename, lines) => {
const tempFilename = filename + '~'
await storage.flushToStorageAsync({ filename: path.dirname(filename), isDir: true })
await flushToStorageAsync({ filename: path.dirname(filename), isDir: true })
const exists = await storage.existsAsync(filename)
if (exists) await storage.flushToStorageAsync({ filename })
const exists = await existsAsync(filename)
if (exists) await flushToStorageAsync({ filename })
await storage.writeFileLinesAsync(tempFilename, lines)
await writeFileLinesAsync(tempFilename, lines)
await storage.flushToStorageAsync(tempFilename)
await flushToStorageAsync(tempFilename)
await storage.renameAsync(tempFilename, filename)
await renameAsync(tempFilename, filename)
await storage.flushToStorageAsync({ filename: path.dirname(filename), isDir: true })
await flushToStorageAsync({ filename: path.dirname(filename), isDir: true })
}
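// Illustrative usage, not part of this commit: Persistence typically calls this helper when
// compacting; `model.serialize` is assumed to be lib/model.js' serializer and `docs` a
// hypothetical in-memory collection.
//   const lines = docs.map(doc => model.serialize(doc))
//   await crashSafeWriteFileLinesAsync('movies.db', lines)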
/**
* Ensure the datafile contains all the data, even if there was a crash during a full file write
* @param {string} filename
* @param {Storage~errorCallback} callback signature: err
* @param {NoParamCallback} callback signature: err
* @alias module:storage.ensureDatafileIntegrity
*/
storage.ensureDatafileIntegrity = (filename, callback) => callbackify(storage.ensureDatafileIntegrityAsync)(filename, callback)
const ensureDatafileIntegrity = (filename, callback) => callbackify(ensureDatafileIntegrityAsync)(filename, callback)
/**
* Ensure the datafile contains all the data, even if there was a crash during a full file write
* @param {string} filename
* @return {Promise<void>}
* @alias module:storage.ensureDatafileIntegrityAsync
*/
storage.ensureDatafileIntegrityAsync = async filename => {
const ensureDatafileIntegrityAsync = async filename => {
const tempFilename = filename + '~'
const filenameExists = await storage.existsAsync(filename)
const filenameExists = await existsAsync(filename)
// Write was successful
if (filenameExists) return
const oldFilenameExists = await storage.existsAsync(tempFilename)
const oldFilenameExists = await existsAsync(tempFilename)
// New database
if (!oldFilenameExists) await storage.writeFileAsync(filename, '', 'utf8')
if (!oldFilenameExists) await writeFileAsync(filename, '', 'utf8')
// Write failed, use old version
else await storage.renameAsync(tempFilename, filename)
else await renameAsync(tempFilename, filename)
}
// Interface
module.exports = storage
module.exports.exists = exists
module.exports.existsAsync = existsAsync
module.exports.rename = rename
module.exports.renameAsync = renameAsync
module.exports.writeFile = writeFile
module.exports.writeFileAsync = writeFileAsync
module.exports.writeFileLines = writeFileLines
module.exports.writeFileLinesAsync = writeFileLinesAsync
module.exports.crashSafeWriteFileLines = crashSafeWriteFileLines
module.exports.crashSafeWriteFileLinesAsync = crashSafeWriteFileLinesAsync
module.exports.appendFile = appendFile
module.exports.appendFileAsync = appendFileAsync
module.exports.readFile = readFile
module.exports.readFileAsync = readFileAsync
module.exports.unlink = unlink
module.exports.unlinkAsync = unlinkAsync
module.exports.mkdir = mkdir
module.exports.mkdirAsync = mkdirAsync
module.exports.readFileStream = writeFileStream
module.exports.readFileStream = readFileStream
module.exports.flushToStorage = flushToStorage
module.exports.flushToStorageAsync = flushToStorageAsync
module.exports.ensureDatafileIntegrity = ensureDatafileIntegrity
module.exports.ensureDatafileIntegrityAsync = ensureDatafileIntegrityAsync
module.exports.ensureFileDoesntExist = ensureFileDoesntExist
module.exports.ensureFileDoesntExistAsync = ensureFileDoesntExistAsync

@ -1,3 +1,10 @@
/**
* Utility functions for all environments.
* This replaces the underscore dependency.
*
* @module utils
*/
/**
* Produces a duplicate-free version of the array, using === to test object equality. In particular only the first
* occurrence of each value is kept. If you want to compute unique items based on a transformation, pass an iteratee
@ -7,6 +14,7 @@
* @param {function} [iteratee] transformation applied to every element before checking for duplicates. This will not
* transform the items in the result.
* @return {Array}
* @alias module:utils.uniq
*/
const uniq = (array, iteratee) => {
if (iteratee) return [...(new Map(array.map(x => [iteratee(x), x]))).values()]
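// Illustrative usage, not part of this commit:
//   uniq(['a', 'b', 'a', 'c'])        // ['a', 'b', 'c']
//   uniq([1.2, 1.8, 2.4], Math.floor) // one element kept per Math.floor key, e.g. [1.8, 2.4]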
@ -26,6 +34,7 @@ const isObject = arg => typeof arg === 'object' && arg !== null
* Heavily inspired by https://underscorejs.org/#isDate
* @param {*} d
* @return {boolean}
* @alias module:utils.isDate
*/
const isDate = d => isObject(d) && Object.prototype.toString.call(d) === '[object Date]'
@ -34,6 +43,7 @@ const isDate = d => isObject(d) && Object.prototype.toString.call(d) === '[objec
* Heavily inspired by https://underscorejs.org/#isRegExp
* @param {*} re
* @return {boolean}
* @alias module:utils.isRegExp
*/
const isRegExp = re => isObject(re) && Object.prototype.toString.call(re) === '[object RegExp]'

326  package-lock.json (generated)

@ -21,7 +21,6 @@
"events": "^3.3.0",
"jest": "^27.3.1",
"jquery": "^3.6.0",
"jsdoc": "^3.6.7",
"karma": "^6.3.2",
"karma-chai": "^0.1.0",
"karma-chrome-launcher": "^3.1.0",
@ -4434,12 +4433,6 @@
"node": ">=8"
}
},
"node_modules/bluebird": {
"version": "3.7.2",
"resolved": "https://registry.npmjs.org/bluebird/-/bluebird-3.7.2.tgz",
"integrity": "sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg==",
"dev": true
},
"node_modules/body-parser": {
"version": "1.19.1",
"resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.19.1.tgz",
@ -4712,18 +4705,6 @@
"node": "6.* || 8.* || >= 10.*"
}
},
"node_modules/catharsis": {
"version": "0.9.0",
"resolved": "https://registry.npmjs.org/catharsis/-/catharsis-0.9.0.tgz",
"integrity": "sha512-prMTQVpcns/tzFgFVkVp6ak6RykZyWb3gu8ckUpd6YkTlacOd3DXGJjIpD4Q6zJirizvaiAjSSHlOsA+6sNh2A==",
"dev": true,
"dependencies": {
"lodash": "^4.17.15"
},
"engines": {
"node": ">= 10"
}
},
"node_modules/chai": {
"version": "4.3.4",
"resolved": "https://registry.npmjs.org/chai/-/chai-4.3.4.tgz",
@ -5738,12 +5719,6 @@
"integrity": "sha1-6WQhkyWiHQX0RGai9obtbOX13R0=",
"dev": true
},
"node_modules/entities": {
"version": "2.0.3",
"resolved": "https://registry.npmjs.org/entities/-/entities-2.0.3.tgz",
"integrity": "sha512-MyoZ0jgnLvB2X3Lg5HqpFmn1kybDiIfEQmKzTb5apr51Rb+T3KdmMiqa70T+bhGnyv7bQ6WMj2QMHpGMmlrUYQ==",
"dev": true
},
"node_modules/envinfo": {
"version": "7.8.1",
"resolved": "https://registry.npmjs.org/envinfo/-/envinfo-7.8.1.tgz",
@ -9105,15 +9080,6 @@
"js-yaml": "bin/js-yaml.js"
}
},
"node_modules/js2xmlparser": {
"version": "4.0.2",
"resolved": "https://registry.npmjs.org/js2xmlparser/-/js2xmlparser-4.0.2.tgz",
"integrity": "sha512-6n4D8gLlLf1n5mNLQPRfViYzu9RATblzPEtm1SthMX1Pjao0r9YI9nw7ZIfRxQMERS87mcswrg+r/OYrPRX6jA==",
"dev": true,
"dependencies": {
"xmlcreate": "^2.0.4"
}
},
"node_modules/jsc-android": {
"version": "250230.2.1",
"resolved": "https://registry.npmjs.org/jsc-android/-/jsc-android-250230.2.1.tgz",
@ -9306,64 +9272,6 @@
"signal-exit": "^3.0.2"
}
},
"node_modules/jsdoc": {
"version": "3.6.7",
"resolved": "https://registry.npmjs.org/jsdoc/-/jsdoc-3.6.7.tgz",
"integrity": "sha512-sxKt7h0vzCd+3Y81Ey2qinupL6DpRSZJclS04ugHDNmRUXGzqicMJ6iwayhSA0S0DwwX30c5ozyUthr1QKF6uw==",
"dev": true,
"dependencies": {
"@babel/parser": "^7.9.4",
"bluebird": "^3.7.2",
"catharsis": "^0.9.0",
"escape-string-regexp": "^2.0.0",
"js2xmlparser": "^4.0.1",
"klaw": "^3.0.0",
"markdown-it": "^10.0.0",
"markdown-it-anchor": "^5.2.7",
"marked": "^2.0.3",
"mkdirp": "^1.0.4",
"requizzle": "^0.2.3",
"strip-json-comments": "^3.1.0",
"taffydb": "2.6.2",
"underscore": "~1.13.1"
},
"bin": {
"jsdoc": "jsdoc.js"
},
"engines": {
"node": ">=8.15.0"
}
},
"node_modules/jsdoc/node_modules/escape-string-regexp": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz",
"integrity": "sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w==",
"dev": true,
"engines": {
"node": ">=8"
}
},
"node_modules/jsdoc/node_modules/klaw": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/klaw/-/klaw-3.0.0.tgz",
"integrity": "sha512-0Fo5oir+O9jnXu5EefYbVK+mHMBeEVEy2cmctR1O1NECcCkPRreJKrS6Qt/j3KC2C148Dfo9i3pCmCMsdqGr0g==",
"dev": true,
"dependencies": {
"graceful-fs": "^4.1.9"
}
},
"node_modules/jsdoc/node_modules/mkdirp": {
"version": "1.0.4",
"resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz",
"integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==",
"dev": true,
"bin": {
"mkdirp": "bin/cmd.js"
},
"engines": {
"node": ">=10"
}
},
"node_modules/jsdom": {
"version": "16.7.0",
"resolved": "https://registry.npmjs.org/jsdom/-/jsdom-16.7.0.tgz",
@ -9647,15 +9555,6 @@
"immediate": "~3.0.5"
}
},
"node_modules/linkify-it": {
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/linkify-it/-/linkify-it-2.2.0.tgz",
"integrity": "sha512-GnAl/knGn+i1U/wjBz3akz2stz+HrHLsxMwHQGofCDfPvlf+gDKN58UtfmUquTY4/MXeE2x7k19KQmeoZi94Iw==",
"dev": true,
"dependencies": {
"uc.micro": "^1.0.1"
}
},
"node_modules/load-json-file": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-4.0.0.tgz",
@ -9955,43 +9854,6 @@
"node": ">=0.10.0"
}
},
"node_modules/markdown-it": {
"version": "10.0.0",
"resolved": "https://registry.npmjs.org/markdown-it/-/markdown-it-10.0.0.tgz",
"integrity": "sha512-YWOP1j7UbDNz+TumYP1kpwnP0aEa711cJjrAQrzd0UXlbJfc5aAq0F/PZHjiioqDC1NKgvIMX+o+9Bk7yuM2dg==",
"dev": true,
"dependencies": {
"argparse": "^1.0.7",
"entities": "~2.0.0",
"linkify-it": "^2.0.0",
"mdurl": "^1.0.1",
"uc.micro": "^1.0.5"
},
"bin": {
"markdown-it": "bin/markdown-it.js"
}
},
"node_modules/markdown-it-anchor": {
"version": "5.3.0",
"resolved": "https://registry.npmjs.org/markdown-it-anchor/-/markdown-it-anchor-5.3.0.tgz",
"integrity": "sha512-/V1MnLL/rgJ3jkMWo84UR+K+jF1cxNG1a+KwqeXqTIJ+jtA8aWSHuigx8lTzauiIjBDbwF3NcWQMotd0Dm39jA==",
"dev": true,
"peerDependencies": {
"markdown-it": "*"
}
},
"node_modules/marked": {
"version": "2.1.3",
"resolved": "https://registry.npmjs.org/marked/-/marked-2.1.3.tgz",
"integrity": "sha512-/Q+7MGzaETqifOMWYEA7HVMaZb4XbcRfaOzcSsHZEith83KGlvaSG33u0SKu89Mj5h+T8V2hM+8O45Qc5XTgwA==",
"dev": true,
"bin": {
"marked": "bin/marked"
},
"engines": {
"node": ">= 10"
}
},
"node_modules/md5": {
"version": "2.3.0",
"resolved": "https://registry.npmjs.org/md5/-/md5-2.3.0.tgz",
@ -10003,12 +9865,6 @@
"is-buffer": "~1.1.6"
}
},
"node_modules/mdurl": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/mdurl/-/mdurl-1.0.1.tgz",
"integrity": "sha1-/oWy7HWlkDfyrf7BAP1sYBdhFS4=",
"dev": true
},
"node_modules/media-typer": {
"version": "0.3.0",
"resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz",
@ -13082,15 +12938,6 @@
"integrity": "sha1-kl0mAdOaxIXgkc8NpcbmlNw9yv8=",
"dev": true
},
"node_modules/requizzle": {
"version": "0.2.3",
"resolved": "https://registry.npmjs.org/requizzle/-/requizzle-0.2.3.tgz",
"integrity": "sha512-YanoyJjykPxGHii0fZP0uUPEXpvqfBDxWV7s6GKAiiOsiqhX6vHNyW3Qzdmqp/iq/ExbhaGbVrjB4ruEVSM4GQ==",
"dev": true,
"dependencies": {
"lodash": "^4.17.14"
}
},
"node_modules/resolve": {
"version": "1.20.0",
"resolved": "https://registry.npmjs.org/resolve/-/resolve-1.20.0.tgz",
@ -14814,12 +14661,6 @@
"url": "https://github.com/chalk/slice-ansi?sponsor=1"
}
},
"node_modules/taffydb": {
"version": "2.6.2",
"resolved": "https://registry.npmjs.org/taffydb/-/taffydb-2.6.2.tgz",
"integrity": "sha1-fLy2S1oUG2ou/CxdLGe04VCyomg=",
"dev": true
},
"node_modules/tapable": {
"version": "2.2.1",
"resolved": "https://registry.npmjs.org/tapable/-/tapable-2.2.1.tgz",
@ -15338,12 +15179,6 @@
"node": "*"
}
},
"node_modules/uc.micro": {
"version": "1.0.6",
"resolved": "https://registry.npmjs.org/uc.micro/-/uc.micro-1.0.6.tgz",
"integrity": "sha512-8Y75pvTYkLJW2hWQHXxoqRgV7qb9B+9vFEtidML+7koHUFapnVJAZ6cKs+Qjz5Aw3aZWHMC6u0wJE3At+nSGwA==",
"dev": true
},
"node_modules/uglify-es": {
"version": "3.3.9",
"resolved": "https://registry.npmjs.org/uglify-es/-/uglify-es-3.3.9.tgz",
@ -15391,12 +15226,6 @@
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/underscore": {
"version": "1.13.2",
"resolved": "https://registry.npmjs.org/underscore/-/underscore-1.13.2.tgz",
"integrity": "sha512-ekY1NhRzq0B08g4bGuX4wd2jZx5GnKz6mKSqFL4nqBlfyMGiG10gDFhDTMEfYmDL6Jy0FUIZp7wiRB+0BP7J2g==",
"dev": true
},
"node_modules/unicode-canonical-property-names-ecmascript": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/unicode-canonical-property-names-ecmascript/-/unicode-canonical-property-names-ecmascript-2.0.0.tgz",
@ -16083,12 +15912,6 @@
"integrity": "sha512-JZnDKK8B0RCDw84FNdDAIpZK+JuJw+s7Lz8nksI7SIuU3UXJJslUthsi+uWBUYOwPFwW7W7PRLRfUKpxjtjFCw==",
"dev": true
},
"node_modules/xmlcreate": {
"version": "2.0.4",
"resolved": "https://registry.npmjs.org/xmlcreate/-/xmlcreate-2.0.4.tgz",
"integrity": "sha512-nquOebG4sngPmGPICTS5EnxqhKbCmz5Ox5hsszI2T6U5qdrJizBc+0ilYSEjTSzU0yZcmvppztXe/5Al5fUwdg==",
"dev": true
},
"node_modules/xmldoc": {
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/xmldoc/-/xmldoc-1.1.2.tgz",
@ -19647,12 +19470,6 @@
"integrity": "sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==",
"dev": true
},
"bluebird": {
"version": "3.7.2",
"resolved": "https://registry.npmjs.org/bluebird/-/bluebird-3.7.2.tgz",
"integrity": "sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg==",
"dev": true
},
"body-parser": {
"version": "1.19.1",
"resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.19.1.tgz",
@ -19876,15 +19693,6 @@
"rsvp": "^4.8.4"
}
},
"catharsis": {
"version": "0.9.0",
"resolved": "https://registry.npmjs.org/catharsis/-/catharsis-0.9.0.tgz",
"integrity": "sha512-prMTQVpcns/tzFgFVkVp6ak6RykZyWb3gu8ckUpd6YkTlacOd3DXGJjIpD4Q6zJirizvaiAjSSHlOsA+6sNh2A==",
"dev": true,
"requires": {
"lodash": "^4.17.15"
}
},
"chai": {
"version": "4.3.4",
"resolved": "https://registry.npmjs.org/chai/-/chai-4.3.4.tgz",
@ -20696,12 +20504,6 @@
"integrity": "sha1-6WQhkyWiHQX0RGai9obtbOX13R0=",
"dev": true
},
"entities": {
"version": "2.0.3",
"resolved": "https://registry.npmjs.org/entities/-/entities-2.0.3.tgz",
"integrity": "sha512-MyoZ0jgnLvB2X3Lg5HqpFmn1kybDiIfEQmKzTb5apr51Rb+T3KdmMiqa70T+bhGnyv7bQ6WMj2QMHpGMmlrUYQ==",
"dev": true
},
"envinfo": {
"version": "7.8.1",
"resolved": "https://registry.npmjs.org/envinfo/-/envinfo-7.8.1.tgz",
@ -23247,15 +23049,6 @@
"esprima": "^4.0.0"
}
},
"js2xmlparser": {
"version": "4.0.2",
"resolved": "https://registry.npmjs.org/js2xmlparser/-/js2xmlparser-4.0.2.tgz",
"integrity": "sha512-6n4D8gLlLf1n5mNLQPRfViYzu9RATblzPEtm1SthMX1Pjao0r9YI9nw7ZIfRxQMERS87mcswrg+r/OYrPRX6jA==",
"dev": true,
"requires": {
"xmlcreate": "^2.0.4"
}
},
"jsc-android": {
"version": "250230.2.1",
"resolved": "https://registry.npmjs.org/jsc-android/-/jsc-android-250230.2.1.tgz",
@ -23423,51 +23216,6 @@
}
}
},
"jsdoc": {
"version": "3.6.7",
"resolved": "https://registry.npmjs.org/jsdoc/-/jsdoc-3.6.7.tgz",
"integrity": "sha512-sxKt7h0vzCd+3Y81Ey2qinupL6DpRSZJclS04ugHDNmRUXGzqicMJ6iwayhSA0S0DwwX30c5ozyUthr1QKF6uw==",
"dev": true,
"requires": {
"@babel/parser": "^7.9.4",
"bluebird": "^3.7.2",
"catharsis": "^0.9.0",
"escape-string-regexp": "^2.0.0",
"js2xmlparser": "^4.0.1",
"klaw": "^3.0.0",
"markdown-it": "^10.0.0",
"markdown-it-anchor": "^5.2.7",
"marked": "^2.0.3",
"mkdirp": "^1.0.4",
"requizzle": "^0.2.3",
"strip-json-comments": "^3.1.0",
"taffydb": "2.6.2",
"underscore": "~1.13.1"
},
"dependencies": {
"escape-string-regexp": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz",
"integrity": "sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w==",
"dev": true
},
"klaw": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/klaw/-/klaw-3.0.0.tgz",
"integrity": "sha512-0Fo5oir+O9jnXu5EefYbVK+mHMBeEVEy2cmctR1O1NECcCkPRreJKrS6Qt/j3KC2C148Dfo9i3pCmCMsdqGr0g==",
"dev": true,
"requires": {
"graceful-fs": "^4.1.9"
}
},
"mkdirp": {
"version": "1.0.4",
"resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz",
"integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==",
"dev": true
}
}
},
"jsdom": {
"version": "16.7.0",
"resolved": "https://registry.npmjs.org/jsdom/-/jsdom-16.7.0.tgz",
@ -23694,15 +23442,6 @@
"immediate": "~3.0.5"
}
},
"linkify-it": {
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/linkify-it/-/linkify-it-2.2.0.tgz",
"integrity": "sha512-GnAl/knGn+i1U/wjBz3akz2stz+HrHLsxMwHQGofCDfPvlf+gDKN58UtfmUquTY4/MXeE2x7k19KQmeoZi94Iw==",
"dev": true,
"requires": {
"uc.micro": "^1.0.1"
}
},
"load-json-file": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-4.0.0.tgz",
@ -23951,32 +23690,6 @@
"object-visit": "^1.0.0"
}
},
"markdown-it": {
"version": "10.0.0",
"resolved": "https://registry.npmjs.org/markdown-it/-/markdown-it-10.0.0.tgz",
"integrity": "sha512-YWOP1j7UbDNz+TumYP1kpwnP0aEa711cJjrAQrzd0UXlbJfc5aAq0F/PZHjiioqDC1NKgvIMX+o+9Bk7yuM2dg==",
"dev": true,
"requires": {
"argparse": "^1.0.7",
"entities": "~2.0.0",
"linkify-it": "^2.0.0",
"mdurl": "^1.0.1",
"uc.micro": "^1.0.5"
}
},
"markdown-it-anchor": {
"version": "5.3.0",
"resolved": "https://registry.npmjs.org/markdown-it-anchor/-/markdown-it-anchor-5.3.0.tgz",
"integrity": "sha512-/V1MnLL/rgJ3jkMWo84UR+K+jF1cxNG1a+KwqeXqTIJ+jtA8aWSHuigx8lTzauiIjBDbwF3NcWQMotd0Dm39jA==",
"dev": true,
"requires": {}
},
"marked": {
"version": "2.1.3",
"resolved": "https://registry.npmjs.org/marked/-/marked-2.1.3.tgz",
"integrity": "sha512-/Q+7MGzaETqifOMWYEA7HVMaZb4XbcRfaOzcSsHZEith83KGlvaSG33u0SKu89Mj5h+T8V2hM+8O45Qc5XTgwA==",
"dev": true
},
"md5": {
"version": "2.3.0",
"resolved": "https://registry.npmjs.org/md5/-/md5-2.3.0.tgz",
@ -23988,12 +23701,6 @@
"is-buffer": "~1.1.6"
}
},
"mdurl": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/mdurl/-/mdurl-1.0.1.tgz",
"integrity": "sha1-/oWy7HWlkDfyrf7BAP1sYBdhFS4=",
"dev": true
},
"media-typer": {
"version": "0.3.0",
"resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz",
@ -26509,15 +26216,6 @@
"integrity": "sha1-kl0mAdOaxIXgkc8NpcbmlNw9yv8=",
"dev": true
},
"requizzle": {
"version": "0.2.3",
"resolved": "https://registry.npmjs.org/requizzle/-/requizzle-0.2.3.tgz",
"integrity": "sha512-YanoyJjykPxGHii0fZP0uUPEXpvqfBDxWV7s6GKAiiOsiqhX6vHNyW3Qzdmqp/iq/ExbhaGbVrjB4ruEVSM4GQ==",
"dev": true,
"requires": {
"lodash": "^4.17.14"
}
},
"resolve": {
"version": "1.20.0",
"resolved": "https://registry.npmjs.org/resolve/-/resolve-1.20.0.tgz",
@ -27906,12 +27604,6 @@
}
}
},
"taffydb": {
"version": "2.6.2",
"resolved": "https://registry.npmjs.org/taffydb/-/taffydb-2.6.2.tgz",
"integrity": "sha1-fLy2S1oUG2ou/CxdLGe04VCyomg=",
"dev": true
},
"tapable": {
"version": "2.2.1",
"resolved": "https://registry.npmjs.org/tapable/-/tapable-2.2.1.tgz",
@ -28258,12 +27950,6 @@
"integrity": "sha512-qLK/Xe9E2uzmYI3qLeOmI0tEOt+TBBQyUIAh4aAgU05FVYzeZrKUdkAZfBNVGRaHVgV0TDkdEngJSw/SyQchkQ==",
"dev": true
},
"uc.micro": {
"version": "1.0.6",
"resolved": "https://registry.npmjs.org/uc.micro/-/uc.micro-1.0.6.tgz",
"integrity": "sha512-8Y75pvTYkLJW2hWQHXxoqRgV7qb9B+9vFEtidML+7koHUFapnVJAZ6cKs+Qjz5Aw3aZWHMC6u0wJE3At+nSGwA==",
"dev": true
},
"uglify-es": {
"version": "3.3.9",
"resolved": "https://registry.npmjs.org/uglify-es/-/uglify-es-3.3.9.tgz",
@ -28303,12 +27989,6 @@
"which-boxed-primitive": "^1.0.2"
}
},
"underscore": {
"version": "1.13.2",
"resolved": "https://registry.npmjs.org/underscore/-/underscore-1.13.2.tgz",
"integrity": "sha512-ekY1NhRzq0B08g4bGuX4wd2jZx5GnKz6mKSqFL4nqBlfyMGiG10gDFhDTMEfYmDL6Jy0FUIZp7wiRB+0BP7J2g==",
"dev": true
},
"unicode-canonical-property-names-ecmascript": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/unicode-canonical-property-names-ecmascript/-/unicode-canonical-property-names-ecmascript-2.0.0.tgz",
@ -28840,12 +28520,6 @@
"integrity": "sha512-JZnDKK8B0RCDw84FNdDAIpZK+JuJw+s7Lz8nksI7SIuU3UXJJslUthsi+uWBUYOwPFwW7W7PRLRfUKpxjtjFCw==",
"dev": true
},
"xmlcreate": {
"version": "2.0.4",
"resolved": "https://registry.npmjs.org/xmlcreate/-/xmlcreate-2.0.4.tgz",
"integrity": "sha512-nquOebG4sngPmGPICTS5EnxqhKbCmz5Ox5hsszI2T6U5qdrJizBc+0ilYSEjTSzU0yZcmvppztXe/5Al5fUwdg==",
"dev": true
},
"xmldoc": {
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/xmldoc/-/xmldoc-1.1.2.tgz",

@ -53,7 +53,6 @@
"events": "^3.3.0",
"jest": "^27.3.1",
"jquery": "^3.6.0",
"jsdoc": "^3.6.7",
"karma": "^6.3.2",
"karma-chai": "^0.1.0",
"karma-chrome-launcher": "^3.1.0",
@ -85,8 +84,7 @@
"test:browser": "xvfb-maybe karma start karma.conf.local.js",
"test:react-native": "jest test/react-native",
"test:typings": "ts-node ./typings-tests.ts",
"prepublishOnly": "npm run build:browser",
"generateDocs": "jsdoc -c jsdoc.conf.js"
"prepublishOnly": "npm run build:browser"
},
"main": "index.js",
"browser": {
