Move to ESM and build a CommonJS version

feat/remove-native-modules-imports
Timothée Rebours 10 months ago
parent 08c8076ae9
commit 792eecd1a1
  1. 2
      .gitignore
  2. 4
      __mocks__/@react-native-async-storage/async-storage.js
  3. 1
      browser-version/lib/byline.js
  4. 3
      index.js
  5. 2
      jsdoc.conf.js
  6. 4
      karma.conf.local.js
  7. 6
      karma.conf.template.js
  8. 8283
      package-lock.json
  9. 31
      package.json
  10. 1
      src/browser/byline.js
  11. 5
      src/browser/customUtils.js
  12. 34
      src/browser/storage.browser.js
  13. 44
      src/browser/storage.react-native.js
  14. 8
      src/byline.js
  15. 31
      src/cursor.js
  16. 15
      src/customUtils.js
  17. 50
      src/datastore.js
  18. 4
      src/executor.js
  19. 36
      src/indexes.js
  20. 66
      src/model.js
  21. 77
      src/persistence.js
  22. 77
      src/storage.js
  23. 10
      src/utils.js
  24. 2
      src/waterfall.js
  25. 48
      test/byline.test.js
  26. 13
      test/cursor.async.test.js
  27. 15
      test/cursor.test.js
  28. 14
      test/customUtil.test.js
  29. 16
      test/db.async.test.js
  30. 16
      test/db.test.js
  31. 11
      test/executor.async.test.js
  32. 13
      test/executor.test.js
  33. 7
      test/fsUtils.test.js
  34. 4
      test/indexes.test.js
  35. 82
      test/model.test.js
  36. 64
      test/persistence.async.test.js
  37. 53
      test/persistence.test.js
  38. 6
      test/react-native/persistence.test.js
  39. 4
      test/react-native/resolver.js
  40. 33
      test/utils.test.js
  41. 2
      test_lac/loadAndCrash.test.cjs
  42. 5
      test_lac/openFds.test.js
  43. 25
      webpack.config.js

2
.gitignore vendored

@@ -24,6 +24,6 @@ browser-version/node_modules
*~ *~
*.swo *.swo
browser-version/out
test-results test-results
typings-tests.js typings-tests.js
cjs/*

@@ -1 +1,3 @@
module.exports.default = require('@react-native-async-storage/async-storage/jest/async-storage-mock') import generated from '@react-native-async-storage/async-storage/jest/async-storage-mock'
export default generated

@@ -1 +0,0 @@
module.exports = {}

@@ -1,3 +0,0 @@
const Datastore = require('./lib/datastore')
module.exports = Datastore

@@ -1,5 +1,5 @@
'use strict' 'use strict'
module.exports = { export default {
plugins: ['plugins/markdown'] plugins: ['plugins/markdown']
} }

@@ -1,8 +1,8 @@
'use strict' 'use strict'
const template = require('./karma.conf.template.js') import template from './karma.conf.template.js'
module.exports = function (config) { export default function (config) {
const localBrowser = { const localBrowser = {
ChromeHeadlessNoSandbox: { ChromeHeadlessNoSandbox: {
base: 'ChromeHeadless', base: 'ChromeHeadless',

@@ -1,6 +1,6 @@
'use strict' 'use strict'
module.exports = (config) => ({ export default (config) => ({
// Increase timeout in case connection in CI is slow // Increase timeout in case connection in CI is slow
captureTimeout: 120000, captureTimeout: 120000,
browserNoActivityTimeout: 300000, browserNoActivityTimeout: 300000,
@@ -14,8 +14,8 @@ module.exports = (config) => ({
// list of files / patterns to load in the browser // list of files / patterns to load in the browser
files: [ files: [
'node_modules/localforage/dist/localforage.min.js', 'node_modules/localforage/dist/localforage.min.js',
'browser-version/out/testutils.min.js', 'testutils.min.js',
'browser-version/out/nedb.min.js', 'nedb.min.js',
'test/browser/nedb-browser.spec.js', 'test/browser/nedb-browser.spec.js',
'test/browser/load.spec.js' 'test/browser/load.spec.js'
], ],

8283
package-lock.json generated

File diff suppressed because it is too large Load Diff

@@ -7,6 +7,7 @@
"index.js", "index.js",
"index.d.ts" "index.d.ts"
], ],
"type": "module",
"types": "index.d.ts", "types": "index.d.ts",
"author": { "author": {
"name": "Timothée Rebours", "name": "Timothée Rebours",
@@ -53,8 +54,6 @@
"@types/jest": "^27.5.2", "@types/jest": "^27.5.2",
"browser-resolve": "^2.0.0", "browser-resolve": "^2.0.0",
"chai": "^4.3.7", "chai": "^4.3.7",
"commander": "^7.2.0",
"events": "^3.3.0",
"jest": "^27.5.1", "jest": "^27.5.1",
"jsdoc-to-markdown": "^8.0.0", "jsdoc-to-markdown": "^8.0.0",
"karma": "^6.4.1", "karma": "^6.4.1",
@@ -68,7 +67,8 @@
"path-browserify": "^1.0.1", "path-browserify": "^1.0.1",
"process": "^0.11.10", "process": "^0.11.10",
"react": "^18.2.0", "react": "^18.2.0",
"react-native": "^0.71.2", "react-native": "^0.73.2",
"rollup": "^4.9.5",
"semver": "^7.3.8", "semver": "^7.3.8",
"source-map-loader": "^4.0.1", "source-map-loader": "^4.0.1",
"standard": "^17.0.0", "standard": "^17.0.0",
@@ -83,7 +83,9 @@
}, },
"scripts": { "scripts": {
"lint": "standard", "lint": "standard",
"pretest": "npm run build:cjs",
"test": "mocha --reporter spec --timeout 10000", "test": "mocha --reporter spec --timeout 10000",
"build:cjs": "rollup --preserveModules src/* src/browser/* --dir cjs --entryFileNames \"[name].cjs\" --format cjs --preserveEntrySignatures strict",
"build:browser": "webpack --config-name Nedb && webpack --config-name Nedb --env minimize && webpack --config-name testUtils --env minimize", "build:browser": "webpack --config-name Nedb && webpack --config-name Nedb --env minimize && webpack --config-name testUtils --env minimize",
"pretest:browser": "npm run build:browser", "pretest:browser": "npm run build:browser",
"test:browser": "xvfb-maybe karma start karma.conf.local.js", "test:browser": "xvfb-maybe karma start karma.conf.local.js",
@@ -92,16 +94,23 @@
"prepublishOnly": "npm run build:browser", "prepublishOnly": "npm run build:browser",
"generateDocs:markdown": "jsdoc2md --no-cache -c jsdoc.conf.js --param-list-format list --files ./lib/*.js > API.md" "generateDocs:markdown": "jsdoc2md --no-cache -c jsdoc.conf.js --param-list-format list --files ./lib/*.js > API.md"
}, },
"main": "index.js", "main": "cjs/datastore.cjs",
"module": "src/datastore.js",
"browser": { "browser": {
"./lib/customUtils.js": "./browser-version/lib/customUtils.js", "./src/customUtils.js": "./src/browser/customUtils.js",
"./lib/storage.js": "./browser-version/lib/storage.browser.js", "./src/storage.js": "./src/browser/storage.browser.js",
"./lib/byline.js": "./browser-version/lib/byline.js" "./src/byline.js": "./src/browser/byline.js",
"./cjs/customUtils.cjs": "./src/browser/customUtils.cjs",
"./cjs/storage.cjs": "./src/browser/storage.browser.cjs",
"./cjs/byline.cjs": "./src/browser/byline.cjs"
}, },
"react-native": { "react-native": {
"./lib/customUtils.js": "./browser-version/lib/customUtils.js", "./src/customUtils.js": "./src/browser/customUtils.js",
"./lib/storage.js": "./browser-version/lib/storage.react-native.js", "./src/storage.js": "./src/browser/storage.react-native.js",
"./lib/byline.js": "./browser-version/lib/byline.js" "./src/byline.js": "./src/browser/byline.js",
"./cjs/customUtils.cjs": "./cjs/browser/customUtils.cjs",
"./cjs/storage.cjs": "./cjs/browser/storage.react-native.cjs",
"./cjs/byline.cjs": "./cjs/browser/byline.cjs"
}, },
"license": "MIT", "license": "MIT",
"publishConfig": { "publishConfig": {
@@ -109,7 +118,7 @@
}, },
"standard": { "standard": {
"ignore": [ "ignore": [
"browser-version/out", "cjs",
"**/*.ts" "**/*.ts"
] ]
}, },

@@ -0,0 +1 @@
export default {}

@@ -73,7 +73,7 @@ const byteArrayToBase64 = uint8 => {
*/ */
const uid = len => byteArrayToBase64(randomBytes(Math.ceil(Math.max(8, len * 2)))).replace(/[+/]/g, '').slice(0, len) const uid = len => byteArrayToBase64(randomBytes(Math.ceil(Math.max(8, len * 2)))).replace(/[+/]/g, '').slice(0, len)
module.exports.uid = uid export { uid }
// Copyright Joyent, Inc. and other Node contributors. // Copyright Joyent, Inc. and other Node contributors.
// //
@@ -162,4 +162,5 @@ function callbackify (original) {
getOwnPropertyDescriptors(original)) getOwnPropertyDescriptors(original))
return callbackified return callbackified
} }
module.exports.callbackify = callbackify
export { callbackify }

@@ -8,7 +8,7 @@
* @private * @private
*/ */
const localforage = require('localforage') import localforage from 'localforage'
// Configure localforage to display NeDB name for now. Would be a good idea to let user use his own app name // Configure localforage to display NeDB name for now. Would be a good idea to let user use his own app name
const store = localforage.createInstance({ const store = localforage.createInstance({
@@ -169,23 +169,17 @@ const crashSafeWriteFileLinesAsync = async (filename, lines) => {
await writeFileAsync(filename, lines.join('\n')) await writeFileAsync(filename, lines.join('\n'))
} }
// Interface const readFileStream = false
module.exports.existsAsync = existsAsync
module.exports.renameAsync = renameAsync
module.exports.writeFileAsync = writeFileAsync
module.exports.crashSafeWriteFileLinesAsync = crashSafeWriteFileLinesAsync
module.exports.appendFileAsync = appendFileAsync
module.exports.readFileAsync = readFileAsync
module.exports.unlinkAsync = unlinkAsync // Interface
export {
module.exports.mkdirAsync = mkdirAsync appendFileAsync,
crashSafeWriteFileLinesAsync,
module.exports.ensureDatafileIntegrityAsync = ensureDatafileIntegrityAsync ensureDatafileIntegrityAsync,
ensureParentDirectoryExistsAsync,
module.exports.ensureParentDirectoryExistsAsync = ensureParentDirectoryExistsAsync existsAsync,
readFileAsync,
readFileStream,
unlinkAsync,
mkdirAsync,
}

@@ -8,8 +8,8 @@
* @private * @private
*/ */
const AsyncStorage = require('@react-native-async-storage/async-storage').default import AsyncStorage from '@react-native-async-storage/async-storage'
const { callbackify } = require('./customUtils') import { callbackify } from '../customUtils'
/** /**
* Async version of {@link module:storageReactNative.exists}. * Async version of {@link module:storageReactNative.exists}.
@@ -264,32 +264,16 @@ const crashSafeWriteFileLines = callbackify(crashSafeWriteFileLinesAsync)
*/ */
const ensureParentDirectoryExistsAsync = async (file, mode) => Promise.resolve() const ensureParentDirectoryExistsAsync = async (file, mode) => Promise.resolve()
// Interface const readFileStream = false
module.exports.exists = exists
module.exports.existsAsync = existsAsync
module.exports.rename = rename export {
module.exports.renameAsync = renameAsync appendFileAsync,
crashSafeWriteFileLinesAsync,
module.exports.writeFile = writeFile ensureDatafileIntegrityAsync,
module.exports.writeFileAsync = writeFileAsync ensureParentDirectoryExistsAsync,
existsAsync,
module.exports.crashSafeWriteFileLines = crashSafeWriteFileLines readFileAsync,
module.exports.crashSafeWriteFileLinesAsync = crashSafeWriteFileLinesAsync readFileStream,
unlinkAsync,
module.exports.appendFile = appendFile mkdirAsync,
module.exports.appendFileAsync = appendFileAsync }
module.exports.readFile = readFile
module.exports.readFileAsync = readFileAsync
module.exports.unlink = unlink
module.exports.unlinkAsync = unlinkAsync
module.exports.mkdir = mkdir
module.exports.mkdirAsync = mkdirAsync
module.exports.ensureDatafileIntegrity = ensureDatafileIntegrity
module.exports.ensureDatafileIntegrityAsync = ensureDatafileIntegrityAsync
module.exports.ensureParentDirectoryExistsAsync = ensureParentDirectoryExistsAsync

@@ -23,9 +23,9 @@
* @module byline * @module byline
* @private * @private
*/ */
const stream = require('stream') import stream from 'node:stream'
const timers = require('timers') import timers from 'node:timers'
const { Buffer } = require('buffer') import { Buffer } from 'node:buffer'
const createLineStream = (readStream, options) => { const createLineStream = (readStream, options) => {
if (!readStream) throw new Error('expected readStream') if (!readStream) throw new Error('expected readStream')
@@ -115,4 +115,4 @@ class LineStream extends stream.Transform {
} }
} }
module.exports = createLineStream export default createLineStream

@@ -1,5 +1,5 @@
const model = require('./model.js') import { getDotValue, modify, match, compareThings } from './model.js'
const { callbackify } = require('./customUtils.js') import { callbackify } from './customUtils.js'
/** /**
* Has a callback * Has a callback
@@ -127,30 +127,30 @@ class Cursor {
// Check for consistency // Check for consistency
const keys = Object.keys(this._projection) const keys = Object.keys(this._projection)
keys.forEach(k => { for (const k of keys) {
if (action !== undefined && this._projection[k] !== action) throw new Error('Can\'t both keep and omit fields except for _id') if (action !== undefined && this._projection[k] !== action) throw new Error('Can\'t both keep and omit fields except for _id')
action = this._projection[k] action = this._projection[k]
}) }
// Do the actual projection // Do the actual projection
candidates.forEach(candidate => { for (const candidate of candidates) {
let toPush let toPush
if (action === 1) { // pick-type projection if (action === 1) { // pick-type projection
toPush = { $set: {} } toPush = { $set: {} }
keys.forEach(k => { for (const k of keys) {
toPush.$set[k] = model.getDotValue(candidate, k) toPush.$set[k] = getDotValue(candidate, k)
if (toPush.$set[k] === undefined) delete toPush.$set[k] if (toPush.$set[k] === undefined) delete toPush.$set[k]
}) }
toPush = model.modify({}, toPush) toPush = modify({}, toPush)
} else { // omit-type projection } else { // omit-type projection
toPush = { $unset: {} } toPush = { $unset: {} }
keys.forEach(k => { toPush.$unset[k] = true }) for (const k of keys) { toPush.$unset[k] = true }
toPush = model.modify(candidate, toPush) toPush = modify(candidate, toPush)
} }
if (keepId) toPush._id = candidate._id if (keepId) toPush._id = candidate._id
else delete toPush._id else delete toPush._id
res.push(toPush) res.push(toPush)
}) }
return res return res
} }
@@ -168,9 +168,8 @@ class Cursor {
let skipped = 0 let skipped = 0
const candidates = await this.db._getCandidatesAsync(this.query) const candidates = await this.db._getCandidatesAsync(this.query)
for (const candidate of candidates) { for (const candidate of candidates) {
if (model.match(candidate, this.query)) { if (match(candidate, this.query)) {
// If a sort is defined, wait for the results to be sorted before applying limit and skip // If a sort is defined, wait for the results to be sorted before applying limit and skip
if (!this._sort) { if (!this._sort) {
if (this._skip && this._skip > skipped) skipped += 1 if (this._skip && this._skip > skipped) skipped += 1
@@ -189,7 +188,7 @@
const criteria = Object.entries(this._sort).map(([key, direction]) => ({ key, direction })) const criteria = Object.entries(this._sort).map(([key, direction]) => ({ key, direction }))
res.sort((a, b) => { res.sort((a, b) => {
for (const criterion of criteria) { for (const criterion of criteria) {
const compare = criterion.direction * model.compareThings(model.getDotValue(a, criterion.key), model.getDotValue(b, criterion.key), this.db.compareStrings) const compare = criterion.direction * compareThings(getDotValue(a, criterion.key), getDotValue(b, criterion.key), this.db.compareStrings)
if (compare !== 0) return compare if (compare !== 0) return compare
} }
return 0 return 0
@@ -247,4 +246,4 @@ class Cursor {
} }
// Interface // Interface
module.exports = Cursor export default Cursor

@@ -4,9 +4,13 @@
* @module customUtilsNode * @module customUtilsNode
* @private * @private
*/ */
const crypto = require('crypto') import { randomBytes } from 'crypto'
const { callbackify } = require('util') import { callbackify } from 'util'
// Must use an intermediary variable, otherwise Rollup imports callbackify from util directly
// (along with crypto somehow) in files importing customUtils.
const _callbackify = callbackify
/** /**
* Return a random alphanumerical string of length len * Return a random alphanumerical string of length len
* There is a very small probability (less than 1/1,000,000) for the length to be less than len * There is a very small probability (less than 1/1,000,000) for the length to be less than len
@@ -18,12 +22,9 @@ const { callbackify } = require('util')
* @return {string} * @return {string}
* @alias module:customUtilsNode.uid * @alias module:customUtilsNode.uid
*/ */
const uid = len => crypto.randomBytes(Math.ceil(Math.max(8, len * 2))) const uid = len => randomBytes(Math.ceil(Math.max(8, len * 2)))
.toString('base64') .toString('base64')
.replace(/[+/]/g, '') .replace(/[+/]/g, '')
.slice(0, len) .slice(0, len)
// Interface export { uid, _callbackify as callbackify }
module.exports.uid = uid
module.exports.callbackify = callbackify

@@ -1,10 +1,10 @@
const Cursor = require('./cursor.js') import Cursor from './cursor.js'
const { uid, callbackify } = require('./customUtils.js') import { callbackify, uid } from './customUtils.js'
const Executor = require('./executor.js') import Executor from './executor.js'
const Index = require('./indexes.js') import Index from './indexes.js'
const model = require('./model.js') import { deepCopy, match, checkObject, modify } from './model.js'
const Persistence = require('./persistence.js') import Persistence from './persistence.js'
const { isDate, pick, filterIndexNames } = require('./utils.js') import { filterIndexNames, isDate, pick } from './utils.js'
/** /**
* Callback with no parameter * Callback with no parameter
@@ -668,10 +668,10 @@ class Datastore {
const expiredDocsIds = [] const expiredDocsIds = []
const ttlIndexesFieldNames = Object.keys(this.ttlIndexes) const ttlIndexesFieldNames = Object.keys(this.ttlIndexes)
docs.forEach(doc => { for (const doc of docs) {
if (ttlIndexesFieldNames.every(i => !(doc[i] !== undefined && isDate(doc[i]) && Date.now() > doc[i].getTime() + this.ttlIndexes[i] * 1000))) validDocs.push(doc) if (ttlIndexesFieldNames.every(i => !(doc[i] !== undefined && isDate(doc[i]) && Date.now() > doc[i].getTime() + this.ttlIndexes[i] * 1000))) validDocs.push(doc)
else expiredDocsIds.push(doc._id) else expiredDocsIds.push(doc._id)
}) }
for (const _id of expiredDocsIds) { for (const _id of expiredDocsIds) {
await this._removeAsync({ _id }, {}) await this._removeAsync({ _id }, {})
} }
@@ -691,7 +691,7 @@ class Datastore {
this._insertInCache(preparedDoc) this._insertInCache(preparedDoc)
await this.persistence.persistNewStateAsync(Array.isArray(preparedDoc) ? preparedDoc : [preparedDoc]) await this.persistence.persistNewStateAsync(Array.isArray(preparedDoc) ? preparedDoc : [preparedDoc])
return model.deepCopy(preparedDoc) return deepCopy(preparedDoc)
} }
/** /**
@@ -718,14 +718,14 @@ class Datastore {
if (Array.isArray(newDoc)) { if (Array.isArray(newDoc)) {
preparedDoc = [] preparedDoc = []
newDoc.forEach(doc => { preparedDoc.push(this._prepareDocumentForInsertion(doc)) }) for (const doc of newDoc) { preparedDoc.push(this._prepareDocumentForInsertion(doc)) }
} else { } else {
preparedDoc = model.deepCopy(newDoc) preparedDoc = deepCopy(newDoc)
if (preparedDoc._id === undefined) preparedDoc._id = this._createNewId() if (preparedDoc._id === undefined) preparedDoc._id = this._createNewId()
const now = new Date() const now = new Date()
if (this.timestampData && preparedDoc.createdAt === undefined) preparedDoc.createdAt = now if (this.timestampData && preparedDoc.createdAt === undefined) preparedDoc.createdAt = now
if (this.timestampData && preparedDoc.updatedAt === undefined) preparedDoc.updatedAt = now if (this.timestampData && preparedDoc.updatedAt === undefined) preparedDoc.updatedAt = now
model.checkObject(preparedDoc) checkObject(preparedDoc)
} }
return preparedDoc return preparedDoc
@@ -857,7 +857,7 @@
* @async * @async
*/ */
findAsync (query, projection = {}) { findAsync (query, projection = {}) {
const cursor = new Cursor(this, query, docs => docs.map(doc => model.deepCopy(doc))) const cursor = new Cursor(this, query, docs => docs.map(doc => deepCopy(doc)))
cursor.projection(projection) cursor.projection(projection)
return cursor return cursor
@@ -902,7 +902,7 @@
* @return {Cursor<document>} * @return {Cursor<document>}
*/ */
findOneAsync (query, projection = {}) { findOneAsync (query, projection = {}) {
const cursor = new Cursor(this, query, docs => docs.length === 1 ? model.deepCopy(docs[0]) : null) const cursor = new Cursor(this, query, docs => docs.length === 1 ? deepCopy(docs[0]) : null)
cursor.projection(projection).limit(1) cursor.projection(projection).limit(1)
return cursor return cursor
@@ -954,13 +954,13 @@
let toBeInserted let toBeInserted
try { try {
model.checkObject(update) checkObject(update)
// updateQuery is a simple object with no modifier, use it as the document to insert // updateQuery is a simple object with no modifier, use it as the document to insert
toBeInserted = update toBeInserted = update
} catch (e) { } catch (e) {
// updateQuery contains modifiers, use the find query as the base, // updateQuery contains modifiers, use the find query as the base,
// strip it from all operators and update it according to updateQuery // strip it from all operators and update it according to updateQuery
toBeInserted = model.modify(model.deepCopy(query, true), update) toBeInserted = modify(deepCopy(query, true), update)
} }
const newDoc = await this._insertAsync(toBeInserted) const newDoc = await this._insertAsync(toBeInserted)
return { numAffected: 1, affectedDocuments: newDoc, upsert: true } return { numAffected: 1, affectedDocuments: newDoc, upsert: true }
@@ -976,10 +976,10 @@
// Preparing update (if an error is thrown here neither the datafile nor // Preparing update (if an error is thrown here neither the datafile nor
// the in-memory indexes are affected) // the in-memory indexes are affected)
for (const candidate of candidates) { for (const candidate of candidates) {
if (model.match(candidate, query) && (multi || numReplaced === 0)) { if (match(candidate, query) && (multi || numReplaced === 0)) {
numReplaced += 1 numReplaced += 1
if (this.timestampData) { createdAt = candidate.createdAt } if (this.timestampData) { createdAt = candidate.createdAt }
modifiedDoc = model.modify(candidate, update) modifiedDoc = modify(candidate, update)
if (this.timestampData) { if (this.timestampData) {
modifiedDoc.createdAt = createdAt modifiedDoc.createdAt = createdAt
modifiedDoc.updatedAt = new Date() modifiedDoc.updatedAt = new Date()
@@ -997,7 +997,7 @@
if (!options.returnUpdatedDocs) return { numAffected: numReplaced, upsert: false, affectedDocuments: null } if (!options.returnUpdatedDocs) return { numAffected: numReplaced, upsert: false, affectedDocuments: null }
else { else {
let updatedDocsDC = [] let updatedDocsDC = []
updatedDocs.forEach(doc => { updatedDocsDC.push(model.deepCopy(doc)) }) for (const doc of updatedDocs) { updatedDocsDC.push(deepCopy(doc)) }
if (!multi) updatedDocsDC = updatedDocsDC[0] if (!multi) updatedDocsDC = updatedDocsDC[0]
return { numAffected: numReplaced, affectedDocuments: updatedDocsDC, upsert: false } return { numAffected: numReplaced, affectedDocuments: updatedDocsDC, upsert: false }
} }
@@ -1007,7 +1007,7 @@
* Callback version of {@link Datastore#updateAsync}. * Callback version of {@link Datastore#updateAsync}.
* @param {query} query * @param {query} query
* @param {document|*} update * @param {document|*} update
* @param {Object|Datastore~updateCallback} [options|] * @param {Object|Datastore~updateCallback} [options]
* @param {boolean} [options.multi = false] * @param {boolean} [options.multi = false]
* @param {boolean} [options.upsert = false] * @param {boolean} [options.upsert = false]
* @param {boolean} [options.returnUpdatedDocs = false] * @param {boolean} [options.returnUpdatedDocs = false]
@@ -1083,13 +1083,13 @@
const removedDocs = [] const removedDocs = []
let numRemoved = 0 let numRemoved = 0
candidates.forEach(d => { for (const d of candidates) {
if (model.match(d, query) && (multi || numRemoved === 0)) { if (match(d, query) && (multi || numRemoved === 0)) {
numRemoved += 1 numRemoved += 1
removedDocs.push({ $$deleted: true, _id: d._id }) removedDocs.push({ $$deleted: true, _id: d._id })
this._removeFromIndexes(d) this._removeFromIndexes(d)
} }
}) }
await this.persistence.persistNewStateAsync(removedDocs) await this.persistence.persistNewStateAsync(removedDocs)
return numRemoved return numRemoved
@@ -1125,4 +1125,4 @@
} }
} }
module.exports = Datastore export default Datastore

@@ -1,4 +1,4 @@
const Waterfall = require('./waterfall') import Waterfall from './waterfall.js'
/** /**
* Executes operations sequentially. * Executes operations sequentially.
@@ -76,4 +76,4 @@ class Executor {
} }
// Interface // Interface
module.exports = Executor export default Executor

@@ -1,6 +1,6 @@
const BinarySearchTree = require('@seald-io/binary-search-tree').AVLTree import { AVLTree as BinarySearchTree } from '@seald-io/binary-search-tree'
const model = require('./model.js') import { compareThings, getDotValues } from './model.js'
const { uniq, isDate } = require('./utils.js') import { isDate, uniq } from './utils.js'
/** /**
* Two indexed pointers are equal if they point to the same place * Two indexed pointers are equal if they point to the same place
@@ -73,7 +73,7 @@ class Index {
* Options object given to the underlying BinarySearchTree. * Options object given to the underlying BinarySearchTree.
* @type {{unique: boolean, checkValueEquality: (function(*, *): boolean), compareKeys: ((function(*, *, compareStrings): (number|number))|*)}} * @type {{unique: boolean, checkValueEquality: (function(*, *): boolean), compareKeys: ((function(*, *, compareStrings): (number|number))|*)}}
*/ */
this.treeOptions = { unique: this.unique, compareKeys: model.compareThings, checkValueEquality } this.treeOptions = { unique: this.unique, compareKeys: compareThings, checkValueEquality }
/** /**
* Underlying BinarySearchTree for this index. Uses an AVLTree for optimization. * Underlying BinarySearchTree for this index. Uses an AVLTree for optimization.
@@ -109,7 +109,7 @@ class Index {
return return
} }
const key = model.getDotValues(doc, this._fields) const key = getDotValues(doc, this._fields)
// We don't index documents that don't contain the field if the index is sparse // We don't index documents that don't contain the field if the index is sparse
if ((key === undefined || (typeof key === 'object' && key !== null && Object.values(key).every(el => el === undefined))) && this.sparse) return if ((key === undefined || (typeof key === 'object' && key !== null && Object.values(key).every(el => el === undefined))) && this.sparse) return
@@ -177,19 +177,19 @@
*/ */
remove (doc) { remove (doc) {
if (Array.isArray(doc)) { if (Array.isArray(doc)) {
doc.forEach(d => { this.remove(d) }) for (const d of doc) { this.remove(d) }
return return
} }
const key = model.getDotValues(doc, this._fields) const key = getDotValues(doc, this._fields)
if (key === undefined && this.sparse) return if (key === undefined && this.sparse) return
if (!Array.isArray(key)) { if (!Array.isArray(key)) {
this.tree.delete(key, doc) this.tree.delete(key, doc)
} else { } else {
uniq(key, projectForUnique).forEach(_key => { for (const _key of uniq(key, projectForUnique)) {
this.tree.delete(_key, doc) this.tree.delete(_key, doc)
}) }
} }
} }
@@ -268,9 +268,9 @@ class Index {
if (!Array.isArray(oldDoc)) this.update(newDoc, oldDoc) if (!Array.isArray(oldDoc)) this.update(newDoc, oldDoc)
else { else {
oldDoc.forEach(pair => { for (const pair of oldDoc) {
revert.push({ oldDoc: pair.newDoc, newDoc: pair.oldDoc }) revert.push({ oldDoc: pair.newDoc, newDoc: pair.oldDoc })
}) }
this.update(revert) this.update(revert)
} }
} }
@@ -286,15 +286,15 @@
const _res = {} const _res = {}
const res = [] const res = []
value.forEach(v => { for (const v of value) {
this.getMatching(v).forEach(doc => { for (const doc of this.getMatching(v)) {
_res[doc._id] = doc _res[doc._id] = doc
}) }
}) }
Object.keys(_res).forEach(_id => { for (const _id of Object.keys(_res)) {
res.push(_res[_id]) res.push(_res[_id])
}) }
return res return res
} }
@@ -330,4 +330,4 @@ class Index {
} }
// Interface // Interface
module.exports = Index export default Index

@@ -6,7 +6,7 @@
* @module model * @module model
* @private * @private
*/ */
const { uniq, isDate, isRegExp } = require('./utils.js') import { isDate, isRegExp, uniq } from './utils.js'
/** /**
* Check a key, throw an error if the key is non valid * Check a key, throw an error if the key is non valid
@@ -39,9 +39,9 @@ const checkKey = (k, v) => {
*/ */
const checkObject = obj => { const checkObject = obj => {
if (Array.isArray(obj)) { if (Array.isArray(obj)) {
obj.forEach(o => { for (const o of obj) {
checkObject(o) checkObject(o)
}) }
} }
if (typeof obj === 'object' && obj !== null) { if (typeof obj === 'object' && obj !== null) {
@@ -162,9 +162,19 @@ const isPrimitiveType = obj => (
* @private * @private
*/ */
const compareNSB = (a, b) => { const compareNSB = (a, b) => {
if (a < b) return -1 if (a === b) return 0
if (a > b) return 1 switch (typeof a) { // types are assumed to be equal
return 0 case 'string':
if (a < b) return -1
else if (a > b) return 1
else return 0
case 'boolean':
return a - b
case 'number':
return Math.sign(a - b)
default:
throw new Error('Invalid types')
}
} }
/** /**
@@ -292,14 +302,14 @@ const $addToSetPartial = (obj, field, value) => {
if (Object.keys(value).length > 1) throw new Error('Can\'t use another field in conjunction with $each') if (Object.keys(value).length > 1) throw new Error('Can\'t use another field in conjunction with $each')
if (!Array.isArray(value.$each)) throw new Error('$each requires an array value') if (!Array.isArray(value.$each)) throw new Error('$each requires an array value')
value.$each.forEach(v => { for (const v of value.$each) {
$addToSetPartial(obj, field, v) $addToSetPartial(obj, field, v)
}) }
} else { } else {
let addToSet = true let addToSet = true
obj[field].forEach(v => { for (const v of obj[field]) {
if (compareThings(v, value) === 0) addToSet = false if (compareThings(v, value) === 0) addToSet = false
}) }
if (addToSet) obj[field].push(value) if (addToSet) obj[field].push(value)
} }
} }
@@ -399,9 +409,9 @@ const modifierFunctions = {
) throw new Error('Can only use $slice in cunjunction with $each when $push to array') ) throw new Error('Can only use $slice in cunjunction with $each when $push to array')
if (!Array.isArray(value.$each)) throw new Error('$each requires an array value') if (!Array.isArray(value.$each)) throw new Error('$each requires an array value')
value.$each.forEach(v => { for (const v of value.$each) {
obj[field].push(v) obj[field].push(v)
}) }
if (value.$slice === undefined || typeof value.$slice !== 'number') return if (value.$slice === undefined || typeof value.$slice !== 'number') return
@@ -452,7 +462,7 @@ const modify = (obj, updateQuery) => {
// Apply modifiers // Apply modifiers
modifiers = uniq(keys) modifiers = uniq(keys)
newDoc = deepCopy(obj) newDoc = deepCopy(obj)
modifiers.forEach(m => { for (const m of modifiers) {
if (!modifierFunctions[m]) throw new Error(`Unknown modifier ${m}`) if (!modifierFunctions[m]) throw new Error(`Unknown modifier ${m}`)
// Can't rely on Object.keys throwing on non objects since ES6 // Can't rely on Object.keys throwing on non objects since ES6
@@ -460,10 +470,10 @@ const modify = (obj, updateQuery) => {
if (typeof updateQuery[m] !== 'object') throw new Error(`Modifier ${m}'s argument must be an object`) if (typeof updateQuery[m] !== 'object') throw new Error(`Modifier ${m}'s argument must be an object`)
const keys = Object.keys(updateQuery[m]) const keys = Object.keys(updateQuery[m])
keys.forEach(k => { for (const k of keys) {
modifierFunctions[m](newDoc, k, updateQuery[m][k]) modifierFunctions[m](newDoc, k, updateQuery[m][k])
}) }
}) }
} }
// Check result is valid and return it // Check result is valid and return it
@@ -814,14 +824,16 @@ function matchQueryPart (obj, queryKey, queryValue, treatObjAsValue) {
} }
// Interface // Interface
module.exports.serialize = serialize export {
module.exports.deserialize = deserialize serialize,
module.exports.deepCopy = deepCopy deserialize,
module.exports.checkObject = checkObject deepCopy,
module.exports.isPrimitiveType = isPrimitiveType checkObject,
module.exports.modify = modify isPrimitiveType,
module.exports.getDotValue = getDotValue modify,
module.exports.getDotValues = getDotValues getDotValue,
module.exports.match = match getDotValues,
module.exports.areThingsEqual = areThingsEqual match,
module.exports.compareThings = compareThings areThingsEqual,
compareThings
}

@ -1,8 +1,17 @@
const byline = require('./byline') import byline from './byline.js'
const customUtils = require('./customUtils.js') import { uid } from './customUtils.js'
const Index = require('./indexes.js') import Index from './indexes.js'
const model = require('./model.js') import { serialize, deserialize } from './model.js'
const storage = require('./storage.js') import {
appendFileAsync,
crashSafeWriteFileLinesAsync,
ensureDatafileIntegrityAsync,
ensureParentDirectoryExistsAsync,
existsAsync,
readFileAsync,
readFileStream,
unlinkAsync,
} from './storage.js'
const DEFAULT_DIR_MODE = 0o755 const DEFAULT_DIR_MODE = 0o755
const DEFAULT_FILE_MODE = 0o644 const DEFAULT_FILE_MODE = 0o644
@ -54,7 +63,12 @@ class Persistence {
this.inMemoryOnly = this.db.inMemoryOnly this.inMemoryOnly = this.db.inMemoryOnly
this.filename = this.db.filename this.filename = this.db.filename
this.corruptAlertThreshold = options.corruptAlertThreshold !== undefined ? options.corruptAlertThreshold : 0.1 this.corruptAlertThreshold = options.corruptAlertThreshold !== undefined ? options.corruptAlertThreshold : 0.1
this.modes = options.modes !== undefined ? options.modes : { fileMode: DEFAULT_FILE_MODE, dirMode: DEFAULT_DIR_MODE } this.modes = options.modes !== undefined
? options.modes
: {
fileMode: DEFAULT_FILE_MODE,
dirMode: DEFAULT_DIR_MODE
}
if (this.modes.fileMode === undefined) this.modes.fileMode = DEFAULT_FILE_MODE if (this.modes.fileMode === undefined) this.modes.fileMode = DEFAULT_FILE_MODE
if (this.modes.dirMode === undefined) this.modes.dirMode = DEFAULT_DIR_MODE if (this.modes.dirMode === undefined) this.modes.dirMode = DEFAULT_DIR_MODE
if ( if (
@ -79,7 +93,7 @@ class Persistence {
if (options.testSerializationHooks === undefined || options.testSerializationHooks) { if (options.testSerializationHooks === undefined || options.testSerializationHooks) {
for (let i = 1; i < 30; i += 1) { for (let i = 1; i < 30; i += 1) {
for (let j = 0; j < 10; j += 1) { for (let j = 0; j < 10; j += 1) {
const randomString = customUtils.uid(i) const randomString = uid(i)
if (this.beforeDeserialization(this.afterSerialization(randomString)) !== randomString) { if (this.beforeDeserialization(this.afterSerialization(randomString)) !== randomString) {
throw new Error('beforeDeserialization is not the reverse of afterSerialization, cautiously refusing to start NeDB to prevent dataloss') throw new Error('beforeDeserialization is not the reverse of afterSerialization, cautiously refusing to start NeDB to prevent dataloss')
} }
@ -99,12 +113,12 @@ class Persistence {
if (this.inMemoryOnly) return if (this.inMemoryOnly) return
this.db.getAllData().forEach(doc => { for (const doc of this.db.getAllData()) {
lines.push(this.afterSerialization(model.serialize(doc))) lines.push(this.afterSerialization(serialize(doc)))
}) }
Object.keys(this.db.indexes).forEach(fieldName => { for (const fieldName of Object.keys(this.db.indexes)) {
if (fieldName !== '_id') { // The special _id index is managed by datastore.js, the others need to be persisted if (fieldName !== '_id') { // The special _id index is managed by datastore.js, the others need to be persisted
lines.push(this.afterSerialization(model.serialize({ lines.push(this.afterSerialization(serialize({
$$indexCreated: { $$indexCreated: {
fieldName: this.db.indexes[fieldName].fieldName, fieldName: this.db.indexes[fieldName].fieldName,
unique: this.db.indexes[fieldName].unique, unique: this.db.indexes[fieldName].unique,
@ -112,9 +126,9 @@ class Persistence {
} }
}))) })))
} }
}) }
await storage.crashSafeWriteFileLinesAsync(this.filename, lines, this.modes) await crashSafeWriteFileLinesAsync(this.filename, lines, this.modes)
if (typeof this.db.oncompaction === 'function') this.db.oncompaction(null) if (typeof this.db.oncompaction === 'function') this.db.oncompaction(null)
} catch (error) { } catch (error) {
if (typeof this.db.oncompaction === 'function') this.db.oncompaction(error) if (typeof this.db.oncompaction === 'function') this.db.oncompaction(error)
@ -137,13 +151,13 @@ class Persistence {
// In-memory only datastore // In-memory only datastore
if (this.inMemoryOnly) return if (this.inMemoryOnly) return
newDocs.forEach(doc => { for (const doc of newDocs) {
toPersist += this.afterSerialization(model.serialize(doc)) + '\n' toPersist += this.afterSerialization(serialize(doc)) + '\n'
}) }
if (toPersist.length === 0) return if (toPersist.length === 0) return
await storage.appendFileAsync(this.filename, toPersist, { encoding: 'utf8', mode: this.modes.fileMode }) await appendFileAsync(this.filename, toPersist, { encoding: 'utf8', mode: this.modes.fileMode })
} }
/** /**
@ -171,9 +185,12 @@ class Persistence {
let corruptItems = 0 let corruptItems = 0
for (const datum of data) { for (const datum of data) {
if (datum === '') { dataLength--; continue } if (datum === '') {
dataLength--
continue
}
try { try {
const doc = model.deserialize(this.beforeDeserialization(datum)) const doc = deserialize(this.beforeDeserialization(datum))
if (doc._id) { if (doc._id) {
if (doc.$$deleted === true) delete dataById[doc._id] if (doc.$$deleted === true) delete dataById[doc._id]
else dataById[doc._id] = doc else dataById[doc._id] = doc
@ -229,7 +246,7 @@ class Persistence {
lineStream.on('data', (line) => { lineStream.on('data', (line) => {
if (line === '') return if (line === '') return
try { try {
const doc = model.deserialize(this.beforeDeserialization(line)) const doc = deserialize(this.beforeDeserialization(line))
if (doc._id) { if (doc._id) {
if (doc.$$deleted === true) delete dataById[doc._id] if (doc.$$deleted === true) delete dataById[doc._id]
else dataById[doc._id] = doc else dataById[doc._id] = doc
@ -286,22 +303,22 @@ class Persistence {
// In-memory only datastore // In-memory only datastore
if (this.inMemoryOnly) return if (this.inMemoryOnly) return
await Persistence.ensureParentDirectoryExistsAsync(this.filename, this.modes.dirMode) await Persistence.ensureParentDirectoryExistsAsync(this.filename, this.modes.dirMode)
await storage.ensureDatafileIntegrityAsync(this.filename, this.modes.fileMode) await ensureDatafileIntegrityAsync(this.filename, this.modes.fileMode)
let treatedData let treatedData
if (storage.readFileStream) { if (readFileStream) {
// Server side // Server side
const fileStream = storage.readFileStream(this.filename, { encoding: 'utf8', mode: this.modes.fileMode }) const fileStream = readFileStream(this.filename, { encoding: 'utf8', mode: this.modes.fileMode })
treatedData = await this.treatRawStreamAsync(fileStream) treatedData = await this.treatRawStreamAsync(fileStream)
} else { } else {
// Browser // Browser
const rawData = await storage.readFileAsync(this.filename, { encoding: 'utf8', mode: this.modes.fileMode }) const rawData = await readFileAsync(this.filename, { encoding: 'utf8', mode: this.modes.fileMode })
treatedData = this.treatRawData(rawData) treatedData = this.treatRawData(rawData)
} }
// Recreate all indexes in the datafile // Recreate all indexes in the datafile
Object.keys(treatedData.indexes).forEach(key => { for (const key of Object.keys(treatedData.indexes)) {
this.db.indexes[key] = new Index(treatedData.indexes[key]) this.db.indexes[key] = new Index(treatedData.indexes[key])
}) }
// Fill cached database (i.e. all indexes) with data // Fill cached database (i.e. all indexes) with data
try { try {
@ -337,7 +354,7 @@ class Persistence {
// remove datastore file // remove datastore file
if (!this.db.inMemoryOnly) { if (!this.db.inMemoryOnly) {
await this.db.executor.pushAsync(async () => { await this.db.executor.pushAsync(async () => {
if (await storage.existsAsync(this.filename)) await storage.unlinkAsync(this.filename) if (await existsAsync(this.filename)) await unlinkAsync(this.filename)
}, true) }, true)
} }
} }
@ -350,9 +367,9 @@ class Persistence {
* @private * @private
*/ */
static async ensureParentDirectoryExistsAsync (dir, mode = DEFAULT_DIR_MODE) { static async ensureParentDirectoryExistsAsync (dir, mode = DEFAULT_DIR_MODE) {
return storage.ensureParentDirectoryExistsAsync(dir, mode) return ensureParentDirectoryExistsAsync(dir, mode)
} }
} }
// Interface // Interface
module.exports = Persistence export default Persistence

@ -8,10 +8,10 @@
* @module storage * @module storage
* @private * @private
*/ */
const fs = require('fs') import { constants as fsConstants, createWriteStream, createReadStream } from 'node:fs'
const fsPromises = fs.promises import { access, rename, writeFile, unlink, appendFile, readFile, mkdir, open } from 'node:fs/promises'
const path = require('path') import { dirname, parse, resolve } from 'node:path'
const { Readable } = require('stream') import { Readable } from 'node:stream'
const DEFAULT_DIR_MODE = 0o755 const DEFAULT_DIR_MODE = 0o755
const DEFAULT_FILE_MODE = 0o644 const DEFAULT_FILE_MODE = 0o644
@ -24,7 +24,7 @@ const DEFAULT_FILE_MODE = 0o644
* @alias module:storage.existsAsync * @alias module:storage.existsAsync
* @see module:storage.exists * @see module:storage.exists
*/ */
const existsAsync = file => fsPromises.access(file, fs.constants.F_OK).then(() => true, () => false) const existsAsync = file => access(file, fsConstants.F_OK).then(() => true, () => false)
/** /**
* Node.js' [fsPromises.rename]{@link https://nodejs.org/api/fs.html#fspromisesrenameoldpath-newpath} * Node.js' [fsPromises.rename]{@link https://nodejs.org/api/fs.html#fspromisesrenameoldpath-newpath}
@ -35,7 +35,7 @@ const existsAsync = file => fsPromises.access(file, fs.constants.F_OK).then(() =
* @alias module:storage.renameAsync * @alias module:storage.renameAsync
* @async * @async
*/ */
const renameAsync = fsPromises.rename const renameAsync = rename
/** /**
* Node.js' [fsPromises.writeFile]{@link https://nodejs.org/api/fs.html#fspromiseswritefilefile-data-options}. * Node.js' [fsPromises.writeFile]{@link https://nodejs.org/api/fs.html#fspromiseswritefilefile-data-options}.
@ -47,7 +47,7 @@ const renameAsync = fsPromises.rename
* @alias module:storage.writeFileAsync * @alias module:storage.writeFileAsync
* @async * @async
*/ */
const writeFileAsync = fsPromises.writeFile const writeFileAsync = writeFile
/** /**
* Node.js' [fs.createWriteStream]{@link https://nodejs.org/api/fs.html#fscreatewritestreampath-options}. * Node.js' [fs.createWriteStream]{@link https://nodejs.org/api/fs.html#fscreatewritestreampath-options}.
@ -57,7 +57,7 @@ const writeFileAsync = fsPromises.writeFile
* @return {fs.WriteStream} * @return {fs.WriteStream}
* @alias module:storage.writeFileStream * @alias module:storage.writeFileStream
*/ */
const writeFileStream = fs.createWriteStream const writeFileStream = createWriteStream
/** /**
* Node.js' [fsPromises.unlink]{@link https://nodejs.org/api/fs.html#fspromisesunlinkpath}. * Node.js' [fsPromises.unlink]{@link https://nodejs.org/api/fs.html#fspromisesunlinkpath}.
@ -67,7 +67,7 @@ const writeFileStream = fs.createWriteStream
* @async * @async
* @alias module:storage.unlinkAsync * @alias module:storage.unlinkAsync
*/ */
const unlinkAsync = fsPromises.unlink const unlinkAsync = unlink
/** /**
* Node.js' [fsPromises.appendFile]{@link https://nodejs.org/api/fs.html#fspromisesappendfilepath-data-options}. * Node.js' [fsPromises.appendFile]{@link https://nodejs.org/api/fs.html#fspromisesappendfilepath-data-options}.
@ -79,7 +79,7 @@ const unlinkAsync = fsPromises.unlink
* @alias module:storage.appendFileAsync * @alias module:storage.appendFileAsync
* @async * @async
*/ */
const appendFileAsync = fsPromises.appendFile const appendFileAsync = appendFile
/** /**
* Node.js' [fsPromises.readFile]{@link https://nodejs.org/api/fs.html#fspromisesreadfilepath-options}. * Node.js' [fsPromises.readFile]{@link https://nodejs.org/api/fs.html#fspromisesreadfilepath-options}.
@ -90,7 +90,7 @@ const appendFileAsync = fsPromises.appendFile
* @alias module:storage.readFileAsync * @alias module:storage.readFileAsync
* @async * @async
*/ */
const readFileAsync = fsPromises.readFile const readFileAsync = readFile
/** /**
* Node.js' [fs.createReadStream]{@link https://nodejs.org/api/fs.html#fscreatereadstreampath-options}. * Node.js' [fs.createReadStream]{@link https://nodejs.org/api/fs.html#fscreatereadstreampath-options}.
@ -100,7 +100,7 @@ const readFileAsync = fsPromises.readFile
* @return {fs.ReadStream} * @return {fs.ReadStream}
* @alias module:storage.readFileStream * @alias module:storage.readFileStream
*/ */
const readFileStream = fs.createReadStream const readFileStream = createReadStream
/** /**
* Node.js' [fsPromises.mkdir]{@link https://nodejs.org/api/fs.html#fspromisesmkdirpath-options}. * Node.js' [fsPromises.mkdir]{@link https://nodejs.org/api/fs.html#fspromisesmkdirpath-options}.
@ -111,7 +111,7 @@ const readFileStream = fs.createReadStream
* @alias module:storage.mkdirAsync * @alias module:storage.mkdirAsync
* @async * @async
*/ */
const mkdirAsync = fsPromises.mkdir const mkdirAsync = mkdir
/** /**
* Removes file if it exists. * Removes file if it exists.
@ -161,7 +161,7 @@ const flushToStorageAsync = async (options) => {
let filehandle, errorOnFsync, errorOnClose let filehandle, errorOnFsync, errorOnClose
try { try {
filehandle = await fsPromises.open(filename, flags, mode) filehandle = await open(filename, flags, mode)
try { try {
await filehandle.sync() await filehandle.sync()
} catch (errFS) { } catch (errFS) {
@ -236,7 +236,7 @@ const writeFileLinesAsync = (filename, lines, mode = DEFAULT_FILE_MODE) => new P
const crashSafeWriteFileLinesAsync = async (filename, lines, modes = { fileMode: DEFAULT_FILE_MODE, dirMode: DEFAULT_DIR_MODE }) => { const crashSafeWriteFileLinesAsync = async (filename, lines, modes = { fileMode: DEFAULT_FILE_MODE, dirMode: DEFAULT_DIR_MODE }) => {
const tempFilename = filename + '~' const tempFilename = filename + '~'
await flushToStorageAsync({ filename: path.dirname(filename), isDir: true, mode: modes.dirMode }) await flushToStorageAsync({ filename: dirname(filename), isDir: true, mode: modes.dirMode })
const exists = await existsAsync(filename) const exists = await existsAsync(filename)
if (exists) await flushToStorageAsync({ filename, mode: modes.fileMode }) if (exists) await flushToStorageAsync({ filename, mode: modes.fileMode })
@ -247,7 +247,7 @@ const crashSafeWriteFileLinesAsync = async (filename, lines, modes = { fileMode:
await renameAsync(tempFilename, filename) await renameAsync(tempFilename, filename)
await flushToStorageAsync({ filename: path.dirname(filename), isDir: true, mode: modes.dirMode }) await flushToStorageAsync({ filename: dirname(filename), isDir: true, mode: modes.dirMode })
} }
/** /**
@ -279,39 +279,26 @@ const ensureDatafileIntegrityAsync = async (filename, mode = DEFAULT_FILE_MODE)
* @private * @private
*/ */
const ensureParentDirectoryExistsAsync = async (filename, mode) => { const ensureParentDirectoryExistsAsync = async (filename, mode) => {
const dir = path.dirname(filename) const dir = dirname(filename)
const parsedDir = path.parse(path.resolve(dir)) const parsedDir = parse(resolve(dir))
// this is because on Windows mkdir throws a permission error when called on the root directory of a volume // this is because on Windows mkdir throws a permission error when called on the root directory of a volume
if (process.platform !== 'win32' || parsedDir.dir !== parsedDir.root || parsedDir.base !== '') { if (process.platform !== 'win32' || parsedDir.dir !== parsedDir.root || parsedDir.base !== '') {
await mkdirAsync(dir, { recursive: true, mode }) await mkdirAsync(dir, { recursive: true, mode })
} }
} }
// Interface // For tests only (not used by Nedb), not ported to browser/react-native
module.exports.existsAsync = existsAsync export { ensureParentDirectoryExistsAsync }
module.exports.renameAsync = renameAsync
module.exports.writeFileAsync = writeFileAsync
module.exports.writeFileLinesAsync = writeFileLinesAsync
module.exports.crashSafeWriteFileLinesAsync = crashSafeWriteFileLinesAsync
module.exports.appendFileAsync = appendFileAsync
module.exports.readFileAsync = readFileAsync
module.exports.unlinkAsync = unlinkAsync // Interface
export {
module.exports.mkdirAsync = mkdirAsync appendFileAsync,
crashSafeWriteFileLinesAsync,
module.exports.readFileStream = readFileStream ensureDatafileIntegrityAsync,
ensureFileDoesntExistAsync,
module.exports.flushToStorageAsync = flushToStorageAsync existsAsync,
readFileAsync,
module.exports.ensureDatafileIntegrityAsync = ensureDatafileIntegrityAsync readFileStream,
unlinkAsync,
module.exports.ensureFileDoesntExistAsync = ensureFileDoesntExistAsync mkdirAsync,
}
module.exports.ensureParentDirectoryExistsAsync = ensureParentDirectoryExistsAsync

@ -77,8 +77,8 @@ const pick = (object, keys) => {
const filterIndexNames = (indexNames) => ([k, v]) => !!(typeof v === 'string' || typeof v === 'number' || typeof v === 'boolean' || isDate(v) || v === null) && const filterIndexNames = (indexNames) => ([k, v]) => !!(typeof v === 'string' || typeof v === 'number' || typeof v === 'boolean' || isDate(v) || v === null) &&
indexNames.includes(k) indexNames.includes(k)
module.exports.uniq = uniq export { uniq }
module.exports.isDate = isDate export { isDate }
module.exports.isRegExp = isRegExp export { isRegExp }
module.exports.pick = pick export { pick }
module.exports.filterIndexNames = filterIndexNames export { filterIndexNames }

@ -45,4 +45,4 @@ class Waterfall {
} }
} }
module.exports = Waterfall export default Waterfall

@ -19,33 +19,37 @@
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
// IN THE SOFTWARE. // IN THE SOFTWARE.
const chai = require('chai') import chai from 'chai'
const fs = require('fs') import { createReadStream, createWriteStream, readFileSync, unlinkSync } from 'node:fs'
const path = require('path') import { dirname, join } from 'node:path'
const byline = require('../lib/byline') import { fileURLToPath } from 'node:url'
import { Buffer } from 'node:buffer'
import byline from '../src/byline.js'
const __dirname = dirname(fileURLToPath(import.meta.url))
const { assert } = chai const { assert } = chai
const regEx = /\r\n|[\n\v\f\r\x85\u2028\u2029]/g const regEx = /\r\n|[\n\v\f\r\x85\u2028\u2029]/g
const localPath = file => path.join(__dirname, 'byline', file) const localPath = file => join(__dirname, 'byline', file)
describe('byline', function () { describe('byline', function () {
it('should pipe a small file', function (done) { it('should pipe a small file', function (done) {
const input = fs.createReadStream(localPath('empty.txt')) const input = createReadStream(localPath('empty.txt'))
const lineStream = byline(input) // convinience API const lineStream = byline(input) // convenience API
const output = fs.createWriteStream(localPath('test.txt')) const output = createWriteStream(localPath('test.txt'))
lineStream.pipe(output) lineStream.pipe(output)
output.on('close', function () { output.on('close', function () {
const out = fs.readFileSync(localPath('test.txt'), 'utf8') const out = readFileSync(localPath('test.txt'), 'utf8')
const in_ = fs.readFileSync(localPath('empty.txt'), 'utf8').replace(/\r?\n/g, '') const in_ = readFileSync(localPath('empty.txt'), 'utf8').replace(/\r?\n/g, '')
assert.equal(in_, out) assert.equal(in_, out)
fs.unlinkSync(localPath('test.txt')) unlinkSync(localPath('test.txt'))
done() done()
}) })
}) })
it('should work with streams2 API', function (done) { it('should work with streams2 API', function (done) {
let stream = fs.createReadStream(localPath('empty.txt')) let stream = createReadStream(localPath('empty.txt'))
stream = byline(stream) stream = byline(stream)
stream.on('readable', function () { stream.on('readable', function () {
@ -60,7 +64,7 @@ describe('byline', function () {
}) })
it('should ignore empty lines by default', function (done) { it('should ignore empty lines by default', function (done) {
const input = fs.createReadStream(localPath('empty.txt')) const input = createReadStream(localPath('empty.txt'))
const lineStream = byline(input) const lineStream = byline(input)
lineStream.setEncoding('utf8') lineStream.setEncoding('utf8')
@ -70,7 +74,7 @@ describe('byline', function () {
}) })
lineStream.on('end', function () { lineStream.on('end', function () {
let lines2 = fs.readFileSync(localPath('empty.txt'), 'utf8').split(regEx) let lines2 = readFileSync(localPath('empty.txt'), 'utf8').split(regEx)
lines2 = lines2.filter(function (line) { lines2 = lines2.filter(function (line) {
return line.length > 0 return line.length > 0
}) })
@ -80,7 +84,7 @@ describe('byline', function () {
}) })
it('should keep empty lines when keepEmptyLines is true', function (done) { it('should keep empty lines when keepEmptyLines is true', function (done) {
const input = fs.createReadStream(localPath('empty.txt')) const input = createReadStream(localPath('empty.txt'))
const lineStream = byline(input, { keepEmptyLines: true }) const lineStream = byline(input, { keepEmptyLines: true })
lineStream.setEncoding('utf8') lineStream.setEncoding('utf8')
@ -96,7 +100,7 @@ describe('byline', function () {
}) })
it('should not split a CRLF which spans two chunks', function (done) { it('should not split a CRLF which spans two chunks', function (done) {
const input = fs.createReadStream(localPath('CRLF.txt')) const input = createReadStream(localPath('CRLF.txt'))
const lineStream = byline(input, { keepEmptyLines: true }) const lineStream = byline(input, { keepEmptyLines: true })
lineStream.setEncoding('utf8') lineStream.setEncoding('utf8')
@ -121,11 +125,11 @@ describe('byline', function () {
}) })
function readFile (filename, done) { function readFile (filename, done) {
const input = fs.createReadStream(filename) const input = createReadStream(filename)
const lineStream = byline(input) const lineStream = byline(input)
lineStream.setEncoding('utf8') lineStream.setEncoding('utf8')
let lines2 = fs.readFileSync(filename, 'utf8').split(regEx) let lines2 = readFileSync(filename, 'utf8').split(regEx)
lines2 = lines2.filter(function (line) { lines2 = lines2.filter(function (line) {
return line.length > 0 return line.length > 0
}) })
@ -158,11 +162,11 @@ describe('byline', function () {
}) })
it('should pause() and resume() with a huge file', function (done) { it('should pause() and resume() with a huge file', function (done) {
const input = fs.createReadStream(localPath('rfc_huge.txt')) const input = createReadStream(localPath('rfc_huge.txt'))
const lineStream = byline(input) const lineStream = byline(input)
lineStream.setEncoding('utf8') lineStream.setEncoding('utf8')
let lines2 = fs.readFileSync(localPath('rfc_huge.txt'), 'utf8').split(regEx) let lines2 = readFileSync(localPath('rfc_huge.txt'), 'utf8').split(regEx)
lines2 = lines2.filter(function (line) { lines2 = lines2.filter(function (line) {
return line.length > 0 return line.length > 0
}) })
@ -193,8 +197,8 @@ describe('byline', function () {
}) })
function areStreamsEqualTypes (options, callback) { function areStreamsEqualTypes (options, callback) {
const fsStream = fs.createReadStream(localPath('empty.txt'), options) const fsStream = createReadStream(localPath('empty.txt'), options)
const lineStream = byline(fs.createReadStream(localPath('empty.txt'), options)) const lineStream = byline(createReadStream(localPath('empty.txt'), options))
fsStream.on('data', function (data1) { fsStream.on('data', function (data1) {
lineStream.on('data', function (data2) { lineStream.on('data', function (data2) {
assert.equal(Buffer.isBuffer(data1), Buffer.isBuffer(data2)) assert.equal(Buffer.isBuffer(data1), Buffer.isBuffer(data2))

@ -1,11 +1,12 @@
/* eslint-env mocha */ /* eslint-env mocha */
import fs from 'node:fs/promises'
import assert from 'node:assert/strict'
import Datastore from '../src/datastore.js'
import Persistence from '../src/persistence.js'
import Cursor from '../src/cursor.js'
import { exists } from './fsUtils.test.js'
const testDb = 'workspace/test.db' const testDb = 'workspace/test.db'
const { promises: fs } = require('fs')
const assert = require('assert').strict
const Datastore = require('../lib/datastore')
const Persistence = require('../lib/persistence')
const Cursor = require('../lib/cursor')
const { exists } = require('./utils.test.js')
describe('Cursor Async', function () { describe('Cursor Async', function () {
let d let d

@ -1,12 +1,13 @@
/* eslint-env mocha */ /* eslint-env mocha */
const chai = require('chai') import fs from 'node:fs'
import { callbackify } from 'node:util'
import chai from 'chai'
import { each, waterfall } from './utils.test.js'
import Datastore from '../src/datastore.js'
import Persistence from '../src/persistence.js'
import Cursor from '../src/cursor.js'
const testDb = 'workspace/test.db' const testDb = 'workspace/test.db'
const fs = require('fs')
const { each, waterfall } = require('./utils.test.js')
const Datastore = require('../lib/datastore')
const Persistence = require('../lib/persistence')
const Cursor = require('../lib/cursor')
const { callbackify } = require('util')
const { assert } = chai const { assert } = chai
chai.should() chai.should()

@ -1,20 +1,20 @@
/* eslint-env mocha */ /* eslint-env mocha */
const chai = require('chai') import chai from 'chai'
const customUtils = require('../lib/customUtils') import { uid } from '../src/customUtils.js'
chai.should() chai.should()
describe('customUtils', function () { describe('customUtils', function () {
describe('uid', function () { describe('uid', function () {
it('Generates a string of the expected length', function () { it('Generates a string of the expected length', function () {
customUtils.uid(3).length.should.equal(3) uid(3).length.should.equal(3)
customUtils.uid(16).length.should.equal(16) uid(16).length.should.equal(16)
customUtils.uid(42).length.should.equal(42) uid(42).length.should.equal(42)
customUtils.uid(1000).length.should.equal(1000) uid(1000).length.should.equal(1000)
}) })
// Very small probability of conflict // Very small probability of conflict
it('Generated uids should not be the same', function () { it('Generated uids should not be the same', function () {
customUtils.uid(56).should.not.equal(customUtils.uid(56)) uid(56).should.not.equal(uid(56))
}) })
}) })
}) })

@ -1,12 +1,14 @@
/* eslint-env mocha */ /* eslint-env mocha */
import fs from 'node:fs/promises'
import assert from 'node:assert/strict'
import * as model from '../src/model.js'
import Datastore from '../src/datastore.js'
import Persistence from '../src/persistence.js'
import { wait } from './utils.test.js'
import { exists } from './fsUtils.test.js'
const testDb = 'workspace/test.db' const testDb = 'workspace/test.db'
const { promises: fs } = require('fs')
const assert = require('assert').strict
const model = require('../lib/model')
const Datastore = require('../lib/datastore')
const Persistence = require('../lib/persistence')
const { wait } = require('./utils.test')
const { exists } = require('./utils.test.js')
const reloadTimeUpperBound = 200 // In ms, an upper bound for the reload time used to check createdAt and updatedAt const reloadTimeUpperBound = 200 // In ms, an upper bound for the reload time used to check createdAt and updatedAt
describe('Database async', function () { describe('Database async', function () {

@ -1,12 +1,14 @@
/* eslint-env mocha */ /* eslint-env mocha */
const chai = require('chai') import fs from 'node:fs'
import { callbackify } from 'node:util'
import chai from 'chai'
import { apply, each, waterfall } from './utils.test.js'
import * as model from '../src/model.js'
import Datastore from '../src/datastore.js'
import Persistence from '../src/persistence.js'
const testDb = 'workspace/test.db' const testDb = 'workspace/test.db'
const fs = require('fs')
const { apply, each, waterfall } = require('./utils.test.js')
const model = require('../lib/model')
const Datastore = require('../lib/datastore')
const Persistence = require('../lib/persistence')
const { callbackify } = require('util')
const reloadTimeUpperBound = 200 // In ms, an upper bound for the reload time used to check createdAt and updatedAt const reloadTimeUpperBound = 200 // In ms, an upper bound for the reload time used to check createdAt and updatedAt
const { assert } = chai const { assert } = chai

@ -1,10 +1,11 @@
/* eslint-env mocha */ /* eslint-env mocha */
import fs from 'node:fs/promises'
import assert from 'node:assert/strict'
import Datastore from '../src/datastore.js'
import Persistence from '../src/persistence.js'
import { exists } from './fsUtils.test.js'
const testDb = 'workspace/test.db' const testDb = 'workspace/test.db'
const { promises: fs } = require('fs')
const assert = require('assert').strict
const Datastore = require('../lib/datastore')
const Persistence = require('../lib/persistence')
const { exists } = require('./utils.test.js')
// Test that operations are executed in the right order // Test that operations are executed in the right order
// We prevent Mocha from catching the exception we throw on purpose by remembering all current handlers, remove them and register them back after test ends // We prevent Mocha from catching the exception we throw on purpose by remembering all current handlers, remove them and register them back after test ends

@ -1,11 +1,12 @@
/* eslint-env mocha */ /* eslint-env mocha */
const chai = require('chai') import fs from 'node:fs'
import { callbackify } from 'node:util'
import chai from 'chai'
import { waterfall } from './utils.test.js'
import Datastore from '../src/datastore.js'
import Persistence from '../src/persistence.js'
const testDb = 'workspace/test.db' const testDb = 'workspace/test.db'
const fs = require('fs')
const { waterfall } = require('./utils.test.js')
const Datastore = require('../lib/datastore')
const Persistence = require('../lib/persistence')
const { callbackify } = require('util')
const { assert } = chai const { assert } = chai
chai.should() chai.should()

@ -0,0 +1,7 @@
import fs from 'node:fs/promises'
import { constants as fsConstants } from 'node:fs'
export const exists = path => fs.access(path, fsConstants.FS_OK).then(() => true, () => false)
// eslint-disable-next-line n/no-callback-literal
export const existsCallback = (path, callback) => fs.access(path, fsConstants.FS_OK).then(() => callback(true), () => callback(false))

@ -1,6 +1,6 @@
/* eslint-env mocha */ /* eslint-env mocha */
const Index = require('../lib/indexes') import chai from 'chai'
const chai = require('chai') import Index from '../src/indexes.js'
const { assert } = chai const { assert } = chai
chai.should() chai.should()

@ -1,9 +1,9 @@
/* eslint-env mocha */ /* eslint-env mocha */
const model = require('../lib/model') import fs from 'node:fs'
const chai = require('chai') import util from 'node:util'
const util = require('util') import chai from 'chai'
const Datastore = require('../lib/datastore') import * as model from '../src/model.js'
const fs = require('fs') import Datastore from '../src/datastore.js'
const { assert, expect } = chai const { assert, expect } = chai
chai.should() chai.should()
@ -370,14 +370,6 @@ describe('Model', function () {
totally: { doesnt: { exist: 'now it does' } } totally: { doesnt: { exist: 'now it does' } }
}) })
}) })
it('Doesn\'t replace a falsy field by an object when recursively following dot notation', function () {
const obj = { nested: false }
const updateQuery = { $set: { 'nested.now': 'it is' } }
const modified = model.modify(obj, updateQuery)
assert.deepStrictEqual(modified, { nested: false }) // Object not modified as the nested field doesn't exist
})
}) // End of '$set modifier' }) // End of '$set modifier'
describe('$unset modifier', function () { describe('$unset modifier', function () {
@ -794,10 +786,10 @@ describe('Model', function () {
model.compareThings(undefined, undefined).should.equal(0) model.compareThings(undefined, undefined).should.equal(0)
otherStuff.forEach(function (stuff) { for (const stuff of otherStuff) {
model.compareThings(undefined, stuff).should.equal(-1) model.compareThings(undefined, stuff).should.equal(-1)
model.compareThings(stuff, undefined).should.equal(1) model.compareThings(stuff, undefined).should.equal(1)
}) }
}) })
it('Then null', function () { it('Then null', function () {
@ -805,10 +797,10 @@ describe('Model', function () {
model.compareThings(null, null).should.equal(0) model.compareThings(null, null).should.equal(0)
otherStuff.forEach(function (stuff) { for (const stuff of otherStuff) {
model.compareThings(null, stuff).should.equal(-1) model.compareThings(null, stuff).should.equal(-1)
model.compareThings(stuff, null).should.equal(1) model.compareThings(stuff, null).should.equal(1)
}) }
}) })
it('Then numbers', function () { it('Then numbers', function () {
@ -823,12 +815,12 @@ describe('Model', function () {
model.compareThings(-2.6, -2.6).should.equal(0) model.compareThings(-2.6, -2.6).should.equal(0)
model.compareThings(5, 5).should.equal(0) model.compareThings(5, 5).should.equal(0)
otherStuff.forEach(function (stuff) { for (const stuff of otherStuff) {
numbers.forEach(function (number) { numbers.forEach(function (number) {
model.compareThings(number, stuff).should.equal(-1) model.compareThings(number, stuff).should.equal(-1)
model.compareThings(stuff, number).should.equal(1) model.compareThings(stuff, number).should.equal(1)
}) })
}) }
}) })
it('Then strings', function () { it('Then strings', function () {
@ -840,12 +832,12 @@ describe('Model', function () {
model.compareThings('hey', 'hew').should.equal(1) model.compareThings('hey', 'hew').should.equal(1)
model.compareThings('hey', 'hey').should.equal(0) model.compareThings('hey', 'hey').should.equal(0)
otherStuff.forEach(function (stuff) { for (const stuff of otherStuff) {
strings.forEach(function (string) { strings.forEach(function (string) {
model.compareThings(string, stuff).should.equal(-1) model.compareThings(string, stuff).should.equal(-1)
model.compareThings(stuff, string).should.equal(1) model.compareThings(stuff, string).should.equal(1)
}) })
}) }
}) })
it('Then booleans', function () { it('Then booleans', function () {
@ -857,12 +849,12 @@ describe('Model', function () {
model.compareThings(true, false).should.equal(1) model.compareThings(true, false).should.equal(1)
model.compareThings(false, true).should.equal(-1) model.compareThings(false, true).should.equal(-1)
otherStuff.forEach(function (stuff) { for (const stuff of otherStuff) {
bools.forEach(function (bool) { for (const bool of bools) {
model.compareThings(bool, stuff).should.equal(-1) model.compareThings(bool, stuff).should.equal(-1)
model.compareThings(stuff, bool).should.equal(1) model.compareThings(stuff, bool).should.equal(1)
}) }
}) }
}) })
it('Then dates', function () { it('Then dates', function () {
@ -876,12 +868,12 @@ describe('Model', function () {
model.compareThings(new Date(0), new Date(-54341)).should.equal(1) model.compareThings(new Date(0), new Date(-54341)).should.equal(1)
model.compareThings(new Date(123), new Date(4341)).should.equal(-1) model.compareThings(new Date(123), new Date(4341)).should.equal(-1)
otherStuff.forEach(function (stuff) { for (const stuff of otherStuff) {
dates.forEach(function (date) { for (const date of dates) {
model.compareThings(date, stuff).should.equal(-1) model.compareThings(date, stuff).should.equal(-1)
model.compareThings(stuff, date).should.equal(1) model.compareThings(stuff, date).should.equal(1)
}) }
}) }
}) })
it('Then arrays', function () { it('Then arrays', function () {
@ -896,12 +888,12 @@ describe('Model', function () {
model.compareThings(['hello', 'zzz'], ['hello', 'world']).should.equal(1) model.compareThings(['hello', 'zzz'], ['hello', 'world']).should.equal(1)
model.compareThings(['hello', 'world'], ['hello', 'world']).should.equal(0) model.compareThings(['hello', 'world'], ['hello', 'world']).should.equal(0)
otherStuff.forEach(function (stuff) { for (const stuff of otherStuff) {
arrays.forEach(function (array) { for (const array of arrays) {
model.compareThings(array, stuff).should.equal(-1) model.compareThings(array, stuff).should.equal(-1)
model.compareThings(stuff, array).should.equal(1) model.compareThings(stuff, array).should.equal(1)
}) }
}) }
}) })
it('And finally objects', function () { it('And finally objects', function () {
@ -1341,23 +1333,6 @@ describe('Model', function () {
model.match({ a: 5 }, { a: { $size: 1 } }).should.equal(false) model.match({ a: 5 }, { a: { $size: 1 } }).should.equal(false)
}) })
it('Can use $size several times in the same matcher', function () {
model.match({ childrens: ['Riri', 'Fifi', 'Loulou'] }, {
childrens: {
$size: 3,
// eslint-disable-next-line no-dupe-keys
$size: 3
}
}).should.equal(true)
model.match({ childrens: ['Riri', 'Fifi', 'Loulou'] }, {
childrens: {
$size: 3,
// eslint-disable-next-line no-dupe-keys
$size: 4
}
}).should.equal(false) // Of course this can never be true
})
it('Can query array documents with multiple simultaneous conditions', function () { it('Can query array documents with multiple simultaneous conditions', function () {
// Non nested documents // Non nested documents
model.match({ model.match({
@ -1491,10 +1466,9 @@ describe('Model', function () {
}) })
it('Should throw an error if a logical operator is used without an array or if an unknown logical operator is used', function () { it('Should throw an error if a logical operator is used without an array or if an unknown logical operator is used', function () {
(function () { model.match({ a: 5 }, { $or: { a: 5, b: 6 } }) }).should.throw();
// eslint-disable-next-line no-dupe-keys // eslint-disable-next-line no-dupe-keys
(function () { model.match({ a: 5 }, { $or: { a: 5, a: 6 } }) }).should.throw(); (function () { model.match({ a: 5 }, { $and: { a: 5, b: 6 } }) }).should.throw();
// eslint-disable-next-line no-dupe-keys
(function () { model.match({ a: 5 }, { $and: { a: 5, a: 6 } }) }).should.throw();
(function () { model.match({ a: 5 }, { $unknown: [{ a: 5 }] }) }).should.throw() (function () { model.match({ a: 5 }, { $unknown: [{ a: 5 }] }) }).should.throw()
}) })
}) })
@ -1547,8 +1521,6 @@ describe('Model', function () {
model.match({ tags: ['node', 'js', 'db'] }, { tags: 'python' }).should.equal(false) model.match({ tags: ['node', 'js', 'db'] }, { tags: 'python' }).should.equal(false)
model.match({ tags: ['node', 'js', 'db'] }, { tagss: 'js' }).should.equal(false) model.match({ tags: ['node', 'js', 'db'] }, { tagss: 'js' }).should.equal(false)
model.match({ tags: ['node', 'js', 'db'] }, { tags: 'js' }).should.equal(true) model.match({ tags: ['node', 'js', 'db'] }, { tags: 'js' }).should.equal(true)
// eslint-disable-next-line no-dupe-keys
model.match({ tags: ['node', 'js', 'db'] }, { tags: 'js', tags: 'node' }).should.equal(true)
// Mixed matching with array and non array // Mixed matching with array and non array
model.match({ tags: ['node', 'js', 'db'], nedb: true }, { tags: 'js', nedb: true }).should.equal(true) model.match({ tags: ['node', 'js', 'db'], nedb: true }, { tags: 'js', nedb: true }).should.equal(true)

@ -1,19 +1,19 @@
/* eslint-env mocha */ /* eslint-env mocha */
import { execFile, fork } from 'node:child_process'
import { promisify } from 'node:util'
import { once } from 'node:events'
import { Readable } from 'node:stream'
import fs from 'node:fs/promises'
import path from 'node:path'
import assert from 'node:assert/strict'
import { wait } from './utils.test.js'
import { exists } from './fsUtils.test.js'
import * as model from '../src/model.js'
import Datastore from '../src/datastore.js'
import Persistence from '../src/persistence.js'
import { ensureFileDoesntExistAsync, ensureDatafileIntegrityAsync } from '../src/storage.js'
const testDb = 'workspace/test.db' const testDb = 'workspace/test.db'
const { promises: fs } = require('fs')
const path = require('path')
const assert = require('assert').strict
const { exists } = require('./utils.test.js')
const model = require('../lib/model')
const Datastore = require('../lib/datastore')
const Persistence = require('../lib/persistence')
const storage = require('../lib/storage')
const { execFile, fork } = require('child_process')
const { promisify } = require('util')
const { ensureFileDoesntExistAsync } = require('../lib/storage')
const { once } = require('events')
const { wait } = require('./utils.test')
const Readable = require('stream').Readable
describe('Persistence async', function () { describe('Persistence async', function () {
let d let d
@ -267,7 +267,7 @@ describe('Persistence async', function () {
const data = (await fs.readFile(d.filename, 'utf8')).split('\n') const data = (await fs.readFile(d.filename, 'utf8')).split('\n')
let filledCount = 0 let filledCount = 0
data.forEach(item => { if (item.length > 0) { filledCount += 1 } }) for (const item of data) { if (item.length > 0) { filledCount += 1 } }
assert.equal(filledCount, 3) assert.equal(filledCount, 3)
await d.loadDatabaseAsync() await d.loadDatabaseAsync()
@ -276,7 +276,7 @@ describe('Persistence async', function () {
const data2 = (await fs.readFile(d.filename, 'utf8')).split('\n') const data2 = (await fs.readFile(d.filename, 'utf8')).split('\n')
filledCount = 0 filledCount = 0
data2.forEach(function (item) { if (item.length > 0) { filledCount += 1 } }) for (const item of data2) { if (item.length > 0) { filledCount += 1 } }
assert.equal(filledCount, 1) assert.equal(filledCount, 1)
}) })
@ -422,7 +422,7 @@ describe('Persistence async', function () {
it('Declaring only one hook will throw an exception to prevent data loss', async () => { it('Declaring only one hook will throw an exception to prevent data loss', async () => {
const hookTestFilename = 'workspace/hookTest.db' const hookTestFilename = 'workspace/hookTest.db'
await storage.ensureFileDoesntExistAsync(hookTestFilename) await ensureFileDoesntExistAsync(hookTestFilename)
await fs.writeFile(hookTestFilename, 'Some content', 'utf8') await fs.writeFile(hookTestFilename, 'Some content', 'utf8')
assert.throws(() => { assert.throws(() => {
// eslint-disable-next-line no-new // eslint-disable-next-line no-new
@ -449,7 +449,7 @@ describe('Persistence async', function () {
it('Declaring two hooks that are not reverse of one another will cause an exception to prevent data loss', async () => { it('Declaring two hooks that are not reverse of one another will cause an exception to prevent data loss', async () => {
const hookTestFilename = 'workspace/hookTest.db' const hookTestFilename = 'workspace/hookTest.db'
await storage.ensureFileDoesntExistAsync(hookTestFilename) await ensureFileDoesntExistAsync(hookTestFilename)
await fs.writeFile(hookTestFilename, 'Some content', 'utf8') await fs.writeFile(hookTestFilename, 'Some content', 'utf8')
assert.throws(() => { assert.throws(() => {
// eslint-disable-next-line no-new // eslint-disable-next-line no-new
@ -467,7 +467,7 @@ describe('Persistence async', function () {
it('Declaring two hooks that are not reverse of one another will not cause exception if options.testSerializationHooks === false', async () => { it('Declaring two hooks that are not reverse of one another will not cause exception if options.testSerializationHooks === false', async () => {
const hookTestFilename = 'workspace/hookTest.db' const hookTestFilename = 'workspace/hookTest.db'
await storage.ensureFileDoesntExistAsync(hookTestFilename) await ensureFileDoesntExistAsync(hookTestFilename)
await fs.writeFile(hookTestFilename, 'Some content', 'utf8') await fs.writeFile(hookTestFilename, 'Some content', 'utf8')
const db = new Datastore({ const db = new Datastore({
filename: hookTestFilename, filename: hookTestFilename,
@ -481,7 +481,7 @@ describe('Persistence async', function () {
it('A serialization hook can be used to transform data before writing new state to disk', async () => { it('A serialization hook can be used to transform data before writing new state to disk', async () => {
const hookTestFilename = 'workspace/hookTest.db' const hookTestFilename = 'workspace/hookTest.db'
await storage.ensureFileDoesntExistAsync(hookTestFilename) await ensureFileDoesntExistAsync(hookTestFilename)
const d = new Datastore({ const d = new Datastore({
filename: hookTestFilename, filename: hookTestFilename,
autoload: true, autoload: true,
@ -549,7 +549,7 @@ describe('Persistence async', function () {
it('Use serialization hook when persisting cached database or compacting', async () => { it('Use serialization hook when persisting cached database or compacting', async () => {
const hookTestFilename = 'workspace/hookTest.db' const hookTestFilename = 'workspace/hookTest.db'
await storage.ensureFileDoesntExistAsync(hookTestFilename) await ensureFileDoesntExistAsync(hookTestFilename)
const d = new Datastore({ const d = new Datastore({
filename: hookTestFilename, filename: hookTestFilename,
autoload: true, autoload: true,
@ -600,7 +600,7 @@ describe('Persistence async', function () {
it('Deserialization hook is correctly used when loading data', async () => { it('Deserialization hook is correctly used when loading data', async () => {
const hookTestFilename = 'workspace/hookTest.db' const hookTestFilename = 'workspace/hookTest.db'
await storage.ensureFileDoesntExistAsync(hookTestFilename) await ensureFileDoesntExistAsync(hookTestFilename)
const d = new Datastore({ const d = new Datastore({
filename: hookTestFilename, filename: hookTestFilename,
autoload: true, autoload: true,
@ -656,7 +656,7 @@ describe('Persistence async', function () {
assert.equal(await exists('workspace/it.db'), false) assert.equal(await exists('workspace/it.db'), false)
assert.equal(await exists('workspace/it.db~'), false) assert.equal(await exists('workspace/it.db~'), false)
await storage.ensureDatafileIntegrityAsync(p.filename) await ensureDatafileIntegrityAsync(p.filename)
assert.equal(await exists('workspace/it.db'), true) assert.equal(await exists('workspace/it.db'), true)
assert.equal(await exists('workspace/it.db~'), false) assert.equal(await exists('workspace/it.db~'), false)
@ -675,7 +675,7 @@ describe('Persistence async', function () {
assert.equal(await exists('workspace/it.db'), true) assert.equal(await exists('workspace/it.db'), true)
assert.equal(await exists('workspace/it.db~'), false) assert.equal(await exists('workspace/it.db~'), false)
await storage.ensureDatafileIntegrityAsync(p.filename) await ensureDatafileIntegrityAsync(p.filename)
assert.equal(await exists('workspace/it.db'), true) assert.equal(await exists('workspace/it.db'), true)
assert.equal(await exists('workspace/it.db~'), false) assert.equal(await exists('workspace/it.db~'), false)
@ -694,7 +694,7 @@ describe('Persistence async', function () {
assert.equal(await exists('workspace/it.db'), false) assert.equal(await exists('workspace/it.db'), false)
assert.equal(await exists('workspace/it.db~'), true) assert.equal(await exists('workspace/it.db~'), true)
await storage.ensureDatafileIntegrityAsync(p.filename) await ensureDatafileIntegrityAsync(p.filename)
assert.equal(await exists('workspace/it.db'), true) assert.equal(await exists('workspace/it.db'), true)
assert.equal(await exists('workspace/it.db~'), false) assert.equal(await exists('workspace/it.db~'), false)
@ -715,7 +715,7 @@ describe('Persistence async', function () {
assert.equal(await exists('workspace/it.db'), true) assert.equal(await exists('workspace/it.db'), true)
assert.equal(await exists('workspace/it.db~'), true) assert.equal(await exists('workspace/it.db~'), true)
await storage.ensureDatafileIntegrityAsync(theDb.persistence.filename) await ensureDatafileIntegrityAsync(theDb.persistence.filename)
assert.equal(await exists('workspace/it.db'), true) assert.equal(await exists('workspace/it.db'), true)
assert.equal(await exists('workspace/it.db~'), true) assert.equal(await exists('workspace/it.db~'), true)
@ -812,8 +812,8 @@ describe('Persistence async', function () {
it('Persistence works as expected when everything goes fine', async () => { it('Persistence works as expected when everything goes fine', async () => {
const dbFile = 'workspace/test2.db' const dbFile = 'workspace/test2.db'
await storage.ensureFileDoesntExistAsync(dbFile) await ensureFileDoesntExistAsync(dbFile)
await storage.ensureFileDoesntExistAsync(dbFile + '~') await ensureFileDoesntExistAsync(dbFile + '~')
const theDb = new Datastore({ filename: dbFile }) const theDb = new Datastore({ filename: dbFile })
await theDb.loadDatabaseAsync() await theDb.loadDatabaseAsync()
@ -872,7 +872,7 @@ describe('Persistence async', function () {
assert(datafileLength > 5000) assert(datafileLength > 5000)
// Loading it in a separate process that we will crash before finishing the loadDatabase // Loading it in a separate process that we will crash before finishing the loadDatabase
const child = fork('test_lac/loadAndCrash.test', [], { stdio: 'inherit' }) const child = fork('test_lac/loadAndCrash.test.cjs', [], { stdio: 'inherit' })
const [code] = await once(child, 'exit') const [code] = await once(child, 'exit')
assert.equal(code, 1) // See test_lac/loadAndCrash.test.js assert.equal(code, 1) // See test_lac/loadAndCrash.test.js
@ -900,7 +900,7 @@ describe('Persistence async', function () {
// Not run on Windows as there is no clean way to set maximum file descriptors. Not an issue as the code itself is tested. // Not run on Windows as there is no clean way to set maximum file descriptors. Not an issue as the code itself is tested.
it('Cannot cause EMFILE errors by opening too many file descriptors', async function () { it('Cannot cause EMFILE errors by opening too many file descriptors', async function () {
this.timeout(10000) this.timeout(10000)
if (process.platform === 'win32' || process.platform === 'win64') { return } if (process.platform === 'win32' || process.platform === 'win64') { this.skip() }
try { try {
const { stdout, stderr } = await promisify(execFile)('test_lac/openFdsLaunch.sh') const { stdout, stderr } = await promisify(execFile)('test_lac/openFdsLaunch.sh')
// The subprocess will not output anything to stdout unless part of the test fails // The subprocess will not output anything to stdout unless part of the test fails
@ -919,7 +919,7 @@ describe('Persistence async', function () {
describe('ensureFileDoesntExist', function () { describe('ensureFileDoesntExist', function () {
it('Doesnt do anything if file already doesnt exist', async () => { it('Doesnt do anything if file already doesnt exist', async () => {
await storage.ensureFileDoesntExistAsync('workspace/nonexisting') await ensureFileDoesntExistAsync('workspace/nonexisting')
assert.equal(await exists('workspace/nonexisting'), false) assert.equal(await exists('workspace/nonexisting'), false)
}) })
@ -927,7 +927,7 @@ describe('Persistence async', function () {
await fs.writeFile('workspace/existing', 'hello world', 'utf8') await fs.writeFile('workspace/existing', 'hello world', 'utf8')
assert.equal(await exists('workspace/existing'), true) assert.equal(await exists('workspace/existing'), true)
await storage.ensureFileDoesntExistAsync('workspace/existing') await ensureFileDoesntExistAsync('workspace/existing')
assert.equal(await exists('workspace/existing'), false) assert.equal(await exists('workspace/existing'), false)
}) })
}) // ==== End of 'ensureFileDoesntExist' ==== }) // ==== End of 'ensureFileDoesntExist' ====

@ -1,17 +1,18 @@
/* eslint-env mocha */ /* eslint-env mocha */
const chai = require('chai')
import { callbackify } from 'node:util'
import fs from 'node:fs'
import { Readable } from 'node:stream'
import { execFile, fork } from 'node:child_process'
import chai from 'chai'
import { apply, waterfall } from './utils.test.js'
import { existsCallback } from './fsUtils.test.js'
import * as model from '../src/model.js'
import Datastore from '../src/datastore.js'
import Persistence from '../src/persistence.js'
import { ensureFileDoesntExistAsync, ensureDatafileIntegrityAsync } from '../src/storage.js'
const testDb = 'workspace/test.db' const testDb = 'workspace/test.db'
const fs = require('fs')
const { apply, waterfall } = require('./utils.test.js')
const model = require('../lib/model')
const Datastore = require('../lib/datastore')
const Persistence = require('../lib/persistence')
const storage = require('../lib/storage')
const { execFile, fork } = require('child_process')
const { callbackify } = require('util')
const { existsCallback } = require('./utils.test')
const { ensureFileDoesntExistAsync } = require('../lib/storage')
const Readable = require('stream').Readable
const { assert } = chai const { assert } = chai
chai.should() chai.should()
@ -304,7 +305,7 @@ describe('Persistence', function () {
const data = fs.readFileSync(d.filename, 'utf8').split('\n') const data = fs.readFileSync(d.filename, 'utf8').split('\n')
let filledCount = 0 let filledCount = 0
data.forEach(function (item) { if (item.length > 0) { filledCount += 1 } }) for (const item of data) { if (item.length > 0) { filledCount += 1 } }
filledCount.should.equal(3) filledCount.should.equal(3)
d.loadDatabase(function (err) { d.loadDatabase(function (err) {
@ -314,7 +315,7 @@ describe('Persistence', function () {
const data = fs.readFileSync(d.filename, 'utf8').split('\n') const data = fs.readFileSync(d.filename, 'utf8').split('\n')
let filledCount = 0 let filledCount = 0
data.forEach(function (item) { if (item.length > 0) { filledCount += 1 } }) for (const item of data) { if (item.length > 0) { filledCount += 1 } }
filledCount.should.equal(1) filledCount.should.equal(1)
done() done()
@ -467,7 +468,7 @@ describe('Persistence', function () {
it('Declaring only one hook will throw an exception to prevent data loss', function (done) { it('Declaring only one hook will throw an exception to prevent data loss', function (done) {
const hookTestFilename = 'workspace/hookTest.db' const hookTestFilename = 'workspace/hookTest.db'
callbackify(storage.ensureFileDoesntExistAsync)(hookTestFilename, function () { callbackify(ensureFileDoesntExistAsync)(hookTestFilename, function () {
fs.writeFileSync(hookTestFilename, 'Some content', 'utf8'); fs.writeFileSync(hookTestFilename, 'Some content', 'utf8');
(function () { (function () {
@ -500,7 +501,7 @@ describe('Persistence', function () {
it('Declaring two hooks that are not reverse of one another will cause an exception to prevent data loss', function (done) { it('Declaring two hooks that are not reverse of one another will cause an exception to prevent data loss', function (done) {
const hookTestFilename = 'workspace/hookTest.db' const hookTestFilename = 'workspace/hookTest.db'
callbackify(storage.ensureFileDoesntExistAsync)(hookTestFilename, function () { callbackify(ensureFileDoesntExistAsync)(hookTestFilename, function () {
fs.writeFileSync(hookTestFilename, 'Some content', 'utf8'); fs.writeFileSync(hookTestFilename, 'Some content', 'utf8');
(function () { (function () {
@ -522,7 +523,7 @@ describe('Persistence', function () {
it('A serialization hook can be used to transform data before writing new state to disk', function (done) { it('A serialization hook can be used to transform data before writing new state to disk', function (done) {
const hookTestFilename = 'workspace/hookTest.db' const hookTestFilename = 'workspace/hookTest.db'
callbackify(storage.ensureFileDoesntExistAsync)(hookTestFilename, function () { callbackify(ensureFileDoesntExistAsync)(hookTestFilename, function () {
const d = new Datastore({ const d = new Datastore({
filename: hookTestFilename, filename: hookTestFilename,
autoload: true, autoload: true,
@ -599,7 +600,7 @@ describe('Persistence', function () {
it('Use serialization hook when persisting cached database or compacting', function (done) { it('Use serialization hook when persisting cached database or compacting', function (done) {
const hookTestFilename = 'workspace/hookTest.db' const hookTestFilename = 'workspace/hookTest.db'
callbackify(storage.ensureFileDoesntExistAsync)(hookTestFilename, function () { callbackify(ensureFileDoesntExistAsync)(hookTestFilename, function () {
const d = new Datastore({ const d = new Datastore({
filename: hookTestFilename, filename: hookTestFilename,
autoload: true, autoload: true,
@ -659,7 +660,7 @@ describe('Persistence', function () {
it('Deserialization hook is correctly used when loading data', function (done) { it('Deserialization hook is correctly used when loading data', function (done) {
const hookTestFilename = 'workspace/hookTest.db' const hookTestFilename = 'workspace/hookTest.db'
callbackify(storage.ensureFileDoesntExistAsync)(hookTestFilename, function () { callbackify(ensureFileDoesntExistAsync)(hookTestFilename, function () {
const d = new Datastore({ const d = new Datastore({
filename: hookTestFilename, filename: hookTestFilename,
autoload: true, autoload: true,
@ -727,7 +728,7 @@ describe('Persistence', function () {
fs.existsSync('workspace/it.db').should.equal(false) fs.existsSync('workspace/it.db').should.equal(false)
fs.existsSync('workspace/it.db~').should.equal(false) fs.existsSync('workspace/it.db~').should.equal(false)
callbackify(storage.ensureDatafileIntegrityAsync)(p.filename, function (err) { callbackify(ensureDatafileIntegrityAsync)(p.filename, function (err) {
assert.isNull(err) assert.isNull(err)
fs.existsSync('workspace/it.db').should.equal(true) fs.existsSync('workspace/it.db').should.equal(true)
@ -750,7 +751,7 @@ describe('Persistence', function () {
fs.existsSync('workspace/it.db').should.equal(true) fs.existsSync('workspace/it.db').should.equal(true)
fs.existsSync('workspace/it.db~').should.equal(false) fs.existsSync('workspace/it.db~').should.equal(false)
callbackify(storage.ensureDatafileIntegrityAsync)(p.filename, function (err) { callbackify(ensureDatafileIntegrityAsync)(p.filename, function (err) {
assert.isNull(err) assert.isNull(err)
fs.existsSync('workspace/it.db').should.equal(true) fs.existsSync('workspace/it.db').should.equal(true)
@ -773,7 +774,7 @@ describe('Persistence', function () {
fs.existsSync('workspace/it.db').should.equal(false) fs.existsSync('workspace/it.db').should.equal(false)
fs.existsSync('workspace/it.db~').should.equal(true) fs.existsSync('workspace/it.db~').should.equal(true)
callbackify(storage.ensureDatafileIntegrityAsync)(p.filename, function (err) { callbackify(ensureDatafileIntegrityAsync)(p.filename, function (err) {
assert.isNull(err) assert.isNull(err)
fs.existsSync('workspace/it.db').should.equal(true) fs.existsSync('workspace/it.db').should.equal(true)
@ -798,7 +799,7 @@ describe('Persistence', function () {
fs.existsSync('workspace/it.db').should.equal(true) fs.existsSync('workspace/it.db').should.equal(true)
fs.existsSync('workspace/it.db~').should.equal(true) fs.existsSync('workspace/it.db~').should.equal(true)
callbackify(storage.ensureDatafileIntegrityAsync)(theDb.persistence.filename, function (err) { callbackify(ensureDatafileIntegrityAsync)(theDb.persistence.filename, function (err) {
assert.isNull(err) assert.isNull(err)
fs.existsSync('workspace/it.db').should.equal(true) fs.existsSync('workspace/it.db').should.equal(true)
@ -925,8 +926,8 @@ describe('Persistence', function () {
let theDb, theDb2, doc1, doc2 let theDb, theDb2, doc1, doc2
waterfall([ waterfall([
apply(callbackify(storage.ensureFileDoesntExistAsync), dbFile), apply(callbackify(ensureFileDoesntExistAsync), dbFile),
apply(callbackify(storage.ensureFileDoesntExistAsync), dbFile + '~'), apply(callbackify(ensureFileDoesntExistAsync), dbFile + '~'),
function (cb) { function (cb) {
theDb = new Datastore({ filename: dbFile }) theDb = new Datastore({ filename: dbFile })
theDb.loadDatabase(cb) theDb.loadDatabase(cb)
@ -1019,7 +1020,7 @@ describe('Persistence', function () {
assert(datafileLength > 5000) assert(datafileLength > 5000)
// Loading it in a separate process that we will crash before finishing the loadDatabase // Loading it in a separate process that we will crash before finishing the loadDatabase
fork('test_lac/loadAndCrash.test').on('exit', function (code) { fork('test_lac/loadAndCrash.test.cjs').on('exit', function (code) {
code.should.equal(1) // See test_lac/loadAndCrash.test.js code.should.equal(1) // See test_lac/loadAndCrash.test.js
fs.existsSync('workspace/lac.db').should.equal(true) fs.existsSync('workspace/lac.db').should.equal(true)

@ -1,8 +1,8 @@
/* eslint-env jest */ /* eslint-env jest */
// Forked from https://github.com/antoniopresto/react-native-local-mongodb/blob/93acbc8a9aaca86aed1d632855cd8b984501147b/test/persistence.test.js // Forked from https://github.com/antoniopresto/react-native-local-mongodb/blob/93acbc8a9aaca86aed1d632855cd8b984501147b/test/persistence.test.js
const { promisify } = require('util') import { promisify } from 'util'
const AsyncStorage = require('@react-native-async-storage/async-storage').default import AsyncStorage from '@react-native-async-storage/async-storage'
const DataStore = require('../../') import DataStore from '../../'
const getDb = async () => { const getDb = async () => {
await AsyncStorage.clear() await AsyncStorage.clear()

@ -1,3 +1,3 @@
const browserResolve = require('browser-resolve') import browserResolve from 'browser-resolve'
module.exports = (id, opts) => browserResolve.sync(id, { ...opts, browser: 'react-native' }) export default (id, opts) => browserResolve.sync(id, { ...opts, browser: 'react-native' })

@ -1,17 +1,19 @@
const { callbackify, promisify } = require('util') import { callbackify, promisify } from 'util'
const { promises: fs, constants: fsConstants } = require('fs')
// Must use an intermediary variable, otherwise Rollup imports callbackify from util directly
// (along with crypto somehow) in files importing customUtils.
const _callbackify = callbackify
const waterfallAsync = async tasks => { const waterfallAsync = async tasks => {
for (const task of tasks) { for (const task of tasks) {
await promisify(task)() await promisify(task)()
} }
} }
const waterfall = callbackify(waterfallAsync) const waterfall = _callbackify(waterfallAsync)
const eachAsync = async (arr, iterator) => Promise.all(arr.map(el => promisify(iterator)(el))) const eachAsync = async (arr, iterator) => Promise.all(arr.map(el => promisify(iterator)(el)))
const each = callbackify(eachAsync) const each = _callbackify(eachAsync)
const apply = function (fn) { const apply = function (fn) {
const args = Array.prototype.slice.call(arguments, 1) const args = Array.prototype.slice.call(arguments, 1)
@ -26,21 +28,16 @@ const whilstAsync = async (test, fn) => {
while (test()) await promisify(fn)() while (test()) await promisify(fn)()
} }
const whilst = callbackify(whilstAsync) const whilst = _callbackify(whilstAsync)
const wait = delay => new Promise(resolve => { const wait = delay => new Promise(resolve => {
setTimeout(resolve, delay) setTimeout(resolve, delay)
}) })
const exists = path => fs.access(path, fsConstants.FS_OK).then(() => true, () => false)
export {
// eslint-disable-next-line n/no-callback-literal whilst,
const existsCallback = (path, callback) => fs.access(path, fsConstants.FS_OK).then(() => callback(true), () => callback(false)) apply,
waterfall,
module.exports.whilst = whilst each,
module.exports.apply = apply wait
module.exports.waterfall = waterfall }
module.exports.each = each
module.exports.wait = wait
module.exports.exists = exists
module.exports.existsCallback = existsCallback
module.exports.callbackify = callbackify

@ -53,7 +53,7 @@ class FakeFsWriteStream extends Writable {
fs.createWriteStream = path => new FakeFsWriteStream(path) fs.createWriteStream = path => new FakeFsWriteStream(path)
// End of fs monkey patching // End of fs monkey patching
const Nedb = require('../lib/datastore.js') const Nedb = require('../cjs/datastore.cjs')
const db = new Nedb({ filename: 'workspace/lac.db' }) const db = new Nedb({ filename: 'workspace/lac.db' })
db.loadDatabaseAsync() // no need to await db.loadDatabaseAsync() // no need to await

@ -1,6 +1,5 @@
const fs = require('fs') import fsPromises from 'node:fs/promises'
const fsPromises = fs.promises import Nedb from '../src/datastore.js'
const Nedb = require('../lib/datastore')
const N = 64 const N = 64
// A console.error triggers an error of the parent test // A console.error triggers an error of the parent test

@ -1,9 +1,12 @@
'use strict' 'use strict'
const path = require('path') import { dirname, resolve, join } from 'node:path'
const webpack = require('webpack') import { fileURLToPath } from 'node:url'
import webpack from 'webpack'
module.exports = (env, argv) => { const __dirname = dirname(fileURLToPath(import.meta.url))
export default (env, argv) => {
const minimize = argv.env.minimize || false const minimize = argv.env.minimize || false
const baseConfig = { const baseConfig = {
@ -15,7 +18,7 @@ module.exports = (env, argv) => {
minimize minimize
}, },
output: { output: {
path: path.join(__dirname, 'browser-version/out'), path: __dirname,
filename: pathData => `${pathData.chunk.name.toLowerCase()}${minimize ? '.min' : ''}.js`, filename: pathData => `${pathData.chunk.name.toLowerCase()}${minimize ? '.min' : ''}.js`,
libraryTarget: 'window', libraryTarget: 'window',
library: '[name]' library: '[name]'
@ -23,9 +26,9 @@ module.exports = (env, argv) => {
} }
const pluginsNedb = [ const pluginsNedb = [
new webpack.NormalModuleReplacementPlugin(new RegExp(path.resolve(__dirname, 'lib/storage.js')), path.resolve(__dirname, 'browser-version/lib/storage.browser.js')), new webpack.NormalModuleReplacementPlugin(new RegExp(resolve(__dirname, 'src/storage.js')), resolve(__dirname, 'src/browser/storage.browser.js')),
new webpack.NormalModuleReplacementPlugin(new RegExp(path.resolve(__dirname, 'lib/customUtils.js')), path.resolve(__dirname, 'browser-version/lib/customUtils.js')), new webpack.NormalModuleReplacementPlugin(new RegExp(resolve(__dirname, 'src/customUtils.js')), resolve(__dirname, 'src/browser/customUtils.js')),
new webpack.NormalModuleReplacementPlugin(/byline/, path.resolve(__dirname, 'browser-version/lib/byline.js')) new webpack.NormalModuleReplacementPlugin(new RegExp(resolve(__dirname, 'src/byline.js')), resolve(__dirname, 'src/browser/byline.js'))
] ]
const polyfillPlugins = [ const polyfillPlugins = [
@ -43,7 +46,7 @@ module.exports = (env, argv) => {
name: 'Nedb', name: 'Nedb',
plugins: pluginsNedb, plugins: pluginsNedb,
entry: { entry: {
Nedb: path.join(__dirname, 'lib', 'datastore.js') Nedb: join(__dirname, 'src', 'datastore.js')
} }
}, },
{ {
@ -53,13 +56,13 @@ module.exports = (env, argv) => {
resolve: { resolve: {
fallback: { fallback: {
fs: false, fs: false,
path: require.resolve('path-browserify'), path: import.meta.resolve('path-browserify'),
util: require.resolve('util/'), util: import.meta.resolve('util/'),
crypto: false crypto: false
} }
}, },
entry: { entry: {
testUtils: path.join(__dirname, 'test', 'utils.test.js') testUtils: join(__dirname, 'test', 'utils.test.js')
} }
} }
] ]

Loading…
Cancel
Save