Move to ESM and build a commonjs version

feat/remove-native-modules-imports
Timothée Rebours 10 months ago
parent 08c8076ae9
commit 792eecd1a1
  1. 2
      .gitignore
  2. 4
      __mocks__/@react-native-async-storage/async-storage.js
  3. 1
      browser-version/lib/byline.js
  4. 3
      index.js
  5. 2
      jsdoc.conf.js
  6. 4
      karma.conf.local.js
  7. 6
      karma.conf.template.js
  8. 8283
      package-lock.json
  9. 31
      package.json
  10. 1
      src/browser/byline.js
  11. 5
      src/browser/customUtils.js
  12. 34
      src/browser/storage.browser.js
  13. 44
      src/browser/storage.react-native.js
  14. 8
      src/byline.js
  15. 31
      src/cursor.js
  16. 15
      src/customUtils.js
  17. 50
      src/datastore.js
  18. 4
      src/executor.js
  19. 36
      src/indexes.js
  20. 66
      src/model.js
  21. 77
      src/persistence.js
  22. 77
      src/storage.js
  23. 10
      src/utils.js
  24. 2
      src/waterfall.js
  25. 48
      test/byline.test.js
  26. 13
      test/cursor.async.test.js
  27. 15
      test/cursor.test.js
  28. 14
      test/customUtil.test.js
  29. 16
      test/db.async.test.js
  30. 16
      test/db.test.js
  31. 11
      test/executor.async.test.js
  32. 13
      test/executor.test.js
  33. 7
      test/fsUtils.test.js
  34. 4
      test/indexes.test.js
  35. 82
      test/model.test.js
  36. 64
      test/persistence.async.test.js
  37. 53
      test/persistence.test.js
  38. 6
      test/react-native/persistence.test.js
  39. 4
      test/react-native/resolver.js
  40. 33
      test/utils.test.js
  41. 2
      test_lac/loadAndCrash.test.cjs
  42. 5
      test_lac/openFds.test.js
  43. 25
      webpack.config.js

2
.gitignore vendored

@ -24,6 +24,6 @@ browser-version/node_modules
*~
*.swo
browser-version/out
test-results
typings-tests.js
cjs/*

@ -1 +1,3 @@
module.exports.default = require('@react-native-async-storage/async-storage/jest/async-storage-mock')
import generated from '@react-native-async-storage/async-storage/jest/async-storage-mock'
export default generated

@ -1 +0,0 @@
module.exports = {}

@ -1,3 +0,0 @@
const Datastore = require('./lib/datastore')
module.exports = Datastore

@ -1,5 +1,5 @@
'use strict'
module.exports = {
export default {
plugins: ['plugins/markdown']
}

@ -1,8 +1,8 @@
'use strict'
const template = require('./karma.conf.template.js')
import template from './karma.conf.template.js'
module.exports = function (config) {
export default function (config) {
const localBrowser = {
ChromeHeadlessNoSandbox: {
base: 'ChromeHeadless',

@ -1,6 +1,6 @@
'use strict'
module.exports = (config) => ({
export default (config) => ({
// Increase timeout in case connection in CI is slow
captureTimeout: 120000,
browserNoActivityTimeout: 300000,
@ -14,8 +14,8 @@ module.exports = (config) => ({
// list of files / patterns to load in the browser
files: [
'node_modules/localforage/dist/localforage.min.js',
'browser-version/out/testutils.min.js',
'browser-version/out/nedb.min.js',
'testutils.min.js',
'nedb.min.js',
'test/browser/nedb-browser.spec.js',
'test/browser/load.spec.js'
],

8283
package-lock.json generated

File diff suppressed because it is too large Load Diff

@ -7,6 +7,7 @@
"index.js",
"index.d.ts"
],
"type": "module",
"types": "index.d.ts",
"author": {
"name": "Timothée Rebours",
@ -53,8 +54,6 @@
"@types/jest": "^27.5.2",
"browser-resolve": "^2.0.0",
"chai": "^4.3.7",
"commander": "^7.2.0",
"events": "^3.3.0",
"jest": "^27.5.1",
"jsdoc-to-markdown": "^8.0.0",
"karma": "^6.4.1",
@ -68,7 +67,8 @@
"path-browserify": "^1.0.1",
"process": "^0.11.10",
"react": "^18.2.0",
"react-native": "^0.71.2",
"react-native": "^0.73.2",
"rollup": "^4.9.5",
"semver": "^7.3.8",
"source-map-loader": "^4.0.1",
"standard": "^17.0.0",
@ -83,7 +83,9 @@
},
"scripts": {
"lint": "standard",
"pretest": "npm run build:cjs",
"test": "mocha --reporter spec --timeout 10000",
"build:cjs": "rollup --preserveModules src/* src/browser/* --dir cjs --entryFileNames \"[name].cjs\" --format cjs --preserveEntrySignatures strict",
"build:browser": "webpack --config-name Nedb && webpack --config-name Nedb --env minimize && webpack --config-name testUtils --env minimize",
"pretest:browser": "npm run build:browser",
"test:browser": "xvfb-maybe karma start karma.conf.local.js",
@ -92,16 +94,23 @@
"prepublishOnly": "npm run build:browser",
"generateDocs:markdown": "jsdoc2md --no-cache -c jsdoc.conf.js --param-list-format list --files ./lib/*.js > API.md"
},
"main": "index.js",
"main": "cjs/datastore.cjs",
"module": "src/datastore.js",
"browser": {
"./lib/customUtils.js": "./browser-version/lib/customUtils.js",
"./lib/storage.js": "./browser-version/lib/storage.browser.js",
"./lib/byline.js": "./browser-version/lib/byline.js"
"./src/customUtils.js": "./src/browser/customUtils.js",
"./src/storage.js": "./src/browser/storage.browser.js",
"./src/byline.js": "./src/browser/byline.js",
"./cjs/customUtils.cjs": "./src/browser/customUtils.cjs",
"./cjs/storage.cjs": "./src/browser/storage.browser.cjs",
"./cjs/byline.cjs": "./src/browser/byline.cjs"
},
"react-native": {
"./lib/customUtils.js": "./browser-version/lib/customUtils.js",
"./lib/storage.js": "./browser-version/lib/storage.react-native.js",
"./lib/byline.js": "./browser-version/lib/byline.js"
"./src/customUtils.js": "./src/browser/customUtils.js",
"./src/storage.js": "./src/browser/storage.react-native.js",
"./src/byline.js": "./src/browser/byline.js",
"./cjs/customUtils.cjs": "./cjs/browser/customUtils.cjs",
"./cjs/storage.cjs": "./cjs/browser/storage.react-native.cjs",
"./cjs/byline.cjs": "./cjs/browser/byline.cjs"
},
"license": "MIT",
"publishConfig": {
@ -109,7 +118,7 @@
},
"standard": {
"ignore": [
"browser-version/out",
"cjs",
"**/*.ts"
]
},

@ -0,0 +1 @@
export default {}

@ -73,7 +73,7 @@ const byteArrayToBase64 = uint8 => {
*/
const uid = len => byteArrayToBase64(randomBytes(Math.ceil(Math.max(8, len * 2)))).replace(/[+/]/g, '').slice(0, len)
module.exports.uid = uid
export { uid }
// Copyright Joyent, Inc. and other Node contributors.
//
@ -162,4 +162,5 @@ function callbackify (original) {
getOwnPropertyDescriptors(original))
return callbackified
}
module.exports.callbackify = callbackify
export { callbackify }

@ -8,7 +8,7 @@
* @private
*/
const localforage = require('localforage')
import localforage from 'localforage'
// Configure localforage to display NeDB name for now. Would be a good idea to let user use his own app name
const store = localforage.createInstance({
@ -169,23 +169,17 @@ const crashSafeWriteFileLinesAsync = async (filename, lines) => {
await writeFileAsync(filename, lines.join('\n'))
}
// Interface
module.exports.existsAsync = existsAsync
module.exports.renameAsync = renameAsync
module.exports.writeFileAsync = writeFileAsync
module.exports.crashSafeWriteFileLinesAsync = crashSafeWriteFileLinesAsync
module.exports.appendFileAsync = appendFileAsync
module.exports.readFileAsync = readFileAsync
const readFileStream = false
module.exports.unlinkAsync = unlinkAsync
module.exports.mkdirAsync = mkdirAsync
module.exports.ensureDatafileIntegrityAsync = ensureDatafileIntegrityAsync
module.exports.ensureParentDirectoryExistsAsync = ensureParentDirectoryExistsAsync
// Interface
export {
appendFileAsync,
crashSafeWriteFileLinesAsync,
ensureDatafileIntegrityAsync,
ensureParentDirectoryExistsAsync,
existsAsync,
readFileAsync,
readFileStream,
unlinkAsync,
mkdirAsync,
}

@ -8,8 +8,8 @@
* @private
*/
const AsyncStorage = require('@react-native-async-storage/async-storage').default
const { callbackify } = require('./customUtils')
import AsyncStorage from '@react-native-async-storage/async-storage'
import { callbackify } from '../customUtils'
/**
* Async version of {@link module:storageReactNative.exists}.
@ -264,32 +264,16 @@ const crashSafeWriteFileLines = callbackify(crashSafeWriteFileLinesAsync)
*/
const ensureParentDirectoryExistsAsync = async (file, mode) => Promise.resolve()
// Interface
module.exports.exists = exists
module.exports.existsAsync = existsAsync
const readFileStream = false
module.exports.rename = rename
module.exports.renameAsync = renameAsync
module.exports.writeFile = writeFile
module.exports.writeFileAsync = writeFileAsync
module.exports.crashSafeWriteFileLines = crashSafeWriteFileLines
module.exports.crashSafeWriteFileLinesAsync = crashSafeWriteFileLinesAsync
module.exports.appendFile = appendFile
module.exports.appendFileAsync = appendFileAsync
module.exports.readFile = readFile
module.exports.readFileAsync = readFileAsync
module.exports.unlink = unlink
module.exports.unlinkAsync = unlinkAsync
module.exports.mkdir = mkdir
module.exports.mkdirAsync = mkdirAsync
module.exports.ensureDatafileIntegrity = ensureDatafileIntegrity
module.exports.ensureDatafileIntegrityAsync = ensureDatafileIntegrityAsync
module.exports.ensureParentDirectoryExistsAsync = ensureParentDirectoryExistsAsync
export {
appendFileAsync,
crashSafeWriteFileLinesAsync,
ensureDatafileIntegrityAsync,
ensureParentDirectoryExistsAsync,
existsAsync,
readFileAsync,
readFileStream,
unlinkAsync,
mkdirAsync,
}

@ -23,9 +23,9 @@
* @module byline
* @private
*/
const stream = require('stream')
const timers = require('timers')
const { Buffer } = require('buffer')
import stream from 'node:stream'
import timers from 'node:timers'
import { Buffer } from 'node:buffer'
const createLineStream = (readStream, options) => {
if (!readStream) throw new Error('expected readStream')
@ -115,4 +115,4 @@ class LineStream extends stream.Transform {
}
}
module.exports = createLineStream
export default createLineStream

@ -1,5 +1,5 @@
const model = require('./model.js')
const { callbackify } = require('./customUtils.js')
import { getDotValue, modify, match, compareThings } from './model.js'
import { callbackify } from './customUtils.js'
/**
* Has a callback
@ -127,30 +127,30 @@ class Cursor {
// Check for consistency
const keys = Object.keys(this._projection)
keys.forEach(k => {
for (const k of keys) {
if (action !== undefined && this._projection[k] !== action) throw new Error('Can\'t both keep and omit fields except for _id')
action = this._projection[k]
})
}
// Do the actual projection
candidates.forEach(candidate => {
for (const candidate of candidates) {
let toPush
if (action === 1) { // pick-type projection
toPush = { $set: {} }
keys.forEach(k => {
toPush.$set[k] = model.getDotValue(candidate, k)
for (const k of keys) {
toPush.$set[k] = getDotValue(candidate, k)
if (toPush.$set[k] === undefined) delete toPush.$set[k]
})
toPush = model.modify({}, toPush)
}
toPush = modify({}, toPush)
} else { // omit-type projection
toPush = { $unset: {} }
keys.forEach(k => { toPush.$unset[k] = true })
toPush = model.modify(candidate, toPush)
for (const k of keys) { toPush.$unset[k] = true }
toPush = modify(candidate, toPush)
}
if (keepId) toPush._id = candidate._id
else delete toPush._id
res.push(toPush)
})
}
return res
}
@ -168,9 +168,8 @@ class Cursor {
let skipped = 0
const candidates = await this.db._getCandidatesAsync(this.query)
for (const candidate of candidates) {
if (model.match(candidate, this.query)) {
if (match(candidate, this.query)) {
// If a sort is defined, wait for the results to be sorted before applying limit and skip
if (!this._sort) {
if (this._skip && this._skip > skipped) skipped += 1
@ -189,7 +188,7 @@ class Cursor {
const criteria = Object.entries(this._sort).map(([key, direction]) => ({ key, direction }))
res.sort((a, b) => {
for (const criterion of criteria) {
const compare = criterion.direction * model.compareThings(model.getDotValue(a, criterion.key), model.getDotValue(b, criterion.key), this.db.compareStrings)
const compare = criterion.direction * compareThings(getDotValue(a, criterion.key), getDotValue(b, criterion.key), this.db.compareStrings)
if (compare !== 0) return compare
}
return 0
@ -247,4 +246,4 @@ class Cursor {
}
// Interface
module.exports = Cursor
export default Cursor

@ -4,9 +4,13 @@
* @module customUtilsNode
* @private
*/
const crypto = require('crypto')
const { callbackify } = require('util')
import { randomBytes } from 'crypto'
import { callbackify } from 'util'
// Must use an intermediary variable, otherwise Rollup imports callbackify from util directly
// (along with crypto somehow) in files importing customUtils.
const _callbackify = callbackify
/**
* Return a random alphanumerical string of length len
* There is a very small probability (less than 1/1,000,000) for the length to be less than len
@ -18,12 +22,9 @@ const { callbackify } = require('util')
* @return {string}
* @alias module:customUtilsNode.uid
*/
const uid = len => crypto.randomBytes(Math.ceil(Math.max(8, len * 2)))
const uid = len => randomBytes(Math.ceil(Math.max(8, len * 2)))
.toString('base64')
.replace(/[+/]/g, '')
.slice(0, len)
// Interface
module.exports.uid = uid
module.exports.callbackify = callbackify
export { uid, _callbackify as callbackify }

@ -1,10 +1,10 @@
const Cursor = require('./cursor.js')
const { uid, callbackify } = require('./customUtils.js')
const Executor = require('./executor.js')
const Index = require('./indexes.js')
const model = require('./model.js')
const Persistence = require('./persistence.js')
const { isDate, pick, filterIndexNames } = require('./utils.js')
import Cursor from './cursor.js'
import { callbackify, uid } from './customUtils.js'
import Executor from './executor.js'
import Index from './indexes.js'
import { deepCopy, match, checkObject, modify } from './model.js'
import Persistence from './persistence.js'
import { filterIndexNames, isDate, pick } from './utils.js'
/**
* Callback with no parameter
@ -668,10 +668,10 @@ class Datastore {
const expiredDocsIds = []
const ttlIndexesFieldNames = Object.keys(this.ttlIndexes)
docs.forEach(doc => {
for (const doc of docs) {
if (ttlIndexesFieldNames.every(i => !(doc[i] !== undefined && isDate(doc[i]) && Date.now() > doc[i].getTime() + this.ttlIndexes[i] * 1000))) validDocs.push(doc)
else expiredDocsIds.push(doc._id)
})
}
for (const _id of expiredDocsIds) {
await this._removeAsync({ _id }, {})
}
@ -691,7 +691,7 @@ class Datastore {
this._insertInCache(preparedDoc)
await this.persistence.persistNewStateAsync(Array.isArray(preparedDoc) ? preparedDoc : [preparedDoc])
return model.deepCopy(preparedDoc)
return deepCopy(preparedDoc)
}
/**
@ -718,14 +718,14 @@ class Datastore {
if (Array.isArray(newDoc)) {
preparedDoc = []
newDoc.forEach(doc => { preparedDoc.push(this._prepareDocumentForInsertion(doc)) })
for (const doc of newDoc) { preparedDoc.push(this._prepareDocumentForInsertion(doc)) }
} else {
preparedDoc = model.deepCopy(newDoc)
preparedDoc = deepCopy(newDoc)
if (preparedDoc._id === undefined) preparedDoc._id = this._createNewId()
const now = new Date()
if (this.timestampData && preparedDoc.createdAt === undefined) preparedDoc.createdAt = now
if (this.timestampData && preparedDoc.updatedAt === undefined) preparedDoc.updatedAt = now
model.checkObject(preparedDoc)
checkObject(preparedDoc)
}
return preparedDoc
@ -857,7 +857,7 @@ class Datastore {
* @async
*/
findAsync (query, projection = {}) {
const cursor = new Cursor(this, query, docs => docs.map(doc => model.deepCopy(doc)))
const cursor = new Cursor(this, query, docs => docs.map(doc => deepCopy(doc)))
cursor.projection(projection)
return cursor
@ -902,7 +902,7 @@ class Datastore {
* @return {Cursor<document>}
*/
findOneAsync (query, projection = {}) {
const cursor = new Cursor(this, query, docs => docs.length === 1 ? model.deepCopy(docs[0]) : null)
const cursor = new Cursor(this, query, docs => docs.length === 1 ? deepCopy(docs[0]) : null)
cursor.projection(projection).limit(1)
return cursor
@ -954,13 +954,13 @@ class Datastore {
let toBeInserted
try {
model.checkObject(update)
checkObject(update)
// updateQuery is a simple object with no modifier, use it as the document to insert
toBeInserted = update
} catch (e) {
// updateQuery contains modifiers, use the find query as the base,
// strip it from all operators and update it according to updateQuery
toBeInserted = model.modify(model.deepCopy(query, true), update)
toBeInserted = modify(deepCopy(query, true), update)
}
const newDoc = await this._insertAsync(toBeInserted)
return { numAffected: 1, affectedDocuments: newDoc, upsert: true }
@ -976,10 +976,10 @@ class Datastore {
// Preparing update (if an error is thrown here neither the datafile nor
// the in-memory indexes are affected)
for (const candidate of candidates) {
if (model.match(candidate, query) && (multi || numReplaced === 0)) {
if (match(candidate, query) && (multi || numReplaced === 0)) {
numReplaced += 1
if (this.timestampData) { createdAt = candidate.createdAt }
modifiedDoc = model.modify(candidate, update)
modifiedDoc = modify(candidate, update)
if (this.timestampData) {
modifiedDoc.createdAt = createdAt
modifiedDoc.updatedAt = new Date()
@ -997,7 +997,7 @@ class Datastore {
if (!options.returnUpdatedDocs) return { numAffected: numReplaced, upsert: false, affectedDocuments: null }
else {
let updatedDocsDC = []
updatedDocs.forEach(doc => { updatedDocsDC.push(model.deepCopy(doc)) })
for (const doc of updatedDocs) { updatedDocsDC.push(deepCopy(doc)) }
if (!multi) updatedDocsDC = updatedDocsDC[0]
return { numAffected: numReplaced, affectedDocuments: updatedDocsDC, upsert: false }
}
@ -1007,7 +1007,7 @@ class Datastore {
* Callback version of {@link Datastore#updateAsync}.
* @param {query} query
* @param {document|*} update
* @param {Object|Datastore~updateCallback} [options|]
* @param {Object|Datastore~updateCallback} [options]
* @param {boolean} [options.multi = false]
* @param {boolean} [options.upsert = false]
* @param {boolean} [options.returnUpdatedDocs = false]
@ -1083,13 +1083,13 @@ class Datastore {
const removedDocs = []
let numRemoved = 0
candidates.forEach(d => {
if (model.match(d, query) && (multi || numRemoved === 0)) {
for (const d of candidates) {
if (match(d, query) && (multi || numRemoved === 0)) {
numRemoved += 1
removedDocs.push({ $$deleted: true, _id: d._id })
this._removeFromIndexes(d)
}
})
}
await this.persistence.persistNewStateAsync(removedDocs)
return numRemoved
@ -1125,4 +1125,4 @@ class Datastore {
}
}
module.exports = Datastore
export default Datastore

@ -1,4 +1,4 @@
const Waterfall = require('./waterfall')
import Waterfall from './waterfall.js'
/**
* Executes operations sequentially.
@ -76,4 +76,4 @@ class Executor {
}
// Interface
module.exports = Executor
export default Executor

@ -1,6 +1,6 @@
const BinarySearchTree = require('@seald-io/binary-search-tree').AVLTree
const model = require('./model.js')
const { uniq, isDate } = require('./utils.js')
import { AVLTree as BinarySearchTree } from '@seald-io/binary-search-tree'
import { compareThings, getDotValues } from './model.js'
import { isDate, uniq } from './utils.js'
/**
* Two indexed pointers are equal if they point to the same place
@ -73,7 +73,7 @@ class Index {
* Options object given to the underlying BinarySearchTree.
* @type {{unique: boolean, checkValueEquality: (function(*, *): boolean), compareKeys: ((function(*, *, compareStrings): (number|number))|*)}}
*/
this.treeOptions = { unique: this.unique, compareKeys: model.compareThings, checkValueEquality }
this.treeOptions = { unique: this.unique, compareKeys: compareThings, checkValueEquality }
/**
* Underlying BinarySearchTree for this index. Uses an AVLTree for optimization.
@ -109,7 +109,7 @@ class Index {
return
}
const key = model.getDotValues(doc, this._fields)
const key = getDotValues(doc, this._fields)
// We don't index documents that don't contain the field if the index is sparse
if ((key === undefined || (typeof key === 'object' && key !== null && Object.values(key).every(el => el === undefined))) && this.sparse) return
@ -177,19 +177,19 @@ class Index {
*/
remove (doc) {
if (Array.isArray(doc)) {
doc.forEach(d => { this.remove(d) })
for (const d of doc) { this.remove(d) }
return
}
const key = model.getDotValues(doc, this._fields)
const key = getDotValues(doc, this._fields)
if (key === undefined && this.sparse) return
if (!Array.isArray(key)) {
this.tree.delete(key, doc)
} else {
uniq(key, projectForUnique).forEach(_key => {
for (const _key of uniq(key, projectForUnique)) {
this.tree.delete(_key, doc)
})
}
}
}
@ -268,9 +268,9 @@ class Index {
if (!Array.isArray(oldDoc)) this.update(newDoc, oldDoc)
else {
oldDoc.forEach(pair => {
for (const pair of oldDoc) {
revert.push({ oldDoc: pair.newDoc, newDoc: pair.oldDoc })
})
}
this.update(revert)
}
}
@ -286,15 +286,15 @@ class Index {
const _res = {}
const res = []
value.forEach(v => {
this.getMatching(v).forEach(doc => {
for (const v of value) {
for (const doc of this.getMatching(v)) {
_res[doc._id] = doc
})
})
}
}
Object.keys(_res).forEach(_id => {
for (const _id of Object.keys(_res)) {
res.push(_res[_id])
})
}
return res
}
@ -330,4 +330,4 @@ class Index {
}
// Interface
module.exports = Index
export default Index

@ -6,7 +6,7 @@
* @module model
* @private
*/
const { uniq, isDate, isRegExp } = require('./utils.js')
import { isDate, isRegExp, uniq } from './utils.js'
/**
* Check a key, throw an error if the key is non valid
@ -39,9 +39,9 @@ const checkKey = (k, v) => {
*/
const checkObject = obj => {
if (Array.isArray(obj)) {
obj.forEach(o => {
for (const o of obj) {
checkObject(o)
})
}
}
if (typeof obj === 'object' && obj !== null) {
@ -162,9 +162,19 @@ const isPrimitiveType = obj => (
* @private
*/
const compareNSB = (a, b) => {
if (a < b) return -1
if (a > b) return 1
return 0
if (a === b) return 0
switch (typeof a) { // types are assumed to be equal
case 'string':
if (a < b) return -1
else if (a > b) return 1
else return 0
case 'boolean':
return a - b
case 'number':
return Math.sign(a - b)
default:
throw new Error('Invalid types')
}
}
/**
@ -292,14 +302,14 @@ const $addToSetPartial = (obj, field, value) => {
if (Object.keys(value).length > 1) throw new Error('Can\'t use another field in conjunction with $each')
if (!Array.isArray(value.$each)) throw new Error('$each requires an array value')
value.$each.forEach(v => {
for (const v of value.$each) {
$addToSetPartial(obj, field, v)
})
}
} else {
let addToSet = true
obj[field].forEach(v => {
for (const v of obj[field]) {
if (compareThings(v, value) === 0) addToSet = false
})
}
if (addToSet) obj[field].push(value)
}
}
@ -399,9 +409,9 @@ const modifierFunctions = {
) throw new Error('Can only use $slice in cunjunction with $each when $push to array')
if (!Array.isArray(value.$each)) throw new Error('$each requires an array value')
value.$each.forEach(v => {
for (const v of value.$each) {
obj[field].push(v)
})
}
if (value.$slice === undefined || typeof value.$slice !== 'number') return
@ -452,7 +462,7 @@ const modify = (obj, updateQuery) => {
// Apply modifiers
modifiers = uniq(keys)
newDoc = deepCopy(obj)
modifiers.forEach(m => {
for (const m of modifiers) {
if (!modifierFunctions[m]) throw new Error(`Unknown modifier ${m}`)
// Can't rely on Object.keys throwing on non objects since ES6
@ -460,10 +470,10 @@ const modify = (obj, updateQuery) => {
if (typeof updateQuery[m] !== 'object') throw new Error(`Modifier ${m}'s argument must be an object`)
const keys = Object.keys(updateQuery[m])
keys.forEach(k => {
for (const k of keys) {
modifierFunctions[m](newDoc, k, updateQuery[m][k])
})
})
}
}
}
// Check result is valid and return it
@ -814,14 +824,16 @@ function matchQueryPart (obj, queryKey, queryValue, treatObjAsValue) {
}
// Interface
module.exports.serialize = serialize
module.exports.deserialize = deserialize
module.exports.deepCopy = deepCopy
module.exports.checkObject = checkObject
module.exports.isPrimitiveType = isPrimitiveType
module.exports.modify = modify
module.exports.getDotValue = getDotValue
module.exports.getDotValues = getDotValues
module.exports.match = match
module.exports.areThingsEqual = areThingsEqual
module.exports.compareThings = compareThings
export {
serialize,
deserialize,
deepCopy,
checkObject,
isPrimitiveType,
modify,
getDotValue,
getDotValues,
match,
areThingsEqual,
compareThings
}

@ -1,8 +1,17 @@
const byline = require('./byline')
const customUtils = require('./customUtils.js')
const Index = require('./indexes.js')
const model = require('./model.js')
const storage = require('./storage.js')
import byline from './byline.js'
import { uid } from './customUtils.js'
import Index from './indexes.js'
import { serialize, deserialize } from './model.js'
import {
appendFileAsync,
crashSafeWriteFileLinesAsync,
ensureDatafileIntegrityAsync,
ensureParentDirectoryExistsAsync,
existsAsync,
readFileAsync,
readFileStream,
unlinkAsync,
} from './storage.js'
const DEFAULT_DIR_MODE = 0o755
const DEFAULT_FILE_MODE = 0o644
@ -54,7 +63,12 @@ class Persistence {
this.inMemoryOnly = this.db.inMemoryOnly
this.filename = this.db.filename
this.corruptAlertThreshold = options.corruptAlertThreshold !== undefined ? options.corruptAlertThreshold : 0.1
this.modes = options.modes !== undefined ? options.modes : { fileMode: DEFAULT_FILE_MODE, dirMode: DEFAULT_DIR_MODE }
this.modes = options.modes !== undefined
? options.modes
: {
fileMode: DEFAULT_FILE_MODE,
dirMode: DEFAULT_DIR_MODE
}
if (this.modes.fileMode === undefined) this.modes.fileMode = DEFAULT_FILE_MODE
if (this.modes.dirMode === undefined) this.modes.dirMode = DEFAULT_DIR_MODE
if (
@ -79,7 +93,7 @@ class Persistence {
if (options.testSerializationHooks === undefined || options.testSerializationHooks) {
for (let i = 1; i < 30; i += 1) {
for (let j = 0; j < 10; j += 1) {
const randomString = customUtils.uid(i)
const randomString = uid(i)
if (this.beforeDeserialization(this.afterSerialization(randomString)) !== randomString) {
throw new Error('beforeDeserialization is not the reverse of afterSerialization, cautiously refusing to start NeDB to prevent dataloss')
}
@ -99,12 +113,12 @@ class Persistence {
if (this.inMemoryOnly) return
this.db.getAllData().forEach(doc => {
lines.push(this.afterSerialization(model.serialize(doc)))
})
Object.keys(this.db.indexes).forEach(fieldName => {
for (const doc of this.db.getAllData()) {
lines.push(this.afterSerialization(serialize(doc)))
}
for (const fieldName of Object.keys(this.db.indexes)) {
if (fieldName !== '_id') { // The special _id index is managed by datastore.js, the others need to be persisted
lines.push(this.afterSerialization(model.serialize({
lines.push(this.afterSerialization(serialize({
$$indexCreated: {
fieldName: this.db.indexes[fieldName].fieldName,
unique: this.db.indexes[fieldName].unique,
@ -112,9 +126,9 @@ class Persistence {
}
})))
}
})
}
await storage.crashSafeWriteFileLinesAsync(this.filename, lines, this.modes)
await crashSafeWriteFileLinesAsync(this.filename, lines, this.modes)
if (typeof this.db.oncompaction === 'function') this.db.oncompaction(null)
} catch (error) {
if (typeof this.db.oncompaction === 'function') this.db.oncompaction(error)
@ -137,13 +151,13 @@ class Persistence {
// In-memory only datastore
if (this.inMemoryOnly) return
newDocs.forEach(doc => {
toPersist += this.afterSerialization(model.serialize(doc)) + '\n'
})
for (const doc of newDocs) {
toPersist += this.afterSerialization(serialize(doc)) + '\n'
}
if (toPersist.length === 0) return
await storage.appendFileAsync(this.filename, toPersist, { encoding: 'utf8', mode: this.modes.fileMode })
await appendFileAsync(this.filename, toPersist, { encoding: 'utf8', mode: this.modes.fileMode })
}
/**
@ -171,9 +185,12 @@ class Persistence {
let corruptItems = 0
for (const datum of data) {
if (datum === '') { dataLength--; continue }
if (datum === '') {
dataLength--
continue
}
try {
const doc = model.deserialize(this.beforeDeserialization(datum))
const doc = deserialize(this.beforeDeserialization(datum))
if (doc._id) {
if (doc.$$deleted === true) delete dataById[doc._id]
else dataById[doc._id] = doc
@ -229,7 +246,7 @@ class Persistence {
lineStream.on('data', (line) => {
if (line === '') return
try {
const doc = model.deserialize(this.beforeDeserialization(line))
const doc = deserialize(this.beforeDeserialization(line))
if (doc._id) {
if (doc.$$deleted === true) delete dataById[doc._id]
else dataById[doc._id] = doc
@ -286,22 +303,22 @@ class Persistence {
// In-memory only datastore
if (this.inMemoryOnly) return
await Persistence.ensureParentDirectoryExistsAsync(this.filename, this.modes.dirMode)
await storage.ensureDatafileIntegrityAsync(this.filename, this.modes.fileMode)
await ensureDatafileIntegrityAsync(this.filename, this.modes.fileMode)
let treatedData
if (storage.readFileStream) {
if (readFileStream) {
// Server side
const fileStream = storage.readFileStream(this.filename, { encoding: 'utf8', mode: this.modes.fileMode })
const fileStream = readFileStream(this.filename, { encoding: 'utf8', mode: this.modes.fileMode })
treatedData = await this.treatRawStreamAsync(fileStream)
} else {
// Browser
const rawData = await storage.readFileAsync(this.filename, { encoding: 'utf8', mode: this.modes.fileMode })
const rawData = await readFileAsync(this.filename, { encoding: 'utf8', mode: this.modes.fileMode })
treatedData = this.treatRawData(rawData)
}
// Recreate all indexes in the datafile
Object.keys(treatedData.indexes).forEach(key => {
for (const key of Object.keys(treatedData.indexes)) {
this.db.indexes[key] = new Index(treatedData.indexes[key])
})
}
// Fill cached database (i.e. all indexes) with data
try {
@ -337,7 +354,7 @@ class Persistence {
// remove datastore file
if (!this.db.inMemoryOnly) {
await this.db.executor.pushAsync(async () => {
if (await storage.existsAsync(this.filename)) await storage.unlinkAsync(this.filename)
if (await existsAsync(this.filename)) await unlinkAsync(this.filename)
}, true)
}
}
@ -350,9 +367,9 @@ class Persistence {
* @private
*/
static async ensureParentDirectoryExistsAsync (dir, mode = DEFAULT_DIR_MODE) {
return storage.ensureParentDirectoryExistsAsync(dir, mode)
return ensureParentDirectoryExistsAsync(dir, mode)
}
}
// Interface
module.exports = Persistence
export default Persistence

@ -8,10 +8,10 @@
* @module storage
* @private
*/
const fs = require('fs')
const fsPromises = fs.promises
const path = require('path')
const { Readable } = require('stream')
import { constants as fsConstants, createWriteStream, createReadStream } from 'node:fs'
import { access, rename, writeFile, unlink, appendFile, readFile, mkdir, open } from 'node:fs/promises'
import { dirname, parse, resolve } from 'node:path'
import { Readable } from 'node:stream'
const DEFAULT_DIR_MODE = 0o755
const DEFAULT_FILE_MODE = 0o644
@ -24,7 +24,7 @@ const DEFAULT_FILE_MODE = 0o644
* @alias module:storage.existsAsync
* @see module:storage.exists
*/
const existsAsync = file => fsPromises.access(file, fs.constants.F_OK).then(() => true, () => false)
const existsAsync = file => access(file, fsConstants.F_OK).then(() => true, () => false)
/**
* Node.js' [fsPromises.rename]{@link https://nodejs.org/api/fs.html#fspromisesrenameoldpath-newpath}
@ -35,7 +35,7 @@ const existsAsync = file => fsPromises.access(file, fs.constants.F_OK).then(() =
* @alias module:storage.renameAsync
* @async
*/
const renameAsync = fsPromises.rename
const renameAsync = rename
/**
* Node.js' [fsPromises.writeFile]{@link https://nodejs.org/api/fs.html#fspromiseswritefilefile-data-options}.
@ -47,7 +47,7 @@ const renameAsync = fsPromises.rename
* @alias module:storage.writeFileAsync
* @async
*/
const writeFileAsync = fsPromises.writeFile
const writeFileAsync = writeFile
/**
* Node.js' [fs.createWriteStream]{@link https://nodejs.org/api/fs.html#fscreatewritestreampath-options}.
@ -57,7 +57,7 @@ const writeFileAsync = fsPromises.writeFile
* @return {fs.WriteStream}
* @alias module:storage.writeFileStream
*/
const writeFileStream = fs.createWriteStream
const writeFileStream = createWriteStream
/**
* Node.js' [fsPromises.unlink]{@link https://nodejs.org/api/fs.html#fspromisesunlinkpath}.
@ -67,7 +67,7 @@ const writeFileStream = fs.createWriteStream
* @async
* @alias module:storage.unlinkAsync
*/
const unlinkAsync = fsPromises.unlink
const unlinkAsync = unlink
/**
* Node.js' [fsPromises.appendFile]{@link https://nodejs.org/api/fs.html#fspromisesappendfilepath-data-options}.
@ -79,7 +79,7 @@ const unlinkAsync = fsPromises.unlink
* @alias module:storage.appendFileAsync
* @async
*/
const appendFileAsync = fsPromises.appendFile
const appendFileAsync = appendFile
/**
* Node.js' [fsPromises.readFile]{@link https://nodejs.org/api/fs.html#fspromisesreadfilepath-options}.
@ -90,7 +90,7 @@ const appendFileAsync = fsPromises.appendFile
* @alias module:storage.readFileAsync
* @async
*/
const readFileAsync = fsPromises.readFile
const readFileAsync = readFile
/**
* Node.js' [fs.createReadStream]{@link https://nodejs.org/api/fs.html#fscreatereadstreampath-options}.
@ -100,7 +100,7 @@ const readFileAsync = fsPromises.readFile
* @return {fs.ReadStream}
* @alias module:storage.readFileStream
*/
const readFileStream = fs.createReadStream
const readFileStream = createReadStream
/**
* Node.js' [fsPromises.mkdir]{@link https://nodejs.org/api/fs.html#fspromisesmkdirpath-options}.
@ -111,7 +111,7 @@ const readFileStream = fs.createReadStream
* @alias module:storage.mkdirAsync
* @async
*/
const mkdirAsync = fsPromises.mkdir
const mkdirAsync = mkdir
/**
* Removes file if it exists.
@ -161,7 +161,7 @@ const flushToStorageAsync = async (options) => {
let filehandle, errorOnFsync, errorOnClose
try {
filehandle = await fsPromises.open(filename, flags, mode)
filehandle = await open(filename, flags, mode)
try {
await filehandle.sync()
} catch (errFS) {
@ -236,7 +236,7 @@ const writeFileLinesAsync = (filename, lines, mode = DEFAULT_FILE_MODE) => new P
const crashSafeWriteFileLinesAsync = async (filename, lines, modes = { fileMode: DEFAULT_FILE_MODE, dirMode: DEFAULT_DIR_MODE }) => {
const tempFilename = filename + '~'
await flushToStorageAsync({ filename: path.dirname(filename), isDir: true, mode: modes.dirMode })
await flushToStorageAsync({ filename: dirname(filename), isDir: true, mode: modes.dirMode })
const exists = await existsAsync(filename)
if (exists) await flushToStorageAsync({ filename, mode: modes.fileMode })
@ -247,7 +247,7 @@ const crashSafeWriteFileLinesAsync = async (filename, lines, modes = { fileMode:
await renameAsync(tempFilename, filename)
await flushToStorageAsync({ filename: path.dirname(filename), isDir: true, mode: modes.dirMode })
await flushToStorageAsync({ filename: dirname(filename), isDir: true, mode: modes.dirMode })
}
/**
@ -279,39 +279,26 @@ const ensureDatafileIntegrityAsync = async (filename, mode = DEFAULT_FILE_MODE)
* @private
*/
const ensureParentDirectoryExistsAsync = async (filename, mode) => {
const dir = path.dirname(filename)
const parsedDir = path.parse(path.resolve(dir))
const dir = dirname(filename)
const parsedDir = parse(resolve(dir))
// this is because on Windows mkdir throws a permission error when called on the root directory of a volume
if (process.platform !== 'win32' || parsedDir.dir !== parsedDir.root || parsedDir.base !== '') {
await mkdirAsync(dir, { recursive: true, mode })
}
}
// Interface
module.exports.existsAsync = existsAsync
module.exports.renameAsync = renameAsync
module.exports.writeFileAsync = writeFileAsync
module.exports.writeFileLinesAsync = writeFileLinesAsync
module.exports.crashSafeWriteFileLinesAsync = crashSafeWriteFileLinesAsync
module.exports.appendFileAsync = appendFileAsync
module.exports.readFileAsync = readFileAsync
module.exports.unlinkAsync = unlinkAsync
// For tests only (not used by Nedb), not ported to browser/react-native
export { ensureParentDirectoryExistsAsync }
module.exports.mkdirAsync = mkdirAsync
module.exports.readFileStream = readFileStream
module.exports.flushToStorageAsync = flushToStorageAsync
module.exports.ensureDatafileIntegrityAsync = ensureDatafileIntegrityAsync
module.exports.ensureFileDoesntExistAsync = ensureFileDoesntExistAsync
module.exports.ensureParentDirectoryExistsAsync = ensureParentDirectoryExistsAsync
// Interface
export {
appendFileAsync,
crashSafeWriteFileLinesAsync,
ensureDatafileIntegrityAsync,
ensureFileDoesntExistAsync,
existsAsync,
readFileAsync,
readFileStream,
unlinkAsync,
mkdirAsync,
}

@ -77,8 +77,8 @@ const pick = (object, keys) => {
const filterIndexNames = (indexNames) => ([k, v]) => !!(typeof v === 'string' || typeof v === 'number' || typeof v === 'boolean' || isDate(v) || v === null) &&
indexNames.includes(k)
module.exports.uniq = uniq
module.exports.isDate = isDate
module.exports.isRegExp = isRegExp
module.exports.pick = pick
module.exports.filterIndexNames = filterIndexNames
export { uniq }
export { isDate }
export { isRegExp }
export { pick }
export { filterIndexNames }

@ -45,4 +45,4 @@ class Waterfall {
}
}
module.exports = Waterfall
export default Waterfall

@ -19,33 +19,37 @@
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
// IN THE SOFTWARE.
const chai = require('chai')
const fs = require('fs')
const path = require('path')
const byline = require('../lib/byline')
import chai from 'chai'
import { createReadStream, createWriteStream, readFileSync, unlinkSync } from 'node:fs'
import { dirname, join } from 'node:path'
import { fileURLToPath } from 'node:url'
import { Buffer } from 'node:buffer'
import byline from '../src/byline.js'
const __dirname = dirname(fileURLToPath(import.meta.url))
const { assert } = chai
const regEx = /\r\n|[\n\v\f\r\x85\u2028\u2029]/g
const localPath = file => path.join(__dirname, 'byline', file)
const localPath = file => join(__dirname, 'byline', file)
describe('byline', function () {
it('should pipe a small file', function (done) {
const input = fs.createReadStream(localPath('empty.txt'))
const lineStream = byline(input) // convinience API
const output = fs.createWriteStream(localPath('test.txt'))
const input = createReadStream(localPath('empty.txt'))
const lineStream = byline(input) // convenience API
const output = createWriteStream(localPath('test.txt'))
lineStream.pipe(output)
output.on('close', function () {
const out = fs.readFileSync(localPath('test.txt'), 'utf8')
const in_ = fs.readFileSync(localPath('empty.txt'), 'utf8').replace(/\r?\n/g, '')
const out = readFileSync(localPath('test.txt'), 'utf8')
const in_ = readFileSync(localPath('empty.txt'), 'utf8').replace(/\r?\n/g, '')
assert.equal(in_, out)
fs.unlinkSync(localPath('test.txt'))
unlinkSync(localPath('test.txt'))
done()
})
})
it('should work with streams2 API', function (done) {
let stream = fs.createReadStream(localPath('empty.txt'))
let stream = createReadStream(localPath('empty.txt'))
stream = byline(stream)
stream.on('readable', function () {
@ -60,7 +64,7 @@ describe('byline', function () {
})
it('should ignore empty lines by default', function (done) {
const input = fs.createReadStream(localPath('empty.txt'))
const input = createReadStream(localPath('empty.txt'))
const lineStream = byline(input)
lineStream.setEncoding('utf8')
@ -70,7 +74,7 @@ describe('byline', function () {
})
lineStream.on('end', function () {
let lines2 = fs.readFileSync(localPath('empty.txt'), 'utf8').split(regEx)
let lines2 = readFileSync(localPath('empty.txt'), 'utf8').split(regEx)
lines2 = lines2.filter(function (line) {
return line.length > 0
})
@ -80,7 +84,7 @@ describe('byline', function () {
})
it('should keep empty lines when keepEmptyLines is true', function (done) {
const input = fs.createReadStream(localPath('empty.txt'))
const input = createReadStream(localPath('empty.txt'))
const lineStream = byline(input, { keepEmptyLines: true })
lineStream.setEncoding('utf8')
@ -96,7 +100,7 @@ describe('byline', function () {
})
it('should not split a CRLF which spans two chunks', function (done) {
const input = fs.createReadStream(localPath('CRLF.txt'))
const input = createReadStream(localPath('CRLF.txt'))
const lineStream = byline(input, { keepEmptyLines: true })
lineStream.setEncoding('utf8')
@ -121,11 +125,11 @@ describe('byline', function () {
})
function readFile (filename, done) {
const input = fs.createReadStream(filename)
const input = createReadStream(filename)
const lineStream = byline(input)
lineStream.setEncoding('utf8')
let lines2 = fs.readFileSync(filename, 'utf8').split(regEx)
let lines2 = readFileSync(filename, 'utf8').split(regEx)
lines2 = lines2.filter(function (line) {
return line.length > 0
})
@ -158,11 +162,11 @@ describe('byline', function () {
})
it('should pause() and resume() with a huge file', function (done) {
const input = fs.createReadStream(localPath('rfc_huge.txt'))
const input = createReadStream(localPath('rfc_huge.txt'))
const lineStream = byline(input)
lineStream.setEncoding('utf8')
let lines2 = fs.readFileSync(localPath('rfc_huge.txt'), 'utf8').split(regEx)
let lines2 = readFileSync(localPath('rfc_huge.txt'), 'utf8').split(regEx)
lines2 = lines2.filter(function (line) {
return line.length > 0
})
@ -193,8 +197,8 @@ describe('byline', function () {
})
function areStreamsEqualTypes (options, callback) {
const fsStream = fs.createReadStream(localPath('empty.txt'), options)
const lineStream = byline(fs.createReadStream(localPath('empty.txt'), options))
const fsStream = createReadStream(localPath('empty.txt'), options)
const lineStream = byline(createReadStream(localPath('empty.txt'), options))
fsStream.on('data', function (data1) {
lineStream.on('data', function (data2) {
assert.equal(Buffer.isBuffer(data1), Buffer.isBuffer(data2))

@ -1,11 +1,12 @@
/* eslint-env mocha */
import fs from 'node:fs/promises'
import assert from 'node:assert/strict'
import Datastore from '../src/datastore.js'
import Persistence from '../src/persistence.js'
import Cursor from '../src/cursor.js'
import { exists } from './fsUtils.test.js'
const testDb = 'workspace/test.db'
const { promises: fs } = require('fs')
const assert = require('assert').strict
const Datastore = require('../lib/datastore')
const Persistence = require('../lib/persistence')
const Cursor = require('../lib/cursor')
const { exists } = require('./utils.test.js')
describe('Cursor Async', function () {
let d

@ -1,12 +1,13 @@
/* eslint-env mocha */
const chai = require('chai')
import fs from 'node:fs'
import { callbackify } from 'node:util'
import chai from 'chai'
import { each, waterfall } from './utils.test.js'
import Datastore from '../src/datastore.js'
import Persistence from '../src/persistence.js'
import Cursor from '../src/cursor.js'
const testDb = 'workspace/test.db'
const fs = require('fs')
const { each, waterfall } = require('./utils.test.js')
const Datastore = require('../lib/datastore')
const Persistence = require('../lib/persistence')
const Cursor = require('../lib/cursor')
const { callbackify } = require('util')
const { assert } = chai
chai.should()

@ -1,20 +1,20 @@
/* eslint-env mocha */
const chai = require('chai')
const customUtils = require('../lib/customUtils')
import chai from 'chai'
import { uid } from '../src/customUtils.js'
chai.should()
describe('customUtils', function () {
describe('uid', function () {
it('Generates a string of the expected length', function () {
customUtils.uid(3).length.should.equal(3)
customUtils.uid(16).length.should.equal(16)
customUtils.uid(42).length.should.equal(42)
customUtils.uid(1000).length.should.equal(1000)
uid(3).length.should.equal(3)
uid(16).length.should.equal(16)
uid(42).length.should.equal(42)
uid(1000).length.should.equal(1000)
})
// Very small probability of conflict
it('Generated uids should not be the same', function () {
customUtils.uid(56).should.not.equal(customUtils.uid(56))
uid(56).should.not.equal(uid(56))
})
})
})

@ -1,12 +1,14 @@
/* eslint-env mocha */
import fs from 'node:fs/promises'
import assert from 'node:assert/strict'
import * as model from '../src/model.js'
import Datastore from '../src/datastore.js'
import Persistence from '../src/persistence.js'
import { wait } from './utils.test.js'
import { exists } from './fsUtils.test.js'
const testDb = 'workspace/test.db'
const { promises: fs } = require('fs')
const assert = require('assert').strict
const model = require('../lib/model')
const Datastore = require('../lib/datastore')
const Persistence = require('../lib/persistence')
const { wait } = require('./utils.test')
const { exists } = require('./utils.test.js')
const reloadTimeUpperBound = 200 // In ms, an upper bound for the reload time used to check createdAt and updatedAt
describe('Database async', function () {

@ -1,12 +1,14 @@
/* eslint-env mocha */
const chai = require('chai')
import fs from 'node:fs'
import { callbackify } from 'node:util'
import chai from 'chai'
import { apply, each, waterfall } from './utils.test.js'
import * as model from '../src/model.js'
import Datastore from '../src/datastore.js'
import Persistence from '../src/persistence.js'
const testDb = 'workspace/test.db'
const fs = require('fs')
const { apply, each, waterfall } = require('./utils.test.js')
const model = require('../lib/model')
const Datastore = require('../lib/datastore')
const Persistence = require('../lib/persistence')
const { callbackify } = require('util')
const reloadTimeUpperBound = 200 // In ms, an upper bound for the reload time used to check createdAt and updatedAt
const { assert } = chai

@ -1,10 +1,11 @@
/* eslint-env mocha */
import fs from 'node:fs/promises'
import assert from 'node:assert/strict'
import Datastore from '../src/datastore.js'
import Persistence from '../src/persistence.js'
import { exists } from './fsUtils.test.js'
const testDb = 'workspace/test.db'
const { promises: fs } = require('fs')
const assert = require('assert').strict
const Datastore = require('../lib/datastore')
const Persistence = require('../lib/persistence')
const { exists } = require('./utils.test.js')
// Test that operations are executed in the right order
// We prevent Mocha from catching the exception we throw on purpose by remembering all current handlers, remove them and register them back after test ends

@ -1,11 +1,12 @@
/* eslint-env mocha */
const chai = require('chai')
import fs from 'node:fs'
import { callbackify } from 'node:util'
import chai from 'chai'
import { waterfall } from './utils.test.js'
import Datastore from '../src/datastore.js'
import Persistence from '../src/persistence.js'
const testDb = 'workspace/test.db'
const fs = require('fs')
const { waterfall } = require('./utils.test.js')
const Datastore = require('../lib/datastore')
const Persistence = require('../lib/persistence')
const { callbackify } = require('util')
const { assert } = chai
chai.should()

@ -0,0 +1,7 @@
import fs from 'node:fs/promises'
import { constants as fsConstants } from 'node:fs'

/**
 * Resolves to `true` iff `path` is visible to the calling process, `false` otherwise.
 * Never rejects: any `access` failure (ENOENT, EACCES, …) is mapped to `false`.
 * Note: the visibility check is `F_OK` (`FS_OK` does not exist in fs.constants).
 * @param {string} path
 * @return {Promise<boolean>}
 */
export const exists = path => fs.access(path, fsConstants.F_OK).then(() => true, () => false)

/**
 * Callback-style counterpart of {@link exists}: calls `callback(true)` if `path`
 * exists, `callback(false)` otherwise (no error argument, hence the lint disable).
 * @param {string} path
 * @param {function(boolean): void} callback
 */
// eslint-disable-next-line n/no-callback-literal
export const existsCallback = (path, callback) => fs.access(path, fsConstants.F_OK).then(() => callback(true), () => callback(false))

@ -1,6 +1,6 @@
/* eslint-env mocha */
const Index = require('../lib/indexes')
const chai = require('chai')
import chai from 'chai'
import Index from '../src/indexes.js'
const { assert } = chai
chai.should()

@ -1,9 +1,9 @@
/* eslint-env mocha */
const model = require('../lib/model')
const chai = require('chai')
const util = require('util')
const Datastore = require('../lib/datastore')
const fs = require('fs')
import fs from 'node:fs'
import util from 'node:util'
import chai from 'chai'
import * as model from '../src/model.js'
import Datastore from '../src/datastore.js'
const { assert, expect } = chai
chai.should()
@ -370,14 +370,6 @@ describe('Model', function () {
totally: { doesnt: { exist: 'now it does' } }
})
})
it('Doesn\'t replace a falsy field by an object when recursively following dot notation', function () {
const obj = { nested: false }
const updateQuery = { $set: { 'nested.now': 'it is' } }
const modified = model.modify(obj, updateQuery)
assert.deepStrictEqual(modified, { nested: false }) // Object not modified as the nested field doesn't exist
})
}) // End of '$set modifier'
describe('$unset modifier', function () {
@ -794,10 +786,10 @@ describe('Model', function () {
model.compareThings(undefined, undefined).should.equal(0)
otherStuff.forEach(function (stuff) {
for (const stuff of otherStuff) {
model.compareThings(undefined, stuff).should.equal(-1)
model.compareThings(stuff, undefined).should.equal(1)
})
}
})
it('Then null', function () {
@ -805,10 +797,10 @@ describe('Model', function () {
model.compareThings(null, null).should.equal(0)
otherStuff.forEach(function (stuff) {
for (const stuff of otherStuff) {
model.compareThings(null, stuff).should.equal(-1)
model.compareThings(stuff, null).should.equal(1)
})
}
})
it('Then numbers', function () {
@ -823,12 +815,12 @@ describe('Model', function () {
model.compareThings(-2.6, -2.6).should.equal(0)
model.compareThings(5, 5).should.equal(0)
otherStuff.forEach(function (stuff) {
for (const stuff of otherStuff) {
numbers.forEach(function (number) {
model.compareThings(number, stuff).should.equal(-1)
model.compareThings(stuff, number).should.equal(1)
})
})
}
})
it('Then strings', function () {
@ -840,12 +832,12 @@ describe('Model', function () {
model.compareThings('hey', 'hew').should.equal(1)
model.compareThings('hey', 'hey').should.equal(0)
otherStuff.forEach(function (stuff) {
for (const stuff of otherStuff) {
strings.forEach(function (string) {
model.compareThings(string, stuff).should.equal(-1)
model.compareThings(stuff, string).should.equal(1)
})
})
}
})
it('Then booleans', function () {
@ -857,12 +849,12 @@ describe('Model', function () {
model.compareThings(true, false).should.equal(1)
model.compareThings(false, true).should.equal(-1)
otherStuff.forEach(function (stuff) {
bools.forEach(function (bool) {
for (const stuff of otherStuff) {
for (const bool of bools) {
model.compareThings(bool, stuff).should.equal(-1)
model.compareThings(stuff, bool).should.equal(1)
})
})
}
}
})
it('Then dates', function () {
@ -876,12 +868,12 @@ describe('Model', function () {
model.compareThings(new Date(0), new Date(-54341)).should.equal(1)
model.compareThings(new Date(123), new Date(4341)).should.equal(-1)
otherStuff.forEach(function (stuff) {
dates.forEach(function (date) {
for (const stuff of otherStuff) {
for (const date of dates) {
model.compareThings(date, stuff).should.equal(-1)
model.compareThings(stuff, date).should.equal(1)
})
})
}
}
})
it('Then arrays', function () {
@ -896,12 +888,12 @@ describe('Model', function () {
model.compareThings(['hello', 'zzz'], ['hello', 'world']).should.equal(1)
model.compareThings(['hello', 'world'], ['hello', 'world']).should.equal(0)
otherStuff.forEach(function (stuff) {
arrays.forEach(function (array) {
for (const stuff of otherStuff) {
for (const array of arrays) {
model.compareThings(array, stuff).should.equal(-1)
model.compareThings(stuff, array).should.equal(1)
})
})
}
}
})
it('And finally objects', function () {
@ -1341,23 +1333,6 @@ describe('Model', function () {
model.match({ a: 5 }, { a: { $size: 1 } }).should.equal(false)
})
it('Can use $size several times in the same matcher', function () {
model.match({ childrens: ['Riri', 'Fifi', 'Loulou'] }, {
childrens: {
$size: 3,
// eslint-disable-next-line no-dupe-keys
$size: 3
}
}).should.equal(true)
model.match({ childrens: ['Riri', 'Fifi', 'Loulou'] }, {
childrens: {
$size: 3,
// eslint-disable-next-line no-dupe-keys
$size: 4
}
}).should.equal(false) // Of course this can never be true
})
it('Can query array documents with multiple simultaneous conditions', function () {
// Non nested documents
model.match({
@ -1491,10 +1466,9 @@ describe('Model', function () {
})
it('Should throw an error if a logical operator is used without an array or if an unknown logical operator is used', function () {
(function () { model.match({ a: 5 }, { $or: { a: 5, b: 6 } }) }).should.throw();
// eslint-disable-next-line no-dupe-keys
(function () { model.match({ a: 5 }, { $or: { a: 5, a: 6 } }) }).should.throw();
// eslint-disable-next-line no-dupe-keys
(function () { model.match({ a: 5 }, { $and: { a: 5, a: 6 } }) }).should.throw();
(function () { model.match({ a: 5 }, { $and: { a: 5, b: 6 } }) }).should.throw();
(function () { model.match({ a: 5 }, { $unknown: [{ a: 5 }] }) }).should.throw()
})
})
@ -1547,8 +1521,6 @@ describe('Model', function () {
model.match({ tags: ['node', 'js', 'db'] }, { tags: 'python' }).should.equal(false)
model.match({ tags: ['node', 'js', 'db'] }, { tagss: 'js' }).should.equal(false)
model.match({ tags: ['node', 'js', 'db'] }, { tags: 'js' }).should.equal(true)
// eslint-disable-next-line no-dupe-keys
model.match({ tags: ['node', 'js', 'db'] }, { tags: 'js', tags: 'node' }).should.equal(true)
// Mixed matching with array and non array
model.match({ tags: ['node', 'js', 'db'], nedb: true }, { tags: 'js', nedb: true }).should.equal(true)

@ -1,19 +1,19 @@
/* eslint-env mocha */
import { execFile, fork } from 'node:child_process'
import { promisify } from 'node:util'
import { once } from 'node:events'
import { Readable } from 'node:stream'
import fs from 'node:fs/promises'
import path from 'node:path'
import assert from 'node:assert/strict'
import { wait } from './utils.test.js'
import { exists } from './fsUtils.test.js'
import * as model from '../src/model.js'
import Datastore from '../src/datastore.js'
import Persistence from '../src/persistence.js'
import { ensureFileDoesntExistAsync, ensureDatafileIntegrityAsync } from '../src/storage.js'
const testDb = 'workspace/test.db'
const { promises: fs } = require('fs')
const path = require('path')
const assert = require('assert').strict
const { exists } = require('./utils.test.js')
const model = require('../lib/model')
const Datastore = require('../lib/datastore')
const Persistence = require('../lib/persistence')
const storage = require('../lib/storage')
const { execFile, fork } = require('child_process')
const { promisify } = require('util')
const { ensureFileDoesntExistAsync } = require('../lib/storage')
const { once } = require('events')
const { wait } = require('./utils.test')
const Readable = require('stream').Readable
describe('Persistence async', function () {
let d
@ -267,7 +267,7 @@ describe('Persistence async', function () {
const data = (await fs.readFile(d.filename, 'utf8')).split('\n')
let filledCount = 0
data.forEach(item => { if (item.length > 0) { filledCount += 1 } })
for (const item of data) { if (item.length > 0) { filledCount += 1 } }
assert.equal(filledCount, 3)
await d.loadDatabaseAsync()
@ -276,7 +276,7 @@ describe('Persistence async', function () {
const data2 = (await fs.readFile(d.filename, 'utf8')).split('\n')
filledCount = 0
data2.forEach(function (item) { if (item.length > 0) { filledCount += 1 } })
for (const item of data2) { if (item.length > 0) { filledCount += 1 } }
assert.equal(filledCount, 1)
})
@ -422,7 +422,7 @@ describe('Persistence async', function () {
it('Declaring only one hook will throw an exception to prevent data loss', async () => {
const hookTestFilename = 'workspace/hookTest.db'
await storage.ensureFileDoesntExistAsync(hookTestFilename)
await ensureFileDoesntExistAsync(hookTestFilename)
await fs.writeFile(hookTestFilename, 'Some content', 'utf8')
assert.throws(() => {
// eslint-disable-next-line no-new
@ -449,7 +449,7 @@ describe('Persistence async', function () {
it('Declaring two hooks that are not reverse of one another will cause an exception to prevent data loss', async () => {
const hookTestFilename = 'workspace/hookTest.db'
await storage.ensureFileDoesntExistAsync(hookTestFilename)
await ensureFileDoesntExistAsync(hookTestFilename)
await fs.writeFile(hookTestFilename, 'Some content', 'utf8')
assert.throws(() => {
// eslint-disable-next-line no-new
@ -467,7 +467,7 @@ describe('Persistence async', function () {
it('Declaring two hooks that are not reverse of one another will not cause exception if options.testSerializationHooks === false', async () => {
const hookTestFilename = 'workspace/hookTest.db'
await storage.ensureFileDoesntExistAsync(hookTestFilename)
await ensureFileDoesntExistAsync(hookTestFilename)
await fs.writeFile(hookTestFilename, 'Some content', 'utf8')
const db = new Datastore({
filename: hookTestFilename,
@ -481,7 +481,7 @@ describe('Persistence async', function () {
it('A serialization hook can be used to transform data before writing new state to disk', async () => {
const hookTestFilename = 'workspace/hookTest.db'
await storage.ensureFileDoesntExistAsync(hookTestFilename)
await ensureFileDoesntExistAsync(hookTestFilename)
const d = new Datastore({
filename: hookTestFilename,
autoload: true,
@ -549,7 +549,7 @@ describe('Persistence async', function () {
it('Use serialization hook when persisting cached database or compacting', async () => {
const hookTestFilename = 'workspace/hookTest.db'
await storage.ensureFileDoesntExistAsync(hookTestFilename)
await ensureFileDoesntExistAsync(hookTestFilename)
const d = new Datastore({
filename: hookTestFilename,
autoload: true,
@ -600,7 +600,7 @@ describe('Persistence async', function () {
it('Deserialization hook is correctly used when loading data', async () => {
const hookTestFilename = 'workspace/hookTest.db'
await storage.ensureFileDoesntExistAsync(hookTestFilename)
await ensureFileDoesntExistAsync(hookTestFilename)
const d = new Datastore({
filename: hookTestFilename,
autoload: true,
@ -656,7 +656,7 @@ describe('Persistence async', function () {
assert.equal(await exists('workspace/it.db'), false)
assert.equal(await exists('workspace/it.db~'), false)
await storage.ensureDatafileIntegrityAsync(p.filename)
await ensureDatafileIntegrityAsync(p.filename)
assert.equal(await exists('workspace/it.db'), true)
assert.equal(await exists('workspace/it.db~'), false)
@ -675,7 +675,7 @@ describe('Persistence async', function () {
assert.equal(await exists('workspace/it.db'), true)
assert.equal(await exists('workspace/it.db~'), false)
await storage.ensureDatafileIntegrityAsync(p.filename)
await ensureDatafileIntegrityAsync(p.filename)
assert.equal(await exists('workspace/it.db'), true)
assert.equal(await exists('workspace/it.db~'), false)
@ -694,7 +694,7 @@ describe('Persistence async', function () {
assert.equal(await exists('workspace/it.db'), false)
assert.equal(await exists('workspace/it.db~'), true)
await storage.ensureDatafileIntegrityAsync(p.filename)
await ensureDatafileIntegrityAsync(p.filename)
assert.equal(await exists('workspace/it.db'), true)
assert.equal(await exists('workspace/it.db~'), false)
@ -715,7 +715,7 @@ describe('Persistence async', function () {
assert.equal(await exists('workspace/it.db'), true)
assert.equal(await exists('workspace/it.db~'), true)
await storage.ensureDatafileIntegrityAsync(theDb.persistence.filename)
await ensureDatafileIntegrityAsync(theDb.persistence.filename)
assert.equal(await exists('workspace/it.db'), true)
assert.equal(await exists('workspace/it.db~'), true)
@ -812,8 +812,8 @@ describe('Persistence async', function () {
it('Persistence works as expected when everything goes fine', async () => {
const dbFile = 'workspace/test2.db'
await storage.ensureFileDoesntExistAsync(dbFile)
await storage.ensureFileDoesntExistAsync(dbFile + '~')
await ensureFileDoesntExistAsync(dbFile)
await ensureFileDoesntExistAsync(dbFile + '~')
const theDb = new Datastore({ filename: dbFile })
await theDb.loadDatabaseAsync()
@ -872,7 +872,7 @@ describe('Persistence async', function () {
assert(datafileLength > 5000)
// Loading it in a separate process that we will crash before finishing the loadDatabase
const child = fork('test_lac/loadAndCrash.test', [], { stdio: 'inherit' })
const child = fork('test_lac/loadAndCrash.test.cjs', [], { stdio: 'inherit' })
const [code] = await once(child, 'exit')
assert.equal(code, 1) // See test_lac/loadAndCrash.test.js
@ -900,7 +900,7 @@ describe('Persistence async', function () {
// Not run on Windows as there is no clean way to set maximum file descriptors. Not an issue as the code itself is tested.
it('Cannot cause EMFILE errors by opening too many file descriptors', async function () {
this.timeout(10000)
if (process.platform === 'win32' || process.platform === 'win64') { return }
if (process.platform === 'win32' || process.platform === 'win64') { this.skip() }
try {
const { stdout, stderr } = await promisify(execFile)('test_lac/openFdsLaunch.sh')
// The subprocess will not output anything to stdout unless part of the test fails
@ -919,7 +919,7 @@ describe('Persistence async', function () {
describe('ensureFileDoesntExist', function () {
it('Doesnt do anything if file already doesnt exist', async () => {
await storage.ensureFileDoesntExistAsync('workspace/nonexisting')
await ensureFileDoesntExistAsync('workspace/nonexisting')
assert.equal(await exists('workspace/nonexisting'), false)
})
@ -927,7 +927,7 @@ describe('Persistence async', function () {
await fs.writeFile('workspace/existing', 'hello world', 'utf8')
assert.equal(await exists('workspace/existing'), true)
await storage.ensureFileDoesntExistAsync('workspace/existing')
await ensureFileDoesntExistAsync('workspace/existing')
assert.equal(await exists('workspace/existing'), false)
})
}) // ==== End of 'ensureFileDoesntExist' ====

@ -1,17 +1,18 @@
/* eslint-env mocha */
const chai = require('chai')
import { callbackify } from 'node:util'
import fs from 'node:fs'
import { Readable } from 'node:stream'
import { execFile, fork } from 'node:child_process'
import chai from 'chai'
import { apply, waterfall } from './utils.test.js'
import { existsCallback } from './fsUtils.test.js'
import * as model from '../src/model.js'
import Datastore from '../src/datastore.js'
import Persistence from '../src/persistence.js'
import { ensureFileDoesntExistAsync, ensureDatafileIntegrityAsync } from '../src/storage.js'
const testDb = 'workspace/test.db'
const fs = require('fs')
const { apply, waterfall } = require('./utils.test.js')
const model = require('../lib/model')
const Datastore = require('../lib/datastore')
const Persistence = require('../lib/persistence')
const storage = require('../lib/storage')
const { execFile, fork } = require('child_process')
const { callbackify } = require('util')
const { existsCallback } = require('./utils.test')
const { ensureFileDoesntExistAsync } = require('../lib/storage')
const Readable = require('stream').Readable
const { assert } = chai
chai.should()
@ -304,7 +305,7 @@ describe('Persistence', function () {
const data = fs.readFileSync(d.filename, 'utf8').split('\n')
let filledCount = 0
data.forEach(function (item) { if (item.length > 0) { filledCount += 1 } })
for (const item of data) { if (item.length > 0) { filledCount += 1 } }
filledCount.should.equal(3)
d.loadDatabase(function (err) {
@ -314,7 +315,7 @@ describe('Persistence', function () {
const data = fs.readFileSync(d.filename, 'utf8').split('\n')
let filledCount = 0
data.forEach(function (item) { if (item.length > 0) { filledCount += 1 } })
for (const item of data) { if (item.length > 0) { filledCount += 1 } }
filledCount.should.equal(1)
done()
@ -467,7 +468,7 @@ describe('Persistence', function () {
it('Declaring only one hook will throw an exception to prevent data loss', function (done) {
const hookTestFilename = 'workspace/hookTest.db'
callbackify(storage.ensureFileDoesntExistAsync)(hookTestFilename, function () {
callbackify(ensureFileDoesntExistAsync)(hookTestFilename, function () {
fs.writeFileSync(hookTestFilename, 'Some content', 'utf8');
(function () {
@ -500,7 +501,7 @@ describe('Persistence', function () {
it('Declaring two hooks that are not reverse of one another will cause an exception to prevent data loss', function (done) {
const hookTestFilename = 'workspace/hookTest.db'
callbackify(storage.ensureFileDoesntExistAsync)(hookTestFilename, function () {
callbackify(ensureFileDoesntExistAsync)(hookTestFilename, function () {
fs.writeFileSync(hookTestFilename, 'Some content', 'utf8');
(function () {
@ -522,7 +523,7 @@ describe('Persistence', function () {
it('A serialization hook can be used to transform data before writing new state to disk', function (done) {
const hookTestFilename = 'workspace/hookTest.db'
callbackify(storage.ensureFileDoesntExistAsync)(hookTestFilename, function () {
callbackify(ensureFileDoesntExistAsync)(hookTestFilename, function () {
const d = new Datastore({
filename: hookTestFilename,
autoload: true,
@ -599,7 +600,7 @@ describe('Persistence', function () {
it('Use serialization hook when persisting cached database or compacting', function (done) {
const hookTestFilename = 'workspace/hookTest.db'
callbackify(storage.ensureFileDoesntExistAsync)(hookTestFilename, function () {
callbackify(ensureFileDoesntExistAsync)(hookTestFilename, function () {
const d = new Datastore({
filename: hookTestFilename,
autoload: true,
@ -659,7 +660,7 @@ describe('Persistence', function () {
it('Deserialization hook is correctly used when loading data', function (done) {
const hookTestFilename = 'workspace/hookTest.db'
callbackify(storage.ensureFileDoesntExistAsync)(hookTestFilename, function () {
callbackify(ensureFileDoesntExistAsync)(hookTestFilename, function () {
const d = new Datastore({
filename: hookTestFilename,
autoload: true,
@ -727,7 +728,7 @@ describe('Persistence', function () {
fs.existsSync('workspace/it.db').should.equal(false)
fs.existsSync('workspace/it.db~').should.equal(false)
callbackify(storage.ensureDatafileIntegrityAsync)(p.filename, function (err) {
callbackify(ensureDatafileIntegrityAsync)(p.filename, function (err) {
assert.isNull(err)
fs.existsSync('workspace/it.db').should.equal(true)
@ -750,7 +751,7 @@ describe('Persistence', function () {
fs.existsSync('workspace/it.db').should.equal(true)
fs.existsSync('workspace/it.db~').should.equal(false)
callbackify(storage.ensureDatafileIntegrityAsync)(p.filename, function (err) {
callbackify(ensureDatafileIntegrityAsync)(p.filename, function (err) {
assert.isNull(err)
fs.existsSync('workspace/it.db').should.equal(true)
@ -773,7 +774,7 @@ describe('Persistence', function () {
fs.existsSync('workspace/it.db').should.equal(false)
fs.existsSync('workspace/it.db~').should.equal(true)
callbackify(storage.ensureDatafileIntegrityAsync)(p.filename, function (err) {
callbackify(ensureDatafileIntegrityAsync)(p.filename, function (err) {
assert.isNull(err)
fs.existsSync('workspace/it.db').should.equal(true)
@ -798,7 +799,7 @@ describe('Persistence', function () {
fs.existsSync('workspace/it.db').should.equal(true)
fs.existsSync('workspace/it.db~').should.equal(true)
callbackify(storage.ensureDatafileIntegrityAsync)(theDb.persistence.filename, function (err) {
callbackify(ensureDatafileIntegrityAsync)(theDb.persistence.filename, function (err) {
assert.isNull(err)
fs.existsSync('workspace/it.db').should.equal(true)
@ -925,8 +926,8 @@ describe('Persistence', function () {
let theDb, theDb2, doc1, doc2
waterfall([
apply(callbackify(storage.ensureFileDoesntExistAsync), dbFile),
apply(callbackify(storage.ensureFileDoesntExistAsync), dbFile + '~'),
apply(callbackify(ensureFileDoesntExistAsync), dbFile),
apply(callbackify(ensureFileDoesntExistAsync), dbFile + '~'),
function (cb) {
theDb = new Datastore({ filename: dbFile })
theDb.loadDatabase(cb)
@ -1019,7 +1020,7 @@ describe('Persistence', function () {
assert(datafileLength > 5000)
// Loading it in a separate process that we will crash before finishing the loadDatabase
fork('test_lac/loadAndCrash.test').on('exit', function (code) {
fork('test_lac/loadAndCrash.test.cjs').on('exit', function (code) {
code.should.equal(1) // See test_lac/loadAndCrash.test.js
fs.existsSync('workspace/lac.db').should.equal(true)

@ -1,8 +1,8 @@
/* eslint-env jest */
// Forked from https://github.com/antoniopresto/react-native-local-mongodb/blob/93acbc8a9aaca86aed1d632855cd8b984501147b/test/persistence.test.js
const { promisify } = require('util')
const AsyncStorage = require('@react-native-async-storage/async-storage').default
const DataStore = require('../../')
import { promisify } from 'util'
import AsyncStorage from '@react-native-async-storage/async-storage'
import DataStore from '../../'
const getDb = async () => {
await AsyncStorage.clear()

@ -1,3 +1,3 @@
const browserResolve = require('browser-resolve')
import browserResolve from 'browser-resolve'
module.exports = (id, opts) => browserResolve.sync(id, { ...opts, browser: 'react-native' })
// Resolver used by the react-native test runner: delegates to browser-resolve,
// forcing the "react-native" field of package.json to win over "browser"/"main".
export default function resolveReactNative (id, opts) {
  const options = { ...opts, browser: 'react-native' }
  return browserResolve.sync(id, options)
}

@ -1,17 +1,19 @@
const { callbackify, promisify } = require('util')
const { promises: fs, constants: fsConstants } = require('fs')
import { callbackify, promisify } from 'util'
// Must use an intermediary variable, otherwise Rollup imports callbackify from util directly
// (along with crypto somehow) in files importing customUtils.
const _callbackify = callbackify
/**
 * Runs callback-style tasks strictly in sequence, awaiting each one
 * before starting the next (async-flavoured replacement for async.waterfall).
 * @param {Iterable<Function>} tasks - callback-style thunks `cb => ...`
 * @returns {Promise<void>} resolves once every task has completed
 */
const waterfallAsync = async function (tasks) {
  for (const step of tasks) {
    const stepAsync = promisify(step)
    await stepAsync()
  }
}
const waterfall = callbackify(waterfallAsync)
const waterfall = _callbackify(waterfallAsync)
/**
 * Applies a callback-style `iterator` to every element of `arr` in parallel.
 * @param {Array} arr - elements to process
 * @param {Function} iterator - callback-style `(el, cb) => ...`
 * @returns {Promise<Array>} resolves with the iterator results once all are done
 */
const eachAsync = async function (arr, iterator) {
  const iteratorAsync = promisify(iterator)
  const pending = arr.map(element => iteratorAsync(element))
  return Promise.all(pending)
}
const each = callbackify(eachAsync)
const each = _callbackify(eachAsync)
const apply = function (fn) {
const args = Array.prototype.slice.call(arguments, 1)
@ -26,21 +28,16 @@ const whilstAsync = async (test, fn) => {
while (test()) await promisify(fn)()
}
const whilst = callbackify(whilstAsync)
const whilst = _callbackify(whilstAsync)
/**
 * Resolves after `delay` milliseconds; never rejects.
 * @param {number} delay - time to wait, in ms
 * @returns {Promise<void>}
 */
const wait = function (delay) {
  return new Promise(function (resolve) {
    setTimeout(resolve, delay)
  })
}
/**
 * Resolves with a boolean telling whether `path` exists.
 * Fix: the constant is `F_OK`, not `FS_OK` — `fs.constants.FS_OK` does not
 * exist, so the old code passed `undefined` as the mode and only worked
 * because fs.access defaults an undefined mode to F_OK.
 * @param {string} path
 * @returns {Promise<boolean>}
 */
const exists = path => fs.access(path, fsConstants.F_OK).then(() => true, () => false)
/**
 * Callback flavour of `exists`; calls back with a bare boolean (no error argument).
 * @param {string} path
 * @param {function(boolean): void} callback
 */
// eslint-disable-next-line n/no-callback-literal
const existsCallback = (path, callback) => fs.access(path, fsConstants.F_OK).then(() => callback(true), () => callback(false))
module.exports.whilst = whilst
module.exports.apply = apply
module.exports.waterfall = waterfall
module.exports.each = each
module.exports.wait = wait
module.exports.exists = exists
module.exports.existsCallback = existsCallback
module.exports.callbackify = callbackify
export {
whilst,
apply,
waterfall,
each,
wait
}

@ -53,7 +53,7 @@ class FakeFsWriteStream extends Writable {
fs.createWriteStream = path => new FakeFsWriteStream(path)
// End of fs monkey patching
const Nedb = require('../lib/datastore.js')
const Nedb = require('../cjs/datastore.cjs')
const db = new Nedb({ filename: 'workspace/lac.db' })
db.loadDatabaseAsync() // no need to await

@ -1,6 +1,5 @@
const fs = require('fs')
const fsPromises = fs.promises
const Nedb = require('../lib/datastore')
import fsPromises from 'node:fs/promises'
import Nedb from '../src/datastore.js'
const N = 64
// A console.error triggers an error of the parent test

@ -1,9 +1,12 @@
'use strict'
const path = require('path')
const webpack = require('webpack')
import { dirname, resolve, join } from 'node:path'
import { fileURLToPath } from 'node:url'
import webpack from 'webpack'
module.exports = (env, argv) => {
const __dirname = dirname(fileURLToPath(import.meta.url))
export default (env, argv) => {
const minimize = argv.env.minimize || false
const baseConfig = {
@ -15,7 +18,7 @@ module.exports = (env, argv) => {
minimize
},
output: {
path: path.join(__dirname, 'browser-version/out'),
path: __dirname,
filename: pathData => `${pathData.chunk.name.toLowerCase()}${minimize ? '.min' : ''}.js`,
libraryTarget: 'window',
library: '[name]'
@ -23,9 +26,9 @@ module.exports = (env, argv) => {
}
const pluginsNedb = [
new webpack.NormalModuleReplacementPlugin(new RegExp(path.resolve(__dirname, 'lib/storage.js')), path.resolve(__dirname, 'browser-version/lib/storage.browser.js')),
new webpack.NormalModuleReplacementPlugin(new RegExp(path.resolve(__dirname, 'lib/customUtils.js')), path.resolve(__dirname, 'browser-version/lib/customUtils.js')),
new webpack.NormalModuleReplacementPlugin(/byline/, path.resolve(__dirname, 'browser-version/lib/byline.js'))
new webpack.NormalModuleReplacementPlugin(new RegExp(resolve(__dirname, 'src/storage.js')), resolve(__dirname, 'src/browser/storage.browser.js')),
new webpack.NormalModuleReplacementPlugin(new RegExp(resolve(__dirname, 'src/customUtils.js')), resolve(__dirname, 'src/browser/customUtils.js')),
new webpack.NormalModuleReplacementPlugin(new RegExp(resolve(__dirname, 'src/byline.js')), resolve(__dirname, 'src/browser/byline.js'))
]
const polyfillPlugins = [
@ -43,7 +46,7 @@ module.exports = (env, argv) => {
name: 'Nedb',
plugins: pluginsNedb,
entry: {
Nedb: path.join(__dirname, 'lib', 'datastore.js')
Nedb: join(__dirname, 'src', 'datastore.js')
}
},
{
@ -53,13 +56,13 @@ module.exports = (env, argv) => {
resolve: {
fallback: {
fs: false,
path: require.resolve('path-browserify'),
util: require.resolve('util/'),
path: import.meta.resolve('path-browserify'),
util: import.meta.resolve('util/'),
crypto: false
}
},
entry: {
testUtils: path.join(__dirname, 'test', 'utils.test.js')
testUtils: join(__dirname, 'test', 'utils.test.js')
}
}
]

Loading…
Cancel
Save