diff --git a/API.md b/API.md
index 49bdc55..d99c5b0 100644
--- a/API.md
+++ b/API.md
@@ -258,14 +258,15 @@ Will return pointers to matched elements (shallow copies), returning full copies
* [.ttlIndexes](#Datastore+ttlIndexes) : Object.<string, number>
* [.autoloadPromise](#Datastore+autoloadPromise) : Promise
* [.compareStrings()](#Datastore+compareStrings) : [compareStrings
](#compareStrings)
- * [.loadDatabase(callback)](#Datastore+loadDatabase)
+ * [.loadDatabase([callback])](#Datastore+loadDatabase)
* [.loadDatabaseAsync()](#Datastore+loadDatabaseAsync) ⇒ Promise
* [.getAllData()](#Datastore+getAllData) ⇒ [Array.<document>
](#document)
- * [.ensureIndex(options, callback)](#Datastore+ensureIndex)
+ * [.ensureIndex(options, [callback])](#Datastore+ensureIndex)
* [.ensureIndexAsync(options)](#Datastore+ensureIndexAsync) ⇒ Promise.<void>
* [.removeIndex(fieldName, callback)](#Datastore+removeIndex)
* [.removeIndexAsync(fieldName)](#Datastore+removeIndexAsync) ⇒ Promise.<void>
- * [.insertAsync(newDoc)](#Datastore+insertAsync) ⇒ [Promise.<document>
](#document)
+ * [.insert(newDoc, [callback])](#Datastore+insert)
+ * [.insertAsync(newDoc)](#Datastore+insertAsync) ⇒ Promise.<(document\|Array.<document>)>
* [.count(query, [callback])](#Datastore+count) ⇒ Cursor.<number>
\| undefined
* [.countAsync(query)](#Datastore+countAsync) ⇒ Cursor.<number>
* [.find(query, [projection], [callback])](#Datastore+find) ⇒ Cursor.<Array.<document>>
\| undefined
@@ -408,21 +409,21 @@ letters. Native localCompare will most of the time be the right cho
**Access**: protected
-### neDB.loadDatabase(callback)
-
Load the database from the datafile, and trigger the execution of buffered commands if any.
+### neDB.loadDatabase([callback])
+Callback version of [loadDatabaseAsync](#Datastore+loadDatabaseAsync).
**Kind**: instance method of [Datastore
](#Datastore)
+**See**: Datastore#loadDatabaseAsync
**Params**
-- callback [NoParamCallback
](#NoParamCallback)
+- [callback] [NoParamCallback
](#NoParamCallback)
### neDB.loadDatabaseAsync() ⇒ Promise
-Async version of [loadDatabase](#Datastore+loadDatabase).
+Load the database from the datafile, and trigger the execution of buffered commands if any.
**Kind**: instance method of [Datastore
](#Datastore)
-**See**: Datastore#loadDatabase
### neDB.getAllData() ⇒ [Array.<document>
](#document)
@@ -431,36 +432,40 @@ letters. Native localCompare will most of the time be the right cho
**Kind**: instance method of [Datastore
](#Datastore)
-### neDB.ensureIndex(options, callback)
-Ensure an index is kept for this field. Same parameters as lib/indexes
-This function acts synchronously on the indexes, however the persistence of the indexes is deferred with the
-executor.
-Previous versions said explicitly the callback was optional, it is now recommended setting one.
+### neDB.ensureIndex(options, [callback])
+Callback version of [ensureIndexAsync](#Datastore+ensureIndexAsync).
**Kind**: instance method of [Datastore
](#Datastore)
+**See**: Datastore#ensureIndexAsync
**Params**
- options object
- - .fieldName string
- Name of the field to index. Use the dot notation to index a field in a nested document.
- - [.unique] boolean
= false
- Enforce field uniqueness. Note that a unique index will raise an error if you try to index two documents for which the field is not defined.
- - [.sparse] boolean
= false
- don't index documents for which the field is not defined. Use this option along with "unique" if you want to accept multiple documents for which it is not defined.
- - [.expireAfterSeconds] number
- if set, the created index is a TTL (time to live) index, that will automatically remove documents when the system date becomes larger than the date on the indexed field plus expireAfterSeconds
. Documents where the indexed field is not specified or not a Date
object are ignored
-- callback [NoParamCallback
](#NoParamCallback) - Callback, signature: err
+ - .fieldName string
+ - [.unique] boolean
= false
+ - [.sparse] boolean
= false
+ - [.expireAfterSeconds] number
+- [callback] [NoParamCallback
](#NoParamCallback)
### neDB.ensureIndexAsync(options) ⇒ Promise.<void>
-Async version of [ensureIndex](#Datastore+ensureIndex).
+Ensure an index is kept for this field. Same parameters as lib/indexes.
+This function acts synchronously on the indexes; however, the persistence of the indexes is deferred with the
+executor.
**Kind**: instance method of [Datastore
](#Datastore)
-**See**: Datastore#ensureIndex
**Params**
- options object
- - .fieldName string
- Name of the field to index. Use the dot notation to index a field in a nested document.
- - [.unique] boolean
= false
- Enforce field uniqueness. Note that a unique index will raise an error if you try to index two documents for which the field is not defined.
- - [.sparse] boolean
= false
- Don't index documents for which the field is not defined. Use this option along with "unique" if you want to accept multiple documents for which it is not defined.
- - [.expireAfterSeconds] number
- If set, the created index is a TTL (time to live) index, that will automatically remove documents when the system date becomes larger than the date on the indexed field plus expireAfterSeconds
. Documents where the indexed field is not specified or not a Date
object are ignored
+ - .fieldName string
- Name of the field to index. Use the dot notation to index a field in a nested
+document.
+ - [.unique] boolean
= false
- Enforce field uniqueness. Note that a unique index will raise an error
+if you try to index two documents for which the field is not defined.
+ - [.sparse] boolean
= false
- Don't index documents for which the field is not defined. Use this option
+along with "unique" if you want to accept multiple documents for which it is not defined.
+ - [.expireAfterSeconds] number
- If set, the created index is a TTL (time to live) index, that will
+automatically remove documents when the system date becomes larger than the date on the indexed field plus
+expireAfterSeconds
. Documents where the indexed field is not specified or not a Date
object are ignored.
@@ -487,31 +492,45 @@ field in a nested document.
- fieldName string
- Field name of the index to remove. Use the dot notation to remove an index referring to a
field in a nested document.
-
+
-### neDB.insertAsync(newDoc) ⇒ [Promise.<document>
](#document)
-Async version of [Datastore#insert](Datastore#insert).
+### neDB.insert(newDoc, [callback])
+Callback version of [insertAsync](#Datastore+insertAsync).
**Kind**: instance method of [Datastore
](#Datastore)
+**See**: Datastore#insertAsync
**Params**
- newDoc [document
](#document) | [Array.<document>
](#document)
+- [callback] [SingleDocumentCallback
](#SingleDocumentCallback) | [MultipleDocumentsCallback
](#MultipleDocumentsCallback)
+
+
+
+### neDB.insertAsync(newDoc) ⇒ Promise.<(document\|Array.<document>)>
+Insert a new document, or new documents.
+
+**Kind**: instance method of [Datastore
](#Datastore)
+**Returns**: Promise.<(document\|Array.<document>)>
- The document(s) inserted.
+**Params**
+
+- newDoc [document
](#document) | [Array.<document>
](#document) - Document or array of documents to insert.
### neDB.count(query, [callback]) ⇒ Cursor.<number>
\| undefined
-Count all documents matching the query.
+Callback version of [countAsync](#Datastore+countAsync).
**Kind**: instance method of [Datastore
](#Datastore)
+**See**: Datastore#countAsync
**Params**
-- query [query
](#query) - MongoDB-style query
-- [callback] [countCallback
](#Datastore..countCallback) - If given, the function will return undefined, otherwise it will return the Cursor.
+- query [query
](#query)
+- [callback] [countCallback
](#Datastore..countCallback)
### neDB.countAsync(query) ⇒ Cursor.<number>
-Async version of [count](#Datastore+count).
+Count all documents matching the query.
**Kind**: instance method of [Datastore
](#Datastore)
**Returns**: Cursor.<number>
- count
@@ -522,21 +541,22 @@ field in a nested document.
### neDB.find(query, [projection], [callback]) ⇒ Cursor.<Array.<document>>
\| undefined
-Find all documents matching the query
-If no callback is passed, we return the cursor so that user can limit, skip and finally exec
+Callback version of [findAsync](#Datastore+findAsync).
**Kind**: instance method of [Datastore
](#Datastore)
+**See**: Datastore#findAsync
**Params**
-- query [query
](#query) - MongoDB-style query
-- [projection] [projection
](#projection) | [MultipleDocumentsCallback
](#MultipleDocumentsCallback) = {}
- MongoDB-style projection. If not given, will be
-interpreted as the callback.
-- [callback] [MultipleDocumentsCallback
](#MultipleDocumentsCallback) - Optional callback, signature: err, docs
+- query [query
](#query)
+- [projection] [projection
](#projection) | [MultipleDocumentsCallback
](#MultipleDocumentsCallback) = {}
+- [callback] [MultipleDocumentsCallback
](#MultipleDocumentsCallback)
### neDB.findAsync(query, [projection]) ⇒ Cursor.<Array.<document>>
-Async version of [find](#Datastore+find).
+Find all documents matching the query.
+We return the [Cursor](#Cursor) that the user can either await directly or use to [limit](#Cursor+limit) or
+[skip](#Cursor+skip) before awaiting.
**Kind**: instance method of [Datastore
](#Datastore)
**Params**
@@ -547,22 +567,23 @@ interpreted as the callback.
### neDB.findOne(query, [projection], [callback]) ⇒ [Cursor.<document>
](#document) \| undefined
-Find one document matching the query.
+Callback version of [findOneAsync](#Datastore+findOneAsync).
**Kind**: instance method of [Datastore
](#Datastore)
+**See**: Datastore#findOneAsync
**Params**
-- query [query
](#query) - MongoDB-style query
-- [projection] [projection
](#projection) | [SingleDocumentCallback
](#SingleDocumentCallback) = {}
- MongoDB-style projection
-- [callback] [SingleDocumentCallback
](#SingleDocumentCallback) - Optional callback, signature: err, doc
+- query [query
](#query)
+- [projection] [projection
](#projection) | [SingleDocumentCallback
](#SingleDocumentCallback) = {}
+- [callback] [SingleDocumentCallback
](#SingleDocumentCallback)
### neDB.findOneAsync(query, projection) ⇒ [Cursor.<document>
](#document)
-Async version of [findOne](#Datastore+findOne).
+Find one document matching the query.
+We return the [Cursor](#Cursor) that the user can either await directly or use to [skip](#Cursor+skip) before awaiting.
**Kind**: instance method of [Datastore
](#Datastore)
-**See**: Datastore#findOne
**Params**
- query [query
](#query) - MongoDB-style query
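The API.md changes above make each `*Async` method carry the real description while the callback form simply points to it. A minimal usage sketch of that promise-first surface, assuming the package is installed as `@seald-io/nedb` and that a writable `workspace/` directory exists (both assumptions, not part of this diff):

```js
// Sketch only: exercises the promise-first API documented above.
// The require path and file name are assumptions for illustration.
const Datastore = require('@seald-io/nedb')

async function main () {
  const db = new Datastore({ filename: 'workspace/example.db' })
  await db.loadDatabaseAsync()

  // TTL index: documents whose `createdAt` is older than an hour are removed automatically.
  await db.ensureIndexAsync({ fieldName: 'createdAt', expireAfterSeconds: 3600 })

  // insertAsync accepts a single document or an array, and resolves with what was inserted.
  const inserted = await db.insertAsync([
    { planet: 'Earth', createdAt: new Date() },
    { planet: 'Mars', createdAt: new Date() }
  ])

  // findAsync returns a Cursor: await it directly, or chain limit/skip first.
  const firstTwo = await db.findAsync({}).limit(2)
  const mars = await db.findOneAsync({ planet: 'Mars' })
  const total = await db.countAsync({})

  console.log(inserted.length, firstTwo.length, mars.planet, total)
}

main().catch(console.error)
```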
diff --git a/lib/cursor.js b/lib/cursor.js
index dfe32f3..87a583c 100755
--- a/lib/cursor.js
+++ b/lib/cursor.js
@@ -247,7 +247,4 @@ class Cursor {
}
// Interface
-/**
- * @type {Cursor}
- */
module.exports = Cursor
diff --git a/lib/datastore.js b/lib/datastore.js
index 5823f6e..55052b5 100755
--- a/lib/datastore.js
+++ b/lib/datastore.js
@@ -313,19 +313,19 @@ class Datastore extends EventEmitter {
}
/**
- * Load the database from the datafile, and trigger the execution of buffered commands if any.
- * @param {NoParamCallback} callback
+ * Callback version of {@link Datastore#loadDatabaseAsync}.
+ * @param {NoParamCallback} [callback]
+ * @see Datastore#loadDatabaseAsync
*/
loadDatabase (callback) {
- if (typeof callback !== 'function') callback = () => {}
- callbackify(() => this.loadDatabaseAsync())(callback)
+ const promise = this.loadDatabaseAsync()
+ if (typeof callback === 'function') callbackify(() => promise)(callback)
}
/**
- * Async version of {@link Datastore#loadDatabase}.
+ * Load the database from the datafile, and trigger the execution of buffered commands if any.
* @async
* @return {Promise}
- * @see Datastore#loadDatabase
*/
loadDatabaseAsync () {
return this.executor.pushAsync(() => this.persistence.loadDatabaseAsync(), true)
@@ -351,31 +351,35 @@ class Datastore extends EventEmitter {
}
/**
- * Ensure an index is kept for this field. Same parameters as lib/indexes
- * This function acts synchronously on the indexes, however the persistence of the indexes is deferred with the
- * executor.
- * Previous versions said explicitly the callback was optional, it is now recommended setting one.
+ * Callback version of {@link Datastore#ensureIndexAsync}.
* @param {object} options
- * @param {string} options.fieldName Name of the field to index. Use the dot notation to index a field in a nested document.
- * @param {boolean} [options.unique = false] Enforce field uniqueness. Note that a unique index will raise an error if you try to index two documents for which the field is not defined.
- * @param {boolean} [options.sparse = false] don't index documents for which the field is not defined. Use this option along with "unique" if you want to accept multiple documents for which it is not defined.
- * @param {number} [options.expireAfterSeconds] - if set, the created index is a TTL (time to live) index, that will automatically remove documents when the system date becomes larger than the date on the indexed field plus `expireAfterSeconds`. Documents where the indexed field is not specified or not a `Date` object are ignored
- * @param {NoParamCallback} callback Callback, signature: err
+ * @param {string} options.fieldName
+ * @param {boolean} [options.unique = false]
+ * @param {boolean} [options.sparse = false]
+ * @param {number} [options.expireAfterSeconds]
+ * @param {NoParamCallback} [callback]
+ * @see Datastore#ensureIndexAsync
*/
- ensureIndex (options = {}, callback = () => {}) {
+ ensureIndex (options = {}, callback) {
const promise = this.ensureIndexAsync(options) // to make sure the synchronous part of ensureIndexAsync is executed synchronously
- callbackify(() => promise)(callback)
+ if (typeof callback === 'function') callbackify(() => promise)(callback)
}
/**
- * Async version of {@link Datastore#ensureIndex}.
+ * Ensure an index is kept for this field. Same parameters as lib/indexes.
+ * This function acts synchronously on the indexes; however, the persistence of the indexes is deferred with the
+ * executor.
* @param {object} options
- * @param {string} options.fieldName Name of the field to index. Use the dot notation to index a field in a nested document.
- * @param {boolean} [options.unique = false] Enforce field uniqueness. Note that a unique index will raise an error if you try to index two documents for which the field is not defined.
- * @param {boolean} [options.sparse = false] Don't index documents for which the field is not defined. Use this option along with "unique" if you want to accept multiple documents for which it is not defined.
- * @param {number} [options.expireAfterSeconds] - If set, the created index is a TTL (time to live) index, that will automatically remove documents when the system date becomes larger than the date on the indexed field plus `expireAfterSeconds`. Documents where the indexed field is not specified or not a `Date` object are ignored
+ * @param {string} options.fieldName Name of the field to index. Use the dot notation to index a field in a nested
+ * document.
+ * @param {boolean} [options.unique = false] Enforce field uniqueness. Note that a unique index will raise an error
+ * if you try to index two documents for which the field is not defined.
+ * @param {boolean} [options.sparse = false] Don't index documents for which the field is not defined. Use this option
+ * along with "unique" if you want to accept multiple documents for which it is not defined.
+ * @param {number} [options.expireAfterSeconds] - If set, the created index is a TTL (time to live) index, that will
+ * automatically remove documents when the system date becomes larger than the date on the indexed field plus
+ * `expireAfterSeconds`. Documents where the indexed field is not specified or not a `Date` object are ignored.
* @return {Promise}
- * @see Datastore#ensureIndex
*/
async ensureIndexAsync (options = {}) {
if (!options.fieldName) {
@@ -676,21 +680,20 @@ class Datastore extends EventEmitter {
}
/**
- * Insert a new document.
+ * Callback version of {@link Datastore#insertAsync}.
* @param {document|document[]} newDoc
- * @param {SingleDocumentCallback} [callback = () => {}] Optional callback, signature: err, insertedDoc
- *
- * @private
+ * @param {SingleDocumentCallback|MultipleDocumentsCallback} [callback]
+ * @see Datastore#insertAsync
*/
insert (newDoc, callback) {
- if (typeof callback !== 'function') callback = () => {}
- callbackify(doc => this.insertAsync(doc))(newDoc, callback)
+ const promise = this.insertAsync(newDoc)
+ if (typeof callback === 'function') callbackify(() => promise)(callback)
}
/**
- * Async version of {@link Datastore#insert}.
- * @param {document|document[]} newDoc
- * @return {Promise}
+ * Insert a new document, or new documents.
+ * @param {document|document[]} newDoc Document or array of documents to insert.
+ * @return {Promise<document|document[]>} The document(s) inserted.
* @async
*/
insertAsync (newDoc) {
@@ -704,10 +707,11 @@ class Datastore extends EventEmitter {
*/
/**
- * Count all documents matching the query.
- * @param {query} query MongoDB-style query
- * @param {Datastore~countCallback} [callback] If given, the function will return undefined, otherwise it will return the Cursor.
+ * Callback version of {@link Datastore#countAsync}.
+ * @param {query} query
+ * @param {Datastore~countCallback} [callback]
* @return {Cursor|undefined}
+ * @see Datastore#countAsync
*/
count (query, callback) {
const cursor = this.countAsync(query)
@@ -717,7 +721,7 @@ class Datastore extends EventEmitter {
}
/**
- * Async version of {@link Datastore#count}.
+ * Count all documents matching the query.
* @param {query} query MongoDB-style query
* @return {Cursor} count
* @async
@@ -727,13 +731,12 @@ class Datastore extends EventEmitter {
}
/**
- * Find all documents matching the query
- * If no callback is passed, we return the cursor so that user can limit, skip and finally exec
- * @param {query} query MongoDB-style query
- * @param {projection|MultipleDocumentsCallback} [projection = {}] MongoDB-style projection. If not given, will be
- * interpreted as the callback.
- * @param {MultipleDocumentsCallback} [callback] Optional callback, signature: err, docs
+ * Callback version of {@link Datastore#findAsync}.
+ * @param {query} query
+ * @param {projection|MultipleDocumentsCallback} [projection = {}]
+ * @param {MultipleDocumentsCallback} [callback]
* @return {Cursor|undefined}
+ * @see Datastore#findAsync
*/
find (query, projection, callback) {
if (arguments.length === 1) {
@@ -753,7 +756,9 @@ class Datastore extends EventEmitter {
}
/**
- * Async version of {@link Datastore#find}.
+ * Find all documents matching the query.
+ * We return the {@link Cursor} that the user can either `await` directly or use to {@link Cursor#limit} or
+ * {@link Cursor#skip} before awaiting.
* @param {query} query MongoDB-style query
* @param {projection} [projection = {}] MongoDB-style projection
* @return {Cursor}
@@ -773,11 +778,12 @@ class Datastore extends EventEmitter {
*/
/**
- * Find one document matching the query.
- * @param {query} query MongoDB-style query
- * @param {projection|SingleDocumentCallback} [projection = {}] MongoDB-style projection
- * @param {SingleDocumentCallback} [callback] Optional callback, signature: err, doc
+ * Callback version of {@link Datastore#findOneAsync}.
+ * @param {query} query
+ * @param {projection|SingleDocumentCallback} [projection = {}]
+ * @param {SingleDocumentCallback} [callback]
* @return {Cursor|undefined}
+ * @see Datastore#findOneAsync
*/
findOne (query, projection, callback) {
if (arguments.length === 1) {
@@ -797,11 +803,11 @@ class Datastore extends EventEmitter {
}
/**
- * Async version of {@link Datastore#findOne}.
+ * Find one document matching the query.
+ * We return the {@link Cursor} that the user can either `await` directly or use to {@link Cursor#skip} before awaiting.
* @param {query} query MongoDB-style query
* @param {projection} projection MongoDB-style projection
* @return {Cursor}
- * @see Datastore#findOne
*/
findOneAsync (query, projection = {}) {
const cursor = new Cursor(this, query, docs => docs.length === 1 ? model.deepCopy(docs[0]) : null)
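Throughout datastore.js the callback wrappers now share one shape: call the async variant immediately, so its synchronous side effects (such as queueing work on the executor) happen even without a callback, then attach the callback only if one was supplied. A standalone sketch of that shape; `withOptionalCallback` and `Example` are hypothetical names used only for illustration:

```js
const { callbackify } = require('util')

// Hypothetical helper mirroring the wrapper shape used above:
// the promise is created unconditionally, the callback is optional.
function withOptionalCallback (promise, callback) {
  if (typeof callback === 'function') callbackify(() => promise)(callback)
  // Without a callback, callers are expected to use the async variant directly.
}

class Example {
  async loadAsync () {
    // stand-in for loadDatabaseAsync / ensureIndexAsync / insertAsync ...
  }

  load (callback) {
    const promise = this.loadAsync() // synchronous part runs right away
    withOptionalCallback(promise, callback)
  }
}

// Both styles work against the same underlying promise:
new Example().load(err => { if (err) throw err })
```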
diff --git a/lib/storage.js b/lib/storage.js
index 1da3fd1..b6d72cc 100755
--- a/lib/storage.js
+++ b/lib/storage.js
@@ -205,13 +205,11 @@ const writeFileLinesAsync = (filename, lines) => new Promise((resolve, reject) =
})
readable.on('error', err => {
- if (err) reject(err)
- else resolve()
+ reject(err)
})
stream.on('error', err => {
- if (err) reject(err)
- else resolve()
+ reject(err)
})
} catch (err) {
reject(err)
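The storage.js hunk drops the dead `else resolve()` branches: a stream's `'error'` event always delivers an error, so the handlers can reject unconditionally. A self-contained sketch of the same promise-wrapping shape (this is not the library's actual `writeFileLinesAsync` body, which lies outside the hunk; the helper below is an assumption for illustration):

```js
const fs = require('fs')
const { Readable } = require('stream')

// Sketch: wrap a readable -> writable pipe in a promise, rejecting directly
// from the 'error' handlers since the event always carries an error.
const writeLinesAsync = (filename, lines) => new Promise((resolve, reject) => {
  try {
    const stream = fs.createWriteStream(filename)
    const readable = Readable.from(lines.map(line => `${line}\n`))
    readable.pipe(stream)
    stream.on('finish', resolve)
    readable.on('error', reject)
    stream.on('error', reject)
  } catch (err) {
    reject(err)
  }
})

// writeLinesAsync('out.txt', ['a', 'b']).then(() => console.log('done'))
```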
diff --git a/test/persistence.async.test.js b/test/persistence.async.test.js
index 32e6399..1373648 100755
--- a/test/persistence.async.test.js
+++ b/test/persistence.async.test.js
@@ -824,39 +824,63 @@ describe('Persistence async', function () {
const datafileLength = (await fs.readFile('workspace/lac.db', 'utf8')).length
+ assert(datafileLength > 5000)
+
// Loading it in a separate process that we will crash before finishing the loadDatabase
- fork('test_lac/loadAndCrash.test').on('exit', async function (code) {
- assert.equal(code, 1) // See test_lac/loadAndCrash.test.js
-
- assert.equal(await exists('workspace/lac.db'), true)
- assert.equal(await exists('workspace/lac.db~'), true)
- assert.equal((await fs.readFile('workspace/lac.db', 'utf8')).length, datafileLength)
- assert.equal((await fs.readFile('workspace/lac.db~', 'utf8')).length, 5000)
-
- // Reload database without a crash, check that no data was lost and fs state is clean (no temp file)
- const db = new Datastore({ filename: 'workspace/lac.db' })
- await db.loadDatabaseAsync()
- assert.equal(await exists('workspace/lac.db'), true)
- assert.equal(await exists('workspace/lac.db~'), false)
- assert.equal((await fs.readFile('workspace/lac.db', 'utf8')).length, datafileLength)
-
- const docs = await db.findAsync({})
- assert.equal(docs.length, N)
- for (i = 0; i < N; i += 1) {
- docI = docs.find(d => d._id === 'anid_' + i)
- assert.notEqual(docI, undefined)
- assert.deepEqual({ hello: 'world', _id: 'anid_' + i }, docI)
- }
- })
+ const child = fork('test_lac/loadAndCrash.test', [], { stdio: 'inherit' })
+
+ await Promise.race([
+ new Promise((resolve, reject) => child.on('error', reject)),
+ new Promise((resolve, reject) => {
+ child.on('exit', async function (code) {
+ try {
+ assert.equal(code, 1) // See test_lac/loadAndCrash.test.js
+
+ assert.equal(await exists('workspace/lac.db'), true)
+ assert.equal(await exists('workspace/lac.db~'), true)
+ assert.equal((await fs.readFile('workspace/lac.db', 'utf8')).length, datafileLength)
+ assert.equal((await fs.readFile('workspace/lac.db~', 'utf8')).length, 5000)
+
+ // Reload database without a crash, check that no data was lost and fs state is clean (no temp file)
+ const db = new Datastore({ filename: 'workspace/lac.db' })
+ await db.loadDatabaseAsync()
+ assert.equal(await exists('workspace/lac.db'), true)
+ assert.equal(await exists('workspace/lac.db~'), false)
+ assert.equal((await fs.readFile('workspace/lac.db', 'utf8')).length, datafileLength)
+
+ const docs = await db.findAsync({})
+ assert.equal(docs.length, N)
+ for (i = 0; i < N; i += 1) {
+ docI = docs.find(d => d._id === 'anid_' + i)
+ assert.notEqual(docI, undefined)
+ assert.deepEqual({ hello: 'world', _id: 'anid_' + i }, docI)
+ }
+ resolve()
+ } catch (error) {
+ reject(error)
+ }
+ })
+ })
+ ])
+ })
- // Not run on Windows as there is no clean way to set maximum file descriptors. Not an issue as the code itself is tested.
- it('Cannot cause EMFILE errors by opening too many file descriptors', async function () {
- this.timeout(5000)
- if (process.platform === 'win32' || process.platform === 'win64') { return }
- const { stdout } = await promisify(execFile)('test_lac/openFdsLaunch.sh')
+ // Not run on Windows as there is no clean way to set maximum file descriptors. Not an issue as the code itself is tested.
+ it('Cannot cause EMFILE errors by opening too many file descriptors', async function () {
+ this.timeout(10000)
+ if (process.platform === 'win32' || process.platform === 'win64') { return }
+ try {
+ const { stdout, stderr } = await promisify(execFile)('test_lac/openFdsLaunch.sh')
// The subprocess will not output anything to stdout unless part of the test fails
- if (stdout.length !== 0) throw new Error(stdout)
- })
+ if (stderr.length !== 0) {
+ console.error('subprocess catch\n', stdout)
+ throw new Error(stderr)
+ }
+ } catch (err) {
+ if (Object.prototype.hasOwnProperty.call(err, 'stdout') || Object.prototype.hasOwnProperty.call(err, 'stderr')) {
+ console.error('subprocess catch\n', err.stdout)
+ throw new Error(err.stderr)
+ } else throw err
+ }
})
}) // ==== End of 'Prevent dataloss when persisting data' ====
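The rewritten crash test wraps the forked child in promises so the async test body can await its exit, and assertion failures inside the `exit` handler are funnelled into the rejection. A condensed sketch of that pattern; the module path and `assertions` callback are placeholders:

```js
const { fork } = require('child_process')

// Condensed sketch of the wrapping used above: resolve when the exit-time
// assertions pass, reject on spawn errors or assertion failures.
function runChildAndCheck (modulePath, assertions) {
  const child = fork(modulePath, [], { stdio: 'inherit' })
  return Promise.race([
    new Promise((resolve, reject) => child.on('error', reject)),
    new Promise((resolve, reject) => {
      child.on('exit', async code => {
        try {
          await assertions(code) // a throw here becomes a rejection
          resolve()
        } catch (error) {
          reject(error)
        }
      })
    })
  ])
}

// await runChildAndCheck('test_lac/loadAndCrash.test', async code => { /* assert on code */ })
```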
diff --git a/test/persistence.test.js b/test/persistence.test.js
index aa965b4..d83fc24 100755
--- a/test/persistence.test.js
+++ b/test/persistence.test.js
@@ -1004,6 +1004,8 @@ describe('Persistence', function () {
const datafileLength = fs.readFileSync('workspace/lac.db', 'utf8').length
+ assert(datafileLength > 5000)
+
// Loading it in a separate process that we will crash before finishing the loadDatabase
fork('test_lac/loadAndCrash.test').on('exit', function (code) {
code.should.equal(1) // See test_lac/loadAndCrash.test.js
diff --git a/test_lac/loadAndCrash.test.js b/test_lac/loadAndCrash.test.js
index ca5bd82..66ea9d1 100755
--- a/test_lac/loadAndCrash.test.js
+++ b/test_lac/loadAndCrash.test.js
@@ -1,133 +1,59 @@
/* eslint-env mocha */
-/* global DEBUG */
/**
* Load and modify part of fs to ensure writeFile will crash after writing 5000 bytes
*/
const fs = require('fs')
-
-function rethrow () {
- // Only enable in debug mode. A backtrace uses ~1000 bytes of heap space and
- // is fairly slow to generate.
- if (DEBUG) {
- const backtrace = new Error()
- return function (err) {
- if (err) {
- backtrace.stack = err.name + ': ' + err.message +
- backtrace.stack.substr(backtrace.name.length)
- throw backtrace
- }
- }
- }
-
- return function (err) {
- if (err) {
- throw err // Forgot a callback but don't know where? Use NODE_DEBUG=fs
+const { Writable } = require('stream')
+const { callbackify } = require('util')
+
+fs.promises.writeFile = async function (path, data) {
+ let onePassDone = false
+ const options = { encoding: 'utf8', mode: 0o666, flag: 'w' } // we don't care about the actual options passed
+
+ const filehandle = await fs.promises.open(path, options.flag, options.mode)
+ const buffer = (data instanceof Buffer) ? data : Buffer.from('' + data, options.encoding || 'utf8')
+ let length = buffer.length
+ let offset = 0
+
+ try {
+ while (length > 0) {
+ if (onePassDone) { process.exit(1) } // Crash on purpose before rewrite done
+ const { bytesWritten } = await filehandle.write(buffer, offset, Math.min(5000, length)) // Force write by chunks of 5000 bytes to ensure data will be incomplete on crash
+ onePassDone = true
+ offset += bytesWritten
+ length -= bytesWritten
}
+ } finally {
+ await filehandle.close()
}
}
-function maybeCallback (cb) {
- return typeof cb === 'function' ? cb : rethrow()
-}
-
-function isFd (path) {
- return (path >>> 0) === path
-}
-
-function assertEncoding (encoding) {
- if (encoding && !Buffer.isEncoding(encoding)) {
- throw new Error('Unknown encoding: ' + encoding)
+class FakeFsWriteStream extends Writable {
+ constructor (filename) {
+ super()
+ this.filename = filename
+ this._content = Buffer.alloc(0)
}
-}
-
-let onePassDone = false
-
-function writeAll (fd, isUserFd, buffer, offset, length, position, callback_) {
- const callback = maybeCallback(arguments[arguments.length - 1])
-
- if (onePassDone) { process.exit(1) } // Crash on purpose before rewrite done
- const l = Math.min(5000, length) // Force write by chunks of 5000 bytes to ensure data will be incomplete on crash
- // write(fd, buffer, offset, length, position, callback)
- fs.write(fd, buffer, offset, l, position, function (writeErr, written) {
- if (writeErr) {
- if (isUserFd) {
- if (callback) callback(writeErr)
- } else {
- fs.close(fd, function () {
- if (callback) callback(writeErr)
- })
- }
- } else {
- onePassDone = true
- if (written === length) {
- if (isUserFd) {
- if (callback) callback(null)
- } else {
- fs.close(fd, callback)
- }
- } else {
- offset += written
- length -= written
- if (position !== null) {
- position += written
- }
- writeAll(fd, isUserFd, buffer, offset, length, position, callback)
- }
- }
- })
-}
-
-fs.writeFile = function (path, data, options, callback_) {
- const callback = maybeCallback(arguments[arguments.length - 1])
-
- if (!options || typeof options === 'function') {
- options = { encoding: 'utf8', mode: 438, flag: 'w' } // Mode 438 == 0o666 (compatibility with older Node releases)
- } else if (typeof options === 'string') {
- options = { encoding: options, mode: 438, flag: 'w' } // Mode 438 == 0o666 (compatibility with older Node releases)
- } else if (typeof options !== 'object') {
- throw new Error(`throwOptionsError${options}`)
+ _write (chunk, encoding, callback) {
+ this._content = Buffer.concat([this._content, Buffer.from(chunk, encoding)])
+ callback()
}
- assertEncoding(options.encoding)
-
- const flag = options.flag || 'w'
-
- if (isFd(path)) {
- writeFd(path, true)
- return
+ _end (chunk, encoding, callback) {
+ this._content = Buffer.concat([this._content, Buffer.from(chunk, encoding)])
+ callback()
}
- fs.open(path, flag, options.mode, function (openErr, fd) {
- if (openErr) {
- if (callback) callback(openErr)
- } else {
- writeFd(fd, false)
- }
- })
-
- function writeFd (fd, isUserFd) {
- const buffer = (data instanceof Buffer) ? data : Buffer.from('' + data, options.encoding || 'utf8')
- const position = /a/.test(flag) ? null : 0
-
- writeAll(fd, isUserFd, buffer, 0, buffer.length, position, callback)
+ close (callback) {
+ callbackify(fs.promises.writeFile)(this.filename, this._content, 'utf8', callback)
}
}
-fs.createWriteStream = function (path) {
- let content = ''
- return {
- write (data) {
- content += data
- },
- close (callback) {
- fs.writeFile(path, content, callback)
- }
- }
-}
+fs.createWriteStream = path => new FakeFsWriteStream(path)
-// End of fs modification
+// End of fs monkey patching
const Nedb = require('../lib/datastore.js')
const db = new Nedb({ filename: 'workspace/lac.db' })
-db.loadDatabase()
+db.loadDatabaseAsync() // no need to await
diff --git a/test_lac/openFds.test.js b/test_lac/openFds.test.js
index da449d6..f9e72e4 100644
--- a/test_lac/openFds.test.js
+++ b/test_lac/openFds.test.js
@@ -1,65 +1,61 @@
const fs = require('fs')
-const { waterfall, whilst } = require('../test/utils.test.js')
+const fsPromises = fs.promises
const Nedb = require('../lib/datastore')
-const { callbackify } = require('util')
-const db = new Nedb({ filename: './workspace/openfds.db', autoload: true })
const N = 64
-let i
-let fds
-function multipleOpen (filename, N, callback) {
- whilst(function () { return i < N }
- , function (cb) {
- fs.open(filename, 'r', function (err, fd) {
- i += 1
- if (fd) { fds.push(fd) }
- return cb(err)
- })
+// Anything printed with console.error makes the parent test fail (it checks the child's stderr)
+
+const test = async () => {
+ let filehandles = []
+ try {
+ for (let i = 0; i < 2 * N + 1; i++) {
+ const filehandle = await fsPromises.open('./test_lac/openFdsTestFile', 'r')
+ filehandles.push(filehandle)
}
- , callback)
-}
+ console.error('No error occurred while opening a file too many times')
+ process.exit(1)
+ } catch (error) {
+ if (error.code !== 'EMFILE') {
+ console.error(error)
+ process.exit(1)
+ }
+ } finally {
+ for (const filehandle of filehandles) {
+ await filehandle.close()
+ }
+ filehandles = []
+ }
+
+  let i = 0
+  try {
+    for (i = 0; i < N; i++) {
+ const filehandle = await fsPromises.open('./test_lac/openFdsTestFile2', 'r')
+ filehandles.push(filehandle)
+ }
+ } catch (error) {
+ console.error(`An unexpected error occurred when opening file not too many times at i: ${i} with error: ${error}`)
+ process.exit(1)
+ } finally {
+ for (const filehandle of filehandles) {
+ await filehandle.close()
+ }
+ }
-waterfall([
- // Check that ulimit has been set to the correct value
- function (cb) {
- i = 0
- fds = []
- multipleOpen('./test_lac/openFdsTestFile', 2 * N + 1, function (err) {
- if (!err) { console.log('No error occured while opening a file too many times') }
- fds.forEach(function (fd) { fs.closeSync(fd) })
- return cb()
- })
- },
- function (cb) {
- i = 0
- fds = []
- multipleOpen('./test_lac/openFdsTestFile2', N, function (err) {
- if (err) { console.log('An unexpected error occured when opening file not too many times: ' + err) }
- fds.forEach(function (fd) { fs.closeSync(fd) })
- return cb()
- })
- },
- // Then actually test NeDB persistence
- function () {
- db.remove({}, { multi: true }, function (err) {
- if (err) { console.log(err) }
- db.insert({ hello: 'world' }, function (err) {
- if (err) { console.log(err) }
+  let i = 0
+  try {
+ const db = new Nedb({ filename: './workspace/openfds.db' })
+ await db.loadDatabaseAsync()
+ await db.removeAsync({}, { multi: true })
+ await db.insertAsync({ hello: 'world' })
- i = 0
- whilst(function () { return i < 2 * N + 1 }
- , function (cb) {
- callbackify(() => db.persistence.persistCachedDatabaseAsync())(function (err) {
- if (err) { return cb(err) }
- i += 1
- return cb()
- })
- }
- , function (err) {
- if (err) { console.log('Got unexpected error during one peresistence operation: ' + err) }
- }
- )
- })
- })
+    for (i = 0; i < 2 * N + 1; i++) {
+ await db.persistence.persistCachedDatabaseAsync()
+ }
+ } catch (error) {
+    console.error(`Got unexpected error during one persistence operation at i: ${i} with error: ${error}`)
}
-], () => {})
+}
+test().catch(error => {
+  console.error(error)
+  process.exit(1)
+})