Bulk inserts work as expected

Branch: pull/2/head
Author: Louis Chatriot, 11 years ago
Parent: 6fb4a61688
Commit: c0baaede1a

Changed files:
  1. lib/datastore.js (44 changed lines)
  2. test/db.test.js (19 changed lines)

lib/datastore.js:

@@ -262,33 +262,49 @@ Datastore.prototype._insert = function (newDoc, cb) {
 };
 
 
 /**
- * If newDoc is an array of documents, this will insert all documents in the cache
+ * Prepare a document (or array of documents) to be inserted in a database
  * @api private
  */
-Datastore.prototype._insertInCache = function (newDoc) {
-  var insertedDoc;
+Datastore.prototype.prepareDocumentForInsertion = function (newDoc) {
+  var preparedDoc, self = this;
 
-  if (util.isArray(newDoc)) { this._insertMultipleDocsInCache(newDoc); return; }
-
-  // Ensure the document has the right format
-  newDoc._id = customUtils.uid(16);
-  model.checkObject(newDoc);
-  insertedDoc = model.deepCopy(newDoc);
-
-  // Insert in all indexes (also serves to ensure uniqueness)
-  this.addToIndexes(insertedDoc);
+  if (util.isArray(newDoc)) {
+    preparedDoc = [];
+    newDoc.forEach(function (doc) { preparedDoc.push(self.prepareDocumentForInsertion(doc)); });
+  } else {
+    newDoc._id = customUtils.uid(16);
+    preparedDoc = model.deepCopy(newDoc);
+    model.checkObject(preparedDoc);
+  }
+
+  return preparedDoc;
+};
+
+
+/**
+ * If newDoc is an array of documents, this will insert all documents in the cache
+ * @api private
+ */
+Datastore.prototype._insertInCache = function (newDoc) {
+  if (util.isArray(newDoc)) {
+    this._insertMultipleDocsInCache(newDoc);
+  } else {
+    this.addToIndexes(this.prepareDocumentForInsertion(newDoc));
+  }
 };
 
 
 /**
  * If one insertion fails (e.g. because of a unique constraint), roll back all previous
  * inserts and throws the error
  * @api private
  */
 Datastore.prototype._insertMultipleDocsInCache = function (newDocs) {
-  var i, failingI, error;
+  var i, failingI, error
+    , preparedDocs = this.prepareDocumentForInsertion(newDocs)
+    ;
 
-  for (i = 0; i < newDocs.length; i += 1) {
+  for (i = 0; i < preparedDocs.length; i += 1) {
     try {
-      this._insertInCache(newDocs[i]);
+      this.addToIndexes(preparedDocs[i]);
     } catch (e) {
       error = e;
       failingI = i;

@@ -298,7 +314,7 @@ Datastore.prototype._insertMultipleDocsInCache = function (newDocs) {
   if (error) {
     for (i = 0; i < failingI; i += 1) {
-      this._removeFromCache(newDocs[i]);
+      this.removeFromIndexes(preparedDocs[i]);
     }
 
     throw error;
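For reference, the change reduces bulk insertion to a prepare/apply/rollback loop: every document is prepared (copied, given an _id, checked) before anything touches the indexes, then each prepared document is added to the indexes in turn, and a failure removes the ones already added before rethrowing. The sketch below restates that pattern as a standalone function; applyAllOrNothing, apply and undo are illustrative names, not NeDB API.

// Standalone sketch of the all-or-nothing loop used by _insertMultipleDocsInCache.
// `apply` and `undo` are hypothetical callbacks standing in for addToIndexes /
// removeFromIndexes: if `apply` throws on item i, items 0..i-1 are undone and
// the error is rethrown so the caller sees the whole batch as failed.
function applyAllOrNothing (items, apply, undo) {
  var i, failingI, error;

  for (i = 0; i < items.length; i += 1) {
    try {
      apply(items[i]);
    } catch (e) {
      error = e;
      failingI = i;
      break;
    }
  }

  if (error) {
    for (i = 0; i < failingI; i += 1) {
      undo(items[i]);
    }
    throw error;
  }
}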

test/db.test.js:

@@ -180,7 +180,7 @@ describe('Database', function () {
       });
     });
 
-    it.only('Can insert an array of documents at once', function (done) {
+    it('Can insert an array of documents at once', function (done) {
       var docs = [{ a: 5, b: 'hello' }, { a: 42, b: 'world' }];
 
       d.insert(docs, function (err) {

@@ -204,6 +204,23 @@ describe('Database', function () {
       });
     });
 
+    it('If a bulk insert violates a constraint, all changes are rolled back', function (done) {
+      var docs = [{ a: 5, b: 'hello' }, { a: 42, b: 'world' }, { a: 5, b: 'bloup' }];
+
+      d.ensureIndex({ fieldName: 'a', unique: true });
+
+      d.insert(docs, function (err) {
+        assert.isDefined(err);
+        assert.isNotNull(err);
+
+        d.find({}, function (err, docs) {
+          docs.length.should.equal(0);
+
+          done();
+        });
+      });
+    });
+
   });   // ==== End of 'Insert' ==== //
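The new test exercises the rollback through the public API. A minimal usage sketch, assuming the nedb package is installed and using an in-memory datastore (no filename passed to the constructor):

var Datastore = require('nedb')
  , db = new Datastore()   // no filename: in-memory only
  ;

db.ensureIndex({ fieldName: 'a', unique: true });

// The third document repeats a: 5, which violates the unique index,
// so the whole batch should be rejected and rolled back.
db.insert([{ a: 5, b: 'hello' }, { a: 42, b: 'world' }, { a: 5, b: 'bloup' }], function (err) {
  console.log('insert error:', err && err.message);

  db.find({}, function (err, docs) {
    console.log('documents left:', docs.length);   // expected: 0
  });
});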
