var customUtils = require('./customUtils')
  , model = require('./model')
  , async = require('async')
  , Executor = require('./executor')
  , Index = require('./indexes')
  , util = require('util')
  , _ = require('underscore')
  , Persistence = require('./persistence')
  , Cursor = require('./cursor')
  ;


/**
 * Create a new collection
 * @param {String} options.filename Optional, datastore will be in-memory only if not provided
 * @param {Boolean} options.inMemoryOnly Optional, defaults to false
 * @param {String} options.nodeWebkitAppName Optional, specify the name of your NW app if you want options.filename to be relative to the directory where
 *                                           Node Webkit stores application data such as cookies and local storage (the best place to store data in my opinion)
 * @param {Boolean} options.autoload Optional, defaults to false
 * @param {Function} options.onload Optional, if autoload is used this is called once loadDatabase has finished, with the error (if any) as parameter. If you don't pass it, any load error is thrown
 */
function Datastore (options) {
  var filename;

  // Retrocompatibility with v0.6 and before
  if (typeof options === 'string') {
    filename = options;
    this.inMemoryOnly = false;   // Default
  } else {
    options = options || {};
    filename = options.filename;
    this.inMemoryOnly = options.inMemoryOnly || false;
    this.autoload = options.autoload || false;
  }

  // Determine whether in memory or persistent
  if (!filename || typeof filename !== 'string' || filename.length === 0) {
    this.filename = null;
    this.inMemoryOnly = true;
  } else {
    this.filename = filename;
  }

  // Persistence handling
  this.persistence = new Persistence({ db: this, nodeWebkitAppName: options.nodeWebkitAppName });

  // This new executor is ready if we don't use persistence
  // If we do, it will only be ready once loadDatabase is called
  this.executor = new Executor();
  if (this.inMemoryOnly) { this.executor.ready = true; }

  // Indexed by field name, dot notation can be used
  // _id is always indexed and since _ids are generated randomly the underlying
  // binary is always well-balanced
  this.indexes = {};
  this.indexes._id = new Index({ fieldName: '_id', unique: true });

  // Queue a load of the database right away and call the onload handler
  // By default (no onload handler), if there is an error there, no operation will be possible so warn the user by throwing an exception
  if (this.autoload) { this.loadDatabase(options.onload || function (err) {
    if (err) { throw err; }
  }); }
}
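
// Usage sketch (illustrative only; the module name 'nedb' and the filename are assumptions, not defined in this file):
//
//   var Datastore = require('nedb');
//   var db = new Datastore({ filename: 'path/to/users.db', autoload: true });
//   var memOnlyDb = new Datastore();   // no filename: purely in-memory datastore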

/**
 * Load the database from the datafile, and trigger the execution of buffered commands if any
 */
Datastore.prototype.loadDatabase = function () {
  this.executor.push({ this: this.persistence, fn: this.persistence.loadDatabase, arguments: arguments }, true);
};
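
// Usage sketch (illustrative; only needed when the autoload option is not used):
//
//   db.loadDatabase(function (err) {
//     // err is null on success; buffered commands start executing once loading is done
//   });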

/**
 * Get an array of all the data in the database
 */
Datastore.prototype.getAllData = function () {
  return this.indexes._id.getAll();
};


/**
 * Reset all currently defined indexes
 */
Datastore.prototype.resetIndexes = function (newData) {
  var self = this;

  Object.keys(this.indexes).forEach(function (i) {
    self.indexes[i].reset(newData);
  });
};


/**
 * Ensure an index is kept for this field. Same parameters as lib/indexes
 * For now this function is synchronous; we need to test how much time it takes
 * We use an async API for consistency with the rest of the code
 * @param {String} options.fieldName
 * @param {Boolean} options.unique
 * @param {Boolean} options.sparse
 * @param {Function} cb Optional callback, signature: err
 */
Datastore.prototype.ensureIndex = function (options, cb) {
  var callback = cb || function () {};

  options = options || {};

  if (!options.fieldName) { return callback({ missingFieldName: true }); }
  if (this.indexes[options.fieldName]) { return callback(null); }

  this.indexes[options.fieldName] = new Index(options);

  try {
    this.indexes[options.fieldName].insert(this.getAllData());
  } catch (e) {
    delete this.indexes[options.fieldName];
    return callback(e);
  }

  this.persistence.persistNewState([{ $$indexCreated: options }], function (err) {
    if (err) { return callback(err); }
    return callback(null);
  });
};
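
// Usage sketch (illustrative; the field name 'somefield' is hypothetical):
//
//   db.ensureIndex({ fieldName: 'somefield', unique: true }, function (err) {
//     // err is set if existing data already violates the unique constraint
//   });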

/**
 * Remove an index
 * @param {String} fieldName
 * @param {Function} cb Optional callback, signature: err
 */
Datastore.prototype.removeIndex = function (fieldName, cb) {
  var callback = cb || function () {};

  delete this.indexes[fieldName];

  this.persistence.persistNewState([{ $$indexRemoved: fieldName }], function (err) {
    if (err) { return callback(err); }
    return callback(null);
  });
};
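
// Usage sketch (illustrative; mirrors the ensureIndex example above):
//
//   db.removeIndex('somefield', function (err) {
//     // the in-memory index is deleted and its removal is persisted
//   });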

/**
 * Add one or several document(s) to all indexes
 */
Datastore.prototype.addToIndexes = function (doc) {
  var i, failingIndex, error
    , keys = Object.keys(this.indexes)
    ;

  for (i = 0; i < keys.length; i += 1) {
    try {
      this.indexes[keys[i]].insert(doc);
    } catch (e) {
      failingIndex = i;
      error = e;
      break;
    }
  }

  // If an error happened, we need to roll back the insert on all other indexes
  if (error) {
    for (i = 0; i < failingIndex; i += 1) {
      this.indexes[keys[i]].remove(doc);
    }

    throw error;
  }
};


/**
 * Remove one or several document(s) from all indexes
 */
Datastore.prototype.removeFromIndexes = function (doc) {
  var self = this;

  Object.keys(this.indexes).forEach(function (i) {
    self.indexes[i].remove(doc);
  });
};


/**
 * Update one or several documents in all indexes
 * To update multiple documents, oldDoc must be an array of { oldDoc, newDoc } pairs
 * If one update violates a constraint, all changes are rolled back
 */
Datastore.prototype.updateIndexes = function (oldDoc, newDoc) {
  var i, failingIndex, error
    , keys = Object.keys(this.indexes)
    ;

  for (i = 0; i < keys.length; i += 1) {
    try {
      this.indexes[keys[i]].update(oldDoc, newDoc);
    } catch (e) {
      failingIndex = i;
      error = e;
      break;
    }
  }

  // If an error happened, we need to roll back the update on all other indexes
  if (error) {
    for (i = 0; i < failingIndex; i += 1) {
      this.indexes[keys[i]].revertUpdate(oldDoc, newDoc);
    }

    throw error;
  }
};


/**
 * Return the list of candidates for a given query
 * Crude implementation for now: we return the candidates given by the first usable index, if any
 * We try the following query types, in this order: basic match, $in match, comparison match
 * One way to make it better would be to enable the use of multiple indexes if the first usable index
 * returns too much data. I may do it in the future.
 *
 * TODO: needs to be moved to the Cursor module
 */
Datastore.prototype.getCandidates = function (query) {
  var indexNames = Object.keys(this.indexes)
    , usableQueryKeys;

  // For a basic match
  usableQueryKeys = [];
  Object.keys(query).forEach(function (k) {
    if (typeof query[k] === 'string' || typeof query[k] === 'number' || typeof query[k] === 'boolean' || util.isDate(query[k]) || query[k] === null) {
      usableQueryKeys.push(k);
    }
  });
  usableQueryKeys = _.intersection(usableQueryKeys, indexNames);
  if (usableQueryKeys.length > 0) {
    return this.indexes[usableQueryKeys[0]].getMatching(query[usableQueryKeys[0]]);
  }

  // For a $in match
  usableQueryKeys = [];
  Object.keys(query).forEach(function (k) {
    if (query[k] && query[k].hasOwnProperty('$in')) {
      usableQueryKeys.push(k);
    }
  });
  usableQueryKeys = _.intersection(usableQueryKeys, indexNames);
  if (usableQueryKeys.length > 0) {
    return this.indexes[usableQueryKeys[0]].getMatching(query[usableQueryKeys[0]].$in);
  }

  // For a comparison match
  usableQueryKeys = [];
  Object.keys(query).forEach(function (k) {
    if (query[k] && (query[k].hasOwnProperty('$lt') || query[k].hasOwnProperty('$lte') || query[k].hasOwnProperty('$gt') || query[k].hasOwnProperty('$gte'))) {
      usableQueryKeys.push(k);
    }
  });
  usableQueryKeys = _.intersection(usableQueryKeys, indexNames);
  if (usableQueryKeys.length > 0) {
    return this.indexes[usableQueryKeys[0]].getBetweenBounds(query[usableQueryKeys[0]]);
  }

  // By default, return all the DB data
  return this.getAllData();
};
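
// Illustration of the index selection above, assuming an index on 'planet' (hypothetical field):
//
//   { planet: 'Mars' }                      // basic match: candidates come from the 'planet' index
//   { planet: { $in: ['Mars', 'Earth'] } }  // $in match: candidates come from the 'planet' index
//   { planet: { $gt: 'Mercury' } }          // comparison match: uses getBetweenBounds on the index
//   { humidity: 40 }                        // basic match but no index on 'humidity': returns all the data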

/**
 * Insert a new document
 * @param {Function} cb Optional callback, signature: err, insertedDoc
 *
 * @api private Use Datastore.insert which has the same signature
 */
Datastore.prototype._insert = function (newDoc, cb) {
  var callback = cb || function () {}
    ;

  try {
    this._insertInCache(newDoc);
  } catch (e) {
    return callback(e);
  }

  this.persistence.persistNewState(util.isArray(newDoc) ? newDoc : [newDoc], function (err) {
    if (err) { return callback(err); }
    return callback(null, newDoc);
  });
};


/**
 * Create a new _id that's not already in use
 */
Datastore.prototype.createNewId = function () {
  var tentativeId = customUtils.uid(16);
  // Try as many times as needed to get an unused _id. As explained in customUtils, the probability of this ever happening is extremely small, so this is O(1)
  if (this.indexes._id.getMatching(tentativeId).length > 0) {
    tentativeId = this.createNewId();
  }
  return tentativeId;
};


/**
 * Prepare a document (or array of documents) to be inserted in a database
 * @api private
 */
Datastore.prototype.prepareDocumentForInsertion = function (newDoc) {
  var preparedDoc, self = this;

  if (util.isArray(newDoc)) {
    preparedDoc = [];
    newDoc.forEach(function (doc) { preparedDoc.push(self.prepareDocumentForInsertion(doc)); });
  } else {
    if (newDoc._id === undefined) {
      newDoc._id = this.createNewId();
    }
    preparedDoc = model.deepCopy(newDoc);
    model.checkObject(preparedDoc);
  }

  return preparedDoc;
};


/**
 * If newDoc is an array of documents, this will insert all documents in the cache
 * @api private
 */
Datastore.prototype._insertInCache = function (newDoc) {
  if (util.isArray(newDoc)) {
    this._insertMultipleDocsInCache(newDoc);
  } else {
    this.addToIndexes(this.prepareDocumentForInsertion(newDoc));
  }
};


/**
 * If one insertion fails (e.g. because of a unique constraint), roll back all previous
 * inserts and throw the error
 * @api private
 */
Datastore.prototype._insertMultipleDocsInCache = function (newDocs) {
  var i, failingI, error
    , preparedDocs = this.prepareDocumentForInsertion(newDocs)
    ;

  for (i = 0; i < preparedDocs.length; i += 1) {
    try {
      this.addToIndexes(preparedDocs[i]);
    } catch (e) {
      error = e;
      failingI = i;
      break;
    }
  }

  if (error) {
    for (i = 0; i < failingI; i += 1) {
      this.removeFromIndexes(preparedDocs[i]);
    }

    throw error;
  }
};

Datastore.prototype.insert = function () {
  this.executor.push({ this: this, fn: this._insert, arguments: arguments });
};
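
// Usage sketch (illustrative; document fields are hypothetical):
//
//   db.insert({ planet: 'Mars', inhabited: false }, function (err, newDoc) {
//     // newDoc is the inserted document, including its generated _id
//   });
//
//   // Inserting an array rolls back the in-memory indexes if any document violates a constraint
//   db.insert([{ planet: 'Earth' }, { planet: 'Venus' }], function (err, newDocs) { /* ... */ });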

/**
 * Count all documents matching the query
 * @param {Object} query MongoDB-style query
 */
Datastore.prototype.count = function (query, callback) {
  var cursor = new Cursor(this, query, function (err, docs, callback) {
    if (err) { return callback(err); }
    return callback(null, docs.length);
  });

  if (typeof callback === 'function') {
    cursor.exec(callback);
  } else {
    return cursor;
  }
};
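
// Usage sketch (illustrative):
//
//   db.count({ inhabited: false }, function (err, count) {
//     // count is the number of matching documents
//   });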

/**
 * Find all documents matching the query
 * If no callback is passed, we return the cursor so that the user can limit, skip and finally exec
 * @param {Object} query MongoDB-style query
 * @param {Object} projection MongoDB-style projection
 */
Datastore.prototype.find = function (query, projection, callback) {
  switch (arguments.length) {
    case 1:
      projection = {};
      // callback is undefined, will return a cursor
      break;
    case 2:
      if (typeof projection === 'function') {
        callback = projection;
        projection = {};
      }   // If not, assume projection is an object and callback undefined
      break;
  }

  var cursor = new Cursor(this, query, function (err, docs, callback) {
    var res = [], i;

    if (err) { return callback(err); }

    for (i = 0; i < docs.length; i += 1) {
      res.push(model.deepCopy(docs[i]));
    }
    return callback(null, res);
  });

  cursor.projection(projection);
  if (typeof callback === 'function') {
    cursor.exec(callback);
  } else {
    return cursor;
  }
};
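
// Usage sketch (illustrative; field names are hypothetical, and the chained sort/skip assume the Cursor module's API):
//
//   db.find({ system: 'solar' }, { planet: 1, _id: 0 }, function (err, docs) {
//     // docs is an array of copies of the matching documents, projected onto 'planet'
//   });
//
//   // Without a callback, the cursor is returned for chaining:
//   db.find({ system: 'solar' }).sort({ planet: 1 }).skip(1).limit(2).exec(function (err, docs) { /* ... */ });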

/**
 * Find one document matching the query
 * @param {Object} query MongoDB-style query
 * @param {Object} projection MongoDB-style projection
 */
Datastore.prototype.findOne = function (query, projection, callback) {
  switch (arguments.length) {
    case 1:
      projection = {};
      // callback is undefined, will return a cursor
      break;
    case 2:
      if (typeof projection === 'function') {
        callback = projection;
        projection = {};
      }   // If not, assume projection is an object and callback undefined
      break;
  }

  var cursor = new Cursor(this, query, function (err, docs, callback) {
    if (err) { return callback(err); }
    if (docs.length === 1) {
      return callback(null, model.deepCopy(docs[0]));
    } else {
      return callback(null, null);
    }
  });

  cursor.projection(projection).limit(1);
  if (typeof callback === 'function') {
    cursor.exec(callback);
  } else {
    return cursor;
  }
};
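
// Usage sketch (illustrative):
//
//   db.findOne({ planet: 'Earth' }, function (err, doc) {
//     // doc is a copy of the matching document, or null if nothing matches
//   });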

/**
 * Update all docs matching query
 * For now, very naive implementation (recalculating the whole database)
 * @param {Object} query
 * @param {Object} updateQuery
 * @param {Object} options Optional options
 *                 options.multi If true, can update multiple documents (defaults to false)
 *                 options.upsert If true, document is inserted if the query doesn't match anything
 * @param {Function} cb Optional callback, signature: err, numReplaced, upsert (set to true if the update was in fact an upsert)
 *
 * @api private Use Datastore.update which has the same signature
 */
Datastore.prototype._update = function (query, updateQuery, options, cb) {
  var callback
    , self = this
    , numReplaced = 0
    , multi, upsert
    , i
    ;

  if (typeof options === 'function') { cb = options; options = {}; }
  callback = cb || function () {};
  multi = options.multi !== undefined ? options.multi : false;
  upsert = options.upsert !== undefined ? options.upsert : false;

  async.waterfall([
    function (cb) {   // If upsert option is set, check whether we need to insert the doc
      if (!upsert) { return cb(); }

      // Need to use an internal function not tied to the executor to avoid deadlock
      var cursor = new Cursor(self, query);
      cursor.limit(1)._exec(function (err, docs) {
        if (err) { return callback(err); }
        if (docs.length === 1) {
          return cb();
        } else {
          var toBeInserted;

          try {
            model.checkObject(updateQuery);
            // updateQuery is a simple object with no modifier, use it as the document to insert
            toBeInserted = updateQuery;
          } catch (e) {
            // updateQuery contains modifiers: use the find query as the base,
            // stripped of all operators, and update it according to updateQuery
            toBeInserted = model.modify(model.deepCopy(query, true), updateQuery);
          }

          return self._insert(toBeInserted, function (err, newDoc) {
            if (err) { return callback(err); }
            return callback(null, 1, newDoc);
          });
        }
      });
    }
  , function () {   // Perform the update
      var modifiedDoc
        , candidates = self.getCandidates(query)
        , modifications = []
        ;

      // Preparing update (if an error is thrown here neither the datafile nor
      // the in-memory indexes are affected)
      try {
        for (i = 0; i < candidates.length; i += 1) {
          if (model.match(candidates[i], query) && (multi || numReplaced === 0)) {
            numReplaced += 1;
            modifiedDoc = model.modify(candidates[i], updateQuery);
            modifications.push({ oldDoc: candidates[i], newDoc: modifiedDoc });
          }
        }
      } catch (err) {
        return callback(err);
      }

      // Change the docs in memory
      try {
        self.updateIndexes(modifications);
      } catch (err) {
        return callback(err);
      }

      // Update the datafile
      self.persistence.persistNewState(_.pluck(modifications, 'newDoc'), function (err) {
        if (err) { return callback(err); }
        return callback(null, numReplaced);
      });
    }
  ]);
};

Datastore.prototype.update = function () {
  this.executor.push({ this: this, fn: this._update, arguments: arguments });
};
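
// Usage sketch (illustrative; field names are hypothetical):
//
//   // Replace the first matching document entirely:
//   db.update({ planet: 'Jupiter' }, { planet: 'Pluton' }, {}, function (err, numReplaced) { /* ... */ });
//
//   // Apply modifiers to all matching documents, inserting one if nothing matches:
//   db.update({ system: 'solar' }, { $set: { visited: true } }, { multi: true, upsert: true },
//             function (err, numReplaced) { /* ... */ });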

/**
 * Remove all docs matching the query
 * For now very naive implementation (similar to update)
 * @param {Object} query
 * @param {Object} options Optional options
 *                 options.multi If true, can remove multiple documents (defaults to false)
 * @param {Function} cb Optional callback, signature: err, numRemoved
 *
 * @api private Use Datastore.remove which has the same signature
 */
Datastore.prototype._remove = function (query, options, cb) {
  var callback
    , self = this
    , numRemoved = 0
    , multi
    , removedDocs = []
    , candidates = this.getCandidates(query)
    ;

  if (typeof options === 'function') { cb = options; options = {}; }
  callback = cb || function () {};
  multi = options.multi !== undefined ? options.multi : false;

  try {
    candidates.forEach(function (d) {
      if (model.match(d, query) && (multi || numRemoved === 0)) {
        numRemoved += 1;
        removedDocs.push({ $$deleted: true, _id: d._id });
        self.removeFromIndexes(d);
      }
    });
  } catch (err) { return callback(err); }

  self.persistence.persistNewState(removedDocs, function (err) {
    if (err) { return callback(err); }
    return callback(null, numRemoved);
  });
};

Datastore.prototype.remove = function () {
  this.executor.push({ this: this, fn: this._remove, arguments: arguments });
};
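
// Usage sketch (illustrative):
//
//   db.remove({ planet: 'Pluton' }, {}, function (err, numRemoved) { /* ... */ });
//
//   // Remove every matching document:
//   db.remove({ inhabited: false }, { multi: true }, function (err, numRemoved) { /* ... */ });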

module.exports = Datastore;