parent
bd7d36a5ab
commit
4f853dcfdf
@ -1,88 +1,175 @@
|
||||
function ScholarCrawler(parser, node_ids, nodes, edges)
|
||||
function ScholarCrawler(parser, nodes, edges)
|
||||
{
|
||||
this.stack = [];
|
||||
this.stack = [];
|
||||
this.edge_ids = {};
|
||||
this.node_ids = {};
|
||||
|
||||
this.node_ids = node_ids;
|
||||
this.parser = parser;
|
||||
this.nodes = nodes;
|
||||
this.edges = edges;
|
||||
this.nodes = nodes;
|
||||
this.edges = edges;
|
||||
this.parser = parser;
|
||||
|
||||
this.delay = 1000;
|
||||
this.max_depth = 2;
|
||||
this.minimum_citations = 0;
|
||||
this.delay = 1000;
|
||||
this.minimum_citations = 0;
|
||||
};
|
||||
|
||||
ScholarCrawler.prototype.formatArticle = function(node)
|
||||
ScholarCrawler.prototype.article_to_node = function(article)
|
||||
{
|
||||
node.label = '';
|
||||
node.shape = "dot";
|
||||
node.original_title = node.title;
|
||||
node.title =
|
||||
"<span class='node_tooltip'>" + node.authors +
|
||||
"<emph> " + node.title + ".</emph> " +
|
||||
node.source + ", " + node.year + ".</span>";
|
||||
node.mass = node.n_citations/2 + 1;
|
||||
node.radius = 3*Math.pow(node.n_citations, 0.8) + 3;
|
||||
return node;
|
||||
return {
|
||||
id: article._id,
|
||||
_id: article._id,
|
||||
article: article,
|
||||
group: 'standard',
|
||||
label: '',
|
||||
shape: 'dot',
|
||||
mass: article.n_citations/2 + 1,
|
||||
radius: 3*Math.pow(article.n_citations, 0.8) + 3,
|
||||
title: "<span class='node_tooltip'>" + article.authors +
|
||||
"<a href='' onclick='return open_external(\"" + article.url + "\")' target='_blank'> "
|
||||
+ article.title + ".</a> " + article.source + ", " + article.year + ".</span>"
|
||||
};
|
||||
};
|
||||
|
||||
ScholarCrawler.prototype.process = function(url, parent_node)
|
||||
ScholarCrawler.prototype.add_citations = function(parent_node, levels)
|
||||
{
|
||||
var crawler = this;
|
||||
|
||||
this.parser.parse(url, function(children)
|
||||
{
|
||||
for(i=0; i<children.length; i++)
|
||||
{
|
||||
var child = crawler.formatArticle(children[i]);
|
||||
|
||||
if(parent_node == null)
|
||||
{
|
||||
child['depth'] = 0;
|
||||
}
|
||||
else
|
||||
{
|
||||
child['depth'] = parent_node['depth'] + 1;
|
||||
crawler.edges.add({"from": child['id'], "to": parent_node['id']});
|
||||
}
|
||||
|
||||
if(child['n_citations'] < crawler.minimum_citations)
|
||||
continue;
|
||||
|
||||
if(crawler.node_ids.indexOf(child['id']) == -1)
|
||||
{
|
||||
if(child['depth'] < crawler.max_depth && child['n_citations'] > 0) {
|
||||
crawler.push(child['citations_url'], child);
|
||||
child.color = node_queued_color;
|
||||
}
|
||||
|
||||
crawler.node_ids.push(child['id']);
|
||||
crawler.nodes.add(child);
|
||||
}
|
||||
}
|
||||
|
||||
if(!(parent_node === null))
|
||||
{
|
||||
parent_node.color = visjs_options.nodes.color;
|
||||
crawler.nodes.update(parent_node);
|
||||
}
|
||||
|
||||
});
|
||||
};
|
||||
assert(levels >= 0, "levels should be non-negative");
|
||||
|
||||
var crawler = this;
|
||||
|
||||
if(parent_node.is_dummy)
|
||||
return this._add_citations_from_scopus(parent_node, levels);
|
||||
|
||||
articles_db.findOne({_id: parent_node._id}, function(err, parent_article_db)
|
||||
{
|
||||
if(parent_article_db === null || !parent_article_db.is_cached)
|
||||
crawler._add_citations_from_scopus(parent_node, levels);
|
||||
else
|
||||
crawler._add_citations_from_db(crawler.article_to_node(parent_node.article), levels);
|
||||
});
|
||||
}
|
||||
|
||||
ScholarCrawler.prototype._add_citations_from_db = function(parent_node, levels)
|
||||
{
|
||||
assert(levels >= 0, "levels should be non-negative");
|
||||
|
||||
var crawler = this;
|
||||
citations_db.find({to: parent_node.article._id}, function(err, citations)
|
||||
{
|
||||
citations.forEach(function(citation)
|
||||
{
|
||||
articles_db.findOne({_id: citation.from}, function(err, child_article)
|
||||
{
|
||||
crawler._add_child_article(child_article, parent_node, levels);
|
||||
});
|
||||
});
|
||||
|
||||
parent_node.group = "standard";
|
||||
crawler.nodes.update(parent_node);
|
||||
});
|
||||
}
|
||||
|
||||
ScholarCrawler.prototype._add_citations_from_scopus = function(parent_node, levels)
|
||||
{
|
||||
assert(levels >= 0, "levels should be non-negative");
|
||||
|
||||
var crawler = this;
|
||||
this.parser.parse(parent_node.article.citations_url, function(child_articles)
|
||||
{
|
||||
child_articles.forEach(function(child_article)
|
||||
{
|
||||
articles_db.findOne({_id:child_article._id}, function(err, child_article_db)
|
||||
{
|
||||
if(child_article_db === null)
|
||||
{
|
||||
child_article.is_cached = false;
|
||||
articles_db.insert(child_article);
|
||||
crawler._add_child_article(child_article, parent_node, levels);
|
||||
}
|
||||
else
|
||||
{
|
||||
crawler._add_child_article(child_article_db, parent_node, levels);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
if(!parent_node.is_dummy)
|
||||
{
|
||||
parent_node.group = "standard";
|
||||
crawler.nodes.update(parent_node);
|
||||
|
||||
ScholarCrawler.prototype.push = function(url, parent_node)
|
||||
parent_node.article.is_cached = true;
|
||||
articles_db.update({_id: parent_node.article._id}, parent_node.article, {});
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
ScholarCrawler.prototype._add_child_article = function(child_article, parent_node, levels)
|
||||
{
|
||||
assert(levels >= 0, "levels should be non-negative");
|
||||
|
||||
var child_node = this.article_to_node(child_article);
|
||||
|
||||
if(child_article.n_citations < this.minimum_citations)
|
||||
return;
|
||||
|
||||
if(!parent_node.is_dummy)
|
||||
{
|
||||
edge = {
|
||||
id: $.md5(child_node._id + parent_node._id),
|
||||
from: child_node._id,
|
||||
to: parent_node._id
|
||||
};
|
||||
|
||||
edge._id = edge.id;
|
||||
|
||||
if(!(edge._id in this.edge_ids))
|
||||
{
|
||||
this.edge_ids[edge._id] = true;
|
||||
this.edges.add(edge);
|
||||
citations_db.insert(edge);
|
||||
}
|
||||
}
|
||||
|
||||
if(child_node.article.n_citations == 0)
|
||||
child_node.group = "standard";
|
||||
else if(levels == 0)
|
||||
child_node.group = "leaf";
|
||||
else
|
||||
this.push(child_node, levels-1);
|
||||
|
||||
if(!(child_node._id in this.node_ids))
|
||||
{
|
||||
this.node_ids[child_node._id] = true;
|
||||
this.nodes.add(child_node);
|
||||
}
|
||||
else
|
||||
{
|
||||
this.nodes.update(child_node);
|
||||
}
|
||||
}
|
||||
|
||||
ScholarCrawler.prototype.push = function(parent_node, levels)
|
||||
{
|
||||
this.stack.push([url, parent_node]);
|
||||
assert(levels >= 0, "levels should be non-negative");
|
||||
|
||||
this.stack.push([parent_node, levels]);
|
||||
if(!parent_node.is_dummy)
|
||||
{
|
||||
parent_node.group = "processing";
|
||||
if(parent_node._id in this.node_ids)
|
||||
{
|
||||
this.nodes.update(parent_node);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
ScholarCrawler.prototype.start = function()
|
||||
ScholarCrawler.prototype.next = function()
|
||||
{
|
||||
if(this.stack.length > 0)
|
||||
{
|
||||
var args = this.stack.pop();
|
||||
this.process.apply(this, args);
|
||||
}
|
||||
|
||||
var crawler = this;
|
||||
setTimeout(function() { crawler.start() }, this.delay);
|
||||
if(this.stack.length > 0)
|
||||
{
|
||||
var args = this.stack.pop();
|
||||
this.add_citations(args[0], args[1]);
|
||||
}
|
||||
|
||||
var crawler = this;
|
||||
setTimeout(function() { crawler.next() }, 1000);
|
||||
};
|
||||
|
@ -1,7 +1,26 @@
|
||||
var gui = require('nw.gui');
|
||||
var assert = require('assert');
|
||||
var gui = require('nw.gui');
|
||||
var nedb = require('nedb');
|
||||
var path = require('path');
|
||||
var win = gui.Window.get();
|
||||
|
||||
win.maximize();
|
||||
|
||||
var view = new MainMenuView();
|
||||
view.render(document.getElementById("main"));
|
||||
function createDB(name)
|
||||
{
|
||||
return new nedb({
|
||||
filename : path.join(gui.App.dataPath, name + '.db'),
|
||||
autoload: true
|
||||
});
|
||||
}
|
||||
|
||||
function open_external(url)
|
||||
{
|
||||
gui.Shell.openExternal(url);
|
||||
return false;
|
||||
}
|
||||
|
||||
var articles_db = createDB("articles");
|
||||
var citations_db = createDB("citations");
|
||||
|
||||
new MainMenuView().render(document.getElementById("main"));
|
||||
|
@ -1,34 +1,36 @@
|
||||
function ShowMapView(seed_url) {
|
||||
this.seed_url = seed_url;
|
||||
this.seed_url = seed_url;
|
||||
};
|
||||
|
||||
ShowMapView.prototype.render = function(container)
|
||||
{
|
||||
var node_ids = [];
|
||||
var edges = new vis.DataSet();
|
||||
var nodes = new vis.DataSet();
|
||||
var edges = new vis.DataSet();
|
||||
var nodes = new vis.DataSet();
|
||||
|
||||
var parser = new ScopusParser();
|
||||
var crawler = new ScholarCrawler(parser, node_ids, nodes, edges);
|
||||
crawler.push(this.seed_url, null);
|
||||
crawler.start();
|
||||
var parser = new ScopusParser();
|
||||
var crawler = new ScholarCrawler(parser, nodes, edges);
|
||||
crawler.push({ is_dummy: true, article: { citations_url: this.seed_url } }, 2);
|
||||
crawler.next();
|
||||
|
||||
var network_div = document.createElement('div');
|
||||
$(network_div).addClass("mynetwork");
|
||||
$(container).empty();
|
||||
container.appendChild(network_div)
|
||||
document.crawler = crawler;
|
||||
|
||||
var data = { nodes: nodes, edges: edges };
|
||||
var network = new vis.Network(network_div, data, visjs_options);
|
||||
var network_div = document.createElement('div');
|
||||
$(network_div).addClass("mynetwork");
|
||||
$(container).empty();
|
||||
container.appendChild(network_div)
|
||||
|
||||
network.on('doubleClick', function(params) {
|
||||
gui.Shell.openExternal(nodes.get(params.nodes[0]).url)
|
||||
});
|
||||
var data = { nodes: nodes, edges: edges };
|
||||
var network = new vis.Network(network_div, data, visjs_options);
|
||||
|
||||
network.on("resize", function(params) {
|
||||
var height = $(window).height();
|
||||
var width = $(window).width();
|
||||
$(".mynetwork").css("width", width);
|
||||
$(".mynetwork").css("height", height);
|
||||
});
|
||||
network.on('doubleClick', function(params) {
|
||||
if(params.nodes.length > 0)
|
||||
crawler.push(nodes.get(params.nodes[0]), 1);
|
||||
});
|
||||
|
||||
network.on("resize", function(params) {
|
||||
var height = $(window).height();
|
||||
var width = $(window).width();
|
||||
$(".mynetwork").css("width", width);
|
||||
$(".mynetwork").css("height", height);
|
||||
});
|
||||
};
|
||||
|
@ -0,0 +1,22 @@
|
||||
lib-cov
|
||||
*.seed
|
||||
*.log
|
||||
*.csv
|
||||
*.dat
|
||||
*.out
|
||||
*.pid
|
||||
*.gz
|
||||
|
||||
pids
|
||||
logs
|
||||
results
|
||||
|
||||
npm-debug.log
|
||||
workspace
|
||||
node_modules
|
||||
|
||||
browser-version/src
|
||||
browser-version/node_modules
|
||||
|
||||
*.swp
|
||||
*~
|
@ -0,0 +1,22 @@
|
||||
(The MIT License)
|
||||
|
||||
Copyright (c) 2013 Louis Chatriot <louis.chatriot@gmail.com>
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining
|
||||
a copy of this software and associated documentation files (the
|
||||
'Software'), to deal in the Software without restriction, including
|
||||
without limitation the rights to use, copy, modify, merge, publish,
|
||||
distribute, sublicense, and/or sell copies of the Software, and to
|
||||
permit persons to whom the Software is furnished to do so, subject to
|
||||
the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be
|
||||
included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
|
||||
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
|
||||
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
|
||||
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
|
||||
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
|
||||
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
@ -0,0 +1,639 @@
|
||||
# NeDB (Node embedded database)
|
||||
|
||||
<img src="http://i.imgur.com/GdeQBmc.png" style="width: 25%; height: 25%; float: left;">
|
||||
|
||||
**Embedded persistent database for Node.js, written in Javascript, with no dependency** (except npm
|
||||
modules of course). You can **think of it as a SQLite for Node.js projects**, which
|
||||
can be used with a simple `require` statement. The API is a subset of MongoDB's. You can use it as a persistent or an in-memory only datastore.
|
||||
|
||||
NeDB is not intended to be a replacement of large-scale databases such as MongoDB! Its goal is to provide you with a clean and easy way to query data and persist it to disk, for web applications that do not need lots of concurrent connections, for example a <a href="https://github.com/louischatriot/braindead-ci" target="_blank">continuous integration and deployment server</a> and desktop applications built with <a href="https://github.com/rogerwang/node-webkit" target="_blank">Node Webkit</a>.
|
||||
|
||||
NeDB was benchmarked against the popular client-side database <a href="http://www.taffydb.com/" target="_blank">TaffyDB</a> and <a href="https://github.com/louischatriot/taffydb-benchmark" target="_blank">NeDB is much, much faster</a>. That's why there is now <a href="#browser-version">a browser version</a>, which can also provide persistence.
|
||||
|
||||
Check the <a href="https://github.com/louischatriot/nedb/wiki/Change-log" target="_blank">change log in the wiki</a> if you think nedb doesn't behave as the documentation describes! Most of the issues I get are due to non-latest version NeDBs.
|
||||
|
||||
You want to help out? <a href="#contribute">You can contribute time or bitcoins, check out how!</a>
|
||||
|
||||
|
||||
## Installation, tests
|
||||
Module name on npm is `nedb`.
|
||||
```javascript
|
||||
npm install nedb --save // Put latest version in your package.json
|
||||
|
||||
npm test // You'll need the dev dependencies to test it
|
||||
```
|
||||
|
||||
## API
|
||||
It's a subset of MongoDB's API (the most used operations). The current API will not change, but I will add operations as they are needed. Summary of the API:
|
||||
|
||||
* <a href="#creatingloading-a-database">Creating/loading a database</a>
|
||||
* <a href="#compacting-the-database">Compacting the database</a>
|
||||
* <a href="#inserting-documents">Inserting documents</a>
|
||||
* <a href="#finding-documents">Finding documents</a>
|
||||
* <a href="#basic-querying">Basic Querying</a>
|
||||
* <a href="#operators-lt-lte-gt-gte-in-nin-ne-exists-regex">Operators ($lt, $lte, $gt, $gte, $in, $nin, $ne, $exists, $regex)</a>
|
||||
* <a href="#array-fields">Array fields</a>
|
||||
* <a href="#logical-operators-or-and-not-where">Logical operators $or, $and, $not, $where</a>
|
||||
* <a href="#sorting-and-paginating">Sorting and paginating</a>
|
||||
* <a href="#projections">Projections</a>
|
||||
* <a href="#counting-documents">Counting documents</a>
|
||||
* <a href="#updating-documents">Updating documents</a>
|
||||
* <a href="#removing-documents">Removing documents</a>
|
||||
* <a href="#indexing">Indexing</a>
|
||||
* <a href="#browser-version">Browser version</a>
|
||||
|
||||
### Creating/loading a database
|
||||
You can use NeDB as an in-memory only datastore or as a persistent datastore. One datastore is the equivalent of a MongoDB collection. The constructor is used as follows `new Datastore(options)` where `options` is an object with the following fields:
|
||||
|
||||
* `filename` (optional): path to the file where the data is persisted. If left blank, the datastore is automatically considered in-memory only. It cannot end with a `~` which is used in the temporary files NeDB uses to perform crash-safe writes
|
||||
* `inMemoryOnly` (optional, defaults to false): as the name implies.
|
||||
* `autoload` (optional, defaults to false): if used, the database will
|
||||
automatically be loaded from the datafile upon creation (you don't
|
||||
need to call `loadDatabase`). Any command
|
||||
issued before load is finished is buffered and will be executed when
|
||||
load is done.
|
||||
* `onload` (optional): if you use autoloading, this is the handler called after the `loadDatabase`. It takes one `error` argument. If you use autoloading without specifying this handler, and an error happens during load, an error will be thrown.
|
||||
* `nodeWebkitAppName` (optional, **DEPRECATED**): if you are using NeDB from within a Node Webkit app, specify its name (the same one you use in the `package.json`) in this field and the `filename` will be relative to the directory Node Webkit uses to store the rest of the application's data (local storage etc.). It works on Linux, OS X and Windows. Now that you can use `require('nw.gui').App.dataPath` in Node Webkit to get the path to the data directory for your application, you should not use this option anymore and it will be removed.
|
||||
|
||||
If you use a persistent datastore without the `autoload` option, you need to call `loadDatabase` manually.
|
||||
This function fetches the data from datafile and prepares the database. **Don't forget it!** If you use a
|
||||
persistent datastore, no command (insert, find, update, remove) will be executed before `loadDatabase`
|
||||
is called, so make sure to call it yourself or use the `autoload`
|
||||
option.
|
||||
|
||||
```javascript
|
||||
// Type 1: In-memory only datastore (no need to load the database)
|
||||
var Datastore = require('nedb')
|
||||
, db = new Datastore();
|
||||
|
||||
|
||||
// Type 2: Persistent datastore with manual loading
|
||||
var Datastore = require('nedb')
|
||||
, db = new Datastore({ filename: 'path/to/datafile' });
|
||||
db.loadDatabase(function (err) { // Callback is optional
|
||||
// Now commands will be executed
|
||||
});
|
||||
|
||||
|
||||
// Type 3: Persistent datastore with automatic loading
|
||||
var Datastore = require('nedb')
|
||||
, db = new Datastore({ filename: 'path/to/datafile', autoload: true });
|
||||
// You can issue commands right away
|
||||
|
||||
|
||||
// Type 4: Persistent datastore for a Node Webkit app called 'nwtest'
|
||||
// For example on Linux, the datafile will be ~/.config/nwtest/nedb-data/something.db
|
||||
var Datastore = require('nedb')
|
||||
, path = require('path')
|
||||
, db = new Datastore({ filename: path.join(require('nw.gui').App.dataPath, 'something.db') });
|
||||
|
||||
|
||||
// Of course you can create multiple datastores if you need several
|
||||
// collections. In this case it's usually a good idea to use autoload for all collections.
|
||||
db = {};
|
||||
db.users = new Datastore('path/to/users.db');
|
||||
db.robots = new Datastore('path/to/robots.db');
|
||||
|
||||
// You need to load each database (here we do it asynchronously)
|
||||
db.users.loadDatabase();
|
||||
db.robots.loadDatabase();
|
||||
```
|
||||
|
||||
### Compacting the database
|
||||
Under the hood, NeDB's persistence uses an append-only format, meaning that all updates and deletes actually result in lines added at the end of the datafile. The reason for this is that disk space is very cheap and appends are much faster than rewrites since they don't do a seek. The database is automatically compacted (i.e. put back in the one-line-per-document format) every time your application restarts.
|
||||
|
||||
You can manually call the compaction function with `yourDatabase.persistence.compactDatafile` which takes no argument. It queues a compaction of the datafile in the executor, to be executed sequentially after all pending operations.
|
||||
|
||||
You can also set automatic compaction at regular intervals with `yourDatabase.persistence.setAutocompactionInterval(interval)`, `interval` in milliseconds (a minimum of 5s is enforced), and stop automatic compaction with `yourDatabase.persistence.stopAutocompaction()`.
|
||||
|
||||
Keep in mind that compaction takes a bit of time (not too much: 130ms for 50k records on my slow machine) and no other operation can happen when it does, so most projects actually don't need to use it.
|
||||
|
||||
|
||||
### Inserting documents
|
||||
The native types are `String`, `Number`, `Boolean`, `Date` and `null`. You can also use
|
||||
arrays and subdocuments (objects). If a field is `undefined`, it will not be saved (this is different from
|
||||
MongoDB which transforms `undefined` in `null`, something I find counter-intuitive).
|
||||
|
||||
If the document does not contain an `_id` field, NeDB will automatically generate one for you (a 16-character alphanumerical string). The `_id` of a document, once set, cannot be modified.
|
||||
|
||||
Field names cannot begin with '$' or contain a '.'.
|
||||
|
||||
```javascript
|
||||
var doc = { hello: 'world'
|
||||
, n: 5
|
||||
, today: new Date()
|
||||
, nedbIsAwesome: true
|
||||
, notthere: null
|
||||
, notToBeSaved: undefined // Will not be saved
|
||||
, fruits: [ 'apple', 'orange', 'pear' ]
|
||||
, infos: { name: 'nedb' }
|
||||
};
|
||||
|
||||
db.insert(doc, function (err, newDoc) { // Callback is optional
|
||||
// newDoc is the newly inserted document, including its _id
|
||||
// newDoc has no key called notToBeSaved since its value was undefined
|
||||
});
|
||||
```
|
||||
|
||||
You can also bulk-insert an array of documents. This operation is atomic, meaning that if one insert fails due to a unique constraint being violated, all changes are rolled back.
|
||||
```javascript
|
||||
db.insert([{ a: 5 }, { a: 42 }], function (err, newDocs) {
|
||||
// Two documents were inserted in the database
|
||||
// newDocs is an array with these documents, augmented with their _id
|
||||
});
|
||||
|
||||
// If there is a unique constraint on field 'a', this will fail
|
||||
db.insert([{ a: 5 }, { a: 42 }, { a: 5 }], function (err) {
|
||||
// err is a 'uniqueViolated' error
|
||||
// The database was not modified
|
||||
});
|
||||
```
|
||||
|
||||
### Finding documents
|
||||
Use `find` to look for multiple documents matching your query, or `findOne` to look for one specific document. You can select documents based on field equality or use comparison operators (`$lt`, `$lte`, `$gt`, `$gte`, `$in`, `$nin`, `$ne`). You can also use logical operators `$or`, `$and`, `$not` and `$where`. See below for the syntax.
|
||||
|
||||
You can use regular expressions in two ways: in basic querying in place of a string, or with the `$regex` operator.
|
||||
|
||||
You can sort and paginate results using the cursor API (see below).
|
||||
|
||||
You can use standard projections to restrict the fields to appear in the results (see below).
|
||||
|
||||
#### Basic querying
|
||||
Basic querying means you are looking for documents whose fields match the ones you specify. You can use regular expressions to match strings.
|
||||
You can use the dot notation to navigate inside nested documents, arrays, arrays of subdocuments and to match a specific element of an array.
|
||||
|
||||
```javascript
|
||||
// Let's say our datastore contains the following collection
|
||||
// { _id: 'id1', planet: 'Mars', system: 'solar', inhabited: false, satellites: ['Phobos', 'Deimos'] }
|
||||
// { _id: 'id2', planet: 'Earth', system: 'solar', inhabited: true, humans: { genders: 2, eyes: true } }
|
||||
// { _id: 'id3', planet: 'Jupiter', system: 'solar', inhabited: false }
|
||||
// { _id: 'id4', planet: 'Omicron Persei 8', system: 'futurama', inhabited: true, humans: { genders: 7 } }
|
||||
// { _id: 'id5', completeData: { planets: [ { name: 'Earth', number: 3 }, { name: 'Mars', number: 2 }, { name: 'Pluton', number: 9 } ] } }
|
||||
|
||||
// Finding all planets in the solar system
|
||||
db.find({ system: 'solar' }, function (err, docs) {
|
||||
// docs is an array containing documents Mars, Earth, Jupiter
|
||||
// If no document is found, docs is equal to []
|
||||
});
|
||||
|
||||
// Finding all planets whose name contain the substring 'ar' using a regular expression
|
||||
db.find({ planet: /ar/ }, function (err, docs) {
|
||||
// docs contains Mars and Earth
|
||||
});
|
||||
|
||||
// Finding all inhabited planets in the solar system
|
||||
db.find({ system: 'solar', inhabited: true }, function (err, docs) {
|
||||
// docs is an array containing document Earth only
|
||||
});
|
||||
|
||||
// Use the dot-notation to match fields in subdocuments
|
||||
db.find({ "humans.genders": 2 }, function (err, docs) {
|
||||
// docs contains Earth
|
||||
});
|
||||
|
||||
// Use the dot-notation to navigate arrays of subdocuments
|
||||
db.find({ "completeData.planets.name": "Mars" }, function (err, docs) {
|
||||
// docs contains document 5
|
||||
});
|
||||
|
||||
db.find({ "completeData.planets.name": "Jupiter" }, function (err, docs) {
|
||||
// docs is empty
|
||||
});
|
||||
|
||||
db.find({ "completeData.planets.0.name": "Earth" }, function (err, docs) {
|
||||
// docs contains document 5
|
||||
// If we had tested against "Mars" docs would be empty because we are matching against a specific array element
|
||||
});
|
||||
|
||||
|
||||
// You can also deep-compare objects. Don't confuse this with dot-notation!
|
||||
db.find({ humans: { genders: 2 } }, function (err, docs) {
|
||||
// docs is empty, because { genders: 2 } is not equal to { genders: 2, eyes: true }
|
||||
});
|
||||
|
||||
// Find all documents in the collection
|
||||
db.find({}, function (err, docs) {
|
||||
});
|
||||
|
||||
// The same rules apply when you want to only find one document
|
||||
db.findOne({ _id: 'id1' }, function (err, doc) {
|
||||
// doc is the document Mars
|
||||
// If no document is found, doc is null
|
||||
});
|
||||
```
|
||||
|
||||
#### Operators ($lt, $lte, $gt, $gte, $in, $nin, $ne, $exists, $regex)
|
||||
The syntax is `{ field: { $op: value } }` where `$op` is any comparison operator:
|
||||
|
||||
* `$lt`, `$lte`: less than, less than or equal
|
||||
* `$gt`, `$gte`: greater than, greater than or equal
|
||||
* `$in`: member of. `value` must be an array of values
|
||||
* `$ne`, `$nin`: not equal, not a member of
|
||||
* `$exists`: checks whether the document possesses the property `field`. `value` should be true or false
|
||||
* `$regex`: checks whether a string is matched by the regular expression. Contrary to MongoDB, the use of `$options` with `$regex` is not supported, because it doesn't give you more power than regex flags. Basic queries are more readable so only use the `$regex` operator when you need to use another operator with it (see example below)
|
||||
|
||||
```javascript
|
||||
// $lt, $lte, $gt and $gte work on numbers and strings
|
||||
db.find({ "humans.genders": { $gt: 5 } }, function (err, docs) {
|
||||
// docs contains Omicron Persei 8, whose humans have more than 5 genders (7).
|
||||
});
|
||||
|
||||
// When used with strings, lexicographical order is used
|
||||
db.find({ planet: { $gt: 'Mercury' }}, function (err, docs) {
|
||||
// docs contains Omicron Persei 8
|
||||
})
|
||||
|
||||
// Using $in. $nin is used in the same way
|
||||
db.find({ planet: { $in: ['Earth', 'Jupiter'] }}, function (err, docs) {
|
||||
// docs contains Earth and Jupiter
|
||||
});
|
||||
|
||||
// Using $exists
|
||||
db.find({ satellites: { $exists: true } }, function (err, docs) {
|
||||
// docs contains only Mars
|
||||
});
|
||||
|
||||
// Using $regex with another operator
|
||||
db.find({ planet: { $regex: /ar/, $nin: ['Jupiter', 'Earth'] } }, function (err, docs) {
|
||||
// docs only contains Mars because Earth was excluded from the match by $nin
|
||||
});
|
||||
```
|
||||
|
||||
#### Array fields
|
||||
When a field in a document is an array, NeDB first tries to see if there is an array-specific comparison function (for now there is only `$size`) being used
|
||||
and tries it first. If there isn't, the query is treated as a query on every element and there is a match if at least one element matches.
|
||||
|
||||
```javascript
|
||||
// Using an array-specific comparison function
|
||||
// Note: you can't use nested comparison functions, e.g. { $size: { $lt: 5 } } will throw an error
|
||||
db.find({ satellites: { $size: 2 } }, function (err, docs) {
|
||||
// docs contains Mars
|
||||
});
|
||||
|
||||
db.find({ satellites: { $size: 1 } }, function (err, docs) {
|
||||
// docs is empty
|
||||
});
|
||||
|
||||
// If a document's field is an array, matching it means matching any element of the array
|
||||
db.find({ satellites: 'Phobos' }, function (err, docs) {
|
||||
// docs contains Mars. Result would have been the same if query had been { satellites: 'Deimos' }
|
||||
});
|
||||
|
||||
// This also works for queries that use comparison operators
|
||||
db.find({ satellites: { $lt: 'Amos' } }, function (err, docs) {
|
||||
// docs is empty since Phobos and Deimos are after Amos in lexicographical order
|
||||
});
|
||||
|
||||
// This also works with the $in and $nin operator
|
||||
db.find({ satellites: { $in: ['Moon', 'Deimos'] } }, function (err, docs) {
|
||||
// docs contains Mars (the Earth document is not complete!)
|
||||
});
|
||||
```
|
||||
|
||||
#### Logical operators $or, $and, $not, $where
|
||||
You can combine queries using logical operators:
|
||||
|
||||
* For `$or` and `$and`, the syntax is `{ $op: [query1, query2, ...] }`.
|
||||
* For `$not`, the syntax is `{ $not: query }`
|
||||
* For `$where`, the syntax is `{ $where: function () { /* object is "this", return a boolean */ } }`
|
||||
|
||||
```javascript
|
||||
db.find({ $or: [{ planet: 'Earth' }, { planet: 'Mars' }] }, function (err, docs) {
|
||||
// docs contains Earth and Mars
|
||||
});
|
||||
|
||||
db.find({ $not: { planet: 'Earth' } }, function (err, docs) {
|
||||
// docs contains Mars, Jupiter, Omicron Persei 8
|
||||
});
|
||||
|
||||
db.find({ $where: function () { return Object.keys(this) > 6; } }, function (err, docs) {
|
||||
// docs with more than 6 properties
|
||||
});
|
||||
|
||||
// You can mix normal queries, comparison queries and logical operators
|
||||
db.find({ $or: [{ planet: 'Earth' }, { planet: 'Mars' }], inhabited: true }, function (err, docs) {
|
||||
// docs contains Earth
|
||||
});
|
||||
|
||||
```
|
||||
|
||||
#### Sorting and paginating
|
||||
If you don't specify a callback to `find`, `findOne` or `count`, a `Cursor` object is returned. You can modify the cursor with `sort`, `skip` and `limit` and then execute it with `exec(callback)`.
|
||||
|
||||
```javascript
|
||||
// Let's say the database contains these 4 documents
|
||||
// doc1 = { _id: 'id1', planet: 'Mars', system: 'solar', inhabited: false, satellites: ['Phobos', 'Deimos'] }
|
||||
// doc2 = { _id: 'id2', planet: 'Earth', system: 'solar', inhabited: true, humans: { genders: 2, eyes: true } }
|
||||
// doc3 = { _id: 'id3', planet: 'Jupiter', system: 'solar', inhabited: false }
|
||||
// doc4 = { _id: 'id4', planet: 'Omicron Persei 8', system: 'futurama', inhabited: true, humans: { genders: 7 } }
|
||||
|
||||
// No query used means all results are returned (before the Cursor modifiers)
|
||||
db.find({}).sort({ planet: 1 }).skip(1).limit(2).exec(function (err, docs) {
|
||||
// docs is [doc3, doc1]
|
||||
});
|
||||
|
||||
// You can sort in reverse order like this
|
||||
db.find({ system: 'solar' }).sort({ planet: -1 }).exec(function (err, docs) {
|
||||
// docs is [doc1, doc3, doc2]
|
||||
});
|
||||
|
||||
// You can sort on one field, then another, and so on like this:
|
||||
db.find({}).sort({ firstField: 1, secondField: -1 }) ... // You understand how this works!
|
||||
```
|
||||
|
||||
#### Projections
|
||||
You can give `find` and `findOne` an optional second argument, `projections`. The syntax is the same as MongoDB: `{ a: 1, b: 1 }` to return only the `a` and `b` fields, `{ a: 0, b: 0 }` to omit these two fields. You cannot use both modes at the same time, except for `_id` which is by default always returned and which you can choose to omit.
|
||||
|
||||
```javascript
|
||||
// Same database as above
|
||||
|
||||
// Keeping only the given fields
|
||||
db.find({ planet: 'Mars' }, { planet: 1, system: 1 }, function (err, docs) {
|
||||
// docs is [{ planet: 'Mars', system: 'solar', _id: 'id1' }]
|
||||
});
|
||||
|
||||
// Keeping only the given fields but removing _id
|
||||
db.find({ planet: 'Mars' }, { planet: 1, system: 1, _id: 0 }, function (err, docs) {
|
||||
// docs is [{ planet: 'Mars', system: 'solar' }]
|
||||
});
|
||||
|
||||
// Omitting only the given fields and removing _id
|
||||
db.find({ planet: 'Mars' }, { planet: 0, system: 0, _id: 0 }, function (err, docs) {
|
||||
// docs is [{ inhabited: false, satellites: ['Phobos', 'Deimos'] }]
|
||||
});
|
||||
|
||||
// Failure: using both modes at the same time
|
||||
db.find({ planet: 'Mars' }, { planet: 0, system: 1 }, function (err, docs) {
|
||||
// err is the error message, docs is undefined
|
||||
});
|
||||
|
||||
// You can also use it in a Cursor way but this syntax is not compatible with MongoDB
|
||||
// If upstream compatibility is important don't use this method
|
||||
db.find({ planet: 'Mars' }).projection({ planet: 1, system: 1 }).exec(function (err, docs) {
|
||||
// docs is [{ planet: 'Mars', system: 'solar', _id: 'id1' }]
|
||||
});
|
||||
```
|
||||
|
||||
|
||||
|
||||
### Counting documents
|
||||
You can use `count` to count documents. It has the same syntax as `find`. For example:
|
||||
|
||||
```javascript
|
||||
// Count all planets in the solar system
|
||||
db.count({ system: 'solar' }, function (err, count) {
|
||||
// count equals to 3
|
||||
});
|
||||
|
||||
// Count all documents in the datastore
|
||||
db.count({}, function (err, count) {
|
||||
// count equals to 4
|
||||
});
|
||||
```
|
||||
|
||||
|
||||
### Updating documents
|
||||
`db.update(query, update, options, callback)` will update all documents matching `query` according to the `update` rules:
|
||||
* `query` is the same kind of finding query you use with `find` and `findOne`
|
||||
* `update` specifies how the documents should be modified. It is either a new document or a set of modifiers (you cannot use both together, it doesn't make sense!)
|
||||
* A new document will replace the matched docs
|
||||
* The modifiers create the fields they need to modify if they don't exist, and you can apply them to subdocs. Available field modifiers are `$set` to change a field's value, `$unset` to delete a field and `$inc` to increment a field's value. To work on arrays, you have `$push`, `$pop`, `$addToSet`, `$pull`, and the special `$each`. See examples below for the syntax.
|
||||
* `options` is an object with two possible parameters
|
||||
* `multi` (defaults to `false`) which allows the modification of several documents if set to true
|
||||
* `upsert` (defaults to `false`) if you want to insert a new document corresponding to the `update` rules if your `query` doesn't match anything
|
||||
* `callback` (optional) signature: `err`, `numReplaced`, `newDoc`
|
||||
* `numReplaced` is the number of documents replaced
|
||||
* `newDoc` is the created document if the upsert mode was chosen and a document was inserted
|
||||
|
||||
**Note**: you can't change a document's _id.
|
||||
|
||||
```javascript
|
||||
// Let's use the same example collection as in the "finding document" part
|
||||
// { _id: 'id1', planet: 'Mars', system: 'solar', inhabited: false }
|
||||
// { _id: 'id2', planet: 'Earth', system: 'solar', inhabited: true }
|
||||
// { _id: 'id3', planet: 'Jupiter', system: 'solar', inhabited: false }
|
||||
// { _id: 'id4', planet: 'Omicron Persei 8', system: 'futurama', inhabited: true }
|
||||
|
||||
// Replace a document by another
|
||||
db.update({ planet: 'Jupiter' }, { planet: 'Pluton'}, {}, function (err, numReplaced) {
|
||||
// numReplaced = 1
|
||||
// The doc #3 has been replaced by { _id: 'id3', planet: 'Pluton' }
|
||||
// Note that the _id is kept unchanged, and the document has been replaced
|
||||
// (the 'system' and inhabited fields are not here anymore)
|
||||
});
|
||||
|
||||
// Set an existing field's value
|
||||
db.update({ system: 'solar' }, { $set: { system: 'solar system' } }, { multi: true }, function (err, numReplaced) {
|
||||
// numReplaced = 3
|
||||
// Field 'system' on Mars, Earth, Jupiter now has value 'solar system'
|
||||
});
|
||||
|
||||
// Setting the value of a non-existing field in a subdocument by using the dot-notation
|
||||
db.update({ planet: 'Mars' }, { $set: { "data.satellites": 2, "data.red": true } }, {}, function () {
|
||||
// Mars document now is { _id: 'id1', system: 'solar', inhabited: false
|
||||
// , data: { satellites: 2, red: true }
|
||||
// }
|
||||
// Note that to set fields in subdocuments, you HAVE to use dot-notation
|
||||
// Using object-notation will just replace the top-level field
|
||||
db.update({ planet: 'Mars' }, { $set: { data: { satellites: 3 } } }, {}, function () {
|
||||
// Mars document now is { _id: 'id1', system: 'solar', inhabited: false
|
||||
// , data: { satellites: 3 }
|
||||
// }
|
||||
// You lost the "data.red" field which is probably not the intended behavior
|
||||
});
|
||||
});
|
||||
|
||||
// Deleting a field
|
||||
db.update({ planet: 'Mars' }, { $unset: { planet: true } }, {}, function () {
|
||||
// Now the document for Mars doesn't contain the planet field
|
||||
// You can unset nested fields with the dot notation of course
|
||||
});
|
||||
|
||||
// Upserting a document
|
||||
db.update({ planet: 'Pluton' }, { planet: 'Pluton', inhabited: false }, { upsert: true }, function (err, numReplaced, upsert) {
|
||||
// numReplaced = 1, upsert = { _id: 'id5', planet: 'Pluton', inhabited: false }
|
||||
// A new document { _id: 'id5', planet: 'Pluton', inhabited: false } has been added to the collection
|
||||
});
|
||||
|
||||
// If you upsert with a modifier, the upserted doc is the query modified by the modifier
|
||||
// This is simpler than it sounds :)
|
||||
db.update({ planet: 'Pluton' }, { $inc: { distance: 38 } }, { upsert: true }, function () {
|
||||
// A new document { _id: 'id5', planet: 'Pluton', distance: 38 } has been added to the collection
|
||||
});
|
||||
|
||||
// If we insert a new document { _id: 'id6', fruits: ['apple', 'orange', 'pear'] } in the collection,
|
||||
// let's see how we can modify the array field atomically
|
||||
|
||||
// $push inserts new elements at the end of the array
|
||||
db.update({ _id: 'id6' }, { $push: { fruits: 'banana' } }, {}, function () {
|
||||
// Now the fruits array is ['apple', 'orange', 'pear', 'banana']
|
||||
});
|
||||
|
||||
// $pop removes an element from the end (if used with 1) or the front (if used with -1) of the array
|
||||
db.update({ _id: 'id6' }, { $pop: { fruits: 1 } }, {}, function () {
|
||||
// Now the fruits array is ['apple', 'orange']
|
||||
// With { $pop: { fruits: -1 } }, it would have been ['orange', 'pear']
|
||||
});
|
||||
|
||||
// $addToSet adds an element to an array only if it isn't already in it
|
||||
// Equality is deep-checked (i.e. $addToSet will not insert an object in an array already containing the same object)
|
||||
// Note that it doesn't check whether the array contained duplicates before or not
|
||||
db.update({ _id: 'id6' }, { $addToSet: { fruits: 'apple' } }, {}, function () {
|
||||
// The fruits array didn't change
|
||||
// If we had used a fruit not in the array, e.g. 'banana', it would have been added to the array
|
||||
});
|
||||
|
||||
// $pull removes all values matching a value or even any NeDB query from the array
|
||||
db.update({ _id: 'id6' }, { $pull: { fruits: 'apple' } }, {}, function () {
|
||||
// Now the fruits array is ['orange', 'pear']
|
||||
});
|
||||
db.update({ _id: 'id6' }, { $pull: { fruits: { $in: ['apple', 'pear'] } } }, {}, function () {
|
||||
// Now the fruits array is ['orange']
|
||||
});
|
||||
|
||||
|
||||
|
||||
// $each can be used to $push or $addToSet multiple values at once
|
||||
// This example works the same way with $addToSet
|
||||
db.update({ _id: 'id6' }, { $push: { fruits: {$each: ['banana', 'orange'] } } }, {}, function () {
|
||||
// Now the fruits array is ['apple', 'orange', 'pear', 'banana', 'orange']
|
||||
});
|
||||
```
|
||||
|
||||
### Removing documents
|
||||
`db.remove(query, options, callback)` will remove all documents matching `query` according to `options`
|
||||
* `query` is the same as the ones used for finding and updating
|
||||
* `options` only one option for now: `multi` which allows the removal of multiple documents if set to true. Default is false
|
||||
* `callback` is optional, signature: err, numRemoved
|
||||
|
||||
```javascript
|
||||
// Let's use the same example collection as in the "finding document" part
|
||||
// { _id: 'id1', planet: 'Mars', system: 'solar', inhabited: false }
|
||||
// { _id: 'id2', planet: 'Earth', system: 'solar', inhabited: true }
|
||||
// { _id: 'id3', planet: 'Jupiter', system: 'solar', inhabited: false }
|
||||
// { _id: 'id4', planet: 'Omicron Persei 8', system: 'futurama', inhabited: true }
|
||||
|
||||
// Remove one document from the collection
|
||||
// options set to {} since the default for multi is false
|
||||
db.remove({ _id: 'id2' }, {}, function (err, numRemoved) {
|
||||
// numRemoved = 1
|
||||
});
|
||||
|
||||
// Remove multiple documents
|
||||
db.remove({ system: 'solar' }, { multi: true }, function (err, numRemoved) {
|
||||
// numRemoved = 3
|
||||
// All planets from the solar system were removed
|
||||
});
|
||||
```
|
||||
|
||||
### Indexing
|
||||
NeDB supports indexing. It gives a very nice speed boost and can be used to enforce a unique constraint on a field. You can index any field, including fields in nested documents using the dot notation. For now, indexes are only used to speed up basic queries and queries using `$in`, `$lt`, `$lte`, `$gt` and `$gte`.
|
||||
|
||||
To create an index, use `datastore.ensureIndex(options, cb)`, where callback is optional and get passed an error if any (usually a unique constraint that was violated). `ensureIndex` can be called when you want, even after some data was inserted, though it's best to call it at application startup. The options are:
|
||||
|
||||
* **fieldName** (required): name of the field to index. Use the dot notation to index a field in a nested document.
|
||||
* **unique** (optional, defaults to `false`): enforce field uniqueness. Note that a unique index will raise an error if you try to index two documents for which the field is not defined.
|
||||
* **sparse** (optional, defaults to `false`): don't index documents for which the field is not defined. Use this option along with "unique" if you want to accept multiple documents for which it is not defined.
|
||||
|
||||
Note: the `_id` is automatically indexed with a unique constraint, no need to call `ensureIndex` on it.
|
||||
|
||||
You can remove a previously created index with `datastore.removeIndex(fieldName, cb)`.
|
||||
|
||||
If your datastore is persistent, the indexes you created are persisted in the datafile, when you load the database a second time they are automatically created for you. No need to remove any `ensureIndex` though, if it is called on a database that already has the index, nothing happens.
|
||||
|
||||
```javascript
|
||||
db.ensureIndex({ fieldName: 'somefield' }, function (err) {
|
||||
// If there was an error, err is not null
|
||||
});
|
||||
|
||||
// Using a unique constraint with the index
|
||||
db.ensureIndex({ fieldName: 'somefield', unique: true }, function (err) {
|
||||
});
|
||||
|
||||
// Using a sparse unique index
|
||||
db.ensureIndex({ fieldName: 'somefield', unique: true, sparse: true }, function (err) {
|
||||
});
|
||||
|
||||
|
||||
// Format of the error message when the unique constraint is not met
|
||||
db.insert({ somefield: 'nedb' }, function (err) {
|
||||
// err is null
|
||||
db.insert({ somefield: 'nedb' }, function (err) {
|
||||
// err is { errorType: 'uniqueViolated'
|
||||
// , key: 'somefield'
|
||||
// , message: 'Unique constraint violated for key somefield' }
|
||||
});
|
||||
});
|
||||
|
||||
// Remove index on field somefield
|
||||
db.removeIndex('somefield', function (err) {
|
||||
});
|
||||
```
|
||||
|
||||
**Note:** the `ensureIndex` function creates the index synchronously, so it's best to use it at application startup. It's quite fast so it doesn't increase startup time much (35 ms for a collection containing 10,000 documents).
|
||||
|
||||
|
||||
## Browser version
|
||||
As of v0.8.0, you can use NeDB in the browser! You can find it and its minified version in the repository, in the `browser-version/out` directory. You only need to require `nedb.js` or `nedb.min.js` in your HTML file and the global object `Nedb` can be used right away, with the same API as the server version:
|
||||
|
||||
```
|
||||
<script src="nedb.min.js"></script>
|
||||
<script>
|
||||
var db = new Nedb(); // Create an in-memory only datastore
|
||||
|
||||
db.insert({ planet: 'Earth' });
|
||||
db.insert({ planet: 'Mars' });
|
||||
|
||||
db.find({}, function (err, docs) {
|
||||
// docs contains the two planets Earth and Mars
|
||||
});
|
||||
</script>
|
||||
```
|
||||
|
||||
It has been tested and is compatible with Chrome, Safari, Firefox, IE 10, IE 9. Please open an issue if you need compatibility with IE 8/IE 7, I think it will need some work and am not sure it is needed, since most complex web applications - the ones that would need NeDB - only work on modern browsers anyway. To launch the tests, simply open the file `browser-version/test/index.html` in a browser and you'll see the results of the tests for this browser.
|
||||
|
||||
If you fork and modify nedb, you can build the browser version from the sources, the build script is `browser-version/build.js`.
|
||||
|
||||
As of v0.11, NeDB is also persistent on the browser. To use this, simply create the collection with the `filename` option which will be the name of the `localStorage` variable storing data. Persistence should work on all browsers where NeDB works.
|
||||
|
||||
**Browser persistence is still young! It has been tested on most major browsers but please report any bugs you find**
|
||||
|
||||
|
||||
## Performance
|
||||
### Speed
|
||||
NeDB is not intended to be a replacement of large-scale databases such as MongoDB, and as such was not designed for speed. That said, it is still pretty fast on the expected datasets, especially if you use indexing. On my machine (3 years old, no SSD), with a collection containing 10,000 documents, with indexing:
|
||||
* Insert: **5,950 ops/s**
|
||||
* Find: **25,440 ops/s**
|
||||
* Update: **4,490 ops/s**
|
||||
* Remove: **6,620 ops/s**
|
||||
|
||||
You can run the simple benchmarks I use by executing the scripts in the `benchmarks` folder. Run them with the `--help` flag to see how they work.
|
||||
|
||||
### Memory footprint
|
||||
A copy of the whole database is kept in memory. This is not much on the
|
||||
expected kind of datasets (20MB for 10,000 2KB documents). If requested, I'll introduce an
|
||||
option to not use this cache to decrease memory footprint (at the cost
|
||||
of a lower speed).
|
||||
|
||||
|
||||
## Use in other services
|
||||
* <a href="https://github.com/louischatriot/connect-nedb-session"
|
||||
target="_blank">connect-nedb-session</a> is a session store for
|
||||
Connect and Express, backed by nedb
|
||||
* If you mostly use NeDB for logging purposes and don't want the memory footprint of your application to grow too large, you can use <a href="https://github.com/louischatriot/nedb-logger" target="_blank">NeDB Logger</a> to insert documents in a NeDB-readable database
|
||||
* If you've outgrown NeDB, switching to MongoDB won't be too hard as it is the same API. Use <a href="https://github.com/louischatriot/nedb-to-mongodb" target="_blank">this utility</a> to transfer the data from a NeDB database to a MongoDB collection
|
||||
|
||||
|
||||
## Contribute!
|
||||
You want to help? You can contribute time or bitcoins.
|
||||
|
||||
### Helping on the codebase
|
||||
Issues reporting and pull requests are always appreciated. For issues, make sure to always include a code snippet and describe the expected vs actual behavior. If you send a pull request, make sure to stick to NeDB's coding style and always test all the code you submit. You can look at the current tests to see how to do it
|
||||
|
||||
### Bitcoins
|
||||
You don't have time? You can support NeDB by sending bitcoins to this address: 1dDZLnWpBbodPiN8sizzYrgaz5iahFyb1
|
||||
|
||||
|
||||
## License
|
||||
|
||||
See [License](LICENSE)
|
@ -0,0 +1,305 @@
|
||||
/**
|
||||
* Functions that are used in several benchmark tests
|
||||
*/
|
||||
|
||||
var customUtils = require('../lib/customUtils')
|
||||
, fs = require('fs')
|
||||
, path = require('path')
|
||||
, Datastore = require('../lib/datastore')
|
||||
, Persistence = require('../lib/persistence')
|
||||
, executeAsap // process.nextTick or setImmediate depending on your Node version
|
||||
;
|
||||
|
||||
// Pick the fastest "schedule on next turn of the event loop" primitive.
// setImmediate exists on Node >= 0.10 and, unlike a recursive
// process.nextTick, does not starve I/O; older Nodes fall back to nextTick.
// A typeof check avoids using a ReferenceError for control flow.
if (typeof setImmediate === 'function') {
  executeAsap = setImmediate;
} else {
  executeAsap = process.nextTick;
}
|
||||
|
||||
|
||||
/**
|
||||
* Configure the benchmark
|
||||
*/
|
||||
/**
 * Configure the benchmark from the command line.
 * Parses -n/--number, -i/--with-index and -m/--in-memory, logs the chosen
 * configuration, and builds the datastore under test.
 * @param {String} benchDb - path of the datafile used for the benchmark
 * @returns {Object} { n: collection size, d: Datastore, program: commander }
 */
module.exports.getConfiguration = function (benchDb) {
  var program = require('commander');

  program
    .option('-n --number [number]', 'Size of the collection to test on', parseInt)
    .option('-i --with-index', 'Use an index')
    .option('-m --in-memory', 'Test with an in-memory only store')
    .parse(process.argv);

  var n = program.number || 10000;

  console.log("----------------------------");
  console.log("Test with " + n + " documents");
  console.log(program.withIndex ? "Use an index" : "Don't use an index");
  console.log(program.inMemory ? "Use an in-memory datastore" : "Use a persistent datastore");
  console.log("----------------------------");

  var d = new Datastore({ filename: benchDb
                        , inMemoryOnly: program.inMemory
                        });

  return { n: n, d: d, program: program };
};
|
||||
|
||||
|
||||
/**
|
||||
* Ensure the workspace exists and the db datafile is empty
|
||||
*/
|
||||
/**
 * Ensure the workspace exists and the db datafile is empty.
 * Unlinks the datafile directly instead of the racy (and deprecated)
 * fs.exists + fs.unlink sequence; a missing file (ENOENT) is fine.
 * @param {String} filename - path of the benchmark datafile
 * @param {Function} cb - called with an error only if the unlink failed
 *                        for a reason other than the file not existing
 */
module.exports.prepareDb = function (filename, cb) {
  Persistence.ensureDirectoryExists(path.dirname(filename), function () {
    fs.unlink(filename, function (err) {
      if (err && err.code !== 'ENOENT') { return cb(err); }
      return cb();
    });
  });
};
|
||||
|
||||
|
||||
/**
|
||||
* Return an array with the numbers from 0 to n-1, in a random order
|
||||
* Uses Fisher Yates algorithm
|
||||
* Useful to get fair tests
|
||||
*/
|
||||
/**
 * Return an array containing the numbers 0 to n-1 in a random order.
 * Shuffles with the Fisher-Yates algorithm so every permutation is
 * equally likely, which keeps the benchmarks fair.
 * @param {Number} n - size of the array to build
 * @returns {Array} a random permutation of [0, n)
 */
function getRandomArray (n) {
  var shuffled = [];

  for (var idx = 0; idx < n; idx += 1) { shuffled.push(idx); }

  for (var pos = n - 1; pos >= 1; pos -= 1) {
    var pick = Math.floor((pos + 1) * Math.random());
    var held = shuffled[pos];
    shuffled[pos] = shuffled[pick];
    shuffled[pick] = held;
  }

  return shuffled;
}
|
||||
// Expose the shuffle helper so the individual bench scripts can reuse it
module.exports.getRandomArray = getRandomArray;
|
||||
|
||||
|
||||
/**
|
||||
* Insert a certain number of documents for testing
|
||||
*/
|
||||
/**
 * Insert n documents { docNumber: i } one at a time, in a random order,
 * and log the measured throughput.
 * @param {Datastore} d - datastore under test
 * @param {Number} n - number of documents to insert
 * @param {Object} profiler - exec-time profiler used to time the run
 * @param {Function} cb - called when done, with an error if an insert failed
 */
module.exports.insertDocs = function (d, n, profiler, cb) {
  var order = getRandomArray(n);   // random order keeps the benchmark fair

  profiler.step('Begin inserting ' + n + ' docs');

  function runFrom(i) {
    if (i === n) {   // Finished
      console.log("===== RESULT (insert) ===== " + Math.floor(1000* n / profiler.elapsedSinceLastStep()) + " ops/s");
      profiler.step('Finished inserting ' + n + ' docs');
      return cb();
    }

    d.insert({ docNumber: order[i] }, function (err) {
      if (err) { return cb(err); }
      // Yield between operations so the event loop is never starved
      executeAsap(function () {
        runFrom(i + 1);
      });
    });
  }
  runFrom(0);
};
|
||||
|
||||
|
||||
/**
|
||||
* Find documents with find
|
||||
*/
|
||||
/**
 * Look up each of the n documents with find, one query at a time,
 * and log the measured throughput.
 * @param {Datastore} d - datastore under test (must hold docs 0..n-1)
 * @param {Number} n - number of lookups to perform
 * @param {Object} profiler - exec-time profiler used to time the run
 * @param {Function} cb - called when done, with an error if a find failed
 *                        or returned the wrong result
 */
module.exports.findDocs = function (d, n, profiler, cb) {
  var order = getRandomArray(n);   // random access order keeps the benchmark fair

  profiler.step("Finding " + n + " documents");

  function runFrom(i) {
    if (i === n) {   // Finished
      console.log("===== RESULT (find) ===== " + Math.floor(1000* n / profiler.elapsedSinceLastStep()) + " ops/s");
      profiler.step('Finished finding ' + n + ' docs');
      return cb();
    }

    d.find({ docNumber: order[i] }, function (err, docs) {
      if (err) { return cb(err); }
      // Sanity check: exactly one matching doc with the expected number
      if (docs.length !== 1 || docs[0].docNumber !== order[i]) { return cb('One find didnt work'); }
      executeAsap(function () {
        runFrom(i + 1);
      });
    });
  }
  runFrom(0);
};
|
||||
|
||||
|
||||
/**
|
||||
* Find documents with find and the $in operator
|
||||
*/
|
||||
/**
 * Run n find queries using the $in operator and log the measured throughput.
 * Each query asks for a sliding window of arraySize consecutive docNumbers.
 * @param {Datastore} d - datastore under test (must hold docs 0..n-1)
 * @param {Number} n - number of queries to perform
 * @param {Object} profiler - exec-time profiler used to time the run
 * @param {Function} cb - called when done, with an error if a find failed
 *                        or returned the wrong number of docs
 */
module.exports.findDocsWithIn = function (d, n, profiler, cb) {
  var ins = [], i, j
    , arraySize = Math.min(10, n)   // The array for $in needs to be smaller than n (inclusive)
    ;

  // Preparing all the $in arrays, will take some time
  for (i = 0; i < n; i += 1) {
    ins[i] = [];

    for (j = 0; j < arraySize; j += 1) {
      ins[i].push((i + j) % n);
    }
  }

  profiler.step("Finding " + n + " documents WITH $IN OPERATOR");

  function runFrom(i) {
    if (i === n) {   // Finished
      console.log("===== RESULT (find with in selector) ===== " + Math.floor(1000* n / profiler.elapsedSinceLastStep()) + " ops/s");
      profiler.step('Finished finding ' + n + ' docs');
      return cb();
    }

    d.find({ docNumber: { $in: ins[i] } }, function (err, docs) {
      if (err) { return cb(err); }
      if (docs.length !== arraySize) { return cb('One find didnt work'); }
      executeAsap(function () {
        runFrom(i + 1);
      });
    });
  }
  runFrom(0);
};
|
||||
|
||||
|
||||
/**
|
||||
* Find documents with findOne
|
||||
*/
|
||||
/**
 * Look up each of the n documents with findOne, one query at a time,
 * and log the measured throughput.
 * @param {Datastore} d - datastore under test (must hold docs 0..n-1)
 * @param {Number} n - number of lookups to perform
 * @param {Object} profiler - exec-time profiler used to time the run
 * @param {Function} cb - called when done, with an error if a findOne failed
 *                        or returned the wrong doc
 */
module.exports.findOneDocs = function (d, n, profiler, cb) {
  var order = getRandomArray(n);   // random access order keeps the benchmark fair

  profiler.step("FindingOne " + n + " documents");

  function runFrom(i) {
    if (i === n) {   // Finished
      console.log("===== RESULT (findOne) ===== " + Math.floor(1000* n / profiler.elapsedSinceLastStep()) + " ops/s");
      profiler.step('Finished finding ' + n + ' docs');
      return cb();
    }

    d.findOne({ docNumber: order[i] }, function (err, doc) {
      if (err) { return cb(err); }
      if (!doc || doc.docNumber !== order[i]) { return cb('One find didnt work'); }
      executeAsap(function () {
        runFrom(i + 1);
      });
    });
  }
  runFrom(0);
};
|
||||
|
||||
|
||||
/**
|
||||
* Update documents
|
||||
* options is the same as the options object for update
|
||||
*/
|
||||
/**
 * Update each of the n documents, one at a time, and log throughput.
 * @param {Object} options - same options object as Datastore#update
 * @param {Datastore} d - datastore under test (must hold docs 0..n-1)
 * @param {Number} n - number of updates to perform
 * @param {Object} profiler - exec-time profiler used to time the run
 * @param {Function} cb - called when done, with an error if an update failed
 *                        or touched a wrong number of docs
 */
module.exports.updateDocs = function (options, d, n, profiler, cb) {
  var order = getRandomArray(n);   // random access order keeps the benchmark fair

  profiler.step("Updating " + n + " documents");

  function runFrom(i) {
    if (i === n) {   // Finished
      console.log("===== RESULT (update) ===== " + Math.floor(1000* n / profiler.elapsedSinceLastStep()) + " ops/s");
      profiler.step('Finished updating ' + n + ' docs');
      return cb();
    }

    // Will not actually modify the document but will take the same time
    d.update({ docNumber: order[i] }, { docNumber: order[i] }, options, function (err, nr) {
      if (err) { return cb(err); }
      if (nr !== 1) { return cb('One update didnt work'); }
      executeAsap(function () {
        runFrom(i + 1);
      });
    });
  }
  runFrom(0);
};
|
||||
|
||||
|
||||
/**
|
||||
* Remove documents
|
||||
* options is the same as the options object for update
|
||||
*/
|
||||
/**
 * Remove each of the n documents, one at a time, reinserting each doc right
 * after its removal so the collection stays at n items for the whole run
 * (the logged time therefore covers one remove PLUS one insert per step).
 * @param {Object} options - same options object as Datastore#remove
 * @param {Datastore} d - datastore under test (must hold docs 0..n-1)
 * @param {Number} n - number of remove+insert cycles to perform
 * @param {Object} profiler - exec-time profiler used to time the run
 * @param {Function} cb - called when done, with an error if a remove failed
 *                        or removed a wrong number of docs
 */
module.exports.removeDocs = function (options, d, n, profiler, cb) {
  var order = getRandomArray(n);   // random access order keeps the benchmark fair

  profiler.step("Removing " + n + " documents");

  function runFrom(i) {
    if (i === n) {   // Finished
      console.log("===== RESULT (1 remove + 1 insert) ===== " + Math.floor(1000* n / profiler.elapsedSinceLastStep()) + " ops/s");
      console.log("====== IMPORTANT: Please note that this is the time that was needed to perform " + n + " removes and " + n + " inserts");
      console.log("====== The extra inserts are needed to keep collection size at " + n + " items for the benchmark to make sense");
      console.log("====== Use the insert speed logged above to calculate the actual remove speed, which is higher (should be significantly so if you use indexing)");
      profiler.step('Finished removing ' + n + ' docs');
      return cb();
    }

    d.remove({ docNumber: order[i] }, options, function (err, nr) {
      if (err) { return cb(err); }
      if (nr !== 1) { return cb('One remove didnt work'); }
      d.insert({ docNumber: order[i] }, function (err) {   // We need to reinsert the doc so that we keep the collection's size at n
        // So actually we're calculating the average time taken by one insert + one remove
        executeAsap(function () {
          runFrom(i + 1);
        });
      });
    });
  }
  runFrom(0);
};
|
||||
|
||||
|
||||
/**
|
||||
* Load database
|
||||
*/
|
||||
/**
 * Reload the database from its datafile n times in a row and log throughput.
 * @param {Datastore} d - datastore under test
 * @param {Number} n - number of reloads to perform
 * @param {Object} profiler - exec-time profiler used to time the run
 * @param {Function} cb - called when done, with an error if a load failed
 */
module.exports.loadDatabase = function (d, n, profiler, cb) {
  profiler.step("Loading the database " + n + " times");

  function runFrom(i) {
    if (i === n) {   // Finished
      console.log("===== RESULT ===== " + Math.floor(1000* n / profiler.elapsedSinceLastStep()) + " ops/s");
      profiler.step('Finished loading a database ' + n + ' times');
      return cb();
    }

    d.loadDatabase(function (err) {
      if (err) { return cb(err); }
      executeAsap(function () {
        runFrom(i + 1);
      });
    });
  }
  runFrom(0);
};
|
||||
|
||||
|
||||
|
||||
|
@ -0,0 +1,51 @@
|
||||
/**
 * Benchmark: average cost of one ensureIndex call on a populated collection.
 * Inserts n docs, then builds and drops the docNumber index n times.
 */
var Datastore = require('../lib/datastore')
  , benchDb = 'workspace/insert.bench.db'
  , async = require('async')
  , commonUtilities = require('./commonUtilities')
  , execTime = require('exec-time')
  , profiler = new execTime('INSERT BENCH')
  , d = new Datastore(benchDb)
  , program = require('commander')
  , n
  ;

program
  .option('-n --number [number]', 'Size of the collection to test on', parseInt)
  .option('-i --with-index', 'Test with an index')
  .parse(process.argv);

n = program.number || 10000;

console.log("----------------------------");
console.log("Test with " + n + " documents");
console.log("----------------------------");

async.waterfall([
  async.apply(commonUtilities.prepareDb, benchDb)
, function (cb) {
    d.loadDatabase(function (err) {
      if (err) { return cb(err); }
      cb();
    });
  }
, function (cb) { profiler.beginProfiling(); return cb(); }
, async.apply(commonUtilities.insertDocs, d, n, profiler)
, function (cb) {
    var i;

    profiler.step('Begin calling ensureIndex ' + n + ' times');

    for (i = 0; i < n; i += 1) {
      d.ensureIndex({ fieldName: 'docNumber' });
      // Drop the index so the next ensureIndex actually rebuilds it
      delete d.indexes.docNumber;
    }

    console.log("Average time for one ensureIndex: " + (profiler.elapsedSinceLastStep() / n) + "ms");
    profiler.step('Finished calling ensureIndex ' + n + ' times');
    // BUGFIX: the waterfall task must call its callback, otherwise the
    // final "Benchmark finished" handler below is never reached
    return cb();
  }
], function (err) {
  profiler.step("Benchmark finished");

  if (err) { return console.log("An error was encountered: ", err); }
});
|
||||
|
@ -0,0 +1,30 @@
|
||||
/**
 * Benchmark: throughput of Datastore#find on single-document queries.
 */
var Datastore = require('../lib/datastore');
var fs = require('fs');
var path = require('path');
var async = require('async');
var execTime = require('exec-time');
var commonUtilities = require('./commonUtilities');

var benchDb = 'workspace/find.bench.db';
var profiler = new execTime('FIND BENCH');
var config = commonUtilities.getConfiguration(benchDb);
var d = config.d;
var n = config.n;

async.waterfall([
  async.apply(commonUtilities.prepareDb, benchDb)
, function (cb) {
    d.loadDatabase(function (err) {
      if (err) { return cb(err); }
      if (config.program.withIndex) { d.ensureIndex({ fieldName: 'docNumber' }); }
      cb();
    });
  }
, function (cb) { profiler.beginProfiling(); return cb(); }
, async.apply(commonUtilities.insertDocs, d, n, profiler)
, async.apply(commonUtilities.findDocs, d, n, profiler)
], function (err) {
  profiler.step("Benchmark finished");

  if (err) { return console.log("An error was encountered: ", err); }
});
|
@ -0,0 +1,31 @@
|
||||
/**
 * Benchmark: throughput of Datastore#findOne on single-document queries.
 */
var Datastore = require('../lib/datastore');
var fs = require('fs');
var path = require('path');
var async = require('async');
var execTime = require('exec-time');
var commonUtilities = require('./commonUtilities');

var benchDb = 'workspace/findOne.bench.db';
var profiler = new execTime('FINDONE BENCH');
var config = commonUtilities.getConfiguration(benchDb);
var d = config.d;
var n = config.n;

async.waterfall([
  async.apply(commonUtilities.prepareDb, benchDb)
, function (cb) {
    d.loadDatabase(function (err) {
      if (err) { return cb(err); }
      if (config.program.withIndex) { d.ensureIndex({ fieldName: 'docNumber' }); }
      cb();
    });
  }
, function (cb) { profiler.beginProfiling(); return cb(); }
, async.apply(commonUtilities.insertDocs, d, n, profiler)
  // Short pause between inserting and reading
  // NOTE(review): the reason for this delay is not documented upstream
, function (cb) { setTimeout(function () {cb();}, 500); }
, async.apply(commonUtilities.findOneDocs, d, n, profiler)
], function (err) {
  profiler.step("Benchmark finished");

  if (err) { return console.log("An error was encountered: ", err); }
});
|
@ -0,0 +1,30 @@
|
||||
/**
 * Benchmark: throughput of Datastore#find using the $in operator.
 */
var Datastore = require('../lib/datastore');
var fs = require('fs');
var path = require('path');
var async = require('async');
var execTime = require('exec-time');
var commonUtilities = require('./commonUtilities');

var benchDb = 'workspace/find.bench.db';
var profiler = new execTime('FIND BENCH');
var config = commonUtilities.getConfiguration(benchDb);
var d = config.d;
var n = config.n;

async.waterfall([
  async.apply(commonUtilities.prepareDb, benchDb)
, function (cb) {
    d.loadDatabase(function (err) {
      if (err) { return cb(err); }
      if (config.program.withIndex) { d.ensureIndex({ fieldName: 'docNumber' }); }
      cb();
    });
  }
, function (cb) { profiler.beginProfiling(); return cb(); }
, async.apply(commonUtilities.insertDocs, d, n, profiler)
, async.apply(commonUtilities.findDocsWithIn, d, n, profiler)
], function (err) {
  profiler.step("Benchmark finished");

  if (err) { return console.log("An error was encountered: ", err); }
});
|
@ -0,0 +1,33 @@
|
||||
/**
 * Benchmark: throughput of Datastore#insert.
 * With -i, the collection size is doubled because inserting into an indexed
 * collection slows down as it grows, giving a slightly pessimistic figure.
 */
var Datastore = require('../lib/datastore')
  , benchDb = 'workspace/insert.bench.db'
  , async = require('async')
  , execTime = require('exec-time')
  , profiler = new execTime('INSERT BENCH')
  , commonUtilities = require('./commonUtilities')
  , config = commonUtilities.getConfiguration(benchDb)
  , d = config.d
  , n = config.n
  ;

async.waterfall([
  async.apply(commonUtilities.prepareDb, benchDb)
, function (cb) {
    d.loadDatabase(function (err) {
      if (err) { return cb(err); }
      if (config.program.withIndex) {
        d.ensureIndex({ fieldName: 'docNumber' });
        n = 2 * n;   // We will actually insert twice as many documents
                     // because the index is slower when the collection is already
                     // big. So the result given by the algorithm will be a bit worse than
                     // actual performance
      }
      cb();
    });
  }
, function (cb) { profiler.beginProfiling(); return cb(); }
  // BUGFIX: async.apply would capture n BEFORE the doubling above runs;
  // a closure reads the current value of n instead
, function (cb) { commonUtilities.insertDocs(d, n, profiler, cb); }
], function (err) {
  profiler.step("Benchmark finished");

  if (err) { return console.log("An error was encountered: ", err); }
});
|
@ -0,0 +1,38 @@
|
||||
var Datastore = require('../lib/datastore')
|
||||
, benchDb = 'workspace/loaddb.bench.db'
|
||||
, fs = require('fs')
|
||||
, path = require('path')
|
||||
, async = require('async')
|
||||
, commonUtilities = require('./commonUtilities')
|
||||
, execTime = require('exec-time')
|
||||
, profiler = new execTime('LOADDB BENCH')
|
||||
, d = new Datastore(benchDb)
|
||||
, program = require('commander')
|
||||
, n
|
||||
;
|
||||
|
||||
program
|
||||
.option('-n --number [number]', 'Size of the collection to test on', parseInt)
|
||||
.option('-i --with-index', 'Test with an index')
|
||||
.parse(process.argv);
|
||||
|
||||
n = program.number || 10000;
|
||||
|
||||
console.log("----------------------------");
|
||||
console.log("Test with " + n + " documents");
|
||||
console.log(program.withIndex ? "Use an index" : "Don't use an index");
|
||||
console.log("----------------------------");
|
||||
|
||||
async.waterfall([
|
||||
async.apply(commonUtilities.prepareDb, benchDb)
|
||||
, function (cb) {
|
||||
d.loadDatabase(cb);
|
||||
}
|
||||
, function (cb) { profiler.beginProfiling(); return cb(); }
|
||||
, async.apply(commonUtilities.insertDocs, d, n, profiler)
|
||||
, async.apply(commonUtilities.loadDatabase, d, n, profiler)
|
||||
], function (err) {
|
||||
profiler.step("Benchmark finished");
|
||||
|
||||
if (err) { return console.log("An error was encountered: ", err); }
|
||||
});
|
@ -0,0 +1,39 @@
|
||||
var Datastore = require('../lib/datastore')
|
||||
, benchDb = 'workspace/remove.bench.db'
|
||||
, fs = require('fs')
|
||||
, path = require('path')
|
||||
, async = require('async')
|
||||
, execTime = require('exec-time')
|
||||
, profiler = new execTime('REMOVE BENCH')
|
||||
, commonUtilities = require('./commonUtilities')
|
||||
, config = commonUtilities.getConfiguration(benchDb)
|
||||
, d = config.d
|
||||
, n = config.n
|
||||
;
|
||||
|
||||
async.waterfall([
|
||||
async.apply(commonUtilities.prepareDb, benchDb)
|
||||
, function (cb) {
|
||||
d.loadDatabase(function (err) {
|
||||
if (err) { return cb(err); }
|
||||
if (config.program.withIndex) { d.ensureIndex({ fieldName: 'docNumber' }); }
|
||||
cb();
|
||||
});
|
||||
}
|
||||
, function (cb) { profiler.beginProfiling(); return cb(); }
|
||||
, async.apply(commonUtilities.insertDocs, d, n, profiler)
|
||||
|
||||
// Test with remove only one document
|
||||
, function (cb) { profiler.step('MULTI: FALSE'); return cb(); }
|
||||
, async.apply(commonUtilities.removeDocs, { multi: false }, d, n, profiler)
|
||||
|
||||
// Test with multiple documents
|
||||
, function (cb) { d.remove({}, { multi: true }, function () { return cb(); }); }
|
||||
, async.apply(commonUtilities.insertDocs, d, n, profiler)
|
||||
, function (cb) { profiler.step('MULTI: TRUE'); return cb(); }
|
||||
, async.apply(commonUtilities.removeDocs, { multi: true }, d, n, profiler)
|
||||
], function (err) {
|
||||
profiler.step("Benchmark finished");
|
||||
|
||||
if (err) { return console.log("An error was encountered: ", err); }
|
||||
});
|
@ -0,0 +1,39 @@
|
||||
var Datastore = require('../lib/datastore')
|
||||
, benchDb = 'workspace/update.bench.db'
|
||||
, fs = require('fs')
|
||||
, path = require('path')
|
||||
, async = require('async')
|
||||
, execTime = require('exec-time')
|
||||
, profiler = new execTime('UPDATE BENCH')
|
||||
, commonUtilities = require('./commonUtilities')
|
||||
, config = commonUtilities.getConfiguration(benchDb)
|
||||
, d = config.d
|
||||
, n = config.n
|
||||
;
|
||||
|
||||
async.waterfall([
|
||||
async.apply(commonUtilities.prepareDb, benchDb)
|
||||
, function (cb) {
|
||||
d.loadDatabase(function (err) {
|
||||
if (err) { return cb(err); }
|
||||
if (config.program.withIndex) { d.ensureIndex({ fieldName: 'docNumber' }); }
|
||||
cb();
|
||||
});
|
||||
}
|
||||
, function (cb) { profiler.beginProfiling(); return cb(); }
|
||||
, async.apply(commonUtilities.insertDocs, d, n, profiler)
|
||||
|
||||
// Test with update only one document
|
||||
, function (cb) { profiler.step('MULTI: FALSE'); return cb(); }
|
||||
, async.apply(commonUtilities.updateDocs, { multi: false }, d, n, profiler)
|
||||
|
||||
// Test with multiple documents
|
||||
, function (cb) { d.remove({}, { multi: true }, function (err) { return cb(); }); }
|
||||
, async.apply(commonUtilities.insertDocs, d, n, profiler)
|
||||
, function (cb) { profiler.step('MULTI: TRUE'); return cb(); }
|
||||
, async.apply(commonUtilities.updateDocs, { multi: true }, d, n, profiler)
|
||||
], function (err) {
|
||||
profiler.step("Benchmark finished");
|
||||
|
||||
if (err) { return console.log("An error was encountered: ", err); }
|
||||
});
|
@ -0,0 +1,78 @@
|
||||
/**
|
||||
* Specific customUtils for the browser, where we don't have access to the Crypto and Buffer modules
|
||||
*/
|
||||
|
||||
/**
|
||||
* Taken from the crypto-browserify module
|
||||
* https://github.com/dominictarr/crypto-browserify
|
||||
* NOTE: Math.random() does not guarantee "cryptographic quality" but we actually don't need it
|
||||
*/
|
||||
function randomBytes (size) {
|
||||
var bytes = new Array(size);
|
||||
var r;
|
||||
|
||||
for (var i = 0, r; i < size; i++) {
|
||||
if ((i & 0x03) == 0) r = Math.random() * 0x100000000;
|
||||
bytes[i] = r >>> ((i & 0x03) << 3) & 0xff;
|
||||
}
|
||||
|
||||
return bytes;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Taken from the base64-js module
|
||||
* https://github.com/beatgammit/base64-js/
|
||||
*/
|
||||
function byteArrayToBase64 (uint8) {
|
||||
var lookup = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'
|
||||
, extraBytes = uint8.length % 3 // if we have 1 byte left, pad 2 bytes
|
||||
, output = ""
|
||||
, temp, length, i;
|
||||
|
||||
function tripletToBase64 (num) {
|
||||
return lookup[num >> 18 & 0x3F] + lookup[num >> 12 & 0x3F] + lookup[num >> 6 & 0x3F] + lookup[num & 0x3F];
|
||||
};
|
||||
|
||||
// go through the array every three bytes, we'll deal with trailing stuff later
|
||||
for (i = 0, length = uint8.length - extraBytes; i < length; i += 3) {
|
||||
temp = (uint8[i] << 16) + (uint8[i + 1] << 8) + (uint8[i + 2]);
|
||||
output += tripletToBase64(temp);
|
||||
}
|
||||
|
||||
// pad the end with zeros, but make sure to not forget the extra bytes
|
||||
switch (extraBytes) {
|
||||
case 1:
|
||||
temp = uint8[uint8.length - 1];
|
||||
output += lookup[temp >> 2];
|
||||
output += lookup[(temp << 4) & 0x3F];
|
||||
output += '==';
|
||||
break;
|
||||
case 2:
|
||||
temp = (uint8[uint8.length - 2] << 8) + (uint8[uint8.length - 1]);
|
||||
output += lookup[temp >> 10];
|
||||
output += lookup[(temp >> 4) & 0x3F];
|
||||
output += lookup[(temp << 2) & 0x3F];
|
||||
output += '=';
|
||||
break;
|
||||
}
|
||||
|
||||
return output;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Return a random alphanumerical string of length len
|
||||
* There is a very small probability (less than 1/1,000,000) for the length to be less than len
|
||||
* (il the base64 conversion yields too many pluses and slashes) but
|
||||
* that's not an issue here
|
||||
* The probability of a collision is extremely small (need 3*10^12 documents to have one chance in a million of a collision)
|
||||
* See http://en.wikipedia.org/wiki/Birthday_problem
|
||||
*/
|
||||
function uid (len) {
|
||||
return byteArrayToBase64(randomBytes(Math.ceil(Math.max(8, len * 2)))).replace(/[+\/]/g, '').slice(0, len);
|
||||
}
|
||||
|
||||
|
||||
|
||||
module.exports.uid = uid;
|
@ -0,0 +1,96 @@
|
||||
/**
|
||||
* Way data is stored for this database
|
||||
* For a Node.js/Node Webkit database it's the file system
|
||||
* For a browser-side database it's localStorage when supported
|
||||
*
|
||||
* This version is the Node.js/Node Webkit version
|
||||
*/
|
||||
|
||||
|
||||
|
||||
function exists (filename, callback) {
|
||||
// In this specific case this always answers that the file doesn't exist
|
||||
if (typeof localStorage === 'undefined') { console.log("WARNING - This browser doesn't support localStorage, no data will be saved in NeDB!"); return callback(); }
|
||||
|
||||
if (localStorage.getItem(filename) !== null) {
|
||||
return callback(true);
|
||||
} else {
|
||||
return callback(false);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
function rename (filename, newFilename, callback) {
|
||||
if (typeof localStorage === 'undefined') { console.log("WARNING - This browser doesn't support localStorage, no data will be saved in NeDB!"); return callback(); }
|
||||
|
||||
if (localStorage.getItem(filename) === null) {
|
||||
localStorage.removeItem(newFilename);
|
||||
} else {
|
||||
localStorage.setItem(newFilename, localStorage.getItem(filename));
|
||||
localStorage.removeItem(filename);
|
||||
}
|
||||
|
||||
return callback();
|
||||
}
|
||||
|
||||
|
||||
function writeFile (filename, contents, options, callback) {
|
||||
if (typeof localStorage === 'undefined') { console.log("WARNING - This browser doesn't support localStorage, no data will be saved in NeDB!"); return callback(); }
|
||||
|
||||
// Options do not matter in browser setup
|
||||
if (typeof options === 'function') { callback = options; }
|
||||
|
||||
localStorage.setItem(filename, contents);
|
||||
return callback();
|
||||
}
|
||||
|
||||
|
||||
function appendFile (filename, toAppend, options, callback) {
|
||||
if (typeof localStorage === 'undefined') { console.log("WARNING - This browser doesn't support localStorage, no data will be saved in NeDB!"); return callback(); }
|
||||
|
||||
// Options do not matter in browser setup
|
||||
if (typeof options === 'function') { callback = options; }
|
||||
|
||||
var contents = localStorage.getItem(filename) || '';
|
||||
contents += toAppend;
|
||||
|
||||
localStorage.setItem(filename, contents);
|
||||
return callback();
|
||||
}
|
||||
|
||||
|
||||
function readFile (filename, options, callback) {
|
||||
if (typeof localStorage === 'undefined') { console.log("WARNING - This browser doesn't support localStorage, no data will be saved in NeDB!"); return callback(); }
|
||||
|
||||
// Options do not matter in browser setup
|
||||
if (typeof options === 'function') { callback = options; }
|
||||
|
||||
var contents = localStorage.getItem(filename) || '';
|
||||
return callback(null, contents);
|
||||
}
|
||||
|
||||
|
||||
function unlink (filename, callback) {
|
||||
if (typeof localStorage === 'undefined') { console.log("WARNING - This browser doesn't support localStorage, no data will be saved in NeDB!"); return callback(); }
|
||||
|
||||
localStorage.removeItem(filename);
|
||||
return callback();
|
||||
}
|
||||
|
||||
|
||||
// Nothing done, no directories will be used on the browser
|
||||
function mkdirp (dir, callback) {
|
||||
return callback();
|
||||
}
|
||||
|
||||
|
||||
|
||||
// Interface
|
||||
module.exports.exists = exists;
|
||||
module.exports.rename = rename;
|
||||
module.exports.writeFile = writeFile;
|
||||
module.exports.appendFile = appendFile;
|
||||
module.exports.readFile = readFile;
|
||||
module.exports.unlink = unlink;
|
||||
module.exports.mkdirp = mkdirp;
|
||||
|
@ -0,0 +1,101 @@
|
||||
/**
|
||||
* Build the browser version of nedb
|
||||
*/
|
||||
|
||||
var fs = require('fs')
|
||||
, path = require('path')
|
||||
, child_process = require('child_process')
|
||||
, toCopy = ['lib', 'node_modules']
|
||||
, async, browserify, uglify
|
||||
;
|
||||
|
||||
// Ensuring both node_modules (the source one and build one), src and out directories exist
|
||||
function ensureDirExists (name) {
|
||||
try {
|
||||
fs.mkdirSync(path.join(__dirname, name));
|
||||
} catch (e) {
|
||||
if (e.code !== 'EEXIST') {
|
||||
console.log("Error ensuring that node_modules exists");
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
}
|
||||
ensureDirExists('../node_modules');
|
||||
ensureDirExists('node_modules');
|
||||
ensureDirExists('out');
|
||||
ensureDirExists('src');
|
||||
|
||||
|
||||
// Installing build dependencies and require them
|
||||
console.log("Installing build dependencies");
|
||||
child_process.exec('npm install', { cwd: __dirname }, function (err, stdout, stderr) {
|
||||
if (err) { console.log("Error reinstalling dependencies"); process.exit(1); }
|
||||
|
||||
fs = require('fs-extra');
|
||||
async = require('async');
|
||||
browserify = require('browserify');
|
||||
uglify = require('uglify-js');
|
||||
|
||||
async.waterfall([
|
||||
function (cb) {
|
||||
console.log("Installing source dependencies if needed");
|
||||
|
||||
child_process.exec('npm install', { cwd: path.join(__dirname, '..') }, function (err) { return cb(err); });
|
||||
}
|
||||
, function (cb) {
|
||||
console.log("Removing contents of the src directory");
|
||||
|
||||
async.eachSeries(fs.readdirSync(path.join(__dirname, 'src')), function (item, _cb) {
|
||||
fs.remove(path.join(__dirname, 'src', item), _cb);
|
||||
}, cb);
|
||||
}
|
||||
, function (cb) {
|
||||
console.log("Copying source files");
|
||||
|
||||
async.eachSeries(toCopy, function (item, _cb) {
|
||||
fs.copy(path.join(__dirname, '..', item), path.join(__dirname, 'src', item), _cb);
|
||||
}, cb);
|
||||
}
|
||||
, function (cb) {
|
||||
console.log("Copying browser specific files to replace their server-specific counterparts");
|
||||
|
||||
async.eachSeries(fs.readdirSync(path.join(__dirname, 'browser-specific')), function (item, _cb) {
|
||||
fs.copy(path.join(__dirname, 'browser-specific', item), path.join(__dirname, 'src', item), _cb);
|
||||
}, cb);
|
||||
}
|
||||
, function (cb) {
|
||||
console.log("Browserifying the code");
|
||||
|
||||
var b = browserify()
|
||||
, srcPath = path.join(__dirname, 'src/lib/datastore.js');
|
||||
|
||||
b.add(srcPath);
|
||||
b.bundle({ standalone: 'Nedb' }, function (err, out) {
|
||||
if (err) { return cb(err); }
|
||||
fs.writeFile(path.join(__dirname, 'out/nedb.js'), out, 'utf8', function (err) {
|
||||
if (err) {
|
||||
return cb(err);
|
||||
} else {
|
||||
return cb(null, out);
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
, function (out, cb) {
|
||||
console.log("Creating the minified version");
|
||||
|
||||
var compressedCode = uglify.minify(out, { fromString: true });
|
||||
fs.writeFile(path.join(__dirname, 'out/nedb.min.js'), compressedCode.code, 'utf8', cb);
|
||||
}
|
||||
], function (err) {
|
||||
if (err) {
|
||||
console.log("Error during build");
|
||||
console.log(err);
|
||||
} else {
|
||||
console.log("Build finished with success");
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
|
File diff suppressed because it is too large
Load Diff
File diff suppressed because one or more lines are too long
@ -0,0 +1,8 @@
|
||||
{
|
||||
"dependencies": {
|
||||
"async": "~0.2.9",
|
||||
"fs-extra": "~0.6.3",
|
||||
"uglify-js": "~2.3.6",
|
||||
"browserify": "~2.25.0"
|
||||
}
|
||||
}
|
@ -0,0 +1,23 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<title>Mocha tests for NeDB</title>
|
||||
<link rel="stylesheet" href="mocha.css">
|
||||
</head>
|
||||
<body>
|
||||
<div id="mocha"></div>
|
||||
<script src="http://code.jquery.com/jquery-latest.min.js"></script>
|
||||
<script src="http://chaijs.com/chai.js"></script>
|
||||
<script src="http://underscorejs.org/underscore-min.js"></script>
|
||||
<script src="mocha.js"></script>
|
||||
<script>mocha.setup('bdd')</script>
|
||||
<script src="../out/nedb.min.js"></script>
|
||||
<script src="nedb-browser.js"></script>
|
||||
<script>
|
||||
mocha.checkLeaks();
|
||||
mocha.globals(['jQuery']);
|
||||
mocha.run();
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
@ -0,0 +1,199 @@
|
||||
@charset "UTF-8";
|
||||
body {
|
||||
font: 20px/1.5 "Helvetica Neue", Helvetica, Arial, sans-serif;
|
||||
padding: 60px 50px;
|
||||
}
|
||||
|
||||
#mocha ul, #mocha li {
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
#mocha ul {
|
||||
list-style: none;
|
||||
}
|
||||
|
||||
#mocha h1, #mocha h2 {
|
||||
margin: 0;
|
||||
}
|
||||
|
||||
#mocha h1 {
|
||||
margin-top: 15px;
|
||||
font-size: 1em;
|
||||
font-weight: 200;
|
||||
}
|
||||
|
||||
#mocha h1 a {
|
||||
text-decoration: none;
|
||||
color: inherit;
|
||||
}
|
||||
|
||||
#mocha h1 a:hover {
|
||||
text-decoration: underline;
|
||||
}
|
||||
|
||||
#mocha .suite .suite h1 {
|
||||
margin-top: 0;
|
||||
font-size: .8em;
|
||||
}
|
||||
|
||||
#mocha h2 {
|
||||
font-size: 12px;
|
||||
font-weight: normal;
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
#mocha .suite {
|
||||
margin-left: 15px;
|
||||
}
|
||||
|
||||
#mocha .test {
|
||||
margin-left: 15px;
|
||||
}
|
||||
|
||||
#mocha .test:hover h2::after {
|
||||
position: relative;
|
||||
top: 0;
|
||||
right: -10px;
|
||||
content: '(view source)';
|
||||
font-size: 12px;
|
||||
font-family: arial;
|
||||
color: #888;
|
||||
}
|
||||
|
||||
#mocha .test.pending:hover h2::after {
|
||||
content: '(pending)';
|
||||
font-family: arial;
|
||||
}
|
||||
|
||||
#mocha .test.pass.medium .duration {
|
||||
background: #C09853;
|
||||
}
|
||||
|
||||
#mocha .test.pass.slow .duration {
|
||||
background: #B94A48;
|
||||
}
|
||||
|
||||
#mocha .test.pass::before {
|
||||
content: '✓';
|
||||
font-size: 12px;
|
||||
display: block;
|
||||
float: left;
|
||||
margin-right: 5px;
|
||||
color: #00d6b2;
|
||||
}
|
||||
|
||||
#mocha .test.pass .duration {
|
||||
font-size: 9px;
|
||||
margin-left: 5px;
|
||||
padding: 2px 5px;
|
||||
color: white;
|
||||
-webkit-box-shadow: inset 0 1px 1px rgba(0,0,0,.2);
|
||||
-moz-box-shadow: inset 0 1px 1px rgba(0,0,0,.2);
|
||||
box-shadow: inset 0 1px 1px rgba(0,0,0,.2);
|
||||
-webkit-border-radius: 5px;
|
||||
-moz-border-radius: 5px;
|
||||
-ms-border-radius: 5px;
|
||||
-o-border-radius: 5px;
|
||||
border-radius: 5px;
|
||||
}
|
||||
|
||||
#mocha .test.pass.fast .duration {
|
||||
display: none;
|
||||
}
|
||||
|
||||
#mocha .test.pending {
|
||||
color: #0b97c4;
|
||||
}
|
||||
|
||||
#mocha .test.pending::before {
|
||||
content: '◦';
|
||||
color: #0b97c4;
|
||||
}
|
||||
|
||||
#mocha .test.fail {
|
||||
color: #c00;
|
||||
}
|
||||
|
||||
#mocha .test.fail pre {
|
||||
color: black;
|
||||
}
|
||||
|
||||
#mocha .test.fail::before {
|
||||
content: '✖';
|
||||
font-size: 12px;
|
||||
display: block;
|
||||
float: left;
|
||||
margin-right: 5px;
|
||||
color: #c00;
|
||||
}
|
||||
|
||||
#mocha .test pre.error {
|
||||
color: #c00;
|
||||
}
|
||||
|
||||
#mocha .test pre {
|
||||
display: inline-block;
|
||||
font: 12px/1.5 monaco, monospace;
|
||||
margin: 5px;
|
||||
padding: 15px;
|
||||
border: 1px solid #eee;
|
||||
border-bottom-color: #ddd;
|
||||
-webkit-border-radius: 3px;
|
||||
-webkit-box-shadow: 0 1px 3px #eee;
|
||||
}
|
||||
|
||||
#report.pass .test.fail {
|
||||
display: none;
|
||||
}
|
||||
|
||||
#report.fail .test.pass {
|
||||
display: none;
|
||||
}
|
||||
|
||||
#error {
|
||||
color: #c00;
|
||||
font-size: 1.5 em;
|
||||
font-weight: 100;
|
||||
letter-spacing: 1px;
|
||||
}
|
||||
|
||||
#stats {
|
||||
position: fixed;
|
||||
top: 15px;
|
||||
right: 10px;
|
||||
font-size: 12px;
|
||||
margin: 0;
|
||||
color: #888;
|
||||
}
|
||||
|
||||
#stats .progress {
|
||||
float: right;
|
||||
padding-top: 0;
|
||||
}
|
||||
|
||||
#stats em {
|
||||
color: black;
|
||||
}
|
||||
|
||||
#stats a {
|
||||
text-decoration: none;
|
||||
color: inherit;
|
||||
}
|
||||
|
||||
#stats a:hover {
|
||||
border-bottom: 1px solid #eee;
|
||||
}
|
||||
|
||||
#stats li {
|
||||
display: inline-block;
|
||||
margin: 0 5px;
|
||||
list-style: none;
|
||||
padding-top: 11px;
|
||||
}
|
||||
|
||||
code .comment { color: #ddd }
|
||||
code .init { color: #2F6FAD }
|
||||
code .string { color: #5890AD }
|
||||
code .keyword { color: #8A6343 }
|
||||
code .number { color: #2F6FAD }
|
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,298 @@
|
||||
/**
|
||||
* Testing the browser version of NeDB
|
||||
* The goal of these tests is not to be exhaustive, we have the server-side NeDB tests for that
|
||||
* This is more of a sanity check which executes most of the code at least once and checks
|
||||
* it behaves as the server version does
|
||||
*/
|
||||
|
||||
var assert = chai.assert;
|
||||
|
||||
/**
|
||||
* Given a docs array and an id, return the document whose id matches, or null if none is found
|
||||
*/
|
||||
function findById (docs, id) {
|
||||
return _.find(docs, function (doc) { return doc._id === id; }) || null;
|
||||
}
|
||||
|
||||
|
||||
describe('Basic CRUD functionality', function () {
|
||||
|
||||
it('Able to create a database object in the browser', function () {
|
||||
var db = new Nedb();
|
||||
|
||||
assert.equal(db.inMemoryOnly, true);
|
||||
assert.equal(db.persistence.inMemoryOnly, true);
|
||||
});
|
||||
|
||||
it('Insertion and querying', function (done) {
|
||||
var db = new Nedb();
|
||||
|
||||
db.insert({ a: 4 }, function (err, newDoc1) {
|
||||
assert.isNull(err);
|
||||
db.insert({ a: 40 }, function (err, newDoc2) {
|
||||
assert.isNull(err);
|
||||
db.insert({ a: 400 }, function (err, newDoc3) {
|
||||
assert.isNull(err);
|
||||
|
||||
db.find({ a: { $gt: 36 } }, function (err, docs) {
|
||||
var doc2 = _.find(docs, function (doc) { return doc._id === newDoc2._id; })
|
||||
, doc3 = _.find(docs, function (doc) { return doc._id === newDoc3._id; })
|
||||
;
|
||||
|
||||
assert.isNull(err);
|
||||
assert.equal(docs.length, 2);
|
||||
assert.equal(doc2.a, 40);
|
||||
assert.equal(doc3.a, 400);
|
||||
|
||||
db.find({ a: { $lt: 36 } }, function (err, docs) {
|
||||
assert.isNull(err);
|
||||
assert.equal(docs.length, 1);
|
||||
assert.equal(docs[0].a, 4);
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('Querying with regular expressions', function (done) {
|
||||
var db = new Nedb();
|
||||
|
||||
db.insert({ planet: 'Earth' }, function (err, newDoc1) {
|
||||
assert.isNull(err);
|
||||
db.insert({ planet: 'Mars' }, function (err, newDoc2) {
|
||||
assert.isNull(err);
|
||||
db.insert({ planet: 'Jupiter' }, function (err, newDoc3) {
|
||||
assert.isNull(err);
|
||||
db.insert({ planet: 'Eaaaaaarth' }, function (err, newDoc4) {
|
||||
assert.isNull(err);
|
||||
db.insert({ planet: 'Maaaars' }, function (err, newDoc5) {
|
||||
assert.isNull(err);
|
||||
|
||||
db.find({ planet: /ar/ }, function (err, docs) {
|
||||
assert.isNull(err);
|
||||
assert.equal(docs.length, 4);
|
||||
assert.equal(_.find(docs, function (doc) { return doc._id === newDoc1._id; }).planet, 'Earth');
|
||||
assert.equal(_.find(docs, function (doc) { return doc._id === newDoc2._id; }).planet, 'Mars');
|
||||
assert.equal(_.find(docs, function (doc) { return doc._id === newDoc4._id; }).planet, 'Eaaaaaarth');
|
||||
assert.equal(_.find(docs, function (doc) { return doc._id === newDoc5._id; }).planet, 'Maaaars');
|
||||
|
||||
db.find({ planet: /aa+r/ }, function (err, docs) {
|
||||
assert.isNull(err);
|
||||
assert.equal(docs.length, 2);
|
||||
assert.equal(_.find(docs, function (doc) { return doc._id === newDoc4._id; }).planet, 'Eaaaaaarth');
|
||||
assert.equal(_.find(docs, function (doc) { return doc._id === newDoc5._id; }).planet, 'Maaaars');
|
||||
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('Updating documents', function (done) {
|
||||
var db = new Nedb();
|
||||
|
||||
db.insert({ planet: 'Eaaaaarth' }, function (err, newDoc1) {
|
||||
db.insert({ planet: 'Maaaaars' }, function (err, newDoc2) {
|
||||
// Simple update
|
||||
db.update({ _id: newDoc2._id }, { $set: { planet: 'Saturn' } }, {}, function (err, nr) {
|
||||
assert.isNull(err);
|
||||
assert.equal(nr, 1);
|
||||
|
||||
db.find({}, function (err, docs) {
|
||||
assert.equal(docs.length, 2);
|
||||
assert.equal(findById(docs, newDoc1._id).planet, 'Eaaaaarth');
|
||||
assert.equal(findById(docs, newDoc2._id).planet, 'Saturn');
|
||||
|
||||
// Failing update
|
||||
db.update({ _id: 'unknown' }, { $inc: { count: 1 } }, {}, function (err, nr) {
|
||||
assert.isNull(err);
|
||||
assert.equal(nr, 0);
|
||||
|
||||
db.find({}, function (err, docs) {
|
||||
assert.equal(docs.length, 2);
|
||||
assert.equal(findById(docs, newDoc1._id).planet, 'Eaaaaarth');
|
||||
assert.equal(findById(docs, newDoc2._id).planet, 'Saturn');
|
||||
|
||||
// Document replacement
|
||||
db.update({ planet: 'Eaaaaarth' }, { planet: 'Uranus' }, { multi: false }, function (err, nr) {
|
||||
assert.isNull(err);
|
||||
assert.equal(nr, 1);
|
||||
|
||||
db.find({}, function (err, docs) {
|
||||
assert.equal(docs.length, 2);
|
||||
assert.equal(findById(docs, newDoc1._id).planet, 'Uranus');
|
||||
assert.equal(findById(docs, newDoc2._id).planet, 'Saturn');
|
||||
|
||||
// Multi update
|
||||
db.update({}, { $inc: { count: 3 } }, { multi: true }, function (err, nr) {
|
||||
assert.isNull(err);
|
||||
assert.equal(nr, 2);
|
||||
|
||||
db.find({}, function (err, docs) {
|
||||
assert.equal(docs.length, 2);
|
||||
assert.equal(findById(docs, newDoc1._id).planet, 'Uranus');
|
||||
assert.equal(findById(docs, newDoc1._id).count, 3);
|
||||
assert.equal(findById(docs, newDoc2._id).planet, 'Saturn');
|
||||
assert.equal(findById(docs, newDoc2._id).count, 3);
|
||||
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('Updating documents: special modifiers', function (done) {
|
||||
var db = new Nedb();
|
||||
|
||||
db.insert({ planet: 'Earth' }, function (err, newDoc1) {
|
||||
// Pushing to an array
|
||||
db.update({}, { $push: { satellites: 'Phobos' } }, {}, function (err, nr) {
|
||||
assert.isNull(err);
|
||||
assert.equal(nr, 1);
|
||||
|
||||
db.findOne({}, function (err, doc) {
|
||||
assert.deepEqual(doc, { planet: 'Earth', _id: newDoc1._id, satellites: ['Phobos'] });
|
||||
|
||||
db.update({}, { $push: { satellites: 'Deimos' } }, {}, function (err, nr) {
|
||||
assert.isNull(err);
|
||||
assert.equal(nr, 1);
|
||||
|
||||
db.findOne({}, function (err, doc) {
|
||||
assert.deepEqual(doc, { planet: 'Earth', _id: newDoc1._id, satellites: ['Phobos', 'Deimos'] });
|
||||
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('Upserts', function (done) {
|
||||
var db = new Nedb();
|
||||
|
||||
db.update({ a: 4 }, { $inc: { b: 1 } }, { upsert: true }, function (err, nr, upsert) {
|
||||
assert.isNull(err);
|
||||
// Return upserted document
|
||||
assert.equal(upsert.a, 4);
|
||||
assert.equal(upsert.b, 1);
|
||||
assert.equal(nr, 1);
|
||||
|
||||
db.find({}, function (err, docs) {
|
||||
assert.equal(docs.length, 1);
|
||||
assert.equal(docs[0].a, 4);
|
||||
assert.equal(docs[0].b, 1);
|
||||
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('Removing documents', function (done) {
|
||||
var db = new Nedb();
|
||||
|
||||
db.insert({ a: 2 });
|
||||
db.insert({ a: 5 });
|
||||
db.insert({ a: 7 });
|
||||
|
||||
// Multi remove
|
||||
db.remove({ a: { $in: [ 5, 7 ] } }, { multi: true }, function (err, nr) {
|
||||
assert.isNull(err);
|
||||
assert.equal(nr, 2);
|
||||
|
||||
db.find({}, function (err, docs) {
|
||||
assert.equal(docs.length, 1);
|
||||
assert.equal(docs[0].a, 2);
|
||||
|
||||
// Remove with no match
|
||||
db.remove({ b: { $exists: true } }, { multi: true }, function (err, nr) {
|
||||
assert.isNull(err);
|
||||
assert.equal(nr, 0);
|
||||
|
||||
db.find({}, function (err, docs) {
|
||||
assert.equal(docs.length, 1);
|
||||
assert.equal(docs[0].a, 2);
|
||||
|
||||
// Simple remove
|
||||
db.remove({ a: { $exists: true } }, { multi: true }, function (err, nr) {
|
||||
assert.isNull(err);
|
||||
assert.equal(nr, 1);
|
||||
|
||||
db.find({}, function (err, docs) {
|
||||
assert.equal(docs.length, 0);
|
||||
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
}); // ==== End of 'Basic CRUD functionality' ==== //
|
||||
|
||||
|
||||
describe('Indexing', function () {
|
||||
|
||||
it('getCandidates works as expected', function (done) {
|
||||
var db = new Nedb();
|
||||
|
||||
db.insert({ a: 4 }, function () {
|
||||
db.insert({ a: 6 }, function () {
|
||||
db.insert({ a: 7 }, function () {
|
||||
var candidates = db.getCandidates({ a: 6 })
|
||||
assert.equal(candidates.length, 3);
|
||||
assert.isDefined(_.find(candidates, function (doc) { return doc.a === 4; }));
|
||||
assert.isDefined(_.find(candidates, function (doc) { return doc.a === 6; }));
|
||||
assert.isDefined(_.find(candidates, function (doc) { return doc.a === 7; }));
|
||||
|
||||
db.ensureIndex({ fieldName: 'a' });
|
||||
|
||||
candidates = db.getCandidates({ a: 6 })
|
||||
assert.equal(candidates.length, 1);
|
||||
assert.isDefined(_.find(candidates, function (doc) { return doc.a === 6; }));
|
||||
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('Can use indexes to enforce a unique constraint', function (done) {
|
||||
var db = new Nedb();
|
||||
|
||||
db.ensureIndex({ fieldName: 'u', unique: true });
|
||||
|
||||
db.insert({ u : 5 }, function (err) {
|
||||
assert.isNull(err);
|
||||
|
||||
db.insert({ u : 98 }, function (err) {
|
||||
assert.isNull(err);
|
||||
|
||||
db.insert({ u : 5 }, function (err) {
|
||||
assert.equal(err.errorType, 'uniqueViolated');
|
||||
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
}); // ==== End of 'Indexing' ==== //
|
||||
|
||||
|
||||
|
@ -0,0 +1,13 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<title>Test NeDB persistence in the browser</title>
|
||||
<link rel="stylesheet" href="mocha.css">
|
||||
</head>
|
||||
<body>
|
||||
<div id="results"></div>
|
||||
<script src="../out/nedb.js"></script>
|
||||
<script src="./testPersistence.js"></script>
|
||||
</body>
|
||||
</html>
|
@ -0,0 +1,18 @@
|
||||
console.log("Beginning tests");
|
||||
console.log("Please note these tests work on Chrome latest, might not work on other browsers due to discrepancies in how local storage works for the file:// protocol");
|
||||
|
||||
function testsFailed () {
|
||||
document.getElementById("results").innerHTML = "TESTS FAILED";
|
||||
}
|
||||
|
||||
localStorage.removeItem('test');
|
||||
var db = new Nedb({ filename: 'test', autoload: true });
|
||||
db.insert({ hello: 'world' }, function (err) {
|
||||
if (err) {
|
||||
testsFailed();
|
||||
return;
|
||||
}
|
||||
|
||||
window.location = './testPersistence2.html';
|
||||
});
|
||||
|
@ -0,0 +1,13 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<title>Test NeDB persistence in the browser - Results</title>
|
||||
<link rel="stylesheet" href="mocha.css">
|
||||
</head>
|
||||
<body>
|
||||
<div id="results"></div>
|
||||
<script src="../out/nedb.js"></script>
|
||||
<script src="./testPersistence2.js"></script>
|
||||
</body>
|
||||
</html>
|
@ -0,0 +1,27 @@
|
||||
console.log("Checking tests results");
|
||||
console.log("Please note these tests work on Chrome latest, might not work on other browsers due to discrepancies in how local storage works for the file:// protocol");
|
||||
|
||||
function testsFailed () {
|
||||
document.getElementById("results").innerHTML = "TESTS FAILED";
|
||||
}
|
||||
|
||||
var db = new Nedb({ filename: 'test', autoload: true });
|
||||
db.find({}, function (err, docs) {
|
||||
if (docs.length !== 1) {
|
||||
console.log("Unexpected length of document database");
|
||||
return testsFailed();
|
||||
}
|
||||
|
||||
if (Object.keys(docs[0]).length !== 2) {
|
||||
console.log("Unexpected length insert document in database");
|
||||
return testsFailed();
|
||||
}
|
||||
|
||||
if (docs[0].hello !== 'world') {
|
||||
console.log("Unexpected document");
|
||||
return testsFailed();
|
||||
}
|
||||
|
||||
document.getElementById("results").innerHTML = "BROWSER PERSISTENCE TEST PASSED";
|
||||
});
|
||||
|
@ -0,0 +1,3 @@
|
||||
var Datastore = require('./lib/datastore');
|
||||
|
||||
module.exports = Datastore;
|
@ -0,0 +1,185 @@
|
||||
/**
|
||||
* Manage access to data, be it to find, update or remove it
|
||||
*/
|
||||
var model = require('./model')
|
||||
, _ = require('underscore')
|
||||
;
|
||||
|
||||
|
||||
|
||||
/**
|
||||
* Create a new cursor for this collection
|
||||
* @param {Datastore} db - The datastore this cursor is bound to
|
||||
* @param {Query} query - The query this cursor will operate on
|
||||
* @param {Function} execDn - Handler to be executed after cursor has found the results and before the callback passed to find/findOne/update/remove
|
||||
*/
|
||||
function Cursor (db, query, execFn) {
|
||||
this.db = db;
|
||||
this.query = query || {};
|
||||
if (execFn) { this.execFn = execFn; }
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Set a limit to the number of results
|
||||
*/
|
||||
Cursor.prototype.limit = function(limit) {
|
||||
this._limit = limit;
|
||||
return this;
|
||||
};
|
||||
|
||||
|
||||
/**
|
||||
* Skip a the number of results
|
||||
*/
|
||||
Cursor.prototype.skip = function(skip) {
|
||||
this._skip = skip;
|
||||
return this;
|
||||
};
|
||||
|
||||
|
||||
/**
|
||||
* Sort results of the query
|
||||
* @param {SortQuery} sortQuery - SortQuery is { field: order }, field can use the dot-notation, order is 1 for ascending and -1 for descending
|
||||
*/
|
||||
Cursor.prototype.sort = function(sortQuery) {
|
||||
this._sort = sortQuery;
|
||||
return this;
|
||||
};
|
||||
|
||||
|
||||
/**
|
||||
* Add the use of a projection
|
||||
* @param {Object} projection - MongoDB-style projection. {} means take all fields. Then it's { key1: 1, key2: 1 } to take only key1 and key2
|
||||
* { key1: 0, key2: 0 } to omit only key1 and key2. Except _id, you can't mix takes and omits
|
||||
*/
|
||||
Cursor.prototype.projection = function(projection) {
|
||||
this._projection = projection;
|
||||
return this;
|
||||
};
|
||||
|
||||
|
||||
/**
|
||||
* Apply the projection
|
||||
*/
|
||||
Cursor.prototype.project = function (candidates) {
|
||||
var res = [], self = this
|
||||
, keepId, action, keys
|
||||
;
|
||||
|
||||
if (this._projection === undefined || Object.keys(this._projection).length === 0) {
|
||||
return candidates;
|
||||
}
|
||||
|
||||
keepId = this._projection._id === 0 ? false : true;
|
||||
this._projection = _.omit(this._projection, '_id');
|
||||
|
||||
// Check for consistency
|
||||
keys = Object.keys(this._projection);
|
||||
keys.forEach(function (k) {
|
||||
if (action !== undefined && self._projection[k] !== action) { throw "Can't both keep and omit fields except for _id"; }
|
||||
action = self._projection[k];
|
||||
});
|
||||
|
||||
// Do the actual projection
|
||||
candidates.forEach(function (candidate) {
|
||||
var toPush = action === 1 ? _.pick(candidate, keys) : _.omit(candidate, keys);
|
||||
if (keepId) {
|
||||
toPush._id = candidate._id;
|
||||
} else {
|
||||
delete toPush._id;
|
||||
}
|
||||
res.push(toPush);
|
||||
});
|
||||
|
||||
return res;
|
||||
};
|
||||
|
||||
|
||||
/**
|
||||
* Get all matching elements
|
||||
* Will return pointers to matched elements (shallow copies), returning full copies is the role of find or findOne
|
||||
* This is an internal function, use exec which uses the executor
|
||||
*
|
||||
* @param {Function} callback - Signature: err, results
|
||||
*/
|
||||
Cursor.prototype._exec = function(callback) {
|
||||
var candidates = this.db.getCandidates(this.query)
|
||||
, res = [], added = 0, skipped = 0, self = this
|
||||
, error = null
|
||||
, i, keys, key
|
||||
;
|
||||
|
||||
try {
|
||||
for (i = 0; i < candidates.length; i += 1) {
|
||||
if (model.match(candidates[i], this.query)) {
|
||||
// If a sort is defined, wait for the results to be sorted before applying limit and skip
|
||||
if (!this._sort) {
|
||||
if (this._skip && this._skip > skipped) {
|
||||
skipped += 1;
|
||||
} else {
|
||||
res.push(candidates[i]);
|
||||
added += 1;
|
||||
if (this._limit && this._limit <= added) { break; }
|
||||
}
|
||||
} else {
|
||||
res.push(candidates[i]);
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
return callback(err);
|
||||
}
|
||||
|
||||
// Apply all sorts
|
||||
if (this._sort) {
|
||||
keys = Object.keys(this._sort);
|
||||
|
||||
// Sorting
|
||||
var criteria = [];
|
||||
for (i = 0; i < keys.length; i++) {
|
||||
key = keys[i];
|
||||
criteria.push({ key: key, direction: self._sort[key] });
|
||||
}
|
||||
res.sort(function(a, b) {
|
||||
var criterion, compare, i;
|
||||
for (i = 0; i < criteria.length; i++) {
|
||||
criterion = criteria[i];
|
||||
compare = criterion.direction * model.compareThings(model.getDotValue(a, criterion.key), model.getDotValue(b, criterion.key));
|
||||
if (compare !== 0) {
|
||||
return compare;
|
||||
}
|
||||
}
|
||||
return 0;
|
||||
});
|
||||
|
||||
// Applying limit and skip
|
||||
var limit = this._limit || res.length
|
||||
, skip = this._skip || 0;
|
||||
|
||||
res = res.slice(skip, skip + limit);
|
||||
}
|
||||
|
||||
// Apply projection
|
||||
try {
|
||||
res = this.project(res);
|
||||
} catch (e) {
|
||||
error = e;
|
||||
res = undefined;
|
||||
}
|
||||
|
||||
if (this.execFn) {
|
||||
return this.execFn(error, res, callback);
|
||||
} else {
|
||||
return callback(error, res);
|
||||
}
|
||||
};
|
||||
|
||||
Cursor.prototype.exec = function () {
|
||||
this.db.executor.push({ this: this, fn: this._exec, arguments: arguments });
|
||||
};
|
||||
|
||||
|
||||
|
||||
// Interface
|
||||
module.exports = Cursor;
|
@ -0,0 +1,23 @@
|
||||
var crypto = require('crypto')
|
||||
, fs = require('fs')
|
||||
;
|
||||
|
||||
/**
|
||||
* Return a random alphanumerical string of length len
|
||||
* There is a very small probability (less than 1/1,000,000) for the length to be less than len
|
||||
* (il the base64 conversion yields too many pluses and slashes) but
|
||||
* that's not an issue here
|
||||
* The probability of a collision is extremely small (need 3*10^12 documents to have one chance in a million of a collision)
|
||||
* See http://en.wikipedia.org/wiki/Birthday_problem
|
||||
*/
|
||||
function uid (len) {
|
||||
return crypto.randomBytes(Math.ceil(Math.max(8, len * 2)))
|
||||
.toString('base64')
|
||||
.replace(/[+\/]/g, '')
|
||||
.slice(0, len);
|
||||
}
|
||||
|
||||
|
||||
// Interface
|
||||
module.exports.uid = uid;
|
||||
|
@ -0,0 +1,594 @@
|
||||
var customUtils = require('./customUtils')
|
||||
, model = require('./model')
|
||||
, async = require('async')
|
||||
, Executor = require('./executor')
|
||||
, Index = require('./indexes')
|
||||
, util = require('util')
|
||||
, _ = require('underscore')
|
||||
, Persistence = require('./persistence')
|
||||
, Cursor = require('./cursor')
|
||||
;
|
||||
|
||||
|
||||
/**
|
||||
* Create a new collection
|
||||
* @param {String} options.filename Optional, datastore will be in-memory only if not provided
|
||||
* @param {Boolean} options.inMemoryOnly Optional, default to false
|
||||
* @param {Boolean} options.nodeWebkitAppName Optional, specify the name of your NW app if you want options.filename to be relative to the directory where
|
||||
* Node Webkit stores application data such as cookies and local storage (the best place to store data in my opinion)
|
||||
* @param {Boolean} options.autoload Optional, defaults to false
|
||||
* @param {Function} options.onload Optional, if autoload is used this will be called after the load database with the error object as parameter. If you don't pass it the error will be thrown
|
||||
*/
|
||||
function Datastore (options) {
|
||||
var filename;
|
||||
|
||||
// Retrocompatibility with v0.6 and before
|
||||
if (typeof options === 'string') {
|
||||
filename = options;
|
||||
this.inMemoryOnly = false; // Default
|
||||
} else {
|
||||
options = options || {};
|
||||
filename = options.filename;
|
||||
this.inMemoryOnly = options.inMemoryOnly || false;
|
||||
this.autoload = options.autoload || false;
|
||||
}
|
||||
|
||||
// Determine whether in memory or persistent
|
||||
if (!filename || typeof filename !== 'string' || filename.length === 0) {
|
||||
this.filename = null;
|
||||
this.inMemoryOnly = true;
|
||||
} else {
|
||||
this.filename = filename;
|
||||
}
|
||||
|
||||
// Persistence handling
|
||||
this.persistence = new Persistence({ db: this, nodeWebkitAppName: options.nodeWebkitAppName });
|
||||
|
||||
// This new executor is ready if we don't use persistence
|
||||
// If we do, it will only be ready once loadDatabase is called
|
||||
this.executor = new Executor();
|
||||
if (this.inMemoryOnly) { this.executor.ready = true; }
|
||||
|
||||
// Indexed by field name, dot notation can be used
|
||||
// _id is always indexed and since _ids are generated randomly the underlying
|
||||
// binary is always well-balanced
|
||||
this.indexes = {};
|
||||
this.indexes._id = new Index({ fieldName: '_id', unique: true });
|
||||
|
||||
// Queue a load of the database right away and call the onload handler
|
||||
// By default (no onload handler), if there is an error there, no operation will be possible so warn the user by throwing an exception
|
||||
if (this.autoload) { this.loadDatabase(options.onload || function (err) {
|
||||
if (err) { throw err; }
|
||||
}); }
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Load the database from the datafile, and trigger the execution of buffered commands if any
|
||||
*/
|
||||
Datastore.prototype.loadDatabase = function () {
|
||||
this.executor.push({ this: this.persistence, fn: this.persistence.loadDatabase, arguments: arguments }, true);
|
||||
};
|
||||
|
||||
|
||||
/**
|
||||
* Get an array of all the data in the database
|
||||
*/
|
||||
Datastore.prototype.getAllData = function () {
|
||||
return this.indexes._id.getAll();
|
||||
};
|
||||
|
||||
|
||||
/**
|
||||
* Reset all currently defined indexes
|
||||
*/
|
||||
Datastore.prototype.resetIndexes = function (newData) {
|
||||
var self = this;
|
||||
|
||||
Object.keys(this.indexes).forEach(function (i) {
|
||||
self.indexes[i].reset(newData);
|
||||
});
|
||||
};
|
||||
|
||||
|
||||
/**
|
||||
* Ensure an index is kept for this field. Same parameters as lib/indexes
|
||||
* For now this function is synchronous, we need to test how much time it takes
|
||||
* We use an async API for consistency with the rest of the code
|
||||
* @param {String} options.fieldName
|
||||
* @param {Boolean} options.unique
|
||||
* @param {Boolean} options.sparse
|
||||
* @param {Function} cb Optional callback, signature: err
|
||||
*/
|
||||
Datastore.prototype.ensureIndex = function (options, cb) {
|
||||
var callback = cb || function () {};
|
||||
|
||||
options = options || {};
|
||||
|
||||
if (!options.fieldName) { return callback({ missingFieldName: true }); }
|
||||
if (this.indexes[options.fieldName]) { return callback(null); }
|
||||
|
||||
this.indexes[options.fieldName] = new Index(options);
|
||||
|
||||
try {
|
||||
this.indexes[options.fieldName].insert(this.getAllData());
|
||||
} catch (e) {
|
||||
delete this.indexes[options.fieldName];
|
||||
return callback(e);
|
||||
}
|
||||
|
||||
this.persistence.persistNewState([{ $$indexCreated: options }], function (err) {
|
||||
if (err) { return callback(err); }
|
||||
return callback(null);
|
||||
});
|
||||
};
|
||||
|
||||
|
||||
/**
|
||||
* Remove an index
|
||||
* @param {String} fieldName
|
||||
* @param {Function} cb Optional callback, signature: err
|
||||
*/
|
||||
Datastore.prototype.removeIndex = function (fieldName, cb) {
|
||||
var callback = cb || function () {};
|
||||
|
||||
delete this.indexes[fieldName];
|
||||
|
||||
this.persistence.persistNewState([{ $$indexRemoved: fieldName }], function (err) {
|
||||
if (err) { return callback(err); }
|
||||
return callback(null);
|
||||
});
|
||||
};
|
||||
|
||||
|
||||
/**
|
||||
* Add one or several document(s) to all indexes
|
||||
*/
|
||||
Datastore.prototype.addToIndexes = function (doc) {
|
||||
var i, failingIndex, error
|
||||
, keys = Object.keys(this.indexes)
|
||||
;
|
||||
|
||||
for (i = 0; i < keys.length; i += 1) {
|
||||
try {
|
||||
this.indexes[keys[i]].insert(doc);
|
||||
} catch (e) {
|
||||
failingIndex = i;
|
||||
error = e;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// If an error happened, we need to rollback the insert on all other indexes
|
||||
if (error) {
|
||||
for (i = 0; i < failingIndex; i += 1) {
|
||||
this.indexes[keys[i]].remove(doc);
|
||||
}
|
||||
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
/**
|
||||
* Remove one or several document(s) from all indexes
|
||||
*/
|
||||
Datastore.prototype.removeFromIndexes = function (doc) {
|
||||
var self = this;
|
||||
|
||||
Object.keys(this.indexes).forEach(function (i) {
|
||||
self.indexes[i].remove(doc);
|
||||
});
|
||||
};
|
||||
|
||||
|
||||
/**
|
||||
* Update one or several documents in all indexes
|
||||
* To update multiple documents, oldDoc must be an array of { oldDoc, newDoc } pairs
|
||||
* If one update violates a constraint, all changes are rolled back
|
||||
*/
|
||||
Datastore.prototype.updateIndexes = function (oldDoc, newDoc) {
|
||||
var i, failingIndex, error
|
||||
, keys = Object.keys(this.indexes)
|
||||
;
|
||||
|
||||
for (i = 0; i < keys.length; i += 1) {
|
||||
try {
|
||||
this.indexes[keys[i]].update(oldDoc, newDoc);
|
||||
} catch (e) {
|
||||
failingIndex = i;
|
||||
error = e;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// If an error happened, we need to rollback the update on all other indexes
|
||||
if (error) {
|
||||
for (i = 0; i < failingIndex; i += 1) {
|
||||
this.indexes[keys[i]].revertUpdate(oldDoc, newDoc);
|
||||
}
|
||||
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
/**
|
||||
* Return the list of candidates for a given query
|
||||
* Crude implementation for now, we return the candidates given by the first usable index if any
|
||||
* We try the following query types, in this order: basic match, $in match, comparison match
|
||||
* One way to make it better would be to enable the use of multiple indexes if the first usable index
|
||||
* returns too much data. I may do it in the future.
|
||||
*
|
||||
* TODO: needs to be moved to the Cursor module
|
||||
*/
|
||||
Datastore.prototype.getCandidates = function (query) {
|
||||
var indexNames = Object.keys(this.indexes)
|
||||
, usableQueryKeys;
|
||||
|
||||
// For a basic match
|
||||
usableQueryKeys = [];
|
||||
Object.keys(query).forEach(function (k) {
|
||||
if (typeof query[k] === 'string' || typeof query[k] === 'number' || typeof query[k] === 'boolean' || util.isDate(query[k]) || query[k] === null) {
|
||||
usableQueryKeys.push(k);
|
||||
}
|
||||
});
|
||||
usableQueryKeys = _.intersection(usableQueryKeys, indexNames);
|
||||
if (usableQueryKeys.length > 0) {
|
||||
return this.indexes[usableQueryKeys[0]].getMatching(query[usableQueryKeys[0]]);
|
||||
}
|
||||
|
||||
// For a $in match
|
||||
usableQueryKeys = [];
|
||||
Object.keys(query).forEach(function (k) {
|
||||
if (query[k] && query[k].hasOwnProperty('$in')) {
|
||||
usableQueryKeys.push(k);
|
||||
}
|
||||
});
|
||||
usableQueryKeys = _.intersection(usableQueryKeys, indexNames);
|
||||
if (usableQueryKeys.length > 0) {
|
||||
return this.indexes[usableQueryKeys[0]].getMatching(query[usableQueryKeys[0]].$in);
|
||||
}
|
||||
|
||||
// For a comparison match
|
||||
usableQueryKeys = [];
|
||||
Object.keys(query).forEach(function (k) {
|
||||
if (query[k] && (query[k].hasOwnProperty('$lt') || query[k].hasOwnProperty('$lte') || query[k].hasOwnProperty('$gt') || query[k].hasOwnProperty('$gte'))) {
|
||||
usableQueryKeys.push(k);
|
||||
}
|
||||
});
|
||||
usableQueryKeys = _.intersection(usableQueryKeys, indexNames);
|
||||
if (usableQueryKeys.length > 0) {
|
||||
return this.indexes[usableQueryKeys[0]].getBetweenBounds(query[usableQueryKeys[0]]);
|
||||
}
|
||||
|
||||
// By default, return all the DB data
|
||||
return this.getAllData();
|
||||
};
|
||||
|
||||
|
||||
/**
|
||||
* Insert a new document
|
||||
* @param {Function} cb Optional callback, signature: err, insertedDoc
|
||||
*
|
||||
* @api private Use Datastore.insert which has the same signature
|
||||
*/
|
||||
Datastore.prototype._insert = function (newDoc, cb) {
|
||||
var callback = cb || function () {}
|
||||
;
|
||||
|
||||
try {
|
||||
this._insertInCache(newDoc);
|
||||
} catch (e) {
|
||||
return callback(e);
|
||||
}
|
||||
|
||||
this.persistence.persistNewState(util.isArray(newDoc) ? newDoc : [newDoc], function (err) {
|
||||
if (err) { return callback(err); }
|
||||
return callback(null, newDoc);
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Create a new _id that's not already in use
|
||||
*/
|
||||
Datastore.prototype.createNewId = function () {
|
||||
var tentativeId = customUtils.uid(16);
|
||||
// Try as many times as needed to get an unused _id. As explained in customUtils, the probability of this ever happening is extremely small, so this is O(1)
|
||||
if (this.indexes._id.getMatching(tentativeId).length > 0) {
|
||||
tentativeId = this.createNewId();
|
||||
}
|
||||
return tentativeId;
|
||||
};
|
||||
|
||||
/**
|
||||
* Prepare a document (or array of documents) to be inserted in a database
|
||||
* @api private
|
||||
*/
|
||||
Datastore.prototype.prepareDocumentForInsertion = function (newDoc) {
|
||||
var preparedDoc, self = this;
|
||||
|
||||
if (util.isArray(newDoc)) {
|
||||
preparedDoc = [];
|
||||
newDoc.forEach(function (doc) { preparedDoc.push(self.prepareDocumentForInsertion(doc)); });
|
||||
} else {
|
||||
newDoc._id = newDoc._id || this.createNewId();
|
||||
preparedDoc = model.deepCopy(newDoc);
|
||||
model.checkObject(preparedDoc);
|
||||
}
|
||||
|
||||
return preparedDoc;
|
||||
};
|
||||
|
||||
/**
|
||||
* If newDoc is an array of documents, this will insert all documents in the cache
|
||||
* @api private
|
||||
*/
|
||||
Datastore.prototype._insertInCache = function (newDoc) {
|
||||
if (util.isArray(newDoc)) {
|
||||
this._insertMultipleDocsInCache(newDoc);
|
||||
} else {
|
||||
this.addToIndexes(this.prepareDocumentForInsertion(newDoc));
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* If one insertion fails (e.g. because of a unique constraint), roll back all previous
|
||||
* inserts and throws the error
|
||||
* @api private
|
||||
*/
|
||||
Datastore.prototype._insertMultipleDocsInCache = function (newDocs) {
|
||||
var i, failingI, error
|
||||
, preparedDocs = this.prepareDocumentForInsertion(newDocs)
|
||||
;
|
||||
|
||||
for (i = 0; i < preparedDocs.length; i += 1) {
|
||||
try {
|
||||
this.addToIndexes(preparedDocs[i]);
|
||||
} catch (e) {
|
||||
error = e;
|
||||
failingI = i;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (error) {
|
||||
for (i = 0; i < failingI; i += 1) {
|
||||
this.removeFromIndexes(preparedDocs[i]);
|
||||
}
|
||||
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
Datastore.prototype.insert = function () {
|
||||
this.executor.push({ this: this, fn: this._insert, arguments: arguments });
|
||||
};
|
||||
|
||||
|
||||
/**
|
||||
* Count all documents matching the query
|
||||
* @param {Object} query MongoDB-style query
|
||||
*/
|
||||
Datastore.prototype.count = function(query, callback) {
|
||||
var cursor = new Cursor(this, query, function(err, docs, callback) {
|
||||
if (err) { return callback(err); }
|
||||
return callback(null, docs.length);
|
||||
});
|
||||
|
||||
if (typeof callback === 'function') {
|
||||
cursor.exec(callback);
|
||||
} else {
|
||||
return cursor;
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
/**
|
||||
* Find all documents matching the query
|
||||
* If no callback is passed, we return the cursor so that user can limit, skip and finally exec
|
||||
* @param {Object} query MongoDB-style query
|
||||
* @param {Object} projection MongoDB-style projection
|
||||
*/
|
||||
Datastore.prototype.find = function (query, projection, callback) {
|
||||
switch (arguments.length) {
|
||||
case 1:
|
||||
projection = {};
|
||||
// callback is undefined, will return a cursor
|
||||
break;
|
||||
case 2:
|
||||
if (typeof projection === 'function') {
|
||||
callback = projection;
|
||||
projection = {};
|
||||
} // If not assume projection is an object and callback undefined
|
||||
break;
|
||||
}
|
||||
|
||||
var cursor = new Cursor(this, query, function(err, docs, callback) {
|
||||
var res = [], i;
|
||||
|
||||
if (err) { return callback(err); }
|
||||
|
||||
for (i = 0; i < docs.length; i += 1) {
|
||||
res.push(model.deepCopy(docs[i]));
|
||||
}
|
||||
return callback(null, res);
|
||||
});
|
||||
|
||||
cursor.projection(projection);
|
||||
if (typeof callback === 'function') {
|
||||
cursor.exec(callback);
|
||||
} else {
|
||||
return cursor;
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
/**
|
||||
* Find one document matching the query
|
||||
* @param {Object} query MongoDB-style query
|
||||
* @param {Object} projection MongoDB-style projection
|
||||
*/
|
||||
Datastore.prototype.findOne = function (query, projection, callback) {
|
||||
switch (arguments.length) {
|
||||
case 1:
|
||||
projection = {};
|
||||
// callback is undefined, will return a cursor
|
||||
break;
|
||||
case 2:
|
||||
if (typeof projection === 'function') {
|
||||
callback = projection;
|
||||
projection = {};
|
||||
} // If not assume projection is an object and callback undefined
|
||||
break;
|
||||
}
|
||||
|
||||
var cursor = new Cursor(this, query, function(err, docs, callback) {
|
||||
if (err) { return callback(err); }
|
||||
if (docs.length === 1) {
|
||||
return callback(null, model.deepCopy(docs[0]));
|
||||
} else {
|
||||
return callback(null, null);
|
||||
}
|
||||
});
|
||||
|
||||
cursor.projection(projection).limit(1);
|
||||
if (typeof callback === 'function') {
|
||||
cursor.exec(callback);
|
||||
} else {
|
||||
return cursor;
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
/**
|
||||
* Update all docs matching query
|
||||
* For now, very naive implementation (recalculating the whole database)
|
||||
* @param {Object} query
|
||||
* @param {Object} updateQuery
|
||||
* @param {Object} options Optional options
|
||||
* options.multi If true, can update multiple documents (defaults to false)
|
||||
* options.upsert If true, document is inserted if the query doesn't match anything
|
||||
* @param {Function} cb Optional callback, signature: err, numReplaced, upsert (set to true if the update was in fact an upsert)
|
||||
*
|
||||
* @api private Use Datastore.update which has the same signature
|
||||
*/
|
||||
Datastore.prototype._update = function (query, updateQuery, options, cb) {
|
||||
var callback
|
||||
, self = this
|
||||
, numReplaced = 0
|
||||
, multi, upsert
|
||||
, i
|
||||
;
|
||||
|
||||
if (typeof options === 'function') { cb = options; options = {}; }
|
||||
callback = cb || function () {};
|
||||
multi = options.multi !== undefined ? options.multi : false;
|
||||
upsert = options.upsert !== undefined ? options.upsert : false;
|
||||
|
||||
async.waterfall([
|
||||
function (cb) { // If upsert option is set, check whether we need to insert the doc
|
||||
if (!upsert) { return cb(); }
|
||||
|
||||
// Need to use an internal function not tied to the executor to avoid deadlock
|
||||
var cursor = new Cursor(self, query);
|
||||
cursor.limit(1)._exec(function (err, docs) {
|
||||
if (err) { return callback(err); }
|
||||
if (docs.length === 1) {
|
||||
return cb();
|
||||
} else {
|
||||
return self._insert(model.modify(query, updateQuery), function (err, newDoc) {
|
||||
if (err) { return callback(err); }
|
||||
return callback(null, 1, newDoc);
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
, function () { // Perform the update
|
||||
var modifiedDoc
|
||||
, candidates = self.getCandidates(query)
|
||||
, modifications = []
|
||||
;
|
||||
|
||||
// Preparing update (if an error is thrown here neither the datafile nor
|
||||
// the in-memory indexes are affected)
|
||||
try {
|
||||
for (i = 0; i < candidates.length; i += 1) {
|
||||
if (model.match(candidates[i], query) && (multi || numReplaced === 0)) {
|
||||
numReplaced += 1;
|
||||
modifiedDoc = model.modify(candidates[i], updateQuery);
|
||||
modifications.push({ oldDoc: candidates[i], newDoc: modifiedDoc });
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
return callback(err);
|
||||
}
|
||||
|
||||
// Change the docs in memory
|
||||
try {
|
||||
self.updateIndexes(modifications);
|
||||
} catch (err) {
|
||||
return callback(err);
|
||||
}
|
||||
|
||||
// Update the datafile
|
||||
self.persistence.persistNewState(_.pluck(modifications, 'newDoc'), function (err) {
|
||||
if (err) { return callback(err); }
|
||||
return callback(null, numReplaced);
|
||||
});
|
||||
}
|
||||
]);
|
||||
};
|
||||
Datastore.prototype.update = function () {
|
||||
this.executor.push({ this: this, fn: this._update, arguments: arguments });
|
||||
};
|
||||
|
||||
|
||||
/**
|
||||
* Remove all docs matching the query
|
||||
* For now very naive implementation (similar to update)
|
||||
* @param {Object} query
|
||||
* @param {Object} options Optional options
|
||||
* options.multi If true, can update multiple documents (defaults to false)
|
||||
* @param {Function} cb Optional callback, signature: err, numRemoved
|
||||
*
|
||||
* @api private Use Datastore.remove which has the same signature
|
||||
*/
|
||||
Datastore.prototype._remove = function (query, options, cb) {
|
||||
var callback
|
||||
, self = this
|
||||
, numRemoved = 0
|
||||
, multi
|
||||
, removedDocs = []
|
||||
, candidates = this.getCandidates(query)
|
||||
;
|
||||
|
||||
if (typeof options === 'function') { cb = options; options = {}; }
|
||||
callback = cb || function () {};
|
||||
multi = options.multi !== undefined ? options.multi : false;
|
||||
|
||||
try {
|
||||
candidates.forEach(function (d) {
|
||||
if (model.match(d, query) && (multi || numRemoved === 0)) {
|
||||
numRemoved += 1;
|
||||
removedDocs.push({ $$deleted: true, _id: d._id });
|
||||
self.removeFromIndexes(d);
|
||||
}
|
||||
});
|
||||
} catch (err) { return callback(err); }
|
||||
|
||||
self.persistence.persistNewState(removedDocs, function (err) {
|
||||
if (err) { return callback(err); }
|
||||
return callback(null, numRemoved);
|
||||
});
|
||||
};
|
||||
Datastore.prototype.remove = function () {
|
||||
this.executor.push({ this: this, fn: this._remove, arguments: arguments });
|
||||
};
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
module.exports = Datastore;
|
@ -0,0 +1,77 @@
|
||||
/**
|
||||
* Responsible for sequentially executing actions on the database
|
||||
*/
|
||||
|
||||
var async = require('async')
|
||||
;
|
||||
|
||||
function Executor () {
|
||||
this.buffer = [];
|
||||
this.ready = false;
|
||||
|
||||
// This queue will execute all commands, one-by-one in order
|
||||
this.queue = async.queue(function (task, cb) {
|
||||
var callback
|
||||
, lastArg = task.arguments[task.arguments.length - 1]
|
||||
, i, newArguments = []
|
||||
;
|
||||
|
||||
// task.arguments is an array-like object on which adding a new field doesn't work, so we transform it into a real array
|
||||
for (i = 0; i < task.arguments.length; i += 1) { newArguments.push(task.arguments[i]); }
|
||||
|
||||
// Always tell the queue task is complete. Execute callback if any was given.
|
||||
if (typeof lastArg === 'function') {
|
||||
callback = function () {
|
||||
if (typeof setImmediate === 'function') {
|
||||
setImmediate(cb);
|
||||
} else {
|
||||
process.nextTick(cb);
|
||||
}
|
||||
lastArg.apply(null, arguments);
|
||||
};
|
||||
|
||||
newArguments[newArguments.length - 1] = callback;
|
||||
} else {
|
||||
callback = function () { cb(); };
|
||||
newArguments.push(callback);
|
||||
}
|
||||
|
||||
|
||||
task.fn.apply(task.this, newArguments);
|
||||
}, 1);
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* If executor is ready, queue task (and process it immediately if executor was idle)
|
||||
* If not, buffer task for later processing
|
||||
* @param {Object} task
|
||||
* task.this - Object to use as this
|
||||
* task.fn - Function to execute
|
||||
* task.arguments - Array of arguments
|
||||
* @param {Boolean} forceQueuing Optional (defaults to false) force executor to queue task even if it is not ready
|
||||
*/
|
||||
Executor.prototype.push = function (task, forceQueuing) {
|
||||
if (this.ready || forceQueuing) {
|
||||
this.queue.push(task);
|
||||
} else {
|
||||
this.buffer.push(task);
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
/**
|
||||
* Queue all tasks in buffer (in the same order they came in)
|
||||
* Automatically sets executor as ready
|
||||
*/
|
||||
Executor.prototype.processBuffer = function () {
|
||||
var i;
|
||||
this.ready = true;
|
||||
for (i = 0; i < this.buffer.length; i += 1) { this.queue.push(this.buffer[i]); }
|
||||
this.buffer = [];
|
||||
};
|
||||
|
||||
|
||||
|
||||
// Interface
|
||||
module.exports = Executor;
|
@ -0,0 +1,294 @@
|
||||
var BinarySearchTree = require('binary-search-tree').AVLTree
|
||||
, model = require('./model')
|
||||
, _ = require('underscore')
|
||||
, util = require('util')
|
||||
;
|
||||
|
||||
/**
|
||||
* Two indexed pointers are equal iif they point to the same place
|
||||
*/
|
||||
function checkValueEquality (a, b) {
|
||||
return a === b;
|
||||
}
|
||||
|
||||
/**
|
||||
* Type-aware projection
|
||||
*/
|
||||
function projectForUnique (elt) {
|
||||
if (elt === null) { return '$null'; }
|
||||
if (typeof elt === 'string') { return '$string' + elt; }
|
||||
if (typeof elt === 'boolean') { return '$boolean' + elt; }
|
||||
if (typeof elt === 'number') { return '$number' + elt; }
|
||||
if (util.isArray(elt)) { return '$date' + elt.getTime(); }
|
||||
|
||||
return elt; // Arrays and objects, will check for pointer equality
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Create a new index
|
||||
* All methods on an index guarantee that either the whole operation was successful and the index changed
|
||||
* or the operation was unsuccessful and an error is thrown while the index is unchanged
|
||||
* @param {String} options.fieldName On which field should the index apply (can use dot notation to index on sub fields)
|
||||
* @param {Boolean} options.unique Optional, enforce a unique constraint (default: false)
|
||||
* @param {Boolean} options.sparse Optional, allow a sparse index (we can have documents for which fieldName is undefined) (default: false)
|
||||
*/
|
||||
function Index (options) {
|
||||
this.fieldName = options.fieldName;
|
||||
this.unique = options.unique || false;
|
||||
this.sparse = options.sparse || false;
|
||||
|
||||
this.treeOptions = { unique: this.unique, compareKeys: model.compareThings, checkValueEquality: checkValueEquality };
|
||||
|
||||
this.reset(); // No data in the beginning
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Reset an index
|
||||
* @param {Document or Array of documents} newData Optional, data to initialize the index with
|
||||
* If an error is thrown during insertion, the index is not modified
|
||||
*/
|
||||
Index.prototype.reset = function (newData) {
|
||||
this.tree = new BinarySearchTree(this.treeOptions);
|
||||
|
||||
if (newData) { this.insert(newData); }
|
||||
};
|
||||
|
||||
|
||||
/**
 * Insert a new document in the index
 * If an array is passed, we insert all its elements (if one insertion fails the index is not modified)
 * O(log(n))
 * @param {Document or Array of documents} doc
 * @throws rethrows the tree's error (e.g. unique-constraint violation), leaving the index unchanged
 */
Index.prototype.insert = function (doc) {
  var key, self = this
    , keys, i, failingI, error
    ;

  if (util.isArray(doc)) { this.insertMultipleDocs(doc); return; }

  key = model.getDotValue(doc, this.fieldName);

  // We don't index documents that don't contain the field if the index is sparse
  if (key === undefined && this.sparse) { return; }

  if (!util.isArray(key)) {
    this.tree.insert(key, doc);
  } else {
    // An array-valued key produces one tree entry per distinct projected key.
    // If an insert fails due to a unique constraint, roll back all inserts before it
    keys = _.uniq(key, projectForUnique);

    for (i = 0; i < keys.length; i += 1) {
      try {
        this.tree.insert(keys[i], doc);
      } catch (e) {
        // Remember where we failed so only the already-inserted keys are undone
        error = e;
        failingI = i;
        break;
      }
    }

    if (error) {
      for (i = 0; i < failingI; i += 1) {
        this.tree.delete(keys[i], doc);
      }

      throw error;
    }
  }
};
|
||||
|
||||
|
||||
/**
 * Insert an array of documents in the index
 * If a constraint is violated, the changes should be rolled back and an error thrown
 * @param {Array of documents} docs
 *
 * @API private
 */
Index.prototype.insertMultipleDocs = function (docs) {
  var i, error, failingI;

  for (i = 0; i < docs.length; i += 1) {
    try {
      this.insert(docs[i]);
    } catch (e) {
      // Record the failing position so we only undo what was inserted
      error = e;
      failingI = i;
      break;
    }
  }

  // Roll back the documents inserted before the failing one, then rethrow
  if (error) {
    for (i = 0; i < failingI; i += 1) {
      this.remove(docs[i]);
    }

    throw error;
  }
};
|
||||
|
||||
|
||||
/**
 * Remove a document from the index
 * If an array is passed, we remove all its elements
 * The remove operation is safe with regards to the 'unique' constraint
 * O(log(n))
 * @param {Document or Array of documents} doc
 */
Index.prototype.remove = function (doc) {
  var that = this;

  if (util.isArray(doc)) {
    doc.forEach(function (d) { that.remove(d); });
    return;
  }

  var indexedValue = model.getDotValue(doc, this.fieldName);

  // A sparse index simply never held documents missing the indexed field
  if (indexedValue === undefined && this.sparse) { return; }

  if (util.isArray(indexedValue)) {
    // One tree entry was created per distinct projected key; delete each
    _.uniq(indexedValue, projectForUnique).forEach(function (k) {
      that.tree.delete(k, doc);
    });
  } else {
    this.tree.delete(indexedValue, doc);
  }
};
|
||||
|
||||
|
||||
/**
 * Update a document in the index
 * If a constraint is violated, changes are rolled back and an error thrown
 * Naive implementation (remove then insert), still in O(log(n))
 * @param {Document or Array of {oldDoc, newDoc} pairs} oldDoc
 * @param {Document} newDoc Ignored when oldDoc is an array of pairs
 */
Index.prototype.update = function (oldDoc, newDoc) {
  if (util.isArray(oldDoc)) { this.updateMultipleDocs(oldDoc); return; }

  this.remove(oldDoc);

  try {
    this.insert(newDoc);
  } catch (e) {
    // Reinstate the old document so the index is left unchanged, then rethrow
    this.insert(oldDoc);
    throw e;
  }
};
|
||||
|
||||
|
||||
/**
 * Update multiple documents in the index
 * If a constraint is violated, the changes need to be rolled back
 * and an error thrown
 * @param {Array of oldDoc, newDoc pairs} pairs
 *
 * @API private
 */
Index.prototype.updateMultipleDocs = function (pairs) {
  var i, failingI, error;

  // Phase 1: remove all old versions first (so a doc whose key moves to
  // another pair's old key doesn't trip the unique constraint)
  for (i = 0; i < pairs.length; i += 1) {
    this.remove(pairs[i].oldDoc);
  }

  // Phase 2: insert all new versions, remembering where a failure happens
  for (i = 0; i < pairs.length; i += 1) {
    try {
      this.insert(pairs[i].newDoc);
    } catch (e) {
      error = e;
      failingI = i;
      break;
    }
  }

  // If an error was raised, roll back changes in the inverse order
  if (error) {
    for (i = 0; i < failingI; i += 1) {
      this.remove(pairs[i].newDoc);
    }

    for (i = 0; i < pairs.length; i += 1) {
      this.insert(pairs[i].oldDoc);
    }

    throw error;
  }
};
|
||||
|
||||
|
||||
/**
 * Revert an update: re-applies update with old and new documents swapped
 * @param {Document or Array of {oldDoc, newDoc} pairs} oldDoc
 * @param {Document} newDoc Ignored when oldDoc is an array of pairs
 */
Index.prototype.revertUpdate = function (oldDoc, newDoc) {
  if (util.isArray(oldDoc)) {
    // Swap each pair so the previous batch update is undone by a normal update
    var swapped = [];
    oldDoc.forEach(function (pair) {
      swapped.push({ oldDoc: pair.newDoc, newDoc: pair.oldDoc });
    });
    this.update(swapped);
  } else {
    this.update(newDoc, oldDoc);
  }
};
|
||||
|
||||
|
||||
// Append all elements of toAppend onto the end of array (mutates array in place)
function append (array, toAppend) {
  toAppend.forEach(function (element) {
    array.push(element);
  });
}
|
||||
|
||||
|
||||
/**
 * Get all documents in index whose key match value (if it is a Thing) or one of the elements of value (if it is an array of Things)
 * @param {Thing} value Value to match the key against
 * @return {Array of documents}
 */
Index.prototype.getMatching = function (value) {
  var that = this;

  if (util.isArray(value)) {
    // One lookup per element, results concatenated in input order
    var matches = [];
    value.forEach(function (v) { append(matches, that.getMatching(v)); });
    return matches;
  }

  return this.tree.search(value);
};
|
||||
|
||||
|
||||
/**
 * Get all documents in index whose key is between bounds as they are defined by query
 * Documents are sorted by key
 * @param {Query} query Bounds object, e.g. { $gt: 3, $lte: 10 }
 * @return {Array of documents}
 */
Index.prototype.getBetweenBounds = function (query) {
  // Range traversal is delegated entirely to the underlying tree
  return this.tree.betweenBounds(query);
};
|
||||
|
||||
|
||||
/**
 * Get all elements in the index
 * @return {Array of documents}
 */
Index.prototype.getAll = function () {
  var docs = [];

  // Each tree node may hold several documents under the same key
  this.tree.executeOnEveryNode(function (node) {
    node.data.forEach(function (d) { docs.push(d); });
  });

  return docs;
};
|
||||
|
||||
|
||||
|
||||
|
||||
// Interface
|
||||
module.exports = Index;
|
@ -0,0 +1,757 @@
|
||||
/**
|
||||
* Handle models (i.e. docs)
|
||||
* Serialization/deserialization
|
||||
* Copying
|
||||
* Querying, update
|
||||
*/
|
||||
|
||||
var util = require('util')
|
||||
, _ = require('underscore')
|
||||
, modifierFunctions = {}
|
||||
, lastStepModifierFunctions = {}
|
||||
, comparisonFunctions = {}
|
||||
, logicalOperators = {}
|
||||
, arrayComparisonFunctions = {}
|
||||
;
|
||||
|
||||
|
||||
/**
 * Check a key, throw an error if the key is non valid
 * @param {String} k key
 * @param {Model} v value, needed to treat the Date edge case
 * Non-treatable edge cases here: if part of the object is of the form { $$date: number } or { $$deleted: true }
 * its serialized-then-deserialized version will be transformed into a Date object
 * But you really need to want it to trigger such behaviour, even when warned not to use '$' at the beginning of the field names...
 */
function checkKey (k, v) {
  if (k[0] === '$') {
    // A handful of internal markers are tolerated despite starting with '$'
    var internalMarker = (k === '$$date' && typeof v === 'number') ||
                         (k === '$$deleted' && v === true) ||
                         k === '$$indexCreated' ||
                         k === '$$indexRemoved';
    if (!internalMarker) { throw 'Field names cannot begin with the $ character'; }
  }

  if (k.indexOf('.') !== -1) {
    throw 'Field names cannot contain a .';
  }
}
|
||||
|
||||
|
||||
/**
 * Check a DB object and throw an error if it's not valid
 * Works by applying the checkKey function to all fields recursively
 */
function checkObject (obj) {
  if (Array.isArray(obj)) {
    obj.forEach(function (element) {
      checkObject(element);
    });
  }

  // typeof null === 'object', hence the explicit null guard
  if (typeof obj === 'object' && obj !== null) {
    Object.keys(obj).forEach(function (k) {
      checkKey(k, obj[k]);
      checkObject(obj[k]);
    });
  }
}
|
||||
|
||||
|
||||
/**
 * Serialize an object to be persisted to a one-line string
 * For serialization/deserialization, we use the native JSON parser and not eval or Function
 * That gives us less freedom but data entered in the database may come from users
 * so eval and the like are not safe
 * Accepted primitive types: Number, String, Boolean, Date, null
 * Accepted secondary types: Objects, Arrays
 * @param {Model} obj
 * @return {String} one JSON line; Dates are encoded as { $$date: timestamp }
 * @throws if a key begins with '$' or contains '.' (via checkKey)
 */
function serialize (obj) {
  var res;

  res = JSON.stringify(obj, function (k, v) {
    // Validate every key as we walk the object
    checkKey(k, v);

    if (v === undefined) { return undefined; }
    if (v === null) { return null; }

    // Hackish way of checking if object is Date (this way it works between execution contexts in node-webkit).
    // We can't use value directly because for dates it is already string in this function (date.toJSON was already called), so we use this
    if (typeof this[k].getTime === 'function') { return { $$date: this[k].getTime() }; }

    return v;
  });

  return res;
}
|
||||
|
||||
|
||||
/**
 * From a one-line representation of an object generated by the serialize function
 * Return the object itself
 * @param {String} rawData
 */
function deserialize (rawData) {
  return JSON.parse(rawData, function (k, v) {
    // The serializer encodes dates as { $$date: timestamp }: revive the timestamp first
    if (k === '$$date') { return new Date(v); }

    var t = typeof v;
    if (t === 'string' || t === 'number' || t === 'boolean' || v === null) { return v; }

    // Unwrap the container object: its $$date member was already revived above
    if (v && v.$$date) { return v.$$date; }

    return v;
  });
}
|
||||
|
||||
|
||||
/**
 * Deep copy a DB object
 * Primitives (boolean, number, string, null) and Dates are returned as-is;
 * arrays and plain objects are rebuilt recursively so that mutating the copy
 * never affects the original.
 * Anything else (undefined, functions, ...) copies to undefined.
 * @param {Model} obj
 * @return {Model} detached copy of obj
 */
function deepCopy (obj) {
  var res;

  if ( typeof obj === 'boolean' ||
       typeof obj === 'number' ||
       typeof obj === 'string' ||
       obj === null ||
       (obj instanceof Date) ) {
    return obj;
  }

  if (Array.isArray(obj)) {
    res = [];
    // BUGFIX: copy each element recursively, not by reference, otherwise
    // objects nested inside arrays stay shared with the original
    obj.forEach(function (o) { res.push(deepCopy(o)); });
    return res;
  }

  if (typeof obj === 'object') {
    res = {};
    Object.keys(obj).forEach(function (k) {
      res[k] = deepCopy(obj[k]);
    });
    return res;
  }

  return undefined;   // For now everything else is undefined. We should probably throw an error instead
}
|
||||
|
||||
|
||||
/**
 * Tells if an object is a primitive type or a "real" object
 * Arrays and Dates are considered primitive here
 */
function isPrimitiveType (obj) {
  if (obj === null) { return true; }

  var t = typeof obj;
  if (t === 'boolean' || t === 'number' || t === 'string') { return true; }

  // Dates and arrays count as primitives for querying purposes
  return Array.isArray(obj) || Object.prototype.toString.call(obj) === '[object Date]';
}
|
||||
|
||||
|
||||
/**
 * Utility function for comparing things
 * Assumes type checking was already done (a and b already have the same type)
 * Works for numbers, strings and booleans
 * @return {Number} -1 if a < b, 1 if a > b, 0 otherwise
 */
function compareNSB (a, b) {
  if (a < b) { return -1; }
  return a > b ? 1 : 0;
}
|
||||
|
||||
/**
 * Compare two arrays element by element (first element is most significant)
 * @return {Number} -1, 0 or 1
 */
function compareArrays (a, b) {
  var shared = Math.min(a.length, b.length);

  for (var idx = 0; idx < shared; idx += 1) {
    var c = compareThings(a[idx], b[idx]);
    if (c !== 0) { return c; }
  }

  // Common section was identical, longest one wins
  return compareNSB(a.length, b.length);
}
|
||||
|
||||
|
||||
/**
 * Compare { things U undefined }
 * Things are defined as any native types (string, number, boolean, null, date) and objects
 * We need to compare with undefined as it will be used in indexes
 * In the case of objects and arrays, we deep-compare
 * If two objects dont have the same type, the (arbitrary) type hierarchy is: undefined, null, number, strings, boolean, dates, arrays, objects
 * Return -1 if a < b, 1 if a > b and 0 if a = b (note that equality here is NOT the same as defined in areThingsEqual!)
 */
function compareThings (a, b) {
  var aKeys, bKeys, comp, i;

  // undefined (smallest in the hierarchy)
  if (a === undefined) { return b === undefined ? 0 : -1; }
  if (b === undefined) { return a === undefined ? 0 : 1; }

  // null
  if (a === null) { return b === null ? 0 : -1; }
  if (b === null) { return a === null ? 0 : 1; }

  // Numbers
  if (typeof a === 'number') { return typeof b === 'number' ? compareNSB(a, b) : -1; }
  if (typeof b === 'number') { return typeof a === 'number' ? compareNSB(a, b) : 1; }

  // Strings
  if (typeof a === 'string') { return typeof b === 'string' ? compareNSB(a, b) : -1; }
  if (typeof b === 'string') { return typeof a === 'string' ? compareNSB(a, b) : 1; }

  // Booleans
  if (typeof a === 'boolean') { return typeof b === 'boolean' ? compareNSB(a, b) : -1; }
  if (typeof b === 'boolean') { return typeof a === 'boolean' ? compareNSB(a, b) : 1; }

  // Dates compare by their timestamp
  if (util.isDate(a)) { return util.isDate(b) ? compareNSB(a.getTime(), b.getTime()) : -1; }
  if (util.isDate(b)) { return util.isDate(a) ? compareNSB(a.getTime(), b.getTime()) : 1; }

  // Arrays (first element is most significant and so on)
  if (util.isArray(a)) { return util.isArray(b) ? compareArrays(a, b) : -1; }
  if (util.isArray(b)) { return util.isArray(a) ? compareArrays(a, b) : 1; }

  // Objects: compare values in sorted-key order; on a tie the smaller key set wins
  aKeys = Object.keys(a).sort();
  bKeys = Object.keys(b).sort();

  for (i = 0; i < Math.min(aKeys.length, bKeys.length); i += 1) {
    comp = compareThings(a[aKeys[i]], b[bKeys[i]]);

    if (comp !== 0) { return comp; }
  }

  return compareNSB(aKeys.length, bKeys.length);
}
|
||||
|
||||
|
||||
|
||||
// ==============================================================
|
||||
// Updating documents
|
||||
// ==============================================================
|
||||
|
||||
/**
|
||||
* The signature of modifier functions is as follows
|
||||
* Their structure is always the same: recursively follow the dot notation while creating
|
||||
* the nested documents if needed, then apply the "last step modifier"
|
||||
* @param {Object} obj The model to modify
|
||||
* @param {String} field Can contain dots, in that case that means we will set a subfield recursively
|
||||
* @param {Model} value
|
||||
*/
|
||||
|
||||
/**
 * Set a field to a new value
 * @param {Object} obj Document (or sub-document) being modified in place
 * @param {String} field Final field name (dot notation already resolved by the wrapper)
 * @param {Model} value
 */
lastStepModifierFunctions.$set = function (obj, field, value) {
  obj[field] = value;
};
|
||||
|
||||
|
||||
/**
 * Unset a field
 * value is unused but kept so all modifiers share the same signature
 */
lastStepModifierFunctions.$unset = function (obj, field, value) {
  delete obj[field];
};
|
||||
|
||||
|
||||
/**
 * Push an element to the end of an array field
 * Supports the { $each: [...] } form to push several values at once
 */
lastStepModifierFunctions.$push = function (obj, field, value) {
  // Create the array if it doesn't exist
  if (!obj.hasOwnProperty(field)) { obj[field] = []; }

  var target = obj[field];
  if (!Array.isArray(target)) { throw "Can't $push an element on non-array values"; }

  var usesEach = value !== null && typeof value === 'object' && value.$each;
  if (!usesEach) {
    target.push(value);
    return;
  }

  if (Object.keys(value).length > 1) { throw "Can't use another field in conjunction with $each"; }
  if (!Array.isArray(value.$each)) { throw "$each requires an array value"; }

  value.$each.forEach(function (v) { target.push(v); });
};
|
||||
|
||||
|
||||
/**
 * Add an element to an array field only if it is not already in it
 * No modification if the element is already in the array
 * Note that it doesn't check whether the original array contains duplicates
 * Supports the { $each: [...] } form, applied element by element
 */
lastStepModifierFunctions.$addToSet = function (obj, field, value) {
  var addToSet = true;

  // Create the array if it doesn't exist
  if (!obj.hasOwnProperty(field)) { obj[field] = []; }

  if (!util.isArray(obj[field])) { throw "Can't $addToSet an element on non-array values"; }

  if (value !== null && typeof value === 'object' && value.$each) {
    if (Object.keys(value).length > 1) { throw "Can't use another field in conjunction with $each"; }
    if (!util.isArray(value.$each)) { throw "$each requires an array value"; }

    // Recurse so each element gets the same membership check
    value.$each.forEach(function (v) {
      lastStepModifierFunctions.$addToSet(obj, field, v);
    });
  } else {
    // Membership uses deep ordering equality (compareThings), not ===
    obj[field].forEach(function (v) {
      if (compareThings(v, value) === 0) { addToSet = false; }
    });
    if (addToSet) { obj[field].push(value); }
  }
};
|
||||
|
||||
|
||||
/**
 * Remove the first or last element of an array
 * value > 0 pops the last element, value < 0 pops the first, 0 is a no-op
 */
lastStepModifierFunctions.$pop = function (obj, field, value) {
  var arr = obj[field];

  if (!Array.isArray(arr)) { throw "Can't $pop an element from non-array values"; }
  if (typeof value !== 'number') { throw value + " isn't an integer, can't use it with $pop"; }
  if (value === 0) { return; }

  obj[field] = value > 0 ? arr.slice(0, arr.length - 1) : arr.slice(1);
};
|
||||
|
||||
|
||||
/**
 * Removes all instances of a value from an existing array
 * value may be a plain value or a query object matched against each element
 */
lastStepModifierFunctions.$pull = function (obj, field, value) {
  if (!Array.isArray(obj[field])) { throw "Can't $pull an element from non-array values"; }

  var arr = obj[field];
  // Walk backwards so splicing doesn't disturb indices still to visit
  for (var pos = arr.length - 1; pos >= 0; pos -= 1) {
    if (match(arr[pos], value)) { arr.splice(pos, 1); }
  }
};
|
||||
|
||||
|
||||
/**
 * Increment a numeric field's value
 * A missing field is initialized to value; an existing non-number field is an error
 */
lastStepModifierFunctions.$inc = function (obj, field, value) {
  if (typeof value !== 'number') { throw value + " must be a number"; }

  if (typeof obj[field] === 'number') {
    obj[field] += value;
    return;
  }

  if (!obj.hasOwnProperty(field)) {
    obj[field] = value;
  } else {
    throw "Don't use the $inc modifier on non-number fields";
  }
};
|
||||
|
||||
// Given its name, create the complete modifier function:
// recursively follows the dot notation, creating intermediate sub-documents
// as needed, then applies the matching last-step modifier on the final field
function createModifierFunction (modifier) {
  return function (obj, field, value) {
    // field arrives as a dotted string from the caller, then as an array on recursion
    var fieldParts = typeof field === 'string' ? field.split('.') : field;

    if (fieldParts.length === 1) {
      lastStepModifierFunctions[modifier](obj, field, value);
    } else {
      // Create the intermediate document if missing, then recurse one level down
      obj[fieldParts[0]] = obj[fieldParts[0]] || {};
      modifierFunctions[modifier](obj[fieldParts[0]], fieldParts.slice(1), value);
    }
  };
}
|
||||
|
||||
// Actually create all modifier functions: every last-step modifier gets a
// dot-notation-aware wrapper registered under the same name
Object.keys(lastStepModifierFunctions).forEach(function (modifier) {
  modifierFunctions[modifier] = createModifierFunction(modifier);
});
|
||||
|
||||
|
||||
/**
 * Modify a DB object according to an update query
 * The update query either fully replaces the document (no $-modifiers)
 * or is a set of $-modifiers applied to a deep copy; mixing both is an error
 * @param {Object} obj Original document (never mutated)
 * @param {Object} updateQuery
 * @return {Object} the new document, same _id as obj
 * @throws if _id would change, modifiers are mixed with plain fields,
 *         a modifier is unknown, or the result fails checkObject
 */
function modify (obj, updateQuery) {
  var keys = Object.keys(updateQuery)
    , firstChars = _.map(keys, function (item) { return item[0]; })
    , dollarFirstChars = _.filter(firstChars, function (c) { return c === '$'; })
    , newDoc, modifiers
    ;

  if (keys.indexOf('_id') !== -1 && updateQuery._id !== obj._id) { throw "You cannot change a document's _id"; }

  // Either all top-level keys are modifiers, or none are
  if (dollarFirstChars.length !== 0 && dollarFirstChars.length !== firstChars.length) {
    throw "You cannot mix modifiers and normal fields";
  }

  if (dollarFirstChars.length === 0) {
    // Simply replace the object with the update query contents
    newDoc = deepCopy(updateQuery);
    newDoc._id = obj._id;
  } else {
    // Apply modifiers to a deep copy so obj stays untouched on failure
    modifiers = _.uniq(keys);
    newDoc = deepCopy(obj);
    modifiers.forEach(function (m) {
      var keys;

      if (!modifierFunctions[m]) { throw "Unknown modifier " + m; }

      // Object.keys throws on non-objects: translate into a friendlier error
      try {
        keys = Object.keys(updateQuery[m]);
      } catch (e) {
        throw "Modifier " + m + "'s argument must be an object";
      }

      keys.forEach(function (k) {
        modifierFunctions[m](newDoc, k, updateQuery[m][k]);
      });
    });
  }

  // Check result is valid and return it
  checkObject(newDoc);
  if (obj._id !== newDoc._id) { throw "You can't change a document's _id"; }
  return newDoc;
};
|
||||
|
||||
|
||||
// ==============================================================
|
||||
// Finding documents
|
||||
// ==============================================================
|
||||
|
||||
/**
 * Get a value from object with dot notation
 * Crossing an array either picks one element (when the next path part is an
 * integer) or maps the rest of the path over every element
 * @param {Object} obj
 * @param {String} field Dotted path, or an array of path parts on recursion
 */
function getDotValue (obj, field) {
  var parts = typeof field === 'string' ? field.split('.') : field;

  if (!obj) { return undefined; }   // field cannot be empty so that means we should return undefined so that nothing can match

  if (parts.length === 0) { return obj; }
  if (parts.length === 1) { return obj[parts[0]]; }

  var head = obj[parts[0]];
  if (!Array.isArray(head)) {
    return getDotValue(head, parts.slice(1));
  }

  // If the next field is an integer, return only this item of the array
  var idx = parseInt(parts[1], 10);
  if (typeof idx === 'number' && !isNaN(idx)) {
    return getDotValue(head[idx], parts.slice(2));
  }

  // Otherwise collect the remaining path's value for every element
  var collected = [];
  for (var j = 0; j < head.length; j += 1) {
    collected.push(getDotValue(head[j], parts.slice(1)));
  }
  return collected;
}
|
||||
|
||||
|
||||
/**
 * Check whether 'things' are equal
 * Things are defined as any native types (string, number, boolean, null, date) and objects
 * In the case of objects, we check deep equality; arrays and undefined never match
 * Returns true if they are, false otherwise
 */
function areThingsEqual (a, b) {
  // Strings, booleans, numbers, null: plain strict equality
  if (a === null || b === null ||
      typeof a === 'string' || typeof b === 'string' ||
      typeof a === 'boolean' || typeof b === 'boolean' ||
      typeof a === 'number' || typeof b === 'number') { return a === b; }

  // Dates: equal iff both are dates with the same timestamp
  var aIsDate = a instanceof Date, bIsDate = b instanceof Date;
  if (aIsDate || bIsDate) { return aIsDate && bIsDate && a.getTime() === b.getTime(); }

  // Arrays (no match since arrays are used as a $in)
  // undefined (no match since they mean field doesn't exist and can't be serialized)
  if (Array.isArray(a) || Array.isArray(b) || a === undefined || b === undefined) { return false; }

  // General objects: deep equality over the same key set
  var aKeys, bKeys;
  try {
    aKeys = Object.keys(a);
    bKeys = Object.keys(b);
  } catch (e) {
    return false;
  }

  if (aKeys.length !== bKeys.length) { return false; }

  return aKeys.every(function (k) {
    return bKeys.indexOf(k) !== -1 && areThingsEqual(a[k], b[k]);
  });
}
|
||||
|
||||
|
||||
/**
 * Check that two values are comparable with <, <=, >, >=
 * At least one side must be a string, number or date, and both sides
 * must share the same typeof
 */
function areComparable (a, b) {
  var aOrderable = typeof a === 'string' || typeof a === 'number' || a instanceof Date;
  var bOrderable = typeof b === 'string' || typeof b === 'number' || b instanceof Date;

  if (!aOrderable && !bOrderable) { return false; }

  return typeof a === typeof b;
}
|
||||
|
||||
|
||||
/**
 * Arithmetic and comparison operators
 * @param {Native value} a Value in the object
 * @param {Native value} b Value in the query
 */
comparisonFunctions.$lt = function (a, b) {
  return areComparable(a, b) && a < b;
};

comparisonFunctions.$lte = function (a, b) {
  return areComparable(a, b) && a <= b;
};

comparisonFunctions.$gt = function (a, b) {
  return areComparable(a, b) && a > b;
};

comparisonFunctions.$gte = function (a, b) {
  return areComparable(a, b) && a >= b;
};

// $ne matches when the field is absent or holds a different value
comparisonFunctions.$ne = function (a, b) {
  return a === undefined || !areThingsEqual(a, b);
};

// $in: the field value must deep-equal one of the array's elements
comparisonFunctions.$in = function (a, b) {
  if (!util.isArray(b)) { throw "$in operator called with a non-array"; }

  return b.some(function (candidate) { return areThingsEqual(a, candidate); });
};

comparisonFunctions.$nin = function (a, b) {
  if (!util.isArray(b)) { throw "$nin operator called with a non-array"; }

  return !comparisonFunctions.$in(a, b);
};

comparisonFunctions.$regex = function (a, b) {
  if (!util.isRegExp(b)) { throw "$regex operator called with non regular expression"; }

  // Only strings can match a regex
  return typeof a === 'string' ? b.test(a) : false;
};

comparisonFunctions.$exists = function (value, exists) {
  // Coerce the argument to a boolean; note '' counts as true.
  // That's strange behaviour (we should only use true/false) but that's the way Mongo does it...
  exists = !!(exists || exists === '');

  return value === undefined ? !exists : exists;
};

// Specific to arrays
comparisonFunctions.$size = function (obj, value) {
  if (!util.isArray(obj)) { return false; }
  if (value % 1 !== 0) { throw "$size operator called without an integer"; }

  return (obj.length == value);
};
arrayComparisonFunctions.$size = true;
|
||||
|
||||
|
||||
/**
 * Logical operators: each receives the candidate document and the
 * operator's argument and returns a boolean
 */

// $or: match any of the subqueries
logicalOperators.$or = function (obj, query) {
  if (!util.isArray(query)) { throw "$or operator used without an array"; }

  return query.some(function (subquery) { return match(obj, subquery); });
};

// $and: match all of the subqueries
logicalOperators.$and = function (obj, query) {
  if (!util.isArray(query)) { throw "$and operator used without an array"; }

  return query.every(function (subquery) { return match(obj, subquery); });
};

// $not: inverted match of the query
logicalOperators.$not = function (obj, query) {
  return !match(obj, query);
};

// $where: delegate the decision to a user-supplied function (called with this = obj)
logicalOperators.$where = function (obj, fn) {
  if (!_.isFunction(fn)) { throw "$where operator used without a function"; }

  var verdict = fn.call(obj);
  if (!_.isBoolean(verdict)) { throw "$where function must return boolean"; }

  return verdict;
};
|
||||
|
||||
|
||||
/**
 * Tell if a given document matches a query
 * Top-level '$' keys are logical operators; every other key is matched
 * against the document with matchQueryPart. All parts must match (implicit AND)
 * @param {Object} obj Document to check
 * @param {Object} query
 * @return {Boolean}
 * @throws on unknown logical operators
 */
function match (obj, query) {
  var queryKeys, queryKey, queryValue, i;

  // Primitive query against a primitive type
  // This is a bit of a hack since we construct an object with an arbitrary key only to dereference it later
  // But I don't have time for a cleaner implementation now
  if (isPrimitiveType(obj) || isPrimitiveType(query)) {
    return matchQueryPart({ needAKey: obj }, 'needAKey', query);
  }

  // Normal query: every key must match for the document to match
  queryKeys = Object.keys(query);
  for (i = 0; i < queryKeys.length; i += 1) {
    queryKey = queryKeys[i];
    queryValue = query[queryKey];

    if (queryKey[0] === '$') {
      if (!logicalOperators[queryKey]) { throw "Unknown logical operator " + queryKey; }
      if (!logicalOperators[queryKey](obj, queryValue)) { return false; }
    } else {
      if (!matchQueryPart(obj, queryKey, queryValue)) { return false; }
    }
  }

  return true;
};
|
||||
|
||||
|
||||
/**
 * Match an object against a specific { key: value } part of a query
 * If the treatObjAsValue flag is set, don't try to match every part separately, but the array as a whole
 * @param {Object} obj Document to check
 * @param {String} queryKey Dotted field path
 * @param {Model} queryValue Value or operator object to match against
 * @param {Boolean} treatObjAsValue Internal flag for array-as-a-whole matching
 * @return {Boolean}
 * @throws on mixed operator/plain-field objects or unknown comparison functions
 */
function matchQueryPart (obj, queryKey, queryValue, treatObjAsValue) {
  var objValue = getDotValue(obj, queryKey)
    , i, keys, firstChars, dollarFirstChars;

  // Check if the value is an array if we don't force a treatment as value
  if (util.isArray(objValue) && !treatObjAsValue) {
    // Check if we are using an array-specific comparison function (e.g. $size):
    // those must see the whole array, so recurse with treatObjAsValue set
    if (queryValue !== null && typeof queryValue === 'object' && !util.isRegExp(queryValue)) {
      keys = Object.keys(queryValue);
      for (i = 0; i < keys.length; i += 1) {
        if (arrayComparisonFunctions[keys[i]]) { return matchQueryPart(obj, queryKey, queryValue, true); }
      }
    }

    // If not, treat it as an array of { obj, query } where there needs to be at least one match
    for (i = 0; i < objValue.length; i += 1) {
      if (matchQueryPart({ k: objValue[i] }, 'k', queryValue)) { return true; }   // k here could be any string
    }
    return false;
  }

  // queryValue is an actual object. Determine whether it contains comparison operators
  // or only normal fields. Mixed objects are not allowed
  if (queryValue !== null && typeof queryValue === 'object' && !util.isRegExp(queryValue)) {
    keys = Object.keys(queryValue);
    firstChars = _.map(keys, function (item) { return item[0]; });
    dollarFirstChars = _.filter(firstChars, function (c) { return c === '$'; });

    if (dollarFirstChars.length !== 0 && dollarFirstChars.length !== firstChars.length) {
      throw "You cannot mix operators and normal fields";
    }

    // queryValue is an object of this form: { $comparisonOperator1: value1, ... }
    // every operator must accept for the part to match
    if (dollarFirstChars.length > 0) {
      for (i = 0; i < keys.length; i += 1) {
        if (!comparisonFunctions[keys[i]]) { throw "Unknown comparison function " + keys[i]; }

        if (!comparisonFunctions[keys[i]](objValue, queryValue[keys[i]])) { return false; }
      }
      return true;
    }
  }

  // Using regular expressions with basic querying
  if (util.isRegExp(queryValue)) { return comparisonFunctions.$regex(objValue, queryValue); }

  // queryValue is either a native value or a normal object
  // Basic matching is possible
  if (!areThingsEqual(objValue, queryValue)) { return false; }

  return true;
}
|
||||
|
||||
|
||||
// Interface
|
||||
module.exports.serialize = serialize;
|
||||
module.exports.deserialize = deserialize;
|
||||
module.exports.deepCopy = deepCopy;
|
||||
module.exports.checkObject = checkObject;
|
||||
module.exports.isPrimitiveType = isPrimitiveType;
|
||||
module.exports.modify = modify;
|
||||
module.exports.getDotValue = getDotValue;
|
||||
module.exports.match = match;
|
||||
module.exports.areThingsEqual = areThingsEqual;
|
||||
module.exports.compareThings = compareThings;
|
@ -0,0 +1,335 @@
|
||||
/**
|
||||
* Handle every persistence-related task
|
||||
* The interface Datastore expects to be implemented is
|
||||
* * Persistence.loadDatabase(callback) and callback has signature err
|
||||
* * Persistence.persistNewState(newDocs, callback) where newDocs is an array of documents and callback has signature err
|
||||
*/
|
||||
|
||||
var storage = require('./storage')
|
||||
, path = require('path')
|
||||
, model = require('./model')
|
||||
, async = require('async')
|
||||
, customUtils = require('./customUtils')
|
||||
, Index = require('./indexes')
|
||||
;
|
||||
|
||||
|
||||
/**
|
||||
* Create a new Persistence object for database options.db
|
||||
* @param {Datastore} options.db
|
||||
* @param {Boolean} options.nodeWebkitAppName Optional, specify the name of your NW app if you want options.filename to be relative to the directory where
|
||||
* Node Webkit stores application data such as cookies and local storage (the best place to store data in my opinion)
|
||||
*/
|
||||
function Persistence (options) {
|
||||
this.db = options.db;
|
||||
this.inMemoryOnly = this.db.inMemoryOnly;
|
||||
this.filename = this.db.filename;
|
||||
|
||||
if (!this.inMemoryOnly && this.filename) {
|
||||
if (this.filename.charAt(this.filename.length - 1) === '~') {
|
||||
throw "The datafile name can't end with a ~, which is reserved for automatic backup files";
|
||||
} else {
|
||||
this.tempFilename = this.filename + '~';
|
||||
this.oldFilename = this.filename + '~~';
|
||||
}
|
||||
}
|
||||
|
||||
// For NW apps, store data in the same directory where NW stores application data
|
||||
if (this.filename && options.nodeWebkitAppName) {
|
||||
console.log("==================================================================");
|
||||
console.log("WARNING: The nodeWebkitAppName option is deprecated");
|
||||
console.log("To get the path to the directory where Node Webkit stores the data");
|
||||
console.log("for your app, use the internal nw.gui module like this");
|
||||
console.log("require('nw.gui').App.dataPath");
|
||||
console.log("See https://github.com/rogerwang/node-webkit/issues/500");
|
||||
console.log("==================================================================");
|
||||
this.filename = Persistence.getNWAppFilename(options.nodeWebkitAppName, this.filename);
|
||||
this.tempFilename = Persistence.getNWAppFilename(options.nodeWebkitAppName, this.tempFilename);
|
||||
this.oldFilename = Persistence.getNWAppFilename(options.nodeWebkitAppName, this.oldFilename);
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
/**
|
||||
* Check if a directory exists and create it on the fly if it is not the case
|
||||
* cb is optional, signature: err
|
||||
*/
|
||||
Persistence.ensureDirectoryExists = function (dir, cb) {
|
||||
var callback = cb || function () {}
|
||||
;
|
||||
|
||||
storage.mkdirp(dir, function (err) { return callback(err); });
|
||||
};
|
||||
|
||||
|
||||
Persistence.ensureFileDoesntExist = function (file, callback) {
|
||||
storage.exists(file, function (exists) {
|
||||
if (!exists) { return callback(null); }
|
||||
|
||||
storage.unlink(file, function (err) { return callback(err); });
|
||||
});
|
||||
};
|
||||
|
||||
|
||||
/**
|
||||
* Return the path the datafile if the given filename is relative to the directory where Node Webkit stores
|
||||
* data for this application. Probably the best place to store data
|
||||
*/
|
||||
Persistence.getNWAppFilename = function (appName, relativeFilename) {
|
||||
var home;
|
||||
|
||||
switch (process.platform) {
|
||||
case 'win32':
|
||||
case 'win64':
|
||||
home = process.env.LOCALAPPDATA || process.env.APPDATA;
|
||||
if (!home) { throw "Couldn't find the base application data folder"; }
|
||||
home = path.join(home, appName);
|
||||
break;
|
||||
case 'darwin':
|
||||
home = process.env.HOME;
|
||||
if (!home) { throw "Couldn't find the base application data directory"; }
|
||||
home = path.join(home, 'Library', 'Application Support', appName);
|
||||
break;
|
||||
case 'linux':
|
||||
home = process.env.HOME;
|
||||
if (!home) { throw "Couldn't find the base application data directory"; }
|
||||
home = path.join(home, '.config', appName);
|
||||
break;
|
||||
default:
|
||||
throw "Can't use the Node Webkit relative path for platform " + process.platform;
|
||||
break;
|
||||
}
|
||||
|
||||
return path.join(home, 'nedb-data', relativeFilename);
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Persist cached database
|
||||
* This serves as a compaction function since the cache always contains only the number of documents in the collection
|
||||
* while the data file is append-only so it may grow larger
|
||||
* @param {Function} cb Optional callback, signature: err
|
||||
*/
|
||||
Persistence.prototype.persistCachedDatabase = function (cb) {
|
||||
var callback = cb || function () {}
|
||||
, toPersist = ''
|
||||
, self = this
|
||||
;
|
||||
|
||||
if (this.inMemoryOnly) { return callback(null); }
|
||||
|
||||
this.db.getAllData().forEach(function (doc) {
|
||||
toPersist += model.serialize(doc) + '\n';
|
||||
});
|
||||
Object.keys(this.db.indexes).forEach(function (fieldName) {
|
||||
if (fieldName != "_id") { // The special _id index is managed by datastore.js, the others need to be persisted
|
||||
toPersist += model.serialize({ $$indexCreated: { fieldName: fieldName, unique: self.db.indexes[fieldName].unique, sparse: self.db.indexes[fieldName].sparse }}) + '\n';
|
||||
}
|
||||
});
|
||||
|
||||
async.waterfall([
|
||||
async.apply(Persistence.ensureFileDoesntExist, self.tempFilename)
|
||||
, async.apply(Persistence.ensureFileDoesntExist, self.oldFilename)
|
||||
, function (cb) {
|
||||
storage.exists(self.filename, function (exists) {
|
||||
if (exists) {
|
||||
storage.rename(self.filename, self.oldFilename, function (err) { return cb(err); });
|
||||
} else {
|
||||
return cb();
|
||||
}
|
||||
});
|
||||
}
|
||||
, function (cb) {
|
||||
storage.writeFile(self.tempFilename, toPersist, function (err) { return cb(err); });
|
||||
}
|
||||
, function (cb) {
|
||||
storage.rename(self.tempFilename, self.filename, function (err) { return cb(err); });
|
||||
}
|
||||
, async.apply(Persistence.ensureFileDoesntExist, self.oldFilename)
|
||||
], function (err) { if (err) { return callback(err); } else { return callback(null); } })
|
||||
};
|
||||
|
||||
|
||||
/**
|
||||
* Queue a rewrite of the datafile
|
||||
*/
|
||||
Persistence.prototype.compactDatafile = function () {
|
||||
this.db.executor.push({ this: this, fn: this.persistCachedDatabase, arguments: [] });
|
||||
};
|
||||
|
||||
|
||||
/**
|
||||
* Set automatic compaction every interval ms
|
||||
* @param {Number} interval in milliseconds, with an enforced minimum of 5 seconds
|
||||
*/
|
||||
Persistence.prototype.setAutocompactionInterval = function (interval) {
|
||||
var self = this
|
||||
, minInterval = 5000
|
||||
, realInterval = Math.max(interval || 0, minInterval)
|
||||
;
|
||||
|
||||
this.stopAutocompaction();
|
||||
|
||||
this.autocompactionIntervalId = setInterval(function () {
|
||||
self.compactDatafile();
|
||||
}, realInterval);
|
||||
};
|
||||
|
||||
|
||||
/**
|
||||
* Stop autocompaction (do nothing if autocompaction was not running)
|
||||
*/
|
||||
Persistence.prototype.stopAutocompaction = function () {
|
||||
if (this.autocompactionIntervalId) { clearInterval(this.autocompactionIntervalId); }
|
||||
};
|
||||
|
||||
|
||||
/**
|
||||
* Persist new state for the given newDocs (can be insertion, update or removal)
|
||||
* Use an append-only format
|
||||
* @param {Array} newDocs Can be empty if no doc was updated/removed
|
||||
* @param {Function} cb Optional, signature: err
|
||||
*/
|
||||
Persistence.prototype.persistNewState = function (newDocs, cb) {
|
||||
var self = this
|
||||
, toPersist = ''
|
||||
, callback = cb || function () {}
|
||||
;
|
||||
|
||||
// In-memory only datastore
|
||||
if (self.inMemoryOnly) { return callback(null); }
|
||||
|
||||
newDocs.forEach(function (doc) {
|
||||
toPersist += model.serialize(doc) + '\n';
|
||||
});
|
||||
|
||||
if (toPersist.length === 0) { return callback(null); }
|
||||
|
||||
storage.appendFile(self.filename, toPersist, 'utf8', function (err) {
|
||||
return callback(err);
|
||||
});
|
||||
};
|
||||
|
||||
|
||||
/**
|
||||
* From a database's raw data, return the corresponding
|
||||
* machine understandable collection
|
||||
*/
|
||||
Persistence.treatRawData = function (rawData) {
|
||||
var data = rawData.split('\n')
|
||||
, dataById = {}
|
||||
, tdata = []
|
||||
, i
|
||||
, indexes = {}
|
||||
;
|
||||
|
||||
for (i = 0; i < data.length; i += 1) {
|
||||
var doc;
|
||||
|
||||
try {
|
||||
doc = model.deserialize(data[i]);
|
||||
if (doc._id) {
|
||||
if (doc.$$deleted === true) {
|
||||
delete dataById[doc._id];
|
||||
} else {
|
||||
dataById[doc._id] = doc;
|
||||
}
|
||||
} else if (doc.$$indexCreated && doc.$$indexCreated.fieldName != undefined) {
|
||||
indexes[doc.$$indexCreated.fieldName] = doc.$$indexCreated;
|
||||
} else if (typeof doc.$$indexRemoved === "string") {
|
||||
delete indexes[doc.$$indexRemoved];
|
||||
}
|
||||
} catch (e) {
|
||||
}
|
||||
}
|
||||
|
||||
Object.keys(dataById).forEach(function (k) {
|
||||
tdata.push(dataById[k]);
|
||||
});
|
||||
|
||||
return { data: tdata, indexes: indexes };
|
||||
};
|
||||
|
||||
|
||||
/**
|
||||
* Ensure that this.filename contains the most up-to-date version of the data
|
||||
* Even if a loadDatabase crashed before
|
||||
*/
|
||||
Persistence.prototype.ensureDatafileIntegrity = function (callback) {
|
||||
var self = this ;
|
||||
|
||||
storage.exists(self.filename, function (filenameExists) {
|
||||
// Write was successful
|
||||
if (filenameExists) { return callback(null); }
|
||||
|
||||
storage.exists(self.oldFilename, function (oldFilenameExists) {
|
||||
// New database
|
||||
if (!oldFilenameExists) {
|
||||
return storage.writeFile(self.filename, '', 'utf8', function (err) { callback(err); });
|
||||
}
|
||||
|
||||
// Write failed, use old version
|
||||
storage.rename(self.oldFilename, self.filename, function (err) { return callback(err); });
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
|
||||
/**
|
||||
* Load the database
|
||||
* 1) Create all indexes
|
||||
* 2) Insert all data
|
||||
* 3) Compact the database
|
||||
* This means pulling data out of the data file or creating it if it doesn't exist
|
||||
* Also, all data is persisted right away, which has the effect of compacting the database file
|
||||
* This operation is very quick at startup for a big collection (60ms for ~10k docs)
|
||||
* @param {Function} cb Optional callback, signature: err
|
||||
*/
|
||||
Persistence.prototype.loadDatabase = function (cb) {
|
||||
var callback = cb || function () {}
|
||||
, self = this
|
||||
;
|
||||
|
||||
self.db.resetIndexes();
|
||||
|
||||
// In-memory only datastore
|
||||
if (self.inMemoryOnly) { return callback(null); }
|
||||
|
||||
async.waterfall([
|
||||
function (cb) {
|
||||
Persistence.ensureDirectoryExists(path.dirname(self.filename), function (err) {
|
||||
self.ensureDatafileIntegrity(function (exists) {
|
||||
storage.readFile(self.filename, 'utf8', function (err, rawData) {
|
||||
|
||||
if (err) { return cb(err); }
|
||||
var treatedData = Persistence.treatRawData(rawData);
|
||||
|
||||
// Recreate all indexes in the datafile
|
||||
Object.keys(treatedData.indexes).forEach(function (key) {
|
||||
self.db.indexes[key] = new Index(treatedData.indexes[key]);
|
||||
});
|
||||
|
||||
// Fill cached database (i.e. all indexes) with data
|
||||
try {
|
||||
self.db.resetIndexes(treatedData.data);
|
||||
} catch (e) {
|
||||
self.db.resetIndexes(); // Rollback any index which didn't fail
|
||||
return cb(e);
|
||||
}
|
||||
|
||||
self.db.persistence.persistCachedDatabase(cb);
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
], function (err) {
|
||||
if (err) { return callback(err); }
|
||||
|
||||
self.db.executor.processBuffer();
|
||||
return callback(null);
|
||||
});
|
||||
};
|
||||
|
||||
|
||||
// Interface
|
||||
module.exports = Persistence;
|
@ -0,0 +1,15 @@
|
||||
/**
|
||||
* Way data is stored for this database
|
||||
* For a Node.js/Node Webkit database it's the file system
|
||||
* For a browser-side database it's localStorage when supported
|
||||
*
|
||||
* This version is the Node.js/Node Webkit version
|
||||
*/
|
||||
|
||||
var fs = require('fs')
|
||||
, mkdirp = require('mkdirp')
|
||||
;
|
||||
|
||||
|
||||
module.exports = fs;
|
||||
module.exports.mkdirp = mkdirp;
|
@ -0,0 +1,19 @@
|
||||
Copyright (c) 2010 Caolan McMahon
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,11 @@
|
||||
{
|
||||
"name": "async",
|
||||
"repo": "caolan/async",
|
||||
"description": "Higher-order functions and common patterns for asynchronous code",
|
||||
"version": "0.1.23",
|
||||
"keywords": [],
|
||||
"dependencies": {},
|
||||
"development": {},
|
||||
"main": "lib/async.js",
|
||||
"scripts": [ "lib/async.js" ]
|
||||
}
|
@ -0,0 +1,958 @@
|
||||
/*global setImmediate: false, setTimeout: false, console: false */
|
||||
(function () {
|
||||
|
||||
var async = {};
|
||||
|
||||
// global on the server, window in the browser
|
||||
var root, previous_async;
|
||||
|
||||
root = this;
|
||||
if (root != null) {
|
||||
previous_async = root.async;
|
||||
}
|
||||
|
||||
async.noConflict = function () {
|
||||
root.async = previous_async;
|
||||
return async;
|
||||
};
|
||||
|
||||
function only_once(fn) {
|
||||
var called = false;
|
||||
return function() {
|
||||
if (called) throw new Error("Callback was already called.");
|
||||
called = true;
|
||||
fn.apply(root, arguments);
|
||||
}
|
||||
}
|
||||
|
||||
//// cross-browser compatiblity functions ////
|
||||
|
||||
var _each = function (arr, iterator) {
|
||||
if (arr.forEach) {
|
||||
return arr.forEach(iterator);
|
||||
}
|
||||
for (var i = 0; i < arr.length; i += 1) {
|
||||
iterator(arr[i], i, arr);
|
||||
}
|
||||
};
|
||||
|
||||
var _map = function (arr, iterator) {
|
||||
if (arr.map) {
|
||||
return arr.map(iterator);
|
||||
}
|
||||
var results = [];
|
||||
_each(arr, function (x, i, a) {
|
||||
results.push(iterator(x, i, a));
|
||||
});
|
||||
return results;
|
||||
};
|
||||
|
||||
var _reduce = function (arr, iterator, memo) {
|
||||
if (arr.reduce) {
|
||||
return arr.reduce(iterator, memo);
|
||||
}
|
||||
_each(arr, function (x, i, a) {
|
||||
memo = iterator(memo, x, i, a);
|
||||
});
|
||||
return memo;
|
||||
};
|
||||
|
||||
var _keys = function (obj) {
|
||||
if (Object.keys) {
|
||||
return Object.keys(obj);
|
||||
}
|
||||
var keys = [];
|
||||
for (var k in obj) {
|
||||
if (obj.hasOwnProperty(k)) {
|
||||
keys.push(k);
|
||||
}
|
||||
}
|
||||
return keys;
|
||||
};
|
||||
|
||||
//// exported async module functions ////
|
||||
|
||||
//// nextTick implementation with browser-compatible fallback ////
|
||||
if (typeof process === 'undefined' || !(process.nextTick)) {
|
||||
if (typeof setImmediate === 'function') {
|
||||
async.nextTick = function (fn) {
|
||||
// not a direct alias for IE10 compatibility
|
||||
setImmediate(fn);
|
||||
};
|
||||
async.setImmediate = async.nextTick;
|
||||
}
|
||||
else {
|
||||
async.nextTick = function (fn) {
|
||||
setTimeout(fn, 0);
|
||||
};
|
||||
async.setImmediate = async.nextTick;
|
||||
}
|
||||
}
|
||||
else {
|
||||
async.nextTick = process.nextTick;
|
||||
if (typeof setImmediate !== 'undefined') {
|
||||
async.setImmediate = function (fn) {
|
||||
// not a direct alias for IE10 compatibility
|
||||
setImmediate(fn);
|
||||
};
|
||||
}
|
||||
else {
|
||||
async.setImmediate = async.nextTick;
|
||||
}
|
||||
}
|
||||
|
||||
async.each = function (arr, iterator, callback) {
|
||||
callback = callback || function () {};
|
||||
if (!arr.length) {
|
||||
return callback();
|
||||
}
|
||||
var completed = 0;
|
||||
_each(arr, function (x) {
|
||||
iterator(x, only_once(function (err) {
|
||||
if (err) {
|
||||
callback(err);
|
||||
callback = function () {};
|
||||
}
|
||||
else {
|
||||
completed += 1;
|
||||
if (completed >= arr.length) {
|
||||
callback(null);
|
||||
}
|
||||
}
|
||||
}));
|
||||
});
|
||||
};
|
||||
async.forEach = async.each;
|
||||
|
||||
async.eachSeries = function (arr, iterator, callback) {
|
||||
callback = callback || function () {};
|
||||
if (!arr.length) {
|
||||
return callback();
|
||||
}
|
||||
var completed = 0;
|
||||
var iterate = function () {
|
||||
iterator(arr[completed], function (err) {
|
||||
if (err) {
|
||||
callback(err);
|
||||
callback = function () {};
|
||||
}
|
||||
else {
|
||||
completed += 1;
|
||||
if (completed >= arr.length) {
|
||||
callback(null);
|
||||
}
|
||||
else {
|
||||
iterate();
|
||||
}
|
||||
}
|
||||
});
|
||||
};
|
||||
iterate();
|
||||
};
|
||||
async.forEachSeries = async.eachSeries;
|
||||
|
||||
async.eachLimit = function (arr, limit, iterator, callback) {
|
||||
var fn = _eachLimit(limit);
|
||||
fn.apply(null, [arr, iterator, callback]);
|
||||
};
|
||||
async.forEachLimit = async.eachLimit;
|
||||
|
||||
var _eachLimit = function (limit) {
|
||||
|
||||
return function (arr, iterator, callback) {
|
||||
callback = callback || function () {};
|
||||
if (!arr.length || limit <= 0) {
|
||||
return callback();
|
||||
}
|
||||
var completed = 0;
|
||||
var started = 0;
|
||||
var running = 0;
|
||||
|
||||
(function replenish () {
|
||||
if (completed >= arr.length) {
|
||||
return callback();
|
||||
}
|
||||
|
||||
while (running < limit && started < arr.length) {
|
||||
started += 1;
|
||||
running += 1;
|
||||
iterator(arr[started - 1], function (err) {
|
||||
if (err) {
|
||||
callback(err);
|
||||
callback = function () {};
|
||||
}
|
||||
else {
|
||||
completed += 1;
|
||||
running -= 1;
|
||||
if (completed >= arr.length) {
|
||||
callback();
|
||||
}
|
||||
else {
|
||||
replenish();
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
})();
|
||||
};
|
||||
};
|
||||
|
||||
|
||||
var doParallel = function (fn) {
|
||||
return function () {
|
||||
var args = Array.prototype.slice.call(arguments);
|
||||
return fn.apply(null, [async.each].concat(args));
|
||||
};
|
||||
};
|
||||
var doParallelLimit = function(limit, fn) {
|
||||
return function () {
|
||||
var args = Array.prototype.slice.call(arguments);
|
||||
return fn.apply(null, [_eachLimit(limit)].concat(args));
|
||||
};
|
||||
};
|
||||
var doSeries = function (fn) {
|
||||
return function () {
|
||||
var args = Array.prototype.slice.call(arguments);
|
||||
return fn.apply(null, [async.eachSeries].concat(args));
|
||||
};
|
||||
};
|
||||
|
||||
|
||||
var _asyncMap = function (eachfn, arr, iterator, callback) {
|
||||
var results = [];
|
||||
arr = _map(arr, function (x, i) {
|
||||
return {index: i, value: x};
|
||||
});
|
||||
eachfn(arr, function (x, callback) {
|
||||
iterator(x.value, function (err, v) {
|
||||
results[x.index] = v;
|
||||
callback(err);
|
||||
});
|
||||
}, function (err) {
|
||||
callback(err, results);
|
||||
});
|
||||
};
|
||||
async.map = doParallel(_asyncMap);
|
||||
async.mapSeries = doSeries(_asyncMap);
|
||||
async.mapLimit = function (arr, limit, iterator, callback) {
|
||||
return _mapLimit(limit)(arr, iterator, callback);
|
||||
};
|
||||
|
||||
var _mapLimit = function(limit) {
|
||||
return doParallelLimit(limit, _asyncMap);
|
||||
};
|
||||
|
||||
// reduce only has a series version, as doing reduce in parallel won't
|
||||
// work in many situations.
|
||||
async.reduce = function (arr, memo, iterator, callback) {
|
||||
async.eachSeries(arr, function (x, callback) {
|
||||
iterator(memo, x, function (err, v) {
|
||||
memo = v;
|
||||
callback(err);
|
||||
});
|
||||
}, function (err) {
|
||||
callback(err, memo);
|
||||
});
|
||||
};
|
||||
// inject alias
|
||||
async.inject = async.reduce;
|
||||
// foldl alias
|
||||
async.foldl = async.reduce;
|
||||
|
||||
async.reduceRight = function (arr, memo, iterator, callback) {
|
||||
var reversed = _map(arr, function (x) {
|
||||
return x;
|
||||
}).reverse();
|
||||
async.reduce(reversed, memo, iterator, callback);
|
||||
};
|
||||
// foldr alias
|
||||
async.foldr = async.reduceRight;
|
||||
|
||||
var _filter = function (eachfn, arr, iterator, callback) {
|
||||
var results = [];
|
||||
arr = _map(arr, function (x, i) {
|
||||
return {index: i, value: x};
|
||||
});
|
||||
eachfn(arr, function (x, callback) {
|
||||
iterator(x.value, function (v) {
|
||||
if (v) {
|
||||
results.push(x);
|
||||
}
|
||||
callback();
|
||||
});
|
||||
}, function (err) {
|
||||
callback(_map(results.sort(function (a, b) {
|
||||
return a.index - b.index;
|
||||
}), function (x) {
|
||||
return x.value;
|
||||
}));
|
||||
});
|
||||
};
|
||||
async.filter = doParallel(_filter);
|
||||
async.filterSeries = doSeries(_filter);
|
||||
// select alias
|
||||
async.select = async.filter;
|
||||
async.selectSeries = async.filterSeries;
|
||||
|
||||
var _reject = function (eachfn, arr, iterator, callback) {
|
||||
var results = [];
|
||||
arr = _map(arr, function (x, i) {
|
||||
return {index: i, value: x};
|
||||
});
|
||||
eachfn(arr, function (x, callback) {
|
||||
iterator(x.value, function (v) {
|
||||
if (!v) {
|
||||
results.push(x);
|
||||
}
|
||||
callback();
|
||||
});
|
||||
}, function (err) {
|
||||
callback(_map(results.sort(function (a, b) {
|
||||
return a.index - b.index;
|
||||
}), function (x) {
|
||||
return x.value;
|
||||
}));
|
||||
});
|
||||
};
|
||||
async.reject = doParallel(_reject);
|
||||
async.rejectSeries = doSeries(_reject);
|
||||
|
||||
var _detect = function (eachfn, arr, iterator, main_callback) {
|
||||
eachfn(arr, function (x, callback) {
|
||||
iterator(x, function (result) {
|
||||
if (result) {
|
||||
main_callback(x);
|
||||
main_callback = function () {};
|
||||
}
|
||||
else {
|
||||
callback();
|
||||
}
|
||||
});
|
||||
}, function (err) {
|
||||
main_callback();
|
||||
});
|
||||
};
|
||||
async.detect = doParallel(_detect);
|
||||
async.detectSeries = doSeries(_detect);
|
||||
|
||||
async.some = function (arr, iterator, main_callback) {
|
||||
async.each(arr, function (x, callback) {
|
||||
iterator(x, function (v) {
|
||||
if (v) {
|
||||
main_callback(true);
|
||||
main_callback = function () {};
|
||||
}
|
||||
callback();
|
||||
});
|
||||
}, function (err) {
|
||||
main_callback(false);
|
||||
});
|
||||
};
|
||||
// any alias
|
||||
async.any = async.some;
|
||||
|
||||
async.every = function (arr, iterator, main_callback) {
|
||||
async.each(arr, function (x, callback) {
|
||||
iterator(x, function (v) {
|
||||
if (!v) {
|
||||
main_callback(false);
|
||||
main_callback = function () {};
|
||||
}
|
||||
callback();
|
||||
});
|
||||
}, function (err) {
|
||||
main_callback(true);
|
||||
});
|
||||
};
|
||||
// all alias
|
||||
async.all = async.every;
|
||||
|
||||
async.sortBy = function (arr, iterator, callback) {
|
||||
async.map(arr, function (x, callback) {
|
||||
iterator(x, function (err, criteria) {
|
||||
if (err) {
|
||||
callback(err);
|
||||
}
|
||||
else {
|
||||
callback(null, {value: x, criteria: criteria});
|
||||
}
|
||||
});
|
||||
}, function (err, results) {
|
||||
if (err) {
|
||||
return callback(err);
|
||||
}
|
||||
else {
|
||||
var fn = function (left, right) {
|
||||
var a = left.criteria, b = right.criteria;
|
||||
return a < b ? -1 : a > b ? 1 : 0;
|
||||
};
|
||||
callback(null, _map(results.sort(fn), function (x) {
|
||||
return x.value;
|
||||
}));
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
async.auto = function (tasks, callback) {
|
||||
callback = callback || function () {};
|
||||
var keys = _keys(tasks);
|
||||
if (!keys.length) {
|
||||
return callback(null);
|
||||
}
|
||||
|
||||
var results = {};
|
||||
|
||||
var listeners = [];
|
||||
var addListener = function (fn) {
|
||||
listeners.unshift(fn);
|
||||
};
|
||||
var removeListener = function (fn) {
|
||||
for (var i = 0; i < listeners.length; i += 1) {
|
||||
if (listeners[i] === fn) {
|
||||
listeners.splice(i, 1);
|
||||
return;
|
||||
}
|
||||
}
|
||||
};
|
||||
var taskComplete = function () {
|
||||
_each(listeners.slice(0), function (fn) {
|
||||
fn();
|
||||
});
|
||||
};
|
||||
|
||||
addListener(function () {
|
||||
if (_keys(results).length === keys.length) {
|
||||
callback(null, results);
|
||||
callback = function () {};
|
||||
}
|
||||
});
|
||||
|
||||
_each(keys, function (k) {
|
||||
var task = (tasks[k] instanceof Function) ? [tasks[k]]: tasks[k];
|
||||
var taskCallback = function (err) {
|
||||
var args = Array.prototype.slice.call(arguments, 1);
|
||||
if (args.length <= 1) {
|
||||
args = args[0];
|
||||
}
|
||||
if (err) {
|
||||
var safeResults = {};
|
||||
_each(_keys(results), function(rkey) {
|
||||
safeResults[rkey] = results[rkey];
|
||||
});
|
||||
safeResults[k] = args;
|
||||
callback(err, safeResults);
|
||||
// stop subsequent errors hitting callback multiple times
|
||||
callback = function () {};
|
||||
}
|
||||
else {
|
||||
results[k] = args;
|
||||
async.setImmediate(taskComplete);
|
||||
}
|
||||
};
|
||||
var requires = task.slice(0, Math.abs(task.length - 1)) || [];
|
||||
var ready = function () {
|
||||
return _reduce(requires, function (a, x) {
|
||||
return (a && results.hasOwnProperty(x));
|
||||
}, true) && !results.hasOwnProperty(k);
|
||||
};
|
||||
if (ready()) {
|
||||
task[task.length - 1](taskCallback, results);
|
||||
}
|
||||
else {
|
||||
var listener = function () {
|
||||
if (ready()) {
|
||||
removeListener(listener);
|
||||
task[task.length - 1](taskCallback, results);
|
||||
}
|
||||
};
|
||||
addListener(listener);
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
async.waterfall = function (tasks, callback) {
|
||||
callback = callback || function () {};
|
||||
if (tasks.constructor !== Array) {
|
||||
var err = new Error('First argument to waterfall must be an array of functions');
|
||||
return callback(err);
|
||||
}
|
||||
if (!tasks.length) {
|
||||
return callback();
|
||||
}
|
||||
var wrapIterator = function (iterator) {
|
||||
return function (err) {
|
||||
if (err) {
|
||||
callback.apply(null, arguments);
|
||||
callback = function () {};
|
||||
}
|
||||
else {
|
||||
var args = Array.prototype.slice.call(arguments, 1);
|
||||
var next = iterator.next();
|
||||
if (next) {
|
||||
args.push(wrapIterator(next));
|
||||
}
|
||||
else {
|
||||
args.push(callback);
|
||||
}
|
||||
async.setImmediate(function () {
|
||||
iterator.apply(null, args);
|
||||
});
|
||||
}
|
||||
};
|
||||
};
|
||||
wrapIterator(async.iterator(tasks))();
|
||||
};
|
||||
|
||||
var _parallel = function(eachfn, tasks, callback) {
|
||||
callback = callback || function () {};
|
||||
if (tasks.constructor === Array) {
|
||||
eachfn.map(tasks, function (fn, callback) {
|
||||
if (fn) {
|
||||
fn(function (err) {
|
||||
var args = Array.prototype.slice.call(arguments, 1);
|
||||
if (args.length <= 1) {
|
||||
args = args[0];
|
||||
}
|
||||
callback.call(null, err, args);
|
||||
});
|
||||
}
|
||||
}, callback);
|
||||
}
|
||||
else {
|
||||
var results = {};
|
||||
eachfn.each(_keys(tasks), function (k, callback) {
|
||||
tasks[k](function (err) {
|
||||
var args = Array.prototype.slice.call(arguments, 1);
|
||||
if (args.length <= 1) {
|
||||
args = args[0];
|
||||
}
|
||||
results[k] = args;
|
||||
callback(err);
|
||||
});
|
||||
}, function (err) {
|
||||
callback(err, results);
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
async.parallel = function (tasks, callback) {
|
||||
_parallel({ map: async.map, each: async.each }, tasks, callback);
|
||||
};
|
||||
|
||||
async.parallelLimit = function(tasks, limit, callback) {
|
||||
_parallel({ map: _mapLimit(limit), each: _eachLimit(limit) }, tasks, callback);
|
||||
};
|
||||
|
||||
async.series = function (tasks, callback) {
|
||||
callback = callback || function () {};
|
||||
if (tasks.constructor === Array) {
|
||||
async.mapSeries(tasks, function (fn, callback) {
|
||||
if (fn) {
|
||||
fn(function (err) {
|
||||
var args = Array.prototype.slice.call(arguments, 1);
|
||||
if (args.length <= 1) {
|
||||
args = args[0];
|
||||
}
|
||||
callback.call(null, err, args);
|
||||
});
|
||||
}
|
||||
}, callback);
|
||||
}
|
||||
else {
|
||||
var results = {};
|
||||
async.eachSeries(_keys(tasks), function (k, callback) {
|
||||
tasks[k](function (err) {
|
||||
var args = Array.prototype.slice.call(arguments, 1);
|
||||
if (args.length <= 1) {
|
||||
args = args[0];
|
||||
}
|
||||
results[k] = args;
|
||||
callback(err);
|
||||
});
|
||||
}, function (err) {
|
||||
callback(err, results);
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
async.iterator = function (tasks) {
|
||||
var makeCallback = function (index) {
|
||||
var fn = function () {
|
||||
if (tasks.length) {
|
||||
tasks[index].apply(null, arguments);
|
||||
}
|
||||
return fn.next();
|
||||
};
|
||||
fn.next = function () {
|
||||
return (index < tasks.length - 1) ? makeCallback(index + 1): null;
|
||||
};
|
||||
return fn;
|
||||
};
|
||||
return makeCallback(0);
|
||||
};
|
||||
|
||||
async.apply = function (fn) {
|
||||
var args = Array.prototype.slice.call(arguments, 1);
|
||||
return function () {
|
||||
return fn.apply(
|
||||
null, args.concat(Array.prototype.slice.call(arguments))
|
||||
);
|
||||
};
|
||||
};
|
||||
|
||||
var _concat = function (eachfn, arr, fn, callback) {
|
||||
var r = [];
|
||||
eachfn(arr, function (x, cb) {
|
||||
fn(x, function (err, y) {
|
||||
r = r.concat(y || []);
|
||||
cb(err);
|
||||
});
|
||||
}, function (err) {
|
||||
callback(err, r);
|
||||
});
|
||||
};
|
||||
async.concat = doParallel(_concat);
|
||||
async.concatSeries = doSeries(_concat);
|
||||
|
||||
async.whilst = function (test, iterator, callback) {
|
||||
if (test()) {
|
||||
iterator(function (err) {
|
||||
if (err) {
|
||||
return callback(err);
|
||||
}
|
||||
async.whilst(test, iterator, callback);
|
||||
});
|
||||
}
|
||||
else {
|
||||
callback();
|
||||
}
|
||||
};
|
||||
|
||||
async.doWhilst = function (iterator, test, callback) {
|
||||
iterator(function (err) {
|
||||
if (err) {
|
||||
return callback(err);
|
||||
}
|
||||
if (test()) {
|
||||
async.doWhilst(iterator, test, callback);
|
||||
}
|
||||
else {
|
||||
callback();
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
async.until = function (test, iterator, callback) {
|
||||
if (!test()) {
|
||||
iterator(function (err) {
|
||||
if (err) {
|
||||
return callback(err);
|
||||
}
|
||||
async.until(test, iterator, callback);
|
||||
});
|
||||
}
|
||||
else {
|
||||
callback();
|
||||
}
|
||||
};
|
||||
|
||||
async.doUntil = function (iterator, test, callback) {
|
||||
iterator(function (err) {
|
||||
if (err) {
|
||||
return callback(err);
|
||||
}
|
||||
if (!test()) {
|
||||
async.doUntil(iterator, test, callback);
|
||||
}
|
||||
else {
|
||||
callback();
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
// Build a task queue processed by `worker` with at most `concurrency`
// tasks in flight at once (defaults to 1, i.e. a serial queue).
// The returned object exposes push/unshift to enqueue work plus
// optional lifecycle hooks: `saturated` (queue reached concurrency),
// `empty` (last task handed to a worker), `drain` (all work finished).
async.queue = function (worker, concurrency) {
    if (concurrency === undefined) {
        concurrency = 1;
    }
    // Wrap each datum in a task record and schedule processing.
    // `pos` true => add to the front of the queue (unshift semantics).
    function _insert(q, data, pos, callback) {
        if(data.constructor !== Array) {
            data = [data];
        }
        _each(data, function(task) {
            var item = {
                data: task,
                // Non-function callbacks are dropped rather than invoked later.
                callback: typeof callback === 'function' ? callback : null
            };

            if (pos) {
                q.tasks.unshift(item);
            } else {
                q.tasks.push(item);
            }

            if (q.saturated && q.tasks.length === concurrency) {
                q.saturated();
            }
            // Defer processing so callers can attach hooks after push().
            async.setImmediate(q.process);
        });
    }

    var workers = 0;          // number of tasks currently being worked on
    var q = {
        tasks: [],
        concurrency: concurrency,
        saturated: null,
        empty: null,
        drain: null,
        push: function (data, callback) {
            _insert(q, data, false, callback);
        },
        unshift: function (data, callback) {
            _insert(q, data, true, callback);
        },
        // Hand the next task to the worker if a slot is free; re-invoked
        // after each task completes until the queue is exhausted.
        process: function () {
            if (workers < q.concurrency && q.tasks.length) {
                var task = q.tasks.shift();
                if (q.empty && q.tasks.length === 0) {
                    q.empty();
                }
                workers += 1;
                var next = function () {
                    workers -= 1;
                    if (task.callback) {
                        task.callback.apply(task, arguments);
                    }
                    // drain fires only when nothing is queued AND nothing running.
                    if (q.drain && q.tasks.length + workers === 0) {
                        q.drain();
                    }
                    q.process();
                };
                // Guard against a worker invoking its callback twice.
                var cb = only_once(next);
                worker(task.data, cb);
            }
        },
        length: function () {
            return q.tasks.length;
        },
        running: function () {
            return workers;
        }
    };
    return q;
};
|
||||
|
||||
// Like queue, but the worker receives BATCHES of up to `payload` tasks
// at a time (all queued tasks if `payload` is not a number). Only one
// batch is processed at a time. Hooks mirror async.queue: `saturated`,
// `empty`, `drain`.
async.cargo = function (worker, payload) {
    var working = false,      // true while a batch is being processed
        tasks = [];

    var cargo = {
        tasks: tasks,
        payload: payload,
        saturated: null,
        empty: null,
        drain: null,
        push: function (data, callback) {
            if(data.constructor !== Array) {
                data = [data];
            }
            _each(data, function(task) {
                tasks.push({
                    data: task,
                    // Non-function callbacks are dropped.
                    callback: typeof callback === 'function' ? callback : null
                });
                if (cargo.saturated && tasks.length === payload) {
                    cargo.saturated();
                }
            });
            // Defer so callers can attach hooks after push().
            async.setImmediate(cargo.process);
        },
        process: function process() {
            if (working) return;          // a batch is already in flight
            if (tasks.length === 0) {
                if(cargo.drain) cargo.drain();
                return;
            }

            // Take up to `payload` tasks (or everything when unbounded).
            var ts = typeof payload === 'number'
                ? tasks.splice(0, payload)
                : tasks.splice(0);

            var ds = _map(ts, function (task) {
                return task.data;
            });

            if(cargo.empty) cargo.empty();
            working = true;
            worker(ds, function () {
                working = false;

                // Fan the worker's result out to every task's callback.
                var args = arguments;
                _each(ts, function (data) {
                    if (data.callback) {
                        data.callback.apply(null, args);
                    }
                });

                // Immediately start the next batch, if any.
                process();
            });
        },
        length: function () {
            return tasks.length;
        },
        running: function () {
            return working;
        }
    };
    return cargo;
};
|
||||
|
||||
// Factory for console helpers (async.log / async.dir): returns a
// function that runs `fn` with the given arguments and routes the
// callback's results to console[name], or console.error on failure.
var _console_fn = function (name) {
    return function (fn) {
        var fnArgs = Array.prototype.slice.call(arguments, 1);
        fn.apply(null, fnArgs.concat([function (err) {
            var results = Array.prototype.slice.call(arguments, 1);
            // No-op in environments without a console object.
            if (typeof console === 'undefined') {
                return;
            }
            if (err) {
                if (console.error) {
                    console.error(err);
                }
            }
            else if (console[name]) {
                _each(results, function (x) {
                    console[name](x);
                });
            }
        }]));
    };
};
|
||||
// Console helpers built from the _console_fn factory.
async.log = _console_fn('log');
async.dir = _console_fn('dir');
/*async.info = _console_fn('info');
async.warn = _console_fn('warn');
async.error = _console_fn('error');*/
|
||||
|
||||
// Memoize an async function by its arguments. `hasher` maps the
// argument list to a cache key (identity of the first argument by
// default). Concurrent calls for the same key while the first is in
// flight are queued and all receive the single result.
async.memoize = function (fn, hasher) {
    var memo = {};      // key -> cached callback arguments
    var queues = {};    // key -> callbacks waiting on an in-flight call
    hasher = hasher || function (x) {
        return x;
    };
    var memoized = function () {
        var args = Array.prototype.slice.call(arguments);
        var callback = args.pop();
        var key = hasher.apply(null, args);
        if (key in memo) {
            // Cache hit: replay the stored callback arguments.
            callback.apply(null, memo[key]);
        }
        else if (key in queues) {
            // A call for this key is already running; wait for it.
            queues[key].push(callback);
        }
        else {
            queues[key] = [callback];
            fn.apply(null, args.concat([function () {
                memo[key] = arguments;
                // Flush everyone who queued up while fn was running.
                var q = queues[key];
                delete queues[key];
                for (var i = 0, l = q.length; i < l; i++) {
                    q[i].apply(null, arguments);
                }
            }]));
        }
    };
    // Expose the cache and the original fn for unmemoize/testing.
    memoized.memo = memo;
    memoized.unmemoized = fn;
    return memoized;
};
|
||||
|
||||
// Undo async.memoize: returns a wrapper that always calls the original
// (unmemoized) function, or `fn` itself if it was never memoized.
async.unmemoize = function (fn) {
    return function () {
        var target = fn.unmemoized || fn;
        return target.apply(null, arguments);
    };
};
|
||||
|
||||
// Run `iterator` `count` times in parallel, collecting results in
// order — equivalent to map over the indices 0..count-1.
async.times = function (count, iterator, callback) {
    var indices = [];
    for (var i = 0; i < count; i += 1) {
        indices.push(i);
    }
    return async.map(indices, iterator, callback);
};
|
||||
|
||||
// Sequential variant of async.times: `iterator` runs once per index
// 0..count-1, one at a time, via mapSeries.
async.timesSeries = function (count, iterator, callback) {
    var indices = [];
    for (var i = 0; i < count; i += 1) {
        indices.push(i);
    }
    return async.mapSeries(indices, iterator, callback);
};
|
||||
|
||||
// Compose async functions right-to-left: compose(f, g, h) yields a
// function equivalent to f(g(h(...))). Each function receives the
// previous function's results plus a callback.
async.compose = function (/* functions... */) {
    // Reverse in place so reduce applies the right-most function first.
    var fns = Array.prototype.reverse.call(arguments);
    return function () {
        var that = this;    // preserve the caller's `this` for every fn
        var args = Array.prototype.slice.call(arguments);
        var callback = args.pop();
        // Thread the argument list through the chain: each step's
        // callback results become the next step's arguments.
        async.reduce(fns, args, function (newargs, fn, cb) {
            fn.apply(that, newargs.concat([function () {
                var err = arguments[0];
                var nextargs = Array.prototype.slice.call(arguments, 1);
                cb(err, nextargs);
            }]))
        },
        function (err, results) {
            callback.apply(that, [err].concat(results));
        });
    };
};
|
||||
|
||||
// Shared implementation for applyEach/applyEachSeries: call every
// function in `fns` with the same argument list, using the iteration
// strategy `eachfn`. If call-time arguments are supplied beyond `fns`
// the calls run immediately; otherwise a curried runner is returned.
var _applyEach = function (eachfn, fns /*args...*/) {
    var run = function () {
        var self = this;
        var callArgs = Array.prototype.slice.call(arguments);
        var finalCallback = callArgs.pop();
        return eachfn(fns, function (fn, cb) {
            fn.apply(self, callArgs.concat([cb]));
        },
        finalCallback);
    };
    if (arguments.length > 2) {
        var extraArgs = Array.prototype.slice.call(arguments, 2);
        return run.apply(this, extraArgs);
    }
    return go_curried();

    // Curried form: defer execution until arguments arrive.
    function go_curried() {
        return run;
    }
};
|
||||
// Public applyEach variants: parallel and strictly-sequential.
async.applyEach = doParallel(_applyEach);
async.applyEachSeries = doSeries(_applyEach);
|
||||
|
||||
// Run `fn` in an endless loop; `fn` receives a `next` continuation to
// start the next cycle. Stops only when an error is passed to `next`:
// the error goes to `callback` if provided, otherwise it is thrown.
// NOTE(review): each cycle recurses synchronously when `fn` calls
// `next` without deferring, so a purely-synchronous `fn` can overflow
// the stack — confirm callers defer if iteration counts are large.
async.forever = function (fn, callback) {
    function next(err) {
        if (err) {
            if (callback) {
                return callback(err);
            }
            throw err;
        }
        fn(next);
    }
    next();
};
|
||||
|
||||
// Export the async object for whichever module system is present.
// AMD / RequireJS
if (typeof define !== 'undefined' && define.amd) {
    define([], function () {
        return async;
    });
}
// Node.js
else if (typeof module !== 'undefined' && module.exports) {
    module.exports = async;
}
// included directly via <script> tag
else {
    root.async = async;
}
|
||||
|
||||
}());
|
File diff suppressed because one or more lines are too long
@ -0,0 +1,16 @@
|
||||
lib-cov
|
||||
*.seed
|
||||
*.log
|
||||
*.csv
|
||||
*.dat
|
||||
*.out
|
||||
*.pid
|
||||
*.gz
|
||||
|
||||
pids
|
||||
logs
|
||||
results
|
||||
|
||||
npm-debug.log
|
||||
|
||||
node_modules
|
@ -0,0 +1,7 @@
|
||||
test:
|
||||
@echo "Launching tests"
|
||||
@ ./node_modules/.bin/mocha --timeout 10000 --reporter spec
|
||||
@echo "Tests finished"
|
||||
|
||||
.PHONY: test
|
||||
|
@ -0,0 +1,123 @@
|
||||
# Binary search trees for Node.js
|
||||
|
||||
Two implementations of binary search tree: <a href="http://en.wikipedia.org/wiki/Binary_search_tree" target="_blank">basic</a> and <a href="http://en.wikipedia.org/wiki/AVL_tree" target="_blank">AVL</a> (a kind of self-balancing binary search tree). I wrote this module primarily to store indexes for <a href="https://github.com/louischatriot/nedb" target="_blank">NeDB</a> (a dependency-less JavaScript database).
|
||||
|
||||
|
||||
## Installation and tests
|
||||
Package name is `binary-search-tree`.
|
||||
|
||||
```bash
|
||||
npm install binary-search-tree --save
|
||||
|
||||
make test
|
||||
```
|
||||
|
||||
## Usage
|
||||
The API mainly provides 3 functions: `insert`, `search` and `delete`. If you do not create a unique-type binary search tree, you can store multiple pieces of data for the same key. Doing so with a unique-type BST will result in an error being thrown. Data is always returned as an array, and you can delete all data relating to a given key, or just one piece of data.
|
||||
|
||||
```javascript
|
||||
var BinarySearchTree = require('binary-search-tree').BinarySearchTree
|
||||
, AVLTree = require('binary-search-tree').AVLTree // Same API as BinarySearchTree
|
||||
|
||||
// Creating a binary search tree
|
||||
var bst = new BinarySearchTree();
|
||||
|
||||
// Inserting some data
|
||||
bst.insert(15, 'some data for key 15');
|
||||
bst.insert(12, 'something else');
|
||||
bst.insert(18, 'hello');
|
||||
|
||||
// You can insert multiple pieces of data for the same key
|
||||
// if your tree doesn't enforce a unique constraint
|
||||
bst.insert(18, 'world');
|
||||
|
||||
// Retrieving data (always returned as an array of all data stored for this key)
|
||||
bst.search(15); // Equal to ['some data for key 15']
|
||||
bst.search(18); // Equal to ['hello', 'world']
|
||||
bst.search(1); // Equal to []
|
||||
|
||||
// Search between bounds with a MongoDB-like query
|
||||
// Data is returned in key order
|
||||
// Note the difference between $lt (less than) and $gte (less than OR EQUAL)
|
||||
bst.betweenBounds({ $lt: 18, $gte: 12}); // Equal to ['something else', 'some data for key 15']
|
||||
|
||||
// Deleting all the data relating to a key
|
||||
bst.delete(15); // bst.search(15) will now give []
|
||||
bst.delete(18, 'world'); // bst.search(18) will now give ['hello']
|
||||
```
|
||||
|
||||
There are three optional parameters you can pass the BST constructor, allowing you to enforce a key-uniqueness constraint, use a custom function to compare keys and use a custom function to check whether values are equal. These parameters are all passed in an object.
|
||||
|
||||
### Uniqueness
|
||||
|
||||
```javascript
|
||||
var bst = new BinarySearchTree({ unique: true });
|
||||
bst.insert(10, 'hello');
|
||||
bst.insert(10, 'world'); // Will throw an error
|
||||
```
|
||||
|
||||
### Custom key comparison
|
||||
|
||||
```javascript
|
||||
// Custom key comparison function
|
||||
// It needs to return a negative number if a is less than b,
|
||||
// a positive number if a is greater than b
|
||||
// and 0 if they are equal
|
||||
// If none is provided, the default one can compare numbers, dates and strings
|
||||
// which are the most common usecases
|
||||
function compareKeys (a, b) {
|
||||
if (a.age < b.age) { return -1; }
|
||||
if (a.age > b.age) { return 1; }
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
// Now we can use objects with an 'age' property as keys
|
||||
var bst = new BinarySearchTree({ compareKeys: compareKeys });
|
||||
bst.insert({ age: 23 }, 'Mark');
|
||||
bst.insert({ age: 47 }, 'Franck');
|
||||
```
|
||||
|
||||
### Custom value checking
|
||||
|
||||
```javascript
|
||||
// Custom value equality checking function used when we try to just delete one piece of data
|
||||
// Returns true if a and b are considered the same, false otherwise
|
||||
// The default function is able to compare numbers and strings
|
||||
function checkValueEquality (a, b) {
|
||||
return a.length === b.length;
|
||||
}
|
||||
var bst = new BinarySearchTree({ checkValueEquality: checkValueEquality });
|
||||
bst.insert(10, 'hello');
|
||||
bst.insert(10, 'world');
|
||||
bst.insert(10, 'howdoyoudo');
|
||||
|
||||
bst.delete(10, 'abcde');
|
||||
bst.search(10); // Returns ['howdoyoudo']
|
||||
```
|
||||
|
||||
|
||||
## License
|
||||
|
||||
(The MIT License)
|
||||
|
||||
Copyright (c) 2013 Louis Chatriot <louis.chatriot@gmail.com>
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining
|
||||
a copy of this software and associated documentation files (the
|
||||
'Software'), to deal in the Software without restriction, including
|
||||
without limitation the rights to use, copy, modify, merge, publish,
|
||||
distribute, sublicense, and/or sell copies of the Software, and to
|
||||
permit persons to whom the Software is furnished to do so, subject to
|
||||
the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be
|
||||
included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
|
||||
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
|
||||
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
|
||||
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
|
||||
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
|
||||
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
@ -0,0 +1,2 @@
|
||||
// Package entry point: expose both tree implementations.
module.exports.BinarySearchTree = require('./lib/bst');
module.exports.AVLTree = require('./lib/avltree');
|
@ -0,0 +1,455 @@
|
||||
/**
|
||||
* Self-balancing binary search tree using the AVL implementation
|
||||
*/
|
||||
var BinarySearchTree = require('./bst')
|
||||
, customUtils = require('./customUtils')
|
||||
, util = require('util')
|
||||
, _ = require('underscore')
|
||||
;
|
||||
|
||||
|
||||
/**
 * Constructor of the public AVL tree wrapper.
 * We can't use a direct pointer to the root node (as in the simple binary search tree)
 * as the root will change during tree rotations, so the wrapper holds the
 * current root in `this.tree` and delegates every operation to it.
 * @param {Object} options Optional, forwarded to the internal _AVLTree
 * @param {Boolean} options.unique Whether to enforce a 'unique' constraint on the key or not
 * @param {Function} options.compareKeys Initialize this BST's compareKeys
 */
function AVLTree (options) {
  this.tree = new _AVLTree(options);
}
|
||||
|
||||
|
||||
/**
 * Constructor of the internal AVLTree node (each node is itself a subtree).
 * @param {Object} options Optional
 * @param {Boolean} options.unique Whether to enforce a 'unique' constraint on the key or not
 * @param {Key} options.key Initialize this BST's key with key
 * @param {Value} options.value Initialize this BST's data with [value]
 * @param {Function} options.compareKeys Initialize this BST's compareKeys
 */
function _AVLTree (options) {
  options = options || {};

  this.left = null;
  this.right = null;
  this.parent = options.parent !== undefined ? options.parent : null;
  // `key` is only set when provided so hasOwnProperty('key') can
  // distinguish an empty tree from one storing an undefined-like key.
  if (options.hasOwnProperty('key')) { this.key = options.key; }
  this.data = options.hasOwnProperty('value') ? [options.value] : [];
  this.unique = options.unique || false;

  // Fall back to the library defaults (numbers/dates/strings).
  this.compareKeys = options.compareKeys || customUtils.defaultCompareKeysFunction;
  this.checkValueEquality = options.checkValueEquality || customUtils.defaultCheckValueEquality;
}
|
||||
|
||||
|
||||
/**
 * Inherit basic functions (search, betweenBounds, prettyPrint, ...)
 * from the basic binary search tree.
 */
util.inherits(_AVLTree, BinarySearchTree);

/**
 * Keep a pointer to the internal tree constructor for testing purposes.
 */
AVLTree._AVLTree = _AVLTree;
|
||||
|
||||
|
||||
/**
 * Verify that every node's recorded height equals
 * 1 + max(height(left), height(right)), treating a missing child as
 * height 0. Throws on the first node that violates the invariant.
 */
_AVLTree.prototype.checkHeightCorrect = function () {
  if (!this.hasOwnProperty('key')) { return; }   // Nothing to check in an empty tree

  if (this.left && this.left.height === undefined) { throw "Undefined height for node " + this.left.key; }
  if (this.right && this.right.height === undefined) { throw "Undefined height for node " + this.right.key; }
  if (this.height === undefined) { throw "Undefined height for node " + this.key; }

  var hLeft = this.left ? this.left.height : 0;
  var hRight = this.right ? this.right.height : 0;
  if (this.height !== 1 + Math.max(hLeft, hRight)) { throw "Height constraint failed for node " + this.key; }

  // Recurse into both subtrees.
  if (this.left) { this.left.checkHeightCorrect(); }
  if (this.right) { this.right.checkHeightCorrect(); }
};
|
||||
|
||||
|
||||
/**
 * Balance factor of this node: height of the left subtree minus height
 * of the right subtree (missing children count as height 0).
 */
_AVLTree.prototype.balanceFactor = function () {
  var hl = this.left ? this.left.height : 0;
  var hr = this.right ? this.right.height : 0;
  return hl - hr;
};
|
||||
|
||||
|
||||
/**
 * Verify every node's balance factor lies in [-1, 1] (the AVL
 * invariant); throws at the first unbalanced node.
 */
_AVLTree.prototype.checkBalanceFactors = function () {
  var factor = this.balanceFactor();
  if (factor > 1 || factor < -1) { throw 'Tree is unbalanced at node ' + this.key; }

  if (this.left) { this.left.checkBalanceFactors(); }
  if (this.right) { this.right.checkBalanceFactors(); }
};
|
||||
|
||||
|
||||
/**
 * When checking if the BST conditions are met, also check that the heights are correct
 * and the tree is balanced (the full AVL invariant). Throws on violation.
 */
_AVLTree.prototype.checkIsAVLT = function () {
  // Reuse the BST check from the parent class, then add AVL-specific checks.
  _AVLTree.super_.prototype.checkIsBST.call(this);
  this.checkHeightCorrect();
  this.checkBalanceFactors();
};
// Public wrapper: delegate to the internal root node.
AVLTree.prototype.checkIsAVLT = function () { this.tree.checkIsAVLT(); };
|
||||
|
||||
|
||||
/**
 * Perform a right rotation of the tree if possible
 * and return the root of the resulting tree (the former left child).
 * The resulting tree's nodes' heights are also updated.
 * Naming: q = current root, p = left child (new root), b = p's right
 * subtree, which moves under q; a/c are p's left and q's right subtrees.
 */
_AVLTree.prototype.rightRotation = function () {
  var q = this
    , p = this.left
    , b
    , ah, bh, ch;

  if (!p) { return this; }   // No left child: no change possible

  b = p.right;

  // Alter tree structure: splice p into q's place under q's parent.
  if (q.parent) {
    p.parent = q.parent;
    if (q.parent.left === q) { q.parent.left = p; } else { q.parent.right = p; }
  } else {
    p.parent = null;
  }
  p.right = q;
  q.parent = p;
  q.left = b;                  // b keeps its ordering: between p and q
  if (b) { b.parent = q; }

  // Update heights bottom-up: q first (its children changed), then p.
  ah = p.left ? p.left.height : 0;
  bh = b ? b.height : 0;
  ch = q.right ? q.right.height : 0;
  q.height = Math.max(bh, ch) + 1;
  p.height = Math.max(ah, q.height) + 1;

  return p;
};
|
||||
|
||||
|
||||
/**
 * Perform a left rotation of the tree if possible
 * and return the root of the resulting tree (the former right child).
 * The resulting tree's nodes' heights are also updated.
 * Mirror image of rightRotation: p = current root, q = right child
 * (new root), b = q's left subtree, which moves under p.
 */
_AVLTree.prototype.leftRotation = function () {
  var p = this
    , q = this.right
    , b
    , ah, bh, ch;

  if (!q) { return this; }   // No right child: no change possible

  b = q.left;

  // Alter tree structure: splice q into p's place under p's parent.
  if (p.parent) {
    q.parent = p.parent;
    if (p.parent.left === p) { p.parent.left = q; } else { p.parent.right = q; }
  } else {
    q.parent = null;
  }
  q.left = p;
  p.parent = q;
  p.right = b;                 // b keeps its ordering: between p and q
  if (b) { b.parent = p; }

  // Update heights bottom-up: p first (its children changed), then q.
  ah = p.left ? p.left.height : 0;
  bh = b ? b.height : 0;
  ch = q.right ? q.right.height : 0;
  p.height = Math.max(ah, bh) + 1;
  q.height = Math.max(ch, p.height) + 1;

  return q;
};
|
||||
|
||||
|
||||
/**
 * Rebalance when the right subtree is too small compared to the left
 * (balance factor > 1). Handles the left-right case with a preliminary
 * left rotation of the left child, then applies a right rotation.
 * Returns the (possibly new) local root.
 */
_AVLTree.prototype.rightTooSmall = function () {
  if (this.balanceFactor() <= 1) { return this; }   // Already balanced enough

  // Left-right case: straighten the left child first.
  if (this.left.balanceFactor() < 0) { this.left.leftRotation(); }

  return this.rightRotation();
};
|
||||
|
||||
|
||||
/**
 * Rebalance when the left subtree is too small compared to the right
 * (balance factor < -1). Handles the right-left case with a preliminary
 * right rotation of the right child, then applies a left rotation.
 * Returns the (possibly new) local root.
 */
_AVLTree.prototype.leftTooSmall = function () {
  if (this.balanceFactor() >= -1) { return this; }   // Already balanced enough

  // Right-left case: straighten the right child first.
  if (this.right.balanceFactor() > 0) { this.right.rightRotation(); }

  return this.leftRotation();
};
|
||||
|
||||
|
||||
/**
 * Rebalance the tree along the given path. The path is given reversed (as it was calculated
 * in the insert and delete functions): path[0] is the root, the last
 * element is deepest. Heights are recomputed bottom-up and any node
 * whose balance factor leaves [-1, 1] is rotated back into shape.
 * Returns the new root of the tree.
 * Of course, the first element of the path must be the root of the tree.
 */
_AVLTree.prototype.rebalanceAlongPath = function (path) {
  var newRoot = this
    , rotated
    , i;

  if (!this.hasOwnProperty('key')) { delete this.height; return this; }   // Empty tree

  // Rebalance the tree and update all heights, deepest node first.
  for (i = path.length - 1; i >= 0; i -= 1) {
    path[i].height = 1 + Math.max(path[i].left ? path[i].left.height : 0, path[i].right ? path[i].right.height : 0);

    if (path[i].balanceFactor() > 1) {
      rotated = path[i].rightTooSmall();
      // Only a rotation at the root changes which node is the root.
      if (i === 0) { newRoot = rotated; }
    }

    if (path[i].balanceFactor() < -1) {
      rotated = path[i].leftTooSmall();
      if (i === 0) { newRoot = rotated; }
    }
  }

  return newRoot;
};
|
||||
|
||||
|
||||
/**
 * Insert a key, value pair in the tree while maintaining the AVL tree height constraint.
 * Return a pointer to the root node, which may have changed.
 * @throws {Object} { errorType: 'uniqueViolated', key } when the key already
 *   exists and the tree enforces uniqueness.
 */
_AVLTree.prototype.insert = function (key, value) {
  var insertPath = []          // root-to-leaf path, rebalanced afterwards
    , currentNode = this
    ;

  // Empty tree, insert as root
  if (!this.hasOwnProperty('key')) {
    this.key = key;
    this.data.push(value);
    this.height = 1;
    return this;
  }

  // Insert new leaf at the right place
  while (true) {
    // Same key: no change in the tree structure, so no rebalancing needed
    if (currentNode.compareKeys(currentNode.key, key) === 0) {
      if (currentNode.unique) {
        throw { message: "Can't insert key " + key + ", it violates the unique constraint"
              , key: key
              , errorType: 'uniqueViolated'
              };
      } else {
        currentNode.data.push(value);
      }
      return this;
    }

    insertPath.push(currentNode);

    if (currentNode.compareKeys(key, currentNode.key) < 0) {
      if (!currentNode.left) {
        insertPath.push(currentNode.createLeftChild({ key: key, value: value }));
        break;
      } else {
        currentNode = currentNode.left;
      }
    } else {
      if (!currentNode.right) {
        insertPath.push(currentNode.createRightChild({ key: key, value: value }));
        break;
      } else {
        currentNode = currentNode.right;
      }
    }
  }

  // Restore the AVL invariant along the insertion path.
  return this.rebalanceAlongPath(insertPath);
};
|
||||
|
||||
// Insert in the internal tree, update the pointer to the root if needed
// (rotations can move a different node to the top).
AVLTree.prototype.insert = function (key, value) {
  var newTree = this.tree.insert(key, value);

  // If newTree is undefined, that means its structure was not modified
  if (newTree) { this.tree = newTree; }
};
|
||||
|
||||
|
||||
/**
 * Delete a key or just a value and return the new root of the tree.
 * @param {Key} key
 * @param {Value} value Optional. If not set, the whole key is deleted. If set, only this value is deleted
 *
 * Cases handled, in order: key not found (no-op), value-only removal,
 * leaf removal, single-child splice, and two-children replacement by
 * the in-order predecessor. Structural changes end with a rebalance
 * along the root-to-deletion-point path.
 */
_AVLTree.prototype.delete = function (key, value) {
  var newData = [], replaceWith
    , self = this
    , currentNode = this
    , deletePath = []          // root-to-node path used for rebalancing
    ;

  if (!this.hasOwnProperty('key')) { return this; }   // Empty tree

  // Either no match is found and the function will return from within the loop
  // Or a match is found and deletePath will contain the path from the root to the node to delete after the loop
  while (true) {
    if (currentNode.compareKeys(key, currentNode.key) === 0) { break; }

    deletePath.push(currentNode);

    if (currentNode.compareKeys(key, currentNode.key) < 0) {
      if (currentNode.left) {
        currentNode = currentNode.left;
      } else {
        return this;   // Key not found, no modification
      }
    } else {
      // currentNode.compareKeys(key, currentNode.key) is > 0
      if (currentNode.right) {
        currentNode = currentNode.right;
      } else {
        return this;   // Key not found, no modification
      }
    }
  }

  // Delete only a value (no tree modification): keep every datum that is
  // not equal to `value` according to checkValueEquality.
  if (currentNode.data.length > 1 && value) {
    currentNode.data.forEach(function (d) {
      if (!currentNode.checkValueEquality(d, value)) { newData.push(d); }
    });
    currentNode.data = newData;
    return this;
  }

  // Delete a whole node

  // Leaf
  if (!currentNode.left && !currentNode.right) {
    if (currentNode === this) {   // This leaf is also the root: reset to empty tree
      delete currentNode.key;
      currentNode.data = [];
      delete currentNode.height;
      return this;
    } else {
      // Detach the leaf from its parent.
      if (currentNode.parent.left === currentNode) {
        currentNode.parent.left = null;
      } else {
        currentNode.parent.right = null;
      }
      return this.rebalanceAlongPath(deletePath);
    }
  }


  // Node with only one child: splice the child into the node's place.
  if (!currentNode.left || !currentNode.right) {
    replaceWith = currentNode.left ? currentNode.left : currentNode.right;

    if (currentNode === this) {   // This node is also the root
      replaceWith.parent = null;
      return replaceWith;   // height of replaceWith is necessarily 1 because the tree was balanced before deletion
    } else {
      if (currentNode.parent.left === currentNode) {
        currentNode.parent.left = replaceWith;
        replaceWith.parent = currentNode.parent;
      } else {
        currentNode.parent.right = replaceWith;
        replaceWith.parent = currentNode.parent;
      }

      return this.rebalanceAlongPath(deletePath);
    }
  }


  // Node with two children
  // Use the in-order predecessor (no need to randomize since we actively rebalance)
  deletePath.push(currentNode);
  replaceWith = currentNode.left;

  // Special case: the in-order predecessor is right below the node to delete
  if (!replaceWith.right) {
    currentNode.key = replaceWith.key;
    currentNode.data = replaceWith.data;
    currentNode.left = replaceWith.left;
    if (replaceWith.left) { replaceWith.left.parent = currentNode; }
    return this.rebalanceAlongPath(deletePath);
  }

  // After this loop, replaceWith is the right-most leaf in the left subtree
  // and deletePath the path from the root (inclusive) to replaceWith (exclusive)
  while (true) {
    if (replaceWith.right) {
      deletePath.push(replaceWith);
      replaceWith = replaceWith.right;
    } else {
      break;
    }
  }

  // Move the predecessor's contents into the deleted node's slot...
  currentNode.key = replaceWith.key;
  currentNode.data = replaceWith.data;

  // ...and splice the predecessor's left subtree into its old position.
  replaceWith.parent.right = replaceWith.left;
  if (replaceWith.left) { replaceWith.left.parent = replaceWith.parent; }

  return this.rebalanceAlongPath(deletePath);
};
|
||||
|
||||
// Delete a value from the internal tree, updating the pointer to the
// root if rebalancing moved a different node to the top.
AVLTree.prototype.delete = function (key, value) {
  var newTree = this.tree.delete(key, value);

  // If newTree is undefined, that means its structure was not modified
  if (newTree) { this.tree = newTree; }
};
|
||||
|
||||
|
||||
/**
 * Other functions we want to use on an AVLTree as if it were the internal _AVLTree:
 * generate thin delegating wrappers so the public wrapper mirrors the
 * inherited BinarySearchTree API.
 */
['getNumberOfKeys', 'search', 'betweenBounds', 'prettyPrint', 'executeOnEveryNode'].forEach(function (fn) {
  AVLTree.prototype[fn] = function () {
    return this.tree[fn].apply(this.tree, arguments);
  };
});


// Interface
module.exports = AVLTree;
|
@ -0,0 +1,543 @@
|
||||
/**
|
||||
* Simple binary search tree
|
||||
*/
|
||||
var customUtils = require('./customUtils');
|
||||
|
||||
|
||||
/**
 * Constructor (each node is itself a subtree)
 * @param {Object} options Optional
 * @param {Boolean} options.unique Whether to enforce a 'unique' constraint on the key or not
 * @param {Key} options.key Initialize this BST's key with key
 * @param {Value} options.value Initialize this BST's data with [value]
 * @param {Function} options.compareKeys Initialize this BST's compareKeys
 */
function BinarySearchTree (options) {
  options = options || {};

  this.left = null;
  this.right = null;
  this.parent = options.parent !== undefined ? options.parent : null;
  // `key` is only set when provided so hasOwnProperty('key') can
  // distinguish an empty tree from one storing an undefined-like key.
  if (options.hasOwnProperty('key')) { this.key = options.key; }
  this.data = options.hasOwnProperty('value') ? [options.value] : [];
  this.unique = options.unique || false;

  // Fall back to the library defaults (numbers/dates/strings).
  this.compareKeys = options.compareKeys || customUtils.defaultCompareKeysFunction;
  this.checkValueEquality = options.checkValueEquality || customUtils.defaultCheckValueEquality;
}
|
||||
|
||||
|
||||
// ================================
|
||||
// Methods used to test the tree
|
||||
// ================================
|
||||
|
||||
|
||||
/**
 * Get the descendant holding the largest key (the right-most node of
 * this subtree; the node itself when it has no right child).
 */
BinarySearchTree.prototype.getMaxKeyDescendant = function () {
  var node = this;
  while (node.right) {
    node = node.right;
  }
  return node;
};
|
||||
|
||||
|
||||
/**
 * Get the maximum key stored in this subtree.
 */
BinarySearchTree.prototype.getMaxKey = function () {
  var node = this.getMaxKeyDescendant();
  return node.key;
};
|
||||
|
||||
|
||||
/**
 * Get the descendant holding the smallest key (the left-most node of
 * this subtree; the node itself when it has no left child).
 */
BinarySearchTree.prototype.getMinKeyDescendant = function () {
  if (this.left) {
    // Fixed: terminate the statement explicitly instead of relying on
    // automatic semicolon insertion (the only unterminated statement
    // in the file; matches sibling getMaxKeyDescendant).
    return this.left.getMinKeyDescendant();
  } else {
    return this;
  }
};
|
||||
|
||||
|
||||
/**
 * Get the minimum key stored in this subtree.
 */
BinarySearchTree.prototype.getMinKey = function () {
  var node = this.getMinKeyDescendant();
  return node.key;
};
|
||||
|
||||
|
||||
/**
 * Check that all nodes (incl. leaves) fullfil condition given by fn.
 * test is a function passed every (key, data) and which throws if the condition is not met.
 * No-op on an empty tree.
 */
BinarySearchTree.prototype.checkAllNodesFullfillCondition = function (test) {
  if (!this.hasOwnProperty('key')) { return; }

  test(this.key, this.data);
  if (this.left) { this.left.checkAllNodesFullfillCondition(test); }
  if (this.right) { this.right.checkAllNodesFullfillCondition(test); }
};
|
||||
|
||||
|
||||
/**
 * Check that the core BST properties on node ordering are verified:
 * every key in the left subtree is strictly smaller than this node's
 * key, every key in the right subtree strictly greater.
 * Throw if they aren't.
 */
BinarySearchTree.prototype.checkNodeOrdering = function () {
  var self = this;

  if (!this.hasOwnProperty('key')) { return; }

  if (this.left) {
    // All left-descendant keys must compare strictly below self.key.
    this.left.checkAllNodesFullfillCondition(function (k) {
      if (self.compareKeys(k, self.key) >= 0) {
        throw 'Tree with root ' + self.key + ' is not a binary search tree';
      }
    });
    this.left.checkNodeOrdering();
  }

  if (this.right) {
    // All right-descendant keys must compare strictly above self.key.
    this.right.checkAllNodesFullfillCondition(function (k) {
      if (self.compareKeys(k, self.key) <= 0) {
        throw 'Tree with root ' + self.key + ' is not a binary search tree';
      }
    });
    this.right.checkNodeOrdering();
  }
};
|
||||
|
||||
|
||||
/**
 * Check that all parent pointers are coherent in this tree
 * Throws on the first broken child.parent back-pointer found
 */
BinarySearchTree.prototype.checkInternalPointers = function () {
  var self = this;

  ['left', 'right'].forEach(function (side) {
    var child = self[side];
    if (!child) { return; }
    if (child.parent !== self) { throw 'Parent pointer broken for key ' + self.key; }
    child.checkInternalPointers();
  });
};
|
||||
|
||||
|
||||
/**
 * Check that a tree is a BST as defined here (node ordering and pointer references)
 * Throws on the first violation found; returns undefined when the tree is valid.
 */
BinarySearchTree.prototype.checkIsBST = function () {
  this.checkNodeOrdering();       // every key strictly ordered w.r.t. its ancestors
  this.checkInternalPointers();   // every child's parent pointer must point back
  if (this.parent) { throw "The root shouldn't have a parent"; }   // must be called on the root
};
|
||||
|
||||
|
||||
/**
 * Get the number of distinct keys inserted in the tree.
 * @returns {Number}
 */
BinarySearchTree.prototype.getNumberOfKeys = function () {
  if (!this.hasOwnProperty('key')) { return 0; }   // Empty tree

  var count = 1;   // this node's own key
  if (this.left) { count += this.left.getNumberOfKeys(); }
  if (this.right) { count += this.right.getNumberOfKeys(); }

  return count;
};
|
||||
|
||||
|
||||
|
||||
// ============================================
|
||||
// Methods used to actually work on the tree
|
||||
// ============================================
|
||||
|
||||
/**
 * Create a BST similar (i.e. same options except for key and value) to the current one
 * Uses the same constructor (i.e. BinarySearchTree, AVLTree etc)
 * @param {Object} options see constructor; reused (and mutated) when provided
 */
BinarySearchTree.prototype.createSimilar = function (options) {
  var opts = options || {};

  opts.unique = this.unique;
  opts.compareKeys = this.compareKeys;
  opts.checkValueEquality = this.checkValueEquality;

  return new this.constructor(opts);
};
|
||||
|
||||
|
||||
/**
 * Create the left child of this BST, wire up both pointers, and return it
 * @param {Object} options see constructor
 */
BinarySearchTree.prototype.createLeftChild = function (options) {
  var child = this.createSimilar(options);
  child.parent = this;
  this.left = child;
  return child;
};
|
||||
|
||||
|
||||
/**
 * Create the right child of this BST, wire up both pointers, and return it
 * @param {Object} options see constructor
 */
BinarySearchTree.prototype.createRightChild = function (options) {
  var child = this.createSimilar(options);
  child.parent = this;
  this.right = child;
  return child;
};
|
||||
|
||||
|
||||
/**
 * Insert a new element in the tree
 * @param {Key} key
 * @param {Value} value Appended to the key's data array (keys can hold several values unless unique)
 * @throws {Object} errorType 'uniqueViolated' when the key exists and the tree is unique
 */
BinarySearchTree.prototype.insert = function (key, value) {
  // Empty tree: this node becomes the root holding the key
  if (!this.hasOwnProperty('key')) {
    this.key = key;
    this.data.push(value);
    return;
  }

  // Key already stored in this node
  if (this.compareKeys(this.key, key) === 0) {
    if (this.unique) {
      throw { message: "Can't insert key " + key + ", it violates the unique constraint", key: key, errorType: 'uniqueViolated' };
    }
    this.data.push(value);
    return;
  }

  // Recurse into (or create) the proper subtree
  var side = this.compareKeys(key, this.key) < 0 ? 'left' : 'right';
  if (this[side]) {
    this[side].insert(key, value);
  } else if (side === 'left') {
    this.createLeftChild({ key: key, value: value });
  } else {
    this.createRightChild({ key: key, value: value });
  }
};
|
||||
|
||||
|
||||
/**
 * Search for all data corresponding to a key
 * @param {Key} key
 * @returns {Array} The data stored under key, or [] if the key is absent
 */
BinarySearchTree.prototype.search = function (key) {
  if (!this.hasOwnProperty('key')) { return []; }   // Empty tree

  if (this.compareKeys(this.key, key) === 0) { return this.data; }

  var child = this.compareKeys(key, this.key) < 0 ? this.left : this.right;
  return child ? child.search(key) : [];
};
|
||||
|
||||
|
||||
/**
 * Return a function that tells whether a given key matches the lower bound of a query
 * @param {Object} query May contain $gt and/or $gte; other keys ignored
 */
BinarySearchTree.prototype.getLowerBoundMatcher = function (query) {
  var self = this
    , hasGt = query.hasOwnProperty('$gt')
    , hasGte = query.hasOwnProperty('$gte')
    ;

  // No lower bound: everything matches
  if (!hasGt && !hasGte) {
    return function () { return true; };
  }

  function gtMatcher (key) { return self.compareKeys(key, query.$gt) > 0; }
  function gteMatcher (key) { return self.compareKeys(key, query.$gte) >= 0; }

  if (hasGt && hasGte) {
    // Both bounds given: keep the more restrictive one ($gt wins ties)
    return self.compareKeys(query.$gte, query.$gt) > 0 ? gteMatcher : gtMatcher;
  }

  return hasGt ? gtMatcher : gteMatcher;
};
|
||||
|
||||
|
||||
/**
 * Return a function that tells whether a given key matches the upper bound of a query
 * @param {Object} query May contain $lt and/or $lte; other keys ignored
 */
BinarySearchTree.prototype.getUpperBoundMatcher = function (query) {
  var self = this
    , hasLt = query.hasOwnProperty('$lt')
    , hasLte = query.hasOwnProperty('$lte')
    ;

  // No upper bound: everything matches
  if (!hasLt && !hasLte) {
    return function () { return true; };
  }

  function ltMatcher (key) { return self.compareKeys(key, query.$lt) < 0; }
  function lteMatcher (key) { return self.compareKeys(key, query.$lte) <= 0; }

  if (hasLt && hasLte) {
    // Both bounds given: keep the more restrictive one ($lt wins ties)
    return self.compareKeys(query.$lte, query.$lt) < 0 ? lteMatcher : ltMatcher;
  }

  return hasLt ? ltMatcher : lteMatcher;
};
|
||||
|
||||
|
||||
// Append all elements of toAppend to array, in place and in order
function append (array, toAppend) {
  var total = toAppend.length;
  for (var idx = 0; idx < total; idx += 1) {
    array.push(toAppend[idx]);
  }
}
|
||||
|
||||
|
||||
/**
 * Get all data for keys between bounds, returned in key order
 * @param {Object} query Mongo-style query where keys are $lt, $lte, $gt or $gte (other keys are not considered)
 * @param {Functions} lbm/ubm matching functions calculated at the first recursive step
 * @returns {Array} Concatenation of the data arrays of all matching nodes
 */
BinarySearchTree.prototype.betweenBounds = function (query, lbm, ubm) {
  var res = [];

  if (!this.hasOwnProperty('key')) { return []; }   // Empty tree

  lbm = lbm || this.getLowerBoundMatcher(query);
  ubm = ubm || this.getUpperBoundMatcher(query);

  // Evaluate each bound once per node; the original called lbm/ubm twice each
  var aboveLower = lbm(this.key)
    , belowUpper = ubm(this.key)
    ;

  // In-order traversal pruned by the bounds: the left subtree can only match if
  // this key is already above the lower bound, and symmetrically for the right
  if (aboveLower && this.left) { append(res, this.left.betweenBounds(query, lbm, ubm)); }
  if (aboveLower && belowUpper) { append(res, this.data); }
  if (belowUpper && this.right) { append(res, this.right.betweenBounds(query, lbm, ubm)); }

  return res;
};
|
||||
|
||||
|
||||
/**
 * Delete the current node if it is a leaf
 * @returns {Boolean} true if it was deleted
 */
BinarySearchTree.prototype.deleteIfLeaf = function () {
  if (this.left || this.right) { return false; }   // Not a leaf

  if (!this.parent) {
    // The leaf is itself the root: empty the node but keep the object
    delete this.key;
    this.data = [];
  } else if (this.parent.left === this) {
    this.parent.left = null;
  } else {
    this.parent.right = null;
  }

  return true;
};
|
||||
|
||||
|
||||
/**
 * Delete the current node if it has exactly one child
 * @returns {Boolean} true if it was deleted
 */
BinarySearchTree.prototype.deleteIfOnlyOneChild = function () {
  var child = null;
  if (this.left && !this.right) { child = this.left; }
  if (this.right && !this.left) { child = this.right; }
  if (!child) { return false; }   // Zero or two children

  if (!this.parent) {
    // Deleting the root: absorb the child's contents into this node
    // so external references to the root object stay valid
    this.key = child.key;
    this.data = child.data;

    this.left = null;
    if (child.left) {
      this.left = child.left;
      child.left.parent = this;
    }

    this.right = null;
    if (child.right) {
      this.right = child.right;
      child.right.parent = this;
    }

    return true;
  }

  // Internal node: make the grandparent point directly at the child
  if (this.parent.left === this) {
    this.parent.left = child;
  } else {
    this.parent.right = child;
  }
  child.parent = this.parent;

  return true;
};
|
||||
|
||||
|
||||
/**
 * Delete a key or just one of its values
 * @param {Key} key
 * @param {Value} value Optional. If not set, the whole key is deleted. If set, only this value is deleted
 */
BinarySearchTree.prototype.delete = function (key, value) {
  var newData = [], replaceWith
    , self = this
    ;

  if (!this.hasOwnProperty('key')) { return; }   // Empty tree

  // Recurse into the subtree that may contain the key
  if (this.compareKeys(key, this.key) < 0) {
    if (this.left) { this.left.delete(key, value); }
    return;
  }

  if (this.compareKeys(key, this.key) > 0) {
    if (this.right) { this.right.delete(key, value); }
    return;
  }

  // BUGFIX: this guard was written `if (!this.compareKeys(key, this.key) === 0)`,
  // which (by operator precedence) compares a boolean to 0 and is always false.
  // The intent is to bail out when the key does not match this node; corrected
  // to say so explicitly. (It is unreachable after the two branches above, so
  // the fix cannot change observable behavior.)
  if (this.compareKeys(key, this.key) !== 0) { return; }

  // Delete only one value among several stored under this key
  if (this.data.length > 1 && value !== undefined) {
    this.data.forEach(function (d) {
      if (!self.checkValueEquality(d, value)) { newData.push(d); }
    });
    self.data = newData;
    return;
  }

  // Delete the whole node
  if (this.deleteIfLeaf()) { return; }
  if (this.deleteIfOnlyOneChild()) { return; }

  // The node to delete has two children: replace its contents with those of its
  // in-order predecessor or successor. Randomizing the choice avoids
  // unbalancing the tree too much.
  if (Math.random() >= 0.5) {
    // Use the in-order predecessor (max of the left subtree)
    replaceWith = this.left.getMaxKeyDescendant();

    this.key = replaceWith.key;
    this.data = replaceWith.data;

    if (this === replaceWith.parent) {   // Special case: predecessor is our direct left child
      this.left = replaceWith.left;
      if (replaceWith.left) { replaceWith.left.parent = replaceWith.parent; }
    } else {
      replaceWith.parent.right = replaceWith.left;
      if (replaceWith.left) { replaceWith.left.parent = replaceWith.parent; }
    }
  } else {
    // Use the in-order successor (min of the right subtree)
    replaceWith = this.right.getMinKeyDescendant();

    this.key = replaceWith.key;
    this.data = replaceWith.data;

    if (this === replaceWith.parent) {   // Special case: successor is our direct right child
      this.right = replaceWith.right;
      if (replaceWith.right) { replaceWith.right.parent = replaceWith.parent; }
    } else {
      replaceWith.parent.left = replaceWith.right;
      if (replaceWith.right) { replaceWith.right.parent = replaceWith.parent; }
    }
  }
};
|
||||
|
||||
|
||||
/**
 * Execute a function on every node of the tree, in key order (in-order traversal)
 * @param {Function} fn Signature: node. Most useful will probably be node.key and node.data
 */
BinarySearchTree.prototype.executeOnEveryNode = function (fn) {
  if (this.left) { this.left.executeOnEveryNode(fn); }
  fn(this);
  // this.right is read only after fn has run on this node
  if (this.right) { this.right.executeOnEveryNode(fn); }
};
|
||||
|
||||
|
||||
/**
 * Pretty print a tree to the console, one node per line
 * @param {Boolean} printData To print the nodes' data along with the key
 * @param {String} spacing Internal: indentation accumulated across recursive calls
 */
BinarySearchTree.prototype.prettyPrint = function (printData, spacing) {
  spacing = spacing || "";

  console.log(spacing + "* " + this.key);
  if (printData) { console.log(spacing + "* " + this.data); }

  if (!this.left && !this.right) { return; }   // Leaf: no children to print

  var self = this
    , childSpacing = spacing + "  "
    ;

  ['left', 'right'].forEach(function (side) {
    if (self[side]) {
      self[side].prettyPrint(printData, childSpacing);
    } else {
      console.log(childSpacing + "*");   // Placeholder for a missing child
    }
  });
};
|
||||
|
||||
|
||||
|
||||
|
||||
// Interface
|
||||
module.exports = BinarySearchTree;
|
@ -0,0 +1,38 @@
|
||||
/**
 * Return an array with the numbers from 0 to n-1, in a random order
 * @param {Number} n
 * @returns {Array}
 */
function getRandomArray (n) {
  if (n === 0) { return []; }
  if (n === 1) { return [0]; }

  // Insert each value at a random position of the (growing) result,
  // equivalent to the original recursive construction
  var res = [0];
  for (var value = 1; value < n; value += 1) {
    res.splice(Math.floor(Math.random() * (value + 1)), 0, value);
  }

  return res;
};
|
||||
module.exports.getRandomArray = getRandomArray;
|
||||
|
||||
|
||||
/*
 * Default compareKeys function: works for numbers, strings and dates.
 * Returns -1 / 0 / 1, and throws when the two values are not comparable
 * (e.g. NaN, or two distinct Date objects, which never satisfy <, > or ===).
 */
function defaultCompareKeysFunction (a, b) {
  if (a === b) { return 0; }
  if (a < b) { return -1; }
  if (a > b) { return 1; }

  throw { message: "Couldn't compare elements", a: a, b: b };
}
|
||||
module.exports.defaultCompareKeysFunction = defaultCompareKeysFunction;
|
||||
|
||||
|
||||
/**
 * Default equality check, used when deleting one value under a non-unique key.
 * Strict identity: primitives compare by value, objects by reference.
 */
function defaultCheckValueEquality (a, b) {
  return a === b;
}
|
||||
module.exports.defaultCheckValueEquality = defaultCheckValueEquality;
|
File diff suppressed because one or more lines are too long
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,2 @@
|
||||
node_modules/
|
||||
npm-debug.log
|
@ -0,0 +1,5 @@
|
||||
language: node_js
|
||||
node_js:
|
||||
- 0.6
|
||||
- 0.8
|
||||
- 0.9
|
@ -0,0 +1,21 @@
|
||||
Copyright 2010 James Halliday (mail@substack.net)
|
||||
|
||||
This project is free software released under the MIT/X11 license:
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
@ -0,0 +1,6 @@
|
||||
// Example script from the mkdirp README: recursively create /tmp/foo/bar/baz,
// logging the error on failure or 'pow!' on success.
var mkdirp = require('mkdirp');

mkdirp('/tmp/foo/bar/baz', function (err) {
    if (err) console.error(err)
    else console.log('pow!')
});
|
@ -0,0 +1,82 @@
|
||||
var path = require('path');
|
||||
var fs = require('fs');
|
||||
|
||||
module.exports = mkdirP.mkdirp = mkdirP.mkdirP = mkdirP;
|
||||
|
||||
/**
 * Recursively create a directory, like `mkdir -p`.
 * @param {String} p Path to create; resolved to an absolute path
 * @param {Number|String|Function} mode Permissions (octal number or string);
 *   when omitted or passed a function, defaults to 0777 & ~umask and the
 *   argument is treated as the callback `f`
 * @param {Function} f Callback: f(err, made) where `made` is the first
 *   directory that actually had to be created, or null
 * @param {String} made Internal accumulator threaded through recursive calls
 */
function mkdirP (p, mode, f, made) {
    if (typeof mode === 'function' || mode === undefined) {
        f = mode;
        mode = 0777 & (~process.umask());
    }
    if (!made) made = null;

    var cb = f || function () {};
    if (typeof mode === 'string') mode = parseInt(mode, 8);
    p = path.resolve(p);

    fs.mkdir(p, mode, function (er) {
        if (!er) {
            made = made || p;
            return cb(null, made);
        }
        switch (er.code) {
            case 'ENOENT':
                // Parent is missing: create it first, then retry this directory
                mkdirP(path.dirname(p), mode, function (er, made) {
                    if (er) cb(er, made);
                    else mkdirP(p, mode, cb, made);
                });
                break;

            // In the case of any other error, just see if there's a dir
            // there already. If so, then hooray! If not, then something
            // is borked.
            default:
                fs.stat(p, function (er2, stat) {
                    // if the stat fails, then that's super weird.
                    // let the original error be the failure reason.
                    if (er2 || !stat.isDirectory()) cb(er, made)
                    else cb(null, made);
                });
                break;
        }
    });
}
|
||||
|
||||
/**
 * Synchronous version of mkdirP.
 * @param {String} p Path to create; resolved to an absolute path
 * @param {Number|String} mode Permissions; defaults to 0777 & ~umask
 * @param {String} made Internal accumulator threaded through recursive calls
 * @returns {String|null} First directory that had to be created, or null
 * @throws The original fs error when the path cannot be created and is not
 *   already a directory
 */
mkdirP.sync = function sync (p, mode, made) {
    if (mode === undefined) {
        mode = 0777 & (~process.umask());
    }
    if (!made) made = null;

    if (typeof mode === 'string') mode = parseInt(mode, 8);
    p = path.resolve(p);

    try {
        fs.mkdirSync(p, mode);
        made = made || p;
    }
    catch (err0) {
        switch (err0.code) {
            case 'ENOENT' :
                // Parent is missing: create it first, then retry this directory
                made = sync(path.dirname(p), mode, made);
                sync(p, mode, made);
                break;

            // In the case of any other error, just see if there's a dir
            // there already. If so, then hooray! If not, then something
            // is borked.
            default:
                var stat;
                try {
                    stat = fs.statSync(p);
                }
                catch (err1) {
                    // Path doesn't exist at all: report the original mkdir error
                    throw err0;
                }
                if (!stat.isDirectory()) throw err0;
                break;
        }
    }

    return made;
};
|
@ -0,0 +1,37 @@
|
||||
{
|
||||
"name": "mkdirp",
|
||||
"description": "Recursively mkdir, like `mkdir -p`",
|
||||
"version": "0.3.5",
|
||||
"author": {
|
||||
"name": "James Halliday",
|
||||
"email": "mail@substack.net",
|
||||
"url": "http://substack.net"
|
||||
},
|
||||
"main": "./index",
|
||||
"keywords": [
|
||||
"mkdir",
|
||||
"directory"
|
||||
],
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "http://github.com/substack/node-mkdirp.git"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "tap test/*.js"
|
||||
},
|
||||
"devDependencies": {
|
||||
"tap": "~0.4.0"
|
||||
},
|
||||
"license": "MIT",
|
||||
"readme": "# mkdirp\n\nLike `mkdir -p`, but in node.js!\n\n[](http://travis-ci.org/substack/node-mkdirp)\n\n# example\n\n## pow.js\n\n```js\nvar mkdirp = require('mkdirp');\n \nmkdirp('/tmp/foo/bar/baz', function (err) {\n if (err) console.error(err)\n else console.log('pow!')\n});\n```\n\nOutput\n\n```\npow!\n```\n\nAnd now /tmp/foo/bar/baz exists, huzzah!\n\n# methods\n\n```js\nvar mkdirp = require('mkdirp');\n```\n\n## mkdirp(dir, mode, cb)\n\nCreate a new directory and any necessary subdirectories at `dir` with octal\npermission string `mode`.\n\nIf `mode` isn't specified, it defaults to `0777 & (~process.umask())`.\n\n`cb(err, made)` fires with the error or the first directory `made`\nthat had to be created, if any.\n\n## mkdirp.sync(dir, mode)\n\nSynchronously create a new directory and any necessary subdirectories at `dir`\nwith octal permission string `mode`.\n\nIf `mode` isn't specified, it defaults to `0777 & (~process.umask())`.\n\nReturns the first directory that had to be created, if any.\n\n# install\n\nWith [npm](http://npmjs.org) do:\n\n```\nnpm install mkdirp\n```\n\n# license\n\nMIT\n",
|
||||
"readmeFilename": "readme.markdown",
|
||||
"bugs": {
|
||||
"url": "https://github.com/substack/node-mkdirp/issues"
|
||||
},
|
||||
"_id": "mkdirp@0.3.5",
|
||||
"dist": {
|
||||
"shasum": "67e5c12bd18b5d68b5f56518ff1ea28068eecb4f"
|
||||
},
|
||||
"_from": "mkdirp@~0.3.5",
|
||||
"_resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.3.5.tgz"
|
||||
}
|
@ -0,0 +1,63 @@
|
||||
# mkdirp
|
||||
|
||||
Like `mkdir -p`, but in node.js!
|
||||
|
||||
[](http://travis-ci.org/substack/node-mkdirp)
|
||||
|
||||
# example
|
||||
|
||||
## pow.js
|
||||
|
||||
```js
|
||||
var mkdirp = require('mkdirp');
|
||||
|
||||
mkdirp('/tmp/foo/bar/baz', function (err) {
|
||||
if (err) console.error(err)
|
||||
else console.log('pow!')
|
||||
});
|
||||
```
|
||||
|
||||
Output
|
||||
|
||||
```
|
||||
pow!
|
||||
```
|
||||
|
||||
And now /tmp/foo/bar/baz exists, huzzah!
|
||||
|
||||
# methods
|
||||
|
||||
```js
|
||||
var mkdirp = require('mkdirp');
|
||||
```
|
||||
|
||||
## mkdirp(dir, mode, cb)
|
||||
|
||||
Create a new directory and any necessary subdirectories at `dir` with octal
|
||||
permission string `mode`.
|
||||
|
||||
If `mode` isn't specified, it defaults to `0777 & (~process.umask())`.
|
||||
|
||||
`cb(err, made)` fires with the error or the first directory `made`
|
||||
that had to be created, if any.
|
||||
|
||||
## mkdirp.sync(dir, mode)
|
||||
|
||||
Synchronously create a new directory and any necessary subdirectories at `dir`
|
||||
with octal permission string `mode`.
|
||||
|
||||
If `mode` isn't specified, it defaults to `0777 & (~process.umask())`.
|
||||
|
||||
Returns the first directory that had to be created, if any.
|
||||
|
||||
# install
|
||||
|
||||
With [npm](http://npmjs.org) do:
|
||||
|
||||
```
|
||||
npm install mkdirp
|
||||
```
|
||||
|
||||
# license
|
||||
|
||||
MIT
|
@ -0,0 +1,38 @@
|
||||
var mkdirp = require('../').mkdirp;
|
||||
var path = require('path');
|
||||
var fs = require('fs');
|
||||
var test = require('tap').test;
|
||||
|
||||
var ps = [ '', 'tmp' ];
|
||||
|
||||
for (var i = 0; i < 25; i++) {
|
||||
var dir = Math.floor(Math.random() * Math.pow(16,4)).toString(16);
|
||||
ps.push(dir);
|
||||
}
|
||||
|
||||
var file = ps.join('/');
|
||||
|
||||
test('chmod-pre', function (t) {
|
||||
var mode = 0744
|
||||
mkdirp(file, mode, function (er) {
|
||||
t.ifError(er, 'should not error');
|
||||
fs.stat(file, function (er, stat) {
|
||||
t.ifError(er, 'should exist');
|
||||
t.ok(stat && stat.isDirectory(), 'should be directory');
|
||||
t.equal(stat && stat.mode & 0777, mode, 'should be 0744');
|
||||
t.end();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
test('chmod', function (t) {
|
||||
var mode = 0755
|
||||
mkdirp(file, mode, function (er) {
|
||||
t.ifError(er, 'should not error');
|
||||
fs.stat(file, function (er, stat) {
|
||||
t.ifError(er, 'should exist');
|
||||
t.ok(stat && stat.isDirectory(), 'should be directory');
|
||||
t.end();
|
||||
});
|
||||
});
|
||||
});
|
@ -0,0 +1,37 @@
|
||||
var mkdirp = require('../').mkdirp;
|
||||
var path = require('path');
|
||||
var fs = require('fs');
|
||||
var test = require('tap').test;
|
||||
|
||||
var ps = [ '', 'tmp' ];
|
||||
|
||||
for (var i = 0; i < 25; i++) {
|
||||
var dir = Math.floor(Math.random() * Math.pow(16,4)).toString(16);
|
||||
ps.push(dir);
|
||||
}
|
||||
|
||||
var file = ps.join('/');
|
||||
|
||||
// a file in the way
|
||||
var itw = ps.slice(0, 3).join('/');
|
||||
|
||||
|
||||
test('clobber-pre', function (t) {
|
||||
console.error("about to write to "+itw)
|
||||
fs.writeFileSync(itw, 'I AM IN THE WAY, THE TRUTH, AND THE LIGHT.');
|
||||
|
||||
fs.stat(itw, function (er, stat) {
|
||||
t.ifError(er)
|
||||
t.ok(stat && stat.isFile(), 'should be file')
|
||||
t.end()
|
||||
})
|
||||
})
|
||||
|
||||
test('clobber', function (t) {
|
||||
t.plan(2);
|
||||
mkdirp(file, 0755, function (err) {
|
||||
t.ok(err);
|
||||
t.equal(err.code, 'ENOTDIR');
|
||||
t.end();
|
||||
});
|
||||
});
|
@ -0,0 +1,28 @@
|
||||
var mkdirp = require('../');
|
||||
var path = require('path');
|
||||
var fs = require('fs');
|
||||
var test = require('tap').test;
|
||||
|
||||
test('woo', function (t) {
|
||||
t.plan(2);
|
||||
var x = Math.floor(Math.random() * Math.pow(16,4)).toString(16);
|
||||
var y = Math.floor(Math.random() * Math.pow(16,4)).toString(16);
|
||||
var z = Math.floor(Math.random() * Math.pow(16,4)).toString(16);
|
||||
|
||||
var file = '/tmp/' + [x,y,z].join('/');
|
||||
|
||||
mkdirp(file, 0755, function (err) {
|
||||
if (err) t.fail(err);
|
||||
else path.exists(file, function (ex) {
|
||||
if (!ex) t.fail('file not created')
|
||||
else fs.stat(file, function (err, stat) {
|
||||
if (err) t.fail(err)
|
||||
else {
|
||||
t.equal(stat.mode & 0777, 0755);
|
||||
t.ok(stat.isDirectory(), 'target not a directory');
|
||||
t.end();
|
||||
}
|
||||
})
|
||||
})
|
||||
});
|
||||
});
|
@ -0,0 +1,32 @@
|
||||
var mkdirp = require('../');
|
||||
var path = require('path');
|
||||
var fs = require('fs');
|
||||
var test = require('tap').test;
|
||||
|
||||
test('async perm', function (t) {
|
||||
t.plan(2);
|
||||
var file = '/tmp/' + (Math.random() * (1<<30)).toString(16);
|
||||
|
||||
mkdirp(file, 0755, function (err) {
|
||||
if (err) t.fail(err);
|
||||
else path.exists(file, function (ex) {
|
||||
if (!ex) t.fail('file not created')
|
||||
else fs.stat(file, function (err, stat) {
|
||||
if (err) t.fail(err)
|
||||
else {
|
||||
t.equal(stat.mode & 0777, 0755);
|
||||
t.ok(stat.isDirectory(), 'target not a directory');
|
||||
t.end();
|
||||
}
|
||||
})
|
||||
})
|
||||
});
|
||||
});
|
||||
|
||||
test('async root perm', function (t) {
|
||||
mkdirp('/tmp', 0755, function (err) {
|
||||
if (err) t.fail(err);
|
||||
t.end();
|
||||
});
|
||||
t.end();
|
||||
});
|
@ -0,0 +1,39 @@
|
||||
var mkdirp = require('../');
|
||||
var path = require('path');
|
||||
var fs = require('fs');
|
||||
var test = require('tap').test;
|
||||
|
||||
test('sync perm', function (t) {
|
||||
t.plan(2);
|
||||
var file = '/tmp/' + (Math.random() * (1<<30)).toString(16) + '.json';
|
||||
|
||||
mkdirp.sync(file, 0755);
|
||||
path.exists(file, function (ex) {
|
||||
if (!ex) t.fail('file not created')
|
||||
else fs.stat(file, function (err, stat) {
|
||||
if (err) t.fail(err)
|
||||
else {
|
||||
t.equal(stat.mode & 0777, 0755);
|
||||
t.ok(stat.isDirectory(), 'target not a directory');
|
||||
t.end();
|
||||
}
|
||||
})
|
||||
});
|
||||
});
|
||||
|
||||
test('sync root perm', function (t) {
|
||||
t.plan(1);
|
||||
|
||||
var file = '/tmp';
|
||||
mkdirp.sync(file, 0755);
|
||||
path.exists(file, function (ex) {
|
||||
if (!ex) t.fail('file not created')
|
||||
else fs.stat(file, function (err, stat) {
|
||||
if (err) t.fail(err)
|
||||
else {
|
||||
t.ok(stat.isDirectory(), 'target not a directory');
|
||||
t.end();
|
||||
}
|
||||
})
|
||||
});
|
||||
});
|
@ -0,0 +1,41 @@
|
||||
var mkdirp = require('../').mkdirp;
|
||||
var path = require('path');
|
||||
var fs = require('fs');
|
||||
var test = require('tap').test;
|
||||
|
||||
test('race', function (t) {
|
||||
t.plan(4);
|
||||
var ps = [ '', 'tmp' ];
|
||||
|
||||
for (var i = 0; i < 25; i++) {
|
||||
var dir = Math.floor(Math.random() * Math.pow(16,4)).toString(16);
|
||||
ps.push(dir);
|
||||
}
|
||||
var file = ps.join('/');
|
||||
|
||||
var res = 2;
|
||||
mk(file, function () {
|
||||
if (--res === 0) t.end();
|
||||
});
|
||||
|
||||
mk(file, function () {
|
||||
if (--res === 0) t.end();
|
||||
});
|
||||
|
||||
function mk (file, cb) {
|
||||
mkdirp(file, 0755, function (err) {
|
||||
if (err) t.fail(err);
|
||||
else path.exists(file, function (ex) {
|
||||
if (!ex) t.fail('file not created')
|
||||
else fs.stat(file, function (err, stat) {
|
||||
if (err) t.fail(err)
|
||||
else {
|
||||
t.equal(stat.mode & 0777, 0755);
|
||||
t.ok(stat.isDirectory(), 'target not a directory');
|
||||
if (cb) cb();
|
||||
}
|
||||
})
|
||||
})
|
||||
});
|
||||
}
|
||||
});
|
@ -0,0 +1,32 @@
|
||||
var mkdirp = require('../');
|
||||
var path = require('path');
|
||||
var fs = require('fs');
|
||||
var test = require('tap').test;
|
||||
|
||||
test('rel', function (t) {
|
||||
t.plan(2);
|
||||
var x = Math.floor(Math.random() * Math.pow(16,4)).toString(16);
|
||||
var y = Math.floor(Math.random() * Math.pow(16,4)).toString(16);
|
||||
var z = Math.floor(Math.random() * Math.pow(16,4)).toString(16);
|
||||
|
||||
var cwd = process.cwd();
|
||||
process.chdir('/tmp');
|
||||
|
||||
var file = [x,y,z].join('/');
|
||||
|
||||
mkdirp(file, 0755, function (err) {
|
||||
if (err) t.fail(err);
|
||||
else path.exists(file, function (ex) {
|
||||
if (!ex) t.fail('file not created')
|
||||
else fs.stat(file, function (err, stat) {
|
||||
if (err) t.fail(err)
|
||||
else {
|
||||
process.chdir(cwd);
|
||||
t.equal(stat.mode & 0777, 0755);
|
||||
t.ok(stat.isDirectory(), 'target not a directory');
|
||||
t.end();
|
||||
}
|
||||
})
|
||||
})
|
||||
});
|
||||
});
|
@ -0,0 +1,25 @@
|
||||
var mkdirp = require('../');
|
||||
var path = require('path');
|
||||
var fs = require('fs');
|
||||
var test = require('tap').test;
|
||||
|
||||
test('return value', function (t) {
|
||||
t.plan(4);
|
||||
var x = Math.floor(Math.random() * Math.pow(16,4)).toString(16);
|
||||
var y = Math.floor(Math.random() * Math.pow(16,4)).toString(16);
|
||||
var z = Math.floor(Math.random() * Math.pow(16,4)).toString(16);
|
||||
|
||||
var file = '/tmp/' + [x,y,z].join('/');
|
||||
|
||||
// should return the first dir created.
|
||||
// By this point, it would be profoundly surprising if /tmp didn't
|
||||
// already exist, since every other test makes things in there.
|
||||
mkdirp(file, function (err, made) {
|
||||
t.ifError(err);
|
||||
t.equal(made, '/tmp/' + x);
|
||||
mkdirp(file, function (err, made) {
|
||||
t.ifError(err);
|
||||
t.equal(made, null);
|
||||
});
|
||||
});
|
||||
});
|
@ -0,0 +1,24 @@
|
||||
var mkdirp = require('../');
|
||||
var path = require('path');
|
||||
var fs = require('fs');
|
||||
var test = require('tap').test;
|
||||
|
||||
test('return value', function (t) {
|
||||
t.plan(2);
|
||||
var x = Math.floor(Math.random() * Math.pow(16,4)).toString(16);
|
||||
var y = Math.floor(Math.random() * Math.pow(16,4)).toString(16);
|
||||
var z = Math.floor(Math.random() * Math.pow(16,4)).toString(16);
|
||||
|
||||
var file = '/tmp/' + [x,y,z].join('/');
|
||||
|
||||
// should return the first dir created.
|
||||
// By this point, it would be profoundly surprising if /tmp didn't
|
||||
// already exist, since every other test makes things in there.
|
||||
// Note that this will throw on failure, which will fail the test.
|
||||
var made = mkdirp.sync(file);
|
||||
t.equal(made, '/tmp/' + x);
|
||||
|
||||
// making the same file again should have no effect.
|
||||
made = mkdirp.sync(file);
|
||||
t.equal(made, null);
|
||||
});
|
@ -0,0 +1,18 @@
|
||||
var mkdirp = require('../');
|
||||
var path = require('path');
|
||||
var fs = require('fs');
|
||||
var test = require('tap').test;
|
||||
|
||||
test('root', function (t) {
|
||||
// '/' on unix, 'c:/' on windows.
|
||||
var file = path.resolve('/');
|
||||
|
||||
mkdirp(file, 0755, function (err) {
|
||||
if (err) throw err
|
||||
fs.stat(file, function (er, stat) {
|
||||
if (er) throw er
|
||||
t.ok(stat.isDirectory(), 'target is a directory');
|
||||
t.end();
|
||||
})
|
||||
});
|
||||
});
|
@ -0,0 +1,32 @@
|
||||
var mkdirp = require('../');
|
||||
var path = require('path');
|
||||
var fs = require('fs');
|
||||
var test = require('tap').test;
|
||||
|
||||
test('sync', function (t) {
|
||||
t.plan(2);
|
||||
var x = Math.floor(Math.random() * Math.pow(16,4)).toString(16);
|
||||
var y = Math.floor(Math.random() * Math.pow(16,4)).toString(16);
|
||||
var z = Math.floor(Math.random() * Math.pow(16,4)).toString(16);
|
||||
|
||||
var file = '/tmp/' + [x,y,z].join('/');
|
||||
|
||||
try {
|
||||
mkdirp.sync(file, 0755);
|
||||
} catch (err) {
|
||||
t.fail(err);
|
||||
return t.end();
|
||||
}
|
||||
|
||||
path.exists(file, function (ex) {
|
||||
if (!ex) t.fail('file not created')
|
||||
else fs.stat(file, function (err, stat) {
|
||||
if (err) t.fail(err)
|
||||
else {
|
||||
t.equal(stat.mode & 0777, 0755);
|
||||
t.ok(stat.isDirectory(), 'target not a directory');
|
||||
t.end();
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
@ -0,0 +1,28 @@
|
||||
var mkdirp = require('../');
|
||||
var path = require('path');
|
||||
var fs = require('fs');
|
||||
var test = require('tap').test;
|
||||
|
||||
test('implicit mode from umask', function (t) {
|
||||
t.plan(2);
|
||||
var x = Math.floor(Math.random() * Math.pow(16,4)).toString(16);
|
||||
var y = Math.floor(Math.random() * Math.pow(16,4)).toString(16);
|
||||
var z = Math.floor(Math.random() * Math.pow(16,4)).toString(16);
|
||||
|
||||
var file = '/tmp/' + [x,y,z].join('/');
|
||||
|
||||
mkdirp(file, function (err) {
|
||||
if (err) t.fail(err);
|
||||
else path.exists(file, function (ex) {
|
||||
if (!ex) t.fail('file not created')
|
||||
else fs.stat(file, function (err, stat) {
|
||||
if (err) t.fail(err)
|
||||
else {
|
||||
t.equal(stat.mode & 0777, 0777 & (~process.umask()));
|
||||
t.ok(stat.isDirectory(), 'target not a directory');
|
||||
t.end();
|
||||
}
|
||||
})
|
||||
})
|
||||
});
|
||||
});
|
@ -0,0 +1,32 @@
|
||||
var mkdirp = require('../');
|
||||
var path = require('path');
|
||||
var fs = require('fs');
|
||||
var test = require('tap').test;
|
||||
|
||||
test('umask sync modes', function (t) {
|
||||
t.plan(2);
|
||||
var x = Math.floor(Math.random() * Math.pow(16,4)).toString(16);
|
||||
var y = Math.floor(Math.random() * Math.pow(16,4)).toString(16);
|
||||
var z = Math.floor(Math.random() * Math.pow(16,4)).toString(16);
|
||||
|
||||
var file = '/tmp/' + [x,y,z].join('/');
|
||||
|
||||
try {
|
||||
mkdirp.sync(file);
|
||||
} catch (err) {
|
||||
t.fail(err);
|
||||
return t.end();
|
||||
}
|
||||
|
||||
path.exists(file, function (ex) {
|
||||
if (!ex) t.fail('file not created')
|
||||
else fs.stat(file, function (err, stat) {
|
||||
if (err) t.fail(err)
|
||||
else {
|
||||
t.equal(stat.mode & 0777, (0777 & (~process.umask())));
|
||||
t.ok(stat.isDirectory(), 'target not a directory');
|
||||
t.end();
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
@ -0,0 +1,4 @@
|
||||
test/
|
||||
Rakefile
|
||||
docs/
|
||||
raw/
|
@ -0,0 +1,5 @@
|
||||
language: node_js
|
||||
node_js:
|
||||
- 0.8
|
||||
notifications:
|
||||
email: false
|
@ -0,0 +1 @@
|
||||
underscorejs.org
|
@ -0,0 +1,9 @@
|
||||
## How to contribute to Underscore.js
|
||||
|
||||
* Before you open a ticket or send a pull request, [search](https://github.com/documentcloud/underscore/issues) for previous discussions about the same feature or issue. Add to the earlier ticket if you find one.
|
||||
|
||||
* Before sending a pull request for a feature, be sure to have [tests](http://underscorejs.org/test/).
|
||||
|
||||
* Use the same coding style as the rest of the [codebase](https://github.com/documentcloud/underscore/blob/master/underscore.js).
|
||||
|
||||
* In your pull request, do not add documentation or re-build the minified `underscore-min.js` file. We'll do those things before cutting a new release.
|
@ -0,0 +1,22 @@
|
||||
Copyright (c) 2009-2013 Jeremy Ashkenas, DocumentCloud
|
||||
|
||||
Permission is hereby granted, free of charge, to any person
|
||||
obtaining a copy of this software and associated documentation
|
||||
files (the "Software"), to deal in the Software without
|
||||
restriction, including without limitation the rights to use,
|
||||
copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the
|
||||
Software is furnished to do so, subject to the following
|
||||
conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be
|
||||
included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
|
||||
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
||||
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
|
||||
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
|
||||
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
||||
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
||||
OTHER DEALINGS IN THE SOFTWARE.
|
@ -0,0 +1,19 @@
|
||||
__
|
||||
/\ \ __
|
||||
__ __ ___ \_\ \ __ _ __ ____ ___ ___ _ __ __ /\_\ ____
|
||||
/\ \/\ \ /' _ `\ /'_ \ /'__`\/\ __\/ ,__\ / ___\ / __`\/\ __\/'__`\ \/\ \ /',__\
|
||||
\ \ \_\ \/\ \/\ \/\ \ \ \/\ __/\ \ \//\__, `\/\ \__//\ \ \ \ \ \//\ __/ __ \ \ \/\__, `\
|
||||
\ \____/\ \_\ \_\ \___,_\ \____\\ \_\\/\____/\ \____\ \____/\ \_\\ \____\/\_\ _\ \ \/\____/
|
||||
\/___/ \/_/\/_/\/__,_ /\/____/ \/_/ \/___/ \/____/\/___/ \/_/ \/____/\/_//\ \_\ \/___/
|
||||
\ \____/
|
||||
\/___/
|
||||
|
||||
Underscore.js is a utility-belt library for JavaScript that provides
|
||||
support for the usual functional suspects (each, map, reduce, filter...)
|
||||
without extending any core JavaScript objects.
|
||||
|
||||
For Docs, License, Tests, and pre-packed downloads, see:
|
||||
http://underscorejs.org
|
||||
|
||||
Many thanks to our contributors:
|
||||
https://github.com/documentcloud/underscore/contributors
|
After Width: | Height: | Size: 1.4 KiB |
File diff suppressed because it is too large
Load Diff
@ -0,0 +1 @@
|
||||
module.exports = require('./underscore');
|
@ -0,0 +1,39 @@
|
||||
{
|
||||
"name": "underscore",
|
||||
"description": "JavaScript's functional programming helper library.",
|
||||
"homepage": "http://underscorejs.org",
|
||||
"keywords": [
|
||||
"util",
|
||||
"functional",
|
||||
"server",
|
||||
"client",
|
||||
"browser"
|
||||
],
|
||||
"author": {
|
||||
"name": "Jeremy Ashkenas",
|
||||
"email": "jeremy@documentcloud.org"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git://github.com/documentcloud/underscore.git"
|
||||
},
|
||||
"main": "underscore.js",
|
||||
"version": "1.4.4",
|
||||
"devDependencies": {
|
||||
"phantomjs": "0.2.2"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "phantomjs test/vendor/runner.js test/index.html?noglobals=true"
|
||||
},
|
||||
"readme": " __\n /\\ \\ __\n __ __ ___ \\_\\ \\ __ _ __ ____ ___ ___ _ __ __ /\\_\\ ____\n /\\ \\/\\ \\ /' _ `\\ /'_ \\ /'__`\\/\\ __\\/ ,__\\ / ___\\ / __`\\/\\ __\\/'__`\\ \\/\\ \\ /',__\\\n \\ \\ \\_\\ \\/\\ \\/\\ \\/\\ \\ \\ \\/\\ __/\\ \\ \\//\\__, `\\/\\ \\__//\\ \\ \\ \\ \\ \\//\\ __/ __ \\ \\ \\/\\__, `\\\n \\ \\____/\\ \\_\\ \\_\\ \\___,_\\ \\____\\\\ \\_\\\\/\\____/\\ \\____\\ \\____/\\ \\_\\\\ \\____\\/\\_\\ _\\ \\ \\/\\____/\n \\/___/ \\/_/\\/_/\\/__,_ /\\/____/ \\/_/ \\/___/ \\/____/\\/___/ \\/_/ \\/____/\\/_//\\ \\_\\ \\/___/\n \\ \\____/\n \\/___/\n\nUnderscore.js is a utility-belt library for JavaScript that provides\nsupport for the usual functional suspects (each, map, reduce, filter...)\nwithout extending any core JavaScript objects.\n\nFor Docs, License, Tests, and pre-packed downloads, see:\nhttp://underscorejs.org\n\nMany thanks to our contributors:\nhttps://github.com/documentcloud/underscore/contributors\n",
|
||||
"readmeFilename": "README.md",
|
||||
"bugs": {
|
||||
"url": "https://github.com/documentcloud/underscore/issues"
|
||||
},
|
||||
"_id": "underscore@1.4.4",
|
||||
"dist": {
|
||||
"shasum": "f5e657a6e6c96f20810ac45ff0021b99644c35bb"
|
||||
},
|
||||
"_from": "underscore@~1.4.4",
|
||||
"_resolved": "https://registry.npmjs.org/underscore/-/underscore-1.4.4.tgz"
|
||||
}
|
File diff suppressed because one or more lines are too long
File diff suppressed because it is too large
Load Diff
File diff suppressed because one or more lines are too long
Binary file not shown.
@ -0,0 +1,786 @@
|
||||
var should = require('chai').should()
|
||||
, assert = require('chai').assert
|
||||
, testDb = 'workspace/test.db'
|
||||
, fs = require('fs')
|
||||
, path = require('path')
|
||||
, _ = require('underscore')
|
||||
, async = require('async')
|
||||
, model = require('../lib/model')
|
||||
, Datastore = require('../lib/datastore')
|
||||
, Persistence = require('../lib/persistence')
|
||||
, Cursor = require('../lib/cursor')
|
||||
;
|
||||
|
||||
|
||||
describe('Cursor', function () {
|
||||
var d;
|
||||
|
||||
beforeEach(function (done) {
|
||||
d = new Datastore({ filename: testDb });
|
||||
d.filename.should.equal(testDb);
|
||||
d.inMemoryOnly.should.equal(false);
|
||||
|
||||
async.waterfall([
|
||||
function (cb) {
|
||||
Persistence.ensureDirectoryExists(path.dirname(testDb), function () {
|
||||
fs.exists(testDb, function (exists) {
|
||||
if (exists) {
|
||||
fs.unlink(testDb, cb);
|
||||
} else { return cb(); }
|
||||
});
|
||||
});
|
||||
}
|
||||
, function (cb) {
|
||||
d.loadDatabase(function (err) {
|
||||
assert.isNull(err);
|
||||
d.getAllData().length.should.equal(0);
|
||||
return cb();
|
||||
});
|
||||
}
|
||||
], done);
|
||||
});
|
||||
|
||||
describe('Without sorting', function () {
|
||||
|
||||
beforeEach(function (done) {
|
||||
d.insert({ age: 5 }, function (err) {
|
||||
d.insert({ age: 57 }, function (err) {
|
||||
d.insert({ age: 52 }, function (err) {
|
||||
d.insert({ age: 23 }, function (err) {
|
||||
d.insert({ age: 89 }, function (err) {
|
||||
return done();
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('Without query, an empty query or a simple query and no skip or limit', function (done) {
|
||||
async.waterfall([
|
||||
function (cb) {
|
||||
var cursor = new Cursor(d);
|
||||
cursor.exec(function (err, docs) {
|
||||
assert.isNull(err);
|
||||
docs.length.should.equal(5);
|
||||
_.filter(docs, function(doc) { return doc.age === 5; })[0].age.should.equal(5);
|
||||
_.filter(docs, function(doc) { return doc.age === 57; })[0].age.should.equal(57);
|
||||
_.filter(docs, function(doc) { return doc.age === 52; })[0].age.should.equal(52);
|
||||
_.filter(docs, function(doc) { return doc.age === 23; })[0].age.should.equal(23);
|
||||
_.filter(docs, function(doc) { return doc.age === 89; })[0].age.should.equal(89);
|
||||
cb();
|
||||
});
|
||||
}
|
||||
, function (cb) {
|
||||
var cursor = new Cursor(d, {});
|
||||
cursor.exec(function (err, docs) {
|
||||
assert.isNull(err);
|
||||
docs.length.should.equal(5);
|
||||
_.filter(docs, function(doc) { return doc.age === 5; })[0].age.should.equal(5);
|
||||
_.filter(docs, function(doc) { return doc.age === 57; })[0].age.should.equal(57);
|
||||
_.filter(docs, function(doc) { return doc.age === 52; })[0].age.should.equal(52);
|
||||
_.filter(docs, function(doc) { return doc.age === 23; })[0].age.should.equal(23);
|
||||
_.filter(docs, function(doc) { return doc.age === 89; })[0].age.should.equal(89);
|
||||
cb();
|
||||
});
|
||||
}
|
||||
, function (cb) {
|
||||
var cursor = new Cursor(d, { age: { $gt: 23 } });
|
||||
cursor.exec(function (err, docs) {
|
||||
assert.isNull(err);
|
||||
docs.length.should.equal(3);
|
||||
_.filter(docs, function(doc) { return doc.age === 57; })[0].age.should.equal(57);
|
||||
_.filter(docs, function(doc) { return doc.age === 52; })[0].age.should.equal(52);
|
||||
_.filter(docs, function(doc) { return doc.age === 89; })[0].age.should.equal(89);
|
||||
cb();
|
||||
});
|
||||
}
|
||||
], done);
|
||||
});
|
||||
|
||||
it('With an empty collection', function (done) {
|
||||
async.waterfall([
|
||||
function (cb) {
|
||||
d.remove({}, { multi: true }, function(err) { return cb(err); })
|
||||
}
|
||||
, function (cb) {
|
||||
var cursor = new Cursor(d);
|
||||
cursor.exec(function (err, docs) {
|
||||
assert.isNull(err);
|
||||
docs.length.should.equal(0);
|
||||
cb();
|
||||
});
|
||||
}
|
||||
], done);
|
||||
});
|
||||
|
||||
it('With a limit', function (done) {
|
||||
var cursor = new Cursor(d);
|
||||
cursor.limit(3);
|
||||
cursor.exec(function (err, docs) {
|
||||
assert.isNull(err);
|
||||
docs.length.should.equal(3);
|
||||
// No way to predict which results are returned of course ...
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('With a skip', function (done) {
|
||||
var cursor = new Cursor(d);
|
||||
cursor.skip(2).exec(function (err, docs) {
|
||||
assert.isNull(err);
|
||||
docs.length.should.equal(3);
|
||||
// No way to predict which results are returned of course ...
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('With a limit and a skip and method chaining', function (done) {
|
||||
var cursor = new Cursor(d);
|
||||
cursor.limit(4).skip(3); // Only way to know that the right number of results was skipped is if limit + skip > number of results
|
||||
cursor.exec(function (err, docs) {
|
||||
assert.isNull(err);
|
||||
docs.length.should.equal(2);
|
||||
// No way to predict which results are returned of course ...
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
}); // ===== End of 'Without sorting' =====
|
||||
|
||||
|
||||
describe('Sorting of the results', function () {
|
||||
|
||||
beforeEach(function (done) {
|
||||
// We don't know the order in which docs wil be inserted but we ensure correctness by testing both sort orders
|
||||
d.insert({ age: 5 }, function (err) {
|
||||
d.insert({ age: 57 }, function (err) {
|
||||
d.insert({ age: 52 }, function (err) {
|
||||
d.insert({ age: 23 }, function (err) {
|
||||
d.insert({ age: 89 }, function (err) {
|
||||
return done();
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('Using one sort', function (done) {
|
||||
var cursor, i;
|
||||
|
||||
cursor = new Cursor(d, {});
|
||||
cursor.sort({ age: 1 });
|
||||
cursor.exec(function (err, docs) {
|
||||
assert.isNull(err);
|
||||
// Results are in ascending order
|
||||
for (i = 0; i < docs.length - 1; i += 1) {
|
||||
assert(docs[i].age < docs[i + 1].age)
|
||||
}
|
||||
|
||||
cursor.sort({ age: -1 });
|
||||
cursor.exec(function (err, docs) {
|
||||
assert.isNull(err);
|
||||
// Results are in descending order
|
||||
for (i = 0; i < docs.length - 1; i += 1) {
|
||||
assert(docs[i].age > docs[i + 1].age)
|
||||
}
|
||||
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('With an empty collection', function (done) {
|
||||
async.waterfall([
|
||||
function (cb) {
|
||||
d.remove({}, { multi: true }, function(err) { return cb(err); })
|
||||
}
|
||||
, function (cb) {
|
||||
var cursor = new Cursor(d);
|
||||
cursor.sort({ age: 1 });
|
||||
cursor.exec(function (err, docs) {
|
||||
assert.isNull(err);
|
||||
docs.length.should.equal(0);
|
||||
cb();
|
||||
});
|
||||
}
|
||||
], done);
|
||||
});
|
||||
|
||||
it('Ability to chain sorting and exec', function (done) {
|
||||
var i;
|
||||
async.waterfall([
|
||||
function (cb) {
|
||||
var cursor = new Cursor(d);
|
||||
cursor.sort({ age: 1 }).exec(function (err, docs) {
|
||||
assert.isNull(err);
|
||||
// Results are in ascending order
|
||||
for (i = 0; i < docs.length - 1; i += 1) {
|
||||
assert(docs[i].age < docs[i + 1].age)
|
||||
}
|
||||
cb();
|
||||
});
|
||||
}
|
||||
, function (cb) {
|
||||
var cursor = new Cursor(d);
|
||||
cursor.sort({ age: -1 }).exec(function (err, docs) {
|
||||
assert.isNull(err);
|
||||
// Results are in descending order
|
||||
for (i = 0; i < docs.length - 1; i += 1) {
|
||||
assert(docs[i].age > docs[i + 1].age)
|
||||
}
|
||||
cb();
|
||||
});
|
||||
}
|
||||
], done);
|
||||
});
|
||||
|
||||
it('Using limit and sort', function (done) {
|
||||
var i;
|
||||
async.waterfall([
|
||||
function (cb) {
|
||||
var cursor = new Cursor(d);
|
||||
cursor.sort({ age: 1 }).limit(3).exec(function (err, docs) {
|
||||
assert.isNull(err);
|
||||
docs.length.should.equal(3);
|
||||
docs[0].age.should.equal(5);
|
||||
docs[1].age.should.equal(23);
|
||||
docs[2].age.should.equal(52);
|
||||
cb();
|
||||
});
|
||||
}
|
||||
, function (cb) {
|
||||
var cursor = new Cursor(d);
|
||||
cursor.sort({ age: -1 }).limit(2).exec(function (err, docs) {
|
||||
assert.isNull(err);
|
||||
docs.length.should.equal(2);
|
||||
docs[0].age.should.equal(89);
|
||||
docs[1].age.should.equal(57);
|
||||
cb();
|
||||
});
|
||||
}
|
||||
], done);
|
||||
});
|
||||
|
||||
it('Using a limit higher than total number of docs shouldnt cause an error', function (done) {
|
||||
var i;
|
||||
async.waterfall([
|
||||
function (cb) {
|
||||
var cursor = new Cursor(d);
|
||||
cursor.sort({ age: 1 }).limit(7).exec(function (err, docs) {
|
||||
assert.isNull(err);
|
||||
docs.length.should.equal(5);
|
||||
docs[0].age.should.equal(5);
|
||||
docs[1].age.should.equal(23);
|
||||
docs[2].age.should.equal(52);
|
||||
docs[3].age.should.equal(57);
|
||||
docs[4].age.should.equal(89);
|
||||
cb();
|
||||
});
|
||||
}
|
||||
], done);
|
||||
});
|
||||
|
||||
it('Using limit and skip with sort', function (done) {
|
||||
var i;
|
||||
async.waterfall([
|
||||
function (cb) {
|
||||
var cursor = new Cursor(d);
|
||||
cursor.sort({ age: 1 }).limit(1).skip(2).exec(function (err, docs) {
|
||||
assert.isNull(err);
|
||||
docs.length.should.equal(1);
|
||||
docs[0].age.should.equal(52);
|
||||
cb();
|
||||
});
|
||||
}
|
||||
, function (cb) {
|
||||
var cursor = new Cursor(d);
|
||||
cursor.sort({ age: 1 }).limit(3).skip(1).exec(function (err, docs) {
|
||||
assert.isNull(err);
|
||||
docs.length.should.equal(3);
|
||||
docs[0].age.should.equal(23);
|
||||
docs[1].age.should.equal(52);
|
||||
docs[2].age.should.equal(57);
|
||||
cb();
|
||||
});
|
||||
}
|
||||
, function (cb) {
|
||||
var cursor = new Cursor(d);
|
||||
cursor.sort({ age: -1 }).limit(2).skip(2).exec(function (err, docs) {
|
||||
assert.isNull(err);
|
||||
docs.length.should.equal(2);
|
||||
docs[0].age.should.equal(52);
|
||||
docs[1].age.should.equal(23);
|
||||
cb();
|
||||
});
|
||||
}
|
||||
], done);
|
||||
});
|
||||
|
||||
it('Using too big a limit and a skip with sort', function (done) {
|
||||
var i;
|
||||
async.waterfall([
|
||||
function (cb) {
|
||||
var cursor = new Cursor(d);
|
||||
cursor.sort({ age: 1 }).limit(8).skip(2).exec(function (err, docs) {
|
||||
assert.isNull(err);
|
||||
docs.length.should.equal(3);
|
||||
docs[0].age.should.equal(52);
|
||||
docs[1].age.should.equal(57);
|
||||
docs[2].age.should.equal(89);
|
||||
cb();
|
||||
});
|
||||
}
|
||||
], done);
|
||||
});
|
||||
|
||||
it('Using too big a skip with sort should return no result', function (done) {
|
||||
var i;
|
||||
async.waterfall([
|
||||
function (cb) {
|
||||
var cursor = new Cursor(d);
|
||||
cursor.sort({ age: 1 }).skip(5).exec(function (err, docs) {
|
||||
assert.isNull(err);
|
||||
docs.length.should.equal(0);
|
||||
cb();
|
||||
});
|
||||
}
|
||||
, function (cb) {
|
||||
var cursor = new Cursor(d);
|
||||
cursor.sort({ age: 1 }).skip(7).exec(function (err, docs) {
|
||||
assert.isNull(err);
|
||||
docs.length.should.equal(0);
|
||||
cb();
|
||||
});
|
||||
}
|
||||
, function (cb) {
|
||||
var cursor = new Cursor(d);
|
||||
cursor.sort({ age: 1 }).limit(3).skip(7).exec(function (err, docs) {
|
||||
assert.isNull(err);
|
||||
docs.length.should.equal(0);
|
||||
cb();
|
||||
});
|
||||
}
|
||||
, function (cb) {
|
||||
var cursor = new Cursor(d);
|
||||
cursor.sort({ age: 1 }).limit(6).skip(7).exec(function (err, docs) {
|
||||
assert.isNull(err);
|
||||
docs.length.should.equal(0);
|
||||
cb();
|
||||
});
|
||||
}
|
||||
], done);
|
||||
});
|
||||
|
||||
it('Sorting strings', function (done) {
|
||||
async.waterfall([
|
||||
function (cb) {
|
||||
d.remove({}, { multi: true }, function (err) {
|
||||
if (err) { return cb(err); }
|
||||
|
||||
d.insert({ name: 'jako'}, function () {
|
||||
d.insert({ name: 'jakeb' }, function () {
|
||||
d.insert({ name: 'sue' }, function () {
|
||||
return cb();
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
, function (cb) {
|
||||
var cursor = new Cursor(d, {});
|
||||
cursor.sort({ name: 1 }).exec(function (err, docs) {
|
||||
docs.length.should.equal(3);
|
||||
docs[0].name.should.equal('jakeb');
|
||||
docs[1].name.should.equal('jako');
|
||||
docs[2].name.should.equal('sue');
|
||||
return cb();
|
||||
});
|
||||
}
|
||||
, function (cb) {
|
||||
var cursor = new Cursor(d, {});
|
||||
cursor.sort({ name: -1 }).exec(function (err, docs) {
|
||||
docs.length.should.equal(3);
|
||||
docs[0].name.should.equal('sue');
|
||||
docs[1].name.should.equal('jako');
|
||||
docs[2].name.should.equal('jakeb');
|
||||
return cb();
|
||||
});
|
||||
}
|
||||
], done);
|
||||
});
|
||||
|
||||
it('Sorting nested fields with dates', function (done) {
|
||||
var doc1, doc2, doc3;
|
||||
|
||||
async.waterfall([
|
||||
function (cb) {
|
||||
d.remove({}, { multi: true }, function (err) {
|
||||
if (err) { return cb(err); }
|
||||
|
||||
d.insert({ event: { recorded: new Date(400) } }, function (err, _doc1) {
|
||||
doc1 = _doc1;
|
||||
d.insert({ event: { recorded: new Date(60000) } }, function (err, _doc2) {
|
||||
doc2 = _doc2;
|
||||
d.insert({ event: { recorded: new Date(32) } }, function (err, _doc3) {
|
||||
doc3 = _doc3;
|
||||
return cb();
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
, function (cb) {
|
||||
var cursor = new Cursor(d, {});
|
||||
cursor.sort({ "event.recorded": 1 }).exec(function (err, docs) {
|
||||
docs.length.should.equal(3);
|
||||
docs[0]._id.should.equal(doc3._id);
|
||||
docs[1]._id.should.equal(doc1._id);
|
||||
docs[2]._id.should.equal(doc2._id);
|
||||
return cb();
|
||||
});
|
||||
}
|
||||
, function (cb) {
|
||||
var cursor = new Cursor(d, {});
|
||||
cursor.sort({ "event.recorded": -1 }).exec(function (err, docs) {
|
||||
docs.length.should.equal(3);
|
||||
docs[0]._id.should.equal(doc2._id);
|
||||
docs[1]._id.should.equal(doc1._id);
|
||||
docs[2]._id.should.equal(doc3._id);
|
||||
return cb();
|
||||
});
|
||||
}
|
||||
], done);
|
||||
});
|
||||
|
||||
it('Sorting when some fields are undefined', function (done) {
|
||||
async.waterfall([
|
||||
function (cb) {
|
||||
d.remove({}, { multi: true }, function (err) {
|
||||
if (err) { return cb(err); }
|
||||
|
||||
d.insert({ name: 'jako', other: 2 }, function () {
|
||||
d.insert({ name: 'jakeb', other: 3 }, function () {
|
||||
d.insert({ name: 'sue' }, function () {
|
||||
d.insert({ name: 'henry', other: 4 }, function () {
|
||||
return cb();
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
, function (cb) {
|
||||
var cursor = new Cursor(d, {});
|
||||
cursor.sort({ other: 1 }).exec(function (err, docs) {
|
||||
docs.length.should.equal(4);
|
||||
docs[0].name.should.equal('sue');
|
||||
assert.isUndefined(docs[0].other);
|
||||
docs[1].name.should.equal('jako');
|
||||
docs[1].other.should.equal(2);
|
||||
docs[2].name.should.equal('jakeb');
|
||||
docs[2].other.should.equal(3);
|
||||
docs[3].name.should.equal('henry');
|
||||
docs[3].other.should.equal(4);
|
||||
return cb();
|
||||
});
|
||||
}
|
||||
, function (cb) {
|
||||
var cursor = new Cursor(d, { name: { $in: [ 'suzy', 'jakeb', 'jako' ] } });
|
||||
cursor.sort({ other: -1 }).exec(function (err, docs) {
|
||||
docs.length.should.equal(2);
|
||||
docs[0].name.should.equal('jakeb');
|
||||
docs[0].other.should.equal(3);
|
||||
docs[1].name.should.equal('jako');
|
||||
docs[1].other.should.equal(2);
|
||||
return cb();
|
||||
});
|
||||
}
|
||||
], done);
|
||||
});
|
||||
|
||||
it('Sorting when all fields are undefined', function (done) {
|
||||
async.waterfall([
|
||||
function (cb) {
|
||||
d.remove({}, { multi: true }, function (err) {
|
||||
if (err) { return cb(err); }
|
||||
|
||||
d.insert({ name: 'jako'}, function () {
|
||||
d.insert({ name: 'jakeb' }, function () {
|
||||
d.insert({ name: 'sue' }, function () {
|
||||
return cb();
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
, function (cb) {
|
||||
var cursor = new Cursor(d, {});
|
||||
cursor.sort({ other: 1 }).exec(function (err, docs) {
|
||||
docs.length.should.equal(3);
|
||||
return cb();
|
||||
});
|
||||
}
|
||||
, function (cb) {
|
||||
var cursor = new Cursor(d, { name: { $in: [ 'sue', 'jakeb', 'jakob' ] } });
|
||||
cursor.sort({ other: -1 }).exec(function (err, docs) {
|
||||
docs.length.should.equal(2);
|
||||
return cb();
|
||||
});
|
||||
}
|
||||
], done);
|
||||
});
|
||||
|
||||
it('Multiple consecutive sorts', function(done) {
|
||||
async.waterfall([
|
||||
function (cb) {
|
||||
d.remove({}, { multi: true }, function (err) {
|
||||
if (err) { return cb(err); }
|
||||
|
||||
d.insert({ name: 'jako', age: 43, nid: 1 }, function () {
|
||||
d.insert({ name: 'jakeb', age: 43, nid: 2 }, function () {
|
||||
d.insert({ name: 'sue', age: 12, nid: 3 }, function () {
|
||||
d.insert({ name: 'zoe', age: 23, nid: 4 }, function () {
|
||||
d.insert({ name: 'jako', age: 35, nid: 5 }, function () {
|
||||
return cb();
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
, function (cb) {
|
||||
var cursor = new Cursor(d, {});
|
||||
cursor.sort({ name: 1, age: -1 }).exec(function (err, docs) {
|
||||
docs.length.should.equal(5);
|
||||
|
||||
docs[0].nid.should.equal(2);
|
||||
docs[1].nid.should.equal(1);
|
||||
docs[2].nid.should.equal(5);
|
||||
docs[3].nid.should.equal(3);
|
||||
docs[4].nid.should.equal(4);
|
||||
return cb();
|
||||
});
|
||||
}
|
||||
, function (cb) {
|
||||
var cursor = new Cursor(d, {});
|
||||
cursor.sort({ name: 1, age: 1 }).exec(function (err, docs) {
|
||||
docs.length.should.equal(5);
|
||||
|
||||
docs[0].nid.should.equal(2);
|
||||
docs[1].nid.should.equal(5);
|
||||
docs[2].nid.should.equal(1);
|
||||
docs[3].nid.should.equal(3);
|
||||
docs[4].nid.should.equal(4);
|
||||
return cb();
|
||||
});
|
||||
}
|
||||
, function (cb) {
|
||||
var cursor = new Cursor(d, {});
|
||||
cursor.sort({ age: 1, name: 1 }).exec(function (err, docs) {
|
||||
docs.length.should.equal(5);
|
||||
|
||||
docs[0].nid.should.equal(3);
|
||||
docs[1].nid.should.equal(4);
|
||||
docs[2].nid.should.equal(5);
|
||||
docs[3].nid.should.equal(2);
|
||||
docs[4].nid.should.equal(1);
|
||||
return cb();
|
||||
});
|
||||
}
|
||||
, function (cb) {
|
||||
var cursor = new Cursor(d, {});
|
||||
cursor.sort({ age: 1, name: -1 }).exec(function (err, docs) {
|
||||
docs.length.should.equal(5);
|
||||
|
||||
docs[0].nid.should.equal(3);
|
||||
docs[1].nid.should.equal(4);
|
||||
docs[2].nid.should.equal(5);
|
||||
docs[3].nid.should.equal(1);
|
||||
docs[4].nid.should.equal(2);
|
||||
return cb();
|
||||
});
|
||||
}
|
||||
], done); });
|
||||
|
||||
it('Similar data, multiple consecutive sorts', function(done) {
|
||||
var i, j, id
|
||||
, companies = [ 'acme', 'milkman', 'zoinks' ]
|
||||
, entities = []
|
||||
;
|
||||
|
||||
async.waterfall([
|
||||
function (cb) {
|
||||
d.remove({}, { multi: true }, function (err) {
|
||||
if (err) { return cb(err); }
|
||||
|
||||
id = 1;
|
||||
for (i = 0; i < companies.length; i++) {
|
||||
for (j = 5; j <= 100; j += 5) {
|
||||
entities.push({
|
||||
company: companies[i],
|
||||
cost: j,
|
||||
nid: id
|
||||
});
|
||||
id++;
|
||||
}
|
||||
}
|
||||
|
||||
async.each(entities, function(entity, callback) {
|
||||
d.insert(entity, function() {
|
||||
callback();
|
||||
});
|
||||
}, function(err) {
|
||||
return cb();
|
||||
});
|
||||
});
|
||||
}
|
||||
, function (cb) {
|
||||
var cursor = new Cursor(d, {});
|
||||
cursor.sort({ company: 1, cost: 1 }).exec(function (err, docs) {
|
||||
docs.length.should.equal(60);
|
||||
|
||||
for (var i = 0; i < docs.length; i++) {
|
||||
docs[i].nid.should.equal(i+1);
|
||||
};
|
||||
return cb();
|
||||
});
|
||||
}
|
||||
], done); });
|
||||
|
||||
}); // ===== End of 'Sorting' =====
|
||||
|
||||
|
||||
describe('Projections', function () {
|
||||
var doc1, doc2, doc3, doc4, doc0;
|
||||
|
||||
|
||||
beforeEach(function (done) {
|
||||
// We don't know the order in which docs wil be inserted but we ensure correctness by testing both sort orders
|
||||
d.insert({ age: 5, name: 'Jo', planet: 'B' }, function (err, _doc0) {
|
||||
doc0 = _doc0;
|
||||
d.insert({ age: 57, name: 'Louis', planet: 'R' }, function (err, _doc1) {
|
||||
doc1 = _doc1;
|
||||
d.insert({ age: 52, name: 'Grafitti', planet: 'C' }, function (err, _doc2) {
|
||||
doc2 = _doc2;
|
||||
d.insert({ age: 23, name: 'LM', planet: 'S' }, function (err, _doc3) {
|
||||
doc3 = _doc3;
|
||||
d.insert({ age: 89, planet: 'Earth' }, function (err, _doc4) {
|
||||
doc4 = _doc4;
|
||||
return done();
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('Takes all results if no projection or empty object given', function (done) {
|
||||
var cursor = new Cursor(d, {});
|
||||
cursor.sort({ age: 1 }); // For easier finding
|
||||
cursor.exec(function (err, docs) {
|
||||
assert.isNull(err);
|
||||
docs.length.should.equal(5);
|
||||
assert.deepEqual(docs[0], doc0);
|
||||
assert.deepEqual(docs[1], doc3);
|
||||
assert.deepEqual(docs[2], doc2);
|
||||
assert.deepEqual(docs[3], doc1);
|
||||
assert.deepEqual(docs[4], doc4);
|
||||
|
||||
cursor.projection({});
|
||||
cursor.exec(function (err, docs) {
|
||||
assert.isNull(err);
|
||||
docs.length.should.equal(5);
|
||||
assert.deepEqual(docs[0], doc0);
|
||||
assert.deepEqual(docs[1], doc3);
|
||||
assert.deepEqual(docs[2], doc2);
|
||||
assert.deepEqual(docs[3], doc1);
|
||||
assert.deepEqual(docs[4], doc4);
|
||||
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('Can take only the expected fields', function (done) {
|
||||
var cursor = new Cursor(d, {});
|
||||
cursor.sort({ age: 1 }); // For easier finding
|
||||
cursor.projection({ age: 1, name: 1 });
|
||||
cursor.exec(function (err, docs) {
|
||||
assert.isNull(err);
|
||||
docs.length.should.equal(5);
|
||||
// Takes the _id by default
|
||||
assert.deepEqual(docs[0], { age: 5, name: 'Jo', _id: doc0._id });
|
||||
assert.deepEqual(docs[1], { age: 23, name: 'LM', _id: doc3._id });
|
||||
assert.deepEqual(docs[2], { age: 52, name: 'Grafitti', _id: doc2._id });
|
||||
assert.deepEqual(docs[3], { age: 57, name: 'Louis', _id: doc1._id });
|
||||
assert.deepEqual(docs[4], { age: 89, _id: doc4._id }); // No problems if one field to take doesn't exist
|
||||
|
||||
cursor.projection({ age: 1, name: 1, _id: 0 });
|
||||
cursor.exec(function (err, docs) {
|
||||
assert.isNull(err);
|
||||
docs.length.should.equal(5);
|
||||
assert.deepEqual(docs[0], { age: 5, name: 'Jo' });
|
||||
assert.deepEqual(docs[1], { age: 23, name: 'LM' });
|
||||
assert.deepEqual(docs[2], { age: 52, name: 'Grafitti' });
|
||||
assert.deepEqual(docs[3], { age: 57, name: 'Louis' });
|
||||
assert.deepEqual(docs[4], { age: 89 }); // No problems if one field to take doesn't exist
|
||||
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
// Projections in "omit" mode: the listed fields are stripped, everything
// else (including _id unless explicitly excluded) is kept.
it('Can omit only the expected fields', function (done) {
  var cursor = new Cursor(d, {});
  cursor.sort({ age: 1 });   // For easier finding
  cursor.projection({ age: 0, name: 0 });

  cursor.exec(function (err, docs) {
    assert.isNull(err);
    docs.length.should.equal(5);
    // Takes the _id by default
    assert.deepEqual(docs[0], { planet: 'B', _id: doc0._id });
    assert.deepEqual(docs[1], { planet: 'S', _id: doc3._id });
    assert.deepEqual(docs[2], { planet: 'C', _id: doc2._id });
    assert.deepEqual(docs[3], { planet: 'R', _id: doc1._id });
    assert.deepEqual(docs[4], { planet: 'Earth', _id: doc4._id });

    // Same projection but with _id excluded as well
    cursor.projection({ age: 0, name: 0, _id: 0 });
    cursor.exec(function (err, docs) {
      assert.isNull(err);
      docs.length.should.equal(5);
      assert.deepEqual(docs[0], { planet: 'B' });
      assert.deepEqual(docs[1], { planet: 'S' });
      assert.deepEqual(docs[2], { planet: 'C' });
      assert.deepEqual(docs[3], { planet: 'R' });
      assert.deepEqual(docs[4], { planet: 'Earth' });

      done();
    });
  });
});
|
||||
|
||||
// Mixing take (1) and omit (0) in one projection is invalid (except for
// _id) and must surface as an error with no documents returned.
it('Cannot use both modes except for _id', function (done) {
  var cursor = new Cursor(d, {});
  cursor.sort({ age: 1 });   // For easier finding
  cursor.projection({ age: 1, name: 0 });

  cursor.exec(function (err, docs) {
    assert.isNotNull(err);
    assert.isUndefined(docs);
    done();
  });
});
|
||||
|
||||
}); // ==== End of 'Projections' ====
|
||||
|
||||
});
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
@ -0,0 +1,26 @@
|
||||
var should = require('chai').should()
  , assert = require('chai').assert
  , customUtils = require('../lib/customUtils')
  , fs = require('fs')
  ;


describe('customUtils', function () {

  describe('uid', function () {

    // uid(n) must produce a string of exactly n characters
    it('Generates a string of the expected length', function () {
      customUtils.uid(3).length.should.equal(3);
      customUtils.uid(16).length.should.equal(16);
      customUtils.uid(42).length.should.equal(42);
      customUtils.uid(1000).length.should.equal(1000);
    });

    // Very small probability of conflict
    it('Generated uids should not be the same', function () {
      customUtils.uid(56).should.not.equal(customUtils.uid(56));
    });

  });

});
|
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,163 @@
|
||||
var should = require('chai').should()
  , assert = require('chai').assert
  , testDb = 'workspace/test.db'
  , fs = require('fs')
  , path = require('path')
  , _ = require('underscore')
  , async = require('async')
  , model = require('../lib/model')
  , Datastore = require('../lib/datastore')
  , Persistence = require('../lib/persistence')
  ;
||||
|
||||
|
||||
// Test that even if a callback throws an exception, the next DB operations will still be executed.
// We prevent Mocha from catching the exception we throw on purpose by remembering all current
// 'uncaughtException' handlers, removing them, and registering them back once the test ends.
function testThrowInCallback (d, done) {
  var currentUncaughtExceptionHandlers = process.listeners('uncaughtException');

  process.removeAllListeners('uncaughtException');

  process.on('uncaughtException', function (err) {
    // Do nothing with the error which is only there to test we stay on track
  });

  d.find({}, function (err) {
    process.nextTick(function () {
      d.insert({ bar: 1 }, function (err) {
        // Restore the handlers (Mocha's among them) we removed above
        for (var i = 0; i < currentUncaughtExceptionHandlers.length; i += 1) {
          process.on('uncaughtException', currentUncaughtExceptionHandlers[i]);
        }

        done();
      });
    });

    // Thrown on purpose: the executor must survive it and run the insert above
    throw 'Some error';
  });
}
|
||||
|
||||
|
||||
// Test that operations are executed in the right order even when a callback throws.
// We prevent Mocha from catching the exception we throw on purpose by remembering all current
// 'uncaughtException' handlers, removing them, and registering them back once the test ends.
function testRightOrder (d, done) {
  var currentUncaughtExceptionHandlers = process.listeners('uncaughtException');

  process.removeAllListeners('uncaughtException');

  process.on('uncaughtException', function (err) {
    // Do nothing with the error which is only there to test we stay on track
  });

  d.find({}, function (err, docs) {
    docs.length.should.equal(0);

    d.insert({ a: 1 }, function () {
      d.update({ a: 1 }, { a: 2 }, {}, function () {
        d.find({}, function (err, docs) {
          docs[0].a.should.equal(2);

          process.nextTick(function () {
            d.update({ a: 2 }, { a: 3 }, {}, function () {
              d.find({}, function (err, docs) {
                docs[0].a.should.equal(3);

                // Restore the handlers we removed above (mirrors testThrowInCallback;
                // the original forgot this, leaking the no-op handler into later tests)
                for (var i = 0; i < currentUncaughtExceptionHandlers.length; i += 1) {
                  process.on('uncaughtException', currentUncaughtExceptionHandlers[i]);
                }

                done();
              });
            });
          });

          // Thrown on purpose: the queued updates above must still run, in order
          throw 'Some error';
        });
      });
    });
  });
}
|
||||
|
||||
|
||||
|
||||
// Note: The following test does not have any assertion because it
// is meant to address the deprecation warning:
// (node) warning: Recursive process.nextTick detected. This will break in the next version of node. Please use setImmediate for recursive deferral.
// see
var testEventLoopStarvation = function (d, done) {
  // Queue more than 1000 operations on the executor; the test passes if
  // no recursive-nextTick warning is raised and the process stays alive
  var times = 1001;
  var i = 0;

  while (i < times) {
    i += 1;
    d.find({ "bogus": "search" }, function (err, docs) {});
  }
  done();
};
|
||||
|
||||
describe('Executor', function () {

  describe('With persistent database', function () {
    var d;

    // Start every test from an empty on-disk database
    beforeEach(function (done) {
      d = new Datastore({ filename: testDb });
      d.filename.should.equal(testDb);
      d.inMemoryOnly.should.equal(false);

      async.waterfall([
        function (cb) {
          Persistence.ensureDirectoryExists(path.dirname(testDb), function () {
            fs.exists(testDb, function (exists) {
              if (exists) {
                fs.unlink(testDb, cb);
              } else { return cb(); }
            });
          });
        }
      , function (cb) {
          d.loadDatabase(function (err) {
            assert.isNull(err);
            d.getAllData().length.should.equal(0);
            return cb();
          });
        }
      ], done);
    });

    it('A throw in a callback doesnt prevent execution of next operations', function (done) {
      testThrowInCallback(d, done);
    });

    it('Operations are executed in the right order', function (done) {
      testRightOrder(d, done);
    });

    it('Does not starve event loop and raise warning when more than 1000 callbacks are in queue', function (done) {
      testEventLoopStarvation(d, done);
    });

  });   // ==== End of 'With persistent database' ====


  describe('With non persistent database', function () {
    var d;

    // Start every test from an empty in-memory database
    beforeEach(function (done) {
      d = new Datastore({ inMemoryOnly: true });
      d.inMemoryOnly.should.equal(true);

      d.loadDatabase(function (err) {
        assert.isNull(err);
        d.getAllData().length.should.equal(0);
        return done();
      });
    });

    it('A throw in a callback doesnt prevent execution of next operations', function (done) {
      testThrowInCallback(d, done);
    });

    it('Operations are executed in the right order', function (done) {
      testRightOrder(d, done);
    });

  });   // ==== End of 'With non persistent database' ====

});
|
@ -0,0 +1,768 @@
|
||||
var Index = require('../lib/indexes')
  , customUtils = require('../lib/customUtils')
  , should = require('chai').should()
  , assert = require('chai').assert
  , _ = require('underscore')
  , async = require('async')
  , model = require('../lib/model')
  ;
||||
|
||||
describe('Indexes', function () {
|
||||
|
||||
describe('Insertion', function () {

  it('Can insert pointers to documents in the index correctly when they have the field', function () {
    var idx = new Index({ fieldName: 'tf' })
      , doc1 = { a: 5, tf: 'hello' }
      , doc2 = { a: 8, tf: 'world' }
      , doc3 = { a: 2, tf: 'bloup' }
      ;

    idx.insert(doc1);
    idx.insert(doc2);
    idx.insert(doc3);

    // The underlying BST now has 3 nodes which contain the docs where it's expected
    idx.tree.getNumberOfKeys().should.equal(3);
    assert.deepEqual(idx.tree.search('hello'), [{ a: 5, tf: 'hello' }]);
    assert.deepEqual(idx.tree.search('world'), [{ a: 8, tf: 'world' }]);
    assert.deepEqual(idx.tree.search('bloup'), [{ a: 2, tf: 'bloup' }]);

    // The nodes contain pointers to the actual documents
    idx.tree.search('world')[0].should.equal(doc2);
    idx.tree.search('bloup')[0].a = 42;
    doc3.a.should.equal(42);
  });

  it('Inserting twice for the same fieldName in a unique index will result in an error thrown', function () {
    var idx = new Index({ fieldName: 'tf', unique: true })
      , doc1 = { a: 5, tf: 'hello' }
      ;

    idx.insert(doc1);
    idx.tree.getNumberOfKeys().should.equal(1);
    (function () { idx.insert(doc1); }).should.throw();
  });

  it('Inserting twice for a fieldName the docs dont have with a unique index results in an error thrown', function () {
    var idx = new Index({ fieldName: 'nope', unique: true })
      , doc1 = { a: 5, tf: 'hello' }
      , doc2 = { a: 5, tf: 'world' }
      ;

    // Both docs index under undefined, which collides on a unique index
    idx.insert(doc1);
    idx.tree.getNumberOfKeys().should.equal(1);
    (function () { idx.insert(doc2); }).should.throw();
  });

  it('Inserting twice for a fieldName the docs dont have with a unique and sparse index will not throw, since the docs will be non indexed', function () {
    var idx = new Index({ fieldName: 'nope', unique: true, sparse: true })
      , doc1 = { a: 5, tf: 'hello' }
      , doc2 = { a: 5, tf: 'world' }
      ;

    idx.insert(doc1);
    idx.insert(doc2);
    idx.tree.getNumberOfKeys().should.equal(0);   // Docs are not indexed
  });

  it('Works with dot notation', function () {
    var idx = new Index({ fieldName: 'tf.nested' })
      , doc1 = { a: 5, tf: { nested: 'hello' } }
      , doc2 = { a: 8, tf: { nested: 'world', additional: true } }
      , doc3 = { a: 2, tf: { nested: 'bloup', age: 42 } }
      ;

    idx.insert(doc1);
    idx.insert(doc2);
    idx.insert(doc3);

    // The underlying BST now has 3 nodes which contain the docs where it's expected
    idx.tree.getNumberOfKeys().should.equal(3);
    assert.deepEqual(idx.tree.search('hello'), [doc1]);
    assert.deepEqual(idx.tree.search('world'), [doc2]);
    assert.deepEqual(idx.tree.search('bloup'), [doc3]);

    // The nodes contain pointers to the actual documents
    idx.tree.search('bloup')[0].a = 42;
    doc3.a.should.equal(42);
  });

  it('Can insert an array of documents', function () {
    var idx = new Index({ fieldName: 'tf' })
      , doc1 = { a: 5, tf: 'hello' }
      , doc2 = { a: 8, tf: 'world' }
      , doc3 = { a: 2, tf: 'bloup' }
      ;

    idx.insert([doc1, doc2, doc3]);
    idx.tree.getNumberOfKeys().should.equal(3);
    assert.deepEqual(idx.tree.search('hello'), [doc1]);
    assert.deepEqual(idx.tree.search('world'), [doc2]);
    assert.deepEqual(idx.tree.search('bloup'), [doc3]);
  });

  it('When inserting an array of elements, if an error is thrown all inserts need to be rolled back', function () {
    var idx = new Index({ fieldName: 'tf', unique: true })
      , doc1 = { a: 5, tf: 'hello' }
      , doc2 = { a: 8, tf: 'world' }
      , doc2b = { a: 84, tf: 'world' }   // Duplicate key: triggers the rollback
      , doc3 = { a: 2, tf: 'bloup' }
      ;

    try {
      idx.insert([doc1, doc2, doc2b, doc3]);
    } catch (e) {
      e.errorType.should.equal('uniqueViolated');
    }
    idx.tree.getNumberOfKeys().should.equal(0);
    assert.deepEqual(idx.tree.search('hello'), []);
    assert.deepEqual(idx.tree.search('world'), []);
    assert.deepEqual(idx.tree.search('bloup'), []);
  });

  describe('Array fields', function () {

    it('Inserts one entry per array element in the index', function () {
      var obj = { tf: ['aa', 'bb'], really: 'yeah' }
        , obj2 = { tf: 'normal', yes: 'indeed' }
        , idx = new Index({ fieldName: 'tf' })
        ;

      idx.insert(obj);
      idx.getAll().length.should.equal(2);
      idx.getAll()[0].should.equal(obj);
      idx.getAll()[1].should.equal(obj);

      idx.insert(obj2);
      idx.getAll().length.should.equal(3);
    });

    it('Inserts one entry per array element in the index, type-checked', function () {
      // The two 42s are the same key, so only 3 distinct entries expected
      var obj = { tf: ['42', 42, new Date(42), 42], really: 'yeah' }
        , idx = new Index({ fieldName: 'tf' })
        ;

      idx.insert(obj);
      idx.getAll().length.should.equal(3);
      idx.getAll()[0].should.equal(obj);
      idx.getAll()[1].should.equal(obj);
      idx.getAll()[2].should.equal(obj);
    });

    it('Inserts one entry per unique array element in the index, the unique constraint only holds across documents', function () {
      var obj = { tf: ['aa', 'aa'], really: 'yeah' }
        , obj2 = { tf: ['cc', 'yy', 'cc'], yes: 'indeed' }
        , idx = new Index({ fieldName: 'tf', unique: true })
        ;

      idx.insert(obj);
      idx.getAll().length.should.equal(1);
      idx.getAll()[0].should.equal(obj);

      idx.insert(obj2);
      idx.getAll().length.should.equal(3);
    });

    it('The unique constraint holds across documents', function () {
      var obj = { tf: ['aa', 'aa'], really: 'yeah' }
        , obj2 = { tf: ['cc', 'aa', 'cc'], yes: 'indeed' }
        , idx = new Index({ fieldName: 'tf', unique: true })
        ;

      idx.insert(obj);
      idx.getAll().length.should.equal(1);
      idx.getAll()[0].should.equal(obj);

      (function () { idx.insert(obj2); }).should.throw();
    });

    it('When removing a document, remove it from the index at all unique array elements', function () {
      var obj = { tf: ['aa', 'aa'], really: 'yeah' }
        , obj2 = { tf: ['cc', 'aa', 'cc'], yes: 'indeed' }
        , idx = new Index({ fieldName: 'tf' })
        ;

      idx.insert(obj);
      idx.insert(obj2);
      idx.getMatching('aa').length.should.equal(2);
      idx.getMatching('aa').indexOf(obj).should.not.equal(-1);
      idx.getMatching('aa').indexOf(obj2).should.not.equal(-1);
      idx.getMatching('cc').length.should.equal(1);

      idx.remove(obj2);
      idx.getMatching('aa').length.should.equal(1);
      idx.getMatching('aa').indexOf(obj).should.not.equal(-1);
      idx.getMatching('aa').indexOf(obj2).should.equal(-1);
      idx.getMatching('cc').length.should.equal(0);
    });

    it('If a unique constraint is violated when inserting an array key, roll back all inserts before the key', function () {
      var obj = { tf: ['aa', 'bb'], really: 'yeah' }
        , obj2 = { tf: ['cc', 'dd', 'aa', 'ee'], yes: 'indeed' }
        , idx = new Index({ fieldName: 'tf', unique: true })
        ;

      idx.insert(obj);
      idx.getAll().length.should.equal(2);
      idx.getMatching('aa').length.should.equal(1);
      idx.getMatching('bb').length.should.equal(1);
      idx.getMatching('cc').length.should.equal(0);
      idx.getMatching('dd').length.should.equal(0);
      idx.getMatching('ee').length.should.equal(0);

      // 'aa' collides, so 'cc' and 'dd' must be rolled back too
      (function () { idx.insert(obj2); }).should.throw();
      idx.getAll().length.should.equal(2);
      idx.getMatching('aa').length.should.equal(1);
      idx.getMatching('bb').length.should.equal(1);
      idx.getMatching('cc').length.should.equal(0);
      idx.getMatching('dd').length.should.equal(0);
      idx.getMatching('ee').length.should.equal(0);
    });

  });   // ==== End of 'Array fields' ==== //

});   // ==== End of 'Insertion' ==== //
|
||||
|
||||
|
||||
describe('Removal', function () {

  it('Can remove pointers from the index, even when multiple documents have the same key', function () {
    var idx = new Index({ fieldName: 'tf' })
      , doc1 = { a: 5, tf: 'hello' }
      , doc2 = { a: 8, tf: 'world' }
      , doc3 = { a: 2, tf: 'bloup' }
      , doc4 = { a: 23, tf: 'world' }
      ;

    idx.insert(doc1);
    idx.insert(doc2);
    idx.insert(doc3);
    idx.insert(doc4);
    idx.tree.getNumberOfKeys().should.equal(3);

    idx.remove(doc1);
    idx.tree.getNumberOfKeys().should.equal(2);
    idx.tree.search('hello').length.should.equal(0);

    // 'world' key survives because doc4 still holds it
    idx.remove(doc2);
    idx.tree.getNumberOfKeys().should.equal(2);
    idx.tree.search('world').length.should.equal(1);
    idx.tree.search('world')[0].should.equal(doc4);
  });

  it('If we have a sparse index, removing a non indexed doc has no effect', function () {
    var idx = new Index({ fieldName: 'nope', sparse: true })
      , doc1 = { a: 5, tf: 'hello' }
      , doc2 = { a: 5, tf: 'world' }
      ;

    idx.insert(doc1);
    idx.insert(doc2);
    idx.tree.getNumberOfKeys().should.equal(0);

    idx.remove(doc1);
    idx.tree.getNumberOfKeys().should.equal(0);
  });

  it('Works with dot notation', function () {
    var idx = new Index({ fieldName: 'tf.nested' })
      , doc1 = { a: 5, tf: { nested: 'hello' } }
      , doc2 = { a: 8, tf: { nested: 'world', additional: true } }
      , doc3 = { a: 2, tf: { nested: 'bloup', age: 42 } }
      , doc4 = { a: 2, tf: { nested: 'world', fruits: ['apple', 'carrot'] } }
      ;

    idx.insert(doc1);
    idx.insert(doc2);
    idx.insert(doc3);
    idx.insert(doc4);
    idx.tree.getNumberOfKeys().should.equal(3);

    idx.remove(doc1);
    idx.tree.getNumberOfKeys().should.equal(2);
    idx.tree.search('hello').length.should.equal(0);

    idx.remove(doc2);
    idx.tree.getNumberOfKeys().should.equal(2);
    idx.tree.search('world').length.should.equal(1);
    idx.tree.search('world')[0].should.equal(doc4);
  });

  it('Can remove an array of documents', function () {
    var idx = new Index({ fieldName: 'tf' })
      , doc1 = { a: 5, tf: 'hello' }
      , doc2 = { a: 8, tf: 'world' }
      , doc3 = { a: 2, tf: 'bloup' }
      ;

    idx.insert([doc1, doc2, doc3]);
    idx.tree.getNumberOfKeys().should.equal(3);
    idx.remove([doc1, doc3]);
    idx.tree.getNumberOfKeys().should.equal(1);
    assert.deepEqual(idx.tree.search('hello'), []);
    assert.deepEqual(idx.tree.search('world'), [doc2]);
    assert.deepEqual(idx.tree.search('bloup'), []);
  });

});   // ==== End of 'Removal' ==== //
|
||||
|
||||
|
||||
describe('Update', function () {

  it('Can update a document whose key did or didnt change', function () {
    var idx = new Index({ fieldName: 'tf' })
      , doc1 = { a: 5, tf: 'hello' }
      , doc2 = { a: 8, tf: 'world' }
      , doc3 = { a: 2, tf: 'bloup' }
      , doc4 = { a: 23, tf: 'world' }
      , doc5 = { a: 1, tf: 'changed' }
      ;

    idx.insert(doc1);
    idx.insert(doc2);
    idx.insert(doc3);
    idx.tree.getNumberOfKeys().should.equal(3);
    assert.deepEqual(idx.tree.search('world'), [doc2]);

    // Key unchanged: the pointer is swapped in place
    idx.update(doc2, doc4);
    idx.tree.getNumberOfKeys().should.equal(3);
    assert.deepEqual(idx.tree.search('world'), [doc4]);

    // Key changed: old key removed, new key added
    idx.update(doc1, doc5);
    idx.tree.getNumberOfKeys().should.equal(3);
    assert.deepEqual(idx.tree.search('hello'), []);
    assert.deepEqual(idx.tree.search('changed'), [doc5]);
  });

  it('If a simple update violates a unique constraint, changes are rolled back and an error thrown', function () {
    var idx = new Index({ fieldName: 'tf', unique: true })
      , doc1 = { a: 5, tf: 'hello' }
      , doc2 = { a: 8, tf: 'world' }
      , doc3 = { a: 2, tf: 'bloup' }
      , bad = { a: 23, tf: 'world' }   // Would collide with doc2
      ;

    idx.insert(doc1);
    idx.insert(doc2);
    idx.insert(doc3);

    idx.tree.getNumberOfKeys().should.equal(3);
    assert.deepEqual(idx.tree.search('hello'), [doc1]);
    assert.deepEqual(idx.tree.search('world'), [doc2]);
    assert.deepEqual(idx.tree.search('bloup'), [doc3]);

    try {
      idx.update(doc3, bad);
    } catch (e) {
      e.errorType.should.equal('uniqueViolated');
    }

    // No change
    idx.tree.getNumberOfKeys().should.equal(3);
    assert.deepEqual(idx.tree.search('hello'), [doc1]);
    assert.deepEqual(idx.tree.search('world'), [doc2]);
    assert.deepEqual(idx.tree.search('bloup'), [doc3]);
  });

  it('Can update an array of documents', function () {
    var idx = new Index({ fieldName: 'tf' })
      , doc1 = { a: 5, tf: 'hello' }
      , doc2 = { a: 8, tf: 'world' }
      , doc3 = { a: 2, tf: 'bloup' }
      , doc1b = { a: 23, tf: 'world' }
      , doc2b = { a: 1, tf: 'changed' }
      , doc3b = { a: 44, tf: 'bloup' }
      ;

    idx.insert(doc1);
    idx.insert(doc2);
    idx.insert(doc3);
    idx.tree.getNumberOfKeys().should.equal(3);

    idx.update([{ oldDoc: doc1, newDoc: doc1b }, { oldDoc: doc2, newDoc: doc2b }, { oldDoc: doc3, newDoc: doc3b }]);

    idx.tree.getNumberOfKeys().should.equal(3);
    idx.getMatching('world').length.should.equal(1);
    idx.getMatching('world')[0].should.equal(doc1b);
    idx.getMatching('changed').length.should.equal(1);
    idx.getMatching('changed')[0].should.equal(doc2b);
    idx.getMatching('bloup').length.should.equal(1);
    idx.getMatching('bloup')[0].should.equal(doc3b);
  });

  it('If a unique constraint is violated during an array-update, all changes are rolled back and an error thrown', function () {
    var idx = new Index({ fieldName: 'tf', unique: true })
      , doc0 = { a: 432, tf: 'notthistoo' }
      , doc1 = { a: 5, tf: 'hello' }
      , doc2 = { a: 8, tf: 'world' }
      , doc3 = { a: 2, tf: 'bloup' }
      , doc1b = { a: 23, tf: 'changed' }
      , doc2b = { a: 1, tf: 'changed' }   // Will violate the constraint (first try)
      , doc2c = { a: 1, tf: 'notthistoo' }   // Will violate the constraint (second try)
      , doc3b = { a: 44, tf: 'alsochanged' }
      ;

    idx.insert(doc1);
    idx.insert(doc2);
    idx.insert(doc3);
    idx.tree.getNumberOfKeys().should.equal(3);

    try {
      idx.update([{ oldDoc: doc1, newDoc: doc1b }, { oldDoc: doc2, newDoc: doc2b }, { oldDoc: doc3, newDoc: doc3b }]);
    } catch (e) {
      e.errorType.should.equal('uniqueViolated');
    }

    idx.tree.getNumberOfKeys().should.equal(3);
    idx.getMatching('hello').length.should.equal(1);
    idx.getMatching('hello')[0].should.equal(doc1);
    idx.getMatching('world').length.should.equal(1);
    idx.getMatching('world')[0].should.equal(doc2);
    idx.getMatching('bloup').length.should.equal(1);
    idx.getMatching('bloup')[0].should.equal(doc3);

    try {
      idx.update([{ oldDoc: doc1, newDoc: doc1b }, { oldDoc: doc2, newDoc: doc2b }, { oldDoc: doc3, newDoc: doc3b }]);
    } catch (e) {
      e.errorType.should.equal('uniqueViolated');
    }

    idx.tree.getNumberOfKeys().should.equal(3);
    idx.getMatching('hello').length.should.equal(1);
    idx.getMatching('hello')[0].should.equal(doc1);
    idx.getMatching('world').length.should.equal(1);
    idx.getMatching('world')[0].should.equal(doc2);
    idx.getMatching('bloup').length.should.equal(1);
    idx.getMatching('bloup')[0].should.equal(doc3);
  });

  it('If an update doesnt change a document, the unique constraint is not violated', function () {
    var idx = new Index({ fieldName: 'tf', unique: true })
      , doc1 = { a: 5, tf: 'hello' }
      , doc2 = { a: 8, tf: 'world' }
      , doc3 = { a: 2, tf: 'bloup' }
      , noChange = { a: 8, tf: 'world' }
      ;

    idx.insert(doc1);
    idx.insert(doc2);
    idx.insert(doc3);
    idx.tree.getNumberOfKeys().should.equal(3);
    assert.deepEqual(idx.tree.search('world'), [doc2]);

    idx.update(doc2, noChange);   // No error thrown
    idx.tree.getNumberOfKeys().should.equal(3);
    assert.deepEqual(idx.tree.search('world'), [noChange]);
  });

  it('Can revert simple and batch updates', function () {
    var idx = new Index({ fieldName: 'tf' })
      , doc1 = { a: 5, tf: 'hello' }
      , doc2 = { a: 8, tf: 'world' }
      , doc3 = { a: 2, tf: 'bloup' }
      , doc1b = { a: 23, tf: 'world' }
      , doc2b = { a: 1, tf: 'changed' }
      , doc3b = { a: 44, tf: 'bloup' }
      , batchUpdate = [{ oldDoc: doc1, newDoc: doc1b }, { oldDoc: doc2, newDoc: doc2b }, { oldDoc: doc3, newDoc: doc3b }]
      ;

    idx.insert(doc1);
    idx.insert(doc2);
    idx.insert(doc3);
    idx.tree.getNumberOfKeys().should.equal(3);

    idx.update(batchUpdate);

    idx.tree.getNumberOfKeys().should.equal(3);
    idx.getMatching('world').length.should.equal(1);
    idx.getMatching('world')[0].should.equal(doc1b);
    idx.getMatching('changed').length.should.equal(1);
    idx.getMatching('changed')[0].should.equal(doc2b);
    idx.getMatching('bloup').length.should.equal(1);
    idx.getMatching('bloup')[0].should.equal(doc3b);

    idx.revertUpdate(batchUpdate);

    idx.tree.getNumberOfKeys().should.equal(3);
    idx.getMatching('hello').length.should.equal(1);
    idx.getMatching('hello')[0].should.equal(doc1);
    idx.getMatching('world').length.should.equal(1);
    idx.getMatching('world')[0].should.equal(doc2);
    idx.getMatching('bloup').length.should.equal(1);
    idx.getMatching('bloup')[0].should.equal(doc3);

    // Now a simple update
    idx.update(doc2, doc2b);

    idx.tree.getNumberOfKeys().should.equal(3);
    idx.getMatching('hello').length.should.equal(1);
    idx.getMatching('hello')[0].should.equal(doc1);
    idx.getMatching('changed').length.should.equal(1);
    idx.getMatching('changed')[0].should.equal(doc2b);
    idx.getMatching('bloup').length.should.equal(1);
    idx.getMatching('bloup')[0].should.equal(doc3);

    idx.revertUpdate(doc2, doc2b);

    idx.tree.getNumberOfKeys().should.equal(3);
    idx.getMatching('hello').length.should.equal(1);
    idx.getMatching('hello')[0].should.equal(doc1);
    idx.getMatching('world').length.should.equal(1);
    idx.getMatching('world')[0].should.equal(doc2);
    idx.getMatching('bloup').length.should.equal(1);
    idx.getMatching('bloup')[0].should.equal(doc3);
  });

});   // ==== End of 'Update' ==== //
|
||||
|
||||
|
||||
describe('Get matching documents', function () {

  it('Get all documents where fieldName is equal to the given value, or an empty array if no match', function () {
    var idx = new Index({ fieldName: 'tf' })
      , doc1 = { a: 5, tf: 'hello' }
      , doc2 = { a: 8, tf: 'world' }
      , doc3 = { a: 2, tf: 'bloup' }
      , doc4 = { a: 23, tf: 'world' }
      ;

    idx.insert(doc1);
    idx.insert(doc2);
    idx.insert(doc3);
    idx.insert(doc4);

    assert.deepEqual(idx.getMatching('bloup'), [doc3]);
    assert.deepEqual(idx.getMatching('world'), [doc2, doc4]);
    assert.deepEqual(idx.getMatching('nope'), []);
  });

  it('Can get all documents for a given key in a unique index', function () {
    var idx = new Index({ fieldName: 'tf', unique: true })
      , doc1 = { a: 5, tf: 'hello' }
      , doc2 = { a: 8, tf: 'world' }
      , doc3 = { a: 2, tf: 'bloup' }
      ;

    idx.insert(doc1);
    idx.insert(doc2);
    idx.insert(doc3);

    assert.deepEqual(idx.getMatching('bloup'), [doc3]);
    assert.deepEqual(idx.getMatching('world'), [doc2]);
    assert.deepEqual(idx.getMatching('nope'), []);
  });

  it('Can get all documents for which a field is undefined', function () {
    var idx = new Index({ fieldName: 'tf' })
      , doc1 = { a: 5, tf: 'hello' }
      , doc2 = { a: 2, nottf: 'bloup' }
      , doc3 = { a: 8, tf: 'world' }
      , doc4 = { a: 7, nottf: 'yes' }
      ;

    idx.insert(doc1);
    idx.insert(doc2);
    idx.insert(doc3);

    assert.deepEqual(idx.getMatching('bloup'), []);
    assert.deepEqual(idx.getMatching('hello'), [doc1]);
    assert.deepEqual(idx.getMatching('world'), [doc3]);
    assert.deepEqual(idx.getMatching('yes'), []);
    assert.deepEqual(idx.getMatching(undefined), [doc2]);

    idx.insert(doc4);

    assert.deepEqual(idx.getMatching('bloup'), []);
    assert.deepEqual(idx.getMatching('hello'), [doc1]);
    assert.deepEqual(idx.getMatching('world'), [doc3]);
    assert.deepEqual(idx.getMatching('yes'), []);
    assert.deepEqual(idx.getMatching(undefined), [doc2, doc4]);
  });

  it('Can get all documents for which a field is null', function () {
    var idx = new Index({ fieldName: 'tf' })
      , doc1 = { a: 5, tf: 'hello' }
      , doc2 = { a: 2, tf: null }
      , doc3 = { a: 8, tf: 'world' }
      , doc4 = { a: 7, tf: null }
      ;

    idx.insert(doc1);
    idx.insert(doc2);
    idx.insert(doc3);

    assert.deepEqual(idx.getMatching('bloup'), []);
    assert.deepEqual(idx.getMatching('hello'), [doc1]);
    assert.deepEqual(idx.getMatching('world'), [doc3]);
    assert.deepEqual(idx.getMatching('yes'), []);
    assert.deepEqual(idx.getMatching(null), [doc2]);

    idx.insert(doc4);

    assert.deepEqual(idx.getMatching('bloup'), []);
    assert.deepEqual(idx.getMatching('hello'), [doc1]);
    assert.deepEqual(idx.getMatching('world'), [doc3]);
    assert.deepEqual(idx.getMatching('yes'), []);
    assert.deepEqual(idx.getMatching(null), [doc2, doc4]);
  });

  it('Can get all documents for a given key in a sparse index, but not unindexed docs (= field undefined)', function () {
    var idx = new Index({ fieldName: 'tf', sparse: true })
      , doc1 = { a: 5, tf: 'hello' }
      , doc2 = { a: 2, nottf: 'bloup' }
      , doc3 = { a: 8, tf: 'world' }
      , doc4 = { a: 7, nottf: 'yes' }
      ;

    idx.insert(doc1);
    idx.insert(doc2);
    idx.insert(doc3);
    idx.insert(doc4);

    assert.deepEqual(idx.getMatching('bloup'), []);
    assert.deepEqual(idx.getMatching('hello'), [doc1]);
    assert.deepEqual(idx.getMatching('world'), [doc3]);
    assert.deepEqual(idx.getMatching('yes'), []);
    assert.deepEqual(idx.getMatching(undefined), []);
  });

  it('Can get all documents whose key is in an array of keys', function () {
    var idx = new Index({ fieldName: 'tf' })
      , doc1 = { a: 5, tf: 'hello' }
      , doc2 = { a: 2, tf: 'bloup' }
      , doc3 = { a: 8, tf: 'world' }
      , doc4 = { a: 7, tf: 'yes' }
      , doc5 = { a: 7, tf: 'yes' }
      ;

    idx.insert(doc1);
    idx.insert(doc2);
    idx.insert(doc3);
    idx.insert(doc4);
    idx.insert(doc5);

    assert.deepEqual(idx.getMatching([]), []);
    assert.deepEqual(idx.getMatching(['bloup']), [doc2]);
    assert.deepEqual(idx.getMatching(['bloup', 'yes']), [doc2, doc4, doc5]);
    assert.deepEqual(idx.getMatching(['hello', 'no']), [doc1]);
    assert.deepEqual(idx.getMatching(['nope', 'no']), []);
  });

  it('Can get all documents whose key is between certain bounds', function () {
    var idx = new Index({ fieldName: 'a' })
      , doc1 = { a: 5, tf: 'hello' }
      , doc2 = { a: 2, tf: 'bloup' }
      , doc3 = { a: 8, tf: 'world' }
      , doc4 = { a: 7, tf: 'yes' }
      , doc5 = { a: 10, tf: 'yes' }
      ;

    idx.insert(doc1);
    idx.insert(doc2);
    idx.insert(doc3);
    idx.insert(doc4);
    idx.insert(doc5);

    // Results come back in ascending key order
    assert.deepEqual(idx.getBetweenBounds({ $lt: 10, $gte: 5 }), [ doc1, doc4, doc3 ]);
    assert.deepEqual(idx.getBetweenBounds({ $lte: 8 }), [ doc2, doc1, doc4, doc3 ]);
    assert.deepEqual(idx.getBetweenBounds({ $gt: 7 }), [ doc3, doc5 ]);
  });

});   // ==== End of 'Get matching documents' ==== //
|
||||
|
||||
|
||||
describe('Resetting', function () {
|
||||
|
||||
it('Can reset an index without any new data, the index will be empty afterwards', function () {
|
||||
var idx = new Index({ fieldName: 'tf' })
|
||||
, doc1 = { a: 5, tf: 'hello' }
|
||||
, doc2 = { a: 8, tf: 'world' }
|
||||
, doc3 = { a: 2, tf: 'bloup' }
|
||||
;
|
||||
|
||||
idx.insert(doc1);
|
||||
idx.insert(doc2);
|
||||
idx.insert(doc3);
|
||||
|
||||
idx.tree.getNumberOfKeys().should.equal(3);
|
||||
idx.getMatching('hello').length.should.equal(1);
|
||||
idx.getMatching('world').length.should.equal(1);
|
||||
idx.getMatching('bloup').length.should.equal(1);
|
||||
|
||||
idx.reset();
|
||||
idx.tree.getNumberOfKeys().should.equal(0);
|
||||
idx.getMatching('hello').length.should.equal(0);
|
||||
idx.getMatching('world').length.should.equal(0);
|
||||
idx.getMatching('bloup').length.should.equal(0);
|
||||
});
|
||||
|
||||
it('Can reset an index and initialize it with one document', function () {
|
||||
var idx = new Index({ fieldName: 'tf' })
|
||||
, doc1 = { a: 5, tf: 'hello' }
|
||||
, doc2 = { a: 8, tf: 'world' }
|
||||
, doc3 = { a: 2, tf: 'bloup' }
|
||||
, newDoc = { a: 555, tf: 'new' }
|
||||
;
|
||||
|
||||
idx.insert(doc1);
|
||||
idx.insert(doc2);
|
||||
idx.insert(doc3);
|
||||
|
||||
idx.tree.getNumberOfKeys().should.equal(3);
|
||||
idx.getMatching('hello').length.should.equal(1);
|
||||
idx.getMatching('world').length.should.equal(1);
|
||||
idx.getMatching('bloup').length.should.equal(1);
|
||||
|
||||
idx.reset(newDoc);
|
||||
idx.tree.getNumberOfKeys().should.equal(1);
|
||||
idx.getMatching('hello').length.should.equal(0);
|
||||
idx.getMatching('world').length.should.equal(0);
|
||||
idx.getMatching('bloup').length.should.equal(0);
|
||||
idx.getMatching('new')[0].a.should.equal(555);
|
||||
});
|
||||
|
||||
it('Can reset an index and initialize it with an array of documents', function () {
|
||||
var idx = new Index({ fieldName: 'tf' })
|
||||
, doc1 = { a: 5, tf: 'hello' }
|
||||
, doc2 = { a: 8, tf: 'world' }
|
||||
, doc3 = { a: 2, tf: 'bloup' }
|
||||
, newDocs = [{ a: 555, tf: 'new' }, { a: 666, tf: 'again' }]
|
||||
;
|
||||
|
||||
idx.insert(doc1);
|
||||
idx.insert(doc2);
|
||||
idx.insert(doc3);
|
||||
|
||||
idx.tree.getNumberOfKeys().should.equal(3);
|
||||
idx.getMatching('hello').length.should.equal(1);
|
||||
idx.getMatching('world').length.should.equal(1);
|
||||
idx.getMatching('bloup').length.should.equal(1);
|
||||
|
||||
idx.reset(newDocs);
|
||||
idx.tree.getNumberOfKeys().should.equal(2);
|
||||
idx.getMatching('hello').length.should.equal(0);
|
||||
idx.getMatching('world').length.should.equal(0);
|
||||
idx.getMatching('bloup').length.should.equal(0);
|
||||
idx.getMatching('new')[0].a.should.equal(555);
|
||||
idx.getMatching('again')[0].a.should.equal(666);
|
||||
});
|
||||
|
||||
}); // ==== End of 'Resetting' ==== //
|
||||
|
||||
it('Get all elements in the index', function () {
|
||||
var idx = new Index({ fieldName: 'a' })
|
||||
, doc1 = { a: 5, tf: 'hello' }
|
||||
, doc2 = { a: 8, tf: 'world' }
|
||||
, doc3 = { a: 2, tf: 'bloup' }
|
||||
;
|
||||
|
||||
idx.insert(doc1);
|
||||
idx.insert(doc2);
|
||||
idx.insert(doc3);
|
||||
|
||||
assert.deepEqual(idx.getAll(), [{ a: 2, tf: 'bloup' }, { a: 5, tf: 'hello' }, { a: 8, tf: 'world' }]);
|
||||
});
|
||||
|
||||
|
||||
});
|
@ -0,0 +1,2 @@
|
||||
--reporter spec
|
||||
--timeout 30000
|
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,645 @@
|
||||
var should = require('chai').should()
|
||||
, assert = require('chai').assert
|
||||
, testDb = 'workspace/test.db'
|
||||
, fs = require('fs')
|
||||
, path = require('path')
|
||||
, _ = require('underscore')
|
||||
, async = require('async')
|
||||
, model = require('../lib/model')
|
||||
, customUtils = require('../lib/customUtils')
|
||||
, Datastore = require('../lib/datastore')
|
||||
, Persistence = require('../lib/persistence')
|
||||
, child_process = require('child_process')
|
||||
;
|
||||
|
||||
|
||||
describe('Persistence', function () {
|
||||
var d;
|
||||
|
||||
beforeEach(function (done) {
|
||||
d = new Datastore({ filename: testDb });
|
||||
d.filename.should.equal(testDb);
|
||||
d.inMemoryOnly.should.equal(false);
|
||||
|
||||
async.waterfall([
|
||||
function (cb) {
|
||||
Persistence.ensureDirectoryExists(path.dirname(testDb), function () {
|
||||
fs.exists(testDb, function (exists) {
|
||||
if (exists) {
|
||||
fs.unlink(testDb, cb);
|
||||
} else { return cb(); }
|
||||
});
|
||||
});
|
||||
}
|
||||
, function (cb) {
|
||||
d.loadDatabase(function (err) {
|
||||
assert.isNull(err);
|
||||
d.getAllData().length.should.equal(0);
|
||||
return cb();
|
||||
});
|
||||
}
|
||||
], done);
|
||||
});
|
||||
|
||||
it('Every line represents a document', function () {
|
||||
var now = new Date()
|
||||
, rawData = model.serialize({ _id: "1", a: 2, ages: [1, 5, 12] }) + '\n' +
|
||||
model.serialize({ _id: "2", hello: 'world' }) + '\n' +
|
||||
model.serialize({ _id: "3", nested: { today: now } })
|
||||
, treatedData = Persistence.treatRawData(rawData).data
|
||||
;
|
||||
|
||||
treatedData.sort(function (a, b) { return a._id - b._id; });
|
||||
treatedData.length.should.equal(3);
|
||||
_.isEqual(treatedData[0], { _id: "1", a: 2, ages: [1, 5, 12] }).should.equal(true);
|
||||
_.isEqual(treatedData[1], { _id: "2", hello: 'world' }).should.equal(true);
|
||||
_.isEqual(treatedData[2], { _id: "3", nested: { today: now } }).should.equal(true);
|
||||
});
|
||||
|
||||
it('Badly formatted lines have no impact on the treated data', function () {
|
||||
var now = new Date()
|
||||
, rawData = model.serialize({ _id: "1", a: 2, ages: [1, 5, 12] }) + '\n' +
|
||||
'garbage\n' +
|
||||
model.serialize({ _id: "3", nested: { today: now } })
|
||||
, treatedData = Persistence.treatRawData(rawData).data
|
||||
;
|
||||
|
||||
treatedData.sort(function (a, b) { return a._id - b._id; });
|
||||
treatedData.length.should.equal(2);
|
||||
_.isEqual(treatedData[0], { _id: "1", a: 2, ages: [1, 5, 12] }).should.equal(true);
|
||||
_.isEqual(treatedData[1], { _id: "3", nested: { today: now } }).should.equal(true);
|
||||
});
|
||||
|
||||
it('Well formatted lines that have no _id are not included in the data', function () {
|
||||
var now = new Date()
|
||||
, rawData = model.serialize({ _id: "1", a: 2, ages: [1, 5, 12] }) + '\n' +
|
||||
model.serialize({ _id: "2", hello: 'world' }) + '\n' +
|
||||
model.serialize({ nested: { today: now } })
|
||||
, treatedData = Persistence.treatRawData(rawData).data
|
||||
;
|
||||
|
||||
treatedData.sort(function (a, b) { return a._id - b._id; });
|
||||
treatedData.length.should.equal(2);
|
||||
_.isEqual(treatedData[0], { _id: "1", a: 2, ages: [1, 5, 12] }).should.equal(true);
|
||||
_.isEqual(treatedData[1], { _id: "2", hello: 'world' }).should.equal(true);
|
||||
});
|
||||
|
||||
it('If two lines concern the same doc (= same _id), the last one is the good version', function () {
|
||||
var now = new Date()
|
||||
, rawData = model.serialize({ _id: "1", a: 2, ages: [1, 5, 12] }) + '\n' +
|
||||
model.serialize({ _id: "2", hello: 'world' }) + '\n' +
|
||||
model.serialize({ _id: "1", nested: { today: now } })
|
||||
, treatedData = Persistence.treatRawData(rawData).data
|
||||
;
|
||||
|
||||
treatedData.sort(function (a, b) { return a._id - b._id; });
|
||||
treatedData.length.should.equal(2);
|
||||
_.isEqual(treatedData[0], { _id: "1", nested: { today: now } }).should.equal(true);
|
||||
_.isEqual(treatedData[1], { _id: "2", hello: 'world' }).should.equal(true);
|
||||
});
|
||||
|
||||
it('If a doc contains $$deleted: true, that means we need to remove it from the data', function () {
|
||||
var now = new Date()
|
||||
, rawData = model.serialize({ _id: "1", a: 2, ages: [1, 5, 12] }) + '\n' +
|
||||
model.serialize({ _id: "2", hello: 'world' }) + '\n' +
|
||||
model.serialize({ _id: "1", $$deleted: true }) + '\n' +
|
||||
model.serialize({ _id: "3", today: now })
|
||||
, treatedData = Persistence.treatRawData(rawData).data
|
||||
;
|
||||
|
||||
treatedData.sort(function (a, b) { return a._id - b._id; });
|
||||
treatedData.length.should.equal(2);
|
||||
_.isEqual(treatedData[0], { _id: "2", hello: 'world' }).should.equal(true);
|
||||
_.isEqual(treatedData[1], { _id: "3", today: now }).should.equal(true);
|
||||
});
|
||||
|
||||
it('If a doc contains $$deleted: true, no error is thrown if the doc wasnt in the list before', function () {
|
||||
var now = new Date()
|
||||
, rawData = model.serialize({ _id: "1", a: 2, ages: [1, 5, 12] }) + '\n' +
|
||||
model.serialize({ _id: "2", $$deleted: true }) + '\n' +
|
||||
model.serialize({ _id: "3", today: now })
|
||||
, treatedData = Persistence.treatRawData(rawData).data
|
||||
;
|
||||
|
||||
treatedData.sort(function (a, b) { return a._id - b._id; });
|
||||
treatedData.length.should.equal(2);
|
||||
_.isEqual(treatedData[0], { _id: "1", a: 2, ages: [1, 5, 12] }).should.equal(true);
|
||||
_.isEqual(treatedData[1], { _id: "3", today: now }).should.equal(true);
|
||||
});
|
||||
|
||||
it('If a doc contains $$indexCreated, no error is thrown during treatRawData and we can get the index options', function () {
|
||||
var now = new Date()
|
||||
, rawData = model.serialize({ _id: "1", a: 2, ages: [1, 5, 12] }) + '\n' +
|
||||
model.serialize({ $$indexCreated: { fieldName: "test", unique: true } }) + '\n' +
|
||||
model.serialize({ _id: "3", today: now })
|
||||
, treatedData = Persistence.treatRawData(rawData).data
|
||||
, indexes = Persistence.treatRawData(rawData).indexes
|
||||
;
|
||||
|
||||
Object.keys(indexes).length.should.equal(1);
|
||||
assert.deepEqual(indexes.test, { fieldName: "test", unique: true });
|
||||
|
||||
treatedData.sort(function (a, b) { return a._id - b._id; });
|
||||
treatedData.length.should.equal(2);
|
||||
_.isEqual(treatedData[0], { _id: "1", a: 2, ages: [1, 5, 12] }).should.equal(true);
|
||||
_.isEqual(treatedData[1], { _id: "3", today: now }).should.equal(true);
|
||||
});
|
||||
|
||||
it('Compact database on load', function (done) {
|
||||
d.insert({ a: 2 }, function () {
|
||||
d.insert({ a: 4 }, function () {
|
||||
d.remove({ a: 2 }, {}, function () {
|
||||
// Here, the underlying file is 3 lines long for only one document
|
||||
var data = fs.readFileSync(d.filename, 'utf8').split('\n')
|
||||
, filledCount = 0;
|
||||
|
||||
data.forEach(function (item) { if (item.length > 0) { filledCount += 1; } });
|
||||
filledCount.should.equal(3);
|
||||
|
||||
d.loadDatabase(function (err) {
|
||||
assert.isNull(err);
|
||||
|
||||
// Now, the file has been compacted and is only 1 line long
|
||||
var data = fs.readFileSync(d.filename, 'utf8').split('\n')
|
||||
, filledCount = 0;
|
||||
|
||||
data.forEach(function (item) { if (item.length > 0) { filledCount += 1; } });
|
||||
filledCount.should.equal(1);
|
||||
|
||||
done();
|
||||
});
|
||||
})
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('Calling loadDatabase after the data was modified doesnt change its contents', function (done) {
|
||||
d.loadDatabase(function () {
|
||||
d.insert({ a: 1 }, function (err) {
|
||||
assert.isNull(err);
|
||||
d.insert({ a: 2 }, function (err) {
|
||||
var data = d.getAllData()
|
||||
, doc1 = _.find(data, function (doc) { return doc.a === 1; })
|
||||
, doc2 = _.find(data, function (doc) { return doc.a === 2; })
|
||||
;
|
||||
assert.isNull(err);
|
||||
data.length.should.equal(2);
|
||||
doc1.a.should.equal(1);
|
||||
doc2.a.should.equal(2);
|
||||
|
||||
d.loadDatabase(function (err) {
|
||||
var data = d.getAllData()
|
||||
, doc1 = _.find(data, function (doc) { return doc.a === 1; })
|
||||
, doc2 = _.find(data, function (doc) { return doc.a === 2; })
|
||||
;
|
||||
assert.isNull(err);
|
||||
data.length.should.equal(2);
|
||||
doc1.a.should.equal(1);
|
||||
doc2.a.should.equal(2);
|
||||
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('Calling loadDatabase after the datafile was removed will reset the database', function (done) {
|
||||
d.loadDatabase(function () {
|
||||
d.insert({ a: 1 }, function (err) {
|
||||
assert.isNull(err);
|
||||
d.insert({ a: 2 }, function (err) {
|
||||
var data = d.getAllData()
|
||||
, doc1 = _.find(data, function (doc) { return doc.a === 1; })
|
||||
, doc2 = _.find(data, function (doc) { return doc.a === 2; })
|
||||
;
|
||||
assert.isNull(err);
|
||||
data.length.should.equal(2);
|
||||
doc1.a.should.equal(1);
|
||||
doc2.a.should.equal(2);
|
||||
|
||||
fs.unlink(testDb, function (err) {
|
||||
assert.isNull(err);
|
||||
d.loadDatabase(function (err) {
|
||||
assert.isNull(err);
|
||||
d.getAllData().length.should.equal(0);
|
||||
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('Calling loadDatabase after the datafile was modified loads the new data', function (done) {
|
||||
d.loadDatabase(function () {
|
||||
d.insert({ a: 1 }, function (err) {
|
||||
assert.isNull(err);
|
||||
d.insert({ a: 2 }, function (err) {
|
||||
var data = d.getAllData()
|
||||
, doc1 = _.find(data, function (doc) { return doc.a === 1; })
|
||||
, doc2 = _.find(data, function (doc) { return doc.a === 2; })
|
||||
;
|
||||
assert.isNull(err);
|
||||
data.length.should.equal(2);
|
||||
doc1.a.should.equal(1);
|
||||
doc2.a.should.equal(2);
|
||||
|
||||
fs.writeFile(testDb, '{"a":3,"_id":"aaa"}', 'utf8', function (err) {
|
||||
assert.isNull(err);
|
||||
d.loadDatabase(function (err) {
|
||||
var data = d.getAllData()
|
||||
, doc1 = _.find(data, function (doc) { return doc.a === 1; })
|
||||
, doc2 = _.find(data, function (doc) { return doc.a === 2; })
|
||||
, doc3 = _.find(data, function (doc) { return doc.a === 3; })
|
||||
;
|
||||
assert.isNull(err);
|
||||
data.length.should.equal(1);
|
||||
doc3.a.should.equal(3);
|
||||
assert.isUndefined(doc1);
|
||||
assert.isUndefined(doc2);
|
||||
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
describe('Prevent dataloss when persisting data', function () {
|
||||
|
||||
it('Creating a datastore with in memory as true and a bad filename wont cause an error', function () {
|
||||
new Datastore({ filename: 'workspace/bad.db~', inMemoryOnly: true });
|
||||
})
|
||||
|
||||
it('Creating a persistent datastore with a bad filename will cause an error', function () {
|
||||
(function () { new Datastore({ filename: 'workspace/bad.db~' }); }).should.throw();
|
||||
})
|
||||
|
||||
it('If no file exists, ensureDatafileIntegrity creates an empty datafile', function (done) {
|
||||
var p = new Persistence({ db: { inMemoryOnly: false, filename: 'workspace/it.db' } });
|
||||
|
||||
if (fs.existsSync('workspace/it.db')) { fs.unlinkSync('workspace/it.db'); }
|
||||
if (fs.existsSync('workspace/it.db~~')) { fs.unlinkSync('workspace/it.db~~'); }
|
||||
|
||||
fs.existsSync('workspace/it.db').should.equal(false);
|
||||
fs.existsSync('workspace/it.db~~').should.equal(false);
|
||||
|
||||
p.ensureDatafileIntegrity(function (err) {
|
||||
assert.isNull(err);
|
||||
|
||||
fs.existsSync('workspace/it.db').should.equal(true);
|
||||
fs.existsSync('workspace/it.db~~').should.equal(false);
|
||||
|
||||
fs.readFileSync('workspace/it.db', 'utf8').should.equal('');
|
||||
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('If only datafile exists, ensureDatafileIntegrity will use it', function (done) {
|
||||
var p = new Persistence({ db: { inMemoryOnly: false, filename: 'workspace/it.db' } });
|
||||
|
||||
if (fs.existsSync('workspace/it.db')) { fs.unlinkSync('workspace/it.db'); }
|
||||
if (fs.existsSync('workspace/it.db~~')) { fs.unlinkSync('workspace/it.db~~'); }
|
||||
|
||||
fs.writeFileSync('workspace/it.db', 'something', 'utf8');
|
||||
|
||||
fs.existsSync('workspace/it.db').should.equal(true);
|
||||
fs.existsSync('workspace/it.db~~').should.equal(false);
|
||||
|
||||
p.ensureDatafileIntegrity(function (err) {
|
||||
assert.isNull(err);
|
||||
|
||||
fs.existsSync('workspace/it.db').should.equal(true);
|
||||
fs.existsSync('workspace/it.db~~').should.equal(false);
|
||||
|
||||
fs.readFileSync('workspace/it.db', 'utf8').should.equal('something');
|
||||
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('If old datafile exists and datafile doesnt, ensureDatafileIntegrity will use it', function (done) {
|
||||
var p = new Persistence({ db: { inMemoryOnly: false, filename: 'workspace/it.db' } });
|
||||
|
||||
if (fs.existsSync('workspace/it.db')) { fs.unlinkSync('workspace/it.db'); }
|
||||
if (fs.existsSync('workspace/it.db~~')) { fs.unlinkSync('workspace/it.db~~'); }
|
||||
|
||||
fs.writeFileSync('workspace/it.db~~', 'something', 'utf8');
|
||||
|
||||
fs.existsSync('workspace/it.db').should.equal(false);
|
||||
fs.existsSync('workspace/it.db~~').should.equal(true);
|
||||
|
||||
p.ensureDatafileIntegrity(function (err) {
|
||||
assert.isNull(err);
|
||||
|
||||
fs.existsSync('workspace/it.db').should.equal(true);
|
||||
fs.existsSync('workspace/it.db~~').should.equal(false);
|
||||
|
||||
fs.readFileSync('workspace/it.db', 'utf8').should.equal('something');
|
||||
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('If both old and current datafiles exist, ensureDatafileIntegrity will use the datafile, it means step 4 of persistence failed', function (done) {
|
||||
var theDb = new Datastore({ filename: 'workspace/it.db' });
|
||||
|
||||
if (fs.existsSync('workspace/it.db')) { fs.unlinkSync('workspace/it.db'); }
|
||||
if (fs.existsSync('workspace/it.db~~')) { fs.unlinkSync('workspace/it.db~~'); }
|
||||
|
||||
fs.writeFileSync('workspace/it.db', '{"_id":"0","hello":"world"}', 'utf8');
|
||||
fs.writeFileSync('workspace/it.db~~', '{"_id":"0","hello":"other"}', 'utf8');
|
||||
|
||||
fs.existsSync('workspace/it.db').should.equal(true);
|
||||
fs.existsSync('workspace/it.db~~').should.equal(true);
|
||||
|
||||
theDb.persistence.ensureDatafileIntegrity(function (err) {
|
||||
assert.isNull(err);
|
||||
|
||||
fs.existsSync('workspace/it.db').should.equal(true);
|
||||
fs.existsSync('workspace/it.db~~').should.equal(true);
|
||||
|
||||
fs.readFileSync('workspace/it.db', 'utf8').should.equal('{"_id":"0","hello":"world"}');
|
||||
|
||||
theDb.loadDatabase(function (err) {
|
||||
assert.isNull(err);
|
||||
theDb.find({}, function (err, docs) {
|
||||
assert.isNull(err);
|
||||
docs.length.should.equal(1);
|
||||
docs[0].hello.should.equal("world");
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('persistCachedDatabase should update the contents of the datafile and leave a clean state', function (done) {
|
||||
d.insert({ hello: 'world' }, function () {
|
||||
d.find({}, function (err, docs) {
|
||||
docs.length.should.equal(1);
|
||||
|
||||
if (fs.existsSync(testDb)) { fs.unlinkSync(testDb); }
|
||||
if (fs.existsSync(testDb + '~')) { fs.unlinkSync(testDb + '~'); }
|
||||
if (fs.existsSync(testDb + '~~')) { fs.unlinkSync(testDb + '~~'); }
|
||||
fs.existsSync(testDb).should.equal(false);
|
||||
|
||||
fs.writeFileSync(testDb + '~', 'something', 'utf8');
|
||||
fs.writeFileSync(testDb + '~~', 'something else', 'utf8');
|
||||
fs.existsSync(testDb + '~').should.equal(true);
|
||||
fs.existsSync(testDb + '~~').should.equal(true);
|
||||
|
||||
d.persistence.persistCachedDatabase(function (err) {
|
||||
var contents = fs.readFileSync(testDb, 'utf8');
|
||||
assert.isNull(err);
|
||||
fs.existsSync(testDb).should.equal(true);
|
||||
fs.existsSync(testDb + '~').should.equal(false);
|
||||
fs.existsSync(testDb + '~~').should.equal(false);
|
||||
if (!contents.match(/^{"hello":"world","_id":"[0-9a-zA-Z]{16}"}\n$/)) {
|
||||
throw "Datafile contents not as expected";
|
||||
}
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('After a persistCachedDatabase, there should be no temp or old filename', function (done) {
|
||||
d.insert({ hello: 'world' }, function () {
|
||||
d.find({}, function (err, docs) {
|
||||
docs.length.should.equal(1);
|
||||
|
||||
if (fs.existsSync(testDb)) { fs.unlinkSync(testDb); }
|
||||
if (fs.existsSync(testDb + '~')) { fs.unlinkSync(testDb + '~'); }
|
||||
if (fs.existsSync(testDb + '~~')) { fs.unlinkSync(testDb + '~~'); }
|
||||
fs.existsSync(testDb).should.equal(false);
|
||||
|
||||
fs.writeFileSync(testDb + '~', 'bloup', 'utf8');
|
||||
fs.writeFileSync(testDb + '~~', 'blap', 'utf8');
|
||||
fs.existsSync(testDb + '~').should.equal(true);
|
||||
fs.existsSync(testDb + '~~').should.equal(true);
|
||||
|
||||
d.persistence.persistCachedDatabase(function (err) {
|
||||
var contents = fs.readFileSync(testDb, 'utf8');
|
||||
assert.isNull(err);
|
||||
fs.existsSync(testDb).should.equal(true);
|
||||
fs.existsSync(testDb + '~').should.equal(false);
|
||||
fs.existsSync(testDb + '~~').should.equal(false);
|
||||
if (!contents.match(/^{"hello":"world","_id":"[0-9a-zA-Z]{16}"}\n$/)) {
|
||||
throw "Datafile contents not as expected";
|
||||
}
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('persistCachedDatabase should update the contents of the datafile and leave a clean state even if there is a temp or old datafile', function (done) {
|
||||
d.insert({ hello: 'world' }, function () {
|
||||
d.find({}, function (err, docs) {
|
||||
docs.length.should.equal(1);
|
||||
|
||||
if (fs.existsSync(testDb)) { fs.unlinkSync(testDb); }
|
||||
fs.writeFileSync(testDb + '~', 'blabla', 'utf8');
|
||||
fs.writeFileSync(testDb + '~~', 'bloblo', 'utf8');
|
||||
fs.existsSync(testDb).should.equal(false);
|
||||
fs.existsSync(testDb + '~').should.equal(true);
|
||||
fs.existsSync(testDb + '~~').should.equal(true);
|
||||
|
||||
d.persistence.persistCachedDatabase(function (err) {
|
||||
var contents = fs.readFileSync(testDb, 'utf8');
|
||||
assert.isNull(err);
|
||||
fs.existsSync(testDb).should.equal(true);
|
||||
fs.existsSync(testDb + '~').should.equal(false);
|
||||
fs.existsSync(testDb + '~~').should.equal(false);
|
||||
if (!contents.match(/^{"hello":"world","_id":"[0-9a-zA-Z]{16}"}\n$/)) {
|
||||
throw "Datafile contents not as expected";
|
||||
}
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('persistCachedDatabase should update the contents of the datafile and leave a clean state even if there is a temp or old datafile', function (done) {
|
||||
var dbFile = 'workspace/test2.db', theDb;
|
||||
|
||||
if (fs.existsSync(dbFile)) { fs.unlinkSync(dbFile); }
|
||||
if (fs.existsSync(dbFile + '~')) { fs.unlinkSync(dbFile + '~'); }
|
||||
if (fs.existsSync(dbFile + '~~')) { fs.unlinkSync(dbFile + '~~'); }
|
||||
|
||||
theDb = new Datastore({ filename: dbFile });
|
||||
|
||||
theDb.loadDatabase(function (err) {
|
||||
var contents = fs.readFileSync(dbFile, 'utf8');
|
||||
assert.isNull(err);
|
||||
fs.existsSync(dbFile).should.equal(true);
|
||||
fs.existsSync(dbFile + '~').should.equal(false);
|
||||
fs.existsSync(dbFile + '~~').should.equal(false);
|
||||
if (contents != "") {
|
||||
throw "Datafile contents not as expected";
|
||||
}
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('Persistence works as expected when everything goes fine', function (done) {
|
||||
var dbFile = 'workspace/test2.db', theDb, theDb2, doc1, doc2;
|
||||
|
||||
async.waterfall([
|
||||
async.apply(Persistence.ensureFileDoesntExist, dbFile)
|
||||
, async.apply(Persistence.ensureFileDoesntExist, dbFile + '~')
|
||||
, async.apply(Persistence.ensureFileDoesntExist, dbFile + '~~')
|
||||
, function (cb) {
|
||||
theDb = new Datastore({ filename: dbFile });
|
||||
theDb.loadDatabase(cb);
|
||||
}
|
||||
, function (cb) {
|
||||
theDb.find({}, function (err, docs) {
|
||||
assert.isNull(err);
|
||||
docs.length.should.equal(0);
|
||||
return cb();
|
||||
});
|
||||
}
|
||||
, function (cb) {
|
||||
theDb.insert({ a: 'hello' }, function (err, _doc1) {
|
||||
assert.isNull(err);
|
||||
doc1 = _doc1;
|
||||
theDb.insert({ a: 'world' }, function (err, _doc2) {
|
||||
assert.isNull(err);
|
||||
doc2 = _doc2;
|
||||
return cb();
|
||||
});
|
||||
});
|
||||
}
|
||||
, function (cb) {
|
||||
theDb.find({}, function (err, docs) {
|
||||
assert.isNull(err);
|
||||
docs.length.should.equal(2);
|
||||
_.find(docs, function (item) { return item._id === doc1._id }).a.should.equal('hello');
|
||||
_.find(docs, function (item) { return item._id === doc2._id }).a.should.equal('world');
|
||||
return cb();
|
||||
});
|
||||
}
|
||||
, function (cb) {
|
||||
theDb.loadDatabase(cb);
|
||||
}
|
||||
, function (cb) { // No change
|
||||
theDb.find({}, function (err, docs) {
|
||||
assert.isNull(err);
|
||||
docs.length.should.equal(2);
|
||||
_.find(docs, function (item) { return item._id === doc1._id }).a.should.equal('hello');
|
||||
_.find(docs, function (item) { return item._id === doc2._id }).a.should.equal('world');
|
||||
return cb();
|
||||
});
|
||||
}
|
||||
, function (cb) {
|
||||
fs.existsSync(dbFile).should.equal(true);
|
||||
fs.existsSync(dbFile + '~').should.equal(false);
|
||||
fs.existsSync(dbFile + '~~').should.equal(false);
|
||||
return cb();
|
||||
}
|
||||
, function (cb) {
|
||||
theDb2 = new Datastore({ filename: dbFile });
|
||||
theDb2.loadDatabase(cb);
|
||||
}
|
||||
, function (cb) { // No change in second db
|
||||
theDb2.find({}, function (err, docs) {
|
||||
assert.isNull(err);
|
||||
docs.length.should.equal(2);
|
||||
_.find(docs, function (item) { return item._id === doc1._id }).a.should.equal('hello');
|
||||
_.find(docs, function (item) { return item._id === doc2._id }).a.should.equal('world');
|
||||
return cb();
|
||||
});
|
||||
}
|
||||
, function (cb) {
|
||||
fs.existsSync(dbFile).should.equal(true);
|
||||
fs.existsSync(dbFile + '~').should.equal(false);
|
||||
fs.existsSync(dbFile + '~~').should.equal(false);
|
||||
return cb();
|
||||
}
|
||||
], done);
|
||||
});
|
||||
|
||||
|
||||
// This test is a bit complicated since it depends on the time I/O actions take to execute
|
||||
// That depends on the machine and the load on the machine when the tests are run
|
||||
// It is timed for my machine with nothing else running but may not work as expected on others (it will not fail but may not be a proof)
|
||||
// Every new version of NeDB passes it on my machine before rtelease
|
||||
it('If system crashes during a loadDatabase, the former version is not lost', function (done) {
|
||||
var cp, N = 150000, toWrite = "", i;
|
||||
|
||||
// Ensuring the state is clean
|
||||
if (fs.existsSync('workspace/lac.db')) { fs.unlinkSync('workspace/lac.db'); }
|
||||
if (fs.existsSync('workspace/lac.db~')) { fs.unlinkSync('workspace/lac.db~'); }
|
||||
|
||||
// Creating a db file with 150k records (a bit long to load)
|
||||
for (i = 0; i < N; i += 1) {
|
||||
toWrite += model.serialize({ _id: customUtils.uid(16), hello: 'world' }) + '\n';
|
||||
}
|
||||
fs.writeFileSync('workspace/lac.db', toWrite, 'utf8');
|
||||
|
||||
// Loading it in a separate process that we will crash before finishing the loadDatabase
|
||||
cp = child_process.fork('test_lac/loadAndCrash.test')
|
||||
|
||||
// Kill the child process when we're at step 3 of persistCachedDatabase (during write to datafile)
|
||||
setTimeout(function() {
|
||||
cp.kill('SIGINT');
|
||||
|
||||
// If the timing is correct, only the temp datafile contains data
|
||||
// The datafile was in the middle of being written and is empty
|
||||
|
||||
// Let the process crash be finished then load database without a crash, and test we didn't lose data
|
||||
setTimeout(function () {
|
||||
var db = new Datastore({ filename: 'workspace/lac.db' });
|
||||
db.loadDatabase(function (err) {
|
||||
assert.isNull(err);
|
||||
|
||||
db.count({}, function (err, n) {
|
||||
// Data has not been lost
|
||||
assert.isNull(err);
|
||||
n.should.equal(150000);
|
||||
|
||||
// State is clean, the temp datafile has been erased and the datafile contains all the data
|
||||
fs.existsSync('workspace/lac.db').should.equal(true);
|
||||
fs.existsSync('workspace/lac.db~').should.equal(false);
|
||||
|
||||
done();
|
||||
});
|
||||
});
|
||||
}, 100);
|
||||
}, 2000);
|
||||
});
|
||||
|
||||
}); // ==== End of 'Prevent dataloss when persisting data' ====
|
||||
|
||||
|
||||
describe('ensureFileDoesntExist', function () {
|
||||
|
||||
it('Doesnt do anything if file already doesnt exist', function (done) {
|
||||
Persistence.ensureFileDoesntExist('workspace/nonexisting', function (err) {
|
||||
assert.isNull(err);
|
||||
fs.existsSync('workspace/nonexisting').should.equal(false);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('Deletes file if it exists', function (done) {
|
||||
fs.writeFileSync('workspace/existing', 'hello world', 'utf8');
|
||||
fs.existsSync('workspace/existing').should.equal(true);
|
||||
|
||||
Persistence.ensureFileDoesntExist('workspace/existing', function (err) {
|
||||
assert.isNull(err);
|
||||
fs.existsSync('workspace/existing').should.equal(false);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
}); // ==== End of 'ensureFileDoesntExist' ====
|
||||
|
||||
|
||||
});
|
@ -0,0 +1,5 @@
|
||||
var Nedb = require('../lib/datastore.js')
|
||||
, db = new Nedb({ filename: 'workspace/lac.db' })
|
||||
;
|
||||
|
||||
db.loadDatabase();
|
Some files were not shown because too many files have changed in this diff Show More
Reference in new issue