diff --git a/.DS_Store b/.DS_Store
new file mode 100644
index 000000000..332c020b5
Binary files /dev/null and b/.DS_Store differ
diff --git a/app.js b/app.js
index edb9f73ea..264f62821 100644
--- a/app.js
+++ b/app.js
@@ -6,9 +6,13 @@
var express = require('express');
var http = require('http');
var path = require('path');
-var handlebars = require('express3-handlebars')
+var handlebars = require('express3-handlebars');
var index = require('./routes/index');
+var record = require('./routes/record');
+var forum = require('./routes/forum');
+var help = require('./routes/help');
+var profile = require('./routes/profile');
// Example route
// var user = require('./routes/user');
@@ -37,7 +41,10 @@ if ('development' == app.get('env')) {
app.get('/', index.view);
// Example route
// app.get('/users', user.list);
-
+app.get("/record",record.viewR);
+app.get("/help",help.viewHelp);
+app.get("/forum",forum.viewForum);
+app.get("/profile",profile.viewProfile);
http.createServer(app).listen(app.get('port'), function(){
console.log('Express server listening on port ' + app.get('port'));
});
diff --git a/data.json b/data.json
new file mode 100644
index 000000000..510461fd8
--- /dev/null
+++ b/data.json
@@ -0,0 +1,11 @@
+
+{
+ "data":[
+ {
+ "name": "Elias",
+ "age": "50",
+ "city": "San Diego",
+ "state": "California"
+ }
+ ]
+}
\ No newline at end of file
diff --git a/node_modules/.bin/handlebars b/node_modules/.bin/handlebars
new file mode 120000
index 000000000..fb7d090fc
--- /dev/null
+++ b/node_modules/.bin/handlebars
@@ -0,0 +1 @@
+../handlebars/bin/handlebars
\ No newline at end of file
diff --git a/node_modules/.bin/semver b/node_modules/.bin/semver
new file mode 120000
index 000000000..317eb293d
--- /dev/null
+++ b/node_modules/.bin/semver
@@ -0,0 +1 @@
+../semver/bin/semver
\ No newline at end of file
diff --git a/node_modules/.bin/uglifyjs b/node_modules/.bin/uglifyjs
new file mode 120000
index 000000000..fef3468b6
--- /dev/null
+++ b/node_modules/.bin/uglifyjs
@@ -0,0 +1 @@
+../uglify-js/bin/uglifyjs
\ No newline at end of file
diff --git a/node_modules/amdefine/LICENSE b/node_modules/amdefine/LICENSE
new file mode 100644
index 000000000..9b25ee006
--- /dev/null
+++ b/node_modules/amdefine/LICENSE
@@ -0,0 +1,58 @@
+amdefine is released under two licenses: new BSD, and MIT. You may pick the
+license that best suits your development needs. The text of both licenses are
+provided below.
+
+
+The "New" BSD License:
+----------------------
+
+Copyright (c) 2011-2016, The Dojo Foundation
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+ * Redistributions of source code must retain the above copyright notice, this
+ list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
+ * Neither the name of the Dojo Foundation nor the names of its contributors
+ may be used to endorse or promote products derived from this software
+ without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
+FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+
+MIT License
+-----------
+
+Copyright (c) 2011-2016, The Dojo Foundation
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/node_modules/amdefine/README.md b/node_modules/amdefine/README.md
new file mode 100644
index 000000000..037a6e817
--- /dev/null
+++ b/node_modules/amdefine/README.md
@@ -0,0 +1,171 @@
+# amdefine
+
+A module that can be used to implement AMD's define() in Node. This allows you
+to code to the AMD API and have the module work in node programs without
+requiring those other programs to use AMD.
+
+## Usage
+
+**1)** Update your package.json to indicate amdefine as a dependency:
+
+```javascript
+ "dependencies": {
+ "amdefine": ">=0.1.0"
+ }
+```
+
+Then run `npm install` to get amdefine into your project.
+
+**2)** At the top of each module that uses define(), place this code:
+
+```javascript
+if (typeof define !== 'function') { var define = require('amdefine')(module) }
+```
+
+**Only use these snippets** when loading amdefine. If you preserve the basic structure,
+with the braces, it will be stripped out when using the [RequireJS optimizer](#optimizer).
+
+You can add spaces, line breaks and even require amdefine with a local path, but
+keep the rest of the structure to get the stripping behavior.
+
+As you may know, because `if` statements in JavaScript don't have their own scope, the var
+declaration in the above snippet is made whether the `if` expression is truthy or not. If
+RequireJS is loaded then the declaration is superfluous because `define` is already
+declared in the same scope in RequireJS. Fortunately JavaScript handles multiple `var`
+declarations of the same variable in the same scope gracefully.
+
+If you want to deliver amdefine.js with your code rather than specifying it as a dependency
+with npm, then just download the latest release and refer to it using a relative path:
+
+[Latest Version](https://github.com/jrburke/amdefine/raw/latest/amdefine.js)
+
+### amdefine/intercept
+
+Consider this very experimental.
+
+Instead of pasting the piece of text for the amdefine setup of a `define`
+variable in each module you create or consume, you can use `amdefine/intercept`
+instead. It will automatically insert the above snippet in each .js file loaded
+by Node.
+
+**Warning**: you should only use this if you are creating an application that
+is consuming AMD style define()'d modules that are distributed via npm and you want
+to run that code in Node.
+
+For library code where you are not sure if it will be used by others in Node or
+in the browser, then explicitly depending on amdefine and placing the code
+snippet above is the suggested path, instead of using `amdefine/intercept`. The
+intercept module affects all .js files loaded in the Node app, and it is
+inconsiderate to modify global state like that unless you are also controlling
+the top level app.
+
+#### Why distribute AMD-style modules via npm?
+
+npm has a lot of weaknesses for front-end use (installed layout is not great,
+should have better support for the `baseUrl + moduleID + '.js'` style of loading,
+single file JS installs), but some people want a JS package manager and are
+willing to live with those constraints. If that is you, but still want to author
+in AMD style modules to get dynamic require([]), better direct source usage and
+powerful loader plugin support in the browser, then this tool can help.
+
+#### amdefine/intercept usage
+
+Just require it in your top level app module (for example index.js, server.js):
+
+```javascript
+require('amdefine/intercept');
+```
+
+The module does not return a value, so no need to assign the result to a local
+variable.
+
+Then just require() code as you normally would with Node's require(). Any .js
+loaded after the intercept require will have the amdefine check injected in
+the .js source as it is loaded. It does not modify the source on disk, just
+prepends some content to the text of the module as it is loaded by Node.
+
+#### How amdefine/intercept works
+
+It overrides the `Module._extensions['.js']` in Node to automatically prepend
+the amdefine snippet above. So, it will affect any .js file loaded by your
+app.
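+
+For illustration, here is a simplified sketch of that mechanism (the real
+implementation is in `intercept.js` and also strips BOMs and skips amdefine's
+own files):
+
+```javascript
+var Module = require('module'),
+    fs = require('fs');
+
+var snippet = "if (typeof define !== 'function') {var define = require('amdefine')(module)}";
+
+Module._extensions['.js'] = function (module, filename) {
+    // Read the source so the snippet can be prepended before compiling.
+    var content = fs.readFileSync(filename, 'utf8');
+    module._compile(snippet + content, filename);
+};
+```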
+
+## define() usage
+
+It is best if you use the anonymous forms of define() in your module:
+
+```javascript
+define(function (require) {
+ var dependency = require('dependency');
+});
+```
+
+or
+
+```javascript
+define(['dependency'], function (dependency) {
+
+});
+```
+
+## RequireJS optimizer integration
+
+Version 1.0.3 of the [RequireJS optimizer](http://requirejs.org/docs/optimization.html)
+will have support for stripping the `if (typeof define !== 'function')` check
+mentioned above, so you can include this snippet for code that runs in the
+browser, but avoid taking the cost of the if() statement once the code is
+optimized for deployment.
+
+## Node 0.4 Support
+
+If you want to support Node 0.4, then add `require` as the second parameter to amdefine:
+
+```javascript
+//Only if you want Node 0.4. If using 0.5 or later, use the above snippet.
+if (typeof define !== 'function') { var define = require('amdefine')(module, require) }
+```
+
+## Limitations
+
+### Synchronous vs Asynchronous
+
+amdefine creates a define() function that is callable by your code. It will
+execute and trace dependencies and call the factory function *synchronously*,
+to keep the behavior in line with Node's synchronous dependency tracing.
+
+The exception: calling AMD's callback-style require() from inside a factory
+function. The require callback is called on process.nextTick():
+
+```javascript
+define(function (require) {
+ require(['a'], function(a) {
+ //'a' is loaded synchronously, but
+ //this callback is called on process.nextTick().
+ });
+});
+```
+
+### Loader Plugins
+
+Loader plugins are supported as long as they call their load() callbacks
+synchronously. So ones that do network requests will not work. However plugins
+like [text](http://requirejs.org/docs/api.html#text) can load text files locally.
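+
+For example, this kind of synchronous plugin use works (a sketch; it assumes the
+`text` plugin is installed where Node can resolve it):
+
+```javascript
+define(['text!./template.html'], function (template) {
+    // template is the raw file contents, loaded synchronously
+    return template;
+});
+```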
+
+The plugin API's `load.fromText()` is **not supported** in amdefine, so this means
+transpiler plugins like the [CoffeeScript loader plugin](https://github.com/jrburke/require-cs)
+will not work. This may be fixable, but it is a bit complex, and I do not have
+enough node-fu to figure it out yet. See the source for amdefine.js if you want
+to get an idea of the issues involved.
+
+## Tests
+
+To run the tests, cd to **tests** and run:
+
+```
+node all.js
+node all-intercept.js
+```
+
+## License
+
+New BSD and MIT. Check the LICENSE file for all the details.
diff --git a/node_modules/amdefine/amdefine.js b/node_modules/amdefine/amdefine.js
new file mode 100644
index 000000000..ca830ba4f
--- /dev/null
+++ b/node_modules/amdefine/amdefine.js
@@ -0,0 +1,301 @@
+/** vim: et:ts=4:sw=4:sts=4
+ * @license amdefine 1.0.1 Copyright (c) 2011-2016, The Dojo Foundation All Rights Reserved.
+ * Available via the MIT or new BSD license.
+ * see: http://github.com/jrburke/amdefine for details
+ */
+
+/*jslint node: true */
+/*global module, process */
+'use strict';
+
+/**
+ * Creates a define for node.
+ * @param {Object} module the "module" object that is defined by Node for the
+ * current module.
+ * @param {Function} [requireFn]. Node's require function for the current module.
+ * It only needs to be passed in Node versions before 0.5, when module.require
+ * did not exist.
+ * @returns {Function} a define function that is usable for the current node
+ * module.
+ */
+function amdefine(module, requireFn) {
+ 'use strict';
+ var defineCache = {},
+ loaderCache = {},
+ alreadyCalled = false,
+ path = require('path'),
+ makeRequire, stringRequire;
+
+ /**
+ * Trims the . and .. from an array of path segments.
+ * It will keep a leading path segment if a .. will become
+ * the first path segment, to help with module name lookups,
+ * which act like paths, but can be remapped. But the end result,
+ * all paths that use this function should look normalized.
+ * NOTE: this method MODIFIES the input array.
+ * @param {Array} ary the array of path segments.
+ */
+ function trimDots(ary) {
+ var i, part;
+ for (i = 0; ary[i]; i+= 1) {
+ part = ary[i];
+ if (part === '.') {
+ ary.splice(i, 1);
+ i -= 1;
+ } else if (part === '..') {
+ if (i === 1 && (ary[2] === '..' || ary[0] === '..')) {
+ //End of the line. Keep at least one non-dot
+ //path segment at the front so it can be mapped
+ //correctly to disk. Otherwise, there is likely
+ //no path mapping for a path starting with '..'.
+ //This can still fail, but catches the most reasonable
+ //uses of ..
+ break;
+ } else if (i > 0) {
+ ary.splice(i - 1, 2);
+ i -= 2;
+ }
+ }
+ }
+ }
+
+ function normalize(name, baseName) {
+ var baseParts;
+
+ //Adjust any relative paths.
+ if (name && name.charAt(0) === '.') {
+ //If have a base name, try to normalize against it,
+ //otherwise, assume it is a top-level require that will
+ //be relative to baseUrl in the end.
+ if (baseName) {
+ baseParts = baseName.split('/');
+ baseParts = baseParts.slice(0, baseParts.length - 1);
+ baseParts = baseParts.concat(name.split('/'));
+ trimDots(baseParts);
+ name = baseParts.join('/');
+ }
+ }
+
+ return name;
+ }
+
+ /**
+ * Create the normalize() function passed to a loader plugin's
+ * normalize method.
+ */
+ function makeNormalize(relName) {
+ return function (name) {
+ return normalize(name, relName);
+ };
+ }
+
+ function makeLoad(id) {
+ function load(value) {
+ loaderCache[id] = value;
+ }
+
+ load.fromText = function (id, text) {
+ //This one is difficult because the text can/probably uses
+ //define, and any relative paths and requires should be relative
+ //to that id was it would be found on disk. But this would require
+ //bootstrapping a module/require fairly deeply from node core.
+ //Not sure how best to go about that yet.
+ throw new Error('amdefine does not implement load.fromText');
+ };
+
+ return load;
+ }
+
+ makeRequire = function (systemRequire, exports, module, relId) {
+ function amdRequire(deps, callback) {
+ if (typeof deps === 'string') {
+ //Synchronous, single module require('')
+ return stringRequire(systemRequire, exports, module, deps, relId);
+ } else {
+ //Array of dependencies with a callback.
+
+ //Convert the dependencies to modules.
+ deps = deps.map(function (depName) {
+ return stringRequire(systemRequire, exports, module, depName, relId);
+ });
+
+ //Wait for next tick to call back the require call.
+ if (callback) {
+ process.nextTick(function () {
+ callback.apply(null, deps);
+ });
+ }
+ }
+ }
+
+ amdRequire.toUrl = function (filePath) {
+ if (filePath.indexOf('.') === 0) {
+ return normalize(filePath, path.dirname(module.filename));
+ } else {
+ return filePath;
+ }
+ };
+
+ return amdRequire;
+ };
+
+ //Favor explicit value, passed in if the module wants to support Node 0.4.
+ requireFn = requireFn || function req() {
+ return module.require.apply(module, arguments);
+ };
+
+ function runFactory(id, deps, factory) {
+ var r, e, m, result;
+
+ if (id) {
+ e = loaderCache[id] = {};
+ m = {
+ id: id,
+ uri: __filename,
+ exports: e
+ };
+ r = makeRequire(requireFn, e, m, id);
+ } else {
+ //Only support one define call per file
+ if (alreadyCalled) {
+ throw new Error('amdefine with no module ID cannot be called more than once per file.');
+ }
+ alreadyCalled = true;
+
+ //Use the real variables from node
+ //Use module.exports for exports, since
+ //the exports in here is amdefine exports.
+ e = module.exports;
+ m = module;
+ r = makeRequire(requireFn, e, m, module.id);
+ }
+
+ //If there are dependencies, they are strings, so need
+ //to convert them to dependency values.
+ if (deps) {
+ deps = deps.map(function (depName) {
+ return r(depName);
+ });
+ }
+
+ //Call the factory with the right dependencies.
+ if (typeof factory === 'function') {
+ result = factory.apply(m.exports, deps);
+ } else {
+ result = factory;
+ }
+
+ if (result !== undefined) {
+ m.exports = result;
+ if (id) {
+ loaderCache[id] = m.exports;
+ }
+ }
+ }
+
+ stringRequire = function (systemRequire, exports, module, id, relId) {
+ //Split the ID by a ! so that
+ var index = id.indexOf('!'),
+ originalId = id,
+ prefix, plugin;
+
+ if (index === -1) {
+ id = normalize(id, relId);
+
+ //Straight module lookup. If it is one of the special dependencies,
+ //deal with it, otherwise, delegate to node.
+ if (id === 'require') {
+ return makeRequire(systemRequire, exports, module, relId);
+ } else if (id === 'exports') {
+ return exports;
+ } else if (id === 'module') {
+ return module;
+ } else if (loaderCache.hasOwnProperty(id)) {
+ return loaderCache[id];
+ } else if (defineCache[id]) {
+ runFactory.apply(null, defineCache[id]);
+ return loaderCache[id];
+ } else {
+ if(systemRequire) {
+ return systemRequire(originalId);
+ } else {
+ throw new Error('No module with ID: ' + id);
+ }
+ }
+ } else {
+ //There is a plugin in play.
+ prefix = id.substring(0, index);
+ id = id.substring(index + 1, id.length);
+
+ plugin = stringRequire(systemRequire, exports, module, prefix, relId);
+
+ if (plugin.normalize) {
+ id = plugin.normalize(id, makeNormalize(relId));
+ } else {
+ //Normalize the ID normally.
+ id = normalize(id, relId);
+ }
+
+ if (loaderCache[id]) {
+ return loaderCache[id];
+ } else {
+ plugin.load(id, makeRequire(systemRequire, exports, module, relId), makeLoad(id), {});
+
+ return loaderCache[id];
+ }
+ }
+ };
+
+ //Create a define function specific to the module asking for amdefine.
+ function define(id, deps, factory) {
+ if (Array.isArray(id)) {
+ factory = deps;
+ deps = id;
+ id = undefined;
+ } else if (typeof id !== 'string') {
+ factory = id;
+ id = deps = undefined;
+ }
+
+ if (deps && !Array.isArray(deps)) {
+ factory = deps;
+ deps = undefined;
+ }
+
+ if (!deps) {
+ deps = ['require', 'exports', 'module'];
+ }
+
+ //Set up properties for this module. If an ID, then use
+ //internal cache. If no ID, then use the external variables
+ //for this node module.
+ if (id) {
+ //Put the module in deep freeze until there is a
+ //require call for it.
+ defineCache[id] = [id, deps, factory];
+ } else {
+ runFactory(id, deps, factory);
+ }
+ }
+
+ //define.require, which has access to all the values in the
+ //cache. Useful for AMD modules that all have IDs in the file,
+ //but need to finally export a value to node based on one of those
+ //IDs.
+ define.require = function (id) {
+ if (loaderCache[id]) {
+ return loaderCache[id];
+ }
+
+ if (defineCache[id]) {
+ runFactory.apply(null, defineCache[id]);
+ return loaderCache[id];
+ }
+ };
+
+ define.amd = {};
+
+ return define;
+}
+
+module.exports = amdefine;
diff --git a/node_modules/amdefine/intercept.js b/node_modules/amdefine/intercept.js
new file mode 100644
index 000000000..771a98301
--- /dev/null
+++ b/node_modules/amdefine/intercept.js
@@ -0,0 +1,36 @@
+/*jshint node: true */
+var inserted,
+ Module = require('module'),
+ fs = require('fs'),
+ existingExtFn = Module._extensions['.js'],
+ amdefineRegExp = /amdefine\.js/;
+
+inserted = "if (typeof define !== 'function') {var define = require('amdefine')(module)}";
+
+//From the node/lib/module.js source:
+function stripBOM(content) {
+ // Remove byte order marker. This catches EF BB BF (the UTF-8 BOM)
+ // because the buffer-to-string conversion in `fs.readFileSync()`
+ // translates it to FEFF, the UTF-16 BOM.
+ if (content.charCodeAt(0) === 0xFEFF) {
+ content = content.slice(1);
+ }
+ return content;
+}
+
+//Also adapted from the node/lib/module.js source:
+function intercept(module, filename) {
+ var content = stripBOM(fs.readFileSync(filename, 'utf8'));
+
+ if (!amdefineRegExp.test(module.id)) {
+ content = inserted + content;
+ }
+
+ module._compile(content, filename);
+}
+
+intercept._id = 'amdefine/intercept';
+
+if (!existingExtFn._id || existingExtFn._id !== intercept._id) {
+ Module._extensions['.js'] = intercept;
+}
diff --git a/node_modules/amdefine/package.json b/node_modules/amdefine/package.json
new file mode 100644
index 000000000..1b9f20df0
--- /dev/null
+++ b/node_modules/amdefine/package.json
@@ -0,0 +1,48 @@
+{
+ "_from": "amdefine@>=0.0.4",
+ "_id": "amdefine@1.0.1",
+ "_inBundle": false,
+ "_integrity": "sha1-SlKCrBZHKek2Gbz9OtFR+BfOkfU=",
+ "_location": "/amdefine",
+ "_phantomChildren": {},
+ "_requested": {
+ "type": "range",
+ "registry": true,
+ "raw": "amdefine@>=0.0.4",
+ "name": "amdefine",
+ "escapedName": "amdefine",
+ "rawSpec": ">=0.0.4",
+ "saveSpec": null,
+ "fetchSpec": ">=0.0.4"
+ },
+ "_requiredBy": [
+ "/source-map"
+ ],
+ "_resolved": "https://registry.npmjs.org/amdefine/-/amdefine-1.0.1.tgz",
+ "_shasum": "4a5282ac164729e93619bcfd3ad151f817ce91f5",
+ "_spec": "amdefine@>=0.0.4",
+ "_where": "/Users/haleyhammock/introHCI/ixd-skeleton/node_modules/source-map",
+ "author": {
+ "name": "James Burke",
+ "email": "jrburke@gmail.com",
+ "url": "http://github.com/jrburke"
+ },
+ "bugs": {
+ "url": "https://github.com/jrburke/amdefine/issues"
+ },
+ "bundleDependencies": false,
+ "deprecated": false,
+ "description": "Provide AMD's define() API for declaring modules in the AMD format",
+ "engines": {
+ "node": ">=0.4.2"
+ },
+ "homepage": "http://github.com/jrburke/amdefine",
+ "license": "BSD-3-Clause OR MIT",
+ "main": "./amdefine.js",
+ "name": "amdefine",
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/jrburke/amdefine.git"
+ },
+ "version": "1.0.1"
+}
diff --git a/node_modules/async/LICENSE b/node_modules/async/LICENSE
new file mode 100644
index 000000000..b7f9d5001
--- /dev/null
+++ b/node_modules/async/LICENSE
@@ -0,0 +1,19 @@
+Copyright (c) 2010 Caolan McMahon
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/node_modules/async/README.md b/node_modules/async/README.md
new file mode 100644
index 000000000..951f76e9f
--- /dev/null
+++ b/node_modules/async/README.md
@@ -0,0 +1,1425 @@
+# Async.js
+
+Async is a utility module which provides straight-forward, powerful functions
+for working with asynchronous JavaScript. Although originally designed for
+use with [node.js](http://nodejs.org), it can also be used directly in the
+browser. Also supports [component](https://github.com/component/component).
+
+Async provides around 20 functions that include the usual 'functional'
+suspects (map, reduce, filter, each…) as well as some common patterns
+for asynchronous control flow (parallel, series, waterfall…). All these
+functions assume you follow the node.js convention of providing a single
+callback as the last argument of your async function.
+
+
+## Quick Examples
+
+```javascript
+async.map(['file1','file2','file3'], fs.stat, function(err, results){
+ // results is now an array of stats for each file
+});
+
+async.filter(['file1','file2','file3'], fs.exists, function(results){
+ // results now equals an array of the existing files
+});
+
+async.parallel([
+ function(){ ... },
+ function(){ ... }
+], callback);
+
+async.series([
+ function(){ ... },
+ function(){ ... }
+]);
+```
+
+There are many more functions available so take a look at the docs below for a
+full list. This module aims to be comprehensive, so if you feel anything is
+missing please create a GitHub issue for it.
+
+## Common Pitfalls
+
+### Binding a context to an iterator
+
+This section is really about bind, not about async. If you are wondering how to
+make async execute your iterators in a given context, or are confused as to why
+a method of another library isn't working as an iterator, study this example:
+
+```js
+// Here is a simple object with an (unnecessarily roundabout) squaring method
+var AsyncSquaringLibrary = {
+ squareExponent: 2,
+ square: function(number, callback){
+ var result = Math.pow(number, this.squareExponent);
+ setTimeout(function(){
+ callback(null, result);
+ }, 200);
+ }
+};
+
+async.map([1, 2, 3], AsyncSquaringLibrary.square, function(err, result){
+ // result is [NaN, NaN, NaN]
+ // This fails because the `this.squareExponent` expression in the square
+ // function is not evaluated in the context of AsyncSquaringLibrary, and is
+ // therefore undefined.
+});
+
+async.map([1, 2, 3], AsyncSquaringLibrary.square.bind(AsyncSquaringLibrary), function(err, result){
+ // result is [1, 4, 9]
+ // With the help of bind we can attach a context to the iterator before
+ // passing it to async. Now the square function will be executed in its
+ // 'home' AsyncSquaringLibrary context and the value of `this.squareExponent`
+ // will be as expected.
+});
+```
+
+## Download
+
+The source is available for download from
+[GitHub](http://github.com/caolan/async).
+Alternatively, you can install using Node Package Manager (npm):
+
+ npm install async
+
+__Development:__ [async.js](https://github.com/caolan/async/raw/master/lib/async.js) - 29.6kb Uncompressed
+
+## In the Browser
+
+So far it's been tested in IE6, IE7, IE8, FF3.6 and Chrome 5. Usage:
+
+```html
+
+
+```
+
+## Documentation
+
+### Collections
+
+* [each](#each)
+* [eachSeries](#eachSeries)
+* [eachLimit](#eachLimit)
+* [map](#map)
+* [mapSeries](#mapSeries)
+* [mapLimit](#mapLimit)
+* [filter](#filter)
+* [filterSeries](#filterSeries)
+* [reject](#reject)
+* [rejectSeries](#rejectSeries)
+* [reduce](#reduce)
+* [reduceRight](#reduceRight)
+* [detect](#detect)
+* [detectSeries](#detectSeries)
+* [sortBy](#sortBy)
+* [some](#some)
+* [every](#every)
+* [concat](#concat)
+* [concatSeries](#concatSeries)
+
+### Control Flow
+
+* [series](#series)
+* [parallel](#parallel)
+* [parallelLimit](#parallellimittasks-limit-callback)
+* [whilst](#whilst)
+* [doWhilst](#doWhilst)
+* [until](#until)
+* [doUntil](#doUntil)
+* [forever](#forever)
+* [waterfall](#waterfall)
+* [compose](#compose)
+* [applyEach](#applyEach)
+* [applyEachSeries](#applyEachSeries)
+* [queue](#queue)
+* [cargo](#cargo)
+* [auto](#auto)
+* [iterator](#iterator)
+* [apply](#apply)
+* [nextTick](#nextTick)
+* [times](#times)
+* [timesSeries](#timesSeries)
+
+### Utils
+
+* [memoize](#memoize)
+* [unmemoize](#unmemoize)
+* [log](#log)
+* [dir](#dir)
+* [noConflict](#noConflict)
+
+
+## Collections
+
+
+
+### each(arr, iterator, callback)
+
+Applies an iterator function to each item in an array, in parallel.
+The iterator is called with an item from the list and a callback for when it
+has finished. If the iterator passes an error to this callback, the main
+callback for the each function is immediately called with the error.
+
+Note, that since this function applies the iterator to each item in parallel
+there is no guarantee that the iterator functions will complete in order.
+
+__Arguments__
+
+* arr - An array to iterate over.
+* iterator(item, callback) - A function to apply to each item in the array.
+ The iterator is passed a callback(err) which must be called once it has
+  completed. If no error has occurred, the callback should be run without
+ arguments or with an explicit null argument.
+* callback(err) - A callback which is called after all the iterator functions
+ have finished, or an error has occurred.
+
+__Example__
+
+```js
+// assuming openFiles is an array of file names and saveFile is a function
+// to save the modified contents of that file:
+
+async.each(openFiles, saveFile, function(err){
+ // if any of the saves produced an error, err would equal that error
+});
+```
+
+---------------------------------------
+
+
+
+### eachSeries(arr, iterator, callback)
+
+The same as each only the iterator is applied to each item in the array in
+series. The next iterator is only called once the current one has completed
+processing. This means the iterator functions will complete in order.
+
+
+---------------------------------------
+
+
+
+### eachLimit(arr, limit, iterator, callback)
+
+The same as each only no more than "limit" iterators will be simultaneously
+running at any time.
+
+Note that the items are not processed in batches, so there is no guarantee that
+ the first "limit" iterator functions will complete before any others are
+started.
+
+__Arguments__
+
+* arr - An array to iterate over.
+* limit - The maximum number of iterators to run at any time.
+* iterator(item, callback) - A function to apply to each item in the array.
+ The iterator is passed a callback(err) which must be called once it has
+  completed. If no error has occurred, the callback should be run without
+ arguments or with an explicit null argument.
+* callback(err) - A callback which is called after all the iterator functions
+ have finished, or an error has occurred.
+
+__Example__
+
+```js
+// Assume documents is an array of JSON objects and requestApi is a
+// function that interacts with a rate-limited REST api.
+
+async.eachLimit(documents, 20, requestApi, function(err){
+ // if any of the saves produced an error, err would equal that error
+});
+```
+
+---------------------------------------
+
+
+### map(arr, iterator, callback)
+
+Produces a new array of values by mapping each value in the given array through
+the iterator function. The iterator is called with an item from the array and a
+callback for when it has finished processing. The callback takes 2 arguments,
+an error and the transformed item from the array. If the iterator passes an
+error to this callback, the main callback for the map function is immediately
+called with the error.
+
+Note, that since this function applies the iterator to each item in parallel
+there is no guarantee that the iterator functions will complete in order, however
+the results array will be in the same order as the original array.
+
+__Arguments__
+
+* arr - An array to iterate over.
+* iterator(item, callback) - A function to apply to each item in the array.
+ The iterator is passed a callback(err, transformed) which must be called once
+ it has completed with an error (which can be null) and a transformed item.
+* callback(err, results) - A callback which is called after all the iterator
+ functions have finished, or an error has occurred. Results is an array of the
+ transformed items from the original array.
+
+__Example__
+
+```js
+async.map(['file1','file2','file3'], fs.stat, function(err, results){
+ // results is now an array of stats for each file
+});
+```
+
+---------------------------------------
+
+
+### mapSeries(arr, iterator, callback)
+
+The same as map only the iterator is applied to each item in the array in
+series. The next iterator is only called once the current one has completed
+processing. The results array will be in the same order as the original.
+
+
+---------------------------------------
+
+
+### mapLimit(arr, limit, iterator, callback)
+
+The same as map only no more than "limit" iterators will be simultaneously
+running at any time.
+
+Note that the items are not processed in batches, so there is no guarantee that
+ the first "limit" iterator functions will complete before any others are
+started.
+
+__Arguments__
+
+* arr - An array to iterate over.
+* limit - The maximum number of iterators to run at any time.
+* iterator(item, callback) - A function to apply to each item in the array.
+ The iterator is passed a callback(err, transformed) which must be called once
+ it has completed with an error (which can be null) and a transformed item.
+* callback(err, results) - A callback which is called after all the iterator
+ functions have finished, or an error has occurred. Results is an array of the
+ transformed items from the original array.
+
+__Example__
+
+```js
+async.mapLimit(['file1','file2','file3'], 1, fs.stat, function(err, results){
+ // results is now an array of stats for each file
+});
+```
+
+---------------------------------------
+
+
+### filter(arr, iterator, callback)
+
+__Alias:__ select
+
+Returns a new array of all the values which pass an async truth test.
+_The callback for each iterator call only accepts a single argument of true or
+false, it does not accept an error argument first!_ This is in-line with the
+way node libraries work with truth tests like fs.exists. This operation is
+performed in parallel, but the results array will be in the same order as the
+original.
+
+__Arguments__
+
+* arr - An array to iterate over.
+* iterator(item, callback) - A truth test to apply to each item in the array.
+ The iterator is passed a callback(truthValue) which must be called with a
+ boolean argument once it has completed.
+* callback(results) - A callback which is called after all the iterator
+ functions have finished.
+
+__Example__
+
+```js
+async.filter(['file1','file2','file3'], fs.exists, function(results){
+ // results now equals an array of the existing files
+});
+```
+
+---------------------------------------
+
+
+### filterSeries(arr, iterator, callback)
+
+__alias:__ selectSeries
+
+The same as filter only the iterator is applied to each item in the array in
+series. The next iterator is only called once the current one has completed
+processing. The results array will be in the same order as the original.
+
+---------------------------------------
+
+
+### reject(arr, iterator, callback)
+
+The opposite of filter. Removes values that pass an async truth test.
+
+---------------------------------------
+
+
+### rejectSeries(arr, iterator, callback)
+
+The same as reject, only the iterator is applied to each item in the array
+in series.
+
+
+---------------------------------------
+
+
+### reduce(arr, memo, iterator, callback)
+
+__aliases:__ inject, foldl
+
+Reduces a list of values into a single value using an async iterator to return
+each successive step. Memo is the initial state of the reduction. This
+function only operates in series. For performance reasons, it may make sense to
+split a call to this function into a parallel map, then use the normal
+Array.prototype.reduce on the results. This function is for situations where
+each step in the reduction needs to be async; if you can get the data before
+reducing it then it's probably a good idea to do so.
+
+__Arguments__
+
+* arr - An array to iterate over.
+* memo - The initial state of the reduction.
+* iterator(memo, item, callback) - A function applied to each item in the
+ array to produce the next step in the reduction. The iterator is passed a
+ callback(err, reduction) which accepts an optional error as its first
+ argument, and the state of the reduction as the second. If an error is
+ passed to the callback, the reduction is stopped and the main callback is
+ immediately called with the error.
+* callback(err, result) - A callback which is called after all the iterator
+ functions have finished. Result is the reduced value.
+
+__Example__
+
+```js
+async.reduce([1,2,3], 0, function(memo, item, callback){
+ // pointless async:
+ process.nextTick(function(){
+ callback(null, memo + item)
+ });
+}, function(err, result){
+ // result is now equal to the last value of memo, which is 6
+});
+```
+
+---------------------------------------
+
+
+### reduceRight(arr, memo, iterator, callback)
+
+__Alias:__ foldr
+
+Same as reduce, only operates on the items in the array in reverse order.
+
+
+---------------------------------------
+
+
+### detect(arr, iterator, callback)
+
+Returns the first value in a list that passes an async truth test. The
+iterator is applied in parallel, meaning the first iterator to return true will
+fire the detect callback with that result. That means the result might not be
+the first item in the original array (in terms of order) that passes the test.
+
+If order within the original array is important then look at detectSeries.
+
+__Arguments__
+
+* arr - An array to iterate over.
+* iterator(item, callback) - A truth test to apply to each item in the array.
+ The iterator is passed a callback(truthValue) which must be called with a
+ boolean argument once it has completed.
+* callback(result) - A callback which is called as soon as any iterator returns
+ true, or after all the iterator functions have finished. Result will be
+ the first item in the array that passes the truth test (iterator) or the
+ value undefined if none passed.
+
+__Example__
+
+```js
+async.detect(['file1','file2','file3'], fs.exists, function(result){
+ // result now equals the first file in the list that exists
+});
+```
+
+---------------------------------------
+
+
+### detectSeries(arr, iterator, callback)
+
+The same as detect, only the iterator is applied to each item in the array
+in series. This means the result is always the first in the original array (in
+terms of array order) that passes the truth test.
+
+
+---------------------------------------
+
+
+### sortBy(arr, iterator, callback)
+
+Sorts a list by the results of running each value through an async iterator.
+
+__Arguments__
+
+* arr - An array to iterate over.
+* iterator(item, callback) - A function to apply to each item in the array.
+ The iterator is passed a callback(err, sortValue) which must be called once it
+ has completed with an error (which can be null) and a value to use as the sort
+ criteria.
+* callback(err, results) - A callback which is called after all the iterator
+ functions have finished, or an error has occurred. Results is the items from
+ the original array sorted by the values returned by the iterator calls.
+
+__Example__
+
+```js
+async.sortBy(['file1','file2','file3'], function(file, callback){
+ fs.stat(file, function(err, stats){
+ callback(err, stats.mtime);
+ });
+}, function(err, results){
+ // results is now the original array of files sorted by
+ // modified date
+});
+```
+
+---------------------------------------
+
+
+### some(arr, iterator, callback)
+
+__Alias:__ any
+
+Returns true if at least one element in the array satisfies an async test.
+_The callback for each iterator call only accepts a single argument of true or
+false, it does not accept an error argument first!_ This is in-line with the
+way node libraries work with truth tests like fs.exists. Once any iterator
+call returns true, the main callback is immediately called.
+
+__Arguments__
+
+* arr - An array to iterate over.
+* iterator(item, callback) - A truth test to apply to each item in the array.
+ The iterator is passed a callback(truthValue) which must be called with a
+ boolean argument once it has completed.
+* callback(result) - A callback which is called as soon as any iterator returns
+ true, or after all the iterator functions have finished. Result will be
+ either true or false depending on the values of the async tests.
+
+__Example__
+
+```js
+async.some(['file1','file2','file3'], fs.exists, function(result){
+ // if result is true then at least one of the files exists
+});
+```
+
+---------------------------------------
+
+
+### every(arr, iterator, callback)
+
+__Alias:__ all
+
+Returns true if every element in the array satisfies an async test.
+_The callback for each iterator call only accepts a single argument of true or
+false, it does not accept an error argument first!_ This is in-line with the
+way node libraries work with truth tests like fs.exists.
+
+__Arguments__
+
+* arr - An array to iterate over.
+* iterator(item, callback) - A truth test to apply to each item in the array.
+ The iterator is passed a callback(truthValue) which must be called with a
+ boolean argument once it has completed.
+* callback(result) - A callback which is called after all the iterator
+ functions have finished. Result will be either true or false depending on
+ the values of the async tests.
+
+__Example__
+
+```js
+async.every(['file1','file2','file3'], fs.exists, function(result){
+ // if result is true then every file exists
+});
+```
+
+---------------------------------------
+
+
+### concat(arr, iterator, callback)
+
+Applies an iterator to each item in a list, concatenating the results. Returns the
+concatenated list. The iterators are called in parallel, and the results are
+concatenated as they return. There is no guarantee that the results array will
+be returned in the original order of the arguments passed to the iterator function.
+
+__Arguments__
+
+* arr - An array to iterate over
+* iterator(item, callback) - A function to apply to each item in the array.
+ The iterator is passed a callback(err, results) which must be called once it
+ has completed with an error (which can be null) and an array of results.
+* callback(err, results) - A callback which is called after all the iterator
+ functions have finished, or an error has occurred. Results is an array containing
+ the concatenated results of the iterator function.
+
+__Example__
+
+```js
+async.concat(['dir1','dir2','dir3'], fs.readdir, function(err, files){
+ // files is now a list of filenames that exist in the 3 directories
+});
+```
+
+---------------------------------------
+
+
+### concatSeries(arr, iterator, callback)
+
+Same as async.concat, but executes in series instead of parallel.
+
+
+## Control Flow
+
+
+### series(tasks, [callback])
+
+Run an array of functions in series, each one running once the previous
+function has completed. If any functions in the series pass an error to its
+callback, no more functions are run and the callback for the series is
+immediately called with the value of the error. Once the tasks have completed,
+the results are passed to the final callback as an array.
+
+It is also possible to use an object instead of an array. Each property will be
+run as a function and the results will be passed to the final callback as an object
+instead of an array. This can be a more readable way of handling results from
+async.series.
+
+
+__Arguments__
+
+* tasks - An array or object containing functions to run, each function is passed
+ a callback(err, result) it must call on completion with an error (which can
+ be null) and an optional result value.
+* callback(err, results) - An optional callback to run once all the functions
+ have completed. This function gets a results array (or object) containing all
+ the result arguments passed to the task callbacks.
+
+__Example__
+
+```js
+async.series([
+ function(callback){
+ // do some stuff ...
+ callback(null, 'one');
+ },
+ function(callback){
+ // do some more stuff ...
+ callback(null, 'two');
+ }
+],
+// optional callback
+function(err, results){
+ // results is now equal to ['one', 'two']
+});
+
+
+// an example using an object instead of an array
+async.series({
+ one: function(callback){
+ setTimeout(function(){
+ callback(null, 1);
+ }, 200);
+ },
+ two: function(callback){
+ setTimeout(function(){
+ callback(null, 2);
+ }, 100);
+ }
+},
+function(err, results) {
+ // results is now equal to: {one: 1, two: 2}
+});
+```
+
+---------------------------------------
+
+
+### parallel(tasks, [callback])
+
+Run an array of functions in parallel, without waiting until the previous
+function has completed. If any of the functions pass an error to its
+callback, the main callback is immediately called with the value of the error.
+Once the tasks have completed, the results are passed to the final callback as an
+array.
+
+It is also possible to use an object instead of an array. Each property will be
+run as a function and the results will be passed to the final callback as an object
+instead of an array. This can be a more readable way of handling results from
+async.parallel.
+
+
+__Arguments__
+
+* tasks - An array or object containing functions to run, each function is passed
+ a callback(err, result) it must call on completion with an error (which can
+ be null) and an optional result value.
+* callback(err, results) - An optional callback to run once all the functions
+ have completed. This function gets a results array (or object) containing all
+ the result arguments passed to the task callbacks.
+
+__Example__
+
+```js
+async.parallel([
+ function(callback){
+ setTimeout(function(){
+ callback(null, 'one');
+ }, 200);
+ },
+ function(callback){
+ setTimeout(function(){
+ callback(null, 'two');
+ }, 100);
+ }
+],
+// optional callback
+function(err, results){
+ // the results array will equal ['one','two'] even though
+ // the second function had a shorter timeout.
+});
+
+
+// an example using an object instead of an array
+async.parallel({
+ one: function(callback){
+ setTimeout(function(){
+ callback(null, 1);
+ }, 200);
+ },
+ two: function(callback){
+ setTimeout(function(){
+ callback(null, 2);
+ }, 100);
+ }
+},
+function(err, results) {
+ // results is now equals to: {one: 1, two: 2}
+});
+```
+
+---------------------------------------
+
+
+### parallelLimit(tasks, limit, [callback])
+
+The same as parallel only the tasks are executed in parallel with a maximum of "limit"
+tasks executing at any time.
+
+Note that the tasks are not executed in batches, so there is no guarantee that
+the first "limit" tasks will complete before any others are started.
+
+__Arguments__
+
+* tasks - An array or object containing functions to run, each function is passed
+ a callback(err, result) it must call on completion with an error (which can
+ be null) and an optional result value.
+* limit - The maximum number of tasks to run at any time.
+* callback(err, results) - An optional callback to run once all the functions
+ have completed. This function gets a results array (or object) containing all
+ the result arguments passed to the task callbacks.
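+
+__Example__
+
+A usage sketch (not part of the upstream docs); the `setTimeout` calls stand in
+for real asynchronous work:
+
+```js
+async.parallelLimit([
+    function(callback){ setTimeout(function(){ callback(null, 'one'); }, 200); },
+    function(callback){ setTimeout(function(){ callback(null, 'two'); }, 100); },
+    function(callback){ setTimeout(function(){ callback(null, 'three'); }, 50); }
+], 2, function(err, results){
+    // at most 2 tasks were running at once; results is ['one', 'two', 'three']
+});
+```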
+
+---------------------------------------
+
+
+### whilst(test, fn, callback)
+
+Repeatedly call fn, while test returns true. Calls the callback when stopped,
+or an error occurs.
+
+__Arguments__
+
+* test() - synchronous truth test to perform before each execution of fn.
+* fn(callback) - A function to call each time the test passes. The function is
+ passed a callback(err) which must be called once it has completed with an
+ optional error argument.
+* callback(err) - A callback which is called after the test fails and repeated
+ execution of fn has stopped.
+
+__Example__
+
+```js
+var count = 0;
+
+async.whilst(
+ function () { return count < 5; },
+ function (callback) {
+ count++;
+ setTimeout(callback, 1000);
+ },
+ function (err) {
+ // 5 seconds have passed
+ }
+);
+```
+
+---------------------------------------
+
+
+### doWhilst(fn, test, callback)
+
+The post-check version of whilst. To reflect the difference in the order of operations, the `test` and `fn` arguments are switched. `doWhilst` is to `whilst` as `do while` is to `while` in plain JavaScript.
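+
+__Example__
+
+A sketch mirroring the `whilst` example above, with the `fn` and `test`
+arguments swapped:
+
+```js
+var count = 0;
+
+async.doWhilst(
+    function (callback) {
+        count++;
+        setTimeout(callback, 1000);
+    },
+    function () { return count < 5; },
+    function (err) {
+        // fn ran at least once; count is now 5
+    }
+);
+```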
+
+---------------------------------------
+
+
+### until(test, fn, callback)
+
+Repeatedly call fn, until test returns true. Calls the callback when stopped,
+or an error occurs.
+
+The inverse of async.whilst.
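+
+__Example__
+
+A sketch mirroring the `whilst` example above, with the test condition inverted:
+
+```js
+var count = 0;
+
+async.until(
+    function () { return count >= 5; },
+    function (callback) {
+        count++;
+        setTimeout(callback, 1000);
+    },
+    function (err) {
+        // count is now 5
+    }
+);
+```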
+
+---------------------------------------
+
+
+### doUntil(fn, test, callback)
+
+Like doWhilst except the test is inverted. Note the argument ordering differs from `until`.
+
+---------------------------------------
+
+
+### forever(fn, callback)
+
+Calls the asynchronous function 'fn' repeatedly, in series, indefinitely.
+If an error is passed to fn's callback then 'callback' is called with the
+error, otherwise it will never be called.
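+
+__Example__
+
+A minimal sketch (the body of `fn` is a placeholder):
+
+```js
+async.forever(
+    function (next) {
+        // do some work, then schedule the next iteration
+        setTimeout(next, 1000);
+    },
+    function (err) {
+        // only reached if an iteration passed an error to next()
+    }
+);
+```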
+
+---------------------------------------
+
+
+### waterfall(tasks, [callback])
+
+Runs an array of functions in series, each passing their results to the next in
+the array. However, if any of the functions pass an error to the callback, the
+next function is not executed and the main callback is immediately called with
+the error.
+
+__Arguments__
+
+* tasks - An array of functions to run, each function is passed a
+ callback(err, result1, result2, ...) it must call on completion. The first
+ argument is an error (which can be null) and any further arguments will be
+ passed as arguments in order to the next task.
+* callback(err, [results]) - An optional callback to run once all the functions
+ have completed. This will be passed the results of the last task's callback.
+
+
+
+__Example__
+
+```js
+async.waterfall([
+ function(callback){
+ callback(null, 'one', 'two');
+ },
+ function(arg1, arg2, callback){
+ callback(null, 'three');
+ },
+ function(arg1, callback){
+ // arg1 now equals 'three'
+ callback(null, 'done');
+ }
+], function (err, result) {
+ // result now equals 'done'
+});
+```
+
+---------------------------------------
+
+### compose(fn1, fn2...)
+
+Creates a function which is a composition of the passed asynchronous
+functions. Each function consumes the return value of the function that
+follows. Composing functions f(), g() and h() would produce the result of
+f(g(h())), only this version uses callbacks to obtain the return values.
+
+Each function is executed with the `this` binding of the composed function.
+
+__Arguments__
+
+* functions... - the asynchronous functions to compose
+
+
+__Example__
+
+```js
+function add1(n, callback) {
+ setTimeout(function () {
+ callback(null, n + 1);
+ }, 10);
+}
+
+function mul3(n, callback) {
+ setTimeout(function () {
+ callback(null, n * 3);
+ }, 10);
+}
+
+var add1mul3 = async.compose(mul3, add1);
+
+add1mul3(4, function (err, result) {
+ // result now equals 15
+});
+```
+
+---------------------------------------
+
+### applyEach(fns, args..., callback)
+
+Applies the provided arguments to each function in the array, calling the
+callback after all functions have completed. If you only provide the first
+argument then it will return a function which lets you pass in the
+arguments as if it were a single function call.
+
+__Arguments__
+
+* fns - the asynchronous functions to all call with the same arguments
+* args... - any number of separate arguments to pass to the function
+* callback - the final argument should be the callback, called when all
+ functions have completed processing
+
+
+__Example__
+
+```js
+async.applyEach([enableSearch, updateSchema], 'bucket', callback);
+
+// partial application example:
+async.each(
+ buckets,
+ async.applyEach([enableSearch, updateSchema]),
+ callback
+);
+```
+
+---------------------------------------
+
+
+### applyEachSeries(arr, iterator, callback)
+
+The same as applyEach only the functions are applied in series.
+
+---------------------------------------
+
+
+### queue(worker, concurrency)
+
+Creates a queue object with the specified concurrency. Tasks added to the
+queue will be processed in parallel (up to the concurrency limit). If all
+workers are in progress, the task is queued until one is available. Once
+a worker has completed a task, the task's callback is called.
+
+__Arguments__
+
+* worker(task, callback) - An asynchronous function for processing a queued
+ task, which must call its callback(err) argument when finished, with an
+ optional error as an argument.
+* concurrency - An integer for determining how many worker functions should be
+ run in parallel.
+
+__Queue objects__
+
+The queue object returned by this function has the following properties and
+methods:
+
+* length() - a function returning the number of items waiting to be processed.
+* concurrency - an integer for determining how many worker functions should be
+ run in parallel. This property can be changed after a queue is created to
+ alter the concurrency on-the-fly.
+* push(task, [callback]) - add a new task to the queue, the callback is called
+ once the worker has finished processing the task.
+  Instead of a single task, an array of tasks can be submitted. The respective callback is used for every task in the list.
+* unshift(task, [callback]) - add a new task to the front of the queue.
+* saturated - a callback that is called when the queue length hits the concurrency and further tasks will be queued
+* empty - a callback that is called when the last item from the queue is given to a worker
+* drain - a callback that is called when the last item from the queue has returned from the worker
+
+__Example__
+
+```js
+// create a queue object with concurrency 2
+
+var q = async.queue(function (task, callback) {
+ console.log('hello ' + task.name);
+ callback();
+}, 2);
+
+
+// assign a callback
+q.drain = function() {
+ console.log('all items have been processed');
+}
+
+// add some items to the queue
+
+q.push({name: 'foo'}, function (err) {
+ console.log('finished processing foo');
+});
+q.push({name: 'bar'}, function (err) {
+ console.log('finished processing bar');
+});
+
+// add some items to the queue (batch-wise)
+
+q.push([{name: 'baz'},{name: 'bay'},{name: 'bax'}], function (err) {
+ console.log('finished processing bar');
+});
+
+// add some items to the front of the queue
+
+q.unshift({name: 'bar'}, function (err) {
+ console.log('finished processing bar');
+});
+```
+
+---------------------------------------
+
+
+### cargo(worker, [payload])
+
+Creates a cargo object with the specified payload. Tasks added to the
+cargo will be processed altogether (up to the payload limit). If the
+worker is in progress, the task is queued until it is available. Once
+the worker has completed some tasks, each callback of those tasks is called.
+
+__Arguments__
+
+* worker(tasks, callback) - An asynchronous function for processing an array of
+ queued tasks, which must call its callback(err) argument when finished, with
+ an optional error as an argument.
+* payload - An optional integer for determining how many tasks should be
+ processed per round; if omitted, the default is unlimited.
+
+__Cargo objects__
+
+The cargo object returned by this function has the following properties and
+methods:
+
+* length() - a function returning the number of items waiting to be processed.
+* payload - an integer for determining how many tasks should be
+  processed per round. This property can be changed after a cargo is created to
+ alter the payload on-the-fly.
+* push(task, [callback]) - add a new task to the queue, the callback is called
+ once the worker has finished processing the task.
+  Instead of a single task, an array of tasks can be submitted. The respective callback is used for every task in the list.
+* saturated - a callback that is called when the queue length hits the concurrency and further tasks will be queued
+* empty - a callback that is called when the last item from the queue is given to a worker
+* drain - a callback that is called when the last item from the queue has returned from the worker
+
+__Example__
+
+```js
+// create a cargo object with payload 2
+
+var cargo = async.cargo(function (tasks, callback) {
+    for (var i = 0; i < tasks.length; i++) {
+        console.log('hello ' + tasks[i].name);
+    }
+    callback();
+}, 2);
+```
+
+---------------------------------------
+
+
+### auto(tasks, [callback])
+
+Determines the best order for running functions based on their requirements.
+Each function can optionally depend on other functions being completed first,
+and each function is run as soon as its requirements are satisfied. If any of
+the functions pass an error to their callback, that function will not complete
+(so any other functions depending on it will not run) and the main callback
+will be called immediately with the error. Functions also receive an object
+containing the results of functions which have completed so far.
+
+Note, all functions are called with a results object as a second argument,
+so it is unsafe to pass functions in the tasks object which cannot handle the
+extra argument. For example, this snippet of code:
+
+```js
+async.auto({
+ readData: async.apply(fs.readFile, 'data.txt', 'utf-8')
+}, callback);
+```
+
+will have the effect of calling readFile with the results object as the last
+argument, which will fail:
+
+```js
+fs.readFile('data.txt', 'utf-8', cb, {});
+```
+
+Instead, wrap the call to readFile in a function which does not forward the
+results object:
+
+```js
+async.auto({
+ readData: function(cb, results){
+ fs.readFile('data.txt', 'utf-8', cb);
+ }
+}, callback);
+```
+
+__Arguments__
+
+* tasks - An object literal containing named functions or an array of
+ requirements, with the function itself the last item in the array. The key
+ used for each function or array is used when specifying requirements. The
+ function receives two arguments: (1) a callback(err, result) which must be
+ called when finished, passing an error (which can be null) and the result of
+ the function's execution, and (2) a results object, containing the results of
+ the previously executed functions.
+* callback(err, results) - An optional callback which is called when all the
+ tasks have been completed. The callback will receive an error as an argument
+ if any tasks pass an error to their callback. Results will always be passed
+ but if an error occurred, no other tasks will be performed, and the results
+ object will only contain partial results.
+
+
+__Example__
+
+```js
+async.auto({
+ get_data: function(callback){
+ // async code to get some data
+ },
+ make_folder: function(callback){
+ // async code to create a directory to store a file in
+ // this is run at the same time as getting the data
+ },
+ write_file: ['get_data', 'make_folder', function(callback){
+ // once there is some data and the directory exists,
+ // write the data to a file in the directory
+ callback(null, filename);
+ }],
+ email_link: ['write_file', function(callback, results){
+ // once the file is written let's email a link to it...
+ // results.write_file contains the filename returned by write_file.
+ }]
+});
+```
+
+This is a fairly trivial example, but doing the same thing with the basic parallel
+and series functions would look like this:
+
+```js
+async.parallel([
+ function(callback){
+ // async code to get some data
+ },
+ function(callback){
+ // async code to create a directory to store a file in
+ // this is run at the same time as getting the data
+ }
+],
+function(err, results){
+ async.series([
+ function(callback){
+ // once there is some data and the directory exists,
+ // write the data to a file in the directory
+ },
+ function(callback){
+ // once the file is written let's email a link to it...
+ }
+ ]);
+});
+```
+
+For a complicated series of async tasks, using the auto function makes adding
+new tasks much easier and the code more readable.
+
+
+---------------------------------------
+
+
+### iterator(tasks)
+
+Creates an iterator function which calls the next function in the array,
+returning a continuation to call the next one after that. It's also possible to
+'peek' at the next iterator by calling iterator.next().
+
+This function is used internally by the async module but can be useful when
+you want to manually control the flow of functions in series.
+
+__Arguments__
+
+* tasks - An array of functions to run.
+
+__Example__
+
+```js
+var iterator = async.iterator([
+ function(){ sys.p('one'); },
+ function(){ sys.p('two'); },
+ function(){ sys.p('three'); }
+]);
+
+node> var iterator2 = iterator();
+'one'
+node> var iterator3 = iterator2();
+'two'
+node> iterator3();
+'three'
+node> var nextfn = iterator2.next();
+node> nextfn();
+'three'
+```
+
+---------------------------------------
+
+
+### apply(function, arguments..)
+
+Creates a continuation function with some arguments already applied, a useful
+shorthand when combined with other control flow functions. Any arguments
+passed to the returned function are added to the arguments originally passed
+to apply.
+
+__Arguments__
+
+* function - The function you want to eventually apply all arguments to.
+* arguments... - Any number of arguments to automatically apply when the
+ continuation is called.
+
+__Example__
+
+```js
+// using apply
+
+async.parallel([
+ async.apply(fs.writeFile, 'testfile1', 'test1'),
+ async.apply(fs.writeFile, 'testfile2', 'test2'),
+]);
+
+
+// the same process without using apply
+
+async.parallel([
+ function(callback){
+ fs.writeFile('testfile1', 'test1', callback);
+ },
+ function(callback){
+ fs.writeFile('testfile2', 'test2', callback);
+ }
+]);
+```
+
+It's possible to pass any number of additional arguments when calling the
+continuation:
+
+```js
+node> var fn = async.apply(sys.puts, 'one');
+node> fn('two', 'three');
+one
+two
+three
+```
+
+---------------------------------------
+
+
+### nextTick(callback)
+
+Calls the callback on a later loop around the event loop. In node.js this just
+calls process.nextTick; in the browser it falls back to setImmediate(callback)
+if available, otherwise to setTimeout(callback, 0), which means other higher-priority
+events may precede the execution of the callback.
+
+This is used internally for browser-compatibility purposes.
+
+__Arguments__
+
+* callback - The function to call on a later loop around the event loop.
+
+__Example__
+
+```js
+var call_order = [];
+async.nextTick(function(){
+ call_order.push('two');
+ // call_order now equals ['one','two']
+});
+call_order.push('one')
+```
+
+
+### times(n, callback)
+
+Calls the callback n times and accumulates results in the same manner
+you would use with async.map.
+
+__Arguments__
+
+* n - The number of times to run the function.
+* callback - The function to call n times.
+
+__Example__
+
+```js
+// Pretend this is some complicated async factory
+var createUser = function(id, callback) {
+ callback(null, {
+ id: 'user' + id
+ })
+}
+// generate 5 users
+async.times(5, function(n, next){
+ createUser(n, function(err, user) {
+ next(err, user)
+ })
+}, function(err, users) {
+ // we should now have 5 users
+});
+```
+
+
+### timesSeries(n, callback)
+
+The same as times, only the iterator is applied in series rather than in
+parallel. The next iterator is only called once the current one has completed
+processing. The results array will be in the same order as the sequence of calls.
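+
+__Example__
+
+A minimal sketch of the series variant, reusing the createUser factory from the
+times example above:
+
+```js
+// generate 5 users strictly one after another
+async.timesSeries(5, function(n, next){
+    createUser(n, function(err, user) {
+        next(err, user)
+    })
+}, function(err, users) {
+    // users are created in order: user0, user1, ..., user4
+});
+```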
+
+
+## Utils
+
+
+### memoize(fn, [hasher])
+
+Caches the results of an async function. When creating a hash to store function
+results against, the callback is omitted from the hash and an optional hash
+function can be used.
+
+The cache of results is exposed as the `memo` property of the function returned
+by `memoize`.
+
+__Arguments__
+
+* fn - the function you want to proxy and cache results from.
+* hasher - an optional function for generating a custom hash for storing
+ results, it has all the arguments applied to it apart from the callback, and
+ must be synchronous.
+
+__Example__
+
+```js
+var slow_fn = function (name, callback) {
+ // do something
+ callback(null, result);
+};
+var fn = async.memoize(slow_fn);
+
+// fn can now be used as if it were slow_fn
+fn('some name', function () {
+ // callback
+});
+```
+
+
+### unmemoize(fn)
+
+Undoes a memoized function, reverting it to the original, unmemoized
+form. Comes in handy in tests.
+
+__Arguments__
+
+* fn - the memoized function
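+
+__Example__
+
+A short sketch, reusing the memoized fn from the memoize example above:
+
+```js
+// revert to the original, uncached behaviour (handy in tests)
+var original = async.unmemoize(fn);
+
+original('some name', function () {
+    // slow_fn runs again here instead of the cached result being returned
+});
+```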
+
+
+### log(function, arguments)
+
+Logs the result of an async function to the console. Only works in node.js or
+in browsers that support console.log and console.error (such as FF and Chrome).
+If multiple arguments are returned from the async function, console.log is
+called on each argument in order.
+
+__Arguments__
+
+* function - The function you want to eventually apply all arguments to.
+* arguments... - Any number of arguments to apply to the function.
+
+__Example__
+
+```js
+var hello = function(name, callback){
+ setTimeout(function(){
+ callback(null, 'hello ' + name);
+ }, 1000);
+};
+```
+```js
+node> async.log(hello, 'world');
+'hello world'
+```
+
+---------------------------------------
+
+
+### dir(function, arguments)
+
+Logs the result of an async function to the console using console.dir to
+display the properties of the resulting object. Only works in node.js or
+in browsers that support console.dir and console.error (such as FF and Chrome).
+If multiple arguments are returned from the async function, console.dir is
+called on each argument in order.
+
+__Arguments__
+
+* function - The function you want to eventually apply all arguments to.
+* arguments... - Any number of arguments to apply to the function.
+
+__Example__
+
+```js
+var hello = function(name, callback){
+ setTimeout(function(){
+ callback(null, {hello: name});
+ }, 1000);
+};
+```
+```js
+node> async.dir(hello, 'world');
+{hello: 'world'}
+```
+
+---------------------------------------
+
+
+### noConflict()
+
+Changes the value of async back to its original value, returning a reference to the
+async object.
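+
+__Example__
+
+A minimal sketch of browser usage, assuming another script also wants the global
+name `async`:
+
+```js
+// keep a private reference and hand the global `async` name back to
+// whatever value it held before this library was loaded
+var myAsync = async.noConflict();
+
+myAsync.each([1, 2, 3], function (item, callback) {
+    console.log(item);
+    callback();
+}, function (err) {
+    // all items logged
+});
+```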
diff --git a/node_modules/async/component.json b/node_modules/async/component.json
new file mode 100644
index 000000000..bbb011548
--- /dev/null
+++ b/node_modules/async/component.json
@@ -0,0 +1,11 @@
+{
+ "name": "async",
+ "repo": "caolan/async",
+ "description": "Higher-order functions and common patterns for asynchronous code",
+ "version": "0.1.23",
+ "keywords": [],
+ "dependencies": {},
+ "development": {},
+ "main": "lib/async.js",
+ "scripts": [ "lib/async.js" ]
+}
diff --git a/node_modules/async/lib/async.js b/node_modules/async/lib/async.js
new file mode 100755
index 000000000..1eebb153f
--- /dev/null
+++ b/node_modules/async/lib/async.js
@@ -0,0 +1,958 @@
+/*global setImmediate: false, setTimeout: false, console: false */
+(function () {
+
+ var async = {};
+
+ // global on the server, window in the browser
+ var root, previous_async;
+
+ root = this;
+ if (root != null) {
+ previous_async = root.async;
+ }
+
+ async.noConflict = function () {
+ root.async = previous_async;
+ return async;
+ };
+
+ function only_once(fn) {
+ var called = false;
+ return function() {
+ if (called) throw new Error("Callback was already called.");
+ called = true;
+ fn.apply(root, arguments);
+ }
+ }
+
+ //// cross-browser compatiblity functions ////
+
+ var _each = function (arr, iterator) {
+ if (arr.forEach) {
+ return arr.forEach(iterator);
+ }
+ for (var i = 0; i < arr.length; i += 1) {
+ iterator(arr[i], i, arr);
+ }
+ };
+
+ var _map = function (arr, iterator) {
+ if (arr.map) {
+ return arr.map(iterator);
+ }
+ var results = [];
+ _each(arr, function (x, i, a) {
+ results.push(iterator(x, i, a));
+ });
+ return results;
+ };
+
+ var _reduce = function (arr, iterator, memo) {
+ if (arr.reduce) {
+ return arr.reduce(iterator, memo);
+ }
+ _each(arr, function (x, i, a) {
+ memo = iterator(memo, x, i, a);
+ });
+ return memo;
+ };
+
+ var _keys = function (obj) {
+ if (Object.keys) {
+ return Object.keys(obj);
+ }
+ var keys = [];
+ for (var k in obj) {
+ if (obj.hasOwnProperty(k)) {
+ keys.push(k);
+ }
+ }
+ return keys;
+ };
+
+ //// exported async module functions ////
+
+ //// nextTick implementation with browser-compatible fallback ////
+ if (typeof process === 'undefined' || !(process.nextTick)) {
+ if (typeof setImmediate === 'function') {
+ async.nextTick = function (fn) {
+ // not a direct alias for IE10 compatibility
+ setImmediate(fn);
+ };
+ async.setImmediate = async.nextTick;
+ }
+ else {
+ async.nextTick = function (fn) {
+ setTimeout(fn, 0);
+ };
+ async.setImmediate = async.nextTick;
+ }
+ }
+ else {
+ async.nextTick = process.nextTick;
+ if (typeof setImmediate !== 'undefined') {
+ async.setImmediate = function (fn) {
+ // not a direct alias for IE10 compatibility
+ setImmediate(fn);
+ };
+ }
+ else {
+ async.setImmediate = async.nextTick;
+ }
+ }
+
+ async.each = function (arr, iterator, callback) {
+ callback = callback || function () {};
+ if (!arr.length) {
+ return callback();
+ }
+ var completed = 0;
+ _each(arr, function (x) {
+ iterator(x, only_once(function (err) {
+ if (err) {
+ callback(err);
+ callback = function () {};
+ }
+ else {
+ completed += 1;
+ if (completed >= arr.length) {
+ callback(null);
+ }
+ }
+ }));
+ });
+ };
+ async.forEach = async.each;
+
+ async.eachSeries = function (arr, iterator, callback) {
+ callback = callback || function () {};
+ if (!arr.length) {
+ return callback();
+ }
+ var completed = 0;
+ var iterate = function () {
+ iterator(arr[completed], function (err) {
+ if (err) {
+ callback(err);
+ callback = function () {};
+ }
+ else {
+ completed += 1;
+ if (completed >= arr.length) {
+ callback(null);
+ }
+ else {
+ iterate();
+ }
+ }
+ });
+ };
+ iterate();
+ };
+ async.forEachSeries = async.eachSeries;
+
+ async.eachLimit = function (arr, limit, iterator, callback) {
+ var fn = _eachLimit(limit);
+ fn.apply(null, [arr, iterator, callback]);
+ };
+ async.forEachLimit = async.eachLimit;
+
+ var _eachLimit = function (limit) {
+
+ return function (arr, iterator, callback) {
+ callback = callback || function () {};
+ if (!arr.length || limit <= 0) {
+ return callback();
+ }
+ var completed = 0;
+ var started = 0;
+ var running = 0;
+
+ (function replenish () {
+ if (completed >= arr.length) {
+ return callback();
+ }
+
+ while (running < limit && started < arr.length) {
+ started += 1;
+ running += 1;
+ iterator(arr[started - 1], function (err) {
+ if (err) {
+ callback(err);
+ callback = function () {};
+ }
+ else {
+ completed += 1;
+ running -= 1;
+ if (completed >= arr.length) {
+ callback();
+ }
+ else {
+ replenish();
+ }
+ }
+ });
+ }
+ })();
+ };
+ };
+
+
+ var doParallel = function (fn) {
+ return function () {
+ var args = Array.prototype.slice.call(arguments);
+ return fn.apply(null, [async.each].concat(args));
+ };
+ };
+ var doParallelLimit = function(limit, fn) {
+ return function () {
+ var args = Array.prototype.slice.call(arguments);
+ return fn.apply(null, [_eachLimit(limit)].concat(args));
+ };
+ };
+ var doSeries = function (fn) {
+ return function () {
+ var args = Array.prototype.slice.call(arguments);
+ return fn.apply(null, [async.eachSeries].concat(args));
+ };
+ };
+
+
+ var _asyncMap = function (eachfn, arr, iterator, callback) {
+ var results = [];
+ arr = _map(arr, function (x, i) {
+ return {index: i, value: x};
+ });
+ eachfn(arr, function (x, callback) {
+ iterator(x.value, function (err, v) {
+ results[x.index] = v;
+ callback(err);
+ });
+ }, function (err) {
+ callback(err, results);
+ });
+ };
+ async.map = doParallel(_asyncMap);
+ async.mapSeries = doSeries(_asyncMap);
+ async.mapLimit = function (arr, limit, iterator, callback) {
+ return _mapLimit(limit)(arr, iterator, callback);
+ };
+
+ var _mapLimit = function(limit) {
+ return doParallelLimit(limit, _asyncMap);
+ };
+
+ // reduce only has a series version, as doing reduce in parallel won't
+ // work in many situations.
+ async.reduce = function (arr, memo, iterator, callback) {
+ async.eachSeries(arr, function (x, callback) {
+ iterator(memo, x, function (err, v) {
+ memo = v;
+ callback(err);
+ });
+ }, function (err) {
+ callback(err, memo);
+ });
+ };
+ // inject alias
+ async.inject = async.reduce;
+ // foldl alias
+ async.foldl = async.reduce;
+
+ async.reduceRight = function (arr, memo, iterator, callback) {
+ var reversed = _map(arr, function (x) {
+ return x;
+ }).reverse();
+ async.reduce(reversed, memo, iterator, callback);
+ };
+ // foldr alias
+ async.foldr = async.reduceRight;
+
+ var _filter = function (eachfn, arr, iterator, callback) {
+ var results = [];
+ arr = _map(arr, function (x, i) {
+ return {index: i, value: x};
+ });
+ eachfn(arr, function (x, callback) {
+ iterator(x.value, function (v) {
+ if (v) {
+ results.push(x);
+ }
+ callback();
+ });
+ }, function (err) {
+ callback(_map(results.sort(function (a, b) {
+ return a.index - b.index;
+ }), function (x) {
+ return x.value;
+ }));
+ });
+ };
+ async.filter = doParallel(_filter);
+ async.filterSeries = doSeries(_filter);
+ // select alias
+ async.select = async.filter;
+ async.selectSeries = async.filterSeries;
+
+ var _reject = function (eachfn, arr, iterator, callback) {
+ var results = [];
+ arr = _map(arr, function (x, i) {
+ return {index: i, value: x};
+ });
+ eachfn(arr, function (x, callback) {
+ iterator(x.value, function (v) {
+ if (!v) {
+ results.push(x);
+ }
+ callback();
+ });
+ }, function (err) {
+ callback(_map(results.sort(function (a, b) {
+ return a.index - b.index;
+ }), function (x) {
+ return x.value;
+ }));
+ });
+ };
+ async.reject = doParallel(_reject);
+ async.rejectSeries = doSeries(_reject);
+
+ var _detect = function (eachfn, arr, iterator, main_callback) {
+ eachfn(arr, function (x, callback) {
+ iterator(x, function (result) {
+ if (result) {
+ main_callback(x);
+ main_callback = function () {};
+ }
+ else {
+ callback();
+ }
+ });
+ }, function (err) {
+ main_callback();
+ });
+ };
+ async.detect = doParallel(_detect);
+ async.detectSeries = doSeries(_detect);
+
+ async.some = function (arr, iterator, main_callback) {
+ async.each(arr, function (x, callback) {
+ iterator(x, function (v) {
+ if (v) {
+ main_callback(true);
+ main_callback = function () {};
+ }
+ callback();
+ });
+ }, function (err) {
+ main_callback(false);
+ });
+ };
+ // any alias
+ async.any = async.some;
+
+ async.every = function (arr, iterator, main_callback) {
+ async.each(arr, function (x, callback) {
+ iterator(x, function (v) {
+ if (!v) {
+ main_callback(false);
+ main_callback = function () {};
+ }
+ callback();
+ });
+ }, function (err) {
+ main_callback(true);
+ });
+ };
+ // all alias
+ async.all = async.every;
+
+ async.sortBy = function (arr, iterator, callback) {
+ async.map(arr, function (x, callback) {
+ iterator(x, function (err, criteria) {
+ if (err) {
+ callback(err);
+ }
+ else {
+ callback(null, {value: x, criteria: criteria});
+ }
+ });
+ }, function (err, results) {
+ if (err) {
+ return callback(err);
+ }
+ else {
+ var fn = function (left, right) {
+ var a = left.criteria, b = right.criteria;
+ return a < b ? -1 : a > b ? 1 : 0;
+ };
+ callback(null, _map(results.sort(fn), function (x) {
+ return x.value;
+ }));
+ }
+ });
+ };
+
+ async.auto = function (tasks, callback) {
+ callback = callback || function () {};
+ var keys = _keys(tasks);
+ if (!keys.length) {
+ return callback(null);
+ }
+
+ var results = {};
+
+ var listeners = [];
+ var addListener = function (fn) {
+ listeners.unshift(fn);
+ };
+ var removeListener = function (fn) {
+ for (var i = 0; i < listeners.length; i += 1) {
+ if (listeners[i] === fn) {
+ listeners.splice(i, 1);
+ return;
+ }
+ }
+ };
+ var taskComplete = function () {
+ _each(listeners.slice(0), function (fn) {
+ fn();
+ });
+ };
+
+ addListener(function () {
+ if (_keys(results).length === keys.length) {
+ callback(null, results);
+ callback = function () {};
+ }
+ });
+
+ _each(keys, function (k) {
+ var task = (tasks[k] instanceof Function) ? [tasks[k]]: tasks[k];
+ var taskCallback = function (err) {
+ var args = Array.prototype.slice.call(arguments, 1);
+ if (args.length <= 1) {
+ args = args[0];
+ }
+ if (err) {
+ var safeResults = {};
+ _each(_keys(results), function(rkey) {
+ safeResults[rkey] = results[rkey];
+ });
+ safeResults[k] = args;
+ callback(err, safeResults);
+ // stop subsequent errors hitting callback multiple times
+ callback = function () {};
+ }
+ else {
+ results[k] = args;
+ async.setImmediate(taskComplete);
+ }
+ };
+ var requires = task.slice(0, Math.abs(task.length - 1)) || [];
+ var ready = function () {
+ return _reduce(requires, function (a, x) {
+ return (a && results.hasOwnProperty(x));
+ }, true) && !results.hasOwnProperty(k);
+ };
+ if (ready()) {
+ task[task.length - 1](taskCallback, results);
+ }
+ else {
+ var listener = function () {
+ if (ready()) {
+ removeListener(listener);
+ task[task.length - 1](taskCallback, results);
+ }
+ };
+ addListener(listener);
+ }
+ });
+ };
+
+ async.waterfall = function (tasks, callback) {
+ callback = callback || function () {};
+ if (tasks.constructor !== Array) {
+ var err = new Error('First argument to waterfall must be an array of functions');
+ return callback(err);
+ }
+ if (!tasks.length) {
+ return callback();
+ }
+ var wrapIterator = function (iterator) {
+ return function (err) {
+ if (err) {
+ callback.apply(null, arguments);
+ callback = function () {};
+ }
+ else {
+ var args = Array.prototype.slice.call(arguments, 1);
+ var next = iterator.next();
+ if (next) {
+ args.push(wrapIterator(next));
+ }
+ else {
+ args.push(callback);
+ }
+ async.setImmediate(function () {
+ iterator.apply(null, args);
+ });
+ }
+ };
+ };
+ wrapIterator(async.iterator(tasks))();
+ };
+
+ var _parallel = function(eachfn, tasks, callback) {
+ callback = callback || function () {};
+ if (tasks.constructor === Array) {
+ eachfn.map(tasks, function (fn, callback) {
+ if (fn) {
+ fn(function (err) {
+ var args = Array.prototype.slice.call(arguments, 1);
+ if (args.length <= 1) {
+ args = args[0];
+ }
+ callback.call(null, err, args);
+ });
+ }
+ }, callback);
+ }
+ else {
+ var results = {};
+ eachfn.each(_keys(tasks), function (k, callback) {
+ tasks[k](function (err) {
+ var args = Array.prototype.slice.call(arguments, 1);
+ if (args.length <= 1) {
+ args = args[0];
+ }
+ results[k] = args;
+ callback(err);
+ });
+ }, function (err) {
+ callback(err, results);
+ });
+ }
+ };
+
+ async.parallel = function (tasks, callback) {
+ _parallel({ map: async.map, each: async.each }, tasks, callback);
+ };
+
+ async.parallelLimit = function(tasks, limit, callback) {
+ _parallel({ map: _mapLimit(limit), each: _eachLimit(limit) }, tasks, callback);
+ };
+
+ async.series = function (tasks, callback) {
+ callback = callback || function () {};
+ if (tasks.constructor === Array) {
+ async.mapSeries(tasks, function (fn, callback) {
+ if (fn) {
+ fn(function (err) {
+ var args = Array.prototype.slice.call(arguments, 1);
+ if (args.length <= 1) {
+ args = args[0];
+ }
+ callback.call(null, err, args);
+ });
+ }
+ }, callback);
+ }
+ else {
+ var results = {};
+ async.eachSeries(_keys(tasks), function (k, callback) {
+ tasks[k](function (err) {
+ var args = Array.prototype.slice.call(arguments, 1);
+ if (args.length <= 1) {
+ args = args[0];
+ }
+ results[k] = args;
+ callback(err);
+ });
+ }, function (err) {
+ callback(err, results);
+ });
+ }
+ };
+
+ async.iterator = function (tasks) {
+ var makeCallback = function (index) {
+ var fn = function () {
+ if (tasks.length) {
+ tasks[index].apply(null, arguments);
+ }
+ return fn.next();
+ };
+ fn.next = function () {
+ return (index < tasks.length - 1) ? makeCallback(index + 1): null;
+ };
+ return fn;
+ };
+ return makeCallback(0);
+ };
+
+ async.apply = function (fn) {
+ var args = Array.prototype.slice.call(arguments, 1);
+ return function () {
+ return fn.apply(
+ null, args.concat(Array.prototype.slice.call(arguments))
+ );
+ };
+ };
+
+ var _concat = function (eachfn, arr, fn, callback) {
+ var r = [];
+ eachfn(arr, function (x, cb) {
+ fn(x, function (err, y) {
+ r = r.concat(y || []);
+ cb(err);
+ });
+ }, function (err) {
+ callback(err, r);
+ });
+ };
+ async.concat = doParallel(_concat);
+ async.concatSeries = doSeries(_concat);
+
+ async.whilst = function (test, iterator, callback) {
+ if (test()) {
+ iterator(function (err) {
+ if (err) {
+ return callback(err);
+ }
+ async.whilst(test, iterator, callback);
+ });
+ }
+ else {
+ callback();
+ }
+ };
+
+ async.doWhilst = function (iterator, test, callback) {
+ iterator(function (err) {
+ if (err) {
+ return callback(err);
+ }
+ if (test()) {
+ async.doWhilst(iterator, test, callback);
+ }
+ else {
+ callback();
+ }
+ });
+ };
+
+ async.until = function (test, iterator, callback) {
+ if (!test()) {
+ iterator(function (err) {
+ if (err) {
+ return callback(err);
+ }
+ async.until(test, iterator, callback);
+ });
+ }
+ else {
+ callback();
+ }
+ };
+
+ async.doUntil = function (iterator, test, callback) {
+ iterator(function (err) {
+ if (err) {
+ return callback(err);
+ }
+ if (!test()) {
+ async.doUntil(iterator, test, callback);
+ }
+ else {
+ callback();
+ }
+ });
+ };
+
+ async.queue = function (worker, concurrency) {
+ if (concurrency === undefined) {
+ concurrency = 1;
+ }
+ function _insert(q, data, pos, callback) {
+ if(data.constructor !== Array) {
+ data = [data];
+ }
+ _each(data, function(task) {
+ var item = {
+ data: task,
+ callback: typeof callback === 'function' ? callback : null
+ };
+
+ if (pos) {
+ q.tasks.unshift(item);
+ } else {
+ q.tasks.push(item);
+ }
+
+ if (q.saturated && q.tasks.length === concurrency) {
+ q.saturated();
+ }
+ async.setImmediate(q.process);
+ });
+ }
+
+ var workers = 0;
+ var q = {
+ tasks: [],
+ concurrency: concurrency,
+ saturated: null,
+ empty: null,
+ drain: null,
+ push: function (data, callback) {
+ _insert(q, data, false, callback);
+ },
+ unshift: function (data, callback) {
+ _insert(q, data, true, callback);
+ },
+ process: function () {
+ if (workers < q.concurrency && q.tasks.length) {
+ var task = q.tasks.shift();
+ if (q.empty && q.tasks.length === 0) {
+ q.empty();
+ }
+ workers += 1;
+ var next = function () {
+ workers -= 1;
+ if (task.callback) {
+ task.callback.apply(task, arguments);
+ }
+ if (q.drain && q.tasks.length + workers === 0) {
+ q.drain();
+ }
+ q.process();
+ };
+ var cb = only_once(next);
+ worker(task.data, cb);
+ }
+ },
+ length: function () {
+ return q.tasks.length;
+ },
+ running: function () {
+ return workers;
+ }
+ };
+ return q;
+ };
+
+ async.cargo = function (worker, payload) {
+ var working = false,
+ tasks = [];
+
+ var cargo = {
+ tasks: tasks,
+ payload: payload,
+ saturated: null,
+ empty: null,
+ drain: null,
+ push: function (data, callback) {
+ if(data.constructor !== Array) {
+ data = [data];
+ }
+ _each(data, function(task) {
+ tasks.push({
+ data: task,
+ callback: typeof callback === 'function' ? callback : null
+ });
+ if (cargo.saturated && tasks.length === payload) {
+ cargo.saturated();
+ }
+ });
+ async.setImmediate(cargo.process);
+ },
+ process: function process() {
+ if (working) return;
+ if (tasks.length === 0) {
+ if(cargo.drain) cargo.drain();
+ return;
+ }
+
+ var ts = typeof payload === 'number'
+ ? tasks.splice(0, payload)
+ : tasks.splice(0);
+
+ var ds = _map(ts, function (task) {
+ return task.data;
+ });
+
+ if(cargo.empty) cargo.empty();
+ working = true;
+ worker(ds, function () {
+ working = false;
+
+ var args = arguments;
+ _each(ts, function (data) {
+ if (data.callback) {
+ data.callback.apply(null, args);
+ }
+ });
+
+ process();
+ });
+ },
+ length: function () {
+ return tasks.length;
+ },
+ running: function () {
+ return working;
+ }
+ };
+ return cargo;
+ };
+
+ var _console_fn = function (name) {
+ return function (fn) {
+ var args = Array.prototype.slice.call(arguments, 1);
+ fn.apply(null, args.concat([function (err) {
+ var args = Array.prototype.slice.call(arguments, 1);
+ if (typeof console !== 'undefined') {
+ if (err) {
+ if (console.error) {
+ console.error(err);
+ }
+ }
+ else if (console[name]) {
+ _each(args, function (x) {
+ console[name](x);
+ });
+ }
+ }
+ }]));
+ };
+ };
+ async.log = _console_fn('log');
+ async.dir = _console_fn('dir');
+ /*async.info = _console_fn('info');
+ async.warn = _console_fn('warn');
+ async.error = _console_fn('error');*/
+
+ async.memoize = function (fn, hasher) {
+ var memo = {};
+ var queues = {};
+ hasher = hasher || function (x) {
+ return x;
+ };
+ var memoized = function () {
+ var args = Array.prototype.slice.call(arguments);
+ var callback = args.pop();
+ var key = hasher.apply(null, args);
+ if (key in memo) {
+ callback.apply(null, memo[key]);
+ }
+ else if (key in queues) {
+ queues[key].push(callback);
+ }
+ else {
+ queues[key] = [callback];
+ fn.apply(null, args.concat([function () {
+ memo[key] = arguments;
+ var q = queues[key];
+ delete queues[key];
+ for (var i = 0, l = q.length; i < l; i++) {
+ q[i].apply(null, arguments);
+ }
+ }]));
+ }
+ };
+ memoized.memo = memo;
+ memoized.unmemoized = fn;
+ return memoized;
+ };
+
+ async.unmemoize = function (fn) {
+ return function () {
+ return (fn.unmemoized || fn).apply(null, arguments);
+ };
+ };
+
+ async.times = function (count, iterator, callback) {
+ var counter = [];
+ for (var i = 0; i < count; i++) {
+ counter.push(i);
+ }
+ return async.map(counter, iterator, callback);
+ };
+
+ async.timesSeries = function (count, iterator, callback) {
+ var counter = [];
+ for (var i = 0; i < count; i++) {
+ counter.push(i);
+ }
+ return async.mapSeries(counter, iterator, callback);
+ };
+
+ async.compose = function (/* functions... */) {
+ var fns = Array.prototype.reverse.call(arguments);
+ return function () {
+ var that = this;
+ var args = Array.prototype.slice.call(arguments);
+ var callback = args.pop();
+ async.reduce(fns, args, function (newargs, fn, cb) {
+ fn.apply(that, newargs.concat([function () {
+ var err = arguments[0];
+ var nextargs = Array.prototype.slice.call(arguments, 1);
+ cb(err, nextargs);
+ }]))
+ },
+ function (err, results) {
+ callback.apply(that, [err].concat(results));
+ });
+ };
+ };
+
+ var _applyEach = function (eachfn, fns /*args...*/) {
+ var go = function () {
+ var that = this;
+ var args = Array.prototype.slice.call(arguments);
+ var callback = args.pop();
+ return eachfn(fns, function (fn, cb) {
+ fn.apply(that, args.concat([cb]));
+ },
+ callback);
+ };
+ if (arguments.length > 2) {
+ var args = Array.prototype.slice.call(arguments, 2);
+ return go.apply(this, args);
+ }
+ else {
+ return go;
+ }
+ };
+ async.applyEach = doParallel(_applyEach);
+ async.applyEachSeries = doSeries(_applyEach);
+
+ async.forever = function (fn, callback) {
+ function next(err) {
+ if (err) {
+ if (callback) {
+ return callback(err);
+ }
+ throw err;
+ }
+ fn(next);
+ }
+ next();
+ };
+
+ // AMD / RequireJS
+ if (typeof define !== 'undefined' && define.amd) {
+ define([], function () {
+ return async;
+ });
+ }
+ // Node.js
+ else if (typeof module !== 'undefined' && module.exports) {
+ module.exports = async;
+ }
+    // included directly via <script> tag
+    else {
+        root.async = async;
+    }
+
+}());
+
+
+
+
+ It's got two simple methods to use in your application.
+
+ * BSON.serialize(object, checkKeys, asBuffer, serializeFunctions)
+ * @param {Object} object the Javascript object to serialize.
+ * @param {Boolean} checkKeys the serializer will check if keys are valid.
+ * @param {Boolean} asBuffer return the serialized object as a Buffer object **(ignore)**.
+ * @param {Boolean} serializeFunctions serialize the javascript functions **(default:false)**
+ * @return {TypedArray/Array} returns a TypedArray or Array depending on what your browser supports
+
+ * BSON.deserialize(buffer, options, isArray)
+ * Options
+ * **evalFunctions** {Boolean, default:false}, evaluate functions in the BSON document scoped to the object deserialized.
+ * **cacheFunctions** {Boolean, default:false}, cache evaluated functions for reuse.
+ * **cacheFunctionsCrc32** {Boolean, default:false}, use a crc32 code for caching, otherwise use the string of the function.
+ * @param {TypedArray/Array} buffer a TypedArray/Array containing the BSON data.
+ * @param {Object} [options] additional options used for the deserialization.
+ * @param {Boolean} [isArray] ignored; used internally for recursive parsing.
+ * @return {Object} returns the deserialized Javascript Object.
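+
+A minimal usage sketch, assuming a BSON object exposing the two methods documented
+above is already available (this fragment does not show how to obtain one):
+
+```js
+var doc = { hello: 'world', n: 42 };
+
+// serialize with key checking enabled and function serialization disabled
+var bytes = BSON.serialize(doc, true, true, false);
+
+// round-trip the bytes back into a JavaScript object
+var decoded = BSON.deserialize(bytes);
+console.log(decoded.hello); // 'world'
+```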
diff --git a/node_modules/bson/binding.gyp b/node_modules/bson/binding.gyp
new file mode 100644
index 000000000..f308f3ea1
--- /dev/null
+++ b/node_modules/bson/binding.gyp
@@ -0,0 +1,17 @@
+{
+ 'targets': [
+ {
+ 'target_name': 'bson',
+ 'sources': [ 'ext/bson.cc' ],
+ 'cflags!': [ '-fno-exceptions' ],
+ 'cflags_cc!': [ '-fno-exceptions' ],
+ 'conditions': [
+ ['OS=="mac"', {
+ 'xcode_settings': {
+ 'GCC_ENABLE_CPP_EXCEPTIONS': 'YES'
+ }
+ }]
+ ]
+ }
+ ]
+}
diff --git a/node_modules/bson/browser_build/bson.js b/node_modules/bson/browser_build/bson.js
new file mode 100644
index 000000000..d86a00d4b
--- /dev/null
+++ b/node_modules/bson/browser_build/bson.js
@@ -0,0 +1,4843 @@
+var bson = (function(){
+
+ var pkgmap = {},
+ global = {},
+ nativeRequire = typeof require != 'undefined' && require,
+ lib, ties, main, async;
+
+ function exports(){ return main(); };
+
+ exports.main = exports;
+ exports.module = module;
+ exports.packages = pkgmap;
+ exports.pkg = pkg;
+ exports.require = function require(uri){
+ return pkgmap.main.index.require(uri);
+ };
+
+
+ ties = {};
+
+ aliases = {};
+
+
+ return exports;
+
+function join() {
+ return normalize(Array.prototype.join.call(arguments, "/"));
+};
+
+function normalize(path) {
+ var ret = [], parts = path.split('/'), cur, prev;
+
+ var i = 0, l = parts.length-1;
+ for (; i <= l; i++) {
+ cur = parts[i];
+
+ if (cur === "." && prev !== undefined) continue;
+
+ if (cur === ".." && ret.length && prev !== ".." && prev !== "." && prev !== undefined) {
+ ret.pop();
+ prev = ret.slice(-1)[0];
+ } else {
+ if (prev === ".") ret.pop();
+ ret.push(cur);
+ prev = cur;
+ }
+ }
+
+ return ret.join("/");
+};
+
+function dirname(path) {
+ return path && path.substr(0, path.lastIndexOf("/")) || ".";
+};
+
+function findModule(workingModule, uri){
+ var moduleId = join(dirname(workingModule.id), /\.\/$/.test(uri) ? (uri + 'index') : uri ).replace(/\.js$/, ''),
+ moduleIndexId = join(moduleId, 'index'),
+ pkg = workingModule.pkg,
+ module;
+
+ var i = pkg.modules.length,
+ id;
+
+ while(i-->0){
+ id = pkg.modules[i].id;
+
+ if(id==moduleId || id == moduleIndexId){
+ module = pkg.modules[i];
+ break;
+ }
+ }
+
+ return module;
+}
+
+function newRequire(callingModule){
+ function require(uri){
+ var module, pkg;
+
+ if(/^\./.test(uri)){
+ module = findModule(callingModule, uri);
+ } else if ( ties && ties.hasOwnProperty( uri ) ) {
+ return ties[uri];
+ } else if ( aliases && aliases.hasOwnProperty( uri ) ) {
+ return require(aliases[uri]);
+ } else {
+ pkg = pkgmap[uri];
+
+ if(!pkg && nativeRequire){
+ try {
+ pkg = nativeRequire(uri);
+ } catch (nativeRequireError) {}
+
+ if(pkg) return pkg;
+ }
+
+ if(!pkg){
+ throw new Error('Cannot find module "'+uri+'" @[module: '+callingModule.id+' package: '+callingModule.pkg.name+']');
+ }
+
+ module = pkg.index;
+ }
+
+ if(!module){
+ throw new Error('Cannot find module "'+uri+'" @[module: '+callingModule.id+' package: '+callingModule.pkg.name+']');
+ }
+
+ module.parent = callingModule;
+ return module.call();
+ };
+
+
+ return require;
+}
+
+
+function module(parent, id, wrapper){
+ var mod = { pkg: parent, id: id, wrapper: wrapper },
+ cached = false;
+
+ mod.exports = {};
+ mod.require = newRequire(mod);
+
+ mod.call = function(){
+ if(cached) {
+ return mod.exports;
+ }
+
+ cached = true;
+
+ global.require = mod.require;
+
+ mod.wrapper(mod, mod.exports, global, global.require);
+ return mod.exports;
+ };
+
+ if(parent.mainModuleId == mod.id){
+ parent.index = mod;
+ parent.parents.length === 0 && ( main = mod.call );
+ }
+
+ parent.modules.push(mod);
+}
+
+function pkg(/* [ parentId ...], wrapper */){
+ var wrapper = arguments[ arguments.length - 1 ],
+ parents = Array.prototype.slice.call(arguments, 0, arguments.length - 1),
+ ctx = wrapper(parents);
+
+
+ pkgmap[ctx.name] = ctx;
+
+ arguments.length == 1 && ( pkgmap.main = ctx );
+
+ return function(modules){
+ var id;
+ for(id in modules){
+ module(ctx, id, modules[id]);
+ }
+ };
+}
+
+
+}(this));
+
+bson.pkg(function(parents){
+
+ return {
+ 'name' : 'bson',
+ 'mainModuleId' : 'bson',
+ 'modules' : [],
+ 'parents' : parents
+ };
+
+})({ 'binary': function(module, exports, global, require, undefined){
+ /**
+ * Module dependencies.
+ */
+if(typeof window === 'undefined') {
+ var Buffer = require('buffer').Buffer; // TODO just use global Buffer
+}
+
+// Binary default subtype
+var BSON_BINARY_SUBTYPE_DEFAULT = 0;
+
+/**
+ * @ignore
+ * @api private
+ */
+var writeStringToArray = function(data) {
+ // Create a buffer
+ var buffer = typeof Uint8Array != 'undefined' ? new Uint8Array(new ArrayBuffer(data.length)) : new Array(data.length);
+ // Write the content to the buffer
+ for(var i = 0; i < data.length; i++) {
+ buffer[i] = data.charCodeAt(i);
+ }
+ // Write the string to the buffer
+ return buffer;
+}
+
+/**
+ * Convert Array ot Uint8Array to Binary String
+ *
+ * @ignore
+ * @api private
+ */
+var convertArraytoUtf8BinaryString = function(byteArray, startIndex, endIndex) {
+ var result = "";
+ for(var i = startIndex; i < endIndex; i++) {
+ result = result + String.fromCharCode(byteArray[i]);
+ }
+ return result;
+};
+
+/**
+ * A class representation of the BSON Binary type.
+ *
+ * Sub types
+ * - **BSON.BSON_BINARY_SUBTYPE_DEFAULT**, default BSON type.
+ * - **BSON.BSON_BINARY_SUBTYPE_FUNCTION**, BSON function type.
+ * - **BSON.BSON_BINARY_SUBTYPE_BYTE_ARRAY**, BSON byte array type.
+ * - **BSON.BSON_BINARY_SUBTYPE_UUID**, BSON uuid type.
+ * - **BSON.BSON_BINARY_SUBTYPE_MD5**, BSON md5 type.
+ * - **BSON.BSON_BINARY_SUBTYPE_USER_DEFINED**, BSON user defined type.
+ *
+ * @class Represents the Binary BSON type.
+ * @param {Buffer} buffer a buffer object containing the binary data.
+ * @param {Number} [subType] the option binary type.
+ * @return {Grid}
+ */
+function Binary(buffer, subType) {
+ if(!(this instanceof Binary)) return new Binary(buffer, subType);
+
+ this._bsontype = 'Binary';
+
+ if(buffer instanceof Number) {
+ this.sub_type = buffer;
+ this.position = 0;
+ } else {
+ this.sub_type = subType == null ? BSON_BINARY_SUBTYPE_DEFAULT : subType;
+ this.position = 0;
+ }
+
+ if(buffer != null && !(buffer instanceof Number)) {
+ // Only accept Buffer, Uint8Array or Arrays
+ if(typeof buffer == 'string') {
+ // Different ways of writing the length of the string for the different types
+ if(typeof Buffer != 'undefined') {
+ this.buffer = new Buffer(buffer);
+ } else if(typeof Uint8Array != 'undefined' || (Object.prototype.toString.call(buffer) == '[object Array]')) {
+ this.buffer = writeStringToArray(buffer);
+ } else {
+ throw new Error("only String, Buffer, Uint8Array or Array accepted");
+ }
+ } else {
+ this.buffer = buffer;
+ }
+ this.position = buffer.length;
+ } else {
+ if(typeof Buffer != 'undefined') {
+ this.buffer = new Buffer(Binary.BUFFER_SIZE);
+ } else if(typeof Uint8Array != 'undefined'){
+ this.buffer = new Uint8Array(new ArrayBuffer(Binary.BUFFER_SIZE));
+ } else {
+ this.buffer = new Array(Binary.BUFFER_SIZE);
+ }
+ // Set position to start of buffer
+ this.position = 0;
+ }
+};
+
+/**
+ * Updates this binary with byte_value.
+ *
+ * @param {Character} byte_value a single byte we wish to write.
+ * @api public
+ */
+Binary.prototype.put = function put(byte_value) {
+ // If it's a string and a has more than one character throw an error
+ if(byte_value['length'] != null && typeof byte_value != 'number' && byte_value.length != 1) throw new Error("only accepts single character String, Uint8Array or Array");
+ if(typeof byte_value != 'number' && byte_value < 0 || byte_value > 255) throw new Error("only accepts number in a valid unsigned byte range 0-255");
+
+ // Decode the byte value once
+ var decoded_byte = null;
+ if(typeof byte_value == 'string') {
+ decoded_byte = byte_value.charCodeAt(0);
+ } else if(byte_value['length'] != null) {
+ decoded_byte = byte_value[0];
+ } else {
+ decoded_byte = byte_value;
+ }
+
+ if(this.buffer.length > this.position) {
+ this.buffer[this.position++] = decoded_byte;
+ } else {
+ if(typeof Buffer != 'undefined' && Buffer.isBuffer(this.buffer)) {
+ // Create additional overflow buffer
+ var buffer = new Buffer(Binary.BUFFER_SIZE + this.buffer.length);
+ // Combine the two buffers together
+ this.buffer.copy(buffer, 0, 0, this.buffer.length);
+ this.buffer = buffer;
+ this.buffer[this.position++] = decoded_byte;
+ } else {
+ var buffer = null;
+ // Create a new buffer (typed or normal array)
+ if(Object.prototype.toString.call(this.buffer) == '[object Uint8Array]') {
+ buffer = new Uint8Array(new ArrayBuffer(Binary.BUFFER_SIZE + this.buffer.length));
+ } else {
+ buffer = new Array(Binary.BUFFER_SIZE + this.buffer.length);
+ }
+
+ // We need to copy all the content to the new array
+ for(var i = 0; i < this.buffer.length; i++) {
+ buffer[i] = this.buffer[i];
+ }
+
+ // Reassign the buffer
+ this.buffer = buffer;
+ // Write the byte
+ this.buffer[this.position++] = decoded_byte;
+ }
+ }
+};
+
+/**
+ * Writes a buffer or string to the binary.
+ *
+ * @param {Buffer|String} string a string or buffer to be written to the Binary BSON object.
+ * @param {Number} offset specify the binary of where to write the content.
+ * @api public
+ */
+Binary.prototype.write = function write(string, offset) {
+ offset = typeof offset == 'number' ? offset : this.position;
+
+ // If the buffer is to small let's extend the buffer
+ if(this.buffer.length < offset + string.length) {
+ var buffer = null;
+ // If we are in node.js
+ if(typeof Buffer != 'undefined' && Buffer.isBuffer(this.buffer)) {
+ buffer = new Buffer(this.buffer.length + string.length);
+ this.buffer.copy(buffer, 0, 0, this.buffer.length);
+ } else if(Object.prototype.toString.call(this.buffer) == '[object Uint8Array]') {
+ // Create a new buffer
+ buffer = new Uint8Array(new ArrayBuffer(this.buffer.length + string.length))
+ // Copy the content
+ for(var i = 0; i < this.position; i++) {
+ buffer[i] = this.buffer[i];
+ }
+ }
+
+ // Assign the new buffer
+ this.buffer = buffer;
+ }
+
+ if(typeof Buffer != 'undefined' && Buffer.isBuffer(string) && Buffer.isBuffer(this.buffer)) {
+ string.copy(this.buffer, offset, 0, string.length);
+ this.position = (offset + string.length) > this.position ? (offset + string.length) : this.position;
+ // offset = string.length
+ } else if(typeof Buffer != 'undefined' && typeof string == 'string' && Buffer.isBuffer(this.buffer)) {
+ this.buffer.write(string, 'binary', offset);
+ this.position = (offset + string.length) > this.position ? (offset + string.length) : this.position;
+ // offset = string.length;
+ } else if(Object.prototype.toString.call(string) == '[object Uint8Array]'
+ || Object.prototype.toString.call(string) == '[object Array]' && typeof string != 'string') {
+ for(var i = 0; i < string.length; i++) {
+ this.buffer[offset++] = string[i];
+ }
+
+ this.position = offset > this.position ? offset : this.position;
+ } else if(typeof string == 'string') {
+ for(var i = 0; i < string.length; i++) {
+ this.buffer[offset++] = string.charCodeAt(i);
+ }
+
+ this.position = offset > this.position ? offset : this.position;
+ }
+};
+
+/**
+ * Reads **length** bytes starting at **position**.
+ *
+ * @param {Number} position read from the given position in the Binary.
+ * @param {Number} length the number of bytes to read.
+ * @return {Buffer}
+ * @api public
+ */
+Binary.prototype.read = function read(position, length) {
+ length = length && length > 0
+ ? length
+ : this.position;
+
+ // Let's return the data based on the type we have
+ if(this.buffer['slice']) {
+ return this.buffer.slice(position, position + length);
+ } else {
+ // Create a buffer to keep the result
+ var buffer = typeof Uint8Array != 'undefined' ? new Uint8Array(new ArrayBuffer(length)) : new Array(length);
+ for(var i = 0; i < length; i++) {
+ buffer[i] = this.buffer[position++];
+ }
+ }
+ // Return the buffer
+ return buffer;
+};
+
+/**
+ * Returns the value of this binary as a string.
+ *
+ * @return {String}
+ * @api public
+ */
+Binary.prototype.value = function value(asRaw) {
+ asRaw = asRaw == null ? false : asRaw;
+
+ // If it's a node.js buffer object
+ if(typeof Buffer != 'undefined' && Buffer.isBuffer(this.buffer)) {
+ return asRaw ? this.buffer.slice(0, this.position) : this.buffer.toString('binary', 0, this.position);
+ } else {
+ if(asRaw) {
+ // we support the slice command use it
+ if(this.buffer['slice'] != null) {
+ return this.buffer.slice(0, this.position);
+ } else {
+ // Create a new buffer to copy content to
+ var newBuffer = Object.prototype.toString.call(this.buffer) == '[object Uint8Array]' ? new Uint8Array(new ArrayBuffer(this.position)) : new Array(this.position);
+ // Copy content
+ for(var i = 0; i < this.position; i++) {
+ newBuffer[i] = this.buffer[i];
+ }
+ // Return the buffer
+ return newBuffer;
+ }
+ } else {
+ return convertArraytoUtf8BinaryString(this.buffer, 0, this.position);
+ }
+ }
+};
+
+/**
+ * Length.
+ *
+ * @return {Number} the length of the binary.
+ * @api public
+ */
+Binary.prototype.length = function length() {
+ return this.position;
+};
+
+/**
+ * @ignore
+ * @api private
+ */
+Binary.prototype.toJSON = function() {
+ return this.buffer != null ? this.buffer.toString('base64') : '';
+}
+
+/**
+ * @ignore
+ * @api private
+ */
+Binary.prototype.toString = function(format) {
+ return this.buffer != null ? this.buffer.slice(0, this.position).toString(format) : '';
+}
+
+Binary.BUFFER_SIZE = 256;
+
+/**
+ * Default BSON type
+ *
+ * @classconstant SUBTYPE_DEFAULT
+ **/
+Binary.SUBTYPE_DEFAULT = 0;
+/**
+ * Function BSON type
+ *
+ * @classconstant SUBTYPE_DEFAULT
+ **/
+Binary.SUBTYPE_FUNCTION = 1;
+/**
+ * Byte Array BSON type
+ *
+ * @classconstant SUBTYPE_DEFAULT
+ **/
+Binary.SUBTYPE_BYTE_ARRAY = 2;
+/**
+ * OLD UUID BSON type
+ *
+ * @classconstant SUBTYPE_DEFAULT
+ **/
+Binary.SUBTYPE_UUID_OLD = 3;
+/**
+ * UUID BSON type
+ *
+ * @classconstant SUBTYPE_DEFAULT
+ **/
+Binary.SUBTYPE_UUID = 4;
+/**
+ * MD5 BSON type
+ *
+ * @classconstant SUBTYPE_DEFAULT
+ **/
+Binary.SUBTYPE_MD5 = 5;
+/**
+ * User BSON type
+ *
+ * @classconstant SUBTYPE_DEFAULT
+ **/
+Binary.SUBTYPE_USER_DEFINED = 128;
+
+/**
+ * Expose.
+ */
+exports.Binary = Binary;
+
+
+},
+
+
+
+'binary_parser': function(module, exports, global, require, undefined){
+ /**
+ * Binary Parser.
+ * Jonas Raoni Soares Silva
+ * http://jsfromhell.com/classes/binary-parser [v1.0]
+ */
+var chr = String.fromCharCode;
+
+var maxBits = [];
+for (var i = 0; i < 64; i++) {
+ maxBits[i] = Math.pow(2, i);
+}
+
+function BinaryParser (bigEndian, allowExceptions) {
+ if(!(this instanceof BinaryParser)) return new BinaryParser(bigEndian, allowExceptions);
+
+ this.bigEndian = bigEndian;
+ this.allowExceptions = allowExceptions;
+};
+
+BinaryParser.warn = function warn (msg) {
+ if (this.allowExceptions) {
+ throw new Error(msg);
+ }
+
+ return 1;
+};
+
+BinaryParser.decodeFloat = function decodeFloat (data, precisionBits, exponentBits) {
+ var b = new this.Buffer(this.bigEndian, data);
+
+ b.checkBuffer(precisionBits + exponentBits + 1);
+
+ var bias = maxBits[exponentBits - 1] - 1
+ , signal = b.readBits(precisionBits + exponentBits, 1)
+ , exponent = b.readBits(precisionBits, exponentBits)
+ , significand = 0
+ , divisor = 2
+ , curByte = b.buffer.length + (-precisionBits >> 3) - 1;
+
+ do {
+ for (var byteValue = b.buffer[ ++curByte ], startBit = precisionBits % 8 || 8, mask = 1 << startBit; mask >>= 1; ( byteValue & mask ) && ( significand += 1 / divisor ), divisor *= 2 );
+ } while (precisionBits -= startBit);
+
+ return exponent == ( bias << 1 ) + 1 ? significand ? NaN : signal ? -Infinity : +Infinity : ( 1 + signal * -2 ) * ( exponent || significand ? !exponent ? Math.pow( 2, -bias + 1 ) * significand : Math.pow( 2, exponent - bias ) * ( 1 + significand ) : 0 );
+};
+
+BinaryParser.decodeInt = function decodeInt (data, bits, signed, forceBigEndian) {
+ var b = new this.Buffer(this.bigEndian || forceBigEndian, data)
+ , x = b.readBits(0, bits)
+ , max = maxBits[bits]; //max = Math.pow( 2, bits );
+
+ return signed && x >= max / 2
+ ? x - max
+ : x;
+};
+
+BinaryParser.encodeFloat = function encodeFloat (data, precisionBits, exponentBits) {
+ var bias = maxBits[exponentBits - 1] - 1
+ , minExp = -bias + 1
+ , maxExp = bias
+ , minUnnormExp = minExp - precisionBits
+ , n = parseFloat(data)
+ , status = isNaN(n) || n == -Infinity || n == +Infinity ? n : 0
+ , exp = 0
+ , len = 2 * bias + 1 + precisionBits + 3
+ , bin = new Array(len)
+ , signal = (n = status !== 0 ? 0 : n) < 0
+ , intPart = Math.floor(n = Math.abs(n))
+ , floatPart = n - intPart
+ , lastBit
+ , rounded
+ , result
+ , i
+ , j;
+
+ for (i = len; i; bin[--i] = 0);
+
+ for (i = bias + 2; intPart && i; bin[--i] = intPart % 2, intPart = Math.floor(intPart / 2));
+
+ for (i = bias + 1; floatPart > 0 && i; (bin[++i] = ((floatPart *= 2) >= 1) - 0 ) && --floatPart);
+
+ for (i = -1; ++i < len && !bin[i];);
+
+ if (bin[(lastBit = precisionBits - 1 + (i = (exp = bias + 1 - i) >= minExp && exp <= maxExp ? i + 1 : bias + 1 - (exp = minExp - 1))) + 1]) {
+ if (!(rounded = bin[lastBit])) {
+ for (j = lastBit + 2; !rounded && j < len; rounded = bin[j++]);
+ }
+
+ for (j = lastBit + 1; rounded && --j >= 0; (bin[j] = !bin[j] - 0) && (rounded = 0));
+ }
+
+ for (i = i - 2 < 0 ? -1 : i - 3; ++i < len && !bin[i];);
+
+ if ((exp = bias + 1 - i) >= minExp && exp <= maxExp) {
+ ++i;
+ } else if (exp < minExp) {
+ exp != bias + 1 - len && exp < minUnnormExp && this.warn("encodeFloat::float underflow");
+ i = bias + 1 - (exp = minExp - 1);
+ }
+
+ if (intPart || status !== 0) {
+ this.warn(intPart ? "encodeFloat::float overflow" : "encodeFloat::" + status);
+ exp = maxExp + 1;
+ i = bias + 2;
+
+ if (status == -Infinity) {
+ signal = 1;
+ } else if (isNaN(status)) {
+ bin[i] = 1;
+ }
+ }
+
+ for (n = Math.abs(exp + bias), j = exponentBits + 1, result = ""; --j; result = (n % 2) + result, n = n >>= 1);
+
+ for (n = 0, j = 0, i = (result = (signal ? "1" : "0") + result + bin.slice(i, i + precisionBits).join("")).length, r = []; i; j = (j + 1) % 8) {
+ n += (1 << j) * result.charAt(--i);
+ if (j == 7) {
+ r[r.length] = String.fromCharCode(n);
+ n = 0;
+ }
+ }
+
+ r[r.length] = n
+ ? String.fromCharCode(n)
+ : "";
+
+ return (this.bigEndian ? r.reverse() : r).join("");
+};
+
+BinaryParser.encodeInt = function encodeInt (data, bits, signed, forceBigEndian) {
+ var max = maxBits[bits];
+
+ if (data >= max || data < -(max / 2)) {
+ this.warn("encodeInt::overflow");
+ data = 0;
+ }
+
+ if (data < 0) {
+ data += max;
+ }
+
+ for (var r = []; data; r[r.length] = String.fromCharCode(data % 256), data = Math.floor(data / 256));
+
+ for (bits = -(-bits >> 3) - r.length; bits--; r[r.length] = "\0");
+
+ return ((this.bigEndian || forceBigEndian) ? r.reverse() : r).join("");
+};
+
+BinaryParser.toSmall = function( data ){ return this.decodeInt( data, 8, true ); };
+BinaryParser.fromSmall = function( data ){ return this.encodeInt( data, 8, true ); };
+BinaryParser.toByte = function( data ){ return this.decodeInt( data, 8, false ); };
+BinaryParser.fromByte = function( data ){ return this.encodeInt( data, 8, false ); };
+BinaryParser.toShort = function( data ){ return this.decodeInt( data, 16, true ); };
+BinaryParser.fromShort = function( data ){ return this.encodeInt( data, 16, true ); };
+BinaryParser.toWord = function( data ){ return this.decodeInt( data, 16, false ); };
+BinaryParser.fromWord = function( data ){ return this.encodeInt( data, 16, false ); };
+BinaryParser.toInt = function( data ){ return this.decodeInt( data, 32, true ); };
+BinaryParser.fromInt = function( data ){ return this.encodeInt( data, 32, true ); };
+BinaryParser.toLong = function( data ){ return this.decodeInt( data, 64, true ); };
+BinaryParser.fromLong = function( data ){ return this.encodeInt( data, 64, true ); };
+BinaryParser.toDWord = function( data ){ return this.decodeInt( data, 32, false ); };
+BinaryParser.fromDWord = function( data ){ return this.encodeInt( data, 32, false ); };
+BinaryParser.toQWord = function( data ){ return this.decodeInt( data, 64, true ); };
+BinaryParser.fromQWord = function( data ){ return this.encodeInt( data, 64, true ); };
+BinaryParser.toFloat = function( data ){ return this.decodeFloat( data, 23, 8 ); };
+BinaryParser.fromFloat = function( data ){ return this.encodeFloat( data, 23, 8 ); };
+BinaryParser.toDouble = function( data ){ return this.decodeFloat( data, 52, 11 ); };
+BinaryParser.fromDouble = function( data ){ return this.encodeFloat( data, 52, 11 ); };
+
+// Factor out the encode so it can be shared by add_header and push_int32
+BinaryParser.encode_int32 = function encode_int32 (number, asArray) {
+ var a, b, c, d, unsigned;
+ unsigned = (number < 0) ? (number + 0x100000000) : number;
+ a = Math.floor(unsigned / 0xffffff);
+ unsigned &= 0xffffff;
+ b = Math.floor(unsigned / 0xffff);
+ unsigned &= 0xffff;
+ c = Math.floor(unsigned / 0xff);
+ unsigned &= 0xff;
+ d = Math.floor(unsigned);
+ return asArray ? [chr(a), chr(b), chr(c), chr(d)] : chr(a) + chr(b) + chr(c) + chr(d);
+};
+
+BinaryParser.encode_int64 = function encode_int64 (number) {
+ var a, b, c, d, e, f, g, h, unsigned;
+ unsigned = (number < 0) ? (number + 0x10000000000000000) : number;
+ a = Math.floor(unsigned / 0xffffffffffffff);
+ unsigned &= 0xffffffffffffff;
+ b = Math.floor(unsigned / 0xffffffffffff);
+ unsigned &= 0xffffffffffff;
+ c = Math.floor(unsigned / 0xffffffffff);
+ unsigned &= 0xffffffffff;
+ d = Math.floor(unsigned / 0xffffffff);
+ unsigned &= 0xffffffff;
+ e = Math.floor(unsigned / 0xffffff);
+ unsigned &= 0xffffff;
+ f = Math.floor(unsigned / 0xffff);
+ unsigned &= 0xffff;
+ g = Math.floor(unsigned / 0xff);
+ unsigned &= 0xff;
+ h = Math.floor(unsigned);
+ return chr(a) + chr(b) + chr(c) + chr(d) + chr(e) + chr(f) + chr(g) + chr(h);
+};
+
+/**
+ * UTF8 methods
+ */
+
+// Take a raw binary string and return a utf8 string
+BinaryParser.decode_utf8 = function decode_utf8 (binaryStr) {
+ var len = binaryStr.length
+ , decoded = ''
+ , i = 0
+ , c = 0
+ , c1 = 0
+ , c2 = 0
+ , c3;
+
+ while (i < len) {
+ c = binaryStr.charCodeAt(i);
+ if (c < 128) {
+ decoded += String.fromCharCode(c);
+ i++;
+ } else if ((c > 191) && (c < 224)) {
+ c2 = binaryStr.charCodeAt(i+1);
+ decoded += String.fromCharCode(((c & 31) << 6) | (c2 & 63));
+ i += 2;
+ } else {
+ c2 = binaryStr.charCodeAt(i+1);
+ c3 = binaryStr.charCodeAt(i+2);
+ decoded += String.fromCharCode(((c & 15) << 12) | ((c2 & 63) << 6) | (c3 & 63));
+ i += 3;
+ }
+ }
+
+ return decoded;
+};
+
+// Encode a cstring
+BinaryParser.encode_cstring = function encode_cstring (s) {
+ return unescape(encodeURIComponent(s)) + BinaryParser.fromByte(0);
+};
+
+// Take a utf8 string and return a binary string
+BinaryParser.encode_utf8 = function encode_utf8 (s) {
+ var a = ""
+ , c;
+
+ for (var n = 0, len = s.length; n < len; n++) {
+ c = s.charCodeAt(n);
+
+ if (c < 128) {
+ a += String.fromCharCode(c);
+ } else if ((c > 127) && (c < 2048)) {
+ a += String.fromCharCode((c>>6) | 192) ;
+ a += String.fromCharCode((c&63) | 128);
+ } else {
+ a += String.fromCharCode((c>>12) | 224);
+ a += String.fromCharCode(((c>>6) & 63) | 128);
+ a += String.fromCharCode((c&63) | 128);
+ }
+ }
+
+ return a;
+};
+
+BinaryParser.hprint = function hprint (s) {
+ var number;
+
+ for (var i = 0, len = s.length; i < len; i++) {
+ if (s.charCodeAt(i) < 32) {
+ number = s.charCodeAt(i) <= 15
+ ? "0" + s.charCodeAt(i).toString(16)
+ : s.charCodeAt(i).toString(16);
+ process.stdout.write(number + " ")
+ } else {
+ number = s.charCodeAt(i) <= 15
+ ? "0" + s.charCodeAt(i).toString(16)
+ : s.charCodeAt(i).toString(16);
+ process.stdout.write(number + " ")
+ }
+ }
+
+ process.stdout.write("\n\n");
+};
+
+BinaryParser.ilprint = function hprint (s) {
+ var number;
+
+ for (var i = 0, len = s.length; i < len; i++) {
+ if (s.charCodeAt(i) < 32) {
+ number = s.charCodeAt(i) <= 15
+ ? "0" + s.charCodeAt(i).toString(10)
+ : s.charCodeAt(i).toString(10);
+
+ require('util').debug(number+' : ');
+ } else {
+ number = s.charCodeAt(i) <= 15
+ ? "0" + s.charCodeAt(i).toString(10)
+ : s.charCodeAt(i).toString(10);
+ require('util').debug(number+' : '+ s.charAt(i));
+ }
+ }
+};
+
+BinaryParser.hlprint = function hprint (s) {
+ var number;
+
+ for (var i = 0, len = s.length; i < len; i++) {
+ if (s.charCodeAt(i) < 32) {
+ number = s.charCodeAt(i) <= 15
+ ? "0" + s.charCodeAt(i).toString(16)
+ : s.charCodeAt(i).toString(16);
+ require('util').debug(number+' : ');
+ } else {
+ number = s.charCodeAt(i) <= 15
+ ? "0" + s.charCodeAt(i).toString(16)
+ : s.charCodeAt(i).toString(16);
+ require('util').debug(number+' : '+ s.charAt(i));
+ }
+ }
+};
+
+/**
+ * BinaryParser buffer constructor.
+ */
+function BinaryParserBuffer (bigEndian, buffer) {
+ this.bigEndian = bigEndian || 0;
+ this.buffer = [];
+ this.setBuffer(buffer);
+};
+
+BinaryParserBuffer.prototype.setBuffer = function setBuffer (data) {
+ var l, i, b;
+
+ if (data) {
+ i = l = data.length;
+ b = this.buffer = new Array(l);
+ for (; i; b[l - i] = data.charCodeAt(--i));
+ this.bigEndian && b.reverse();
+ }
+};
+
+BinaryParserBuffer.prototype.hasNeededBits = function hasNeededBits (neededBits) {
+ return this.buffer.length >= -(-neededBits >> 3);
+};
+
+BinaryParserBuffer.prototype.checkBuffer = function checkBuffer (neededBits) {
+ if (!this.hasNeededBits(neededBits)) {
+ throw new Error("checkBuffer::missing bytes");
+ }
+};
+
+BinaryParserBuffer.prototype.readBits = function readBits (start, length) {
+ //shl fix: Henri Torgemane ~1996 (compressed by Jonas Raoni)
+
+ function shl (a, b) {
+ for (; b--; a = ((a %= 0x7fffffff + 1) & 0x40000000) == 0x40000000 ? a * 2 : (a - 0x40000000) * 2 + 0x7fffffff + 1);
+ return a;
+ }
+
+ if (start < 0 || length <= 0) {
+ return 0;
+ }
+
+ this.checkBuffer(start + length);
+
+ var offsetLeft
+ , offsetRight = start % 8
+ , curByte = this.buffer.length - ( start >> 3 ) - 1
+ , lastByte = this.buffer.length + ( -( start + length ) >> 3 )
+ , diff = curByte - lastByte
+ , sum = ((this.buffer[ curByte ] >> offsetRight) & ((1 << (diff ? 8 - offsetRight : length)) - 1)) + (diff && (offsetLeft = (start + length) % 8) ? (this.buffer[lastByte++] & ((1 << offsetLeft) - 1)) << (diff-- << 3) - offsetRight : 0);
+
+ for(; diff; sum += shl(this.buffer[lastByte++], (diff-- << 3) - offsetRight));
+
+ return sum;
+};
+
+/**
+ * Expose.
+ */
+BinaryParser.Buffer = BinaryParserBuffer;
+
+exports.BinaryParser = BinaryParser;
+
+},
+
+
+
+'bson': function(module, exports, global, require, undefined){
+ var Long = require('./long').Long
+ , Double = require('./double').Double
+ , Timestamp = require('./timestamp').Timestamp
+ , ObjectID = require('./objectid').ObjectID
+ , Symbol = require('./symbol').Symbol
+ , Code = require('./code').Code
+ , MinKey = require('./min_key').MinKey
+ , MaxKey = require('./max_key').MaxKey
+ , DBRef = require('./db_ref').DBRef
+ , Binary = require('./binary').Binary
+ , BinaryParser = require('./binary_parser').BinaryParser
+ , writeIEEE754 = require('./float_parser').writeIEEE754
+  , readIEEE754 = require('./float_parser').readIEEE754;
+
+// To ensure that node 0.4 works correctly
+var isDate = function isDate(d) {
+ return typeof d === 'object' && Object.prototype.toString.call(d) === '[object Date]';
+}
+
+/**
+ * Create a new BSON instance
+ *
+ * @class Represents the BSON Parser
+ * @return {BSON} instance of BSON Parser.
+ */
+function BSON () {};
+
+/**
+ * @ignore
+ * @api private
+ */
+// BSON MAX VALUES
+BSON.BSON_INT32_MAX = 0x7FFFFFFF;
+BSON.BSON_INT32_MIN = -0x80000000;
+
+BSON.BSON_INT64_MAX = Math.pow(2, 63) - 1;
+BSON.BSON_INT64_MIN = -Math.pow(2, 63);
+
+// JS MAX PRECISE VALUES
+BSON.JS_INT_MAX = 0x20000000000000; // Any integer up to 2^53 can be precisely represented by a double.
+BSON.JS_INT_MIN = -0x20000000000000; // Any integer down to -2^53 can be precisely represented by a double.
+
+// Internal long versions
+var JS_INT_MAX_LONG = Long.fromNumber(0x20000000000000); // Any integer up to 2^53 can be precisely represented by a double.
+var JS_INT_MIN_LONG = Long.fromNumber(-0x20000000000000); // Any integer down to -2^53 can be precisely represented by a double.
+
+/**
+ * Number BSON Type
+ *
+ * @classconstant BSON_DATA_NUMBER
+ **/
+BSON.BSON_DATA_NUMBER = 1;
+/**
+ * String BSON Type
+ *
+ * @classconstant BSON_DATA_STRING
+ **/
+BSON.BSON_DATA_STRING = 2;
+/**
+ * Object BSON Type
+ *
+ * @classconstant BSON_DATA_OBJECT
+ **/
+BSON.BSON_DATA_OBJECT = 3;
+/**
+ * Array BSON Type
+ *
+ * @classconstant BSON_DATA_ARRAY
+ **/
+BSON.BSON_DATA_ARRAY = 4;
+/**
+ * Binary BSON Type
+ *
+ * @classconstant BSON_DATA_BINARY
+ **/
+BSON.BSON_DATA_BINARY = 5;
+/**
+ * ObjectID BSON Type
+ *
+ * @classconstant BSON_DATA_OID
+ **/
+BSON.BSON_DATA_OID = 7;
+/**
+ * Boolean BSON Type
+ *
+ * @classconstant BSON_DATA_BOOLEAN
+ **/
+BSON.BSON_DATA_BOOLEAN = 8;
+/**
+ * Date BSON Type
+ *
+ * @classconstant BSON_DATA_DATE
+ **/
+BSON.BSON_DATA_DATE = 9;
+/**
+ * null BSON Type
+ *
+ * @classconstant BSON_DATA_NULL
+ **/
+BSON.BSON_DATA_NULL = 10;
+/**
+ * RegExp BSON Type
+ *
+ * @classconstant BSON_DATA_REGEXP
+ **/
+BSON.BSON_DATA_REGEXP = 11;
+/**
+ * Code BSON Type
+ *
+ * @classconstant BSON_DATA_CODE
+ **/
+BSON.BSON_DATA_CODE = 13;
+/**
+ * Symbol BSON Type
+ *
+ * @classconstant BSON_DATA_SYMBOL
+ **/
+BSON.BSON_DATA_SYMBOL = 14;
+/**
+ * Code with Scope BSON Type
+ *
+ * @classconstant BSON_DATA_CODE_W_SCOPE
+ **/
+BSON.BSON_DATA_CODE_W_SCOPE = 15;
+/**
+ * 32 bit Integer BSON Type
+ *
+ * @classconstant BSON_DATA_INT
+ **/
+BSON.BSON_DATA_INT = 16;
+/**
+ * Timestamp BSON Type
+ *
+ * @classconstant BSON_DATA_TIMESTAMP
+ **/
+BSON.BSON_DATA_TIMESTAMP = 17;
+/**
+ * Long BSON Type
+ *
+ * @classconstant BSON_DATA_LONG
+ **/
+BSON.BSON_DATA_LONG = 18;
+/**
+ * MinKey BSON Type
+ *
+ * @classconstant BSON_DATA_MIN_KEY
+ **/
+BSON.BSON_DATA_MIN_KEY = 0xff;
+/**
+ * MaxKey BSON Type
+ *
+ * @classconstant BSON_DATA_MAX_KEY
+ **/
+BSON.BSON_DATA_MAX_KEY = 0x7f;
+
+/**
+ * Binary Default Type
+ *
+ * @classconstant BSON_BINARY_SUBTYPE_DEFAULT
+ **/
+BSON.BSON_BINARY_SUBTYPE_DEFAULT = 0;
+/**
+ * Binary Function Type
+ *
+ * @classconstant BSON_BINARY_SUBTYPE_FUNCTION
+ **/
+BSON.BSON_BINARY_SUBTYPE_FUNCTION = 1;
+/**
+ * Binary Byte Array Type
+ *
+ * @classconstant BSON_BINARY_SUBTYPE_BYTE_ARRAY
+ **/
+BSON.BSON_BINARY_SUBTYPE_BYTE_ARRAY = 2;
+/**
+ * Binary UUID Type
+ *
+ * @classconstant BSON_BINARY_SUBTYPE_UUID
+ **/
+BSON.BSON_BINARY_SUBTYPE_UUID = 3;
+/**
+ * Binary MD5 Type
+ *
+ * @classconstant BSON_BINARY_SUBTYPE_MD5
+ **/
+BSON.BSON_BINARY_SUBTYPE_MD5 = 4;
+/**
+ * Binary User Defined Type
+ *
+ * @classconstant BSON_BINARY_SUBTYPE_USER_DEFINED
+ **/
+BSON.BSON_BINARY_SUBTYPE_USER_DEFINED = 128;
+
+/**
+ * Calculate the bson size for a passed in Javascript object.
+ *
+ * @param {Object} object the Javascript object to calculate the BSON byte size for.
+ * @param {Boolean} [serializeFunctions] serialize all functions in the object **(default:false)**.
+ * @return {Number} returns the number of bytes the BSON object will take up.
+ * @api public
+ */
+BSON.calculateObjectSize = function calculateObjectSize(object, serializeFunctions) {
+ var totalLength = (4 + 1);
+
+ if(Array.isArray(object)) {
+ for(var i = 0; i < object.length; i++) {
+ totalLength += calculateElement(i.toString(), object[i], serializeFunctions)
+ }
+ } else {
+ // If we have toBSON defined, override the current object
+ if(object.toBSON) {
+ object = object.toBSON();
+ }
+
+ // Calculate size
+ for(var key in object) {
+ totalLength += calculateElement(key, object[key], serializeFunctions)
+ }
+ }
+
+ return totalLength;
+}
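+
+// Usage sketch (illustrative only): calculateObjectSize reports how many bytes a
+// document will occupy once serialized, which is handy for pre-allocating buffers.
+//
+//   var doc = { name: "Elias", age: 50 };
+//   var sizeInBytes = BSON.calculateObjectSize(doc, false);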
+
+/**
+ * @ignore
+ * @api private
+ */
+function calculateElement(name, value, serializeFunctions) {
+ var isBuffer = typeof Buffer !== 'undefined';
+
+ switch(typeof value) {
+ case 'string':
+ return 1 + (!isBuffer ? numberOfBytes(name) : Buffer.byteLength(name, 'utf8')) + 1 + 4 + (!isBuffer ? numberOfBytes(value) : Buffer.byteLength(value, 'utf8')) + 1;
+ case 'number':
+ if(Math.floor(value) === value && value >= BSON.JS_INT_MIN && value <= BSON.JS_INT_MAX) {
+ if(value >= BSON.BSON_INT32_MIN && value <= BSON.BSON_INT32_MAX) { // 32 bit
+ return (name != null ? ((!isBuffer ? numberOfBytes(name) : Buffer.byteLength(name, 'utf8')) + 1) : 0) + (4 + 1);
+ } else {
+ return (name != null ? ((!isBuffer ? numberOfBytes(name) : Buffer.byteLength(name, 'utf8')) + 1) : 0) + (8 + 1);
+ }
+ } else { // 64 bit
+ return (name != null ? ((!isBuffer ? numberOfBytes(name) : Buffer.byteLength(name, 'utf8')) + 1) : 0) + (8 + 1);
+ }
+ case 'undefined':
+ return (name != null ? ((!isBuffer ? numberOfBytes(name) : Buffer.byteLength(name, 'utf8')) + 1) : 0) + (1);
+ case 'boolean':
+ return (name != null ? ((!isBuffer ? numberOfBytes(name) : Buffer.byteLength(name, 'utf8')) + 1) : 0) + (1 + 1);
+ case 'object':
+ if(value == null || value instanceof MinKey || value instanceof MaxKey || value['_bsontype'] == 'MinKey' || value['_bsontype'] == 'MaxKey') {
+ return (name != null ? ((!isBuffer ? numberOfBytes(name) : Buffer.byteLength(name, 'utf8')) + 1) : 0) + (1);
+ } else if(value instanceof ObjectID || value['_bsontype'] == 'ObjectID') {
+ return (name != null ? ((!isBuffer ? numberOfBytes(name) : Buffer.byteLength(name, 'utf8')) + 1) : 0) + (12 + 1);
+ } else if(value instanceof Date || isDate(value)) {
+ return (name != null ? ((!isBuffer ? numberOfBytes(name) : Buffer.byteLength(name, 'utf8')) + 1) : 0) + (8 + 1);
+ } else if(typeof Buffer !== 'undefined' && Buffer.isBuffer(value)) {
+ return (name != null ? ((!isBuffer ? numberOfBytes(name) : Buffer.byteLength(name, 'utf8')) + 1) : 0) + (1 + 4 + 1) + value.length;
+ } else if(value instanceof Long || value instanceof Double || value instanceof Timestamp
+ || value['_bsontype'] == 'Long' || value['_bsontype'] == 'Double' || value['_bsontype'] == 'Timestamp') {
+ return (name != null ? ((!isBuffer ? numberOfBytes(name) : Buffer.byteLength(name, 'utf8')) + 1) : 0) + (8 + 1);
+ } else if(value instanceof Code || value['_bsontype'] == 'Code') {
+ // Calculate size depending on the availability of a scope
+ if(value.scope != null && Object.keys(value.scope).length > 0) {
+ return (name != null ? ((!isBuffer ? numberOfBytes(name) : Buffer.byteLength(name, 'utf8')) + 1) : 0) + 1 + 4 + 4 + (!isBuffer ? numberOfBytes(value.code.toString()) : Buffer.byteLength(value.code.toString(), 'utf8')) + 1 + BSON.calculateObjectSize(value.scope, serializeFunctions);
+ } else {
+ return (name != null ? ((!isBuffer ? numberOfBytes(name) : Buffer.byteLength(name, 'utf8')) + 1) : 0) + 1 + 4 + (!isBuffer ? numberOfBytes(value.code.toString()) : Buffer.byteLength(value.code.toString(), 'utf8')) + 1;
+ }
+ } else if(value instanceof Binary || value['_bsontype'] == 'Binary') {
+ // Check what kind of subtype we have
+ if(value.sub_type == Binary.SUBTYPE_BYTE_ARRAY) {
+ return (name != null ? ((!isBuffer ? numberOfBytes(name) : Buffer.byteLength(name, 'utf8')) + 1) : 0) + (value.position + 1 + 4 + 1 + 4);
+ } else {
+ return (name != null ? ((!isBuffer ? numberOfBytes(name) : Buffer.byteLength(name, 'utf8')) + 1) : 0) + (value.position + 1 + 4 + 1);
+ }
+ } else if(value instanceof Symbol || value['_bsontype'] == 'Symbol') {
+ return (name != null ? ((!isBuffer ? numberOfBytes(name) : Buffer.byteLength(name, 'utf8')) + 1) : 0) + ((!isBuffer ? numberOfBytes(value.value) : Buffer.byteLength(value.value, 'utf8')) + 4 + 1 + 1);
+ } else if(value instanceof DBRef || value['_bsontype'] == 'DBRef') {
+ // Set up correct object for serialization
+ var ordered_values = {
+ '$ref': value.namespace
+ , '$id' : value.oid
+ };
+
+ // Add db reference if it exists
+ if(null != value.db) {
+ ordered_values['$db'] = value.db;
+ }
+
+ return (name != null ? ((!isBuffer ? numberOfBytes(name) : Buffer.byteLength(name, 'utf8')) + 1) : 0) + 1 + BSON.calculateObjectSize(ordered_values, serializeFunctions);
+ } else if(value instanceof RegExp || Object.prototype.toString.call(value) === '[object RegExp]') {
+ return (name != null ? ((!isBuffer ? numberOfBytes(name) : Buffer.byteLength(name, 'utf8')) + 1) : 0) + 1 + (!isBuffer ? numberOfBytes(value.source) : Buffer.byteLength(value.source, 'utf8')) + 1
+ + (value.global ? 1 : 0) + (value.ignoreCase ? 1 : 0) + (value.multiline ? 1 : 0) + 1
+ } else {
+ return (name != null ? ((!isBuffer ? numberOfBytes(name) : Buffer.byteLength(name, 'utf8')) + 1) : 0) + BSON.calculateObjectSize(value, serializeFunctions) + 1;
+ }
+ case 'function':
+      // Workaround for node 0.4.x, where typeof /someregexp/ === 'function'
+ if(value instanceof RegExp || Object.prototype.toString.call(value) === '[object RegExp]' || String.call(value) == '[object RegExp]') {
+ return (name != null ? ((!isBuffer ? numberOfBytes(name) : Buffer.byteLength(name, 'utf8')) + 1) : 0) + 1 + (!isBuffer ? numberOfBytes(value.source) : Buffer.byteLength(value.source, 'utf8')) + 1
+ + (value.global ? 1 : 0) + (value.ignoreCase ? 1 : 0) + (value.multiline ? 1 : 0) + 1
+ } else {
+ if(serializeFunctions && value.scope != null && Object.keys(value.scope).length > 0) {
+ return (name != null ? ((!isBuffer ? numberOfBytes(name) : Buffer.byteLength(name, 'utf8')) + 1) : 0) + 1 + 4 + 4 + (!isBuffer ? numberOfBytes(value.toString()) : Buffer.byteLength(value.toString(), 'utf8')) + 1 + BSON.calculateObjectSize(value.scope, serializeFunctions);
+ } else if(serializeFunctions) {
+ return (name != null ? ((!isBuffer ? numberOfBytes(name) : Buffer.byteLength(name, 'utf8')) + 1) : 0) + 1 + 4 + (!isBuffer ? numberOfBytes(value.toString()) : Buffer.byteLength(value.toString(), 'utf8')) + 1;
+ }
+ }
+ }
+
+ return 0;
+}
+
+/**
+ * Serialize a Javascript object using a predefined Buffer and index into the buffer, useful when pre-allocating the space for serialization.
+ *
+ * @param {Object} object the Javascript object to serialize.
+ * @param {Boolean} checkKeys the serializer will check if keys are valid.
+ * @param {Buffer} buffer the Buffer you pre-allocated to store the serialized BSON object.
+ * @param {Number} index the index in the buffer where we wish to start serializing into.
+ * @param {Boolean} serializeFunctions serialize the javascript functions **(default:false)**.
+ * @return {Number} returns the new write index in the Buffer.
+ * @api public
+ */
+BSON.serializeWithBufferAndIndex = function serializeWithBufferAndIndex(object, checkKeys, buffer, index, serializeFunctions) {
+ // Default setting false
+ serializeFunctions = serializeFunctions == null ? false : serializeFunctions;
+ // Write end information (length of the object)
+ var size = buffer.length;
+ // Write the size of the object
+ buffer[index++] = size & 0xff;
+ buffer[index++] = (size >> 8) & 0xff;
+ buffer[index++] = (size >> 16) & 0xff;
+ buffer[index++] = (size >> 24) & 0xff;
+ return serializeObject(object, checkKeys, buffer, index, serializeFunctions) - 1;
+}
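+
+// Usage sketch (illustrative only): pre-allocate a buffer with calculateObjectSize and
+// serialize into it starting at index 0; the return value is the final write index.
+//
+//   var doc = { hello: "world" };
+//   var buf = new Buffer(BSON.calculateObjectSize(doc, false));
+//   var lastIndex = BSON.serializeWithBufferAndIndex(doc, false, buf, 0, false);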
+
+/**
+ * @ignore
+ * @api private
+ */
+var serializeObject = function(object, checkKeys, buffer, index, serializeFunctions) {
+ // Process the object
+ if(Array.isArray(object)) {
+ for(var i = 0; i < object.length; i++) {
+ index = packElement(i.toString(), object[i], checkKeys, buffer, index, serializeFunctions);
+ }
+ } else {
+ // If we have toBSON defined, override the current object
+ if(object.toBSON) {
+ object = object.toBSON();
+ }
+
+ // Serialize the object
+ for(var key in object) {
+ // Check the key and throw error if it's illegal
+ if (key != '$db' && key != '$ref' && key != '$id') {
+ // dollars and dots ok
+ BSON.checkKey(key, !checkKeys);
+ }
+
+ // Pack the element
+ index = packElement(key, object[key], checkKeys, buffer, index, serializeFunctions);
+ }
+ }
+
+ // Write zero
+ buffer[index++] = 0;
+ return index;
+}
+
+var stringToBytes = function(str) {
+ var ch, st, re = [];
+ for (var i = 0; i < str.length; i++ ) {
+ ch = str.charCodeAt(i); // get char
+ st = []; // set up "stack"
+ do {
+ st.push( ch & 0xFF ); // push byte to stack
+ ch = ch >> 8; // shift value down by 1 byte
+ }
+ while ( ch );
+ // add stack contents to result
+ // done because chars have "wrong" endianness
+ re = re.concat( st.reverse() );
+ }
+ // return an array of bytes
+ return re;
+}
+
+var numberOfBytes = function(str) {
+ var ch, st, re = 0;
+ for (var i = 0; i < str.length; i++ ) {
+ ch = str.charCodeAt(i); // get char
+ st = []; // set up "stack"
+ do {
+ st.push( ch & 0xFF ); // push byte to stack
+ ch = ch >> 8; // shift value down by 1 byte
+ }
+ while ( ch );
+ // add stack contents to result
+ // done because chars have "wrong" endianness
+ re = re + st.length;
+ }
+ // return an array of bytes
+ return re;
+}
+
+/**
+ * @ignore
+ * @api private
+ */
+var writeToTypedArray = function(buffer, string, index) {
+ var bytes = stringToBytes(string);
+ for(var i = 0; i < bytes.length; i++) {
+ buffer[index + i] = bytes[i];
+ }
+ return bytes.length;
+}
+
+/**
+ * @ignore
+ * @api private
+ */
+var supportsBuffer = typeof Buffer != 'undefined';
+
+/**
+ * @ignore
+ * @api private
+ */
+var packElement = function(name, value, checkKeys, buffer, index, serializeFunctions) {
+ var startIndex = index;
+
+ switch(typeof value) {
+ case 'string':
+ // Encode String type
+ buffer[index++] = BSON.BSON_DATA_STRING;
+ // Number of written bytes
+ var numberOfWrittenBytes = supportsBuffer ? buffer.write(name, index, 'utf8') : writeToTypedArray(buffer, name, index);
+ // Encode the name
+ index = index + numberOfWrittenBytes + 1;
+ buffer[index - 1] = 0;
+
+ // Calculate size
+ var size = supportsBuffer ? Buffer.byteLength(value) + 1 : numberOfBytes(value) + 1;
+ // Write the size of the string to buffer
+ buffer[index + 3] = (size >> 24) & 0xff;
+ buffer[index + 2] = (size >> 16) & 0xff;
+ buffer[index + 1] = (size >> 8) & 0xff;
+ buffer[index] = size & 0xff;
+      // Adjust the index
+ index = index + 4;
+ // Write the string
+ supportsBuffer ? buffer.write(value, index, 'utf8') : writeToTypedArray(buffer, value, index);
+ // Update index
+ index = index + size - 1;
+ // Write zero
+ buffer[index++] = 0;
+ // Return index
+ return index;
+ case 'number':
+ // We have an integer value
+ if(Math.floor(value) === value && value >= BSON.JS_INT_MIN && value <= BSON.JS_INT_MAX) {
+        // If the value fits in 32 bits encode it as an int; if it fits in a
+        // double encode it as a double; otherwise encode it as a long
+ if(value >= BSON.BSON_INT32_MIN && value <= BSON.BSON_INT32_MAX) {
+ // Set int type 32 bits or less
+ buffer[index++] = BSON.BSON_DATA_INT;
+ // Number of written bytes
+ var numberOfWrittenBytes = supportsBuffer ? buffer.write(name, index, 'utf8') : writeToTypedArray(buffer, name, index);
+ // Encode the name
+ index = index + numberOfWrittenBytes + 1;
+ buffer[index - 1] = 0;
+ // Write the int value
+ buffer[index++] = value & 0xff;
+ buffer[index++] = (value >> 8) & 0xff;
+ buffer[index++] = (value >> 16) & 0xff;
+ buffer[index++] = (value >> 24) & 0xff;
+ } else if(value >= BSON.JS_INT_MIN && value <= BSON.JS_INT_MAX) {
+ // Encode as double
+ buffer[index++] = BSON.BSON_DATA_NUMBER;
+ // Number of written bytes
+ var numberOfWrittenBytes = supportsBuffer ? buffer.write(name, index, 'utf8') : writeToTypedArray(buffer, name, index);
+ // Encode the name
+ index = index + numberOfWrittenBytes + 1;
+ buffer[index - 1] = 0;
+ // Write float
+ writeIEEE754(buffer, value, index, 'little', 52, 8);
+          // Adjust index
+ index = index + 8;
+ } else {
+ // Set long type
+ buffer[index++] = BSON.BSON_DATA_LONG;
+ // Number of written bytes
+ var numberOfWrittenBytes = supportsBuffer ? buffer.write(name, index, 'utf8') : writeToTypedArray(buffer, name, index);
+ // Encode the name
+ index = index + numberOfWrittenBytes + 1;
+ buffer[index - 1] = 0;
+ var longVal = Long.fromNumber(value);
+ var lowBits = longVal.getLowBits();
+ var highBits = longVal.getHighBits();
+ // Encode low bits
+ buffer[index++] = lowBits & 0xff;
+ buffer[index++] = (lowBits >> 8) & 0xff;
+ buffer[index++] = (lowBits >> 16) & 0xff;
+ buffer[index++] = (lowBits >> 24) & 0xff;
+ // Encode high bits
+ buffer[index++] = highBits & 0xff;
+ buffer[index++] = (highBits >> 8) & 0xff;
+ buffer[index++] = (highBits >> 16) & 0xff;
+ buffer[index++] = (highBits >> 24) & 0xff;
+ }
+ } else {
+ // Encode as double
+ buffer[index++] = BSON.BSON_DATA_NUMBER;
+ // Number of written bytes
+ var numberOfWrittenBytes = supportsBuffer ? buffer.write(name, index, 'utf8') : writeToTypedArray(buffer, name, index);
+ // Encode the name
+ index = index + numberOfWrittenBytes + 1;
+ buffer[index - 1] = 0;
+ // Write float
+ writeIEEE754(buffer, value, index, 'little', 52, 8);
+        // Adjust index
+ index = index + 8;
+ }
+
+ return index;
+ case 'undefined':
+      // Set null type (undefined is stored as null)
+ buffer[index++] = BSON.BSON_DATA_NULL;
+ // Number of written bytes
+ var numberOfWrittenBytes = supportsBuffer ? buffer.write(name, index, 'utf8') : writeToTypedArray(buffer, name, index);
+ // Encode the name
+ index = index + numberOfWrittenBytes + 1;
+ buffer[index - 1] = 0;
+ return index;
+ case 'boolean':
+ // Write the type
+ buffer[index++] = BSON.BSON_DATA_BOOLEAN;
+ // Number of written bytes
+ var numberOfWrittenBytes = supportsBuffer ? buffer.write(name, index, 'utf8') : writeToTypedArray(buffer, name, index);
+ // Encode the name
+ index = index + numberOfWrittenBytes + 1;
+ buffer[index - 1] = 0;
+ // Encode the boolean value
+ buffer[index++] = value ? 1 : 0;
+ return index;
+ case 'object':
+ if(value === null || value instanceof MinKey || value instanceof MaxKey
+ || value['_bsontype'] == 'MinKey' || value['_bsontype'] == 'MaxKey') {
+ // Write the type of either min or max key
+ if(value === null) {
+ buffer[index++] = BSON.BSON_DATA_NULL;
+ } else if(value instanceof MinKey) {
+ buffer[index++] = BSON.BSON_DATA_MIN_KEY;
+ } else {
+ buffer[index++] = BSON.BSON_DATA_MAX_KEY;
+ }
+
+ // Number of written bytes
+ var numberOfWrittenBytes = supportsBuffer ? buffer.write(name, index, 'utf8') : writeToTypedArray(buffer, name, index);
+ // Encode the name
+ index = index + numberOfWrittenBytes + 1;
+ buffer[index - 1] = 0;
+ return index;
+ } else if(value instanceof ObjectID || value['_bsontype'] == 'ObjectID') {
+ // Write the type
+ buffer[index++] = BSON.BSON_DATA_OID;
+ // Number of written bytes
+ var numberOfWrittenBytes = supportsBuffer ? buffer.write(name, index, 'utf8') : writeToTypedArray(buffer, name, index);
+ // Encode the name
+ index = index + numberOfWrittenBytes + 1;
+ buffer[index - 1] = 0;
+
+ // Write objectid
+ supportsBuffer ? buffer.write(value.id, index, 'binary') : writeToTypedArray(buffer, value.id, index);
+        // Adjust index
+ index = index + 12;
+ return index;
+ } else if(value instanceof Date || isDate(value)) {
+ // Write the type
+ buffer[index++] = BSON.BSON_DATA_DATE;
+ // Number of written bytes
+ var numberOfWrittenBytes = supportsBuffer ? buffer.write(name, index, 'utf8') : writeToTypedArray(buffer, name, index);
+ // Encode the name
+ index = index + numberOfWrittenBytes + 1;
+ buffer[index - 1] = 0;
+
+ // Write the date
+ var dateInMilis = Long.fromNumber(value.getTime());
+ var lowBits = dateInMilis.getLowBits();
+ var highBits = dateInMilis.getHighBits();
+ // Encode low bits
+ buffer[index++] = lowBits & 0xff;
+ buffer[index++] = (lowBits >> 8) & 0xff;
+ buffer[index++] = (lowBits >> 16) & 0xff;
+ buffer[index++] = (lowBits >> 24) & 0xff;
+ // Encode high bits
+ buffer[index++] = highBits & 0xff;
+ buffer[index++] = (highBits >> 8) & 0xff;
+ buffer[index++] = (highBits >> 16) & 0xff;
+ buffer[index++] = (highBits >> 24) & 0xff;
+ return index;
+ } else if(typeof Buffer !== 'undefined' && Buffer.isBuffer(value)) {
+ // Write the type
+ buffer[index++] = BSON.BSON_DATA_BINARY;
+ // Number of written bytes
+ var numberOfWrittenBytes = supportsBuffer ? buffer.write(name, index, 'utf8') : writeToTypedArray(buffer, name, index);
+ // Encode the name
+ index = index + numberOfWrittenBytes + 1;
+ buffer[index - 1] = 0;
+ // Get size of the buffer (current write point)
+ var size = value.length;
+ // Write the size of the string to buffer
+ buffer[index++] = size & 0xff;
+ buffer[index++] = (size >> 8) & 0xff;
+ buffer[index++] = (size >> 16) & 0xff;
+ buffer[index++] = (size >> 24) & 0xff;
+ // Write the default subtype
+ buffer[index++] = BSON.BSON_BINARY_SUBTYPE_DEFAULT;
+        // Copy the content from the binary field to the buffer
+ value.copy(buffer, index, 0, size);
+ // Adjust the index
+ index = index + size;
+ return index;
+ } else if(value instanceof Long || value instanceof Timestamp || value['_bsontype'] == 'Long' || value['_bsontype'] == 'Timestamp') {
+ // Write the type
+ buffer[index++] = value instanceof Long ? BSON.BSON_DATA_LONG : BSON.BSON_DATA_TIMESTAMP;
+ // Number of written bytes
+ var numberOfWrittenBytes = supportsBuffer ? buffer.write(name, index, 'utf8') : writeToTypedArray(buffer, name, index);
+ // Encode the name
+ index = index + numberOfWrittenBytes + 1;
+ buffer[index - 1] = 0;
+        // Write the 64 bit long/timestamp value
+ var lowBits = value.getLowBits();
+ var highBits = value.getHighBits();
+ // Encode low bits
+ buffer[index++] = lowBits & 0xff;
+ buffer[index++] = (lowBits >> 8) & 0xff;
+ buffer[index++] = (lowBits >> 16) & 0xff;
+ buffer[index++] = (lowBits >> 24) & 0xff;
+ // Encode high bits
+ buffer[index++] = highBits & 0xff;
+ buffer[index++] = (highBits >> 8) & 0xff;
+ buffer[index++] = (highBits >> 16) & 0xff;
+ buffer[index++] = (highBits >> 24) & 0xff;
+ return index;
+ } else if(value instanceof Double || value['_bsontype'] == 'Double') {
+ // Encode as double
+ buffer[index++] = BSON.BSON_DATA_NUMBER;
+ // Number of written bytes
+ var numberOfWrittenBytes = supportsBuffer ? buffer.write(name, index, 'utf8') : writeToTypedArray(buffer, name, index);
+ // Encode the name
+ index = index + numberOfWrittenBytes + 1;
+ buffer[index - 1] = 0;
+ // Write float
+ writeIEEE754(buffer, value, index, 'little', 52, 8);
+        // Adjust index
+ index = index + 8;
+ return index;
+ } else if(value instanceof Code || value['_bsontype'] == 'Code') {
+ if(value.scope != null && Object.keys(value.scope).length > 0) {
+ // Write the type
+ buffer[index++] = BSON.BSON_DATA_CODE_W_SCOPE;
+ // Number of written bytes
+ var numberOfWrittenBytes = supportsBuffer ? buffer.write(name, index, 'utf8') : writeToTypedArray(buffer, name, index);
+ // Encode the name
+ index = index + numberOfWrittenBytes + 1;
+ buffer[index - 1] = 0;
+ // Calculate the scope size
+ var scopeSize = BSON.calculateObjectSize(value.scope, serializeFunctions);
+ // Function string
+ var functionString = value.code.toString();
+ // Function Size
+ var codeSize = supportsBuffer ? Buffer.byteLength(functionString) + 1 : numberOfBytes(functionString) + 1;
+
+ // Calculate full size of the object
+ var totalSize = 4 + codeSize + scopeSize + 4;
+
+ // Write the total size of the object
+ buffer[index++] = totalSize & 0xff;
+ buffer[index++] = (totalSize >> 8) & 0xff;
+ buffer[index++] = (totalSize >> 16) & 0xff;
+ buffer[index++] = (totalSize >> 24) & 0xff;
+
+ // Write the size of the string to buffer
+ buffer[index++] = codeSize & 0xff;
+ buffer[index++] = (codeSize >> 8) & 0xff;
+ buffer[index++] = (codeSize >> 16) & 0xff;
+ buffer[index++] = (codeSize >> 24) & 0xff;
+
+ // Write the string
+ supportsBuffer ? buffer.write(functionString, index, 'utf8') : writeToTypedArray(buffer, functionString, index);
+ // Update index
+ index = index + codeSize - 1;
+ // Write zero
+ buffer[index++] = 0;
+ // Serialize the scope object
+ var scopeObjectBuffer = supportsBuffer ? new Buffer(scopeSize) : new Uint8Array(new ArrayBuffer(scopeSize));
+          // Execute the serialization into a separate buffer
+ serializeObject(value.scope, checkKeys, scopeObjectBuffer, 0, serializeFunctions);
+
+ // Adjusted scope Size (removing the header)
+ var scopeDocSize = scopeSize;
+ // Write scope object size
+ buffer[index++] = scopeDocSize & 0xff;
+ buffer[index++] = (scopeDocSize >> 8) & 0xff;
+ buffer[index++] = (scopeDocSize >> 16) & 0xff;
+ buffer[index++] = (scopeDocSize >> 24) & 0xff;
+
+ // Write the scopeObject into the buffer
+ supportsBuffer ? scopeObjectBuffer.copy(buffer, index, 0, scopeSize) : buffer.set(scopeObjectBuffer, index);
+ // Adjust index, removing the empty size of the doc (5 bytes 0000000005)
+ index = index + scopeDocSize - 5;
+ // Write trailing zero
+ buffer[index++] = 0;
+ return index
+ } else {
+ buffer[index++] = BSON.BSON_DATA_CODE;
+ // Number of written bytes
+ var numberOfWrittenBytes = supportsBuffer ? buffer.write(name, index, 'utf8') : writeToTypedArray(buffer, name, index);
+ // Encode the name
+ index = index + numberOfWrittenBytes + 1;
+ buffer[index - 1] = 0;
+ // Function string
+ var functionString = value.code.toString();
+ // Function Size
+ var size = supportsBuffer ? Buffer.byteLength(functionString) + 1 : numberOfBytes(functionString) + 1;
+ // Write the size of the string to buffer
+ buffer[index++] = size & 0xff;
+ buffer[index++] = (size >> 8) & 0xff;
+ buffer[index++] = (size >> 16) & 0xff;
+ buffer[index++] = (size >> 24) & 0xff;
+ // Write the string
+ supportsBuffer ? buffer.write(functionString, index, 'utf8') : writeToTypedArray(buffer, functionString, index);
+ // Update index
+ index = index + size - 1;
+ // Write zero
+ buffer[index++] = 0;
+ return index;
+ }
+ } else if(value instanceof Binary || value['_bsontype'] == 'Binary') {
+ // Write the type
+ buffer[index++] = BSON.BSON_DATA_BINARY;
+ // Number of written bytes
+ var numberOfWrittenBytes = supportsBuffer ? buffer.write(name, index, 'utf8') : writeToTypedArray(buffer, name, index);
+ // Encode the name
+ index = index + numberOfWrittenBytes + 1;
+ buffer[index - 1] = 0;
+ // Extract the buffer
+ var data = value.value(true);
+ // Calculate size
+ var size = value.position;
+ // Write the size of the string to buffer
+ buffer[index++] = size & 0xff;
+ buffer[index++] = (size >> 8) & 0xff;
+ buffer[index++] = (size >> 16) & 0xff;
+ buffer[index++] = (size >> 24) & 0xff;
+ // Write the subtype to the buffer
+ buffer[index++] = value.sub_type;
+
+        // If we have binary subtype 2 the first 4 bytes are the size
+ if(value.sub_type == Binary.SUBTYPE_BYTE_ARRAY) {
+ buffer[index++] = size & 0xff;
+ buffer[index++] = (size >> 8) & 0xff;
+ buffer[index++] = (size >> 16) & 0xff;
+ buffer[index++] = (size >> 24) & 0xff;
+ }
+
+ // Write the data to the object
+ supportsBuffer ? data.copy(buffer, index, 0, value.position) : buffer.set(data, index);
+        // Adjust index
+ index = index + value.position;
+ return index;
+ } else if(value instanceof Symbol || value['_bsontype'] == 'Symbol') {
+ // Write the type
+ buffer[index++] = BSON.BSON_DATA_SYMBOL;
+ // Number of written bytes
+ var numberOfWrittenBytes = supportsBuffer ? buffer.write(name, index, 'utf8') : writeToTypedArray(buffer, name, index);
+ // Encode the name
+ index = index + numberOfWrittenBytes + 1;
+ buffer[index - 1] = 0;
+ // Calculate size
+ var size = supportsBuffer ? Buffer.byteLength(value.value) + 1 : numberOfBytes(value.value) + 1;
+ // Write the size of the string to buffer
+ buffer[index++] = size & 0xff;
+ buffer[index++] = (size >> 8) & 0xff;
+ buffer[index++] = (size >> 16) & 0xff;
+ buffer[index++] = (size >> 24) & 0xff;
+ // Write the string
+ buffer.write(value.value, index, 'utf8');
+ // Update index
+ index = index + size - 1;
+ // Write zero
+ buffer[index++] = 0x00;
+ return index;
+ } else if(value instanceof DBRef || value['_bsontype'] == 'DBRef') {
+ // Write the type
+ buffer[index++] = BSON.BSON_DATA_OBJECT;
+ // Number of written bytes
+ var numberOfWrittenBytes = supportsBuffer ? buffer.write(name, index, 'utf8') : writeToTypedArray(buffer, name, index);
+ // Encode the name
+ index = index + numberOfWrittenBytes + 1;
+ buffer[index - 1] = 0;
+ // Set up correct object for serialization
+ var ordered_values = {
+ '$ref': value.namespace
+ , '$id' : value.oid
+ };
+
+ // Add db reference if it exists
+ if(null != value.db) {
+ ordered_values['$db'] = value.db;
+ }
+
+ // Message size
+ var size = BSON.calculateObjectSize(ordered_values, serializeFunctions);
+ // Serialize the object
+ var endIndex = BSON.serializeWithBufferAndIndex(ordered_values, checkKeys, buffer, index, serializeFunctions);
+ // Write the size of the string to buffer
+ buffer[index++] = size & 0xff;
+ buffer[index++] = (size >> 8) & 0xff;
+ buffer[index++] = (size >> 16) & 0xff;
+ buffer[index++] = (size >> 24) & 0xff;
+ // Write zero for object
+ buffer[endIndex++] = 0x00;
+ // Return the end index
+ return endIndex;
+ } else if(value instanceof RegExp || Object.prototype.toString.call(value) === '[object RegExp]') {
+ // Write the type
+ buffer[index++] = BSON.BSON_DATA_REGEXP;
+ // Number of written bytes
+ var numberOfWrittenBytes = supportsBuffer ? buffer.write(name, index, 'utf8') : writeToTypedArray(buffer, name, index);
+ // Encode the name
+ index = index + numberOfWrittenBytes + 1;
+ buffer[index - 1] = 0;
+
+ // Write the regular expression string
+ supportsBuffer ? buffer.write(value.source, index, 'utf8') : writeToTypedArray(buffer, value.source, index);
+ // Adjust the index
+ index = index + (supportsBuffer ? Buffer.byteLength(value.source) : numberOfBytes(value.source));
+ // Write zero
+ buffer[index++] = 0x00;
+ // Write the parameters
+ if(value.global) buffer[index++] = 0x73; // s
+ if(value.ignoreCase) buffer[index++] = 0x69; // i
+ if(value.multiline) buffer[index++] = 0x6d; // m
+ // Add ending zero
+ buffer[index++] = 0x00;
+ return index;
+ } else {
+ // Write the type
+ buffer[index++] = Array.isArray(value) ? BSON.BSON_DATA_ARRAY : BSON.BSON_DATA_OBJECT;
+ // Number of written bytes
+ var numberOfWrittenBytes = supportsBuffer ? buffer.write(name, index, 'utf8') : writeToTypedArray(buffer, name, index);
+ // Adjust the index
+ index = index + numberOfWrittenBytes + 1;
+ buffer[index - 1] = 0;
+ var endIndex = serializeObject(value, checkKeys, buffer, index + 4, serializeFunctions);
+ // Write size
+ var size = endIndex - index;
+ // Write the size of the string to buffer
+ buffer[index++] = size & 0xff;
+ buffer[index++] = (size >> 8) & 0xff;
+ buffer[index++] = (size >> 16) & 0xff;
+ buffer[index++] = (size >> 24) & 0xff;
+ return endIndex;
+ }
+ case 'function':
+      // Workaround for node 0.4.x, where typeof /someregexp/ === 'function'
+ if(value instanceof RegExp || Object.prototype.toString.call(value) === '[object RegExp]' || String.call(value) == '[object RegExp]') {
+ // Write the type
+ buffer[index++] = BSON.BSON_DATA_REGEXP;
+ // Number of written bytes
+ var numberOfWrittenBytes = supportsBuffer ? buffer.write(name, index, 'utf8') : writeToTypedArray(buffer, name, index);
+ // Encode the name
+ index = index + numberOfWrittenBytes + 1;
+ buffer[index - 1] = 0;
+
+ // Write the regular expression string
+ buffer.write(value.source, index, 'utf8');
+ // Adjust the index
+ index = index + (supportsBuffer ? Buffer.byteLength(value.source) : numberOfBytes(value.source));
+ // Write zero
+ buffer[index++] = 0x00;
+ // Write the parameters
+ if(value.global) buffer[index++] = 0x73; // s
+ if(value.ignoreCase) buffer[index++] = 0x69; // i
+ if(value.multiline) buffer[index++] = 0x6d; // m
+ // Add ending zero
+ buffer[index++] = 0x00;
+ return index;
+ } else {
+ if(serializeFunctions && value.scope != null && Object.keys(value.scope).length > 0) {
+ // Write the type
+ buffer[index++] = BSON.BSON_DATA_CODE_W_SCOPE;
+ // Number of written bytes
+ var numberOfWrittenBytes = supportsBuffer ? buffer.write(name, index, 'utf8') : writeToTypedArray(buffer, name, index);
+ // Encode the name
+ index = index + numberOfWrittenBytes + 1;
+ buffer[index - 1] = 0;
+ // Calculate the scope size
+ var scopeSize = BSON.calculateObjectSize(value.scope, serializeFunctions);
+ // Function string
+ var functionString = value.toString();
+ // Function Size
+ var codeSize = supportsBuffer ? Buffer.byteLength(functionString) + 1 : numberOfBytes(functionString) + 1;
+
+ // Calculate full size of the object
+ var totalSize = 4 + codeSize + scopeSize;
+
+ // Write the total size of the object
+ buffer[index++] = totalSize & 0xff;
+ buffer[index++] = (totalSize >> 8) & 0xff;
+ buffer[index++] = (totalSize >> 16) & 0xff;
+ buffer[index++] = (totalSize >> 24) & 0xff;
+
+ // Write the size of the string to buffer
+ buffer[index++] = codeSize & 0xff;
+ buffer[index++] = (codeSize >> 8) & 0xff;
+ buffer[index++] = (codeSize >> 16) & 0xff;
+ buffer[index++] = (codeSize >> 24) & 0xff;
+
+ // Write the string
+ supportsBuffer ? buffer.write(functionString, index, 'utf8') : writeToTypedArray(buffer, functionString, index);
+ // Update index
+ index = index + codeSize - 1;
+ // Write zero
+ buffer[index++] = 0;
+ // Serialize the scope object
+ var scopeObjectBuffer = new Buffer(scopeSize);
+          // Execute the serialization into a separate buffer
+ serializeObject(value.scope, checkKeys, scopeObjectBuffer, 0, serializeFunctions);
+
+ // Adjusted scope Size (removing the header)
+ var scopeDocSize = scopeSize - 4;
+ // Write scope object size
+ buffer[index++] = scopeDocSize & 0xff;
+ buffer[index++] = (scopeDocSize >> 8) & 0xff;
+ buffer[index++] = (scopeDocSize >> 16) & 0xff;
+ buffer[index++] = (scopeDocSize >> 24) & 0xff;
+
+ // Write the scopeObject into the buffer
+ scopeObjectBuffer.copy(buffer, index, 0, scopeSize);
+
+ // Adjust index, removing the empty size of the doc (5 bytes 0000000005)
+ index = index + scopeDocSize - 5;
+ // Write trailing zero
+ buffer[index++] = 0;
+ return index
+ } else if(serializeFunctions) {
+ buffer[index++] = BSON.BSON_DATA_CODE;
+ // Number of written bytes
+ var numberOfWrittenBytes = supportsBuffer ? buffer.write(name, index, 'utf8') : writeToTypedArray(buffer, name, index);
+ // Encode the name
+ index = index + numberOfWrittenBytes + 1;
+ buffer[index - 1] = 0;
+ // Function string
+ var functionString = value.toString();
+ // Function Size
+ var size = supportsBuffer ? Buffer.byteLength(functionString) + 1 : numberOfBytes(functionString) + 1;
+ // Write the size of the string to buffer
+ buffer[index++] = size & 0xff;
+ buffer[index++] = (size >> 8) & 0xff;
+ buffer[index++] = (size >> 16) & 0xff;
+ buffer[index++] = (size >> 24) & 0xff;
+ // Write the string
+ supportsBuffer ? buffer.write(functionString, index, 'utf8') : writeToTypedArray(buffer, functionString, index);
+ // Update index
+ index = index + size - 1;
+ // Write zero
+ buffer[index++] = 0;
+ return index;
+ }
+ }
+ }
+
+ // If no value to serialize
+ return index;
+}
+
+/**
+ * Serialize a Javascript object.
+ *
+ * @param {Object} object the Javascript object to serialize.
+ * @param {Boolean} checkKeys the serializer will check if keys are valid.
+ * @param {Boolean} asBuffer return the serialized object as a Buffer object **(ignore)**.
+ * @param {Boolean} serializeFunctions serialize the javascript functions **(default:false)**.
+ * @return {Buffer} returns the Buffer object containing the serialized object.
+ * @api public
+ */
+BSON.serialize = function(object, checkKeys, asBuffer, serializeFunctions) {
+  // Throw an error if we are trying to serialize an illegal type
+ if(object == null || typeof object != 'object' || Array.isArray(object))
+ throw new Error("Only javascript objects supported");
+
+  // Empty target buffer
+ var buffer = null;
+ // Calculate the size of the object
+ var size = BSON.calculateObjectSize(object, serializeFunctions);
+ // Fetch the best available type for storing the binary data
+  if(typeof Buffer != 'undefined') {
+ buffer = new Buffer(size);
+ asBuffer = true;
+ } else if(typeof Uint8Array != 'undefined') {
+ buffer = new Uint8Array(new ArrayBuffer(size));
+ } else {
+ buffer = new Array(size);
+ }
+
+  // Serialize the object into the allocated buffer
+ BSON.serializeWithBufferAndIndex(object, checkKeys, buffer, 0, serializeFunctions);
+ return buffer;
+}
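+
+// Usage sketch (illustrative only): serialize a plain object into a Buffer in one call.
+//
+//   var serialized = BSON.serialize({ hello: "world" }, false, true, false);
+//   // `serialized` is a Buffer (or a Uint8Array/Array where Buffer is unavailable)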
+
+/**
+ * Contains the function cache, when enabled, so repeated deserializations can skip the eval step; lookups are keyed by a crc32 hash or the raw function string
+ *
+ * @ignore
+ * @api private
+ */
+var functionCache = BSON.functionCache = {};
+
+/**
+ * CRC lookup table shared by the crc32 function below
+ *
+ * @ignore
+ * @api private
+ */
+var table = [0x00000000, 0x77073096, 0xEE0E612C, 0x990951BA, 0x076DC419, 0x706AF48F, 0xE963A535, 0x9E6495A3, 0x0EDB8832, 0x79DCB8A4, 0xE0D5E91E, 0x97D2D988, 0x09B64C2B, 0x7EB17CBD, 0xE7B82D07, 0x90BF1D91, 0x1DB71064, 0x6AB020F2, 0xF3B97148, 0x84BE41DE, 0x1ADAD47D, 0x6DDDE4EB, 0xF4D4B551, 0x83D385C7, 0x136C9856, 0x646BA8C0, 0xFD62F97A, 0x8A65C9EC, 0x14015C4F, 0x63066CD9, 0xFA0F3D63, 0x8D080DF5, 0x3B6E20C8, 0x4C69105E, 0xD56041E4, 0xA2677172, 0x3C03E4D1, 0x4B04D447, 0xD20D85FD, 0xA50AB56B, 0x35B5A8FA, 0x42B2986C, 0xDBBBC9D6, 0xACBCF940, 0x32D86CE3, 0x45DF5C75, 0xDCD60DCF, 0xABD13D59, 0x26D930AC, 0x51DE003A, 0xC8D75180, 0xBFD06116, 0x21B4F4B5, 0x56B3C423, 0xCFBA9599, 0xB8BDA50F, 0x2802B89E, 0x5F058808, 0xC60CD9B2, 0xB10BE924, 0x2F6F7C87, 0x58684C11, 0xC1611DAB, 0xB6662D3D, 0x76DC4190, 0x01DB7106, 0x98D220BC, 0xEFD5102A, 0x71B18589, 0x06B6B51F, 0x9FBFE4A5, 0xE8B8D433, 0x7807C9A2, 0x0F00F934, 0x9609A88E, 0xE10E9818, 0x7F6A0DBB, 0x086D3D2D, 0x91646C97, 0xE6635C01, 0x6B6B51F4, 0x1C6C6162, 0x856530D8, 0xF262004E, 0x6C0695ED, 0x1B01A57B, 0x8208F4C1, 0xF50FC457, 0x65B0D9C6, 0x12B7E950, 0x8BBEB8EA, 0xFCB9887C, 0x62DD1DDF, 0x15DA2D49, 0x8CD37CF3, 0xFBD44C65, 0x4DB26158, 0x3AB551CE, 0xA3BC0074, 0xD4BB30E2, 0x4ADFA541, 0x3DD895D7, 0xA4D1C46D, 0xD3D6F4FB, 0x4369E96A, 0x346ED9FC, 0xAD678846, 0xDA60B8D0, 0x44042D73, 0x33031DE5, 0xAA0A4C5F, 0xDD0D7CC9, 0x5005713C, 0x270241AA, 0xBE0B1010, 0xC90C2086, 0x5768B525, 0x206F85B3, 0xB966D409, 0xCE61E49F, 0x5EDEF90E, 0x29D9C998, 0xB0D09822, 0xC7D7A8B4, 0x59B33D17, 0x2EB40D81, 0xB7BD5C3B, 0xC0BA6CAD, 0xEDB88320, 0x9ABFB3B6, 0x03B6E20C, 0x74B1D29A, 0xEAD54739, 0x9DD277AF, 0x04DB2615, 0x73DC1683, 0xE3630B12, 0x94643B84, 0x0D6D6A3E, 0x7A6A5AA8, 0xE40ECF0B, 0x9309FF9D, 0x0A00AE27, 0x7D079EB1, 0xF00F9344, 0x8708A3D2, 0x1E01F268, 0x6906C2FE, 0xF762575D, 0x806567CB, 0x196C3671, 0x6E6B06E7, 0xFED41B76, 0x89D32BE0, 0x10DA7A5A, 0x67DD4ACC, 0xF9B9DF6F, 0x8EBEEFF9, 0x17B7BE43, 0x60B08ED5, 0xD6D6A3E8, 0xA1D1937E, 0x38D8C2C4, 0x4FDFF252, 0xD1BB67F1, 0xA6BC5767, 0x3FB506DD, 0x48B2364B, 0xD80D2BDA, 0xAF0A1B4C, 0x36034AF6, 0x41047A60, 0xDF60EFC3, 0xA867DF55, 0x316E8EEF, 0x4669BE79, 0xCB61B38C, 0xBC66831A, 0x256FD2A0, 0x5268E236, 0xCC0C7795, 0xBB0B4703, 0x220216B9, 0x5505262F, 0xC5BA3BBE, 0xB2BD0B28, 0x2BB45A92, 0x5CB36A04, 0xC2D7FFA7, 0xB5D0CF31, 0x2CD99E8B, 0x5BDEAE1D, 0x9B64C2B0, 0xEC63F226, 0x756AA39C, 0x026D930A, 0x9C0906A9, 0xEB0E363F, 0x72076785, 0x05005713, 0x95BF4A82, 0xE2B87A14, 0x7BB12BAE, 0x0CB61B38, 0x92D28E9B, 0xE5D5BE0D, 0x7CDCEFB7, 0x0BDBDF21, 0x86D3D2D4, 0xF1D4E242, 0x68DDB3F8, 0x1FDA836E, 0x81BE16CD, 0xF6B9265B, 0x6FB077E1, 0x18B74777, 0x88085AE6, 0xFF0F6A70, 0x66063BCA, 0x11010B5C, 0x8F659EFF, 0xF862AE69, 0x616BFFD3, 0x166CCF45, 0xA00AE278, 0xD70DD2EE, 0x4E048354, 0x3903B3C2, 0xA7672661, 0xD06016F7, 0x4969474D, 0x3E6E77DB, 0xAED16A4A, 0xD9D65ADC, 0x40DF0B66, 0x37D83BF0, 0xA9BCAE53, 0xDEBB9EC5, 0x47B2CF7F, 0x30B5FFE9, 0xBDBDF21C, 0xCABAC28A, 0x53B39330, 0x24B4A3A6, 0xBAD03605, 0xCDD70693, 0x54DE5729, 0x23D967BF, 0xB3667A2E, 0xC4614AB8, 0x5D681B02, 0x2A6F2B94, 0xB40BBE37, 0xC30C8EA1, 0x5A05DF1B, 0x2D02EF8D];
+
+/**
+ * CRC32 hash method, fast and versatile enough for our usage
+ *
+ * @ignore
+ * @api private
+ */
+var crc32 = function(string, start, end) {
+ var crc = 0
+ var x = 0;
+ var y = 0;
+ crc = crc ^ (-1);
+
+ for(var i = start, iTop = end; i < iTop;i++) {
+ y = (crc ^ string[i]) & 0xFF;
+ x = table[y];
+ crc = (crc >>> 8) ^ x;
+ }
+
+ return crc ^ (-1);
+}
+
+/**
+ * Deserialize stream data as BSON documents.
+ *
+ * Options
+ * - **evalFunctions** {Boolean, default:false}, evaluate functions in the BSON document scoped to the object deserialized.
+ * - **cacheFunctions** {Boolean, default:false}, cache evaluated functions for reuse.
+ * - **cacheFunctionsCrc32** {Boolean, default:false}, use a crc32 code for caching, otherwise use the string of the function.
+ * - **promoteLongs** {Boolean, default:true}, when deserializing, a Long is converted to a Number if it fits within 53 bits
+ *
+ * @param {Buffer} data the buffer containing the serialized set of BSON documents.
+ * @param {Number} startIndex the start index in the data Buffer where the deserialization is to start.
+ * @param {Number} numberOfDocuments number of documents to deserialize.
+ * @param {Array} documents an array where to store the deserialized documents.
+ * @param {Number} docStartIndex the index in the documents array from where to start inserting documents.
+ * @param {Object} [options] additional options used for the deserialization.
+ * @return {Number} returns the next index in the buffer after deserialization **x** numbers of documents.
+ * @api public
+ */
+BSON.deserializeStream = function(data, startIndex, numberOfDocuments, documents, docStartIndex, options) {
+ // if(numberOfDocuments !== documents.length) throw new Error("Number of expected results back is less than the number of documents");
+ options = options != null ? options : {};
+ var index = startIndex;
+ // Loop over all documents
+ for(var i = 0; i < numberOfDocuments; i++) {
+ // Find size of the document
+ var size = data[index] | data[index + 1] << 8 | data[index + 2] << 16 | data[index + 3] << 24;
+ // Update options with index
+ options['index'] = index;
+ // Parse the document at this point
+ documents[docStartIndex + i] = BSON.deserialize(data, options);
+ // Adjust index by the document size
+ index = index + size;
+ }
+
+ // Return object containing end index of parsing and list of documents
+ return index;
+}
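+
+// Usage sketch (illustrative only, assumes `data` holds two serialized documents
+// back to back): deserializeStream fills the supplied array and returns the next index.
+//
+//   var docs = [];
+//   var nextIndex = BSON.deserializeStream(data, 0, 2, docs, 0, {});
+//   // docs[0] and docs[1] now contain the two deserialized documents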
+
+/**
+ * Ensure eval is isolated.
+ *
+ * @ignore
+ * @api private
+ */
+var isolateEvalWithHash = function(functionCache, hash, functionString, object) {
+ // Contains the value we are going to set
+ var value = null;
+
+ // Check for cache hit, eval if missing and return cached function
+ if(functionCache[hash] == null) {
+ eval("value = " + functionString);
+ functionCache[hash] = value;
+ }
+ // Set the object
+ return functionCache[hash].bind(object);
+}
+
+/**
+ * Ensure eval is isolated.
+ *
+ * @ignore
+ * @api private
+ */
+var isolateEval = function(functionString) {
+ // Contains the value we are going to set
+ var value = null;
+ // Eval the function
+ eval("value = " + functionString);
+ return value;
+}
+
+/**
+ * Convert Uint8Array to String
+ *
+ * @ignore
+ * @api private
+ */
+var convertUint8ArrayToUtf8String = function(byteArray, startIndex, endIndex) {
+ return BinaryParser.decode_utf8(convertArraytoUtf8BinaryString(byteArray, startIndex, endIndex));
+}
+
+var convertArraytoUtf8BinaryString = function(byteArray, startIndex, endIndex) {
+ var result = "";
+ for(var i = startIndex; i < endIndex; i++) {
+ result = result + String.fromCharCode(byteArray[i]);
+ }
+
+ return result;
+};
+
+/**
+ * Deserialize data as BSON.
+ *
+ * Options
+ * - **evalFunctions** {Boolean, default:false}, evaluate functions in the BSON document scoped to the object deserialized.
+ * - **cacheFunctions** {Boolean, default:false}, cache evaluated functions for reuse.
+ * - **cacheFunctionsCrc32** {Boolean, default:false}, use a crc32 code for caching, otherwise use the string of the function.
+ * - **promoteLongs** {Boolean, default:true}, when deserializing, a Long is converted to a Number if it fits within 53 bits
+ *
+ * @param {Buffer} buffer the buffer containing the serialized set of BSON documents.
+ * @param {Object} [options] additional options used for the deserialization.
+ * @param {Boolean} [isArray] ignore used for recursive parsing.
+ * @return {Object} returns the deserialized Javascript Object.
+ * @api public
+ */
+BSON.deserialize = function(buffer, options, isArray) {
+ // Options
+ options = options == null ? {} : options;
+ var evalFunctions = options['evalFunctions'] == null ? false : options['evalFunctions'];
+ var cacheFunctions = options['cacheFunctions'] == null ? false : options['cacheFunctions'];
+ var cacheFunctionsCrc32 = options['cacheFunctionsCrc32'] == null ? false : options['cacheFunctionsCrc32'];
+  var promoteLongs = options['promoteLongs'] == null ? true : options['promoteLongs'];
+
+  // Validate that we have at least 5 bytes of buffer (the minimum size of a BSON document)
+ if(buffer.length < 5) throw new Error("corrupt bson message < 5 bytes long");
+
+ // Set up index
+ var index = typeof options['index'] == 'number' ? options['index'] : 0;
+ // Reads in a C style string
+ var readCStyleString = function() {
+ // Get the start search index
+ var i = index;
+ // Locate the end of the c string
+ while(buffer[i] !== 0x00) { i++ }
+ // Grab utf8 encoded string
+ var string = supportsBuffer && Buffer.isBuffer(buffer) ? buffer.toString('utf8', index, i) : convertUint8ArrayToUtf8String(buffer, index, i);
+ // Update index position
+ index = i + 1;
+ // Return string
+ return string;
+ }
+
+ // Create holding object
+ var object = isArray ? [] : {};
+
+ // Read the document size
+ var size = buffer[index++] | buffer[index++] << 8 | buffer[index++] << 16 | buffer[index++] << 24;
+
+ // Ensure buffer is valid size
+ if(size < 5 || size > buffer.length) throw new Error("corrupt bson message");
+
+  // While we have more data left, keep parsing
+ while(true) {
+ // Read the type
+ var elementType = buffer[index++];
+ // If we get a zero it's the last byte, exit
+ if(elementType == 0) break;
+ // Read the name of the field
+ var name = readCStyleString();
+ // Switch on the type
+ switch(elementType) {
+ case BSON.BSON_DATA_OID:
+ var string = supportsBuffer && Buffer.isBuffer(buffer) ? buffer.toString('binary', index, index + 12) : convertArraytoUtf8BinaryString(buffer, index, index + 12);
+ // Decode the oid
+ object[name] = new ObjectID(string);
+ // Update index
+ index = index + 12;
+ break;
+ case BSON.BSON_DATA_STRING:
+ // Read the content of the field
+ var stringSize = buffer[index++] | buffer[index++] << 8 | buffer[index++] << 16 | buffer[index++] << 24;
+ // Add string to object
+ object[name] = supportsBuffer && Buffer.isBuffer(buffer) ? buffer.toString('utf8', index, index + stringSize - 1) : convertUint8ArrayToUtf8String(buffer, index, index + stringSize - 1);
+ // Update parse index position
+ index = index + stringSize;
+ break;
+ case BSON.BSON_DATA_INT:
+ // Decode the 32bit value
+ object[name] = buffer[index++] | buffer[index++] << 8 | buffer[index++] << 16 | buffer[index++] << 24;
+ break;
+ case BSON.BSON_DATA_NUMBER:
+ // Decode the double value
+ object[name] = readIEEE754(buffer, index, 'little', 52, 8);
+ // Update the index
+ index = index + 8;
+ break;
+ case BSON.BSON_DATA_DATE:
+ // Unpack the low and high bits
+ var lowBits = buffer[index++] | buffer[index++] << 8 | buffer[index++] << 16 | buffer[index++] << 24;
+ var highBits = buffer[index++] | buffer[index++] << 8 | buffer[index++] << 16 | buffer[index++] << 24;
+ // Set date object
+ object[name] = new Date(new Long(lowBits, highBits).toNumber());
+ break;
+ case BSON.BSON_DATA_BOOLEAN:
+ // Parse the boolean value
+ object[name] = buffer[index++] == 1;
+ break;
+ case BSON.BSON_DATA_NULL:
+        // Set the value to null
+ object[name] = null;
+ break;
+ case BSON.BSON_DATA_BINARY:
+ // Decode the size of the binary blob
+ var binarySize = buffer[index++] | buffer[index++] << 8 | buffer[index++] << 16 | buffer[index++] << 24;
+ // Decode the subtype
+ var subType = buffer[index++];
+ // Decode as raw Buffer object if options specifies it
+ if(buffer['slice'] != null) {
+          // If we have subtype 2 the next 4 bytes hold the size of the byte array
+ if(subType == Binary.SUBTYPE_BYTE_ARRAY) {
+ binarySize = buffer[index++] | buffer[index++] << 8 | buffer[index++] << 16 | buffer[index++] << 24;
+ }
+ // Slice the data
+ object[name] = new Binary(buffer.slice(index, index + binarySize), subType);
+ } else {
+ var _buffer = typeof Uint8Array != 'undefined' ? new Uint8Array(new ArrayBuffer(binarySize)) : new Array(binarySize);
+          // If we have subtype 2 the next 4 bytes hold the size of the byte array
+ if(subType == Binary.SUBTYPE_BYTE_ARRAY) {
+ binarySize = buffer[index++] | buffer[index++] << 8 | buffer[index++] << 16 | buffer[index++] << 24;
+ }
+ // Copy the data
+ for(var i = 0; i < binarySize; i++) {
+ _buffer[i] = buffer[index + i];
+ }
+ // Create the binary object
+ object[name] = new Binary(_buffer, subType);
+ }
+ // Update the index
+ index = index + binarySize;
+ break;
+ case BSON.BSON_DATA_ARRAY:
+ options['index'] = index;
+ // Decode the size of the array document
+ var objectSize = buffer[index] | buffer[index + 1] << 8 | buffer[index + 2] << 16 | buffer[index + 3] << 24;
+ // Set the array to the object
+ object[name] = BSON.deserialize(buffer, options, true);
+ // Adjust the index
+ index = index + objectSize;
+ break;
+ case BSON.BSON_DATA_OBJECT:
+ options['index'] = index;
+ // Decode the size of the object document
+ var objectSize = buffer[index] | buffer[index + 1] << 8 | buffer[index + 2] << 16 | buffer[index + 3] << 24;
+ // Set the array to the object
+ object[name] = BSON.deserialize(buffer, options, false);
+ // Adjust the index
+ index = index + objectSize;
+ break;
+ case BSON.BSON_DATA_REGEXP:
+ // Create the regexp
+ var source = readCStyleString();
+ var regExpOptions = readCStyleString();
+ // For each option add the corresponding one for javascript
+ var optionsArray = new Array(regExpOptions.length);
+
+ // Parse options
+ for(var i = 0; i < regExpOptions.length; i++) {
+ switch(regExpOptions[i]) {
+ case 'm':
+ optionsArray[i] = 'm';
+ break;
+ case 's':
+ optionsArray[i] = 'g';
+ break;
+ case 'i':
+ optionsArray[i] = 'i';
+ break;
+ }
+ }
+
+ object[name] = new RegExp(source, optionsArray.join(''));
+ break;
+ case BSON.BSON_DATA_LONG:
+ // Unpack the low and high bits
+ var lowBits = buffer[index++] | buffer[index++] << 8 | buffer[index++] << 16 | buffer[index++] << 24;
+ var highBits = buffer[index++] | buffer[index++] << 8 | buffer[index++] << 16 | buffer[index++] << 24;
+ // Create long object
+ var long = new Long(lowBits, highBits);
+ // Promote the long if possible
+ if(promoteLongs) {
+ object[name] = long.lessThanOrEqual(JS_INT_MAX_LONG) && long.greaterThanOrEqual(JS_INT_MIN_LONG) ? long.toNumber() : long;
+ } else {
+ object[name] = long;
+ }
+ break;
+ case BSON.BSON_DATA_SYMBOL:
+ // Read the content of the field
+ var stringSize = buffer[index++] | buffer[index++] << 8 | buffer[index++] << 16 | buffer[index++] << 24;
+ // Add string to object
+ object[name] = new Symbol(buffer.toString('utf8', index, index + stringSize - 1));
+ // Update parse index position
+ index = index + stringSize;
+ break;
+ case BSON.BSON_DATA_TIMESTAMP:
+ // Unpack the low and high bits
+ var lowBits = buffer[index++] | buffer[index++] << 8 | buffer[index++] << 16 | buffer[index++] << 24;
+ var highBits = buffer[index++] | buffer[index++] << 8 | buffer[index++] << 16 | buffer[index++] << 24;
+ // Set the object
+ object[name] = new Timestamp(lowBits, highBits);
+ break;
+ case BSON.BSON_DATA_MIN_KEY:
+ // Parse the object
+ object[name] = new MinKey();
+ break;
+ case BSON.BSON_DATA_MAX_KEY:
+ // Parse the object
+ object[name] = new MaxKey();
+ break;
+ case BSON.BSON_DATA_CODE:
+ // Read the content of the field
+ var stringSize = buffer[index++] | buffer[index++] << 8 | buffer[index++] << 16 | buffer[index++] << 24;
+ // Function string
+ var functionString = supportsBuffer && Buffer.isBuffer(buffer) ? buffer.toString('utf8', index, index + stringSize - 1) : convertUint8ArrayToUtf8String(buffer, index, index + stringSize - 1);
+
+ // If we are evaluating the functions
+ if(evalFunctions) {
+ // Contains the value we are going to set
+ var value = null;
+ // If we have cache enabled let's look for the md5 of the function in the cache
+ if(cacheFunctions) {
+ var hash = cacheFunctionsCrc32 ? crc32(functionString) : functionString;
+ // Got to do this to avoid V8 deoptimizing the call due to finding eval
+ object[name] = isolateEvalWithHash(functionCache, hash, functionString, object);
+ } else {
+ // Set directly
+ object[name] = isolateEval(functionString);
+ }
+ } else {
+ object[name] = new Code(functionString, {});
+ }
+
+ // Update parse index position
+ index = index + stringSize;
+ break;
+ case BSON.BSON_DATA_CODE_W_SCOPE:
+ // Read the content of the field
+ var totalSize = buffer[index++] | buffer[index++] << 8 | buffer[index++] << 16 | buffer[index++] << 24;
+ var stringSize = buffer[index++] | buffer[index++] << 8 | buffer[index++] << 16 | buffer[index++] << 24;
+ // Javascript function
+ var functionString = supportsBuffer && Buffer.isBuffer(buffer) ? buffer.toString('utf8', index, index + stringSize - 1) : convertUint8ArrayToUtf8String(buffer, index, index + stringSize - 1);
+ // Update parse index position
+ index = index + stringSize;
+ // Parse the element
+ options['index'] = index;
+ // Decode the size of the object document
+ var objectSize = buffer[index] | buffer[index + 1] << 8 | buffer[index + 2] << 16 | buffer[index + 3] << 24;
+ // Decode the scope object
+ var scopeObject = BSON.deserialize(buffer, options, false);
+ // Adjust the index
+ index = index + objectSize;
+
+ // If we are evaluating the functions
+ if(evalFunctions) {
+ // Contains the value we are going to set
+ var value = null;
+ // If we have cache enabled let's look for the md5 of the function in the cache
+ if(cacheFunctions) {
+ var hash = cacheFunctionsCrc32 ? crc32(functionString) : functionString;
+ // Got to do this to avoid V8 deoptimizing the call due to finding eval
+ object[name] = isolateEvalWithHash(functionCache, hash, functionString, object);
+ } else {
+ // Set directly
+ object[name] = isolateEval(functionString);
+ }
+
+ // Set the scope on the object
+ object[name].scope = scopeObject;
+ } else {
+ object[name] = new Code(functionString, scopeObject);
+ }
+
+ // Add string to object
+ break;
+ }
+ }
+
+ // Check if we have a db ref object
+ if(object['$id'] != null) object = new DBRef(object['$ref'], object['$id'], object['$db']);
+
+ // Return the final objects
+ return object;
+}
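+
+// Usage sketch (illustrative only): round-trip a document through serialize/deserialize.
+//
+//   var buf = BSON.serialize({ ok: true, n: 42 }, false, true, false);
+//   var doc = BSON.deserialize(buf, { promoteLongs: true });
+//   // doc => { ok: true, n: 42 }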
+
+/**
+ * Check if key name is valid.
+ *
+ * @ignore
+ * @api private
+ */
+BSON.checkKey = function checkKey (key, dollarsAndDotsOk) {
+ if (!key.length) return;
+ // Check if we have a legal key for the object
+ if (!!~key.indexOf("\x00")) {
+ // The BSON spec doesn't allow keys with null bytes because keys are
+ // null-terminated.
+ throw Error("key " + key + " must not contain null bytes");
+ }
+ if (!dollarsAndDotsOk) {
+ if('$' == key[0]) {
+ throw Error("key " + key + " must not start with '$'");
+ } else if (!!~key.indexOf('.')) {
+ throw Error("key " + key + " must not contain '.'");
+ }
+ }
+};
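+
+// Usage sketch (illustrative only): checkKey throws for keys BSON cannot store safely.
+//
+//   BSON.checkKey("name", false);     // ok, returns undefined
+//   // BSON.checkKey("$set", false);  // would throw: key must not start with '$'
+//   // BSON.checkKey("a.b", false);   // would throw: key must not contain '.'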
+
+/**
+ * Deserialize data as BSON.
+ *
+ * Options
+ * - **evalFunctions** {Boolean, default:false}, evaluate functions in the BSON document scoped to the object deserialized.
+ * - **cacheFunctions** {Boolean, default:false}, cache evaluated functions for reuse.
+ * - **cacheFunctionsCrc32** {Boolean, default:false}, use a crc32 code for caching, otherwise use the string of the function.
+ *
+ * @param {Buffer} buffer the buffer containing the serialized set of BSON documents.
+ * @param {Object} [options] additional options used for the deserialization.
+ * @param {Boolean} [isArray] ignore; used internally for recursive parsing.
+ * @return {Object} returns the deserialized Javascript Object.
+ * @api public
+ */
+BSON.prototype.deserialize = function(data, options) {
+ return BSON.deserialize(data, options);
+}
+
+/**
+ * Deserialize stream data as BSON documents.
+ *
+ * Options
+ * - **evalFunctions** {Boolean, default:false}, evaluate functions in the BSON document scoped to the object deserialized.
+ * - **cacheFunctions** {Boolean, default:false}, cache evaluated functions for reuse.
+ * - **cacheFunctionsCrc32** {Boolean, default:false}, use a crc32 code for caching, otherwise use the string of the function.
+ *
+ * @param {Buffer} data the buffer containing the serialized set of BSON documents.
+ * @param {Number} startIndex the start index in the data Buffer where the deserialization is to start.
+ * @param {Number} numberOfDocuments number of documents to deserialize.
+ * @param {Array} documents an array where to store the deserialized documents.
+ * @param {Number} docStartIndex the index in the documents array from where to start inserting documents.
+ * @param {Object} [options] additional options used for the deserialization.
+ * @return {Number} returns the next index in the buffer after deserialization **x** numbers of documents.
+ * @api public
+ */
+BSON.prototype.deserializeStream = function(data, startIndex, numberOfDocuments, documents, docStartIndex, options) {
+ return BSON.deserializeStream(data, startIndex, numberOfDocuments, documents, docStartIndex, options);
+}
+
+/**
+ * Serialize a Javascript object.
+ *
+ * @param {Object} object the Javascript object to serialize.
+ * @param {Boolean} checkKeys the serializer will check if keys are valid.
+ * @param {Boolean} asBuffer return the serialized object as a Buffer object **(ignore)**.
+ * @param {Boolean} serializeFunctions serialize the javascript functions **(default:false)**.
+ * @return {Buffer} returns the Buffer object containing the serialized object.
+ * @api public
+ */
+BSON.prototype.serialize = function(object, checkKeys, asBuffer, serializeFunctions) {
+ return BSON.serialize(object, checkKeys, asBuffer, serializeFunctions);
+}
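+
+// Minimal round-trip sketch for serialize/deserialize, kept as a comment so it
+// does not execute inside the library; the variable names are illustrative.
+//
+//   var bson = new BSON();
+//   var buf = bson.serialize({hello: 'world'}, true, true, false);
+//   bson.deserialize(buf);   // => { hello: 'world' }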
+
+/**
+ * Calculate the bson size for a passed in Javascript object.
+ *
+ * @param {Object} object the Javascript object to calculate the BSON byte size for.
+ * @param {Boolean} [serializeFunctions] serialize all functions in the object **(default:false)**.
+ * @return {Number} returns the number of bytes the BSON object will take up.
+ * @api public
+ */
+BSON.prototype.calculateObjectSize = function(object, serializeFunctions) {
+ return BSON.calculateObjectSize(object, serializeFunctions);
+}
+
+/**
+ * Serialize a Javascript object using a predefined Buffer and index into the buffer, useful when pre-allocating the space for serialization.
+ *
+ * @param {Object} object the Javascript object to serialize.
+ * @param {Boolean} checkKeys the serializer will check if keys are valid.
+ * @param {Buffer} buffer the Buffer you pre-allocated to store the serialized BSON object.
+ * @param {Number} startIndex the index in the buffer where we wish to start serializing into.
+ * @param {Boolean} serializeFunctions serialize the javascript functions **(default:false)**.
+ * @return {Number} returns the new write index in the Buffer.
+ * @api public
+ */
+BSON.prototype.serializeWithBufferAndIndex = function(object, checkKeys, buffer, startIndex, serializeFunctions) {
+ return BSON.serializeWithBufferAndIndex(object, checkKeys, buffer, startIndex, serializeFunctions);
+}
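+
+// Commented sketch of the pre-allocated buffer path described above:
+// calculateObjectSize reports the exact byte length, so a Buffer of that size
+// can be filled in place (names are illustrative).
+//
+//   var bson = new BSON();
+//   var doc = {a: 1};
+//   var buffer = new Buffer(bson.calculateObjectSize(doc, false));
+//   var idx = bson.serializeWithBufferAndIndex(doc, false, buffer, 0, false);
+//   // idx is the write index reported back by the serializer (see @return above)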
+
+/**
+ * @ignore
+ * @api private
+ */
+exports.Code = Code;
+exports.Symbol = Symbol;
+exports.BSON = BSON;
+exports.DBRef = DBRef;
+exports.Binary = Binary;
+exports.ObjectID = ObjectID;
+exports.Long = Long;
+exports.Timestamp = Timestamp;
+exports.Double = Double;
+exports.MinKey = MinKey;
+exports.MaxKey = MaxKey;
+
+},
+
+
+
+'code': function(module, exports, global, require, undefined){
+ /**
+ * A class representation of the BSON Code type.
+ *
+ * @class Represents the BSON Code type.
+ * @param {String|Function} code a string or function.
+ * @param {Object} [scope] an optional scope for the function.
+ * @return {Code}
+ */
+function Code(code, scope) {
+ if(!(this instanceof Code)) return new Code(code, scope);
+
+ this._bsontype = 'Code';
+ this.code = code;
+ this.scope = scope == null ? {} : scope;
+};
+
+/**
+ * @ignore
+ * @api private
+ */
+Code.prototype.toJSON = function() {
+ return {scope:this.scope, code:this.code};
+}
+
+exports.Code = Code;
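+
+// Illustrative sketch (commented): Code wraps server-side JavaScript plus an
+// optional scope object of variables made available to it when evaluated.
+//
+//   var c = new Code('function () { return x; }', {x: 1});
+//   c.toJSON();   // => { scope: {x: 1}, code: 'function () { return x; }' }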
+},
+
+
+
+'db_ref': function(module, exports, global, require, undefined){
+ /**
+ * A class representation of the BSON DBRef type.
+ *
+ * @class Represents the BSON DBRef type.
+ * @param {String} namespace the collection name.
+ * @param {ObjectID} oid the reference ObjectID.
+ * @param {String} [db] optional db name, if omitted the reference is local to the current db.
+ * @return {DBRef}
+ */
+function DBRef(namespace, oid, db) {
+ if(!(this instanceof DBRef)) return new DBRef(namespace, oid, db);
+
+ this._bsontype = 'DBRef';
+ this.namespace = namespace;
+ this.oid = oid;
+ this.db = db;
+};
+
+/**
+ * @ignore
+ * @api private
+ */
+DBRef.prototype.toJSON = function() {
+ return {
+ '$ref':this.namespace,
+ '$id':this.oid,
+ '$db':this.db == null ? '' : this.db
+ };
+}
+
+exports.DBRef = DBRef;
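+
+// Illustrative sketch (commented): a DBRef only records the target collection,
+// id and optional db name, which toJSON exposes as the conventional
+// $ref/$id/$db fields recognised by the deserializer. someObjectId below is a
+// placeholder for any id value.
+//
+//   var ref = new DBRef('users', someObjectId, 'test');
+//   ref.toJSON();   // => { '$ref': 'users', '$id': someObjectId, '$db': 'test' }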
+},
+
+
+
+'double': function(module, exports, global, require, undefined){
+ /**
+ * A class representation of the BSON Double type.
+ *
+ * @class Represents the BSON Double type.
+ * @param {Number} value the number we want to represent as a double.
+ * @return {Double}
+ */
+function Double(value) {
+ if(!(this instanceof Double)) return new Double(value);
+
+ this._bsontype = 'Double';
+ this.value = value;
+}
+
+/**
+ * Access the number value.
+ *
+ * @return {Number} returns the wrapped double number.
+ * @api public
+ */
+Double.prototype.valueOf = function() {
+ return this.value;
+};
+
+/**
+ * @ignore
+ * @api private
+ */
+Double.prototype.toJSON = function() {
+ return this.value;
+}
+
+exports.Double = Double;
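+
+// Illustrative sketch (commented): Double marks a value so the serializer can
+// keep it as a BSON double even when it happens to be a whole number, since
+// plain JavaScript numbers carry no int/double distinction.
+//
+//   var d = new Double(2);
+//   d.valueOf();   // => 2, behaves like the wrapped number in arithmetic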
+},
+
+
+
+'float_parser': function(module, exports, global, require, undefined){
+ // Copyright (c) 2008, Fair Oaks Labs, Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are met:
+//
+// * Redistributions of source code must retain the above copyright notice,
+// this list of conditions and the following disclaimer.
+//
+// * Redistributions in binary form must reproduce the above copyright notice,
+// this list of conditions and the following disclaimer in the documentation
+// and/or other materials provided with the distribution.
+//
+// * Neither the name of Fair Oaks Labs, Inc. nor the names of its contributors
+// may be used to endorse or promote products derived from this software
+// without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+// POSSIBILITY OF SUCH DAMAGE.
+//
+//
+// Modifications to writeIEEE754 to support negative zeroes made by Brian White
+
+var readIEEE754 = function(buffer, offset, endian, mLen, nBytes) {
+ var e, m,
+ bBE = (endian === 'big'),
+ eLen = nBytes * 8 - mLen - 1,
+ eMax = (1 << eLen) - 1,
+ eBias = eMax >> 1,
+ nBits = -7,
+ i = bBE ? 0 : (nBytes - 1),
+ d = bBE ? 1 : -1,
+ s = buffer[offset + i];
+
+ i += d;
+
+ e = s & ((1 << (-nBits)) - 1);
+ s >>= (-nBits);
+ nBits += eLen;
+ for (; nBits > 0; e = e * 256 + buffer[offset + i], i += d, nBits -= 8);
+
+ m = e & ((1 << (-nBits)) - 1);
+ e >>= (-nBits);
+ nBits += mLen;
+ for (; nBits > 0; m = m * 256 + buffer[offset + i], i += d, nBits -= 8);
+
+ if (e === 0) {
+ e = 1 - eBias;
+ } else if (e === eMax) {
+ return m ? NaN : ((s ? -1 : 1) * Infinity);
+ } else {
+ m = m + Math.pow(2, mLen);
+ e = e - eBias;
+ }
+ return (s ? -1 : 1) * m * Math.pow(2, e - mLen);
+};
+
+var writeIEEE754 = function(buffer, value, offset, endian, mLen, nBytes) {
+ var e, m, c,
+ bBE = (endian === 'big'),
+ eLen = nBytes * 8 - mLen - 1,
+ eMax = (1 << eLen) - 1,
+ eBias = eMax >> 1,
+ rt = (mLen === 23 ? Math.pow(2, -24) - Math.pow(2, -77) : 0),
+ i = bBE ? (nBytes-1) : 0,
+ d = bBE ? -1 : 1,
+ s = value < 0 || (value === 0 && 1 / value < 0) ? 1 : 0;
+
+ value = Math.abs(value);
+
+ if (isNaN(value) || value === Infinity) {
+ m = isNaN(value) ? 1 : 0;
+ e = eMax;
+ } else {
+ e = Math.floor(Math.log(value) / Math.LN2);
+ if (value * (c = Math.pow(2, -e)) < 1) {
+ e--;
+ c *= 2;
+ }
+ if (e+eBias >= 1) {
+ value += rt / c;
+ } else {
+ value += rt * Math.pow(2, 1 - eBias);
+ }
+ if (value * c >= 2) {
+ e++;
+ c /= 2;
+ }
+
+ if (e + eBias >= eMax) {
+ m = 0;
+ e = eMax;
+ } else if (e + eBias >= 1) {
+ m = (value * c - 1) * Math.pow(2, mLen);
+ e = e + eBias;
+ } else {
+ m = value * Math.pow(2, eBias - 1) * Math.pow(2, mLen);
+ e = 0;
+ }
+ }
+
+ for (; mLen >= 8; buffer[offset + i] = m & 0xff, i += d, m /= 256, mLen -= 8);
+
+ e = (e << mLen) | m;
+ eLen += mLen;
+ for (; eLen > 0; buffer[offset + i] = e & 0xff, i += d, e /= 256, eLen -= 8);
+
+ buffer[offset + i - d] |= s * 128;
+};
+
+exports.readIEEE754 = readIEEE754;
+exports.writeIEEE754 = writeIEEE754;
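+
+// Commented round-trip sketch for the helpers above: with mLen=52 and nBytes=8
+// writeIEEE754 lays out a little-endian IEEE 754 double, and readIEEE754 with
+// the same arguments recovers it (including negative zero, per the note above).
+//
+//   var buf = new Buffer(8);
+//   writeIEEE754(buf, 123.456, 0, 'little', 52, 8);
+//   readIEEE754(buf, 0, 'little', 52, 8);   // => 123.456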
+},
+
+
+
+'index': function(module, exports, global, require, undefined){
+ try {
+ exports.BSONPure = require('./bson');
+ exports.BSONNative = require('../../ext');
+} catch(err) {
+ // do nothing
+}
+
+[ './binary_parser'
+ , './binary'
+ , './code'
+ , './db_ref'
+ , './double'
+ , './max_key'
+ , './min_key'
+ , './objectid'
+ , './symbol'
+ , './timestamp'
+ , './long'].forEach(function (path) {
+ var module = require('./' + path);
+ for (var i in module) {
+ exports[i] = module[i];
+ }
+});
+
+// Exports all the classes for the native BSON parser
+exports.native = function() {
+ var classes = {};
+ // Map all the classes
+ [ './binary_parser'
+ , './binary'
+ , './code'
+ , './db_ref'
+ , './double'
+ , './max_key'
+ , './min_key'
+ , './objectid'
+ , './symbol'
+ , './timestamp'
+ , './long'
+ , '../../ext'
+].forEach(function (path) {
+ var module = require('./' + path);
+ for (var i in module) {
+ classes[i] = module[i];
+ }
+ });
+ // Return classes list
+ return classes;
+}
+
+// Exports all the classes for the PURE JS BSON Parser
+exports.pure = function() {
+ var classes = {};
+ // Map all the classes
+ [ './binary_parser'
+ , './binary'
+ , './code'
+ , './db_ref'
+ , './double'
+ , './max_key'
+ , './min_key'
+ , './objectid'
+ , './symbol'
+ , './timestamp'
+ , './long'
+ , '././bson'].forEach(function (path) {
+ var module = require('./' + path);
+ for (var i in module) {
+ classes[i] = module[i];
+ }
+ });
+ // Return classes list
+ return classes;
+}
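+
+// Commented sketch of how the two accessors above are meant to be used: both
+// return the same class map, backed either by the pure JS parser in this file
+// or by the compiled extension.
+//
+//   var bson = require('bson');        // or however this index file resolves
+//   var BSON = bson.pure().BSON;       // class map backed by the pure JS parser
+//   // bson.native().BSON is the same map backed by ../../ext and will throw
+//   // if that extension was never built.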
+
+},
+
+
+
+'long': function(module, exports, global, require, undefined){
+ // Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// Copyright 2009 Google Inc. All Rights Reserved
+
+/**
+ * Defines a Long class for representing a 64-bit two's-complement
+ * integer value, which faithfully simulates the behavior of a Java "Long". This
+ * implementation is derived from LongLib in GWT.
+ *
+ * Constructs a 64-bit two's-complement integer, given its low and high 32-bit
+ * values as *signed* integers. See the from* functions below for more
+ * convenient ways of constructing Longs.
+ *
+ * The internal representation of a Long is the two given signed, 32-bit values.
+ * We use 32-bit pieces because these are the size of integers on which
+ * Javascript performs bit-operations. For operations like addition and
+ * multiplication, we split each number into 16-bit pieces, which can easily be
+ * multiplied within Javascript's floating-point representation without overflow
+ * or change in sign.
+ *
+ * In the algorithms below, we frequently reduce the negative case to the
+ * positive case by negating the input(s) and then post-processing the result.
+ * Note that we must ALWAYS check specially whether those values are MIN_VALUE
+ * (-2^63) because -MIN_VALUE == MIN_VALUE (since 2^63 cannot be represented as
+ * a positive number, it overflows back into a negative). Not handling this
+ * case would often result in infinite recursion.
+ *
+ * @class Represents the BSON Long type.
+ * @param {Number} low the low (signed) 32 bits of the Long.
+ * @param {Number} high the high (signed) 32 bits of the Long.
+ */
+function Long(low, high) {
+ if(!(this instanceof Long)) return new Long(low, high);
+
+ this._bsontype = 'Long';
+ /**
+ * @type {number}
+ * @api private
+ */
+ this.low_ = low | 0; // force into 32 signed bits.
+
+ /**
+ * @type {number}
+ * @api private
+ */
+ this.high_ = high | 0; // force into 32 signed bits.
+};
+
+/**
+ * Return the int value.
+ *
+ * @return {Number} the value, assuming it is a 32-bit integer.
+ * @api public
+ */
+Long.prototype.toInt = function() {
+ return this.low_;
+};
+
+/**
+ * Return the Number value.
+ *
+ * @return {Number} the closest floating-point representation to this value.
+ * @api public
+ */
+Long.prototype.toNumber = function() {
+ return this.high_ * Long.TWO_PWR_32_DBL_ +
+ this.getLowBitsUnsigned();
+};
+
+/**
+ * Return the JSON value.
+ *
+ * @return {String} the JSON representation.
+ * @api public
+ */
+Long.prototype.toJSON = function() {
+ return this.toString();
+}
+
+/**
+ * Return the String value.
+ *
+ * @param {Number} [opt_radix] the radix in which the text should be written.
+ * @return {String} the textual representation of this value.
+ * @api public
+ */
+Long.prototype.toString = function(opt_radix) {
+ var radix = opt_radix || 10;
+ if (radix < 2 || 36 < radix) {
+ throw Error('radix out of range: ' + radix);
+ }
+
+ if (this.isZero()) {
+ return '0';
+ }
+
+ if (this.isNegative()) {
+ if (this.equals(Long.MIN_VALUE)) {
+ // We need to change the Long value before it can be negated, so we remove
+ // the bottom-most digit in this base and then recurse to do the rest.
+ var radixLong = Long.fromNumber(radix);
+ var div = this.div(radixLong);
+ var rem = div.multiply(radixLong).subtract(this);
+ return div.toString(radix) + rem.toInt().toString(radix);
+ } else {
+ return '-' + this.negate().toString(radix);
+ }
+ }
+
+ // Do several (6) digits each time through the loop, so as to
+ // minimize the calls to the very expensive emulated div.
+ var radixToPower = Long.fromNumber(Math.pow(radix, 6));
+
+ var rem = this;
+ var result = '';
+ while (true) {
+ var remDiv = rem.div(radixToPower);
+ var intval = rem.subtract(remDiv.multiply(radixToPower)).toInt();
+ var digits = intval.toString(radix);
+
+ rem = remDiv;
+ if (rem.isZero()) {
+ return digits + result;
+ } else {
+ while (digits.length < 6) {
+ digits = '0' + digits;
+ }
+ result = '' + digits + result;
+ }
+ }
+};
+
+/**
+ * Return the high 32-bits value.
+ *
+ * @return {Number} the high 32-bits as a signed value.
+ * @api public
+ */
+Long.prototype.getHighBits = function() {
+ return this.high_;
+};
+
+/**
+ * Return the low 32-bits value.
+ *
+ * @return {Number} the low 32-bits as a signed value.
+ * @api public
+ */
+Long.prototype.getLowBits = function() {
+ return this.low_;
+};
+
+/**
+ * Return the low unsigned 32-bits value.
+ *
+ * @return {Number} the low 32-bits as an unsigned value.
+ * @api public
+ */
+Long.prototype.getLowBitsUnsigned = function() {
+ return (this.low_ >= 0) ?
+ this.low_ : Long.TWO_PWR_32_DBL_ + this.low_;
+};
+
+/**
+ * Returns the number of bits needed to represent the absolute value of this Long.
+ *
+ * @return {Number} Returns the number of bits needed to represent the absolute value of this Long.
+ * @api public
+ */
+Long.prototype.getNumBitsAbs = function() {
+ if (this.isNegative()) {
+ if (this.equals(Long.MIN_VALUE)) {
+ return 64;
+ } else {
+ return this.negate().getNumBitsAbs();
+ }
+ } else {
+ var val = this.high_ != 0 ? this.high_ : this.low_;
+ for (var bit = 31; bit > 0; bit--) {
+ if ((val & (1 << bit)) != 0) {
+ break;
+ }
+ }
+ return this.high_ != 0 ? bit + 33 : bit + 1;
+ }
+};
+
+/**
+ * Return whether this value is zero.
+ *
+ * @return {Boolean} whether this value is zero.
+ * @api public
+ */
+Long.prototype.isZero = function() {
+ return this.high_ == 0 && this.low_ == 0;
+};
+
+/**
+ * Return whether this value is negative.
+ *
+ * @return {Boolean} whether this value is negative.
+ * @api public
+ */
+Long.prototype.isNegative = function() {
+ return this.high_ < 0;
+};
+
+/**
+ * Return whether this value is odd.
+ *
+ * @return {Boolean} whether this value is odd.
+ * @api public
+ */
+Long.prototype.isOdd = function() {
+ return (this.low_ & 1) == 1;
+};
+
+/**
+ * Return whether this Long equals the other
+ *
+ * @param {Long} other Long to compare against.
+ * @return {Boolean} whether this Long equals the other
+ * @api public
+ */
+Long.prototype.equals = function(other) {
+ return (this.high_ == other.high_) && (this.low_ == other.low_);
+};
+
+/**
+ * Return whether this Long does not equal the other.
+ *
+ * @param {Long} other Long to compare against.
+ * @return {Boolean} whether this Long does not equal the other.
+ * @api public
+ */
+Long.prototype.notEquals = function(other) {
+ return (this.high_ != other.high_) || (this.low_ != other.low_);
+};
+
+/**
+ * Return whether this Long is less than the other.
+ *
+ * @param {Long} other Long to compare against.
+ * @return {Boolean} whether this Long is less than the other.
+ * @api public
+ */
+Long.prototype.lessThan = function(other) {
+ return this.compare(other) < 0;
+};
+
+/**
+ * Return whether this Long is less than or equal to the other.
+ *
+ * @param {Long} other Long to compare against.
+ * @return {Boolean} whether this Long is less than or equal to the other.
+ * @api public
+ */
+Long.prototype.lessThanOrEqual = function(other) {
+ return this.compare(other) <= 0;
+};
+
+/**
+ * Return whether this Long is greater than the other.
+ *
+ * @param {Long} other Long to compare against.
+ * @return {Boolean} whether this Long is greater than the other.
+ * @api public
+ */
+Long.prototype.greaterThan = function(other) {
+ return this.compare(other) > 0;
+};
+
+/**
+ * Return whether this Long is greater than or equal to the other.
+ *
+ * @param {Long} other Long to compare against.
+ * @return {Boolean} whether this Long is greater than or equal to the other.
+ * @api public
+ */
+Long.prototype.greaterThanOrEqual = function(other) {
+ return this.compare(other) >= 0;
+};
+
+/**
+ * Compares this Long with the given one.
+ *
+ * @param {Long} other Long to compare against.
+ * @return {Number} 0 if they are the same, 1 if this is greater, and -1 if the given one is greater.
+ * @api public
+ */
+Long.prototype.compare = function(other) {
+ if (this.equals(other)) {
+ return 0;
+ }
+
+ var thisNeg = this.isNegative();
+ var otherNeg = other.isNegative();
+ if (thisNeg && !otherNeg) {
+ return -1;
+ }
+ if (!thisNeg && otherNeg) {
+ return 1;
+ }
+
+ // at this point, the signs are the same, so subtraction will not overflow
+ if (this.subtract(other).isNegative()) {
+ return -1;
+ } else {
+ return 1;
+ }
+};
+
+/**
+ * The negation of this value.
+ *
+ * @return {Long} the negation of this value.
+ * @api public
+ */
+Long.prototype.negate = function() {
+ if (this.equals(Long.MIN_VALUE)) {
+ return Long.MIN_VALUE;
+ } else {
+ return this.not().add(Long.ONE);
+ }
+};
+
+/**
+ * Returns the sum of this and the given Long.
+ *
+ * @param {Long} other Long to add to this one.
+ * @return {Long} the sum of this and the given Long.
+ * @api public
+ */
+Long.prototype.add = function(other) {
+ // Divide each number into 4 chunks of 16 bits, and then sum the chunks.
+
+ var a48 = this.high_ >>> 16;
+ var a32 = this.high_ & 0xFFFF;
+ var a16 = this.low_ >>> 16;
+ var a00 = this.low_ & 0xFFFF;
+
+ var b48 = other.high_ >>> 16;
+ var b32 = other.high_ & 0xFFFF;
+ var b16 = other.low_ >>> 16;
+ var b00 = other.low_ & 0xFFFF;
+
+ var c48 = 0, c32 = 0, c16 = 0, c00 = 0;
+ c00 += a00 + b00;
+ c16 += c00 >>> 16;
+ c00 &= 0xFFFF;
+ c16 += a16 + b16;
+ c32 += c16 >>> 16;
+ c16 &= 0xFFFF;
+ c32 += a32 + b32;
+ c48 += c32 >>> 16;
+ c32 &= 0xFFFF;
+ c48 += a48 + b48;
+ c48 &= 0xFFFF;
+ return Long.fromBits((c16 << 16) | c00, (c48 << 16) | c32);
+};
+
+/**
+ * Returns the difference of this and the given Long.
+ *
+ * @param {Long} other Long to subtract from this.
+ * @return {Long} the difference of this and the given Long.
+ * @api public
+ */
+Long.prototype.subtract = function(other) {
+ return this.add(other.negate());
+};
+
+/**
+ * Returns the product of this and the given Long.
+ *
+ * @param {Long} other Long to multiply with this.
+ * @return {Long} the product of this and the other.
+ * @api public
+ */
+Long.prototype.multiply = function(other) {
+ if (this.isZero()) {
+ return Long.ZERO;
+ } else if (other.isZero()) {
+ return Long.ZERO;
+ }
+
+ if (this.equals(Long.MIN_VALUE)) {
+ return other.isOdd() ? Long.MIN_VALUE : Long.ZERO;
+ } else if (other.equals(Long.MIN_VALUE)) {
+ return this.isOdd() ? Long.MIN_VALUE : Long.ZERO;
+ }
+
+ if (this.isNegative()) {
+ if (other.isNegative()) {
+ return this.negate().multiply(other.negate());
+ } else {
+ return this.negate().multiply(other).negate();
+ }
+ } else if (other.isNegative()) {
+ return this.multiply(other.negate()).negate();
+ }
+
+ // If both Longs are small, use float multiplication
+ if (this.lessThan(Long.TWO_PWR_24_) &&
+ other.lessThan(Long.TWO_PWR_24_)) {
+ return Long.fromNumber(this.toNumber() * other.toNumber());
+ }
+
+ // Divide each Long into 4 chunks of 16 bits, and then add up 4x4 products.
+ // We can skip products that would overflow.
+
+ var a48 = this.high_ >>> 16;
+ var a32 = this.high_ & 0xFFFF;
+ var a16 = this.low_ >>> 16;
+ var a00 = this.low_ & 0xFFFF;
+
+ var b48 = other.high_ >>> 16;
+ var b32 = other.high_ & 0xFFFF;
+ var b16 = other.low_ >>> 16;
+ var b00 = other.low_ & 0xFFFF;
+
+ var c48 = 0, c32 = 0, c16 = 0, c00 = 0;
+ c00 += a00 * b00;
+ c16 += c00 >>> 16;
+ c00 &= 0xFFFF;
+ c16 += a16 * b00;
+ c32 += c16 >>> 16;
+ c16 &= 0xFFFF;
+ c16 += a00 * b16;
+ c32 += c16 >>> 16;
+ c16 &= 0xFFFF;
+ c32 += a32 * b00;
+ c48 += c32 >>> 16;
+ c32 &= 0xFFFF;
+ c32 += a16 * b16;
+ c48 += c32 >>> 16;
+ c32 &= 0xFFFF;
+ c32 += a00 * b32;
+ c48 += c32 >>> 16;
+ c32 &= 0xFFFF;
+ c48 += a48 * b00 + a32 * b16 + a16 * b32 + a00 * b48;
+ c48 &= 0xFFFF;
+ return Long.fromBits((c16 << 16) | c00, (c48 << 16) | c32);
+};
+
+/**
+ * Returns this Long divided by the given one.
+ *
+ * @param {Long} other Long by which to divide.
+ * @return {Long} this Long divided by the given one.
+ * @api public
+ */
+Long.prototype.div = function(other) {
+ if (other.isZero()) {
+ throw Error('division by zero');
+ } else if (this.isZero()) {
+ return Long.ZERO;
+ }
+
+ if (this.equals(Long.MIN_VALUE)) {
+ if (other.equals(Long.ONE) ||
+ other.equals(Long.NEG_ONE)) {
+ return Long.MIN_VALUE; // recall that -MIN_VALUE == MIN_VALUE
+ } else if (other.equals(Long.MIN_VALUE)) {
+ return Long.ONE;
+ } else {
+ // At this point, we have |other| >= 2, so |this/other| < |MIN_VALUE|.
+ var halfThis = this.shiftRight(1);
+ var approx = halfThis.div(other).shiftLeft(1);
+ if (approx.equals(Long.ZERO)) {
+ return other.isNegative() ? Long.ONE : Long.NEG_ONE;
+ } else {
+ var rem = this.subtract(other.multiply(approx));
+ var result = approx.add(rem.div(other));
+ return result;
+ }
+ }
+ } else if (other.equals(Long.MIN_VALUE)) {
+ return Long.ZERO;
+ }
+
+ if (this.isNegative()) {
+ if (other.isNegative()) {
+ return this.negate().div(other.negate());
+ } else {
+ return this.negate().div(other).negate();
+ }
+ } else if (other.isNegative()) {
+ return this.div(other.negate()).negate();
+ }
+
+ // Repeat the following until the remainder is less than other: find a
+ // floating-point that approximates remainder / other *from below*, add this
+ // into the result, and subtract it from the remainder. It is critical that
+ // the approximate value is less than or equal to the real value so that the
+ // remainder never becomes negative.
+ var res = Long.ZERO;
+ var rem = this;
+ while (rem.greaterThanOrEqual(other)) {
+ // Approximate the result of division. This may be a little greater or
+ // smaller than the actual value.
+ var approx = Math.max(1, Math.floor(rem.toNumber() / other.toNumber()));
+
+ // We will tweak the approximate result by changing it in the 48-th digit or
+ // the smallest non-fractional digit, whichever is larger.
+ var log2 = Math.ceil(Math.log(approx) / Math.LN2);
+ var delta = (log2 <= 48) ? 1 : Math.pow(2, log2 - 48);
+
+ // Decrease the approximation until it is smaller than the remainder. Note
+ // that if it is too large, the product overflows and is negative.
+ var approxRes = Long.fromNumber(approx);
+ var approxRem = approxRes.multiply(other);
+ while (approxRem.isNegative() || approxRem.greaterThan(rem)) {
+ approx -= delta;
+ approxRes = Long.fromNumber(approx);
+ approxRem = approxRes.multiply(other);
+ }
+
+ // We know the answer can't be zero... and actually, zero would cause
+ // infinite recursion since we would make no progress.
+ if (approxRes.isZero()) {
+ approxRes = Long.ONE;
+ }
+
+ res = res.add(approxRes);
+ rem = rem.subtract(approxRem);
+ }
+ return res;
+};
+
+/**
+ * Returns this Long modulo the given one.
+ *
+ * @param {Long} other Long by which to mod.
+ * @return {Long} this Long modulo the given one.
+ * @api public
+ */
+Long.prototype.modulo = function(other) {
+ return this.subtract(this.div(other).multiply(other));
+};
+
+/**
+ * The bitwise-NOT of this value.
+ *
+ * @return {Long} the bitwise-NOT of this value.
+ * @api public
+ */
+Long.prototype.not = function() {
+ return Long.fromBits(~this.low_, ~this.high_);
+};
+
+/**
+ * Returns the bitwise-AND of this Long and the given one.
+ *
+ * @param {Long} other the Long with which to AND.
+ * @return {Long} the bitwise-AND of this and the other.
+ * @api public
+ */
+Long.prototype.and = function(other) {
+ return Long.fromBits(this.low_ & other.low_, this.high_ & other.high_);
+};
+
+/**
+ * Returns the bitwise-OR of this Long and the given one.
+ *
+ * @param {Long} other the Long with which to OR.
+ * @return {Long} the bitwise-OR of this and the other.
+ * @api public
+ */
+Long.prototype.or = function(other) {
+ return Long.fromBits(this.low_ | other.low_, this.high_ | other.high_);
+};
+
+/**
+ * Returns the bitwise-XOR of this Long and the given one.
+ *
+ * @param {Long} other the Long with which to XOR.
+ * @return {Long} the bitwise-XOR of this and the other.
+ * @api public
+ */
+Long.prototype.xor = function(other) {
+ return Long.fromBits(this.low_ ^ other.low_, this.high_ ^ other.high_);
+};
+
+/**
+ * Returns this Long with bits shifted to the left by the given amount.
+ *
+ * @param {Number} numBits the number of bits by which to shift.
+ * @return {Long} this shifted to the left by the given amount.
+ * @api public
+ */
+Long.prototype.shiftLeft = function(numBits) {
+ numBits &= 63;
+ if (numBits == 0) {
+ return this;
+ } else {
+ var low = this.low_;
+ if (numBits < 32) {
+ var high = this.high_;
+ return Long.fromBits(
+ low << numBits,
+ (high << numBits) | (low >>> (32 - numBits)));
+ } else {
+ return Long.fromBits(0, low << (numBits - 32));
+ }
+ }
+};
+
+/**
+ * Returns this Long with bits shifted to the right by the given amount.
+ *
+ * @param {Number} numBits the number of bits by which to shift.
+ * @return {Long} this shifted to the right by the given amount.
+ * @api public
+ */
+Long.prototype.shiftRight = function(numBits) {
+ numBits &= 63;
+ if (numBits == 0) {
+ return this;
+ } else {
+ var high = this.high_;
+ if (numBits < 32) {
+ var low = this.low_;
+ return Long.fromBits(
+ (low >>> numBits) | (high << (32 - numBits)),
+ high >> numBits);
+ } else {
+ return Long.fromBits(
+ high >> (numBits - 32),
+ high >= 0 ? 0 : -1);
+ }
+ }
+};
+
+/**
+ * Returns this Long with bits shifted to the right by the given amount, with zeros placed into the new leading bits.
+ *
+ * @param {Number} numBits the number of bits by which to shift.
+ * @return {Long} this shifted to the right by the given amount, with zeros placed into the new leading bits.
+ * @api public
+ */
+Long.prototype.shiftRightUnsigned = function(numBits) {
+ numBits &= 63;
+ if (numBits == 0) {
+ return this;
+ } else {
+ var high = this.high_;
+ if (numBits < 32) {
+ var low = this.low_;
+ return Long.fromBits(
+ (low >>> numBits) | (high << (32 - numBits)),
+ high >>> numBits);
+ } else if (numBits == 32) {
+ return Long.fromBits(high, 0);
+ } else {
+ return Long.fromBits(high >>> (numBits - 32), 0);
+ }
+ }
+};
+
+/**
+ * Returns a Long representing the given (32-bit) integer value.
+ *
+ * @param {Number} value the 32-bit integer in question.
+ * @return {Long} the corresponding Long value.
+ * @api public
+ */
+Long.fromInt = function(value) {
+ if (-128 <= value && value < 128) {
+ var cachedObj = Long.INT_CACHE_[value];
+ if (cachedObj) {
+ return cachedObj;
+ }
+ }
+
+ var obj = new Long(value | 0, value < 0 ? -1 : 0);
+ if (-128 <= value && value < 128) {
+ Long.INT_CACHE_[value] = obj;
+ }
+ return obj;
+};
+
+/**
+ * Returns a Long representing the given value, provided that it is a finite number. Otherwise, zero is returned.
+ *
+ * @param {Number} value the number in question.
+ * @return {Long} the corresponding Long value.
+ * @api public
+ */
+Long.fromNumber = function(value) {
+ if (isNaN(value) || !isFinite(value)) {
+ return Long.ZERO;
+ } else if (value <= -Long.TWO_PWR_63_DBL_) {
+ return Long.MIN_VALUE;
+ } else if (value + 1 >= Long.TWO_PWR_63_DBL_) {
+ return Long.MAX_VALUE;
+ } else if (value < 0) {
+ return Long.fromNumber(-value).negate();
+ } else {
+ return new Long(
+ (value % Long.TWO_PWR_32_DBL_) | 0,
+ (value / Long.TWO_PWR_32_DBL_) | 0);
+ }
+};
+
+/**
+ * Returns a Long representing the 64-bit integer that comes by concatenating the given high and low bits. Each is assumed to use 32 bits.
+ *
+ * @param {Number} lowBits the low 32-bits.
+ * @param {Number} highBits the high 32-bits.
+ * @return {Long} the corresponding Long value.
+ * @api public
+ */
+Long.fromBits = function(lowBits, highBits) {
+ return new Long(lowBits, highBits);
+};
+
+/**
+ * Returns a Long representation of the given string, written using the given radix.
+ *
+ * @param {String} str the textual representation of the Long.
+ * @param {Number} opt_radix the radix in which the text is written.
+ * @return {Long} the corresponding Long value.
+ * @api public
+ */
+Long.fromString = function(str, opt_radix) {
+ if (str.length == 0) {
+ throw Error('number format error: empty string');
+ }
+
+ var radix = opt_radix || 10;
+ if (radix < 2 || 36 < radix) {
+ throw Error('radix out of range: ' + radix);
+ }
+
+ if (str.charAt(0) == '-') {
+ return Long.fromString(str.substring(1), radix).negate();
+ } else if (str.indexOf('-') >= 0) {
+ throw Error('number format error: interior "-" character: ' + str);
+ }
+
+ // Do several (8) digits each time through the loop, so as to
+ // minimize the calls to the very expensive emulated div.
+ var radixToPower = Long.fromNumber(Math.pow(radix, 8));
+
+ var result = Long.ZERO;
+ for (var i = 0; i < str.length; i += 8) {
+ var size = Math.min(8, str.length - i);
+ var value = parseInt(str.substring(i, i + size), radix);
+ if (size < 8) {
+ var power = Long.fromNumber(Math.pow(radix, size));
+ result = result.multiply(power).add(Long.fromNumber(value));
+ } else {
+ result = result.multiply(radixToPower);
+ result = result.add(Long.fromNumber(value));
+ }
+ }
+ return result;
+};
+
+// NOTE: Common constant values ZERO, ONE, NEG_ONE, etc. are defined below the
+// from* methods on which they depend.
+
+
+/**
+ * A cache of the Long representations of small integer values.
+ * @type {Object}
+ * @api private
+ */
+Long.INT_CACHE_ = {};
+
+// NOTE: the compiler should inline these constant values below and then remove
+// these variables, so there should be no runtime penalty for these.
+
+/**
+ * Number used repeatedly below in calculations. This must appear before the
+ * first call to any from* function below.
+ * @type {number}
+ * @api private
+ */
+Long.TWO_PWR_16_DBL_ = 1 << 16;
+
+/**
+ * @type {number}
+ * @api private
+ */
+Long.TWO_PWR_24_DBL_ = 1 << 24;
+
+/**
+ * @type {number}
+ * @api private
+ */
+Long.TWO_PWR_32_DBL_ = Long.TWO_PWR_16_DBL_ * Long.TWO_PWR_16_DBL_;
+
+/**
+ * @type {number}
+ * @api private
+ */
+Long.TWO_PWR_31_DBL_ = Long.TWO_PWR_32_DBL_ / 2;
+
+/**
+ * @type {number}
+ * @api private
+ */
+Long.TWO_PWR_48_DBL_ = Long.TWO_PWR_32_DBL_ * Long.TWO_PWR_16_DBL_;
+
+/**
+ * @type {number}
+ * @api private
+ */
+Long.TWO_PWR_64_DBL_ = Long.TWO_PWR_32_DBL_ * Long.TWO_PWR_32_DBL_;
+
+/**
+ * @type {number}
+ * @api private
+ */
+Long.TWO_PWR_63_DBL_ = Long.TWO_PWR_64_DBL_ / 2;
+
+/** @type {Long} */
+Long.ZERO = Long.fromInt(0);
+
+/** @type {Long} */
+Long.ONE = Long.fromInt(1);
+
+/** @type {Long} */
+Long.NEG_ONE = Long.fromInt(-1);
+
+/** @type {Long} */
+Long.MAX_VALUE =
+ Long.fromBits(0xFFFFFFFF | 0, 0x7FFFFFFF | 0);
+
+/** @type {Long} */
+Long.MIN_VALUE = Long.fromBits(0, 0x80000000 | 0);
+
+/**
+ * @type {Long}
+ * @api private
+ */
+Long.TWO_PWR_24_ = Long.fromInt(1 << 24);
+
+/**
+ * Expose.
+ */
+exports.Long = Long;
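+
+// Commented usage sketch for the Long class above; every factory shown is
+// defined in this file.
+//
+//   var a = Long.fromNumber(2);
+//   var b = Long.fromString('9007199254740993');   // > 2^53, still exact here
+//   a.multiply(b).toString();                      // => '18014398509481986'
+//   Long.MAX_VALUE.toString(16);                   // => '7fffffffffffffff'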
+},
+
+
+
+'max_key': function(module, exports, global, require, undefined){
+ /**
+ * A class representation of the BSON MaxKey type.
+ *
+ * @class Represents the BSON MaxKey type.
+ * @return {MaxKey}
+ */
+function MaxKey() {
+ if(!(this instanceof MaxKey)) return new MaxKey();
+
+ this._bsontype = 'MaxKey';
+}
+
+exports.MaxKey = MaxKey;
+},
+
+
+
+'min_key': function(module, exports, global, require, undefined){
+ /**
+ * A class representation of the BSON MinKey type.
+ *
+ * @class Represents the BSON MinKey type.
+ * @return {MinKey}
+ */
+function MinKey() {
+ if(!(this instanceof MinKey)) return new MinKey();
+
+ this._bsontype = 'MinKey';
+}
+
+exports.MinKey = MinKey;
+},
+
+
+
+'objectid': function(module, exports, global, require, undefined){
+ /**
+ * Module dependencies.
+ */
+var BinaryParser = require('./binary_parser').BinaryParser;
+
+/**
+ * Machine id.
+ *
+ * Create a random 3-byte value (i.e. unique for this
+ * process). Other drivers use an md5 of the machine id here, but
+ * that would mean an async call to gethostname, so we don't bother.
+ */
+var MACHINE_ID = parseInt(Math.random() * 0xFFFFFF, 10);
+
+// Regular expression that checks for hex value
+var checkForHexRegExp = new RegExp("^[0-9a-fA-F]{24}$");
+
+/**
+* Create a new ObjectID instance
+*
+* @class Represents the BSON ObjectID type
+* @param {String|Number} id Can be a 24 byte hex string, 12 byte binary string or a Number.
+* @return {Object} instance of ObjectID.
+*/
+var ObjectID = function ObjectID(id, _hex) {
+ if(!(this instanceof ObjectID)) return new ObjectID(id, _hex);
+
+ this._bsontype = 'ObjectID';
+ var __id = null;
+
+ // Throw an error if it's not a valid setup
+ if(id != null && 'number' != typeof id && (id.length != 12 && id.length != 24))
+ throw new Error("Argument passed in must be a single String of 12 bytes or a string of 24 hex characters");
+
+ // Generate id based on the input
+ if(id == null || typeof id == 'number') {
+ // convert to 12 byte binary string
+ this.id = this.generate(id);
+ } else if(id != null && id.length === 12) {
+ // assume 12 byte string
+ this.id = id;
+ } else if(checkForHexRegExp.test(id)) {
+ return ObjectID.createFromHexString(id);
+ } else {
+ throw new Error("Value passed in is not a valid 24 character hex string");
+ }
+
+ if(ObjectID.cacheHexString) this.__id = this.toHexString();
+};
+
+// Allow usage of ObjectId as well as ObjectID
+var ObjectId = ObjectID;
+
+/**
+* Return the ObjectID id as a 24 byte hex string representation
+*
+* @return {String} return the 24 byte hex string representation.
+* @api public
+*/
+ObjectID.prototype.toHexString = function() {
+ if(ObjectID.cacheHexString && this.__id) return this.__id;
+
+ var hexString = ''
+ , number
+ , value;
+
+ for (var index = 0, len = this.id.length; index < len; index++) {
+ value = BinaryParser.toByte(this.id[index]);
+ number = value <= 15
+ ? '0' + value.toString(16)
+ : value.toString(16);
+ hexString = hexString + number;
+ }
+
+ if(ObjectID.cacheHexString) this.__id = hexString;
+ return hexString;
+};
+
+/**
+* Update the ObjectID index used in generating new ObjectIDs on the driver
+*
+* @return {Number} returns next index value.
+* @api private
+*/
+ObjectID.prototype.get_inc = function() {
+ return ObjectID.index = (ObjectID.index + 1) % 0xFFFFFF;
+};
+
+/**
+* Update the ObjectID index used in generating new ObjectIDs on the driver
+*
+* @return {Number} returns next index value.
+* @api private
+*/
+ObjectID.prototype.getInc = function() {
+ return this.get_inc();
+};
+
+/**
+* Generate a 12 byte id string used in ObjectIDs
+*
+* @param {Number} [time] optional parameter allowing to pass in a second based timestamp.
+* @return {String} return the 12 byte id binary string.
+* @api private
+*/
+ObjectID.prototype.generate = function(time) {
+ if ('number' == typeof time) {
+ var time4Bytes = BinaryParser.encodeInt(time, 32, true, true);
+ /* for time-based ObjectID the bytes following the time will be zeroed */
+ var machine3Bytes = BinaryParser.encodeInt(MACHINE_ID, 24, false);
+ var pid2Bytes = BinaryParser.fromShort(typeof process === 'undefined' ? Math.floor(Math.random() * 100000) : process.pid);
+ var index3Bytes = BinaryParser.encodeInt(this.get_inc(), 24, false, true);
+ } else {
+ var unixTime = parseInt(Date.now()/1000,10);
+ var time4Bytes = BinaryParser.encodeInt(unixTime, 32, true, true);
+ var machine3Bytes = BinaryParser.encodeInt(MACHINE_ID, 24, false);
+ var pid2Bytes = BinaryParser.fromShort(typeof process === 'undefined' ? Math.floor(Math.random() * 100000) : process.pid);
+ var index3Bytes = BinaryParser.encodeInt(this.get_inc(), 24, false, true);
+ }
+
+ return time4Bytes + machine3Bytes + pid2Bytes + index3Bytes;
+};
+
+/**
+* Converts the id into a 24 byte hex string for printing
+*
+* @return {String} return the 24 byte hex string representation.
+* @api private
+*/
+ObjectID.prototype.toString = function() {
+ return this.toHexString();
+};
+
+/**
+* Converts to a string representation of this Id.
+*
+* @return {String} return the 24 byte hex string representation.
+* @api private
+*/
+ObjectID.prototype.inspect = ObjectID.prototype.toString;
+
+/**
+* Converts to its JSON representation.
+*
+* @return {String} return the 24 byte hex string representation.
+* @api private
+*/
+ObjectID.prototype.toJSON = function() {
+ return this.toHexString();
+};
+
+/**
+* Compares the equality of this ObjectID with `otherID`.
+*
+* @param {Object} otherID ObjectID instance to compare against.
+* @return {Boolean} the result of comparing two ObjectIDs.
+* @api public
+*/
+ObjectID.prototype.equals = function equals (otherID) {
+ var id = (otherID instanceof ObjectID || otherID.toHexString)
+ ? otherID.id
+ : ObjectID.createFromHexString(otherID).id;
+
+ return this.id === id;
+}
+
+/**
+* Returns the generation date (accurate up to the second) at which this ID was generated.
+*
+* @return {Date} the generation date
+* @api public
+*/
+ObjectID.prototype.getTimestamp = function() {
+ var timestamp = new Date();
+ timestamp.setTime(Math.floor(BinaryParser.decodeInt(this.id.substring(0,4), 32, true, true)) * 1000);
+ return timestamp;
+}
+
+/**
+* @ignore
+* @api private
+*/
+ObjectID.index = parseInt(Math.random() * 0xFFFFFF, 10);
+
+ObjectID.createPk = function createPk () {
+ return new ObjectID();
+};
+
+/**
+* Creates an ObjectID from a second-based number, with the rest of the ObjectID zeroed out. Used for comparisons or sorting the ObjectID.
+*
+* @param {Number} time an integer number representing a number of seconds.
+* @return {ObjectID} return the created ObjectID
+* @api public
+*/
+ObjectID.createFromTime = function createFromTime (time) {
+ var id = BinaryParser.encodeInt(time, 32, true, true) +
+ BinaryParser.encodeInt(0, 64, true, true);
+ return new ObjectID(id);
+};
+
+/**
+* Creates an ObjectID from a hex string representation of an ObjectID.
+*
+* @param {String} hexString create an ObjectID from a passed in 24 character hex string.
+* @return {ObjectID} return the created ObjectID
+* @api public
+*/
+ObjectID.createFromHexString = function createFromHexString (hexString) {
+ // Throw an error if it's not a valid setup
+ if(typeof hexString === 'undefined' || hexString != null && hexString.length != 24)
+ throw new Error("Argument passed in must be a single String of 12 bytes or a string of 24 hex characters");
+
+ var len = hexString.length;
+
+ if(len > 12*2) {
+ throw new Error('Id cannot be longer than 12 bytes');
+ }
+
+ var result = ''
+ , string
+ , number;
+
+ for (var index = 0; index < len; index += 2) {
+ string = hexString.substr(index, 2);
+ number = parseInt(string, 16);
+ result += BinaryParser.fromByte(number);
+ }
+
+ return new ObjectID(result, hexString);
+};
+
+/**
+* @ignore
+*/
+Object.defineProperty(ObjectID.prototype, "generationTime", {
+ enumerable: true
+ , get: function () {
+ return Math.floor(BinaryParser.decodeInt(this.id.substring(0,4), 32, true, true));
+ }
+ , set: function (value) {
+ var value = BinaryParser.encodeInt(value, 32, true, true);
+ this.id = value + this.id.substr(4);
+ // delete this.__id;
+ this.toHexString();
+ }
+});
+
+/**
+ * Expose.
+ */
+exports.ObjectID = ObjectID;
+exports.ObjectId = ObjectID;
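+
+// Commented usage sketch for ObjectID; the timestamp comes out of the leading
+// four bytes as described in getTimestamp/createFromTime above.
+//
+//   var id = new ObjectID();          // 12 bytes: time + machine + pid + counter
+//   var hex = id.toHexString();       // 24 hex characters
+//   ObjectID.createFromHexString(hex).equals(id);   // => true
+//   id.getTimestamp();                // Date, accurate to the second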
+
+},
+
+
+
+'symbol': function(module, exports, global, require, undefined){
+ /**
+ * A class representation of the BSON Symbol type.
+ *
+ * @class Represents the BSON Symbol type.
+ * @param {String} value the string representing the symbol.
+ * @return {Symbol}
+ */
+function Symbol(value) {
+ if(!(this instanceof Symbol)) return new Symbol(value);
+ this._bsontype = 'Symbol';
+ this.value = value;
+}
+
+/**
+ * Access the wrapped string value.
+ *
+ * @return {String} returns the wrapped string.
+ * @api public
+ */
+Symbol.prototype.valueOf = function() {
+ return this.value;
+};
+
+/**
+ * @ignore
+ * @api private
+ */
+Symbol.prototype.toString = function() {
+ return this.value;
+}
+
+/**
+ * @ignore
+ * @api private
+ */
+Symbol.prototype.inspect = function() {
+ return this.value;
+}
+
+/**
+ * @ignore
+ * @api private
+ */
+Symbol.prototype.toJSON = function() {
+ return this.value;
+}
+
+exports.Symbol = Symbol;
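+
+// Commented sketch: Symbol is a thin string wrapper kept so the original BSON
+// symbol type can be preserved on a round trip.
+//
+//   new Symbol('tag').valueOf();   // => 'tag'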
+},
+
+
+
+'timestamp': function(module, exports, global, require, undefined){
+ // Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// Copyright 2009 Google Inc. All Rights Reserved
+
+/**
+ * Defines a Timestamp class for representing a 64-bit two's-complement
+ * integer value, which faithfully simulates the behavior of a Java "Long". This
+ * implementation is derived from LongLib in GWT.
+ *
+ * Constructs a 64-bit two's-complement integer, given its low and high 32-bit
+ * values as *signed* integers. See the from* functions below for more
+ * convenient ways of constructing Timestamps.
+ *
+ * The internal representation of a Timestamp is the two given signed, 32-bit values.
+ * We use 32-bit pieces because these are the size of integers on which
+ * Javascript performs bit-operations. For operations like addition and
+ * multiplication, we split each number into 16-bit pieces, which can easily be
+ * multiplied within Javascript's floating-point representation without overflow
+ * or change in sign.
+ *
+ * In the algorithms below, we frequently reduce the negative case to the
+ * positive case by negating the input(s) and then post-processing the result.
+ * Note that we must ALWAYS check specially whether those values are MIN_VALUE
+ * (-2^63) because -MIN_VALUE == MIN_VALUE (since 2^63 cannot be represented as
+ * a positive number, it overflows back into a negative). Not handling this
+ * case would often result in infinite recursion.
+ *
+ * @class Represents the BSON Timestamp type.
+ * @param {Number} low the low (signed) 32 bits of the Timestamp.
+ * @param {Number} high the high (signed) 32 bits of the Timestamp.
+ */
+function Timestamp(low, high) {
+ if(!(this instanceof Timestamp)) return new Timestamp(low, high);
+ this._bsontype = 'Timestamp';
+ /**
+ * @type {number}
+ * @api private
+ */
+ this.low_ = low | 0; // force into 32 signed bits.
+
+ /**
+ * @type {number}
+ * @api private
+ */
+ this.high_ = high | 0; // force into 32 signed bits.
+};
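+
+// Commented sketch, assuming the usual MongoDB convention that the low 32 bits
+// of a BSON timestamp carry an incrementing ordinal and the high 32 bits carry
+// seconds since the epoch:
+//
+//   var ts = new Timestamp(1, 1375075200);
+//   ts.getLowBits();    // => 1
+//   ts.getHighBits();   // => 1375075200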
+
+/**
+ * Return the int value.
+ *
+ * @return {Number} the value, assuming it is a 32-bit integer.
+ * @api public
+ */
+Timestamp.prototype.toInt = function() {
+ return this.low_;
+};
+
+/**
+ * Return the Number value.
+ *
+ * @return {Number} the closest floating-point representation to this value.
+ * @api public
+ */
+Timestamp.prototype.toNumber = function() {
+ return this.high_ * Timestamp.TWO_PWR_32_DBL_ +
+ this.getLowBitsUnsigned();
+};
+
+/**
+ * Return the JSON value.
+ *
+ * @return {String} the JSON representation.
+ * @api public
+ */
+Timestamp.prototype.toJSON = function() {
+ return this.toString();
+}
+
+/**
+ * Return the String value.
+ *
+ * @param {Number} [opt_radix] the radix in which the text should be written.
+ * @return {String} the textual representation of this value.
+ * @api public
+ */
+Timestamp.prototype.toString = function(opt_radix) {
+ var radix = opt_radix || 10;
+ if (radix < 2 || 36 < radix) {
+ throw Error('radix out of range: ' + radix);
+ }
+
+ if (this.isZero()) {
+ return '0';
+ }
+
+ if (this.isNegative()) {
+ if (this.equals(Timestamp.MIN_VALUE)) {
+ // We need to change the Timestamp value before it can be negated, so we remove
+ // the bottom-most digit in this base and then recurse to do the rest.
+ var radixTimestamp = Timestamp.fromNumber(radix);
+ var div = this.div(radixTimestamp);
+ var rem = div.multiply(radixTimestamp).subtract(this);
+ return div.toString(radix) + rem.toInt().toString(radix);
+ } else {
+ return '-' + this.negate().toString(radix);
+ }
+ }
+
+ // Do several (6) digits each time through the loop, so as to
+ // minimize the calls to the very expensive emulated div.
+ var radixToPower = Timestamp.fromNumber(Math.pow(radix, 6));
+
+ var rem = this;
+ var result = '';
+ while (true) {
+ var remDiv = rem.div(radixToPower);
+ var intval = rem.subtract(remDiv.multiply(radixToPower)).toInt();
+ var digits = intval.toString(radix);
+
+ rem = remDiv;
+ if (rem.isZero()) {
+ return digits + result;
+ } else {
+ while (digits.length < 6) {
+ digits = '0' + digits;
+ }
+ result = '' + digits + result;
+ }
+ }
+};
+
+/**
+ * Return the high 32-bits value.
+ *
+ * @return {Number} the high 32-bits as a signed value.
+ * @api public
+ */
+Timestamp.prototype.getHighBits = function() {
+ return this.high_;
+};
+
+/**
+ * Return the low 32-bits value.
+ *
+ * @return {Number} the low 32-bits as a signed value.
+ * @api public
+ */
+Timestamp.prototype.getLowBits = function() {
+ return this.low_;
+};
+
+/**
+ * Return the low unsigned 32-bits value.
+ *
+ * @return {Number} the low 32-bits as an unsigned value.
+ * @api public
+ */
+Timestamp.prototype.getLowBitsUnsigned = function() {
+ return (this.low_ >= 0) ?
+ this.low_ : Timestamp.TWO_PWR_32_DBL_ + this.low_;
+};
+
+/**
+ * Returns the number of bits needed to represent the absolute value of this Timestamp.
+ *
+ * @return {Number} Returns the number of bits needed to represent the absolute value of this Timestamp.
+ * @api public
+ */
+Timestamp.prototype.getNumBitsAbs = function() {
+ if (this.isNegative()) {
+ if (this.equals(Timestamp.MIN_VALUE)) {
+ return 64;
+ } else {
+ return this.negate().getNumBitsAbs();
+ }
+ } else {
+ var val = this.high_ != 0 ? this.high_ : this.low_;
+ for (var bit = 31; bit > 0; bit--) {
+ if ((val & (1 << bit)) != 0) {
+ break;
+ }
+ }
+ return this.high_ != 0 ? bit + 33 : bit + 1;
+ }
+};
+
+/**
+ * Return whether this value is zero.
+ *
+ * @return {Boolean} whether this value is zero.
+ * @api public
+ */
+Timestamp.prototype.isZero = function() {
+ return this.high_ == 0 && this.low_ == 0;
+};
+
+/**
+ * Return whether this value is negative.
+ *
+ * @return {Boolean} whether this value is negative.
+ * @api public
+ */
+Timestamp.prototype.isNegative = function() {
+ return this.high_ < 0;
+};
+
+/**
+ * Return whether this value is odd.
+ *
+ * @return {Boolean} whether this value is odd.
+ * @api public
+ */
+Timestamp.prototype.isOdd = function() {
+ return (this.low_ & 1) == 1;
+};
+
+/**
+ * Return whether this Timestamp equals the other
+ *
+ * @param {Timestamp} other Timestamp to compare against.
+ * @return {Boolean} whether this Timestamp equals the other
+ * @api public
+ */
+Timestamp.prototype.equals = function(other) {
+ return (this.high_ == other.high_) && (this.low_ == other.low_);
+};
+
+/**
+ * Return whether this Timestamp does not equal the other.
+ *
+ * @param {Timestamp} other Timestamp to compare against.
+ * @return {Boolean} whether this Timestamp does not equal the other.
+ * @api public
+ */
+Timestamp.prototype.notEquals = function(other) {
+ return (this.high_ != other.high_) || (this.low_ != other.low_);
+};
+
+/**
+ * Return whether this Timestamp is less than the other.
+ *
+ * @param {Timestamp} other Timestamp to compare against.
+ * @return {Boolean} whether this Timestamp is less than the other.
+ * @api public
+ */
+Timestamp.prototype.lessThan = function(other) {
+ return this.compare(other) < 0;
+};
+
+/**
+ * Return whether this Timestamp is less than or equal to the other.
+ *
+ * @param {Timestamp} other Timestamp to compare against.
+ * @return {Boolean} whether this Timestamp is less than or equal to the other.
+ * @api public
+ */
+Timestamp.prototype.lessThanOrEqual = function(other) {
+ return this.compare(other) <= 0;
+};
+
+/**
+ * Return whether this Timestamp is greater than the other.
+ *
+ * @param {Timestamp} other Timestamp to compare against.
+ * @return {Boolean} whether this Timestamp is greater than the other.
+ * @api public
+ */
+Timestamp.prototype.greaterThan = function(other) {
+ return this.compare(other) > 0;
+};
+
+/**
+ * Return whether this Timestamp is greater than or equal to the other.
+ *
+ * @param {Timestamp} other Timestamp to compare against.
+ * @return {Boolean} whether this Timestamp is greater than or equal to the other.
+ * @api public
+ */
+Timestamp.prototype.greaterThanOrEqual = function(other) {
+ return this.compare(other) >= 0;
+};
+
+/**
+ * Compares this Timestamp with the given one.
+ *
+ * @param {Timestamp} other Timestamp to compare against.
+ * @return {Number} 0 if they are the same, 1 if this is greater, and -1 if the given one is greater.
+ * @api public
+ */
+Timestamp.prototype.compare = function(other) {
+ if (this.equals(other)) {
+ return 0;
+ }
+
+ var thisNeg = this.isNegative();
+ var otherNeg = other.isNegative();
+ if (thisNeg && !otherNeg) {
+ return -1;
+ }
+ if (!thisNeg && otherNeg) {
+ return 1;
+ }
+
+ // at this point, the signs are the same, so subtraction will not overflow
+ if (this.subtract(other).isNegative()) {
+ return -1;
+ } else {
+ return 1;
+ }
+};
+
+/**
+ * The negation of this value.
+ *
+ * @return {Timestamp} the negation of this value.
+ * @api public
+ */
+Timestamp.prototype.negate = function() {
+ if (this.equals(Timestamp.MIN_VALUE)) {
+ return Timestamp.MIN_VALUE;
+ } else {
+ return this.not().add(Timestamp.ONE);
+ }
+};
+
+/**
+ * Returns the sum of this and the given Timestamp.
+ *
+ * @param {Timestamp} other Timestamp to add to this one.
+ * @return {Timestamp} the sum of this and the given Timestamp.
+ * @api public
+ */
+Timestamp.prototype.add = function(other) {
+ // Divide each number into 4 chunks of 16 bits, and then sum the chunks.
+
+ var a48 = this.high_ >>> 16;
+ var a32 = this.high_ & 0xFFFF;
+ var a16 = this.low_ >>> 16;
+ var a00 = this.low_ & 0xFFFF;
+
+ var b48 = other.high_ >>> 16;
+ var b32 = other.high_ & 0xFFFF;
+ var b16 = other.low_ >>> 16;
+ var b00 = other.low_ & 0xFFFF;
+
+ var c48 = 0, c32 = 0, c16 = 0, c00 = 0;
+ c00 += a00 + b00;
+ c16 += c00 >>> 16;
+ c00 &= 0xFFFF;
+ c16 += a16 + b16;
+ c32 += c16 >>> 16;
+ c16 &= 0xFFFF;
+ c32 += a32 + b32;
+ c48 += c32 >>> 16;
+ c32 &= 0xFFFF;
+ c48 += a48 + b48;
+ c48 &= 0xFFFF;
+ return Timestamp.fromBits((c16 << 16) | c00, (c48 << 16) | c32);
+};
+
+/**
+ * Returns the difference of this and the given Timestamp.
+ *
+ * @param {Timestamp} other Timestamp to subtract from this.
+ * @return {Timestamp} the difference of this and the given Timestamp.
+ * @api public
+ */
+Timestamp.prototype.subtract = function(other) {
+ return this.add(other.negate());
+};
+
+/**
+ * Returns the product of this and the given Timestamp.
+ *
+ * @param {Timestamp} other Timestamp to multiply with this.
+ * @return {Timestamp} the product of this and the other.
+ * @api public
+ */
+Timestamp.prototype.multiply = function(other) {
+ if (this.isZero()) {
+ return Timestamp.ZERO;
+ } else if (other.isZero()) {
+ return Timestamp.ZERO;
+ }
+
+ if (this.equals(Timestamp.MIN_VALUE)) {
+ return other.isOdd() ? Timestamp.MIN_VALUE : Timestamp.ZERO;
+ } else if (other.equals(Timestamp.MIN_VALUE)) {
+ return this.isOdd() ? Timestamp.MIN_VALUE : Timestamp.ZERO;
+ }
+
+ if (this.isNegative()) {
+ if (other.isNegative()) {
+ return this.negate().multiply(other.negate());
+ } else {
+ return this.negate().multiply(other).negate();
+ }
+ } else if (other.isNegative()) {
+ return this.multiply(other.negate()).negate();
+ }
+
+ // If both Timestamps are small, use float multiplication
+ if (this.lessThan(Timestamp.TWO_PWR_24_) &&
+ other.lessThan(Timestamp.TWO_PWR_24_)) {
+ return Timestamp.fromNumber(this.toNumber() * other.toNumber());
+ }
+
+ // Divide each Timestamp into 4 chunks of 16 bits, and then add up 4x4 products.
+ // We can skip products that would overflow.
+
+ var a48 = this.high_ >>> 16;
+ var a32 = this.high_ & 0xFFFF;
+ var a16 = this.low_ >>> 16;
+ var a00 = this.low_ & 0xFFFF;
+
+ var b48 = other.high_ >>> 16;
+ var b32 = other.high_ & 0xFFFF;
+ var b16 = other.low_ >>> 16;
+ var b00 = other.low_ & 0xFFFF;
+
+ var c48 = 0, c32 = 0, c16 = 0, c00 = 0;
+ c00 += a00 * b00;
+ c16 += c00 >>> 16;
+ c00 &= 0xFFFF;
+ c16 += a16 * b00;
+ c32 += c16 >>> 16;
+ c16 &= 0xFFFF;
+ c16 += a00 * b16;
+ c32 += c16 >>> 16;
+ c16 &= 0xFFFF;
+ c32 += a32 * b00;
+ c48 += c32 >>> 16;
+ c32 &= 0xFFFF;
+ c32 += a16 * b16;
+ c48 += c32 >>> 16;
+ c32 &= 0xFFFF;
+ c32 += a00 * b32;
+ c48 += c32 >>> 16;
+ c32 &= 0xFFFF;
+ c48 += a48 * b00 + a32 * b16 + a16 * b32 + a00 * b48;
+ c48 &= 0xFFFF;
+ return Timestamp.fromBits((c16 << 16) | c00, (c48 << 16) | c32);
+};
+
+/**
+ * Returns this Timestamp divided by the given one.
+ *
+ * @param {Timestamp} other Timestamp by which to divide.
+ * @return {Timestamp} this Timestamp divided by the given one.
+ * @api public
+ */
+Timestamp.prototype.div = function(other) {
+ if (other.isZero()) {
+ throw Error('division by zero');
+ } else if (this.isZero()) {
+ return Timestamp.ZERO;
+ }
+
+ if (this.equals(Timestamp.MIN_VALUE)) {
+ if (other.equals(Timestamp.ONE) ||
+ other.equals(Timestamp.NEG_ONE)) {
+ return Timestamp.MIN_VALUE; // recall that -MIN_VALUE == MIN_VALUE
+ } else if (other.equals(Timestamp.MIN_VALUE)) {
+ return Timestamp.ONE;
+ } else {
+ // At this point, we have |other| >= 2, so |this/other| < |MIN_VALUE|.
+ var halfThis = this.shiftRight(1);
+ var approx = halfThis.div(other).shiftLeft(1);
+ if (approx.equals(Timestamp.ZERO)) {
+ return other.isNegative() ? Timestamp.ONE : Timestamp.NEG_ONE;
+ } else {
+ var rem = this.subtract(other.multiply(approx));
+ var result = approx.add(rem.div(other));
+ return result;
+ }
+ }
+ } else if (other.equals(Timestamp.MIN_VALUE)) {
+ return Timestamp.ZERO;
+ }
+
+ if (this.isNegative()) {
+ if (other.isNegative()) {
+ return this.negate().div(other.negate());
+ } else {
+ return this.negate().div(other).negate();
+ }
+ } else if (other.isNegative()) {
+ return this.div(other.negate()).negate();
+ }
+
+ // Repeat the following until the remainder is less than other: find a
+ // floating-point that approximates remainder / other *from below*, add this
+ // into the result, and subtract it from the remainder. It is critical that
+ // the approximate value is less than or equal to the real value so that the
+ // remainder never becomes negative.
+ var res = Timestamp.ZERO;
+ var rem = this;
+ while (rem.greaterThanOrEqual(other)) {
+ // Approximate the result of division. This may be a little greater or
+ // smaller than the actual value.
+ var approx = Math.max(1, Math.floor(rem.toNumber() / other.toNumber()));
+
+ // We will tweak the approximate result by changing it in the 48-th digit or
+ // the smallest non-fractional digit, whichever is larger.
+ var log2 = Math.ceil(Math.log(approx) / Math.LN2);
+ var delta = (log2 <= 48) ? 1 : Math.pow(2, log2 - 48);
+
+ // Decrease the approximation until it is smaller than the remainder. Note
+ // that if it is too large, the product overflows and is negative.
+ var approxRes = Timestamp.fromNumber(approx);
+ var approxRem = approxRes.multiply(other);
+ while (approxRem.isNegative() || approxRem.greaterThan(rem)) {
+ approx -= delta;
+ approxRes = Timestamp.fromNumber(approx);
+ approxRem = approxRes.multiply(other);
+ }
+
+ // We know the answer can't be zero... and actually, zero would cause
+ // infinite recursion since we would make no progress.
+ if (approxRes.isZero()) {
+ approxRes = Timestamp.ONE;
+ }
+
+ res = res.add(approxRes);
+ rem = rem.subtract(approxRem);
+ }
+ return res;
+};
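+// Illustrative usage (not part of the library source): division truncates
+// toward zero, matching C-style integer division, e.g.
+//   Timestamp.fromNumber(10).div(Timestamp.fromNumber(-3)).toNumber(); // -3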
+
+/**
+ * Returns this Timestamp modulo the given one.
+ *
+ * @param {Timestamp} other Timestamp by which to mod.
+ * @return {Timestamp} this Timestamp modulo the given one.
+ * @api public
+ */
+Timestamp.prototype.modulo = function(other) {
+ return this.subtract(this.div(other).multiply(other));
+};
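+// Illustrative usage (not part of the library source): because div truncates
+// toward zero, the remainder takes the sign of the dividend, e.g.
+//   Timestamp.fromNumber(-7).modulo(Timestamp.fromNumber(3)).toNumber(); // -1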
+
+/**
+ * The bitwise-NOT of this value.
+ *
+ * @return {Timestamp} the bitwise-NOT of this value.
+ * @api public
+ */
+Timestamp.prototype.not = function() {
+ return Timestamp.fromBits(~this.low_, ~this.high_);
+};
+
+/**
+ * Returns the bitwise-AND of this Timestamp and the given one.
+ *
+ * @param {Timestamp} other the Timestamp with which to AND.
+ * @return {Timestamp} the bitwise-AND of this and the other.
+ * @api public
+ */
+Timestamp.prototype.and = function(other) {
+ return Timestamp.fromBits(this.low_ & other.low_, this.high_ & other.high_);
+};
+
+/**
+ * Returns the bitwise-OR of this Timestamp and the given one.
+ *
+ * @param {Timestamp} other the Timestamp with which to OR.
+ * @return {Timestamp} the bitwise-OR of this and the other.
+ * @api public
+ */
+Timestamp.prototype.or = function(other) {
+ return Timestamp.fromBits(this.low_ | other.low_, this.high_ | other.high_);
+};
+
+/**
+ * Returns the bitwise-XOR of this Timestamp and the given one.
+ *
+ * @param {Timestamp} other the Timestamp with which to XOR.
+ * @return {Timestamp} the bitwise-XOR of this and the other.
+ * @api public
+ */
+Timestamp.prototype.xor = function(other) {
+ return Timestamp.fromBits(this.low_ ^ other.low_, this.high_ ^ other.high_);
+};
+
+/**
+ * Returns this Timestamp with bits shifted to the left by the given amount.
+ *
+ * @param {Number} numBits the number of bits by which to shift.
+ * @return {Timestamp} this shifted to the left by the given amount.
+ * @api public
+ */
+Timestamp.prototype.shiftLeft = function(numBits) {
+ numBits &= 63;
+ if (numBits == 0) {
+ return this;
+ } else {
+ var low = this.low_;
+ if (numBits < 32) {
+ var high = this.high_;
+ return Timestamp.fromBits(
+ low << numBits,
+ (high << numBits) | (low >>> (32 - numBits)));
+ } else {
+ return Timestamp.fromBits(0, low << (numBits - 32));
+ }
+ }
+};
+
+/**
+ * Returns this Timestamp with bits shifted to the right by the given amount.
+ *
+ * @param {Number} numBits the number of bits by which to shift.
+ * @return {Timestamp} this shifted to the right by the given amount.
+ * @api public
+ */
+Timestamp.prototype.shiftRight = function(numBits) {
+ numBits &= 63;
+ if (numBits == 0) {
+ return this;
+ } else {
+ var high = this.high_;
+ if (numBits < 32) {
+ var low = this.low_;
+ return Timestamp.fromBits(
+ (low >>> numBits) | (high << (32 - numBits)),
+ high >> numBits);
+ } else {
+ return Timestamp.fromBits(
+ high >> (numBits - 32),
+ high >= 0 ? 0 : -1);
+ }
+ }
+};
+
+/**
+ * Returns this Timestamp with bits shifted to the right by the given amount, with zeros placed into the new leading bits.
+ *
+ * @param {Number} numBits the number of bits by which to shift.
+ * @return {Timestamp} this shifted to the right by the given amount, with zeros placed into the new leading bits.
+ * @api public
+ */
+Timestamp.prototype.shiftRightUnsigned = function(numBits) {
+ numBits &= 63;
+ if (numBits == 0) {
+ return this;
+ } else {
+ var high = this.high_;
+ if (numBits < 32) {
+ var low = this.low_;
+ return Timestamp.fromBits(
+ (low >>> numBits) | (high << (32 - numBits)),
+ high >>> numBits);
+ } else if (numBits == 32) {
+ return Timestamp.fromBits(high, 0);
+ } else {
+ return Timestamp.fromBits(high >>> (numBits - 32), 0);
+ }
+ }
+};
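+// Illustrative usage (not part of the library source): shiftRight replicates
+// the sign bit while shiftRightUnsigned fills the vacated high bits with zeros:
+//   Timestamp.NEG_ONE.shiftRight(40).toNumber();         // -1
+//   Timestamp.NEG_ONE.shiftRightUnsigned(40).toNumber(); // 16777215 (2^24 - 1)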
+
+/**
+ * Returns a Timestamp representing the given (32-bit) integer value.
+ *
+ * @param {Number} value the 32-bit integer in question.
+ * @return {Timestamp} the corresponding Timestamp value.
+ * @api public
+ */
+Timestamp.fromInt = function(value) {
+ if (-128 <= value && value < 128) {
+ var cachedObj = Timestamp.INT_CACHE_[value];
+ if (cachedObj) {
+ return cachedObj;
+ }
+ }
+
+ var obj = new Timestamp(value | 0, value < 0 ? -1 : 0);
+ if (-128 <= value && value < 128) {
+ Timestamp.INT_CACHE_[value] = obj;
+ }
+ return obj;
+};
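+// Illustrative usage (not part of the library source): values in [-128, 128)
+// are served from INT_CACHE_, so repeated calls return the same object:
+//   Timestamp.fromInt(42) === Timestamp.fromInt(42);   // true (cached)
+//   Timestamp.fromInt(500) === Timestamp.fromInt(500); // false (not cached)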
+
+/**
+ * Returns a Timestamp representing the given value, provided that it is a finite number. Otherwise, zero is returned.
+ *
+ * @param {Number} value the number in question.
+ * @return {Timestamp} the corresponding Timestamp value.
+ * @api public
+ */
+Timestamp.fromNumber = function(value) {
+ if (isNaN(value) || !isFinite(value)) {
+ return Timestamp.ZERO;
+ } else if (value <= -Timestamp.TWO_PWR_63_DBL_) {
+ return Timestamp.MIN_VALUE;
+ } else if (value + 1 >= Timestamp.TWO_PWR_63_DBL_) {
+ return Timestamp.MAX_VALUE;
+ } else if (value < 0) {
+ return Timestamp.fromNumber(-value).negate();
+ } else {
+ return new Timestamp(
+ (value % Timestamp.TWO_PWR_32_DBL_) | 0,
+ (value / Timestamp.TWO_PWR_32_DBL_) | 0);
+ }
+};
+
+/**
+ * Returns a Timestamp representing the 64-bit integer obtained by concatenating the given high and low bits. Each is assumed to use 32 bits.
+ *
+ * @param {Number} lowBits the low 32-bits.
+ * @param {Number} highBits the high 32-bits.
+ * @return {Timestamp} the corresponding Timestamp value.
+ * @api public
+ */
+Timestamp.fromBits = function(lowBits, highBits) {
+ return new Timestamp(lowBits, highBits);
+};
+
+/**
+ * Returns a Timestamp representation of the given string, written using the given radix.
+ *
+ * @param {String} str the textual representation of the Timestamp.
+ * @param {Number} opt_radix the radix in which the text is written.
+ * @return {Timestamp} the corresponding Timestamp value.
+ * @api public
+ */
+Timestamp.fromString = function(str, opt_radix) {
+ if (str.length == 0) {
+ throw Error('number format error: empty string');
+ }
+
+ var radix = opt_radix || 10;
+ if (radix < 2 || 36 < radix) {
+ throw Error('radix out of range: ' + radix);
+ }
+
+ if (str.charAt(0) == '-') {
+ return Timestamp.fromString(str.substring(1), radix).negate();
+ } else if (str.indexOf('-') >= 0) {
+ throw Error('number format error: interior "-" character: ' + str);
+ }
+
+ // Do several (8) digits each time through the loop, so as to
+ // minimize the calls to the very expensive emulated div.
+ var radixToPower = Timestamp.fromNumber(Math.pow(radix, 8));
+
+ var result = Timestamp.ZERO;
+ for (var i = 0; i < str.length; i += 8) {
+ var size = Math.min(8, str.length - i);
+ var value = parseInt(str.substring(i, i + size), radix);
+ if (size < 8) {
+ var power = Timestamp.fromNumber(Math.pow(radix, size));
+ result = result.multiply(power).add(Timestamp.fromNumber(value));
+ } else {
+ result = result.multiply(radixToPower);
+ result = result.add(Timestamp.fromNumber(value));
+ }
+ }
+ return result;
+};
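+// Illustrative usage (not part of the library source): digits are consumed in
+// blocks of eight to keep the number of emulated multiply/add steps small, e.g.
+//   Timestamp.fromString('4294967296').toNumber();           // 4294967296
+//   Timestamp.fromString('ffffffffffffffff', 16).toNumber(); // -1 (64-bit two's-complement wrap)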
+
+// NOTE: Common constant values ZERO, ONE, NEG_ONE, etc. are defined below the
+// from* methods on which they depend.
+
+
+/**
+ * A cache of the Timestamp representations of small integer values.
+ * @type {Object}
+ * @api private
+ */
+Timestamp.INT_CACHE_ = {};
+
+// NOTE: the compiler should inline these constant values below and then remove
+// these variables, so there should be no runtime penalty for these.
+
+/**
+ * Number used repeatedly below in calculations. This must appear before the
+ * first call to any from* function below.
+ * @type {number}
+ * @api private
+ */
+Timestamp.TWO_PWR_16_DBL_ = 1 << 16;
+
+/**
+ * @type {number}
+ * @api private
+ */
+Timestamp.TWO_PWR_24_DBL_ = 1 << 24;
+
+/**
+ * @type {number}
+ * @api private
+ */
+Timestamp.TWO_PWR_32_DBL_ = Timestamp.TWO_PWR_16_DBL_ * Timestamp.TWO_PWR_16_DBL_;
+
+/**
+ * @type {number}
+ * @api private
+ */
+Timestamp.TWO_PWR_31_DBL_ = Timestamp.TWO_PWR_32_DBL_ / 2;
+
+/**
+ * @type {number}
+ * @api private
+ */
+Timestamp.TWO_PWR_48_DBL_ = Timestamp.TWO_PWR_32_DBL_ * Timestamp.TWO_PWR_16_DBL_;
+
+/**
+ * @type {number}
+ * @api private
+ */
+Timestamp.TWO_PWR_64_DBL_ = Timestamp.TWO_PWR_32_DBL_ * Timestamp.TWO_PWR_32_DBL_;
+
+/**
+ * @type {number}
+ * @api private
+ */
+Timestamp.TWO_PWR_63_DBL_ = Timestamp.TWO_PWR_64_DBL_ / 2;
+
+/** @type {Timestamp} */
+Timestamp.ZERO = Timestamp.fromInt(0);
+
+/** @type {Timestamp} */
+Timestamp.ONE = Timestamp.fromInt(1);
+
+/** @type {Timestamp} */
+Timestamp.NEG_ONE = Timestamp.fromInt(-1);
+
+/** @type {Timestamp} */
+Timestamp.MAX_VALUE =
+ Timestamp.fromBits(0xFFFFFFFF | 0, 0x7FFFFFFF | 0);
+
+/** @type {Timestamp} */
+Timestamp.MIN_VALUE = Timestamp.fromBits(0, 0x80000000 | 0);
+
+/**
+ * @type {Timestamp}
+ * @api private
+ */
+Timestamp.TWO_PWR_24_ = Timestamp.fromInt(1 << 24);
+
+/**
+ * Expose.
+ */
+exports.Timestamp = Timestamp;
+},
+
+ });
+
+
+if(typeof module != 'undefined' && module.exports ){
+ module.exports = bson;
+
+ if( !module.parent ){
+ bson();
+ }
+}
+
+if(typeof window != 'undefined' && typeof require == 'undefined'){
+ window.require = bson.require;
+}
diff --git a/node_modules/bson/browser_build/package.json b/node_modules/bson/browser_build/package.json
new file mode 100644
index 000000000..3ebb58761
--- /dev/null
+++ b/node_modules/bson/browser_build/package.json
@@ -0,0 +1,8 @@
+{ "name" : "bson"
+, "description" : "A bson parser for node.js and the browser"
+, "main": "../lib/bson/bson"
+, "directories" : { "lib" : "../lib/bson" }
+, "engines" : { "node" : ">=0.6.0" }
+, "licenses" : [ { "type" : "Apache License, Version 2.0"
+ , "url" : "http://www.apache.org/licenses/LICENSE-2.0" } ]
+}
diff --git a/node_modules/bson/build/Makefile b/node_modules/bson/build/Makefile
new file mode 100644
index 000000000..04cc2741c
--- /dev/null
+++ b/node_modules/bson/build/Makefile
@@ -0,0 +1,320 @@
+# We borrow heavily from the kernel build setup, though we are simpler since
+# we don't have Kconfig tweaking settings on us.
+
+# The implicit make rules have it looking for RCS files, among other things.
+# We instead explicitly write all the rules we care about.
+# It's even quicker (saves ~200ms) to pass -r on the command line.
+MAKEFLAGS=-r
+
+# The source directory tree.
+srcdir := ..
+abs_srcdir := $(abspath $(srcdir))
+
+# The name of the builddir.
+builddir_name ?= .
+
+# The V=1 flag on command line makes us verbosely print command lines.
+ifdef V
+ quiet=
+else
+ quiet=quiet_
+endif
+
+# Specify BUILDTYPE=Release on the command line for a release build.
+BUILDTYPE ?= Release
+
+# Directory all our build output goes into.
+# Note that this must be two directories beneath src/ for unit tests to pass,
+# as they reach into the src/ directory for data with relative paths.
+builddir ?= $(builddir_name)/$(BUILDTYPE)
+abs_builddir := $(abspath $(builddir))
+depsdir := $(builddir)/.deps
+
+# Object output directory.
+obj := $(builddir)/obj
+abs_obj := $(abspath $(obj))
+
+# We build up a list of every single one of the targets so we can slurp in the
+# generated dependency rule Makefiles in one pass.
+all_deps :=
+
+
+
+CC.target ?= $(CC)
+CFLAGS.target ?= $(CPPFLAGS) $(CFLAGS)
+CXX.target ?= $(CXX)
+CXXFLAGS.target ?= $(CPPFLAGS) $(CXXFLAGS)
+LINK.target ?= $(LINK)
+LDFLAGS.target ?= $(LDFLAGS)
+AR.target ?= $(AR)
+
+# C++ apps need to be linked with g++.
+LINK ?= $(CXX.target)
+
+# TODO(evan): move all cross-compilation logic to gyp-time so we don't need
+# to replicate this environment fallback in make as well.
+CC.host ?= gcc
+CFLAGS.host ?= $(CPPFLAGS_host) $(CFLAGS_host)
+CXX.host ?= g++
+CXXFLAGS.host ?= $(CPPFLAGS_host) $(CXXFLAGS_host)
+LINK.host ?= $(CXX.host)
+LDFLAGS.host ?=
+AR.host ?= ar
+
+# Define a dir function that can handle spaces.
+# http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions
+# "leading spaces cannot appear in the text of the first argument as written.
+# These characters can be put into the argument value by variable substitution."
+empty :=
+space := $(empty) $(empty)
+
+# http://stackoverflow.com/questions/1189781/using-make-dir-or-notdir-on-a-path-with-spaces
+replace_spaces = $(subst $(space),?,$1)
+unreplace_spaces = $(subst ?,$(space),$1)
+dirx = $(call unreplace_spaces,$(dir $(call replace_spaces,$1)))
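+# Illustrative walk-through (not part of the generated file): for a path such
+# as "My Files/app.o", replace_spaces yields "My?Files/app.o", $(dir) returns
+# "My?Files/", and unreplace_spaces restores "My Files/" -- assuming '?' never
+# appears in a real path.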
+
+# Flags to make gcc output dependency info. Note that you need to be
+# careful here to use the flags that ccache and distcc can understand.
+# We write to a dep file on the side first and then rename at the end
+# so we can't end up with a broken dep file.
+depfile = $(depsdir)/$(call replace_spaces,$@).d
+DEPFLAGS = -MMD -MF $(depfile).raw
+
+# We have to fixup the deps output in a few ways.
+# (1) the file output should mention the proper .o file.
+# ccache or distcc lose the path to the target, so we convert a rule of
+# the form:
+# foobar.o: DEP1 DEP2
+# into
+# path/to/foobar.o: DEP1 DEP2
+# (2) we want missing files not to cause us to fail to build.
+# We want to rewrite
+# foobar.o: DEP1 DEP2 \
+# DEP3
+# to
+# DEP1:
+# DEP2:
+# DEP3:
+# so if the files are missing, they're just considered phony rules.
+# We have to do some pretty insane escaping to get those backslashes
+# and dollar signs past make, the shell, and sed at the same time.
+# Doesn't work with spaces, but that's fine: .d files have spaces in
+# their names replaced with other characters.
+define fixup_dep
+# The depfile may not exist if the input file didn't have any #includes.
+touch $(depfile).raw
+# Fixup path as in (1).
+sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile)
+# Add extra rules as in (2).
+# We remove slashes and replace spaces with new lines;
+# remove blank lines;
+# delete the first line and append a colon to the remaining lines.
+sed -e 's|\\||' -e 'y| |\n|' $(depfile).raw |\
+ grep -v '^$$' |\
+ sed -e 1d -e 's|$$|:|' \
+ >> $(depfile)
+rm $(depfile).raw
+endef
+
+# Command definitions:
+# - cmd_foo is the actual command to run;
+# - quiet_cmd_foo is the brief-output summary of the command.
+
+quiet_cmd_cc = CC($(TOOLSET)) $@
+cmd_cc = $(CC.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c -o $@ $<
+
+quiet_cmd_cxx = CXX($(TOOLSET)) $@
+cmd_cxx = $(CXX.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
+
+quiet_cmd_objc = CXX($(TOOLSET)) $@
+cmd_objc = $(CC.$(TOOLSET)) $(GYP_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $<
+
+quiet_cmd_objcxx = CXX($(TOOLSET)) $@
+cmd_objcxx = $(CXX.$(TOOLSET)) $(GYP_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $<
+
+# Commands for precompiled header files.
+quiet_cmd_pch_c = CXX($(TOOLSET)) $@
+cmd_pch_c = $(CC.$(TOOLSET)) $(GYP_PCH_CFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
+quiet_cmd_pch_cc = CXX($(TOOLSET)) $@
+cmd_pch_cc = $(CC.$(TOOLSET)) $(GYP_PCH_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
+quiet_cmd_pch_m = CXX($(TOOLSET)) $@
+cmd_pch_m = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $<
+quiet_cmd_pch_mm = CXX($(TOOLSET)) $@
+cmd_pch_mm = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $<
+
+# gyp-mac-tool is written next to the root Makefile by gyp.
+# Use $(4) for the command, since $(2) and $(3) are used as flag by do_cmd
+# already.
+quiet_cmd_mac_tool = MACTOOL $(4) $<
+cmd_mac_tool = ./gyp-mac-tool $(4) $< "$@"
+
+quiet_cmd_mac_package_framework = PACKAGE FRAMEWORK $@
+cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(4)
+
+quiet_cmd_infoplist = INFOPLIST $@
+cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@"
+
+quiet_cmd_touch = TOUCH $@
+cmd_touch = touch $@
+
+quiet_cmd_copy = COPY $@
+# send stderr to /dev/null to ignore messages when linking directories.
+cmd_copy = rm -rf "$@" && cp -af "$<" "$@"
+
+quiet_cmd_alink = LIBTOOL-STATIC $@
+cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %.o,$^)
+
+quiet_cmd_link = LINK($(TOOLSET)) $@
+cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
+
+quiet_cmd_solink = SOLINK($(TOOLSET)) $@
+cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
+
+quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@
+cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS)
+
+
+# Define an escape_quotes function to escape single quotes.
+# This allows us to handle quotes properly as long as we always
+# use single quotes and escape_quotes.
+escape_quotes = $(subst ','\'',$(1))
+# This comment is here just to include a ' to unconfuse syntax highlighting.
+# Define an escape_vars function to escape '$' variable syntax.
+# This allows us to read/write command lines with shell variables (e.g.
+# $LD_LIBRARY_PATH), without triggering make substitution.
+escape_vars = $(subst $$,$$$$,$(1))
+# Helper that expands to a shell command to echo a string exactly as it is in
+# make. This uses printf instead of echo because printf's behaviour with respect
+# to escape sequences is more portable than echo's across different shells
+# (e.g., dash, bash).
+exact_echo = printf '%s\n' '$(call escape_quotes,$(1))'
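+# Illustrative example (not part of the generated file):
+#   $(call exact_echo,don't panic)
+# expands to the shell command: printf '%s\n' 'don'\''t panic'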
+
+# Helper to compare the command we're about to run against the command
+# we logged the last time we ran the command. Produces an empty
+# string (false) when the commands match.
+# Tricky point: Make has no string-equality test function.
+# The kernel uses the following, but it seems like it would have false
+# positives, where one string reordered its arguments.
+# arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \
+# $(filter-out $(cmd_$@), $(cmd_$(1))))
+# We instead substitute each for the empty string into the other, and
+# say they're equal if both substitutions produce the empty string.
+# .d files contain ? instead of spaces, take that into account.
+command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))),\
+ $(subst $(cmd_$(call replace_spaces,$@)),,$(cmd_$(1))))
+
+# Helper that is non-empty when a prerequisite changes.
+# Normally make does this implicitly, but we force rules to always run
+# so we can check their command lines.
+# $? -- new prerequisites
+# $| -- order-only dependencies
+prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?))
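+# Illustrative example (not part of the generated file): if the command logged
+# in the .d file and the command about to run are both `cc -c foo.c`, each
+# $(subst ...) in command_changed yields the empty string, so it is false; any
+# textual difference, or a newer prerequisite reported by prereq_changed,
+# forces do_cmd to re-run the rule.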
+
+# Helper that executes all postbuilds until one fails.
+define do_postbuilds
+ @E=0;\
+ for p in $(POSTBUILDS); do\
+ eval $$p;\
+ E=$$?;\
+ if [ $$E -ne 0 ]; then\
+ break;\
+ fi;\
+ done;\
+ if [ $$E -ne 0 ]; then\
+ rm -rf "$@";\
+ exit $$E;\
+ fi
+endef
+
+# do_cmd: run a command via the above cmd_foo names, if necessary.
+# Should always run for a given target to handle command-line changes.
+# Second argument, if non-zero, makes it do asm/C/C++ dependency munging.
+# Third argument, if non-zero, makes it do POSTBUILDS processing.
+# Note: We intentionally do NOT call dirx for depfile, since it contains ? for
+# spaces already and dirx strips the ? characters.
+define do_cmd
+$(if $(or $(command_changed),$(prereq_changed)),
+ @$(call exact_echo, $($(quiet)cmd_$(1)))
+ @mkdir -p "$(call dirx,$@)" "$(dir $(depfile))"
+ $(if $(findstring flock,$(word 2,$(cmd_$1))),
+ @$(cmd_$(1))
+ @echo " $(quiet_cmd_$(1)): Finished",
+ @$(cmd_$(1))
+ )
+ @$(call exact_echo,$(call escape_vars,cmd_$(call replace_spaces,$@) := $(cmd_$(1)))) > $(depfile)
+ @$(if $(2),$(fixup_dep))
+ $(if $(and $(3), $(POSTBUILDS)),
+ $(call do_postbuilds)
+ )
+)
+endef
+
+# Declare the "all" target first so it is the default,
+# even though we don't have the deps yet.
+.PHONY: all
+all:
+
+# make looks for ways to re-generate included makefiles, but in our case, we
+# don't have a direct way. Explicitly telling make that it has nothing to do
+# for them makes it go faster.
+%.d: ;
+
+# Use FORCE_DO_CMD to force a target to run. Should be coupled with
+# do_cmd.
+.PHONY: FORCE_DO_CMD
+FORCE_DO_CMD:
+
+TOOLSET := target
+# Suffix rules, putting all outputs into $(obj).
+$(obj).$(TOOLSET)/%.o: $(srcdir)/%.c FORCE_DO_CMD
+ @$(call do_cmd,cc,1)
+$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD
+ @$(call do_cmd,cxx,1)
+$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cpp FORCE_DO_CMD
+ @$(call do_cmd,cxx,1)
+$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cxx FORCE_DO_CMD
+ @$(call do_cmd,cxx,1)
+$(obj).$(TOOLSET)/%.o: $(srcdir)/%.m FORCE_DO_CMD
+ @$(call do_cmd,objc,1)
+$(obj).$(TOOLSET)/%.o: $(srcdir)/%.mm FORCE_DO_CMD
+ @$(call do_cmd,objcxx,1)
+$(obj).$(TOOLSET)/%.o: $(srcdir)/%.s FORCE_DO_CMD
+ @$(call do_cmd,cc,1)
+$(obj).$(TOOLSET)/%.o: $(srcdir)/%.S FORCE_DO_CMD
+ @$(call do_cmd,cc,1)
+
+# Try building from generated source, too.
+$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD
+ @$(call do_cmd,cc,1)
+$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD
+ @$(call do_cmd,cxx,1)
+$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cpp FORCE_DO_CMD
+ @$(call do_cmd,cxx,1)
+$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cxx FORCE_DO_CMD
+ @$(call do_cmd,cxx,1)
+$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.m FORCE_DO_CMD
+ @$(call do_cmd,objc,1)
+$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.mm FORCE_DO_CMD
+ @$(call do_cmd,objcxx,1)
+$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.s FORCE_DO_CMD
+ @$(call do_cmd,cc,1)
+$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.S FORCE_DO_CMD
+ @$(call do_cmd,cc,1)
+
+$(obj).$(TOOLSET)/%.o: $(obj)/%.c FORCE_DO_CMD
+ @$(call do_cmd,cc,1)
+$(obj).$(TOOLSET)/%.o: $(obj)/%.cc FORCE_DO_CMD
+ @$(call do_cmd,cxx,1)
+$(obj).$(TOOLSET)/%.o: $(obj)/%.cpp FORCE_DO_CMD
+ @$(call do_cmd,cxx,1)
+$(obj).$(TOOLSET)/%.o: $(obj)/%.cxx FORCE_DO_CMD
+ @$(call do_cmd,cxx,1)
+$(obj).$(TOOLSET)/%.o: $(obj)/%.m FORCE_DO_CMD
+ @$(call do_cmd,objc,1)
+$(obj).$(TOOLSET)/%.o: $(obj)/%.mm FORCE_DO_CMD
+ @$(call do_cmd,objcxx,1)
+$(obj).$(TOOLSET)/%.o: $(obj)/%.s FORCE_DO_CMD
+ @$(call do_cmd,cc,1)
+$(obj).$(TOOLSET)/%.o: $(obj)/%.S FORCE_DO_CMD
+ @$(call do_cmd,cc,1)
+
diff --git a/node_modules/bson/build/bson.target.mk b/node_modules/bson/build/bson.target.mk
new file mode 100644
index 000000000..179639b07
--- /dev/null
+++ b/node_modules/bson/build/bson.target.mk
@@ -0,0 +1,21 @@
+# This file is generated by gyp; do not edit.
+
+TOOLSET := target
+TARGET := bson
+DEFS_Debug := \
+ '-DNODE_GYP_MODULE_NAME=bson' \
+ '-DUSING_UV_SHARED=1' \
+ '-DUSING_V8_SHARED=1' \
+ '-DV8_DEPRECATION_WARNINGS=1' \
+ '-DV8_DEPRECATION_WARNINGS' \
+ '-DV8_IMMINENT_DEPRECATION_WARNINGS' \
+ '-D_DARWIN_USE_64_BIT_INODE=1' \
+ '-D_LARGEFILE_SOURCE' \
+ '-D_FILE_OFFSET_BITS=64' \
+ '-DOPENSSL_NO_PINSHARED' \
+ '-DOPENSSL_THREADS' \
+ '-DBUILDING_NODE_EXTENSION' \
+ '-DDEBUG' \
+ '-D_DEBUG' \
+ '-DV8_ENABLE_CHECKS'
+
diff --git a/node_modules/bson/build/config.gypi b/node_modules/bson/build/config.gypi
new file mode 100644
index 000000000..dc8578d23
--- /dev/null
+++ b/node_modules/bson/build/config.gypi
@@ -0,0 +1,203 @@
+# Do not edit. File was generated by node-gyp's "configure" step
+{
+ "target_defaults": {
+ "cflags": [],
+ "default_configuration": "Release",
+ "defines": [],
+ "include_dirs": [],
+ "libraries": []
+ },
+ "variables": {
+ "asan": 0,
+ "build_v8_with_gn": "false",
+ "coverage": "false",
+ "dcheck_always_on": 0,
+ "debug_nghttp2": "false",
+ "debug_node": "false",
+ "enable_lto": "false",
+ "enable_pgo_generate": "false",
+ "enable_pgo_use": "false",
+ "error_on_warn": "false",
+ "force_dynamic_crt": 0,
+ "host_arch": "x64",
+ "icu_data_in": "../../deps/icu-tmp/icudt67l.dat",
+ "icu_endianness": "l",
+ "icu_gyp_path": "tools/icu/icu-generic.gyp",
+ "icu_path": "deps/icu-small",
+ "icu_small": "false",
+ "icu_ver_major": "67",
+ "is_debug": 0,
+ "llvm_version": "11.0",
+ "napi_build_version": "7",
+ "node_byteorder": "little",
+ "node_debug_lib": "false",
+ "node_enable_d8": "false",
+ "node_install_npm": "true",
+ "node_module_version": 83,
+ "node_no_browser_globals": "false",
+ "node_prefix": "/usr/local",
+ "node_release_urlbase": "https://nodejs.org/download/release/",
+ "node_shared": "false",
+ "node_shared_brotli": "false",
+ "node_shared_cares": "false",
+ "node_shared_http_parser": "false",
+ "node_shared_libuv": "false",
+ "node_shared_nghttp2": "false",
+ "node_shared_openssl": "false",
+ "node_shared_zlib": "false",
+ "node_tag": "",
+ "node_target_type": "executable",
+ "node_use_bundled_v8": "true",
+ "node_use_dtrace": "true",
+ "node_use_etw": "false",
+ "node_use_node_code_cache": "true",
+ "node_use_node_snapshot": "true",
+ "node_use_openssl": "true",
+ "node_use_v8_platform": "true",
+ "node_with_ltcg": "false",
+ "node_without_node_options": "false",
+ "openssl_fips": "",
+ "openssl_is_fips": "false",
+ "ossfuzz": "false",
+ "shlib_suffix": "83.dylib",
+ "target_arch": "x64",
+ "v8_enable_31bit_smis_on_64bit_arch": 0,
+ "v8_enable_gdbjit": 0,
+ "v8_enable_i18n_support": 1,
+ "v8_enable_inspector": 1,
+ "v8_enable_lite_mode": 0,
+ "v8_enable_object_print": 1,
+ "v8_enable_pointer_compression": 0,
+ "v8_no_strict_aliasing": 1,
+ "v8_optimized_debug": 1,
+ "v8_promise_internal_field_count": 1,
+ "v8_random_seed": 0,
+ "v8_trace_maps": 0,
+ "v8_use_siphash": 1,
+ "want_separate_host_toolset": 0,
+ "xcode_version": "11.0",
+ "nodedir": "/Users/haleyhammock/Library/Caches/node-gyp/14.15.4",
+ "standalone_static_library": 1,
+ "dry_run": "",
+ "legacy_bundling": "",
+ "save_dev": "",
+ "browser": "",
+ "commit_hooks": "true",
+ "only": "",
+ "viewer": "man",
+ "also": "",
+ "rollback": "true",
+ "sign_git_commit": "",
+ "audit": "true",
+ "usage": "",
+ "globalignorefile": "/usr/local/etc/npmignore",
+ "init_author_url": "",
+ "maxsockets": "50",
+ "shell": "/bin/bash",
+ "metrics_registry": "https://registry.npmjs.org/",
+ "parseable": "",
+ "shrinkwrap": "true",
+ "init_license": "ISC",
+ "timing": "",
+ "if_present": "",
+ "cache_max": "Infinity",
+ "init_author_email": "",
+ "sign_git_tag": "",
+ "cert": "",
+ "git_tag_version": "true",
+ "local_address": "",
+ "long": "",
+ "preid": "",
+ "fetch_retries": "2",
+ "registry": "https://registry.npmjs.org/",
+ "key": "",
+ "message": "%s",
+ "versions": "",
+ "globalconfig": "/usr/local/etc/npmrc",
+ "always_auth": "",
+ "logs_max": "10",
+ "prefer_online": "",
+ "cache_lock_retries": "10",
+ "global_style": "",
+ "update_notifier": "true",
+ "audit_level": "low",
+ "heading": "npm",
+ "fetch_retry_mintimeout": "10000",
+ "offline": "",
+ "read_only": "",
+ "searchlimit": "20",
+ "access": "",
+ "json": "",
+ "allow_same_version": "",
+ "description": "true",
+ "engine_strict": "",
+ "https_proxy": "",
+ "init_module": "/Users/haleyhammock/.npm-init.js",
+ "userconfig": "/Users/haleyhammock/.npmrc",
+ "cidr": "",
+ "node_version": "14.15.4",
+ "user": "",
+ "auth_type": "legacy",
+ "editor": "vi",
+ "ignore_prepublish": "",
+ "save": "true",
+ "script_shell": "",
+ "tag": "latest",
+ "before": "",
+ "global": "",
+ "progress": "true",
+ "ham_it_up": "",
+ "optional": "true",
+ "searchstaleness": "900",
+ "bin_links": "true",
+ "force": "",
+ "save_prod": "",
+ "searchopts": "",
+ "depth": "Infinity",
+ "node_gyp": "/usr/local/lib/node_modules/npm/node_modules/node-gyp/bin/node-gyp.js",
+ "rebuild_bundle": "true",
+ "sso_poll_frequency": "500",
+ "unicode": "true",
+ "fetch_retry_maxtimeout": "60000",
+ "ca": "",
+ "save_prefix": "^",
+ "scripts_prepend_node_path": "warn-only",
+ "sso_type": "oauth",
+ "strict_ssl": "true",
+ "tag_version_prefix": "v",
+ "dev": "",
+ "fetch_retry_factor": "10",
+ "group": "20",
+ "save_exact": "",
+ "cache_lock_stale": "60000",
+ "prefer_offline": "",
+ "version": "",
+ "cache_min": "10",
+ "otp": "",
+ "cache": "/Users/haleyhammock/.npm",
+ "searchexclude": "",
+ "color": "true",
+ "package_lock": "true",
+ "fund": "true",
+ "package_lock_only": "",
+ "save_optional": "",
+ "user_agent": "npm/6.14.10 node/v14.15.4 darwin x64",
+ "ignore_scripts": "",
+ "cache_lock_wait": "10000",
+ "production": "",
+ "save_bundle": "",
+ "send_metrics": "",
+ "init_version": "1.0.0",
+ "node_options": "",
+ "umask": "0022",
+ "scope": "",
+ "git": "git",
+ "init_author_name": "",
+ "onload_script": "",
+ "tmp": "/var/folders/qc/0jfblfrd41b6hs2b8nd2g49h0000gn/T",
+ "unsafe_perm": "true",
+ "format_package_lock": "true",
+ "link": "",
+ "prefix": "/usr/local"
+ }
+}
diff --git a/node_modules/bson/build/gyp-mac-tool b/node_modules/bson/build/gyp-mac-tool
new file mode 100755
index 000000000..033b4e538
--- /dev/null
+++ b/node_modules/bson/build/gyp-mac-tool
@@ -0,0 +1,615 @@
+#!/usr/bin/env python
+# Generated by gyp. Do not edit.
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Utility functions to perform Xcode-style build steps.
+
+These functions are executed via gyp-mac-tool when using the Makefile generator.
+"""
+
+from __future__ import print_function
+
+import fcntl
+import fnmatch
+import glob
+import json
+import os
+import plistlib
+import re
+import shutil
+import string
+import subprocess
+import sys
+import tempfile
+
+PY3 = bytes != str
+
+
+def main(args):
+ executor = MacTool()
+ exit_code = executor.Dispatch(args)
+ if exit_code is not None:
+ sys.exit(exit_code)
+
+
+class MacTool(object):
+ """This class performs all the Mac tooling steps. The methods can either be
+ executed directly, or dispatched from an argument list."""
+
+ def Dispatch(self, args):
+ """Dispatches a string command to a method."""
+ if len(args) < 1:
+ raise Exception("Not enough arguments")
+
+ method = "Exec%s" % self._CommandifyName(args[0])
+ return getattr(self, method)(*args[1:])
+
+ def _CommandifyName(self, name_string):
+ """Transforms a tool name like copy-info-plist to CopyInfoPlist"""
+ return name_string.title().replace('-', '')
+
+ def ExecCopyBundleResource(self, source, dest, convert_to_binary):
+ """Copies a resource file to the bundle/Resources directory, performing any
+ necessary compilation on each resource."""
+ extension = os.path.splitext(source)[1].lower()
+ if os.path.isdir(source):
+ # Copy tree.
+ # TODO(thakis): This copies file attributes like mtime, while the
+ # single-file branch below doesn't. This should probably be changed to
+ # be consistent with the single-file branch.
+ if os.path.exists(dest):
+ shutil.rmtree(dest)
+ shutil.copytree(source, dest)
+ elif extension == '.xib':
+ return self._CopyXIBFile(source, dest)
+ elif extension == '.storyboard':
+ return self._CopyXIBFile(source, dest)
+ elif extension == '.strings':
+ self._CopyStringsFile(source, dest, convert_to_binary)
+ else:
+ shutil.copy(source, dest)
+
+ def _CopyXIBFile(self, source, dest):
+ """Compiles a XIB file with ibtool into a binary plist in the bundle."""
+
+ # ibtool sometimes crashes with relative paths. See crbug.com/314728.
+ base = os.path.dirname(os.path.realpath(__file__))
+ if os.path.relpath(source):
+ source = os.path.join(base, source)
+ if os.path.relpath(dest):
+ dest = os.path.join(base, dest)
+
+ args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices',
+ '--output-format', 'human-readable-text', '--compile', dest, source]
+ ibtool_section_re = re.compile(r'/\*.*\*/')
+ ibtool_re = re.compile(r'.*note:.*is clipping its content')
+ ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE)
+ current_section_header = None
+ for line in ibtoolout.stdout:
+ if ibtool_section_re.match(line):
+ current_section_header = line
+ elif not ibtool_re.match(line):
+ if current_section_header:
+ sys.stdout.write(current_section_header)
+ current_section_header = None
+ sys.stdout.write(line)
+ return ibtoolout.returncode
+
+ def _ConvertToBinary(self, dest):
+ subprocess.check_call([
+ 'xcrun', 'plutil', '-convert', 'binary1', '-o', dest, dest])
+
+ def _CopyStringsFile(self, source, dest, convert_to_binary):
+ """Copies a .strings file using iconv to reconvert the input into UTF-16."""
+ input_code = self._DetectInputEncoding(source) or "UTF-8"
+
+ # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call
+ # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints
+ # CFPropertyListCreateFromXMLData(): Old-style plist parser: missing
+ # semicolon in dictionary.
+ # on invalid files. Do the same kind of validation.
+ import CoreFoundation
+ s = open(source, 'rb').read()
+ d = CoreFoundation.CFDataCreate(None, s, len(s))
+ _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None)
+ if error:
+ return
+
+ fp = open(dest, 'wb')
+ fp.write(s.decode(input_code).encode('UTF-16'))
+ fp.close()
+
+ if convert_to_binary == 'True':
+ self._ConvertToBinary(dest)
+
+ def _DetectInputEncoding(self, file_name):
+ """Reads the first few bytes from file_name and tries to guess the text
+ encoding. Returns None as a guess if it can't detect it."""
+ fp = open(file_name, 'rb')
+ try:
+ header = fp.read(3)
+ except Exception:
+ fp.close()
+ return None
+ fp.close()
+ if header.startswith("\xFE\xFF"):
+ return "UTF-16"
+ elif header.startswith("\xFF\xFE"):
+ return "UTF-16"
+ elif header.startswith("\xEF\xBB\xBF"):
+ return "UTF-8"
+ else:
+ return None
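+ # Illustrative note (not part of the generated tool): a leading FE FF or
+ # FF FE byte pair is treated as a UTF-16 BOM and EF BB BF as UTF-8; when no
+ # BOM is found, _CopyStringsFile falls back to decoding the file as UTF-8.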
+
+ def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys):
+ """Copies the |source| Info.plist to the destination directory |dest|."""
+ # Read the source Info.plist into memory.
+ fd = open(source, 'r')
+ lines = fd.read()
+ fd.close()
+
+ # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild).
+ plist = plistlib.readPlistFromString(lines)
+ if keys:
+ plist = dict(plist.items() + json.loads(keys[0]).items())
+ lines = plistlib.writePlistToString(plist)
+
+ # Go through all the environment variables and replace them as variables in
+ # the file.
+ IDENT_RE = re.compile(r'[/\s]')
+ for key in os.environ:
+ if key.startswith('_'):
+ continue
+ evar = '${%s}' % key
+ evalue = os.environ[key]
+ lines = string.replace(lines, evar, evalue)
+
+ # Xcode supports various suffixes on environment variables, which are
+ # all undocumented. :rfc1034identifier is used in the standard project
+ # template these days, and :identifier was used earlier. They are used to
+ # convert non-url characters into things that look like valid urls --
+ # except that the replacement character for :identifier, '_' isn't valid
+ # in a URL either -- oops, hence :rfc1034identifier was born.
+ evar = '${%s:identifier}' % key
+ evalue = IDENT_RE.sub('_', os.environ[key])
+ lines = string.replace(lines, evar, evalue)
+
+ evar = '${%s:rfc1034identifier}' % key
+ evalue = IDENT_RE.sub('-', os.environ[key])
+ lines = string.replace(lines, evar, evalue)
+
+ # Remove any keys with values that haven't been replaced.
+ lines = lines.split('\n')
+ for i in range(len(lines)):
+ if lines[i].strip().startswith("${"):
+ lines[i] = None
+ lines[i - 1] = None
+ lines = '\n'.join(filter(lambda x: x is not None, lines))
+
+ # Write out the file with variables replaced.
+ fd = open(dest, 'w')
+ fd.write(lines)
+ fd.close()
+
+ # Write out the PkgInfo file now that the Info.plist file has been
+ # "compiled".
+ self._WritePkgInfo(dest)
+
+ if convert_to_binary == 'True':
+ self._ConvertToBinary(dest)
+
+ def _WritePkgInfo(self, info_plist):
+ """This writes the PkgInfo file from the data stored in Info.plist."""
+ plist = plistlib.readPlist(info_plist)
+ if not plist:
+ return
+
+ # Only create PkgInfo for executable types.
+ package_type = plist['CFBundlePackageType']
+ if package_type != 'APPL':
+ return
+
+ # The format of PkgInfo is eight characters, representing the bundle type
+ # and bundle signature, each four characters. If that is missing, four
+ # '?' characters are used instead.
+ signature_code = plist.get('CFBundleSignature', '????')
+ if len(signature_code) != 4: # Wrong length resets everything, too.
+ signature_code = '?' * 4
+
+ dest = os.path.join(os.path.dirname(info_plist), 'PkgInfo')
+ fp = open(dest, 'w')
+ fp.write('%s%s' % (package_type, signature_code))
+ fp.close()
+
+ def ExecFlock(self, lockfile, *cmd_list):
+ """Emulates the most basic behavior of Linux's flock(1)."""
+ # Rely on exception handling to report errors.
+ fd = os.open(lockfile, os.O_RDONLY|os.O_NOCTTY|os.O_CREAT, 0o666)
+ fcntl.flock(fd, fcntl.LOCK_EX)
+ return subprocess.call(cmd_list)
+
+ def ExecFilterLibtool(self, *cmd_list):
+ """Calls libtool and filters out '/path/to/libtool: file: foo.o has no
+ symbols'."""
+ libtool_re = re.compile(r'^.*libtool: file: .* has no symbols$')
+ libtool_re5 = re.compile(
+ r'^.*libtool: warning for library: ' +
+ r'.* the table of contents is empty ' +
+ r'\(no object file members in the library define global symbols\)$')
+ env = os.environ.copy()
+ # Ref:
+ # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c
+ # The problem with this flag is that it resets the file mtime on the file to
+ # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone.
+ env['ZERO_AR_DATE'] = '1'
+ libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env)
+ _, err = libtoolout.communicate()
+ if PY3:
+ err = err.decode('utf-8')
+ for line in err.splitlines():
+ if not libtool_re.match(line) and not libtool_re5.match(line):
+ print(line, file=sys.stderr)
+ # Unconditionally touch the output .a file on the command line if present
+ # and the command succeeded. A bit hacky.
+ if not libtoolout.returncode:
+ for i in range(len(cmd_list) - 1):
+ if cmd_list[i] == "-o" and cmd_list[i+1].endswith('.a'):
+ os.utime(cmd_list[i+1], None)
+ break
+ return libtoolout.returncode
+
+ def ExecPackageFramework(self, framework, version):
+ """Takes a path to Something.framework and the Current version of that and
+ sets up all the symlinks."""
+ # Find the name of the binary based on the part before the ".framework".
+ binary = os.path.basename(framework).split('.')[0]
+
+ CURRENT = 'Current'
+ RESOURCES = 'Resources'
+ VERSIONS = 'Versions'
+
+ if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)):
+ # Binary-less frameworks don't seem to contain symlinks (see e.g.
+ # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle).
+ return
+
+ # Move into the framework directory to set the symlinks correctly.
+ pwd = os.getcwd()
+ os.chdir(framework)
+
+ # Set up the Current version.
+ self._Relink(version, os.path.join(VERSIONS, CURRENT))
+
+ # Set up the root symlinks.
+ self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary)
+ self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES)
+
+ # Back to where we were before!
+ os.chdir(pwd)
+
+ def _Relink(self, dest, link):
+ """Creates a symlink to |dest| named |link|. If |link| already exists,
+ it is overwritten."""
+ if os.path.lexists(link):
+ os.remove(link)
+ os.symlink(dest, link)
+
+ def ExecCompileXcassets(self, keys, *inputs):
+ """Compiles multiple .xcassets files into a single .car file.
+
+ This invokes 'actool' to compile all the input .xcassets files. The
+ |keys| argument is a json-encoded dictionary of extra arguments to
+ pass to 'actool' when the asset catalog contains an application icon
+ or a launch image.
+
+ Note that 'actool' does not create the Assets.car file if the asset
+ catalog does not contain an imageset.
+ """
+ command_line = [
+ 'xcrun', 'actool', '--output-format', 'human-readable-text',
+ '--compress-pngs', '--notices', '--warnings', '--errors',
+ ]
+ is_iphone_target = 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ
+ if is_iphone_target:
+ platform = os.environ['CONFIGURATION'].split('-')[-1]
+ if platform not in ('iphoneos', 'iphonesimulator'):
+ platform = 'iphonesimulator'
+ command_line.extend([
+ '--platform', platform, '--target-device', 'iphone',
+ '--target-device', 'ipad', '--minimum-deployment-target',
+ os.environ['IPHONEOS_DEPLOYMENT_TARGET'], '--compile',
+ os.path.abspath(os.environ['CONTENTS_FOLDER_PATH']),
+ ])
+ else:
+ command_line.extend([
+ '--platform', 'macosx', '--target-device', 'mac',
+ '--minimum-deployment-target', os.environ['MACOSX_DEPLOYMENT_TARGET'],
+ '--compile',
+ os.path.abspath(os.environ['UNLOCALIZED_RESOURCES_FOLDER_PATH']),
+ ])
+ if keys:
+ keys = json.loads(keys)
+ for key, value in keys.items():
+ arg_name = '--' + key
+ if isinstance(value, bool):
+ if value:
+ command_line.append(arg_name)
+ elif isinstance(value, list):
+ for v in value:
+ command_line.append(arg_name)
+ command_line.append(str(v))
+ else:
+ command_line.append(arg_name)
+ command_line.append(str(value))
+ # Note: actool crashes if input paths are relative, so use os.path.abspath
+ # to get absolute path names for inputs.
+ command_line.extend(map(os.path.abspath, inputs))
+ subprocess.check_call(command_line)
+
+ def ExecMergeInfoPlist(self, output, *inputs):
+ """Merge multiple .plist files into a single .plist file."""
+ merged_plist = {}
+ for path in inputs:
+ plist = self._LoadPlistMaybeBinary(path)
+ self._MergePlist(merged_plist, plist)
+ plistlib.writePlist(merged_plist, output)
+
+ def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning):
+ """Code sign a bundle.
+
+ This function tries to code sign an iOS bundle, following the same
+ algorithm as Xcode:
+ 1. copy ResourceRules.plist from the user or the SDK into the bundle,
+ 2. pick the provisioning profile that best matches the bundle identifier,
+ and copy it into the bundle as embedded.mobileprovision,
+ 3. copy Entitlements.plist from user or SDK next to the bundle,
+ 4. code sign the bundle.
+ """
+ resource_rules_path = self._InstallResourceRules(resource_rules)
+ substitutions, overrides = self._InstallProvisioningProfile(
+ provisioning, self._GetCFBundleIdentifier())
+ entitlements_path = self._InstallEntitlements(
+ entitlements, substitutions, overrides)
+ subprocess.check_call([
+ 'codesign', '--force', '--sign', key, '--resource-rules',
+ resource_rules_path, '--entitlements', entitlements_path,
+ os.path.join(
+ os.environ['TARGET_BUILD_DIR'],
+ os.environ['FULL_PRODUCT_NAME'])])
+
+ def _InstallResourceRules(self, resource_rules):
+ """Installs ResourceRules.plist from user or SDK into the bundle.
+
+ Args:
+ resource_rules: string, optional, path to the ResourceRules.plist file
+ to use; defaults to "${SDKROOT}/ResourceRules.plist"
+
+ Returns:
+ Path to the copy of ResourceRules.plist into the bundle.
+ """
+ source_path = resource_rules
+ target_path = os.path.join(
+ os.environ['BUILT_PRODUCTS_DIR'],
+ os.environ['CONTENTS_FOLDER_PATH'],
+ 'ResourceRules.plist')
+ if not source_path:
+ source_path = os.path.join(
+ os.environ['SDKROOT'], 'ResourceRules.plist')
+ shutil.copy2(source_path, target_path)
+ return target_path
+
+ def _InstallProvisioningProfile(self, profile, bundle_identifier):
+ """Installs embedded.mobileprovision into the bundle.
+
+ Args:
+ profile: string, optional, short name of the .mobileprovision file
+ to use, if empty or the file is missing, the best file installed
+ will be used
+ bundle_identifier: string, value of CFBundleIdentifier from Info.plist
+
+ Returns:
+ A tuple containing two dictionaries: variable substitutions and values
+ to override when generating the entitlements file.
+ """
+ source_path, provisioning_data, team_id = self._FindProvisioningProfile(
+ profile, bundle_identifier)
+ target_path = os.path.join(
+ os.environ['BUILT_PRODUCTS_DIR'],
+ os.environ['CONTENTS_FOLDER_PATH'],
+ 'embedded.mobileprovision')
+ shutil.copy2(source_path, target_path)
+ substitutions = self._GetSubstitutions(bundle_identifier, team_id + '.')
+ return substitutions, provisioning_data['Entitlements']
+
+ def _FindProvisioningProfile(self, profile, bundle_identifier):
+ """Finds the .mobileprovision file to use for signing the bundle.
+
+ Checks all the installed provisioning profiles (or, if the user specified
+ the PROVISIONING_PROFILE variable, only that one) and selects the most
+ specific profile that corresponds to the bundle identifier.
+
+ Args:
+ profile: string, optional, short name of the .mobileprovision file
+ to use, if empty or the file is missing, the best file installed
+ will be used
+ bundle_identifier: string, value of CFBundleIdentifier from Info.plist
+
+ Returns:
+ A tuple of the path to the selected provisioning profile, the data of
+ the embedded plist in the provisioning profile and the team identifier
+ to use for code signing.
+
+ Raises:
+ SystemExit: if no .mobileprovision can be used to sign the bundle.
+ """
+ profiles_dir = os.path.join(
+ os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles')
+ if not os.path.isdir(profiles_dir):
+ print('cannot find mobile provisioning for %s' % (bundle_identifier), file=sys.stderr)
+ sys.exit(1)
+ provisioning_profiles = None
+ if profile:
+ profile_path = os.path.join(profiles_dir, profile + '.mobileprovision')
+ if os.path.exists(profile_path):
+ provisioning_profiles = [profile_path]
+ if not provisioning_profiles:
+ provisioning_profiles = glob.glob(
+ os.path.join(profiles_dir, '*.mobileprovision'))
+ valid_provisioning_profiles = {}
+ for profile_path in provisioning_profiles:
+ profile_data = self._LoadProvisioningProfile(profile_path)
+ app_id_pattern = profile_data.get(
+ 'Entitlements', {}).get('application-identifier', '')
+ for team_identifier in profile_data.get('TeamIdentifier', []):
+ app_id = '%s.%s' % (team_identifier, bundle_identifier)
+ if fnmatch.fnmatch(app_id, app_id_pattern):
+ valid_provisioning_profiles[app_id_pattern] = (
+ profile_path, profile_data, team_identifier)
+ if not valid_provisioning_profiles:
+ print('cannot find mobile provisioning for %s' % (bundle_identifier), file=sys.stderr)
+ sys.exit(1)
+ # If the user has multiple provisioning profiles installed that can be
+ # used for ${bundle_identifier}, pick the most specific one (ie. the
+ # provisioning profile whose pattern is the longest).
+ selected_key = max(valid_provisioning_profiles, key=lambda v: len(v))
+ return valid_provisioning_profiles[selected_key]
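+ # Illustrative example (not part of the generated tool): given candidate
+ # patterns 'ABCDE12345.*' and 'ABCDE12345.com.example.*' (hypothetical team
+ # and bundle ids), both matching 'ABCDE12345.com.example.app', the longer,
+ # more specific pattern is selected.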
+
+ def _LoadProvisioningProfile(self, profile_path):
+ """Extracts the plist embedded in a provisioning profile.
+
+ Args:
+ profile_path: string, path to the .mobileprovision file
+
+ Returns:
+ Content of the plist embedded in the provisioning profile as a dictionary.
+ """
+ with tempfile.NamedTemporaryFile() as temp:
+ subprocess.check_call([
+ 'security', 'cms', '-D', '-i', profile_path, '-o', temp.name])
+ return self._LoadPlistMaybeBinary(temp.name)
+
+ def _MergePlist(self, merged_plist, plist):
+ """Merge |plist| into |merged_plist|."""
+ for key, value in plist.items():
+ if isinstance(value, dict):
+ merged_value = merged_plist.get(key, {})
+ if isinstance(merged_value, dict):
+ self._MergePlist(merged_value, value)
+ merged_plist[key] = merged_value
+ else:
+ merged_plist[key] = value
+ else:
+ merged_plist[key] = value
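+ # Illustrative example (not part of the generated tool): merging
+ # {'A': {'x': 1}} into {'A': {'y': 2}, 'B': 3} yields
+ # {'A': {'y': 2, 'x': 1}, 'B': 3}: nested dictionaries are merged,
+ # every other value type is overwritten.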
+
+ def _LoadPlistMaybeBinary(self, plist_path):
+ """Loads into memory a plist possibly encoded in binary format.
+
+ This is a wrapper around plistlib.readPlist that tries to convert the
+ plist to the XML format if it can't be parsed (assuming that it is in
+ the binary format).
+
+ Args:
+ plist_path: string, path to a plist file, in XML or binary format
+
+ Returns:
+ Content of the plist as a dictionary.
+ """
+ try:
+ # First, try to read the file using plistlib that only supports XML,
+ # and if an exception is raised, convert a temporary copy to XML and
+ # load that copy.
+ return plistlib.readPlist(plist_path)
+ except:
+ pass
+ with tempfile.NamedTemporaryFile() as temp:
+ shutil.copy2(plist_path, temp.name)
+ subprocess.check_call(['plutil', '-convert', 'xml1', temp.name])
+ return plistlib.readPlist(temp.name)
+
+ def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix):
+ """Constructs a dictionary of variable substitutions for Entitlements.plist.
+
+ Args:
+ bundle_identifier: string, value of CFBundleIdentifier from Info.plist
+ app_identifier_prefix: string, value for AppIdentifierPrefix
+
+ Returns:
+ Dictionary of substitutions to apply when generating Entitlements.plist.
+ """
+ return {
+ 'CFBundleIdentifier': bundle_identifier,
+ 'AppIdentifierPrefix': app_identifier_prefix,
+ }
+
+ def _GetCFBundleIdentifier(self):
+ """Extracts CFBundleIdentifier value from Info.plist in the bundle.
+
+ Returns:
+ Value of CFBundleIdentifier in the Info.plist located in the bundle.
+ """
+ info_plist_path = os.path.join(
+ os.environ['TARGET_BUILD_DIR'],
+ os.environ['INFOPLIST_PATH'])
+ info_plist_data = self._LoadPlistMaybeBinary(info_plist_path)
+ return info_plist_data['CFBundleIdentifier']
+
+ def _InstallEntitlements(self, entitlements, substitutions, overrides):
+ """Generates and installs the ${BundleName}.xcent entitlements file.
+
+ Expands the "$(variable)" pattern in the source entitlements file,
+ adds extra entitlements defined in the .mobileprovision file and copies
+ the generated plist to "${BundlePath}.xcent".
+
+ Args:
+ entitlements: string, optional, path to the Entitlements.plist template
+ to use, defaults to "${SDKROOT}/Entitlements.plist"
+ substitutions: dictionary, variable substitutions
+ overrides: dictionary, values to add to the entitlements
+
+ Returns:
+ Path to the generated entitlements file.
+ """
+ source_path = entitlements
+ target_path = os.path.join(
+ os.environ['BUILT_PRODUCTS_DIR'],
+ os.environ['PRODUCT_NAME'] + '.xcent')
+ if not source_path:
+ source_path = os.path.join(
+ os.environ['SDKROOT'],
+ 'Entitlements.plist')
+ shutil.copy2(source_path, target_path)
+ data = self._LoadPlistMaybeBinary(target_path)
+ data = self._ExpandVariables(data, substitutions)
+ if overrides:
+ for key in overrides:
+ if key not in data:
+ data[key] = overrides[key]
+ plistlib.writePlist(data, target_path)
+ return target_path
+
+ def _ExpandVariables(self, data, substitutions):
+ """Expands variables "$(variable)" in data.
+
+ Args:
+ data: object, can be either string, list or dictionary
+ substitutions: dictionary, variable substitutions to perform
+
+ Returns:
+ Copy of data where each reference to "$(variable)" has been replaced
+ by the corresponding value found in substitutions, or left intact if
+ the key was not found.
+ """
+ if isinstance(data, str):
+ for key, value in substitutions.items():
+ data = data.replace('$(%s)' % key, value)
+ return data
+ if isinstance(data, list):
+ return [self._ExpandVariables(v, substitutions) for v in data]
+ if isinstance(data, dict):
+ return {k: self._ExpandVariables(data[k], substitutions) for k in data}
+ return data
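+ # Illustrative example (not part of the generated tool):
+ #   self._ExpandVariables('$(CFBundleIdentifier).xcent',
+ #                         {'CFBundleIdentifier': 'com.example.app'})
+ #   returns 'com.example.app.xcent'; lists and dicts are expanded recursively.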
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
diff --git a/node_modules/bson/build_browser.js b/node_modules/bson/build_browser.js
new file mode 100644
index 000000000..bb8023844
--- /dev/null
+++ b/node_modules/bson/build_browser.js
@@ -0,0 +1,7 @@
+require('one');
+
+one('./package.json')
+ .tie('bson', BSON)
+ // .exclude('buffer')
+ .tie('buffer', {})
+ .save('./browser_build/bson.js')
\ No newline at end of file
diff --git a/node_modules/bson/ext/Makefile b/node_modules/bson/ext/Makefile
new file mode 100644
index 000000000..435999ee9
--- /dev/null
+++ b/node_modules/bson/ext/Makefile
@@ -0,0 +1,28 @@
+NODE = node
+name = all
+JOBS = 1
+
+all:
+ rm -rf build .lock-wscript bson.node
+ node-waf configure build
+ cp -R ./build/Release/bson.node . || true
+
+all_debug:
+ rm -rf build .lock-wscript bson.node
+ node-waf --debug configure build
+ cp -R ./build/Release/bson.node . || true
+
+clang:
+ rm -rf build .lock-wscript bson.node
+ CXX=clang node-waf configure build
+ cp -R ./build/Release/bson.node . || true
+
+clang_debug:
+ rm -rf build .lock-wscript bson.node
+ CXX=clang node-waf --debug configure build
+ cp -R ./build/Release/bson.node . || true
+
+clean:
+ rm -rf build .lock-wscript bson.node
+
+.PHONY: all
\ No newline at end of file
diff --git a/node_modules/bson/ext/bson.cc b/node_modules/bson/ext/bson.cc
new file mode 100644
index 000000000..1ec303163
--- /dev/null
+++ b/node_modules/bson/ext/bson.cc
@@ -0,0 +1,1045 @@
+//===========================================================================
+
+#include <stdarg.h>
+#include <cstdlib>
+#include <cstring>
+#include <string.h>
+#include <stdlib.h>
+
+#ifdef __clang__
+#pragma clang diagnostic push
+#pragma clang diagnostic ignored "-Wunused-parameter"
+#endif
+
+#include <v8.h>
+
+// this and the above block must be around the v8.h header otherwise
+// v8 is not happy
+#ifdef __clang__
+#pragma clang diagnostic pop
+#endif
+
+#include <node.h>
+#include <node_version.h>
+#include <node_buffer.h>
+
+#include <cmath>
+#include <iostream>
+#include <limits>
+#include <vector>
+
+#ifdef __sun
+ #include <alloca.h>
+#endif
+
+#include "bson.h"
+
+using namespace v8;
+using namespace node;
+
+//===========================================================================
+
+void DataStream::WriteObjectId(const Handle