diff --git a/.buildinfo b/.buildinfo new file mode 100644 index 00000000000..d43c10694e7 --- /dev/null +++ b/.buildinfo @@ -0,0 +1,4 @@ +# Sphinx build info version 1 +# This file hashes the configuration used when building these files. When it is not found, a full rebuild will be done. +config: d4a1ef2e8fee8941792720296198c690 +tags: fbb0d17656682115ca4d033fb2f83ba1 diff --git a/.gitignore b/.gitignore index ead9da69cc7..f82f9a21e97 100644 --- a/.gitignore +++ b/.gitignore @@ -3,7 +3,10 @@ *.seed .project .settings +<<<<<<< HEAD +======= data node_modules/ output build +>>>>>>> efddd763a3fd570178896ad748101797f10ec5b8 diff --git a/docs/sphinx-docs/source/static/.mongodb b/.nojekyll similarity index 100% rename from docs/sphinx-docs/source/static/.mongodb rename to .nojekyll diff --git a/.npmignore b/.npmignore deleted file mode 100644 index ed901d90e73..00000000000 --- a/.npmignore +++ /dev/null @@ -1,12 +0,0 @@ -.git* -.buildinfo -.mongodb -docs/ -docs/sphinx-docs -data/ -dev/ -examples/ -test/ -.DS_Store - - diff --git a/Makefile b/Makefile deleted file mode 100644 index 26d9dd2142f..00000000000 --- a/Makefile +++ /dev/null @@ -1,67 +0,0 @@ -NODE = node -NPM = npm -NODEUNIT = deps/nodeunit/bin/nodeunit -DOX = node_modules/dox/bin/dox -name = all - -total: build_native - -build_native: - $(MAKE) -C ./external-libs/bson all - -build_native_debug: - $(MAKE) -C ./external-libs/bson all_debug - -build_native_clang: - $(MAKE) -C ./external-libs/bson clang - -build_native_clang_debug: - $(MAKE) -C ./external-libs/bson clang_debug - -clean_native: - $(MAKE) -C ./external-libs/bson clean - -test: build_native - @echo "\n == Run All tests minus replicaset tests==" - $(NODE) dev/tools/test_all.js --noreplicaset --boot - -test_junit: build_native - @echo "\n == Run All tests minus replicaset tests==" - $(NODE) dev/tools/test_all.js --junit --noreplicaset - -test_nodeunit_pure: - @echo "\n == Execute Test Suite using Pure JS BSON Parser == " - @$(NODEUNIT) test/ test/gridstore test/bson - -test_js: - @$(NODEUNIT) $(TESTS) - -test_nodeunit_replicaset_pure: - @echo "\n == Execute Test Suite using Pure JS BSON Parser == " - @$(NODEUNIT) test/replicaset - -test_nodeunit_native: - @echo "\n == Execute Test Suite using Native BSON Parser == " - @TEST_NATIVE=TRUE $(NODEUNIT) test/ test/gridstore test/bson - -test_nodeunit_replicaset_native: - @echo "\n == Execute Test Suite using Native BSON Parser == " - @TEST_NATIVE=TRUE $(NODEUNIT) test/replicaset - -test_all: build_native - @echo "\n == Run All tests ==" - $(NODE) dev/tools/test_all.js --boot - -test_all_junit: build_native - @echo "\n == Run All tests ==" - $(NODE) dev/tools/test_all.js --junit --boot - -clean: - rm ./external-libs/bson/bson.node - rm -r ./external-libs/bson/build - -generate_docs: - $(NODE) dev/tools/build-docs.js - make --directory=./docs/sphinx-docs --file=Makefile html - -.PHONY: total diff --git a/Readme.md b/Readme.md deleted file mode 100644 index 0008875e597..00000000000 --- a/Readme.md +++ /dev/null @@ -1,408 +0,0 @@ -Install -======== - -To install the most recent release from npm, run: - - npm install mongodb - -That may give you a warning telling you that bugs['web'] should be bugs['url'], it would be safe to ignore it (this has been fixed in the development version) - -To install from the latest from the repository, run:: - - npm install path/to/node-mongodb-native - -Community -======== -Check out the google group [node-mongodb-native](http://groups.google.com/group/node-mongodb-native) for questions/answers from users 
of the driver. - -Introduction -======== - -This is a node.js driver for MongoDB. It's a port (or close to a port) of the library for ruby at http://github.com/mongodb/mongo-ruby-driver/. - -A simple example of inserting a document. - - var client = new Db('test', new Server("127.0.0.1", 27017, {})), - test = function (err, collection) { - collection.insert({a:2}, function(err, docs) { - - collection.count(function(err, count) { - test.assertEquals(1, count); - }); - - // Locate all the entries using find - collection.find().toArray(function(err, results) { - test.assertEquals(1, results.length); - test.assertTrue(results.a === 2); - - // Let's close the db - client.close(); - }); - }); - }; - - client.open(function(err, p_client) { - client.collection('test_insert', test); - }); - -Data types -======== - -To store and retrieve the non-JSON MongoDb primitives ([ObjectID](http://www.mongodb.org/display/DOCS/Object+IDs), Long, Binary, [Timestamp](http://www.mongodb.org/display/DOCS/Timestamp+data+type), [DBRef](http://www.mongodb.org/display/DOCS/Database+References#DatabaseReferences-DBRef), Code). - -In particular, every document has a unique `_id` which can be almost any type, and by default a 12-byte ObjectID is created. ObjectIDs can be represented as 24-digit hexadecimal strings, but you must convert the string back into an ObjectID before you can use it in the database. For example: - - // Get the objectID type - var ObjectID = require('mongodb').ObjectID; - - var idString = '4e4e1638c85e808431000003'; - collection.findOne({_id: new ObjectID(idString)}, console.log) // ok - collection.findOne({_id: idString}, console.log) // wrong! callback gets undefined - -Here are the constructors the non-Javascript BSON primitive types: - - // Fetch the library - var mongo = require('mongodb'); - // Create new instances of BSON types - new mongo.Long(numberString) - new mongo.ObjectID(hexString) - new mongo.Timestamp() // the actual unique number is generated on insert. - new mongo.DBRef(collectionName, id, dbName) - new mongo.Binary(buffer) // takes a string or Buffer - new mongo.Code(code, [context]) - new mongo.Symbol(string) - new mongo.MinKey() - new mongo.MaxKey() - new mongo.Double(number) // Force double storage - -The C/C++ bson parser/serializer --------- - -From V0.8.0 to V0.9.6.9, the Javascript bson parser was slower than an optional C/C++ bson parser. As of V0.9.6.9+, due to performance improvements in the Javascript parser, the C/C++ parser is deprecated and is not installed by default anymore. - -If you are running a version of this library has the C/C++ parser compiled, to enable the driver to use the C/C++ bson parser pass it the option native_parser:true like below - - // using Deprecated native_parser: - var client = new Db('integration_tests_20', - new Server("127.0.0.1", 27017), - {native_parser:true}); - -The C++ parser uses the js objects both for serialization and deserialization. - -GitHub information -======== - -The source code is available at http://github.com/christkv/node-mongodb-native. -You can either clone the repository or download a tarball of the latest release. - -Once you have the source you can test the driver by running - - $ make test - -in the main directory. You will need to have a mongo instance running on localhost for the integration tests to pass. - -Examples -======== - -For examples look in the examples/ directory. You can execute the examples using node. 
- - $ cd examples - $ node queries.js - -GridStore -========= - -The GridStore class allows for storage of binary files in mongoDB using the mongoDB defined files and chunks collection definition. - -For more information have a look at [Gridstore](https://github.com/christkv/node-mongodb-native/blob/master/docs/gridfs.md) - -Replicasets -=========== -For more information about how to connect to a replicaset have a look at [Replicasets](https://github.com/christkv/node-mongodb-native/blob/master/docs/replicaset.md) - -Primary Key Factories --------- - -Defining your own primary key factory allows you to generate your own series of id's -(this could f.ex be to use something like ISBN numbers). The generated the id needs to be a 12 byte long "string". - -Simple example below - - // Custom factory (need to provide a 12 byte array); - CustomPKFactory = function() {} - CustomPKFactory.prototype = new Object(); - CustomPKFactory.createPk = function() { - return new ObjectID("aaaaaaaaaaaa"); - } - - var p_client = new Db('integration_tests_20', new Server("127.0.0.1", 27017, {}), {'pk':CustomPKFactory}); - p_client.open(function(err, p_client) { - p_client.dropDatabase(function(err, done) { - p_client.createCollection('test_custom_key', function(err, collection) { - collection.insert({'a':1}, function(err, docs) { - collection.find({'_id':new ObjectID("aaaaaaaaaaaa")}, function(err, cursor) { - cursor.toArray(function(err, items) { - test.assertEquals(1, items.length); - - // Let's close the db - p_client.close(); - }); - }); - }); - }); - }); - }); - -Strict mode --------- - -Each database has an optional strict mode. If it is set then asking for a collection -that does not exist will return an Error object in the callback. Similarly if you -attempt to create a collection that already exists. Strict is provided for convenience. - - var error_client = new Db('integration_tests_', new Server("127.0.0.1", 27017, {auto_reconnect: false}), {strict:true}); - test.assertEquals(true, error_client.strict); - - error_client.open(function(err, error_client) { - error_client.collection('does-not-exist', function(err, collection) { - test.assertTrue(err instanceof Error); - test.assertEquals("Collection does-not-exist does not exist. Currently in strict mode.", err.message); - }); - - error_client.createCollection('test_strict_access_collection', function(err, collection) { - error_client.collection('test_strict_access_collection', function(err, collection) { - test.assertTrue(collection instanceof Collection); - // Let's close the db - error_client.close(); - }); - }); - }); - -Documentation -======== - -If this document doesn't answer your questions, see the source of -[Collection](https://github.com/christkv/node-mongodb-native/blob/master/lib/mongodb/collection.js) -or [Cursor](https://github.com/christkv/node-mongodb-native/blob/master/lib/mongodb/cursor.js), -or the documentation at MongoDB for query and update formats. - -Find --------- - -The find method is actually a factory method to create -Cursor objects. A Cursor lazily uses the connection the first time -you call `nextObject`, `each`, or `toArray`. - -The basic operation on a cursor is the `nextObject` method -that fetches the next matching document from the database. The convenience -methods `each` and `toArray` call `nextObject` until the cursor is exhausted. - -Signatures: - - var cursor = collection.find(query, [fields], options); - cursor.sort(fields).limit(n).skip(m). 
- - cursor.nextObject(function(err, doc) {}); - cursor.each(function(err, doc) {}); - cursor.toArray(function(err, docs) {}); - - cursor.rewind() // reset the cursor to its initial state. - -Useful chainable methods of cursor. These can optionally be options of `find` instead of method calls: - -* `.limit(n).skip(m)` to control paging. -* `.sort(fields)` Order by the given fields. There are several equivalent syntaxes: - * `.sort({field1: -1, field2: 1})` descending by field1, then ascending by field2. - * `.sort([['field1', 'desc'], ['field2', 'asc']])` same as above - * `.sort([['field1', 'desc'], 'field2'])` same as above - * `.sort('field1')` ascending by field1 - -Other options of `find`: - -* `fields` the fields to fetch (to avoid transferring the entire document) -* `tailable` if true, makes the cursor [tailable](http://www.mongodb.org/display/DOCS/Tailable+Cursors). -* `batchSize` The number of the subset of results to request the database -to return for every request. This should initially be greater than 1 otherwise -the database will automatically close the cursor. The batch size can be set to 1 -with `batchSize(n, function(err){})` after performing the initial query to the database. -* `hint` See [Optimization: hint](http://www.mongodb.org/display/DOCS/Optimization#Optimization-Hint). -* `explain` turns this into an explain query. You can also call -`explain()` on any cursor to fetch the explanation. -* `snapshot` prevents documents that are updated while the query is active -from being returned multiple times. See more -[details about query snapshots](http://www.mongodb.org/display/DOCS/How+to+do+Snapshotted+Queries+in+the+Mongo+Database). -* `timeout` if false, asks MongoDb not to time out this cursor after an -inactivity period. - - -For information on how to create queries, see the -[MongoDB section on querying](http://www.mongodb.org/display/DOCS/Querying). - - var mongodb = require('mongodb'); - var server = new mongodb.Server("127.0.0.1", 27017, {}); - new mongodb.Db('test', server, {}).open(function (error, client) { - if (error) throw error; - var collection = new mongodb.Collection(client, 'test_collection'); - collection.find({}, {limit:10}).toArray(function(err, docs) { - console.dir(docs); - }); - }); - -Insert --------- - -Signature: - - collection.insert(docs, options, [callback]); - -where `docs` can be a single document or an array of documents. - -Useful options: - -* `safe:true` Should always set if you have a callback. - -See also: [MongoDB docs for insert](http://www.mongodb.org/display/DOCS/Inserting). - - var mongodb = require('mongodb'); - var server = new mongodb.Server("127.0.0.1", 27017, {}); - new mongodb.Db('test', server, {}).open(function (error, client) { - if (error) throw error; - var collection = new mongodb.Collection(client, 'test_collection'); - collection.insert({hello: 'world'}, {safe:true}, - function(err, objects) { - if (err) console.warn(err.message); - if (err && err.message.indexOf('E11000 ') !== -1) { - // this _id was already inserted in the database - } - }); - }); - -Note that there's no reason to pass a callback to the insert or update commands -unless you use the `safe:true` option. If you don't specify `safe:true`, then -your callback will be called immediately. - -Update; update and insert (upsert) --------- - -The update operation will update the first document that matches your query -(or all documents that match if you use `multi:true`). 
-If `safe:true`, `upsert` is not set, and no documents match, your callback -will be given an error. - -See the [MongoDB docs](http://www.mongodb.org/display/DOCS/Updating) for -the modifier (`$inc`, `$set`, `$push`, etc.) formats. - -Signature: - - collection.update(criteria, objNew, options, [callback]); - -Useful options: - -* `safe:true` Should always set if you have a callback. -* `multi:true` If set, all matching documents are updated, not just the first. -* `upsert:true` Atomically inserts the document if no documents matched. - -Example for `update`: - - var mongodb = require('mongodb'); - var server = new mongodb.Server("127.0.0.1", 27017, {}); - new mongodb.Db('test', server, {}).open(function (error, client) { - if (error) throw error; - var collection = new mongodb.Collection(client, 'test_collection'); - collection.update({hi: 'here'}, {$set: {hi: 'there'}}, {safe:true}, - function(err) { - if (err) console.warn(err.message); - else console.log('successfully updated'); - }); - }); - -Find and modify --------- - -`findAndModify` is like `update`, but it also gives the updated document to -your callback. But there are a few key differences between findAndModify and -update: - - 1. The signatures differ. - 2. You can only findAndModify a single item, not multiple items. - -Signature: - - collection.findAndModify(query, sort, update, options, callback) - -The sort parameter is used to specify which object to operate on, if more than -one document matches. It takes the same format as the cursor sort (see -Connection.find above). - -See the -[MongoDB docs for findAndModify](http://www.mongodb.org/display/DOCS/findAndModify+Command) -for more details. - -Useful options: - -* `remove:true` set to a true to remove the object before returning -* `new:true` set to true if you want to return the modified object rather than the original. Ignored for remove. -* `upsert:true` Atomically inserts the document if no documents matched. - -Example for `findAndModify`: - - var mongodb = require('mongodb'); - var server = new mongodb.Server("127.0.0.1", 27017, {}); - new mongodb.Db('test', server, {}).open(function (error, client) { - if (error) throw error; - var collection = new mongodb.Collection(client, 'test_collection'); - collection.findAndModify({hello: 'world'}, [['_id','asc']], {$set: {hi: 'there'}}, {}, - function(err, object) { - if (err) console.warn(err.message); - else console.dir(object); // undefined if no matching object exists. - }); - }); - -Save --------- - -The `save` method is a shorthand for upsert if the document contains an -`_id`, or an insert if there is no `_id`. - -Sponsors -======== -Just as Felix Geisendörfer I'm also working on the driver for my own startup and this driver is a big project that also benefits other companies who are using MongoDB. - -If your company could benefit from a even better-engineered node.js mongodb driver I would appreciate any type of sponsorship you may be able to provide. All the sponsors will get a lifetime display in this readme, priority support and help on problems and votes on the roadmap decisions for the driver. If you are interested contact me on [christkv AT g m a i l.com](mailto:christkv@gmail.com) for details. - -And I'm very thankful for code contributions. If you are interested in working on features please contact me so we can discuss API design and testing. - -Release Notes -============= - -See HISTORY - -Credits -======== - -1. [10gen](http://github.com/mongodb/mongo-ruby-driver/) -2. 
[Google Closure Library](http://code.google.com/closure/library/) -3. [Jonas Raoni Soares Silva](http://jsfromhell.com/classes/binary-parser) - -Contributors -============= - -Aaron Heckmann, Christoph Pojer, Pau Ramon Revilla, Nathan White, Emmerman, Seth LaForge, Boris Filipov, Stefan Schärmeli, Tedde Lundgren, renctan, Sergey Ukustov, Ciaran Jessup, kuno, srimonti, Erik Abele, Pratik Daga, Slobodan Utvic, Kristina Chodorow, Yonathan Randolph, Brian Noguchi, Sam Epstein, James Harrison Fisher, Vladimir Dronnikov, Ben Hockey, Henrik Johansson, Simon Weare, Alex Gorbatchev, Shimon Doodkin, Kyle Mueller, Eran Hammer-Lahav, Marcin Ciszak, François de Metz, Vinay Pulim, nstielau, Adam Wiggins, entrinzikyl, Jeremy Selier, Ian Millington, Public Keating, andrewjstone, Christopher Stott, Corey Jewett, brettkiefer, Rob Holland, Senmiao Liu, heroic, gitfy - -License -======== - - Copyright 2009 - 2010 Christian Amor Kvalheim. - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. - diff --git a/TODO b/TODO deleted file mode 100644 index 1ed86e8de1f..00000000000 --- a/TODO +++ /dev/null @@ -1,25 +0,0 @@ -TODO for jan 4 - 2011 -- Chase down potential memory leak in findAndModify -- Check compatibility for gridFS between python and js -- Ensure Gridfs speed is comparable to other solutions -- Map out python replicaset driver functionality - -ACCOMPLISHED jan 4 - 2011 -+ Chase down potential memory leak in findAndModify -+ Check compatibility for gridFS between python and js -+ Ensure Gridfs speed is comparable to other solutions - -0.9.7.4-dev -- Amortize documents (only deserialize when accessed) -- Mongo tests to handle special Mongos situations -- If a secondary server disappears don't kill the driver connection -- Check for new servers coming online (a new secondary server etc) -- http://api.mongodb.org/python/current/api/pymongo/index.html#pymongo.ReadPreference - ---------------------------------------------------------------------------------------------* Allow secondary read with no master -* Add lowest response time selection of read server for replicaset using a selectable strategy. 
First one being using the ping command response time -* Allow execution of multiple commands against the same server connection when having a connection pool -* Implement tag support for replicasets -* Change bson c++ parser to use js objects instead of native c++ objects -* Whole stack benchmark with profiling to locate where the driver spends time -* Change bson c++ parser to be stackless to look at performance difference \ No newline at end of file diff --git a/docs/sphinx-docs/source/static/active.png b/_images/active.png similarity index 100% rename from docs/sphinx-docs/source/static/active.png rename to _images/active.png diff --git a/docs/sphinx-docs/source/static/nonactive.png b/_images/nonactive.png similarity index 100% rename from docs/sphinx-docs/source/static/nonactive.png rename to _images/nonactive.png diff --git a/_sources/api-articles/index.txt b/_sources/api-articles/index.txt new file mode 100644 index 00000000000..6509b1aaf72 --- /dev/null +++ b/_sources/api-articles/index.txt @@ -0,0 +1,9 @@ +================== +Articles +================== + +.. toctree:: + :maxdepth: 2 + + nodekoarticle1 + nodekoarticle2 diff --git a/_sources/api-articles/nodekoarticle1.txt b/_sources/api-articles/nodekoarticle1.txt new file mode 100644 index 00000000000..915b318e4ba --- /dev/null +++ b/_sources/api-articles/nodekoarticle1.txt @@ -0,0 +1,456 @@ +================================ +A Basic introduction to Mongo DB +================================ + + Mongo DB has rapidly grown to become a popular database for web applications and is a perfect fit for Node.JS applications, letting you write Javascript for the client, backend and database layer. Its schemaless nature is a better match to our constantly evolving data structures in web applications, and the integrated support for location queries is a bonus that's hard to ignore. Throw in Replica Sets for scaling, and we're looking at really nice platform to grow your storage needs now and in the future. + + + + Now to shamelessly plug my driver. It can be downloaded via npm, or fetched from the github repository. To install via npm, do the following: + + + + ``npm install mongodb`` + + + + or go fetch it from github at `https://github.com/christkv/node-mongodb-native `_ + + + + Once this business is taken care of, let's move through the types available for the driver and then how to connect to your Mongo DB instance before facing the usage of some CRUD operations. + + +------------------- +Mongo DB data types +------------------- + + So there is an important thing to keep in mind when working with Mongo DB, and that is the slight mapping difference between types Mongo DB supports and native Javascript data types. Let's have a look at the types supported out of the box and then how types are promoted by the driver to fit as close to native Javascript types as possible. + + + * **Float** is a 8 byte and is directly convertible to the Javascript type Number + * **Double class** a special class representing a float value, this is especially useful when using capped collections where you need to ensure your values are always floats. + * **Integers** is a bit trickier due to the fact that Javascript represents all Numbers as 64 bit floats meaning that the maximum integer value is at a 53 bit. Mongo has two types for integers, a 32 bit and a 64 bit. The driver will try to fit the value into 32 bits if it can and promote it to 64 bits if it has to. Similarly it will deserialize attempting to fit it into 53 bits if it can. 
If it cannot it will return an instance of **Long** to avoid loosing precession. + * **Long class** a special class that let's you store 64 bit integers and also let's you operate on the 64 bits integers. + * **Date** maps directly to a Javascript Date + * **RegExp** maps directly to a Javascript RegExp + * **String** maps directly to a Javascript String (encoded in utf8) + * **Binary class** a special class that let's you store data in Mongo DB + * **Code class** a special class that let's you store javascript functions in Mongo DB, can also provide a scope to run the method in + * **ObjectID class** a special class that holds a MongoDB document identifier (the equivalent to a Primary key) + * **DbRef class** a special class that let's you include a reference in a document pointing to another object + * **Symbol class** a special class that let's you specify a symbol, not really relevant for javascript but for languages that supports the concept of symbols. + + + + As we see the number type can be a little tricky due to the way integers are implemented in Javascript. The latest driver will do correct conversion up to 53 bit's of complexity. If you need to handle big integers the recommendation is to use the Long class to operate on the numbers. + + +--------------------------------------- +Getting that connection to the database +--------------------------------------- + + Let's get around to setting up a connection with the Mongo DB database. Jumping straight into the code let's do direct connection and then look at the code. + + + .. code-block:: javascript + + // Retrieve + var MongoClient = require('mongodb').MongoClient; + + // Connect to the db + MongoClient.connect("mongodb://localhost:27017/exampleDb", function(err, db) { + if(!err) { + console.log("We are connected"); + } + }); + + + + Let's have a quick look at how the connection code works. The **Db.connect** +method let's use use a uri to connect to the Mongo database, where + **localhost:27017** is the server host and port and **exampleDb** the db +we wish to connect to. After the url notice the hash containing the + **auto_reconnect** key. Auto reconnect tells the driver to retry sending +a command to the server if there is a failure during it's execution. + + + + Another useful option you can pass in is + + + + **poolSize** , this allows you to control how many tcp connections are +opened in parallel. The default value for this is 5 but you can set it +as high as you want. The driver will use a round-robin strategy to +dispatch and read from the tcp connection. + + + + We are up and running with a connection to the database. Let's move on +and look at what collections are and how they work. + + +------------------------ +Mongo DB and Collections +------------------------ + + Collections are the equivalent of tables in traditional databases and contain all your documents. A database can have many collections. So how do we go about defining and using collections. Well there are a couple of methods that we can use. Let's jump straight into code and then look at the code. + + + + **the requires and and other initializing stuff omitted for brevity** + + + .. 
code-block:: javascript + + // Retrieve + var MongoClient = require('mongodb').MongoClient; + + // Connect to the db + MongoClient.connect("mongodb://localhost:27017/exampleDb", function(err, db) { + if(err) { return console.dir(err); } + + db.collection('test', function(err, collection) {}); + + db.collection('test', {w:1}, function(err, collection) {}); + + db.createCollection('test', function(err, collection) {}); + + db.createCollection('test', {w:1}, function(err, collection) {}); + + }); + + + + Three different ways of creating a collection object but slightly different in behavior. Let's go through them and see what they do + + + .. code-block:: javascript + + db.collection('test', function(err, collection) {}); + + + + This function will not actually create a collection on the database until you actually insert the first document. + + + .. code-block:: javascript + + db.collection('test', {w:1}, function(err, collection) {}); + + + + Notice the **{w:1}** option. This option will make the driver check if the collection exists and issue an error if it does not. + + + .. code-block:: javascript + + db.createCollection('test', function(err, collection) {}); + + + + This command will create the collection on the Mongo DB database before returning the collection object. If the collection already exists it will ignore the creation of the collection. + + + .. code-block:: javascript + + db.createCollection('test', {w:1}, function(err, collection) {}); + + + + The **{w:1}** option will make the method return an error if the collection already exists. + + + + With an open db connection and a collection defined we are ready to do some CRUD operation on the data. + + +----------------------- +And then there was CRUD +----------------------- + + So let's get dirty with the basic operations for Mongo DB. The Mongo DB wire protocol is built around 4 main operations **insert/update/remove/query** . Most operations on the database are actually queries with special json objects defining the operation on the database. But I'm getting ahead of myself. Let's go back and look at insert first and do it with some code. + + + + **the requires and and other initializing stuff omitted for brevity** + + + .. code-block:: javascript + + // Retrieve + var MongoClient = require('mongodb').MongoClient; + + // Connect to the db + MongoClient.connect("mongodb://localhost:27017/exampleDb", function(err, db) { + if(err) { return console.dir(err); } + + var collection = db.collection('test'); + var doc1 = {'hello':'doc1'}; + var doc2 = {'hello':'doc2'}; + var lotsOfDocs = [{'hello':'doc3'}, {'hello':'doc4'}]; + + collection.insert(doc1); + + collection.insert(doc2, {w:1}, function(err, result) {}); + + collection.insert(lotsOfDocs, {w:1}, function(err, result) {}); + + }); + + + + A couple of variations on the theme of inserting a document as we can see. To understand why it's important to understand how Mongo DB works during inserts of documents. + + + + Mongo DB has asynchronous **insert/update/remove** operations. This means that when you issue an **insert** operation its a fire and forget operation where the database does not reply with the status of the insert operation. To retrieve the status of the operation you have to issue a query to retrieve the last error status of the connection. To make it simpler to the developer the driver implements the **{w:1}** options so that this is done automatically when inserting the document. 
**{w:1}** becomes especially important when you do **update** or **remove** as otherwise it's not possible to determine the amount of documents modified or removed. + + + + Now let's go through the different types of inserts shown in the code above. + + + .. code-block:: javascript + + collection.insert(doc1); + + + + Taking advantage of the async behavior and not needing confirmation about the persisting of the data to Mongo DB we just fire off the insert (we are doing live analytics, loosing a couple of records does not matter). + + + .. code-block:: javascript + + collection.insert(doc2, {w:1}, function(err, result) {}); + + + + That document needs to stick. Using the **{w:1}** option ensure you get the error back if the document fails to insert correctly. + + + .. code-block:: javascript + + collection.insert(lotsOfDocs, {w:1}, function(err, result) {}); + + + + A batch insert of document with any errors being reported. This is much more efficient if you need to insert large batches of documents as you incur a lot less overhead. + + + + Right that's the basics of insert's ironed out. We got some documents in there but want to update them as we need to change the content of a field. Let's have a look at a simple example and then we will dive into how Mongo DB updates work and how to do them efficiently. + + + + **the requires and and other initializing stuff omitted for brevity** + + + .. code-block:: javascript + + // Retrieve + var MongoClient = require('mongodb').MongoClient; + + // Connect to the db + MongoClient.connect("mongodb://localhost:27017/exampleDb", function(err, db) { + if(err) { return console.dir(err); } + + var collection = db.collection('test'); + var doc = {mykey:1, fieldtoupdate:1}; + + collection.insert(doc, {w:1}, function(err, result) { + collection.update({mykey:1}, {$set:{fieldtoupdate:2}}, {w:1}, function(err, result) {}); + }); + + var doc2 = {mykey:2, docs:[{doc1:1}]}; + + collection.insert(doc2, {w:1}, function(err, result) { + collection.update({mykey:2}, {$push:{docs:{doc2:1}}}, {w:1}, function(err, result) {}); + }); + }); + + + + Alright before we look at the code we want to understand how document updates work and how to do the efficiently. The most basic and less efficient way is to replace the whole document, this is not really the way to go if you want to change just a field in your document. Luckily Mongo DB provides a whole set of operations that let you modify just pieces of the document `Atomic operations documentation `_ . Basically outlined below. + + + * $inc - increment a particular value by a certain amount + * $set - set a particular value + * $unset - delete a particular field (v1.3+) + * $push - append a value to an array + * $pushAll - append several values to an array + * $addToSet - adds value to the array only if its not in the array already + * $pop - removes the last element in an array + * $pull - remove a value(s) from an existing array + * $pullAll - remove several value(s) from an existing array + * $rename - renames the field + * $bit - bitwise operations + + + + Now that the operations are outline let's dig into the specific cases show in the code example. + + + .. code-block:: javascript + + collection.update({mykey:1}, {$set:{fieldtoupdate:2}}, {w:1}, function(err, result) {}); + + + + Right so this update will look for the document that has a field **mykey** equal to **1** and apply an update to the field **fieldtoupdate** setting the value to **2** . 
Since we are using the **{w:1}** option the result parameter in the callback will return the value **1** indicating that 1 document was modified by the update statement. + + + .. code-block:: javascript + + collection.update({mykey:2}, {$push:{docs:{doc2:1}}}, {w:1}, function(err, result) {}); + + + + This updates adds another document to the field **docs** in the document identified by **{mykey:2}** using the atomic operation **$push** . This allows you to modify keep such structures as queues in Mongo DB. + + + + Let's have a look at the remove operation for the driver. As before let's start with a piece of code. + + + + **the requires and and other initializing stuff omitted for brevity** + + + .. code-block:: javascript + + // Retrieve + var MongoClient = require('mongodb').MongoClient; + + // Connect to the db + MongoClient.connect("mongodb://localhost:27017/exampleDb", function(err, db) { + if(err) { return console.dir(err); } + + var collection = db.collection('test'); + var docs = [{mykey:1}, {mykey:2}, {mykey:3}]; + + collection.insert(docs, {w:1}, function(err, result) { + + collection.remove({mykey:1}); + + collection.remove({mykey:2}, {w:1}, function(err, result) {}); + + collection.remove(); + }); + }); + + + + Let's examine the 3 remove variants and what they do. + + + .. code-block:: javascript + + collection.remove({mykey:1}); + + + + This leverages the fact that Mongo DB is asynchronous and that it does not return a result for **insert/update/remove** to allow for **synchronous** style execution. This particular remove query will remove the document where **mykey** equals **1** . + + + .. code-block:: javascript + + collection.remove({mykey:2}, {w:1}, function(err, result) {}); + + + + This remove statement removes the document where **mykey** equals **2** but since we are using **{w:1}** it will back to Mongo DB to get the status of the remove operation and return the number of documents removed in the result variable. + + + .. code-block:: javascript + + collection.remove(); + + + + This last one will remove all documents in the collection. + + +------------- +Time to Query +------------- + + Queries is of course a fundamental part of interacting with a database and Mongo DB is no exception. Fortunately for us it has a rich query interface with cursors and close to SQL concepts for slicing and dicing your datasets. To build queries we have lots of operators to choose from `Mongo DB advanced queries `_ . There are literarily tons of ways to search and ways to limit the query. Let's look at some simple code for dealing with queries in different ways. + + + + **the requires and and other initializing stuff omitted for brevity** + + + .. code-block:: javascript + + // Retrieve + var MongoClient = require('mongodb').MongoClient; + + // Connect to the db + MongoClient.connect("mongodb://localhost:27017/exampleDb", function(err, db) { + if(err) { return console.dir(err); } + + var collection = db.collection('test'); + var docs = [{mykey:1}, {mykey:2}, {mykey:3}]; + + collection.insert(docs, {w:1}, function(err, result) { + + collection.find().toArray(function(err, items) {}); + + var stream = collection.find({mykey:{$ne:2}}).stream(); + stream.on("data", function(item) {}); + stream.on("end", function() {}); + + collection.findOne({mykey:1}, function(err, item) {}); + + }); + }); + + + + Before we start picking apart the code there is one thing that needs to be understood, the **find** method does not execute the actual query. 
It builds an instance of **Cursor** that you then use to retrieve the data. This lets you manage how you retrieve the data from Mongo DB and keeps state about your current Cursor state on Mongo DB. Now let's pick apart the queries we have here and look at what they do. + + + .. code-block:: javascript + + collection.find().toArray(function(err, items) {}); + + + + This query will fetch all the document in the collection and return them as an array of items. Be careful with the function **toArray** as it might cause a lot of memory usage as it will instantiate all the document into memory before returning the final array of items. If you have a big resultset you could run into memory issues. + + + .. code-block:: javascript + + var stream = collection.find({mykey:{$ne:2}}).stream(); + stream.on("data", function(item) {}); + stream.on("end", function() {}); + + + + This is the preferred way if you have to retrieve a lot of data for streaming, as data is deserialized a **data** event is emitted. This keeps the resident memory usage low as the documents are streamed to you. Very useful if you are pushing documents out via websockets or some other streaming socket protocol. Once there is no more document the driver will emit the **end** event to notify the application that it's done. + + + .. code-block:: javascript + + collection.findOne({mykey:1}, function(err, item) {}); + + + + This is special supported function to retrieve just one specific document bypassing the need for a cursor object. + + + + That's pretty much it for the quick intro on how to use the database. I have also included a list of links to where to go to find more information and also a sample crude location application I wrote using express JS and mongo DB. + + +--------------- +Links and stuff +--------------- + * `The driver examples, good starting point for basic usage `_ + * `All the integration tests, they have tons of different usage cases `_ + * `The Mongo DB wiki pages such as the advanced query link `_ + * `A silly simple location based application using Express JS and Mongo DB `_ + diff --git a/_sources/api-articles/nodekoarticle2.txt b/_sources/api-articles/nodekoarticle2.txt new file mode 100644 index 00000000000..454cba1683c --- /dev/null +++ b/_sources/api-articles/nodekoarticle2.txt @@ -0,0 +1,326 @@ +============================================= +A primer for GridFS using the Mongo DB driver +============================================= + + In the first tutorial we targeted general usage of the database. But Mongo DB is much more than this. One of the additional very useful features is to act as a file storage system. This is accomplish in Mongo by having a file collection and a chunks collection where each document in the chunks collection makes up a **Block** of the file. In this tutorial we will look at how to use the GridFS functionality and what functions are available. + + +---------------- +A simple example +---------------- + + Let's dive straight into a simple example on how to write a file to the grid using the simplified Grid class. + + + .. 
code-block:: javascript + + var MongoClient = require('mongodb').MongoClient, + Grid = mongo.Grid; + + // Connect to the db + MongoClient.connect("mongodb://localhost:27017/exampleDb", function(err, db) { + if(err) return console.dir(err); + + var grid = new Grid(db, 'fs'); + var buffer = new Buffer("Hello world"); + grid.put(buffer, {metadata:{category:'text'}, content_type: 'text'}, function(err, fileInfo) { + if(!err) { + console.log("Finished writing file to Mongo"); + } + }); + }); + + + + All right let's dissect the example. The first thing you'll notice is the statement + + + .. code-block:: javascript + + var grid = new Grid(db, 'fs'); + + + + Since GridFS is actually a special structure stored as collections you'll notice that we are using the db connection that we used in the previous tutorial to operate on collections and documents. The second parameter **'fs'** allows you to change the collections you want to store the data in. In this example the collections would be **fs_files** and **fs_chunks** . + + + + Having a live grid instance we now go ahead and create some test data stored in a Buffer instance, although you can pass in a string instead. We then write our data to disk. + + + .. code-block:: javascript + + var buffer = new Buffer("Hello world"); + grid.put(buffer, {metadata:{category:'text'}, content_type: 'text'}, function(err, fileInfo) { + if(!err) { + console.log("Finished writing file to Mongo"); + } + }); + + + + Let's deconstruct the call we just made. The **put** call will write the data you passed in as one or more chunks. The second parameter is a hash of options for the Grid class. In this case we wish to annotate the file we are writing to Mongo DB with some metadata and also specify a content type. Each file entry in GridFS has support for metadata documents which might be very useful if you are for example storing images in you Mongo DB and need to store all the data associated with the image. + + + + One important thing is to take not that the put method return a document containing a **_id** , this is an **ObjectID** identifier that you'll need to use if you wish to retrieve the file contents later. + + + + Right so we have written out first file, let's look at the other two simple functions supported by the Grid class. + + + + **the requires and and other initializing stuff omitted for brevity** + + + .. code-block:: javascript + + var MongoClient = require('mongodb').MongoClient, + Grid = mongo.Grid; + + // Connect to the db + MongoClient.connect("mongodb://localhost:27017/exampleDb", function(err, db) { + if(err) return console.dir(err); + + var grid = new Grid(db, 'fs'); + var buffer = new Buffer("Hello world"); + grid.put.(buffer, {metadata:{category:'text'}, content_type: 'text'}, function(err, fileInfo) { + grid.get(fileInfo._id, function(err, data) { + console.log("Retrieved data: " + data.toString()); + grid.delete(fileInfo._id, function(err, result) { + }); + }); + }); + }); + + + + Let's have a look at the two operations **get** and **delete** + + + .. code-block:: javascript + + grid.get(fileInfo._id, function(err, data) {}); + + + + The **get** method takes an ObjectID as the first argument and as we can se in the code we are using the one provided in **fileInfo._id** . This will read all the chunks for the file and return it as a Buffer object. + + + + The **delete** method also takes an ObjectID as the first argument but will delete the file entry and the chunks associated with the file in Mongo. 
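+
+
+ A condensed sketch of the same put/get/delete round trip is shown below, chaining the three calls against the same local **exampleDb** database and **'fs'** collection root used throughout this article (only the **db.close()** call at the end is an addition not shown in the snippets above):
+
+
+ .. code-block:: javascript
+
+     var MongoClient = require('mongodb').MongoClient,
+       Grid = require('mongodb').Grid;
+
+     // Connect to the db
+     MongoClient.connect("mongodb://localhost:27017/exampleDb", function(err, db) {
+       if(err) return console.dir(err);
+
+       var grid = new Grid(db, 'fs');
+
+       // Write a small buffer to GridFS
+       grid.put(new Buffer("Hello world"), {metadata:{category:'text'}, content_type: 'text'}, function(err, fileInfo) {
+         if(err) return console.dir(err);
+
+         // Read all the chunks back as a single Buffer using the returned _id
+         grid.get(fileInfo._id, function(err, data) {
+           console.log("Retrieved data: " + data.toString());
+
+           // Remove the file entry and its chunks again
+           grid.delete(fileInfo._id, function(err, result) {
+             db.close();
+           });
+         });
+       });
+     });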
+ + + + This **api** is the simplest one you can use to interact with GridFS but it's not suitable for all kinds of files. One of it's main drawbacks is you are trying to write large files to Mongo. This api will require you to read the entire file into memory when writing and reading from Mongo which most likely is not feasible if you have to store large files like Video or RAW Pictures. Luckily this is not the only way to work with GridFS. That's not to say this api is not useful. If you are storing tons of small files the memory usage vs the simplicity might be a worthwhile tradeoff. Let's dive into some of the more advanced ways of using GridFS. + + +----------------------------------------------- +Advanced GridFS or how not to run out of memory +----------------------------------------------- + + As we just said controlling memory consumption for you file writing and reading is key if you want to scale up the application. That means not reading in entire files before either writing or reading from Mongo DB. The good news is, it's supported. Let's throw some code out there straight away and look at how to do chunk sized streaming writes and reads. + + + + **the requires and and other initializing stuff omitted for brevity** + + + .. code-block:: javascript + + var fileId = new ObjectID(); + var gridStore = new GridStore(db, fileId, "w", {root:'fs'}); + gridStore.chunkSize = 1024 * 256; + + gridStore.open(function(err, gridStore) { + Step( + function writeData() { + var group = this.group(); + + for(var i = 0; i < 1000000; i += 5000) { + gridStore.write(new Buffer(5000), group()); + } + }, + + function doneWithWrite() { + gridStore.close(function(err, result) { + console.log("File has been written to GridFS"); + }); + } + ) + }); + + + + Before we jump into picking apart the code let's look at + + + .. code-block:: javascript + + var gridStore = new GridStore(db, fileId, "w", {root:'fs'}); + + + + Notice the parameter **"w"** this is important. It tells the driver that you are planning to write a new file. The parameters you can use here are. + + + * **"r"** - read only. This is the default mode + * **"w"** - write in truncate mode. Existing data will be overwritten + * **"w+"** - write in edit mode + + + + Right so there is a fair bit to digest here. We are simulating writing a file that's about 1MB big to Mongo DB using GridFS. To do this we are writing it in chunks of 5000 bytes. So to not live with a difficult callback setup we are using the Step library with its' group functionality to ensure that we are notified when all of the writes are done. After all the writes are done Step will invoke the next function (or step) called **doneWithWrite** where we finish up by closing the file that flushes out any remaining data to Mongo DB and updates the file document. + + + + As we are doing it in chunks of 5000 bytes we will notice that memory consumption is low. This is the trick to write large files to GridFS. In pieces. Also notice this line. + + + .. code-block:: javascript + + gridStore.chunkSize = 1024 * 256; + + + + This allows you to adjust how big the chunks are in bytes that Mongo DB will write. You can tune the Chunk Size to your needs. If you need to write large files to GridFS it might be worthwhile to trade of memory for CPU by setting a larger Chunk Size. + + + + Now let's see how the actual streaming read works. + + + .. 
code-block:: javascript + + new GridStore(db, fileId, "r").open(function(err, gridStore) { + var stream = gridStore.stream(true); + + stream.on("data", function(chunk) { + console.log("Chunk of file data"); + }); + + stream.on("end", function() { + console.log("EOF of file"); + }); + + stream.on("close", function() { + console.log("Finished reading the file"); + }); + }); + + + + Right let's have a quick lock at the streaming functionality supplied with the driver **(make sure you are using 0.9.6-12 or higher as there is a bug fix for custom chunksizes that you need)** + + + .. code-block:: javascript + + var stream = gridStore.stream(true); + + + + This opens a stream to our file, you can pass in a boolean parameter to tell the driver to close the file automatically when it reaches the end. This will fire the **close** event automatically. Otherwise you'll have to handle cleanup when you receive the **end** event. Let's have a look at the events supported. + + + .. code-block:: javascript + + stream.on("data", function(chunk) { + console.log("Chunk of file data"); + }); + + + + The **data** event is called for each chunk read. This means that it's by the chunk size of the written file. So if you file is 1MB big and the file has chunkSize 256K then you'll get 4 calls to the event handler for **data** . The chunk returned is a **Buffer** object. + + + .. code-block:: javascript + + stream.on("end", function() { + console.log("EOF of file"); + }); + + + + The **end** event is called when the driver reaches the end of data for the file. + + + .. code-block:: javascript + + stream.on("close", function() { + console.log("Finished reading the file"); + }); + + + + The **close** event is only called if you the **autoclose** parameter on the **gridStore.stream** method as shown above. If it's false or not set handle cleanup of the streaming in the **end** event handler. + + + + Right that's it for writing to GridFS in an efficient Manner. I'll outline some other useful function on the Gridstore object. + + +-------------------------------------------- +Other useful methods on the Gridstore object +-------------------------------------------- + + There are some other methods that are useful + + + .. code-block:: javascript + + gridStore.writeFile(filename/filedescriptor, function(err fileInfo) {}); + + + + **writeFile** takes either a file name or a file descriptor and writes it to GridFS. It does this in chunks to ensure the Eventloop is not tied up. + + + .. code-block:: javascript + + gridStore.read(length, function(err, data) {}); + + + + **read/readBuffer** lets you read a #length number of bytes from the current position in the file. + + + .. code-block:: javascript + + gridStore.seek(position, seekLocation, function(err, gridStore) {}); + + + + **seek** lets you navigate the file to read from different positions inside the chunks. The seekLocation allows you to specify how to seek. It can be one of three values. + + + * GridStore.IO *SEEK* SET Seek mode where the given length is absolute + * GridStore.IO *SEEK* CUR Seek mode where the given length is an offset to the current read/write head + * GridStore.IO *SEEK* END Seek mode where the given length is an offset to the end of the file + + GridStore.list(dbInstance, collectionName, {id:true}, function(err, files) {}) + + + + **list** lists all the files in the collection in GridFS. If you have a lot of files the current version will not work very well as it's getting all files into memory first. 
You can have it return either the filenames or the ids for the files using option. + + + .. code-block:: javascript + + gridStore.unlink(function(err, result) {}); + + + + **unlink** deletes the file from Mongo DB, that's to say all the file info and all the chunks. + + + + This should be plenty to get you on your way building your first GridFS based application. As in the previous article the following links might be useful for you. Good luck and have fun. + + +--------------- +Links and stuff +--------------- + * `The driver examples, good starting point for basic usage `_ + * `All the integration tests, they have tons of different usage cases `_ + diff --git a/_sources/api-bson-generated/binary.txt b/_sources/api-bson-generated/binary.txt new file mode 100644 index 00000000000..7509eb9ce7d --- /dev/null +++ b/_sources/api-bson-generated/binary.txt @@ -0,0 +1,104 @@ +======== +Binary() +======== + +------------------ +Constructor +------------------ + + +A class representation of the BSON Binary type. + + + .. js:class:: Binary() + + :param buffer buffer: a buffer object containing the binary data. + :param number [subType]: the option binary type. + :returns: grid + + +Sub types + - **BSON.BSON_BINARY_SUBTYPE_DEFAULT**, default BSON type. + - **BSON.BSON_BINARY_SUBTYPE_FUNCTION**, BSON function type. + - **BSON.BSON_BINARY_SUBTYPE_BYTE_ARRAY**, BSON byte array type. + - **BSON.BSON_BINARY_SUBTYPE_UUID**, BSON uuid type. + - **BSON.BSON_BINARY_SUBTYPE_MD5**, BSON md5 type. + - **BSON.BSON_BINARY_SUBTYPE_USER_DEFINED**, BSON user defined type. + + +------------------ +Constants +------------------ + +.. csv-table:: + :header: "Constant Name", "Value", "Description" + :widths: 15, 10, 30 + + "Binary.SUBTYPE_DEFAULT", "0", "Default BSON type" + "Binary.SUBTYPE_FUNCTION", "1", "Function BSON type" + "Binary.SUBTYPE_BYTE_ARRAY", "2", "Byte Array BSON type" + "Binary.SUBTYPE_UUID_OLD", "3", "OLD UUID BSON type" + "Binary.SUBTYPE_UUID", "4", "UUID BSON type" + "Binary.SUBTYPE_MD5", "5", "MD5 BSON type" + "Binary.SUBTYPE_USER_DEFINED", "128", "User BSON type" + +--- +put +--- + + +Updates this binary with byte_value. + +.. js:function:: put(byte_value) + + :param character byte_value: a single byte we wish to write. + +----- +write +----- + + +Writes a buffer or string to the binary. + +.. js:function:: write(string, offset) + + :param buffer string: a string or buffer to be written to the Binary BSON object. + :param number offset: specify the binary of where to write the content. + +---- +read +---- + + +Reads **length** bytes starting at **position**. + +.. js:function:: read(position, length) + + :param number position: read from the given position in the Binary. + :param number length: the number of bytes to read. + :returns: buffer + + +----- +value +----- + + +Returns the value of this binary as a string. + +.. js:function:: value() + + :returns: string + + +------ +length +------ + + +Length. + +.. js:function:: length() + + :returns: number the length of the binary. + diff --git a/_sources/api-bson-generated/bson.txt b/_sources/api-bson-generated/bson.txt new file mode 100644 index 00000000000..345102d4a3a --- /dev/null +++ b/_sources/api-bson-generated/bson.txt @@ -0,0 +1,238 @@ +====== +BSON() +====== + +------------------ +Constructor +------------------ + + +Create a new BSON instance + + + .. js:class:: BSON() + + :returns: bson instance of BSON Parser. + + + +------------------ +Constants +------------------ + +.. 
csv-table:: + :header: "Constant Name", "Value", "Description" + :widths: 15, 10, 30 + + "BSON.BSON_DATA_NUMBER", "1", "Number BSON Type" + "BSON.BSON_DATA_STRING", "2", "String BSON Type" + "BSON.BSON_DATA_OBJECT", "3", "Object BSON Type" + "BSON.BSON_DATA_ARRAY", "4", "Array BSON Type" + "BSON.BSON_DATA_BINARY", "5", "Binary BSON Type" + "BSON.BSON_DATA_OID", "7", "ObjectID BSON Type" + "BSON.BSON_DATA_BOOLEAN", "8", "Boolean BSON Type" + "BSON.BSON_DATA_DATE", "9", "Date BSON Type" + "BSON.BSON_DATA_NULL", "10", "null BSON Type" + "BSON.BSON_DATA_REGEXP", "11", "RegExp BSON Type" + "BSON.BSON_DATA_CODE", "13", "Code BSON Type" + "BSON.BSON_DATA_SYMBOL", "14", "Symbol BSON Type" + "BSON.BSON_DATA_CODE_W_SCOPE", "15", "Code with Scope BSON Type" + "BSON.BSON_DATA_INT", "16", "32 bit Integer BSON Type" + "BSON.BSON_DATA_TIMESTAMP", "17", "Timestamp BSON Type" + "BSON.BSON_DATA_LONG", "18", "Long BSON Type" + "BSON.BSON_DATA_MIN_KEY", "0xff", "MinKey BSON Type" + "BSON.BSON_DATA_MAX_KEY", "0x7f", "MaxKey BSON Type" + "BSON.BSON_BINARY_SUBTYPE_DEFAULT", "0", "Binary Default Type" + "BSON.BSON_BINARY_SUBTYPE_FUNCTION", "1", "Binary Function Type" + "BSON.BSON_BINARY_SUBTYPE_BYTE_ARRAY", "2", "Binary Byte Array Type" + "BSON.BSON_BINARY_SUBTYPE_UUID", "3", "Binary UUID Type" + "BSON.BSON_BINARY_SUBTYPE_MD5", "4", "Binary MD5 Type" + "BSON.BSON_BINARY_SUBTYPE_USER_DEFINED", "128", "Binary User Defined Type" + +------------------------ +BSON.calculateObjectSize +------------------------ + + +Calculate the bson size for a passed in Javascript object. + +.. js:function:: BSON.calculateObjectSize(object[, serializeFunctions]) + + :param object object: the Javascript object to calculate the BSON byte size for. + :param boolean [serializeFunctions]: serialize all functions in the object **(default:false)**. + :returns: number returns the number of bytes the BSON object will take up. + + +-------------------------------- +BSON.serializeWithBufferAndIndex +-------------------------------- + + +Serialize a Javascript object using a predefined Buffer and index into the buffer, useful when pre-allocating the space for serialization. + +.. js:function:: BSON.serializeWithBufferAndIndex(object, checkKeys, buffer, index, serializeFunctions) + + :param object object: the Javascript object to serialize. + :param boolean checkKeys: the serializer will check if keys are valid. + :param buffer buffer: the Buffer you pre-allocated to store the serialized BSON object. + :param number index: the index in the buffer where we wish to start serializing into. + :param boolean serializeFunctions: serialize the javascript functions **(default:false)**. + :returns: number returns the new write index in the Buffer. + + +-------------- +BSON.serialize +-------------- + + +Serialize a Javascript object. + +.. js:function:: BSON.serialize(object, checkKeys, asBuffer, serializeFunctions) + + :param object object: the Javascript object to serialize. + :param boolean checkKeys: the serializer will check if keys are valid. + :param boolean asBuffer: return the serialized object as a Buffer object **(ignore)**. + :param boolean serializeFunctions: serialize the javascript functions **(default:false)**. + :returns: buffer returns the Buffer object containing the serialized object. + + +---------------------- +BSON.deserializeStream +---------------------- + + +Deserialize stream data as BSON documents. 
+ + + +Options + - **evalFunctions** {Boolean, default:false}, evaluate functions in the BSON document scoped to the object deserialized. + - **cacheFunctions** {Boolean, default:false}, cache evaluated functions for reuse. + - **cacheFunctionsCrc32** {Boolean, default:false}, use a crc32 code for caching, otherwise use the string of the function. + +.. js:function:: BSON.deserializeStream(data, startIndex, numberOfDocuments, documents, docStartIndex[, options]) + + :param buffer data: the buffer containing the serialized set of BSON documents. + :param number startIndex: the start index in the data Buffer where the deserialization is to start. + :param number numberOfDocuments: number of documents to deserialize. + :param array documents: an array where to store the deserialized documents. + :param number docStartIndex: the index in the documents array from where to start inserting documents. + :param object [options]: additional options used for the deserialization. + :returns: number returns the next index in the buffer after deserialization **x** numbers of documents. + + +---------------- +BSON.deserialize +---------------- + + +Deserialize data as BSON. + + + +Options + - **evalFunctions** {Boolean, default:false}, evaluate functions in the BSON document scoped to the object deserialized. + - **cacheFunctions** {Boolean, default:false}, cache evaluated functions for reuse. + - **cacheFunctionsCrc32** {Boolean, default:false}, use a crc32 code for caching, otherwise use the string of the function. + +.. js:function:: BSON.deserialize(buffer[, options], [isArray]) + + :param buffer buffer: the buffer containing the serialized set of BSON documents. + :param object [options]: additional options used for the deserialization. + :param boolean [isArray]: ignore used for recursive parsing. + :returns: object returns the deserialized Javascript Object. + + +----------- +deserialize +----------- + + +Deserialize data as BSON. + + + +Options + - **evalFunctions** {Boolean, default:false}, evaluate functions in the BSON document scoped to the object deserialized. + - **cacheFunctions** {Boolean, default:false}, cache evaluated functions for reuse. + - **cacheFunctionsCrc32** {Boolean, default:false}, use a crc32 code for caching, otherwise use the string of the function. + +.. js:function:: deserialize(buffer[, options], [isArray]) + + :param buffer buffer: the buffer containing the serialized set of BSON documents. + :param object [options]: additional options used for the deserialization. + :param boolean [isArray]: ignore used for recursive parsing. + :returns: object returns the deserialized Javascript Object. + + +----------------- +deserializeStream +----------------- + + +Deserialize stream data as BSON documents. + + + +Options + - **evalFunctions** {Boolean, default:false}, evaluate functions in the BSON document scoped to the object deserialized. + - **cacheFunctions** {Boolean, default:false}, cache evaluated functions for reuse. + - **cacheFunctionsCrc32** {Boolean, default:false}, use a crc32 code for caching, otherwise use the string of the function. + +.. js:function:: deserializeStream(data, startIndex, numberOfDocuments, documents, docStartIndex[, options]) + + :param buffer data: the buffer containing the serialized set of BSON documents. + :param number startIndex: the start index in the data Buffer where the deserialization is to start. + :param number numberOfDocuments: number of documents to deserialize. 
+ :param array documents: an array where to store the deserialized documents. + :param number docStartIndex: the index in the documents array from where to start inserting documents. + :param object [options]: additional options used for the deserialization. + :returns: number returns the next index in the buffer after deserialization **x** numbers of documents. + + +--------- +serialize +--------- + + +Serialize a Javascript object. + +.. js:function:: serialize(object, checkKeys, asBuffer, serializeFunctions) + + :param object object: the Javascript object to serialize. + :param boolean checkKeys: the serializer will check if keys are valid. + :param boolean asBuffer: return the serialized object as a Buffer object **(ignore)**. + :param boolean serializeFunctions: serialize the javascript functions **(default:false)**. + :returns: buffer returns the Buffer object containing the serialized object. + + +------------------- +calculateObjectSize +------------------- + + +Calculate the bson size for a passed in Javascript object. + +.. js:function:: calculateObjectSize(object[, serializeFunctions]) + + :param object object: the Javascript object to calculate the BSON byte size for. + :param boolean [serializeFunctions]: serialize all functions in the object **(default:false)**. + :returns: number returns the number of bytes the BSON object will take up. + + +--------------------------- +serializeWithBufferAndIndex +--------------------------- + + +Serialize a Javascript object using a predefined Buffer and index into the buffer, useful when pre-allocating the space for serialization. + +.. js:function:: serializeWithBufferAndIndex(object, checkKeys, buffer, index, serializeFunctions) + + :param object object: the Javascript object to serialize. + :param boolean checkKeys: the serializer will check if keys are valid. + :param buffer buffer: the Buffer you pre-allocated to store the serialized BSON object. + :param number index: the index in the buffer where we wish to start serializing into. + :param boolean serializeFunctions: serialize the javascript functions **(default:false)**. + :returns: number returns the new write index in the Buffer. + diff --git a/_sources/api-bson-generated/code.txt b/_sources/api-bson-generated/code.txt new file mode 100644 index 00000000000..7302232a2a3 --- /dev/null +++ b/_sources/api-bson-generated/code.txt @@ -0,0 +1,19 @@ +====== +Code() +====== + +------------------ +Constructor +------------------ + + +A class representation of the BSON Code type. + + + .. js:class:: Code() + + :param string code: a string or function. + :param object [scope]: an optional scope for the function. + :returns: code + + diff --git a/_sources/api-bson-generated/db_ref.txt b/_sources/api-bson-generated/db_ref.txt new file mode 100644 index 00000000000..0fd9f15e124 --- /dev/null +++ b/_sources/api-bson-generated/db_ref.txt @@ -0,0 +1,20 @@ +======= +DBRef() +======= + +------------------ +Constructor +------------------ + + +A class representation of the BSON DBRef type. + + + .. js:class:: DBRef() + + :param string namespace: the collection name. + :param objectid oid: the reference ObjectID. + :param string [db]: optional db name, if omitted the reference is local to the current db. 
+ :returns: dbref + + diff --git a/_sources/api-bson-generated/double.txt b/_sources/api-bson-generated/double.txt new file mode 100644 index 00000000000..d563b36f0f3 --- /dev/null +++ b/_sources/api-bson-generated/double.txt @@ -0,0 +1,30 @@ +======== +Double() +======== + +------------------ +Constructor +------------------ + + +A class representation of the BSON Double type. + + + .. js:class:: Double() + + :param number value: the number we want to represent as a double. + :returns: double + + + +------- +valueOf +------- + + +Access the number value. + +.. js:function:: valueOf() + + :returns: number returns the wrapped double number. + diff --git a/_sources/api-bson-generated/index.txt b/_sources/api-bson-generated/index.txt new file mode 100644 index 00000000000..3cedf0394a4 --- /dev/null +++ b/_sources/api-bson-generated/index.txt @@ -0,0 +1,18 @@ +================== +Binary JSON API +================== + +.. toctree:: + :maxdepth: 2 + + objectid + binary + code + db_ref + double + minkey + maxkey + symbol + timestamp + long + bson diff --git a/_sources/api-bson-generated/long.txt b/_sources/api-bson-generated/long.txt new file mode 100644 index 00000000000..8001c79a1b4 --- /dev/null +++ b/_sources/api-bson-generated/long.txt @@ -0,0 +1,487 @@ +====== +Long() +====== + +------------------ +Constructor +------------------ + + +Defines a Long class for representing a 64-bit two's-complement +integer value, which faithfully simulates the behavior of a Java "Long". This +implementation is derived from LongLib in GWT. + + + .. js:class:: Long() + + :param number low: the low (signed) 32 bits of the Long. + :param number high: the high (signed) 32 bits of the Long. + + +Constructs a 64-bit two's-complement integer, given its low and high 32-bit +values as *signed* integers. See the from* functions below for more +convenient ways of constructing Longs. + + + +The internal representation of a Long is the two given signed, 32-bit values. +We use 32-bit pieces because these are the size of integers on which +Javascript performs bit-operations. For operations like addition and +multiplication, we split each number into 16-bit pieces, which can easily be +multiplied within Javascript's floating-point representation without overflow +or change in sign. + + + +In the algorithms below, we frequently reduce the negative case to the +positive case by negating the input(s) and then post-processing the result. +Note that we must ALWAYS check specially whether those values are MIN_VALUE +(-2^63) because -MIN_VALUE == MIN_VALUE (since 2^63 cannot be represented as +a positive number, it overflows back into a negative). Not handling this +case would often result in infinite recursion. + + +----- +toInt +----- + + +Return the int value. + +.. js:function:: toInt() + + :returns: number the value, assuming it is a 32-bit integer. + + +-------- +toNumber +-------- + + +Return the Number value. + +.. js:function:: toNumber() + + :returns: number the closest floating-point representation to this value. + + +------ +toJSON +------ + + +Return the JSON value. + +.. js:function:: toJSON() + + :returns: string the JSON representation. + + +-------- +toString +-------- + + +Return the String value. + +.. js:function:: toString([opt_radix]) + + :param number [opt_radix]: the radix in which the text should be written. + :returns: string the textual representation of this value. + + +----------- +getHighBits +----------- + + +Return the high 32-bits value. + +.. 
js:function:: getHighBits() + + :returns: number the high 32-bits as a signed value. + + +---------- +getLowBits +---------- + + +Return the low 32-bits value. + +.. js:function:: getLowBits() + + :returns: number the low 32-bits as a signed value. + + +------------------ +getLowBitsUnsigned +------------------ + + +Return the low unsigned 32-bits value. + +.. js:function:: getLowBitsUnsigned() + + :returns: number the low 32-bits as an unsigned value. + + +------------- +getNumBitsAbs +------------- + + +Returns the number of bits needed to represent the absolute value of this Long. + +.. js:function:: getNumBitsAbs() + + :returns: number Returns the number of bits needed to represent the absolute value of this Long. + + +------ +isZero +------ + + +Return whether this value is zero. + +.. js:function:: isZero() + + :returns: boolean whether this value is zero. + + +---------- +isNegative +---------- + + +Return whether this value is negative. + +.. js:function:: isNegative() + + :returns: boolean whether this value is negative. + + +----- +isOdd +----- + + +Return whether this value is odd. + +.. js:function:: isOdd() + + :returns: boolean whether this value is odd. + + +------ +equals +------ + + +Return whether this Long equals the other + +.. js:function:: equals(other) + + :param long other: Long to compare against. + :returns: boolean whether this Long equals the other + + +--------- +notEquals +--------- + + +Return whether this Long does not equal the other. + +.. js:function:: notEquals(other) + + :param long other: Long to compare against. + :returns: boolean whether this Long does not equal the other. + + +-------- +lessThan +-------- + + +Return whether this Long is less than the other. + +.. js:function:: lessThan(other) + + :param long other: Long to compare against. + :returns: boolean whether this Long is less than the other. + + +--------------- +lessThanOrEqual +--------------- + + +Return whether this Long is less than or equal to the other. + +.. js:function:: lessThanOrEqual(other) + + :param long other: Long to compare against. + :returns: boolean whether this Long is less than or equal to the other. + + +----------- +greaterThan +----------- + + +Return whether this Long is greater than the other. + +.. js:function:: greaterThan(other) + + :param long other: Long to compare against. + :returns: boolean whether this Long is greater than the other. + + +------------------ +greaterThanOrEqual +------------------ + + +Return whether this Long is greater than or equal to the other. + +.. js:function:: greaterThanOrEqual(other) + + :param long other: Long to compare against. + :returns: boolean whether this Long is greater than or equal to the other. + + +------- +compare +------- + + +Compares this Long with the given one. + +.. js:function:: compare(other) + + :param long other: Long to compare against. + :returns: boolean 0 if they are the same, 1 if the this is greater, and -1 if the given one is greater. + + +------ +negate +------ + + +The negation of this value. + +.. js:function:: negate() + + :returns: long the negation of this value. + + +--- +add +--- + + +Returns the sum of this and the given Long. + +.. js:function:: add(other) + + :param long other: Long to add to this one. + :returns: long the sum of this and the given Long. + + +-------- +subtract +-------- + + +Returns the difference of this and the given Long. + +.. js:function:: subtract(other) + + :param long other: Long to subtract from this. + :returns: long the difference of this and the given Long. 
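+
+As a quick illustration of the arithmetic and comparison helpers above, a minimal sketch (it assumes ``Long`` is exported by the driver, e.g. ``require('mongodb').Long``):
+
+  .. code-block:: javascript
+
+      var Long = require('mongodb').Long;
+
+      // Construct Longs from ordinary JavaScript numbers
+      var a = Long.fromNumber(5000000000);   // larger than 2^32, still exact
+      var b = Long.fromInt(7);
+
+      // add/subtract return new Long instances
+      var sum = a.add(b);
+      var diff = a.subtract(b);
+
+      console.log(sum.toString());         // "5000000007"
+      console.log(diff.toString());        // "4999999993"
+      console.log(sum.greaterThan(diff));  // true
+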
+ + +-------- +multiply +-------- + + +Returns the product of this and the given Long. + +.. js:function:: multiply(other) + + :param long other: Long to multiply with this. + :returns: long the product of this and the other. + + +--- +div +--- + + +Returns this Long divided by the given one. + +.. js:function:: div(other) + + :param long other: Long by which to divide. + :returns: long this Long divided by the given one. + + +------ +modulo +------ + + +Returns this Long modulo the given one. + +.. js:function:: modulo(other) + + :param long other: Long by which to mod. + :returns: long this Long modulo the given one. + + +--- +not +--- + + +The bitwise-NOT of this value. + +.. js:function:: not() + + :returns: long the bitwise-NOT of this value. + + +--- +and +--- + + +Returns the bitwise-AND of this Long and the given one. + +.. js:function:: and(other) + + :param long other: the Long with which to AND. + :returns: long the bitwise-AND of this and the other. + + +-- +or +-- + + +Returns the bitwise-OR of this Long and the given one. + +.. js:function:: or(other) + + :param long other: the Long with which to OR. + :returns: long the bitwise-OR of this and the other. + + +--- +xor +--- + + +Returns the bitwise-XOR of this Long and the given one. + +.. js:function:: xor(other) + + :param long other: the Long with which to XOR. + :returns: long the bitwise-XOR of this and the other. + + +--------- +shiftLeft +--------- + + +Returns this Long with bits shifted to the left by the given amount. + +.. js:function:: shiftLeft(numBits) + + :param number numBits: the number of bits by which to shift. + :returns: long this shifted to the left by the given amount. + + +---------- +shiftRight +---------- + + +Returns this Long with bits shifted to the right by the given amount. + +.. js:function:: shiftRight(numBits) + + :param number numBits: the number of bits by which to shift. + :returns: long this shifted to the right by the given amount. + + +------------------ +shiftRightUnsigned +------------------ + + +Returns this Long with bits shifted to the right by the given amount, with the new top bits matching the current sign bit. + +.. js:function:: shiftRightUnsigned(numBits) + + :param number numBits: the number of bits by which to shift. + :returns: long this shifted to the right by the given amount, with zeros placed into the new leading bits. + + +------------ +Long.fromInt +------------ + + +Returns a Long representing the given (32-bit) integer value. + +.. js:function:: Long.fromInt(value) + + :param number value: the 32-bit integer in question. + :returns: long the corresponding Long value. + + +--------------- +Long.fromNumber +--------------- + + +Returns a Long representing the given value, provided that it is a finite number. Otherwise, zero is returned. + +.. js:function:: Long.fromNumber(value) + + :param number value: the number in question. + :returns: long the corresponding Long value. + + +------------- +Long.fromBits +------------- + + +Returns a Long representing the 64-bit integer that comes by concatenating the given high and low bits. Each is assumed to use 32 bits. + +.. js:function:: Long.fromBits(lowBits, highBits) + + :param number lowBits: the low 32-bits. + :param number highBits: the high 32-bits. + :returns: long the corresponding Long value. + + +--------------- +Long.fromString +--------------- + + +Returns a Long representation of the given string, written using the given radix. + +.. 
js:function:: Long.fromString(str, opt_radix) + + :param string str: the textual representation of the Long. + :param number opt_radix: the radix in which the text is written. + :returns: long the corresponding Long value. + diff --git a/_sources/api-bson-generated/maxkey.txt b/_sources/api-bson-generated/maxkey.txt new file mode 100644 index 00000000000..5134b480104 --- /dev/null +++ b/_sources/api-bson-generated/maxkey.txt @@ -0,0 +1,17 @@ +======== +MaxKey() +======== + +------------------ +Constructor +------------------ + + +A class representation of the BSON MaxKey type. + + + .. js:class:: MaxKey() + + :returns: maxkey + + diff --git a/_sources/api-bson-generated/minkey.txt b/_sources/api-bson-generated/minkey.txt new file mode 100644 index 00000000000..68fd1efdba2 --- /dev/null +++ b/_sources/api-bson-generated/minkey.txt @@ -0,0 +1,17 @@ +======== +MinKey() +======== + +------------------ +Constructor +------------------ + + +A class representation of the BSON MinKey type. + + + .. js:class:: MinKey() + + :returns: minkey + + diff --git a/_sources/api-bson-generated/objectid.txt b/_sources/api-bson-generated/objectid.txt new file mode 100644 index 00000000000..568ee883990 --- /dev/null +++ b/_sources/api-bson-generated/objectid.txt @@ -0,0 +1,223 @@ +========== +ObjectID() +========== + +------------------ +Constructor +------------------ + + +Create a new ObjectID instance + + + .. js:class:: ObjectID() + + :param string id: Can be a 24 byte hex string, 12 byte binary string or a Number. + :returns: object instance of ObjectID. + + + +----------- +toHexString +----------- + + +Return the ObjectID id as a 24 byte hex string representation + +.. js:function:: toHexString() + + :returns: string return the 24 byte hex string representation. + + +**Examples** + + + + Generate a 24 character hex string representation of the ObjectID + + .. code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + // Create a new ObjectID + var objectId = new ObjectID(); + // Verify that the hex string is 24 characters long + assert.equal(24, objectId.toHexString().length); + + +------ +equals +------ + + +Compares the equality of this ObjectID with otherID. + +.. js:function:: equals(otherID) + + :param object otherID: ObjectID instance to compare against. + :returns: bool the result of comparing two ObjectID's + + +**Examples** + + + + Compare two different ObjectID's using the equals method + + .. 
code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + // Create a new ObjectID + var objectId = new ObjectID(); + // Create a new ObjectID Based on the first ObjectID + var objectId2 = new ObjectID(objectId.id); + // Create another ObjectID + var objectId3 = new ObjectID(); + // objectId and objectId2 should be the same + assert.ok(objectId.equals(objectId2)); + // objectId and objectId2 should be different + assert.ok(!objectId.equals(objectId3)); + + +------------ +getTimestamp +------------ + + +Returns the generation time in seconds that this ID was generated. + +.. js:function:: getTimestamp() + + :returns: number return number of seconds in the timestamp part of the 12 byte id. + + +**Examples** + + + + Generate 12 byte binary string representation using a second based timestamp or + default value + + .. code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + // Get a timestamp in seconds + var timestamp = Math.floor(new Date().getTime()/1000); + // Create a date with the timestamp + var timestampDate = new Date(timestamp*1000); + + // Create a new ObjectID with a specific timestamp + var objectId = new ObjectID(timestamp); + + // Get the timestamp and validate correctness + assert.equal(timestampDate.toString(), objectId.getTimestamp().toString()); + + +----------------------- +ObjectID.createFromTime +----------------------- + + +Creates an ObjectID from a second based number, with the rest of the ObjectID zeroed out. Used for comparisons or sorting the ObjectID. + +.. js:function:: ObjectID.createFromTime(time) + + :param number time: an integer number representing a number of seconds. + :returns: objectid return the created ObjectID + + +**Examples** + + + + Show the usage of the Objectid createFromTime function + + .. code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var objectId = ObjectID.createFromTime(1); + assert.equal("000000010000000000000000", objectId.toHexString()); + + +---------------------------- +ObjectID.createFromHexString +---------------------------- + + +Creates an ObjectID from a hex string representation of an ObjectID. + +.. js:function:: ObjectID.createFromHexString(hexString) + + :param string hexString: create a ObjectID from a passed in 24 byte hexstring. + :returns: objectid return the created ObjectID + + +**Examples** + + + + Convert a ObjectID into a hex string representation and then back to an ObjectID + + .. 
code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + // Create a new ObjectID + var objectId = new ObjectID(); + // Convert the object id to a hex string + var originalHex = objectId.toHexString(); + // Create a new ObjectID using the createFromHexString function + var newObjectId = new ObjectID.createFromHexString(originalHex) + // Convert the new ObjectID back into a hex string using the toHexString function + var newHex = newObjectId.toHexString(); + // Compare the two hex strings + assert.equal(originalHex, newHex); + diff --git a/_sources/api-bson-generated/symbol.txt b/_sources/api-bson-generated/symbol.txt new file mode 100644 index 00000000000..3cf5cfc13e2 --- /dev/null +++ b/_sources/api-bson-generated/symbol.txt @@ -0,0 +1,30 @@ +======== +Symbol() +======== + +------------------ +Constructor +------------------ + + +A class representation of the BSON Symbol type. + + + .. js:class:: Symbol() + + :param string value: the string representing the symbol. + :returns: symbol + + + +------- +valueOf +------- + + +Access the wrapped string value. + +.. js:function:: valueOf() + + :returns: string returns the wrapped string. + diff --git a/_sources/api-bson-generated/timestamp.txt b/_sources/api-bson-generated/timestamp.txt new file mode 100644 index 00000000000..e28ea56dc3e --- /dev/null +++ b/_sources/api-bson-generated/timestamp.txt @@ -0,0 +1,487 @@ +=========== +Timestamp() +=========== + +------------------ +Constructor +------------------ + + +Defines a Timestamp class for representing a 64-bit two's-complement +integer value, which faithfully simulates the behavior of a Java "Timestamp". This +implementation is derived from TimestampLib in GWT. + + + .. js:class:: Timestamp() + + :param number low: the low (signed) 32 bits of the Timestamp. + :param number high: the high (signed) 32 bits of the Timestamp. + + +Constructs a 64-bit two's-complement integer, given its low and high 32-bit +values as *signed* integers. See the from* functions below for more +convenient ways of constructing Timestamps. + + + +The internal representation of a Timestamp is the two given signed, 32-bit values. +We use 32-bit pieces because these are the size of integers on which +Javascript performs bit-operations. For operations like addition and +multiplication, we split each number into 16-bit pieces, which can easily be +multiplied within Javascript's floating-point representation without overflow +or change in sign. + + + +In the algorithms below, we frequently reduce the negative case to the +positive case by negating the input(s) and then post-processing the result. +Note that we must ALWAYS check specially whether those values are MIN_VALUE +(-2^63) because -MIN_VALUE == MIN_VALUE (since 2^63 cannot be represented as +a positive number, it overflows back into a negative). Not handling this +case would often result in infinite recursion. + + +----- +toInt +----- + + +Return the int value. + +.. js:function:: toInt() + + :returns: number the value, assuming it is a 32-bit integer. + + +-------- +toNumber +-------- + + +Return the Number value. + +.. 
js:function:: toNumber() + + :returns: number the closest floating-point representation to this value. + + +------ +toJSON +------ + + +Return the JSON value. + +.. js:function:: toJSON() + + :returns: string the JSON representation. + + +-------- +toString +-------- + + +Return the String value. + +.. js:function:: toString([opt_radix]) + + :param number [opt_radix]: the radix in which the text should be written. + :returns: string the textual representation of this value. + + +----------- +getHighBits +----------- + + +Return the high 32-bits value. + +.. js:function:: getHighBits() + + :returns: number the high 32-bits as a signed value. + + +---------- +getLowBits +---------- + + +Return the low 32-bits value. + +.. js:function:: getLowBits() + + :returns: number the low 32-bits as a signed value. + + +------------------ +getLowBitsUnsigned +------------------ + + +Return the low unsigned 32-bits value. + +.. js:function:: getLowBitsUnsigned() + + :returns: number the low 32-bits as an unsigned value. + + +------------- +getNumBitsAbs +------------- + + +Returns the number of bits needed to represent the absolute value of this Timestamp. + +.. js:function:: getNumBitsAbs() + + :returns: number Returns the number of bits needed to represent the absolute value of this Timestamp. + + +------ +isZero +------ + + +Return whether this value is zero. + +.. js:function:: isZero() + + :returns: boolean whether this value is zero. + + +---------- +isNegative +---------- + + +Return whether this value is negative. + +.. js:function:: isNegative() + + :returns: boolean whether this value is negative. + + +----- +isOdd +----- + + +Return whether this value is odd. + +.. js:function:: isOdd() + + :returns: boolean whether this value is odd. + + +------ +equals +------ + + +Return whether this Timestamp equals the other + +.. js:function:: equals(other) + + :param timestamp other: Timestamp to compare against. + :returns: boolean whether this Timestamp equals the other + + +--------- +notEquals +--------- + + +Return whether this Timestamp does not equal the other. + +.. js:function:: notEquals(other) + + :param timestamp other: Timestamp to compare against. + :returns: boolean whether this Timestamp does not equal the other. + + +-------- +lessThan +-------- + + +Return whether this Timestamp is less than the other. + +.. js:function:: lessThan(other) + + :param timestamp other: Timestamp to compare against. + :returns: boolean whether this Timestamp is less than the other. + + +--------------- +lessThanOrEqual +--------------- + + +Return whether this Timestamp is less than or equal to the other. + +.. js:function:: lessThanOrEqual(other) + + :param timestamp other: Timestamp to compare against. + :returns: boolean whether this Timestamp is less than or equal to the other. + + +----------- +greaterThan +----------- + + +Return whether this Timestamp is greater than the other. + +.. js:function:: greaterThan(other) + + :param timestamp other: Timestamp to compare against. + :returns: boolean whether this Timestamp is greater than the other. + + +------------------ +greaterThanOrEqual +------------------ + + +Return whether this Timestamp is greater than or equal to the other. + +.. js:function:: greaterThanOrEqual(other) + + :param timestamp other: Timestamp to compare against. + :returns: boolean whether this Timestamp is greater than or equal to the other. + + +------- +compare +------- + + +Compares this Timestamp with the given one. + +.. 
js:function:: compare(other) + + :param timestamp other: Timestamp to compare against. + :returns: boolean 0 if they are the same, 1 if the this is greater, and -1 if the given one is greater. + + +------ +negate +------ + + +The negation of this value. + +.. js:function:: negate() + + :returns: timestamp the negation of this value. + + +--- +add +--- + + +Returns the sum of this and the given Timestamp. + +.. js:function:: add(other) + + :param timestamp other: Timestamp to add to this one. + :returns: timestamp the sum of this and the given Timestamp. + + +-------- +subtract +-------- + + +Returns the difference of this and the given Timestamp. + +.. js:function:: subtract(other) + + :param timestamp other: Timestamp to subtract from this. + :returns: timestamp the difference of this and the given Timestamp. + + +-------- +multiply +-------- + + +Returns the product of this and the given Timestamp. + +.. js:function:: multiply(other) + + :param timestamp other: Timestamp to multiply with this. + :returns: timestamp the product of this and the other. + + +--- +div +--- + + +Returns this Timestamp divided by the given one. + +.. js:function:: div(other) + + :param timestamp other: Timestamp by which to divide. + :returns: timestamp this Timestamp divided by the given one. + + +------ +modulo +------ + + +Returns this Timestamp modulo the given one. + +.. js:function:: modulo(other) + + :param timestamp other: Timestamp by which to mod. + :returns: timestamp this Timestamp modulo the given one. + + +--- +not +--- + + +The bitwise-NOT of this value. + +.. js:function:: not() + + :returns: timestamp the bitwise-NOT of this value. + + +--- +and +--- + + +Returns the bitwise-AND of this Timestamp and the given one. + +.. js:function:: and(other) + + :param timestamp other: the Timestamp with which to AND. + :returns: timestamp the bitwise-AND of this and the other. + + +-- +or +-- + + +Returns the bitwise-OR of this Timestamp and the given one. + +.. js:function:: or(other) + + :param timestamp other: the Timestamp with which to OR. + :returns: timestamp the bitwise-OR of this and the other. + + +--- +xor +--- + + +Returns the bitwise-XOR of this Timestamp and the given one. + +.. js:function:: xor(other) + + :param timestamp other: the Timestamp with which to XOR. + :returns: timestamp the bitwise-XOR of this and the other. + + +--------- +shiftLeft +--------- + + +Returns this Timestamp with bits shifted to the left by the given amount. + +.. js:function:: shiftLeft(numBits) + + :param number numBits: the number of bits by which to shift. + :returns: timestamp this shifted to the left by the given amount. + + +---------- +shiftRight +---------- + + +Returns this Timestamp with bits shifted to the right by the given amount. + +.. js:function:: shiftRight(numBits) + + :param number numBits: the number of bits by which to shift. + :returns: timestamp this shifted to the right by the given amount. + + +------------------ +shiftRightUnsigned +------------------ + + +Returns this Timestamp with bits shifted to the right by the given amount, with the new top bits matching the current sign bit. + +.. js:function:: shiftRightUnsigned(numBits) + + :param number numBits: the number of bits by which to shift. + :returns: timestamp this shifted to the right by the given amount, with zeros placed into the new leading bits. + + +----------------- +Timestamp.fromInt +----------------- + + +Returns a Timestamp representing the given (32-bit) integer value. + +.. 
js:function:: Timestamp.fromInt(value) + + :param number value: the 32-bit integer in question. + :returns: timestamp the corresponding Timestamp value. + + +-------------------- +Timestamp.fromNumber +-------------------- + + +Returns a Timestamp representing the given value, provided that it is a finite number. Otherwise, zero is returned. + +.. js:function:: Timestamp.fromNumber(value) + + :param number value: the number in question. + :returns: timestamp the corresponding Timestamp value. + + +------------------ +Timestamp.fromBits +------------------ + + +Returns a Timestamp representing the 64-bit integer that comes by concatenating the given high and low bits. Each is assumed to use 32 bits. + +.. js:function:: Timestamp.fromBits(lowBits, highBits) + + :param number lowBits: the low 32-bits. + :param number highBits: the high 32-bits. + :returns: timestamp the corresponding Timestamp value. + + +-------------------- +Timestamp.fromString +-------------------- + + +Returns a Timestamp representation of the given string, written using the given radix. + +.. js:function:: Timestamp.fromString(str, opt_radix) + + :param string str: the textual representation of the Timestamp. + :param number opt_radix: the radix in which the text is written. + :returns: timestamp the corresponding Timestamp value. + diff --git a/_sources/api-generated/admin.txt b/_sources/api-generated/admin.txt new file mode 100644 index 00000000000..28f4e7e65a7 --- /dev/null +++ b/_sources/api-generated/admin.txt @@ -0,0 +1,979 @@ +======= +Admin() +======= + +------------------ +Constructor +------------------ + + +Allows the user to access the admin functionality of MongoDB + + + .. js:class:: Admin() + + :param object db: Current db instance we wish to perform Admin operations on. + :returns: function Constructor for Admin type. + + + +--------- +buildInfo +--------- + + +Retrieve the server information for the current +instance of the db client + +.. js:function:: buildInfo(callback) + + :param function callback: this will be called after executing this method. The first parameter will contain the Error object if an error occured, or null otherwise. While the second parameter will contain the results from buildInfo or null if an error occured. + :returns: null Returns no result + + +**Examples** + + + + Retrieve the buildInfo for the current MongoDB instance + + .. 
code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + + // Use the admin database for the operation + var adminDb = db.admin(); + + // Add the new user to the admin database + adminDb.addUser('admin3', 'admin3', function(err, result) { + + // Authenticate using the newly added user + adminDb.authenticate('admin3', 'admin3', function(err, result) { + assert.ok(result); + + // Retrive the build information for the MongoDB instance + adminDb.buildInfo(function(err, info) { + assert.ok(err == null); + + adminDb.removeUser('admin3', function(err, result) { + assert.ok(result); + + db.close(); + }); + }); + }); + }); + }); + + +------------ +serverStatus +------------ + + +Retrieve this db's server status. + +.. js:function:: serverStatus(callback) + + :param function callback: this will be called after executing this method. The first parameter will contain the Error object if an error occured, or null otherwise. While the second parameter will contain the results from serverStatus or null if an error occured. + :returns: null + + +**Examples** + + + + Retrieve the current server Info + + .. code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + + // Grab a collection object + var collection = db.collection('test'); + + // Force the creation of the collection by inserting a document + // Collections are not created until the first document is inserted + collection.insert({'a':1}, {w: 1}, function(err, doc) { + + // Use the admin database for the operation + var adminDb = db.admin(); + + // Add the new user to the admin database + adminDb.addUser('admin13', 'admin13', function(err, result) { + + // Authenticate using the newly added user + adminDb.authenticate('admin13', 'admin13', function(err, result) { + + // Retrive the server Info + adminDb.serverStatus(function(err, info) { + assert.equal(null, err); + assert.ok(info != null); + + adminDb.removeUser('admin13', function(err, result) { + assert.ok(result); + + db.close(); + }); + }); + }); + }); + }); + }); + + +-------------- +profilingLevel +-------------- + + +Retrieve the current profiling Level for MongoDB + +.. js:function:: profilingLevel(callback) + + :param function callback: this will be called after executing this method. The first parameter will contain the Error object if an error occured, or null otherwise. While the second parameter will contain the results from profilingLevel or null if an error occured. + :returns: null Returns no result + + +**Examples** + + + + Retrieve the current profiling level set for the MongoDB instance + + .. 
code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + + // Grab a collection object + var collection = db.collection('test'); + + // Force the creation of the collection by inserting a document + // Collections are not created until the first document is inserted + collection.insert({'a':1}, {w: 1}, function(err, doc) { + + // Use the admin database for the operation + var adminDb = db.admin(); + + // Add the new user to the admin database + adminDb.addUser('admin5', 'admin5', function(err, result) { + + // Authenticate using the newly added user + adminDb.authenticate('admin5', 'admin5', function(err, replies) { + + // Retrive the profiling level + adminDb.profilingLevel(function(err, level) { + + adminDb.removeUser('admin5', function(err, result) { + assert.ok(result); + + db.close(); + }); + }); + }); + }); + }); + }); + + +---- +ping +---- + + +Ping the MongoDB server and retrieve results + +.. js:function:: ping(callback) + + :param function callback: this will be called after executing this method. The first parameter will contain the Error object if an error occured, or null otherwise. While the second parameter will contain the results from ping or null if an error occured. + :returns: null Returns no result + + +**Examples** + + + + An example of how to add a user to the admin database + + .. code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + + // Use the admin database for the operation + var adminDb = db.admin(); + + // Add the new user to the admin database + adminDb.addUser('admin9', 'admin9', function(err, result) { + + // Authenticate using the newly added user + adminDb.authenticate('admin9', 'admin9', function(err, result) { + assert.ok(result); + + // Ping the server + adminDb.ping(function(err, pingResult) { + assert.equal(null, err); + + adminDb.removeUser('admin9', function(err, result) { + assert.ok(result); + + db.close(); + }); + }); + }); + }); + }); + + +------------ +authenticate +------------ + + +Authenticate against MongoDB + +.. js:function:: authenticate(username, password, callback) + + :param string username: The user name for the authentication. + :param string password: The password for the authentication. + :param function callback: this will be called after executing this method. The first parameter will contain the Error object if an error occured, or null otherwise. While the second parameter will contain the results from authenticate or null if an error occured. + :returns: null Returns no result + + +**Examples** + + + + Authenticate against MongoDB Admin user + + .. 
code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + db.open(function(err, db) { + // Grab a collection object + var collection = db.collection('test'); + + // Force the creation of the collection by inserting a document + // Collections are not created until the first document is inserted + collection.insert({'a':1}, {w:1}, function(err, doc) { + + // Use the admin database for the operation + var adminDb = db.admin(); + + // Add the new user to the admin database + adminDb.addUser('admin2', 'admin2', function(err, result) { + + // Authenticate using the newly added user + adminDb.authenticate('admin2', 'admin2', function(err, result) { + assert.ok(result); + + adminDb.removeUser('admin2', function(err, result) { + assert.ok(result); + + db.close(); + }); + }); + }); + }); + }); + + +------ +logout +------ + + +Logout current authenticated user + +.. js:function:: logout([options], callback) + + :param object [options]: Optional parameters to the command. + :param function callback: this will be called after executing this method. The first parameter will contain the Error object if an error occured, or null otherwise. While the second parameter will contain the results from logout or null if an error occured. + :returns: null Returns no result + + +**Examples** + + + + An example of how add a user, authenticate and logout + + .. code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + + // Use the admin database for the operation + var adminDb = db.admin(); + + // Add the new user to the admin database + adminDb.addUser('admin10', 'admin10', function(err, result) { + + // Authenticate using the newly added user + adminDb.authenticate('admin10', 'admin10', function(err, result) { + assert.ok(result); + + // Logout the user + adminDb.logout(function(err, result) { + assert.equal(true, result); + + adminDb.removeUser('admin10', function(err, result) { + assert.ok(result); + + db.close(); + }); + }); + }); + }); + }); + + +------- +addUser +------- + + +Add a user to the MongoDB server, if the user exists it will +overwrite the current password + + + +Options + - **safe** {true | {w:n, wtimeout:n} | {fsync:true}, default:false}, executes with a getLastError command returning the results of the command on MongoDB. + +.. js:function:: addUser(username, password[, options], callback) + + :param string username: The user name for the authentication. + :param string password: The password for the authentication. + :param object [options]: additional options during update. + :param function callback: this will be called after executing this method. 
The first parameter will contain the Error object if an error occured, or null otherwise. While the second parameter will contain the results from addUser or null if an error occured. + :returns: null Returns no result + + +**Examples** + + + + An example of how to add a user to the admin database + + .. code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + + // Use the admin database for the operation + var adminDb = db.admin(); + + // Add the new user to the admin database + adminDb.addUser('admin11', 'admin11', function(err, result) { + + // Authenticate using the newly added user + adminDb.authenticate('admin11', 'admin11', function(err, result) { + assert.ok(result); + + adminDb.removeUser('admin11', function(err, result) { + assert.ok(result); + + db.close(); + }); + }); + }); + }); + + +---------- +removeUser +---------- + + +Remove a user from the MongoDB server + + + +Options + - **safe** {true | {w:n, wtimeout:n} | {fsync:true}, default:false}, executes with a getLastError command returning the results of the command on MongoDB. + +.. js:function:: removeUser(username[, options], callback) + + :param string username: The user name for the authentication. + :param object [options]: additional options during update. + :param function callback: this will be called after executing this method. The first parameter will contain the Error object if an error occured, or null otherwise. While the second parameter will contain the results from removeUser or null if an error occured. + :returns: null Returns no result + + +**Examples** + + + + An example of how to remove a user from the admin database + + .. code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + + // Use the admin database for the operation + var adminDb = db.admin(); + + // Add the new user to the admin database + adminDb.addUser('admin12', 'admin12', function(err, result) { + + // Authenticate using the newly added user + adminDb.authenticate('admin12', 'admin12', function(err, result) { + assert.ok(result); + + // Remove the user + adminDb.removeUser('admin12', function(err, result) { + assert.equal(null, err); + assert.equal(true, result); + + // Authenticate using the removed user should fail + adminDb.authenticate('admin12', 'admin12', function(err, result) { + assert.ok(err != null); + assert.ok(!result); + + db.close(); + }); + }) + }); + }); + }); + + +----------------- +setProfilingLevel +----------------- + + +Set the current profiling level of MongoDB + +.. 
js:function:: setProfilingLevel(level, callback) + + :param string level: The new profiling level (off, slow_only, all) + :param function callback: this will be called after executing this method. The first parameter will contain the Error object if an error occured, or null otherwise. While the second parameter will contain the results from setProfilingLevel or null if an error occured. + :returns: null Returns no result + + +**Examples** + + + + An example of how to use the setProfilingInfo + Use this command to set the Profiling level on the MongoDB server + + .. code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + + // Grab a collection object + var collection = db.collection('test'); + + // Force the creation of the collection by inserting a document + // Collections are not created until the first document is inserted + collection.insert({'a':1}, {w: 1}, function(err, doc) { + + // Use the admin database for the operation + var adminDb = db.admin(); + + // Add the new user to the admin database + adminDb.addUser('admin6', 'admin6', function(err, result) { + + // Authenticate using the newly added user + adminDb.authenticate('admin6', 'admin6', function(err, replies) { + + // Set the profiling level to only profile slow queries + adminDb.setProfilingLevel('slow_only', function(err, level) { + + // Retrive the profiling level and verify that it's set to slow_only + adminDb.profilingLevel(function(err, level) { + assert.equal('slow_only', level); + + // Turn profiling off + adminDb.setProfilingLevel('off', function(err, level) { + + // Retrive the profiling level and verify that it's set to off + adminDb.profilingLevel(function(err, level) { + assert.equal('off', level); + + // Set the profiling level to log all queries + adminDb.setProfilingLevel('all', function(err, level) { + + // Retrive the profiling level and verify that it's set to all + adminDb.profilingLevel(function(err, level) { + assert.equal('all', level); + + // Attempt to set an illegal profiling level + adminDb.setProfilingLevel('medium', function(err, level) { + assert.ok(err instanceof Error); + assert.equal("Error: illegal profiling level value medium", err.message); + + adminDb.removeUser('admin6', function(err, result) { + assert.ok(result); + + db.close(); + }); + }); + }) + }); + }) + }); + }) + }); + }); + }); + }); + }); + + +------------- +profilingInfo +------------- + + +Retrive the current profiling information for MongoDB + +.. js:function:: profilingInfo(callback) + + :param function callback: this will be called after executing this method. The first parameter will contain the Error object if an error occured, or null otherwise. While the second parameter will contain the results from profilingInfo or null if an error occured. + :returns: null Returns no result + + +**Examples** + + + + An example of how to use the profilingInfo + Use this command to pull back the profiling information currently set for Mongodb + + .. 
code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + + // Grab a collection object + var collection = db.collection('test'); + + // Force the creation of the collection by inserting a document + // Collections are not created until the first document is inserted + collection.insert({'a':1}, {w: 1}, function(doc) { + + // Use the admin database for the operation + var adminDb = db.admin(); + + // Add the new user to the admin database + adminDb.addUser('admin7', 'admin7', function(err, result) { + + // Authenticate using the newly added user + adminDb.authenticate('admin7', 'admin7', function(err, replies) { + + // Set the profiling level to all + adminDb.setProfilingLevel('all', function(err, level) { + + // Execute a query command + collection.find().toArray(function(err, items) { + + // Turn off profiling + adminDb.setProfilingLevel('off', function(err, level) { + + // Retrive the profiling information + adminDb.profilingInfo(function(err, infos) { + assert.ok(infos.constructor == Array); + assert.ok(infos.length >= 1); + assert.ok(infos[0].ts.constructor == Date); + assert.ok(infos[0].millis.constructor == Number); + + adminDb.removeUser('admin7', function(err, result) { + assert.ok(result); + + db.close(); + }); + }); + }); + }); + }); + }); + }); + }); + }); + + +------- +command +------- + + +Execute a db command against the Admin database + +.. js:function:: command(command[, options], callback) + + :param object command: A command object `{ping:1}`. + :param object [options]: Optional parameters to the command. + :param function callback: this will be called after executing this method. The command always return the whole result of the command as the second parameter. + :returns: null Returns no result + + +**Examples** + + + + Retrieve the buildInfo using the command function + + .. 
code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + + // Use the admin database for the operation + var adminDb = db.admin(); + + // Add the new user to the admin database + adminDb.addUser('admin4', 'admin4', function(err, result) { + + // Authenticate using the newly added user + adminDb.authenticate('admin4', 'admin4', function(err, result) { + assert.ok(result); + + // Retrive the build information using the admin command + adminDb.command({buildInfo:1}, function(err, info) { + assert.ok(err == null); + + adminDb.removeUser('admin4', function(err, result) { + assert.ok(result); + + db.close(); + }); + }); + }); + }); + }); + + +------------------ +validateCollection +------------------ + + +Validate an existing collection + +.. js:function:: validateCollection(collectionName[, options], callback) + + :param string collectionName: The name of the collection to validate. + :param object [options]: Optional parameters to the command. + :param function callback: this will be called after executing this method. The first parameter will contain the Error object if an error occured, or null otherwise. While the second parameter will contain the results from validateCollection or null if an error occured. + :returns: null Returns no result + + +**Examples** + + + + An example of how to use the validateCollection command + Use this command to check that a collection is valid (not corrupt) and to get various statistics. + + .. 
code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + + // Grab a collection object + var collection = db.collection('test'); + + // Force the creation of the collection by inserting a document + // Collections are not created until the first document is inserted + collection.insert({'a':1}, {w: 1}, function(err, doc) { + + // Use the admin database for the operation + var adminDb = db.admin(); + + // Add the new user to the admin database + adminDb.addUser('admin8', 'admin8', function(err, result) { + + // Authenticate using the newly added user + adminDb.authenticate('admin8', 'admin8', function(err, replies) { + + // Validate the 'test' collection + adminDb.validateCollection('test', function(err, doc) { + + // Pre 1.9.1 servers + if(doc.result != null) { + assert.ok(doc.result != null); + assert.ok(doc.result.match(/firstExtent/) != null); + } else { + assert.ok(doc.firstExtent != null); + } + + adminDb.removeUser('admin8', function(err, result) { + assert.ok(result); + + db.close(); + }); + }); + }); + }); + }); + }); + + +------------- +listDatabases +------------- + + +List the available databases + +.. js:function:: listDatabases(callback) + + :param function callback: this will be called after executing this method. The first parameter will contain the Error object if an error occured, or null otherwise. While the second parameter will contain the results from listDatabases or null if an error occured. + :returns: null Returns no result + + +**Examples** + + + + An example of listing all available databases. + + .. code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + + // Use the admin database for the operation + var adminDb = db.admin(); + + // List all the available databases + adminDb.listDatabases(function(err, dbs) { + assert.equal(null, err); + assert.ok(dbs.databases.length > 0); + + db.close(); + }); + }); + + +---------------- +replSetGetStatus +---------------- + + +Get ReplicaSet status + +.. js:function:: replSetGetStatus(callback) + + :param function callback: this will be called after executing this method. The first parameter will contain the Error object if an error occured, or null otherwise. While the second parameter will contain the results from replSetGetStatus or null if an error occured. + :returns: null + + +**Examples** + + + + Retrieve the current replicaset status if the server is running as part of a replicaset + + .. 
code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + + // Grab a collection object + var collection = db.collection('test'); + + // Force the creation of the collection by inserting a document + // Collections are not created until the first document is inserted + collection.insert({'a':1}, {w: 1}, function(err, doc) { + + // Use the admin database for the operation + var adminDb = db.admin(); + + // Add the new user to the admin database + adminDb.addUser('admin14', 'admin14', function(err, result) { + + // Authenticate using the newly added user + adminDb.authenticate('admin14', 'admin14', function(err, result) { + + // Retrive the server Info, returns error if we are not + // running a replicaset + adminDb.replSetGetStatus(function(err, info) { + + adminDb.removeUser('admin14', function(err, result) { + assert.ok(result); + + db.close(); + }); + }) + }); + }); + }); + }); + diff --git a/_sources/api-generated/collection.txt b/_sources/api-generated/collection.txt new file mode 100644 index 00000000000..fe2914a345a --- /dev/null +++ b/_sources/api-generated/collection.txt @@ -0,0 +1,2888 @@ +============ +Collection() +============ + +------------------ +Constructor +------------------ + + +Create a new Collection instance (INTERNAL TYPE) + + + .. js:class:: Collection() + + :param object db: db instance. + :param string collectionName: collection name. + :param object [pkFactory]: alternative primary key factory. + :param object [options]: additional options for the collection. + :returns: object a collection instance. + + +Options + - **readPreference** {String}, the prefered read preference (ReadPreference.PRIMARY, ReadPreference.PRIMARY_PREFERRED, ReadPreference.SECONDARY, ReadPreference.SECONDARY_PREFERRED, ReadPreference.NEAREST). + - **slaveOk** {Boolean, default:false}, Allow reads from secondaries. + - **serializeFunctions** {Boolean, default:false}, serialize functions on the document. + - **raw** {Boolean, default:false}, perform all operations using raw bson objects. + - **pkFactory** {Object}, object overriding the basic ObjectID primary key generation. + + +------ +insert +------ + + +Inserts a single document or a an array of documents into MongoDB. + + + +Options + - **w**, {Number/String, > -1 || 'majority' || tag name} the write concern for the operation where < 1 is no acknowlegement of write and w >= 1, w = 'majority' or tag acknowledges the write + - **wtimeout**, {Number, 0} set the timeout for waiting for write concern to finish (combines with w option) + - **fsync**, (Boolean, default:false) write waits for fsync before returning + - **journal**, (Boolean, default:false) write waits for journal sync before returning + - **continueOnError/keepGoing** {Boolean, default:false}, keep inserting documents even if one document has an error, *mongodb 1.9.1 >*. + - **serializeFunctions** {Boolean, default:false}, serialize functions on the document. 
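+
+The write concern options above can be combined on a single call. The following is a minimal sketch, assuming a mongod with journaling enabled on localhost:27017; the collection name `write_concern_sketch` is a hypothetical name used only for illustration. It asks for an acknowledged, journaled write and waits at most five seconds for the write concern to be satisfied.
+
+.. code-block:: javascript
+
+    var Db = require('mongodb').Db,
+      Server = require('mongodb').Server,
+      assert = require('assert');
+
+    var db = new Db('test', new Server('localhost', 27017));
+    // Establish connection to db
+    db.open(function(err, db) {
+      assert.equal(null, err);
+
+      // Hypothetical collection name, used only for this sketch
+      var collection = db.collection('write_concern_sketch');
+
+      // Acknowledged, journaled write with a 5 second write concern timeout
+      collection.insert({a:1}, {w:1, wtimeout:5000, journal:true}, function(err, result) {
+        assert.equal(null, err);
+
+        db.close();
+      });
+    });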
+ + + +Deprecated Options + - **safe** {true | {w:n, wtimeout:n} | {fsync:true}, default:false}, executes with a getLastError command returning the results of the command on MongoDB. + +.. js:function:: insert(docs[, options], [callback]) + + :param array docs: + :param object [options]: optional options for insert command + :param function [callback]: optional callback for the function, must be provided when using a writeconcern + :returns: null + + +**Examples** + + + + A simple document insert example, not using safe mode to ensure document persistance on MongoDB + + .. code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Fetch a collection to insert document into + var collection = db.collection("simple_document_insert_collection_no_safe"); + // Insert a single document + collection.insert({hello:'world_no_safe'}); + + // Wait for a second before finishing up, to ensure we have written the item to disk + setTimeout(function() { + + // Fetch the document + collection.findOne({hello:'world_no_safe'}, function(err, item) { + assert.equal(null, err); + assert.equal('world_no_safe', item.hello); + }) + }, 100); + + + + A batch document insert example, using safe mode to ensure document persistance on MongoDB + + .. code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Fetch a collection to insert document into + var collection = db.collection("batch_document_insert_collection_safe"); + // Insert a single document + collection.insert([{hello:'world_safe1'} + , {hello:'world_safe2'}], {w:1}, function(err, result) { + assert.equal(null, err); + + // Fetch the document + collection.findOne({hello:'world_safe2'}, function(err, item) { + assert.equal(null, err); + assert.equal('world_safe2', item.hello); + }) + }); + + + + Example of inserting a document containing functions + + .. 
code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Fetch a collection to insert document into + var collection = db.collection("simple_document_insert_with_function_safe"); + // Insert a single document + collection.insert({hello:'world' + , func:function() {}}, {w:1, serializeFunctions:true}, function(err, result) { + assert.equal(null, err); + + // Fetch the document + collection.findOne({hello:'world'}, function(err, item) { + assert.equal(null, err); + assert.ok("function() {}", item.code); + }) + }); + + + + Example of using keepGoing to allow batch insert to complete even when there are illegal documents in the batch + + .. code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Only run the rest of the code if we have a mongodb server with version >= 1.9.1 + db.admin().serverInfo(function(err, result){ + + // Ensure we are running at least MongoDB v1.9.1 + if(parseInt((result.version.replace(/\./g, ''))) >= 191) { + + // Create a collection + var collection = db.collection('keepGoingExample'); + + // Add an unique index to title to force errors in the batch insert + collection.ensureIndex({title:1}, {unique:true}, function(err, indexName) { + + // Insert some intial data into the collection + collection.insert([{name:"Jim"} + , {name:"Sarah", title:"Princess"}], {w:1}, function(err, result) { + + // Force keep going flag, ignoring unique index issue + collection.insert([{name:"Jim"} + , {name:"Sarah", title:"Princess"} + , {name:'Gump', title:"Gump"}], {w:1, keepGoing:true}, function(err, result) { + + // Count the number of documents left (should not include the duplicates) + collection.count(function(err, count) { + assert.equal(3, count); + }) + }); + }); + }); + } else { + test.done(); + } + }); + + +------ +remove +------ + + +Removes documents specified by selector from the db. + + + +Options + - **w**, {Number/String, > -1 || 'majority' || tag name} the write concern for the operation where < 1 is no acknowlegement of write and w >= 1, w = 'majority' or tag acknowledges the write + - **wtimeout**, {Number, 0} set the timeout for waiting for write concern to finish (combines with w option) + - **fsync**, (Boolean, default:false) write waits for fsync before returning + - **journal**, (Boolean, default:false) write waits for journal sync before returning + - **single** {Boolean, default:false}, removes the first document found. + + + +Deprecated Options + - **safe** {true | {w:n, wtimeout:n} | {fsync:true}, default:false}, executes with a getLastError command returning the results of the command on MongoDB. + +.. 
js:function:: remove([selector][, options], [callback]) + + :param object [selector]: optional select, no selector is equivalent to removing all documents. + :param object [options]: additional options during remove. + :param function [callback]: must be provided if you performing a remove with a writeconcern + :returns: null + + +**Examples** + + + + An example removing all documents in a collection not using safe mode + + .. code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + + // Fetch a collection to insert document into + db.collection("remove_all_documents_no_safe", function(err, collection) { + + // Insert a bunch of documents + collection.insert([{a:1}, {b:2}], {w:1}, function(err, result) { + assert.equal(null, err); + + // Remove all the document + collection.remove(); + + // Fetch all results + collection.find().toArray(function(err, items) { + assert.equal(null, err); + assert.equal(0, items.length); + db.close(); + }); + }); + }) + }); + + + + An example removing a subset of documents using safe mode to ensure removal of documents + + .. code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + + // Fetch a collection to insert document into + db.collection("remove_subset_of_documents_safe", function(err, collection) { + + // Insert a bunch of documents + collection.insert([{a:1}, {b:2}], {w:1}, function(err, result) { + assert.equal(null, err); + + // Remove all the document + collection.remove({a:1}, {w:1}, function(err, numberOfRemovedDocs) { + assert.equal(null, err); + assert.equal(1, numberOfRemovedDocs); + db.close(); + }); + }); + }) + }); + + +------ +rename +------ + + +Renames the collection. + + + +Options + - **dropTarget** {Boolean, default:false}, drop the target name collection if it previously exists. + +.. js:function:: rename(newName[, options], callback) + + :param string newName: the new name of the collection. + :param object [options]: returns option results. + :param function callback: the callback accepting the result + :returns: null + + +**Examples** + + + + An example of illegal and legal renaming of a collection + + .. 
code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + + // Open a couple of collections + db.createCollection('test_rename_collection', function(err, collection1) { + db.createCollection('test_rename_collection2', function(err, collection2) { + + // Attemp to rename a collection to a number + try { + collection1.rename(5, function(err, collection) {}); + } catch(err) { + assert.ok(err instanceof Error); + assert.equal("collection name must be a String", err.message); + } + + // Attemp to rename a collection to an empty string + try { + collection1.rename("", function(err, collection) {}); + } catch(err) { + assert.ok(err instanceof Error); + assert.equal("collection names cannot be empty", err.message); + } + + // Attemp to rename a collection to an illegal name including the character $ + try { + collection1.rename("te$t", function(err, collection) {}); + } catch(err) { + assert.ok(err instanceof Error); + assert.equal("collection names must not contain '$'", err.message); + } + + // Attemp to rename a collection to an illegal name starting with the character . + try { + collection1.rename(".test", function(err, collection) {}); + } catch(err) { + assert.ok(err instanceof Error); + assert.equal("collection names must not start or end with '.'", err.message); + } + + // Attemp to rename a collection to an illegal name ending with the character . + try { + collection1.rename("test.", function(err, collection) {}); + } catch(err) { + assert.ok(err instanceof Error); + assert.equal("collection names must not start or end with '.'", err.message); + } + + // Attemp to rename a collection to an illegal name with an empty middle name + try { + collection1.rename("tes..t", function(err, collection) {}); + } catch(err) { + assert.equal("collection names cannot be empty", err.message); + } + + // Insert a couple of documents + collection1.insert([{'x':1}, {'x':2}], {w:1}, function(err, docs) { + + // Attemp to rename the first collection to the second one, this will fail + collection1.rename('test_rename_collection2', function(err, collection) { + assert.ok(err instanceof Error); + assert.ok(err.message.length > 0); + + // Attemp to rename the first collection to a name that does not exist + // this will be succesful + collection1.rename('test_rename_collection3', function(err, collection) { + assert.equal("test_rename_collection3", collection.collectionName); + + // Ensure that the collection is pointing to the new one + collection1.count(function(err, count) { + assert.equal(2, count); + db.close(); + }); + }); + }); + }) + }); + }); + }); + + +---- +save +---- + + +Save a document. Simple full document replacement function. Not recommended for efficiency, use atomic +operators and update instead for more efficient operations. 
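+
+To illustrate the recommendation above, the following is a minimal sketch, assuming a mongod on localhost:27017; the collection name `save_vs_update_sketch` is a hypothetical name used only for illustration. It applies the same change twice: first by reading the document back and saving the whole document, then with an atomic $inc that only sends the modified field to the server.
+
+.. code-block:: javascript
+
+    var Db = require('mongodb').Db,
+      Server = require('mongodb').Server,
+      assert = require('assert');
+
+    var db = new Db('test', new Server('localhost', 27017));
+    // Establish connection to db
+    db.open(function(err, db) {
+      assert.equal(null, err);
+
+      // Hypothetical collection name, used only for this sketch
+      var collection = db.collection('save_vs_update_sketch');
+
+      collection.insert({name:'counter', hits:0}, {w:1}, function(err, result) {
+        assert.equal(null, err);
+
+        // Full document replacement: read the document, change it and send it back whole
+        collection.findOne({name:'counter'}, function(err, item) {
+          item.hits = 1;
+
+          collection.save(item, {w:1}, function(err, result) {
+            assert.equal(null, err);
+
+            // Atomic update: only the $inc operator travels to the server, no read required
+            collection.update({name:'counter'}, {$inc:{hits:1}}, {w:1}, function(err, numberUpdated) {
+              assert.equal(null, err);
+              assert.equal(1, numberUpdated);
+
+              db.close();
+            });
+          });
+        });
+      });
+    });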
+ + + +Options + - **w**, {Number/String, > -1 || 'majority' || tag name} the write concern for the operation where < 1 is no acknowlegement of write and w >= 1, w = 'majority' or tag acknowledges the write + - **wtimeout**, {Number, 0} set the timeout for waiting for write concern to finish (combines with w option) + - **fsync**, (Boolean, default:false) write waits for fsync before returning + - **journal**, (Boolean, default:false) write waits for journal sync before returning + + + +Deprecated Options + - **safe** {true | {w:n, wtimeout:n} | {fsync:true}, default:false}, executes with a getLastError command returning the results of the command on MongoDB. + +.. js:function:: save([doc][, options], [callback]) + + :param object [doc]: the document to save + :param object [options]: additional options during remove. + :param function [callback]: must be provided if you performing a safe save + :returns: null + + +**Examples** + + + + Example of a simple document save with safe set to false + + .. code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + + // Fetch the collection + var collection = db.collection("save_a_simple_document"); + // Save a document with no safe option + collection.save({hello:'world'}); + + // Wait for a second + setTimeout(function() { + + // Find the saved document + collection.findOne({hello:'world'}, function(err, item) { + assert.equal(null, err); + assert.equal('world', item.hello); + db.close(); + }); + }, 1000); + }); + + + + Example of a simple document save and then resave with safe set to true + + .. code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + + // Fetch the collection + var collection = db.collection("save_a_simple_document_modify_it_and_resave_it"); + + // Save a document with no safe option + collection.save({hello:'world'}, {w: 0}, function(err, result) { + + // Find the saved document + collection.findOne({hello:'world'}, function(err, item) { + assert.equal(null, err); + assert.equal('world', item.hello); + + // Update the document + item['hello2'] = 'world2'; + + // Save the item with the additional field + collection.save(item, {w: 1}, function(err, result) { + + // Find the changed document + collection.findOne({hello:'world'}, function(err, item) { + assert.equal(null, err); + assert.equal('world', item.hello); + assert.equal('world2', item.hello2); + + db.close(); + }); + }); + }); + }); + }); + + +------ +update +------ + + +Updates documents. 
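+
+By default only the first document matching the selector is modified, even if several documents match; the multi option listed below (and the multi example further down) updates every match. The following is a minimal sketch of the default behaviour, assuming a mongod on localhost:27017; the collection name `update_single_match_sketch` is a hypothetical name used only for illustration.
+
+.. code-block:: javascript
+
+    var Db = require('mongodb').Db,
+      Server = require('mongodb').Server,
+      assert = require('assert');
+
+    var db = new Db('test', new Server('localhost', 27017));
+    // Establish connection to db
+    db.open(function(err, db) {
+      assert.equal(null, err);
+
+      // Hypothetical collection name, used only for this sketch
+      var collection = db.collection('update_single_match_sketch');
+
+      // Two documents match the selector used below
+      collection.insert([{group:'a'}, {group:'a'}], {w:1}, function(err, result) {
+        assert.equal(null, err);
+
+        // Without the multi option only the first matching document is updated
+        collection.update({group:'a'}, {$set:{seen:true}}, {w:1}, function(err, numberUpdated) {
+          assert.equal(null, err);
+          assert.equal(1, numberUpdated);
+
+          db.close();
+        });
+      });
+    });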
+ + + +Options + - **w**, {Number/String, > -1 || 'majority' || tag name} the write concern for the operation where < 1 is no acknowlegement of write and w >= 1, w = 'majority' or tag acknowledges the write + - **wtimeout**, {Number, 0} set the timeout for waiting for write concern to finish (combines with w option) + - **fsync**, (Boolean, default:false) write waits for fsync before returning + - **journal**, (Boolean, default:false) write waits for journal sync before returning + - **upsert** {Boolean, default:false}, perform an upsert operation. + - **multi** {Boolean, default:false}, update all documents matching the selector. + - **serializeFunctions** {Boolean, default:false}, serialize functions on the document. + + + +Deprecated Options + - **safe** {true | {w:n, wtimeout:n} | {fsync:true}, default:false}, executes with a getLastError command returning the results of the command on MongoDB. + +.. js:function:: update(selector, document[, options], [callback]) + + :param object selector: the query to select the document/documents to be updated + :param object document: the fields/vals to be updated, or in the case of an upsert operation, inserted. + :param object [options]: additional options during update. + :param function [callback]: must be provided if you performing an update with a writeconcern + :returns: null + + +**Examples** + + + + Example of a simple document update with safe set to false on an existing document + + .. code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + + // Get a collection + db.collection('update_a_simple_document', function(err, collection) { + + // Insert a document, then update it + collection.insert({a:1}, {w: 1}, function(err, doc) { + + // Update the document with an atomic operator + collection.update({a:1}, {$set:{b:2}}); + + // Wait for a second then fetch the document + setTimeout(function() { + + // Fetch the document that we modified + collection.findOne({a:1}, function(err, item) { + assert.equal(null, err); + assert.equal(1, item.a); + assert.equal(2, item.b); + db.close(); + }); + }, 1000); + }) + }); + }); + + + + Example of a simple document update using upsert (the document will be inserted if it does not exist) + + .. 
code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('localhost', 27017)); + // Establish connection to db + db.open(function(err, db) { + + // Get a collection + db.collection('update_a_simple_document_upsert', function(err, collection) { + + // Update the document using an upsert operation, ensuring creation if it does not exist + collection.update({a:1}, {b:2, a:1}, {upsert:true, w: 1}, function(err, result) { + assert.equal(null, err); + assert.equal(1, result); + + // Fetch the document that we modified and check if it got inserted correctly + collection.findOne({a:1}, function(err, item) { + assert.equal(null, err); + assert.equal(1, item.a); + assert.equal(2, item.b); + db.close(); + }); + }); + }); + }); + + + + Example of an update across multiple documents using the multi option. + + .. code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('localhost', 27017)); + // Establish connection to db + db.open(function(err, db) { + + // Get a collection + db.collection('update_a_simple_document_multi', function(err, collection) { + + // Insert a couple of documents + collection.insert([{a:1, b:1}, {a:1, b:2}], {w: 1}, function(err, result) { + + // Update multiple documents using the multi option + collection.update({a:1}, {$set:{b:0}}, {w: 1, multi:true}, function(err, numberUpdated) { + assert.equal(null, err); + assert.equal(2, numberUpdated); + + // Fetch all the documents and verify that we have changed the b value + collection.find().toArray(function(err, items) { + assert.equal(null, err); + assert.equal(1, items[0].a); + assert.equal(0, items[0].b); + assert.equal(1, items[1].a); + assert.equal(0, items[1].b); + + db.close(); + }); + }) + }); + }); + }); + + +-------- +distinct +-------- + + +The distinct command returns a list of distinct values for the given key across a collection. + + + +Options + - **readPreference** {String}, the preferred read preference (Server.PRIMARY, Server.PRIMARY_PREFERRED, Server.SECONDARY, Server.SECONDARY_PREFERRED, Server.NEAREST). + +.. js:function:: distinct(key[, query], [options], callback) + + :param string key: key to run distinct against. + :param object [query]: optional query to narrow the returned objects. + :param object [options]: additional options for the command. + :param function callback: this will be called after executing this method. The first parameter will contain the Error object if an error occurred, or null otherwise. While the second parameter will contain the results from distinct or null if an error occurred. + :returns: null + + +**Examples** + + + + Example of running the distinct command against a collection + + .. 
code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + + // Crete the collection for the distinct example + db.createCollection('simple_key_based_distinct', function(err, collection) { + + // Insert documents to perform distinct against + collection.insert([{a:0, b:{c:'a'}}, {a:1, b:{c:'b'}}, {a:1, b:{c:'c'}}, + {a:2, b:{c:'a'}}, {a:3}, {a:3}], {w: 1}, function(err, ids) { + + // Peform a distinct query against the a field + collection.distinct('a', function(err, docs) { + assert.deepEqual([0, 1, 2, 3], docs.sort()); + + // Perform a distinct query against the sub-field b.c + collection.distinct('b.c', function(err, docs) { + assert.deepEqual(['a', 'b', 'c'], docs.sort()); + + db.close(); + }); + }); + }) + }); + }); + + + + Example of running the distinct command against a collection with a filter query + + .. code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + + // Crete the collection for the distinct example + db.createCollection('simple_key_based_distinct_sub_query_filter', function(err, collection) { + + // Insert documents to perform distinct against + collection.insert([{a:0, b:{c:'a'}}, {a:1, b:{c:'b'}}, {a:1, b:{c:'c'}}, + {a:2, b:{c:'a'}}, {a:3}, {a:3}, {a:5, c:1}], {w: 1}, function(err, ids) { + + // Peform a distinct query with a filter against the documents + collection.distinct('a', {c:1}, function(err, docs) { + assert.deepEqual([5], docs.sort()); + + db.close(); + }); + }) + }); + }); + + +----- +count +----- + + +Count number of matching documents in the db to a query. + + + +Options + - **skip** {Number}, The number of documents to skip for the count. + - **limit** {Number}, The limit of documents to count. + - **readPreference** {String}, the preferred read preference (Server.PRIMARY, Server.PRIMARY_PREFERRED, Server.SECONDARY, Server.SECONDARY_PREFERRED, Server.NEAREST). + +.. js:function:: count([query][, options], callback) + + :param object [query]: query to filter by before performing count. + :param object [options]: additional options during count. + :param function callback: this will be called after executing this method. The first parameter will contain the Error object if an error occured, or null otherwise. While the second parameter will contain the results from the count method or null if an error occured. + :returns: null + + +**Examples** + + + + Example of running simple count commands against a collection. + + .. 
code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('localhost', 27017)); + // Establish connection to db + db.open(function(err, db) { + + // Create the collection for the count example + db.createCollection('simple_count_example', function(err, collection) { + + // Insert documents to perform the count against + collection.insert([{a:1}, {a:2}, {a:3}, {a:4, b:1}], {w: 1}, function(err, ids) { + + // Perform a total count command + collection.count(function(err, count) { + assert.equal(null, err); + assert.equal(4, count); + + // Perform a partial count where b=1 + collection.count({b:1}, function(err, count) { + assert.equal(null, err); + assert.equal(1, count); + + db.close(); + }); + }); + }); + }); + }); + + +---- +drop +---- + + +Drop the collection + +.. js:function:: drop(callback) + + :param function callback: this will be called after executing this method. The first parameter will contain the Error object if an error occurred, or null otherwise. While the second parameter will contain the results from the drop method or null if an error occurred. + :returns: null + + +**Examples** + + + + An example of dropping a collection and verifying that it no longer appears in the list of collection names + + .. code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('localhost', 27017)); + // Establish connection to db + db.open(function(err, db) { + + // Create a collection we want to drop later + db.createCollection('test_other_drop', function(err, collection) { + assert.equal(null, err); + + // Drop the collection + collection.drop(function(err, reply) { + + // Ensure we don't have the collection in the set of names + db.collectionNames(function(err, replies) { + + var found = false; + // For each collection in the list of collection names in this db look for the + // dropped collection + replies.forEach(function(document) { + if(document.name == "test_other_drop") { + found = true; + return; + } + }); + + // Ensure the collection is not found + assert.equal(false, found); + + // Let's close the db + db.close(); + }); + }); + }); + }); + + +------------- +findAndModify +------------- + + +Find and update a document. + + + +Options + - **w**, {Number/String, > -1 || 'majority' || tag name} the write concern for the operation where < 1 is no acknowledgement of write and w >= 1, w = 'majority' or tag acknowledges the write + - **wtimeout**, {Number, 0} set the timeout for waiting for write concern to finish (combines with w option) + - **fsync**, (Boolean, default:false) write waits for fsync before returning + - **journal**, (Boolean, default:false) write waits for journal sync before returning + - **remove** {Boolean, default:false}, set to true to remove the object before returning. 
+ - **upsert** {Boolean, default:false}, perform an upsert operation. + - **new** {Boolean, default:false}, set to true if you want to return the modified object rather than the original. Ignored for remove. + + + +Deprecated Options + - **safe** {true | {w:n, wtimeout:n} | {fsync:true}, default:false}, executes with a getLastError command returning the results of the command on MongoDB. + +.. js:function:: findAndModify(query, sort, doc[, options], callback) + + :param object query: query object to locate the object to modify + :param array sort: - if multiple docs match, choose the first one in the specified sort order as the object to manipulate + :param object doc: - the fields/vals to be updated + :param object [options]: additional options during update. + :param function callback: this will be called after executing this method. The first parameter will contain the Error object if an error occured, or null otherwise. While the second parameter will contain the results from the findAndModify method or null if an error occured. + :returns: null + + +**Examples** + + + + A whole set of different ways to use the findAndModify command. + + + + The first findAndModify command modifies a document and returns the modified document back. + The second findAndModify command removes the document. + The second findAndModify command upserts a document and returns the new document. + + .. code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + + // Create a collection we want to drop later + db.createCollection('simple_find_and_modify_operations_', function(err, collection) { + assert.equal(null, err); + + // Insert some test documentations + collection.insert([{a:1}, {b:1}, {c:1}], {w:1}, function(err, result) { + assert.equal(null, err); + + // Simple findAndModify command returning the new document + collection.findAndModify({a:1}, [['a', 1]], {$set:{b1:1}}, {new:true}, function(err, doc) { + assert.equal(null, err); + assert.equal(1, doc.a); + assert.equal(1, doc.b1); + + // Simple findAndModify command returning the new document and + // removing it at the same time + collection.findAndModify({b:1}, [['b', 1]], + {$set:{b:2}}, {remove:true}, function(err, doc) { + + // Verify that the document is gone + collection.findOne({b:1}, function(err, item) { + assert.equal(null, err); + assert.equal(null, item); + + // Simple findAndModify command performing an upsert and returning the new document + // executing the command safely + collection.findAndModify({d:1}, [['b', 1]], + {d:1, f:1}, {new:true, upsert:true, w:1}, function(err, doc) { + assert.equal(null, err); + assert.equal(1, doc.d); + assert.equal(1, doc.f); + + db.close(); + }) + }); + }); + }); + }); + }); + }); + + +------------- +findAndRemove +------------- + + +Find and remove a document + + + +Options + - **w**, {Number/String, > -1 || 'majority' || tag name} the write concern for the operation where < 1 is no acknowlegement of write and w >= 1, w = 'majority' or tag acknowledges the write + - **wtimeout**, {Number, 0} set the timeout for waiting 
for write concern to finish (combines with w option) + - **fsync**, (Boolean, default:false) write waits for fsync before returning + - **journal**, (Boolean, default:false) write waits for journal sync before returning + + + +Deprecated Options + - **safe** {true | {w:n, wtimeout:n} | {fsync:true}, default:false}, executes with a getLastError command returning the results of the command on MongoDB. + +.. js:function:: findAndRemove(query, sort[, options], callback) + + :param object query: query object to locate the object to modify + :param array sort: - if multiple docs match, choose the first one in the specified sort order as the object to manipulate + :param object [options]: additional options during update. + :param function callback: this will be called after executing this method. The first parameter will contain the Error object if an error occured, or null otherwise. While the second parameter will contain the results from the findAndRemove method or null if an error occured. + :returns: null + + +**Examples** + + + + An example of using findAndRemove + + .. code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + + // Create a collection we want to drop later + db.createCollection('simple_find_and_modify_operations_', function(err, collection) { + assert.equal(null, err); + + // Insert some test documentations + collection.insert([{a:1}, {b:1}, {c:1}], {w:1}, function(err, result) { + assert.equal(null, err); + + // Simple findAndModify command returning the new document and + // removing it at the same time + collection.findAndRemove({b:1}, [['b', 1]], function(err, doc) { + assert.equal(null, err); + assert.equal(1, doc.b); + + // Verify that the document is gone + collection.findOne({b:1}, function(err, item) { + assert.equal(null, err); + assert.equal(null, item); + + db.close(); + }); + }); + }); + }); + }); + + +---- +find +---- + + +Creates a cursor for a query that can be used to iterate over results from MongoDB + + + +Various argument possibilities + - callback? + - selector, callback?, + - selector, fields, callback? + - selector, options, callback? + - selector, fields, options, callback? + - selector, fields, skip, limit, callback? + - selector, fields, skip, limit, timeout, callback? + + + +Options + - **limit** {Number, default:0}, sets the limit of documents returned in the query. + - **sort** {Array | Object}, set to sort the documents coming back from the query. Array of indexes, [['a', 1]] etc. + - **fields** {Object}, the fields to return in the query. Object of fields to include or exclude (not both), {'a':1} + - **skip** {Number, default:0}, set to skip N documents ahead in your query (useful for pagination). + - **hint** {Object}, tell the query to use specific indexes in the query. Object of indexes to use, {'_id':1} + - **explain** {Boolean, default:false}, explain the query instead of returning the data. + - **snapshot** {Boolean, default:false}, snapshot query. + - **timeout** {Boolean, default:false}, specify if the cursor can timeout. 
+ - **tailable** {Boolean, default:false}, specify if the cursor is tailable. + - **tailableRetryInterval** {Number, default:100}, specify the milliseconds between getMores on tailable cursor. + - **numberOfRetries** {Number, default:5}, specify the number of times to retry the tailable cursor. + - **awaitdata** {Boolean, default:false} allow the cursor to wait for data, only applicable for tailable cursor. + - **exhaust** {Boolean, default:false} have the server send all the documents at once as getMore packets, not recommended. + - **batchSize** {Number, default:0}, set the batchSize for the getMoreCommand when iterating over the query results. + - **returnKey** {Boolean, default:false}, only return the index key. + - **maxScan** {Number}, Limit the number of items to scan. + - **min** {Number}, Set index bounds. + - **max** {Number}, Set index bounds. + - **showDiskLoc** {Boolean, default:false}, Show disk location of results. + - **comment** {String}, You can put a $comment field on a query to make looking in the profiler logs simpler. + - **raw** {Boolean, default:false}, Return all BSON documents as Raw Buffer documents. + - **readPreference** {String}, the preferred read preference (Server.PRIMARY, Server.PRIMARY_PREFERRED, Server.SECONDARY, Server.SECONDARY_PREFERRED, Server.NEAREST). + - **numberOfRetries** {Number, default:5}, if using awaitdata specifies the number of times to retry on timeout. + - **partial** {Boolean, default:false}, specify if the cursor should return partial results when querying against a sharded system + +.. js:function:: find(query[, options], callback) + + :param object query: query object used to select the documents to return. + :param object [options]: additional options for the query. + :param function callback: this will be called after executing this method. The first parameter will contain the Error object if an error occurred, or null otherwise. While the second parameter will contain the results from the find method or null if an error occurred. + :returns: cursor returns a cursor to the query + + +**Examples** + + + + A simple query using the find method on the collection. + + .. code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('localhost', 27017)); + // Establish connection to db + db.open(function(err, db) { + + // Create a collection we want to drop later + db.createCollection('simple_query', function(err, collection) { + assert.equal(null, err); + + // Insert a bunch of documents for the testing + collection.insert([{a:1}, {a:2}, {a:3}], {w:1}, function(err, result) { + assert.equal(null, err); + + // Perform a simple find and return all the documents + collection.find().toArray(function(err, docs) { + assert.equal(null, err); + assert.equal(3, docs.length); + + db.close(); + }); + }); + }); + }); + + + + A simple query showing the explain for a query + + .. 
code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + + // Create a collection we want to drop later + db.createCollection('simple_explain_query', function(err, collection) { + assert.equal(null, err); + + // Insert a bunch of documents for the testing + collection.insert([{a:1}, {a:2}, {a:3}], {w:1}, function(err, result) { + assert.equal(null, err); + + // Peform a simple find and return all the documents + collection.find({}, {explain:true}).toArray(function(err, docs) { + assert.equal(null, err); + assert.equal(1, docs.length); + + db.close(); + }); + }); + }); + }); + + + + A simple query showing skip and limit + + .. code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + + // Create a collection we want to drop later + db.createCollection('simple_limit_skip_query', function(err, collection) { + assert.equal(null, err); + + // Insert a bunch of documents for the testing + collection.insert([{a:1, b:1}, {a:2, b:2}, {a:3, b:3}], {w:1}, function(err, result) { + assert.equal(null, err); + + // Peform a simple find and return all the documents + collection.find({}, {skip:1, limit:1, fields:{b:1}}).toArray(function(err, docs) { + assert.equal(null, err); + assert.equal(1, docs.length); + assert.equal(null, docs[0].a); + assert.equal(2, docs[0].b); + + db.close(); + }); + }); + }); + }); + + +------- +findOne +------- + + +Finds a single document based on the query + + + +Various argument possibilities + - callback? + - selector, callback?, + - selector, fields, callback? + - selector, options, callback? + - selector, fields, options, callback? + - selector, fields, skip, limit, callback? + - selector, fields, skip, limit, timeout, callback? + + + +Options + - **limit** {Number, default:0}, sets the limit of documents returned in the query. + - **sort** {Array | Object}, set to sort the documents coming back from the query. Array of indexes, [['a', 1]] etc. + - **fields** {Object}, the fields to return in the query. Object of fields to include or exclude (not both), {'a':1} + - **skip** {Number, default:0}, set to skip N documents ahead in your query (useful for pagination). + - **hint** {Object}, tell the query to use specific indexes in the query. Object of indexes to use, {'_id':1} + - **explain** {Boolean, default:false}, explain the query instead of returning the data. + - **snapshot** {Boolean, default:false}, snapshot query. + - **timeout** {Boolean, default:false}, specify if the cursor can timeout. + - **tailable** {Boolean, default:false}, specify if the cursor is tailable. 
+ - **batchSize** {Number, default:0}, set the batchSize for the getMoreCommand when iterating over the query results. + - **returnKey** {Boolean, default:false}, only return the index key. + - **maxScan** {Number}, Limit the number of items to scan. + - **min** {Number}, Set index bounds. + - **max** {Number}, Set index bounds. + - **showDiskLoc** {Boolean, default:false}, Show disk location of results. + - **comment** {String}, You can put a $comment field on a query to make looking in the profiler logs simpler. + - **raw** {Boolean, default:false}, Return all BSON documents as Raw Buffer documents. + - **readPreference** {String}, the preferred read preference (Server.PRIMARY, Server.PRIMARY_PREFERRED, Server.SECONDARY, Server.SECONDARY_PREFERRED, Server.NEAREST). + - **partial** {Boolean, default:false}, specify if the cursor should return partial results when querying against a sharded system + +.. js:function:: findOne(query[, options], callback) + + :param object query: query object to locate the object to modify + :param object [options]: additional options during update. + :param function callback: this will be called after executing this method. The first parameter will contain the Error object if an error occured, or null otherwise. While the second parameter will contain the results from the findOne method or null if an error occured. + :returns: cursor returns a cursor to the query + + +**Examples** + + + + A simple query using findOne + + .. code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + + // Create a collection we want to drop later + db.createCollection('simple_limit_skip_find_one_query', function(err, collection) { + assert.equal(null, err); + + // Insert a bunch of documents for the testing + collection.insert([{a:1, b:1}, {a:2, b:2}, {a:3, b:3}], {w:1}, function(err, result) { + assert.equal(null, err); + + // Peform a simple find and return all the documents + collection.findOne({a:2}, {fields:{b:1}}, function(err, doc) { + assert.equal(null, err); + assert.equal(null, doc.a); + assert.equal(2, doc.b); + + db.close(); + }); + }); + }); + }); + + +----------- +createIndex +----------- + + +Creates an index on the collection. + + + +Options + - **w**, {Number/String, > -1 || 'majority' || tag name} the write concern for the operation where < 1 is no acknowlegement of write and w >= 1, w = 'majority' or tag acknowledges the write + - **wtimeout**, {Number, 0} set the timeout for waiting for write concern to finish (combines with w option) + - **fsync**, (Boolean, default:false) write waits for fsync before returning + - **journal**, (Boolean, default:false) write waits for journal sync before returning + - **unique** {Boolean, default:false}, creates an unique index. + - **sparse** {Boolean, default:false}, creates a sparse index. + - **background** {Boolean, default:false}, creates the index in the background, yielding whenever possible. + - **dropDups** {Boolean, default:false}, a unique index cannot be created on a key that has pre-existing duplicate values. 
If you would like to create the index anyway, keeping the first document the database indexes and deleting all subsequent documents that have duplicate value + - **min** {Number}, for geospatial indexes set the lower bound for the co-ordinates. + - **max** {Number}, for geospatial indexes set the high bound for the co-ordinates. + - **v** {Number}, specify the format version of the indexes. + - **expireAfterSeconds** {Number}, allows you to expire data on indexes applied to a data (MongoDB 2.2 or higher) + - **name** {String}, override the autogenerated index name (useful if the resulting name is larger than 128 bytes) + + + +Deprecated Options + - **safe** {true | {w:n, wtimeout:n} | {fsync:true}, default:false}, executes with a getLastError command returning the results of the command on MongoDB. + +.. js:function:: createIndex(fieldOrSpec[, options], callback) + + :param object fieldOrSpec: fieldOrSpec that defines the index. + :param object [options]: additional options during update. + :param function callback: this will be called after executing this method. The first parameter will contain the Error object if an error occured, or null otherwise. While the second parameter will contain the results from the createIndex method or null if an error occured. + :returns: null + + +**Examples** + + + + A simple createIndex using a simple single field index + + .. code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + + // Create a collection we want to drop later + db.createCollection('simple_index_test', function(err, collection) { + assert.equal(null, err); + + // Insert a bunch of documents for the index + collection.insert([{a:1}, {a:2}, {a:3}, {a:4}], {w:1}, function(err, result) { + assert.equal(null, err); + + // Create an index on the a field + collection.createIndex('a', {w:1}, function(err, indexName) { + assert.equal("a_1", indexName); + + // Peform a query, with explain to show we hit the query + collection.find({a:2}, {explain:true}).toArray(function(err, explanation) { + assert.deepEqual([[2, 2]], explanation[0].indexBounds.a); + + db.close(); + }); + }); + }); + }); + }); + + + + A more complex createIndex using a compound unique index in the background and dropping duplicated documents + + .. 
code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + + // Create a collection we want to drop later + db.createCollection('more_complex_index_test', function(err, collection) { + assert.equal(null, err); + + // Insert a bunch of documents for the index + collection.insert([{a:1, b:1}, {a:1, b:1} + , {a:2, b:2}, {a:3, b:3}, {a:4, b:4}], {w:1}, function(err, result) { + assert.equal(null, err); + + // Create an index on the a field + collection.createIndex({a:1, b:1} + , {unique:true, background:true, dropDups:true, w:1}, function(err, indexName) { + + // Show that duplicate records got dropped + collection.find({}).toArray(function(err, items) { + assert.equal(null, err); + assert.equal(4, items.length); + + // Peform a query, with explain to show we hit the query + collection.find({a:2}, {explain:true}).toArray(function(err, explanation) { + assert.equal(null, err); + assert.ok(explanation[0].indexBounds.a != null); + assert.ok(explanation[0].indexBounds.b != null); + + db.close(); + }); + }) + }); + }); + }); + }); + + +----------- +ensureIndex +----------- + + +Ensures that an index exists, if it does not it creates it + + + +Options + - **w**, {Number/String, > -1 || 'majority' || tag name} the write concern for the operation where < 1 is no acknowlegement of write and w >= 1, w = 'majority' or tag acknowledges the write + - **wtimeout**, {Number, 0} set the timeout for waiting for write concern to finish (combines with w option) + - **fsync**, (Boolean, default:false) write waits for fsync before returning + - **journal**, (Boolean, default:false) write waits for journal sync before returning + - **unique** {Boolean, default:false}, creates an unique index. + - **sparse** {Boolean, default:false}, creates a sparse index. + - **background** {Boolean, default:false}, creates the index in the background, yielding whenever possible. + - **dropDups** {Boolean, default:false}, a unique index cannot be created on a key that has pre-existing duplicate values. If you would like to create the index anyway, keeping the first document the database indexes and deleting all subsequent documents that have duplicate value + - **min** {Number}, for geospatial indexes set the lower bound for the co-ordinates. + - **max** {Number}, for geospatial indexes set the high bound for the co-ordinates. + - **v** {Number}, specify the format version of the indexes. + - **expireAfterSeconds** {Number}, allows you to expire data on indexes applied to a data (MongoDB 2.2 or higher) + - **name** {String}, override the autogenerated index name (useful if the resulting name is larger than 128 bytes) + + + +Deprecated Options + - **safe** {true | {w:n, wtimeout:n} | {fsync:true}, default:false}, executes with a getLastError command returning the results of the command on MongoDB. + +.. js:function:: ensureIndex(fieldOrSpec[, options], callback) + + :param object fieldOrSpec: fieldOrSpec that defines the index. + :param object [options]: additional options during update. 
+ :param function callback: this will be called after executing this method. The first parameter will contain the Error object if an error occured, or null otherwise. While the second parameter will contain the results from the ensureIndex method or null if an error occured. + :returns: null + + +**Examples** + + + + A more complex ensureIndex using a compound unique index in the background and dropping duplicated documents. + + .. code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + + // Create a collection we want to drop later + db.createCollection('more_complex_ensure_index_test', function(err, collection) { + assert.equal(null, err); + + // Insert a bunch of documents for the index + collection.insert([{a:1, b:1}, {a:1, b:1} + , {a:2, b:2}, {a:3, b:3}, {a:4, b:4}], {w:1}, function(err, result) { + assert.equal(null, err); + + // Create an index on the a field + collection.ensureIndex({a:1, b:1} + , {unique:true, background:true, dropDups:true, w:1}, function(err, indexName) { + + // Show that duplicate records got dropped + collection.find({}).toArray(function(err, items) { + assert.equal(null, err); + assert.equal(4, items.length); + + // Peform a query, with explain to show we hit the query + collection.find({a:2}, {explain:true}).toArray(function(err, explanation) { + assert.equal(null, err); + assert.ok(explanation[0].indexBounds.a != null); + assert.ok(explanation[0].indexBounds.b != null); + + db.close(); + }); + }) + }); + }); + }); + }); + + +---------------- +indexInformation +---------------- + + +Retrieves this collections index info. + + + +Options + - **full** {Boolean, default:false}, returns the full raw index information. + +.. js:function:: indexInformation([options], callback) + + :param object [options]: additional options during update. + :param function callback: this will be called after executing this method. The first parameter will contain the Error object if an error occured, or null otherwise. While the second parameter will contain the results from the indexInformation method or null if an error occured. + :returns: null + + +**Examples** + + + + An examples showing the information returned by indexInformation + + .. 
code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + + // Create a collection we want to drop later + db.createCollection('more_index_information_test', function(err, collection) { + assert.equal(null, err); + + // Insert a bunch of documents for the index + collection.insert([{a:1, b:1}, {a:1, b:1} + , {a:2, b:2}, {a:3, b:3}, {a:4, b:4}], {w:1}, function(err, result) { + assert.equal(null, err); + + // Create an index on the a field + collection.ensureIndex({a:1, b:1} + , {unique:true, background:true, dropDups:true, w:1}, function(err, indexName) { + + // Fetch basic indexInformation for collection + collection.indexInformation(function(err, indexInformation) { + assert.deepEqual([ [ '_id', 1 ] ], indexInformation._id_); + assert.deepEqual([ [ 'a', 1 ], [ 'b', 1 ] ], indexInformation.a_1_b_1); + + // Fetch full index information + collection.indexInformation({full:true}, function(err, indexInformation) { + assert.deepEqual({ _id: 1 }, indexInformation[0].key); + assert.deepEqual({ a: 1, b: 1 }, indexInformation[1].key); + + db.close(); + }); + }); + }); + }); + }); + }); + + +--------- +dropIndex +--------- + + +Drops an index from this collection. + +.. js:function:: dropIndex(name, callback) + + :param string name: + :param function callback: this will be called after executing this method. The first parameter will contain the Error object if an error occured, or null otherwise. While the second parameter will contain the results from the dropIndex method or null if an error occured. + :returns: null + + +**Examples** + + + + An examples showing the creation and dropping of an index + + .. 
code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + + // Create a collection we want to drop later + db.createCollection('create_and_drop_an_index', function(err, collection) { + assert.equal(null, err); + + // Insert a bunch of documents for the index + collection.insert([{a:1, b:1}, {a:1, b:1} + , {a:2, b:2}, {a:3, b:3}, {a:4, b:4}], {w:1}, function(err, result) { + assert.equal(null, err); + + // Create an index on the a field + collection.ensureIndex({a:1, b:1} + , {unique:true, background:true, dropDups:true, w:1}, function(err, indexName) { + + // Drop the index + collection.dropIndex("a_1_b_1", function(err, result) { + assert.equal(null, err); + + // Verify that the index is gone + collection.indexInformation(function(err, indexInformation) { + assert.deepEqual([ [ '_id', 1 ] ], indexInformation._id_); + assert.equal(null, indexInformation.a_1_b_1); + + db.close(); + }); + }); + }); + }); + }); + }); + + +-------------- +dropAllIndexes +-------------- + + +Drops all indexes from this collection. + +.. js:function:: dropAllIndexes(callback) + + :param function callback: this will be called after executing this method. The first parameter will contain the Error object if an error occured, or null otherwise. While the second parameter will contain the results from the dropAllIndexes method or null if an error occured. + :returns: null + + +------- +reIndex +------- + + +Reindex all indexes on the collection +Warning: reIndex is a blocking operation (indexes are rebuilt in the foreground) and will be slow for large collections. + +.. js:function:: reIndex(callback) + + :param function callback: this will be called after executing this method. The first parameter will contain the Error object if an error occured, or null otherwise. While the second parameter will contain the results from the reIndex method or null if an error occured. + :returns: null + + +**Examples** + + + + An example showing how to force a reindex of a collection. + + .. 
code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + + // Create a collection we want to drop later + db.createCollection('shouldCorrectlyForceReindexOnCollection', function(err, collection) { + assert.equal(null, err); + + // Insert a bunch of documents for the index + collection.insert([{a:1, b:1}, {a:1, b:1} + , {a:2, b:2}, {a:3, b:3}, {a:4, b:4, c:4}], {w:1}, function(err, result) { + assert.equal(null, err); + + // Create an index on the a field + collection.ensureIndex({a:1, b:1} + , {unique:true, background:true, dropDups:true, w:1}, function(err, indexName) { + + // Force a reindex of the collection + collection.reIndex(function(err, result) { + assert.equal(null, err); + assert.equal(true, result); + + // Verify that the index is gone + collection.indexInformation(function(err, indexInformation) { + assert.deepEqual([ [ '_id', 1 ] ], indexInformation._id_); + assert.deepEqual([ [ 'a', 1 ], [ 'b', 1 ] ], indexInformation.a_1_b_1); + + db.close(); + }); + }); + }); + }); + }); + }); + + +--------- +mapReduce +--------- + + +Run Map Reduce across a collection. Be aware that the inline option for out will return an array of results not a collection. + + + +Options + - **out** {Object, default:*{inline:1}*}, sets the output target for the map reduce job. *{inline:1} | {replace:'collectionName'} | {merge:'collectionName'} | {reduce:'collectionName'}* + - **query** {Object}, query filter object. + - **sort** {Object}, sorts the input objects using this key. Useful for optimization, like sorting by the emit key for fewer reduces. + - **limit** {Number}, number of objects to return from collection. + - **keeptemp** {Boolean, default:false}, keep temporary data. + - **finalize** {Function | String}, finalize function. + - **scope** {Object}, can pass in variables that can be access from map/reduce/finalize. + - **jsMode** {Boolean, default:false}, it is possible to make the execution stay in JS. Provided in MongoDB > 2.0.X. + - **verbose** {Boolean, default:false}, provide statistics on job execution time. + - **readPreference** {String, only for inline results}, the preferred read preference (Server.PRIMARY, Server.PRIMARY_PREFERRED, Server.SECONDARY, Server.SECONDARY_PREFERRED, Server.NEAREST). + +.. js:function:: mapReduce(map, reduce[, options], callback) + + :param function map: the mapping function. + :param function reduce: the reduce function. + :param objects [options]: options for the map reduce job. + :param function callback: this will be called after executing this method. The first parameter will contain the Error object if an error occured, or null otherwise. While the second parameter will contain the results from the mapReduce method or null if an error occured. + :returns: null + + +**Examples** + + + + A simple map reduce example + + .. 
code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + + // Create a test collection + db.createCollection('test_map_reduce_functions', function(err, collection) { + + // Insert some documents to perform map reduce over + collection.insert([{'user_id':1}, {'user_id':2}], {w:1}, function(err, r) { + + // Map function + var map = function() { emit(this.user_id, 1); }; + // Reduce function + var reduce = function(k,vals) { return 1; }; + + // Peform the map reduce + collection.mapReduce(map, reduce, {out: {replace : 'tempCollection'}}, function(err, collection) { + // Mapreduce returns the temporary collection with the results + collection.findOne({'_id':1}, function(err, result) { + assert.equal(1, result.value); + + collection.findOne({'_id':2}, function(err, result) { + assert.equal(1, result.value); + + db.close(); + }); + }); + }); + }); + }); + }); + + + + A simple map reduce example using the inline output type on MongoDB > 1.7.6 returning the statistics + + .. code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + + // Parse version of server if available + db.admin().serverInfo(function(err, result){ + + // Only run if the MongoDB version is higher than 1.7.6 + if(parseInt((result.version.replace(/\./g, ''))) >= 176) { + + // Create a test collection + db.createCollection('test_map_reduce_functions_inline', function(err, collection) { + + // Insert some test documents + collection.insert([{'user_id':1}, {'user_id':2}], {w:1}, function(err, r) { + + // Map function + var map = function() { emit(this.user_id, 1); }; + // Reduce function + var reduce = function(k,vals) { return 1; }; + + // Execute map reduce and return results inline + collection.mapReduce(map, reduce, {out : {inline: 1}, verbose:true}, function(err, results, stats) { + assert.equal(2, results.length); + assert.ok(stats != null); + + collection.mapReduce(map, reduce, {out : {replace: 'mapreduce_integration_test'}, verbose:true}, function(err, results, stats) { + assert.ok(stats != null); + db.close(); + }); + }); + }); + }); + } else { + test.done(); + } + }); + }); + + + + Mapreduce different test with a provided scope containing a javascript function. + + .. 
code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + + // Create a test collection + db.createCollection('test_map_reduce_functions_scope', function(err, collection) { + + // Insert some test documents + collection.insert([{'user_id':1, 'timestamp':new Date()} + , {'user_id':2, 'timestamp':new Date()}], {w:1}, function(err, r) { + + // Map function + var map = function(){ + emit(fn(this.timestamp.getYear()), 1); + } + + // Reduce function + var reduce = function(k, v){ + count = 0; + for(i = 0; i < v.length; i++) { + count += v[i]; + } + return count; + } + + // Javascript function available in the map reduce scope + var t = function(val){ return val+1; } + + // Execute the map reduce with the custom scope + var o = {}; + o.scope = { fn: new Code(t.toString()) } + o.out = { replace: 'replacethiscollection' } + + collection.mapReduce(map, reduce, o, function(err, outCollection) { + assert.equal(null, err); + + // Find all entries in the map-reduce collection + outCollection.find().toArray(function(err, results) { + assert.equal(null, err); + assert.equal(2, results[0].value) + + // mapReduce with scope containing plain function + var o = {}; + o.scope = { fn: t } + o.out = { replace: 'replacethiscollection' } + + collection.mapReduce(map, reduce, o, function(err, outCollection) { + assert.equal(null, err); + + // Find all entries in the map-reduce collection + outCollection.find().toArray(function(err, results) { + assert.equal(2, results[0].value) + db.close(); + }); + }); + }); + }); + }); + }); + }); + + +----- +group +----- + + +Run a group command across a collection + + + +Options + - **readPreference** {String}, the preferred read preference (Server.PRIMARY, Server.PRIMARY_PREFERRED, Server.SECONDARY, Server.SECONDARY_PREFERRED, Server.NEAREST). + +.. js:function:: group(keys, condition, initial, reduce, finalize, command[, options], callback) + + :param object keys: an object, array or function expressing the keys to group by. + :param object condition: an optional condition that must be true for a row to be considered. + :param object initial: initial value of the aggregation counter object. + :param function reduce: the reduce function aggregates (reduces) the objects iterated + :param function finalize: an optional function to be run on each item in the result set just before the item is returned. + :param boolean command: specify if you wish to run using the internal group command or using eval, default is true. + :param object [options]: additional options during update. + :param function callback: this will be called after executing this method. The first parameter will contain the Error object if an error occured, or null otherwise. While the second parameter will contain the results from the group method or null if an error occured. + :returns: null + + +**Examples** + + + + A whole lot of different wayt to execute the group command + + .. 
code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + + // Create a test collection + db.createCollection('test_group', function(err, collection) { + + // Peform a simple group by on an empty collection + collection.group([], {}, {"count":0}, "function (obj, prev) { prev.count++; }", function(err, results) { + assert.deepEqual([], results); + + // Trigger some inserts on the collection + collection.insert([{'a':2}, {'b':5}, {'a':1}], {w:1}, function(err, ids) { + + // Perform a group count + collection.group([], {}, {"count":0}, "function (obj, prev) { prev.count++; }", function(err, results) { + assert.equal(3, results[0].count); + + // Pefrom a group count using the eval method + collection.group([], {}, {"count":0}, "function (obj, prev) { prev.count++; }", false, function(err, results) { + assert.equal(3, results[0].count); + + // Group with a conditional + collection.group([], {'a':{'$gt':1}}, {"count":0}, "function (obj, prev) { prev.count++; }", function(err, results) { + // Results + assert.equal(1, results[0].count); + + // Group with a conditional using the EVAL method + collection.group([], {'a':{'$gt':1}}, {"count":0}, "function (obj, prev) { prev.count++; }" , false, function(err, results) { + // Results + assert.equal(1, results[0].count); + + // Insert some more test data + collection.insert([{'a':2}, {'b':3}], {w:1}, function(err, ids) { + + // Do a Group by field a + collection.group(['a'], {}, {"count":0}, "function (obj, prev) { prev.count++; }", function(err, results) { + // Results + assert.equal(2, results[0].a); + assert.equal(2, results[0].count); + assert.equal(null, results[1].a); + assert.equal(2, results[1].count); + assert.equal(1, results[2].a); + assert.equal(1, results[2].count); + + // Do a Group by field a + collection.group({'a':true}, {}, {"count":0}, function (obj, prev) { prev.count++; }, true, function(err, results) { + // Results + assert.equal(2, results[0].a); + assert.equal(2, results[0].count); + assert.equal(null, results[1].a); + assert.equal(2, results[1].count); + assert.equal(1, results[2].a); + assert.equal(1, results[2].count); + + // Correctly handle illegal function + collection.group([], {}, {}, "5 ++ 5", function(err, results) { + assert.ok(err.message != null); + + // Use a function to select the keys used to group by + var keyf = function(doc) { return {a: doc.a}; }; + collection.group(keyf, {a: {$gt: 0}}, {"count": 0, "value": 0}, function(obj, prev) { prev.count++; prev.value += obj.a; }, true, function(err, results) { + // Results + results.sort(function(a, b) { return b.count - a.count; }); + assert.equal(2, results[0].count); + assert.equal(2, results[0].a); + assert.equal(4, results[0].value); + assert.equal(1, results[1].count); + assert.equal(1, results[1].a); + assert.equal(1, results[1].value); + + // Use a Code object to select the keys used to group by + var keyf = new Code(function(doc) { return {a: doc.a}; }); + collection.group(keyf, {a: {$gt: 0}}, {"count": 0, "value": 0}, function(obj, prev) { prev.count++; prev.value += obj.a; 
}, true, function(err, results) { + // Results + results.sort(function(a, b) { return b.count - a.count; }); + assert.equal(2, results[0].count); + assert.equal(2, results[0].a); + assert.equal(4, results[0].value); + assert.equal(1, results[1].count); + assert.equal(1, results[1].a); + assert.equal(1, results[1].value); + + // Correctly handle illegal function when using the EVAL method + collection.group([], {}, {}, "5 ++ 5", false, function(err, results) { + assert.ok(err.message != null); + + db.close(); + }); + }); + }); + }); + }); + }); + }); + }); + }); + }); + }); + }); + }); + }); + }); + + +------- +options +------- + + +Returns the options of the collection. + +.. js:function:: options(callback) + + :param function callback: this will be called after executing this method. The first parameter will contain the Error object if an error occured, or null otherwise. While the second parameter will contain the results from the options method or null if an error occured. + :returns: null + + +**Examples** + + + + An example returning the options for a collection. + + .. code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + + // Create a test collection that we are getting the options back from + db.createCollection('test_collection_options', {'capped':true, 'size':1024}, function(err, collection) { + assert.ok(collection instanceof Collection); + assert.equal('test_collection_options', collection.collectionName); + + // Let's fetch the collection options + collection.options(function(err, options) { + assert.equal(true, options.capped); + assert.equal(1024, options.size); + assert.equal("test_collection_options", options.create); + + db.close(); + }); + }); + }); + + +-------- +isCapped +-------- + + +Returns if the collection is a capped collection + +.. js:function:: isCapped(callback) + + :param function callback: this will be called after executing this method. The first parameter will contain the Error object if an error occured, or null otherwise. While the second parameter will contain the results from the isCapped method or null if an error occured. + :returns: null + + +**Examples** + + + + An example showing how to establish if it's a capped collection + + .. 
code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + + // Create a test collection that we are getting the options back from + db.createCollection('test_collection_is_capped', {'capped':true, 'size':1024}, function(err, collection) { + assert.ok(collection instanceof Collection); + assert.equal('test_collection_is_capped', collection.collectionName); + + // Let's fetch the collection options + collection.isCapped(function(err, capped) { + assert.equal(true, capped); + + db.close(); + }); + }); + }); + + +----------- +indexExists +----------- + + +Checks if one or more indexes exist on the collection + +.. js:function:: indexExists(indexNames, callback) + + :param string indexNames: check if one or more indexes exist on the collection. + :param function callback: this will be called after executing this method. The first parameter will contain the Error object if an error occured, or null otherwise. While the second parameter will contain the results from the indexExists method or null if an error occured. + :returns: null + + +**Examples** + + + + An example showing the use of the indexExists function for a single index name and a list of index names. + + .. code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + + // Create a test collection that we are getting the options back from + db.createCollection('test_collection_index_exists', {w: 1}, function(err, collection) { + assert.equal(null, err); + + // Create an index on the collection + collection.createIndex('a', {w: 1}, function(err, indexName) { + + // Let's test to check if a single index exists + collection.indexExists("a_1", function(err, result) { + assert.equal(true, result); + + // Let's test to check if multiple indexes are available + collection.indexExists(["a_1", "_id_"], function(err, result) { + assert.equal(true, result); + + // Check if a non existing index exists + collection.indexExists("c_1", function(err, result) { + assert.equal(false, result); + + db.close(); + }); + }); + }); + }); + }); + }); + + +------- +geoNear +------- + + +Execute the geoNear command to search for items in the collection + + + +Options + - **num** {Number}, max number of results to return. + - **maxDistance** {Number}, include results up to maxDistance from the point. + - **distanceMultiplier** {Number}, include a value to multiply the distances with allowing for range conversions. + - **query** {Object}, filter the results by a query. + - **spherical** {Boolean, default:false}, perform query using a spherical model. 
+ - **uniqueDocs** {Boolean, default:false}, the closest location in a document to the center of the search region will always be returned MongoDB > 2.X. + - **includeLocs** {Boolean, default:false}, include the location data fields in the top level of the results MongoDB > 2.X. + - **readPreference** {String}, the preferred read preference ((Server.PRIMARY, Server.PRIMARY_PREFERRED, Server.SECONDARY, Server.SECONDARY_PREFERRED, Server.NEAREST). + +.. js:function:: geoNear(x, y[, options], callback) + + :param number x: point to search on the x axis, ensure the indexes are ordered in the same order. + :param number y: point to search on the y axis, ensure the indexes are ordered in the same order. + :param objects [options]: options for the map reduce job. + :param function callback: this will be called after executing this method. The first parameter will contain the Error object if an error occured, or null otherwise. While the second parameter will contain the results from the geoNear method or null if an error occured. + :returns: null + + +**Examples** + + + + Example of a simple geoNear query across some documents + + .. code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = configuration.newDbInstance({w:0}, {poolSize:1}); + + // Establish connection to db + db.open(function(err, db) { + + // Fetch the collection + var collection = db.collection("simple_geo_near_command"); + + // Add a location based index + collection.ensureIndex({loc:"2d"}, function(err, result) { + + // Save a new location tagged document + collection.insert([{a:1, loc:[50, 30]}, {a:1, loc:[30, 50]}], {w:1}, function(err, result) { + + // Use geoNear command to find document + collection.geoNear(50, 50, {query:{a:1}, num:1}, function(err, docs) { + assert.equal(1, docs.results.length); + + db.close(); + }); + }); + }); + }); + + +----------------- +geoHaystackSearch +----------------- + + +Execute a geo search using a geo haystack index on a collection. + + + +Options + - **maxDistance** {Number}, include results up to maxDistance from the point. + - **search** {Object}, filter the results by a query. + - **limit** {Number}, max number of results to return. + - **readPreference** {String}, the preferred read preference ((Server.PRIMARY, Server.PRIMARY_PREFERRED, Server.SECONDARY, Server.SECONDARY_PREFERRED, Server.NEAREST). + +.. js:function:: geoHaystackSearch(x, y[, options], callback) + + :param number x: point to search on the x axis, ensure the indexes are ordered in the same order. + :param number y: point to search on the y axis, ensure the indexes are ordered in the same order. + :param objects [options]: options for the map reduce job. + :param function callback: this will be called after executing this method. The first parameter will contain the Error object if an error occured, or null otherwise. While the second parameter will contain the results from the geoHaystackSearch method or null if an error occured. + :returns: null + + +**Examples** + + + + Example of a simple geoHaystackSearch query across some documents + + .. 
code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + + // Fetch the collection + var collection = db.collection("simple_geo_haystack_command"); + + // Add a location based index + collection.ensureIndex({loc: "geoHaystack", type: 1}, {bucketSize: 1}, function(err, result) { + + // Save a new location tagged document + collection.insert([{a:1, loc:[50, 30]}, {a:1, loc:[30, 50]}], {w:1}, function(err, result) { + + // Use geoNear command to find document + collection.geoHaystackSearch(50, 50, {search:{a:1}, limit:1, maxDistance:100}, function(err, docs) { + assert.equal(1, docs.results.length); + + db.close(); + }); + }); + }); + }); + + +------- +indexes +------- + + +Retrieve all the indexes on the collection. + +.. js:function:: indexes(callback) + + :param function callback: this will be called after executing this method. The first parameter will contain the Error object if an error occured, or null otherwise. While the second parameter will contain the results from the indexes method or null if an error occured. + :returns: null + + +**Examples** + + + + Example of retrieving a collections indexes + + .. code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + + // Crete the collection for the distinct example + db.createCollection('simple_key_based_distinct', function(err, collection) { + + // Create a geo 2d index + collection.ensureIndex({loc:"2d"}, {w: 1}, function(err, result) { + assert.equal(null, err); + + // Create a simple single field index + collection.ensureIndex({a:1}, {w: 1}, function(err, result) { + assert.equal(null, err); + + // List all of the indexes on the collection + collection.indexes(function(err, indexes) { + assert.equal(3, indexes.length); + + db.close(); + }); + }) + }) + }); + }); + + +--------- +aggregate +--------- + + +Execute an aggregation framework pipeline against the collection, needs MongoDB >= 2.1 + + + +Options + - **readPreference** {String}, the preferred read preference ((Server.PRIMARY, Server.PRIMARY_PREFERRED, Server.SECONDARY, Server.SECONDARY_PREFERRED, Server.NEAREST). + +.. js:function:: aggregate(array[, options], callback) + + :param array array: containing all the aggregation framework commands for the execution. + :param object [options]: additional options during update. + :param function callback: this will be called after executing this method. The first parameter will contain the Error object if an error occured, or null otherwise. While the second parameter will contain the results from the aggregate method or null if an error occured. 
+ :returns: null + + +**Examples** + + + + Correctly call the aggregation framework using a pipeline in an Array. + + .. code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + // Some docs for insertion + var docs = [{ + title : "this is my title", author : "bob", posted : new Date() , + pageViews : 5, tags : [ "fun" , "good" , "fun" ], other : { foo : 5 }, + comments : [ + { author :"joe", text : "this is cool" }, { author :"sam", text : "this is bad" } + ]}]; + + // Validate that we are running on at least version 2.1 of MongoDB + db.admin().serverInfo(function(err, result){ + + if(parseInt((result.version.replace(/\./g, ''))) >= 210) { + // Create a collection + var collection = db.collection('shouldCorrectlyExecuteSimpleAggregationPipelineUsingArray'); + // Insert the docs + collection.insert(docs, {w: 1}, function(err, result) { + + // Execute aggregate, notice the pipeline is expressed as an Array + collection.aggregate([ + { $project : { + author : 1, + tags : 1 + }}, + { $unwind : "$tags" }, + { $group : { + _id : {tags : "$tags"}, + authors : { $addToSet : "$author" } + }} + ], function(err, result) { + assert.equal(null, err); + assert.equal('good', result[0]._id.tags); + assert.deepEqual(['bob'], result[0].authors); + assert.equal('fun', result[1]._id.tags); + assert.deepEqual(['bob'], result[1].authors); + + db.close(); + }); + }); + } else { + db.close(); + test.done(); + } + }); + }); + + + + Correctly call the aggregation framework using a pipeline expressed as an argument list. + + .. code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + // Some docs for insertion + var docs = [{ + title : "this is my title", author : "bob", posted : new Date() , + pageViews : 5, tags : [ "fun" , "good" , "fun" ], other : { foo : 5 }, + comments : [ + { author :"joe", text : "this is cool" }, { author :"sam", text : "this is bad" } + ]}]; + + // Validate that we are running on at least version 2.1 of MongoDB + db.admin().serverInfo(function(err, result){ + + if(parseInt((result.version.replace(/\./g, ''))) >= 210) { + // Create a collection + var collection = db.collection('shouldCorrectlyExecuteSimpleAggregationPipelineUsingArguments'); + // Insert the docs + collection.insert(docs, {w: 1}, function(err, result) { + // Execute aggregate, notice the pipeline is expressed as function call parameters + // instead of an Array. 
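+ // Pipeline stages: $project passes through the author and tags fields,
+ // $unwind emits one document per element of the tags array, and
+ // $group collects the distinct set of authors for each tag value.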
+ collection.aggregate( + { $project : { + author : 1, + tags : 1 + }}, + { $unwind : "$tags" }, + { $group : { + _id : {tags : "$tags"}, + authors : { $addToSet : "$author" } + }} + , function(err, result) { + assert.equal(null, err); + assert.equal('good', result[0]._id.tags); + assert.deepEqual(['bob'], result[0].authors); + assert.equal('fun', result[1]._id.tags); + assert.deepEqual(['bob'], result[1].authors); + + db.close(); + }); + }); + } else { + db.close(); + test.done(); + } + }); + }); + + + + Correctly call the aggregation framework using a pipeline expressed as an argument list. + + .. code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + // Some docs for insertion + var docs = [{ + title : "this is my title", author : "bob", posted : new Date() , + pageViews : 5, tags : [ "fun" , "good" , "fun" ], other : { foo : 5 }, + comments : [ + { author :"joe", text : "this is cool" }, { author :"sam", text : "this is bad" } + ]}]; + + // Validate that we are running on at least version 2.1 of MongoDB + db.admin().serverInfo(function(err, result){ + + if(parseInt((result.version.replace(/\./g, ''))) >= 210) { + // Create a collection + var collection = db.collection('shouldCorrectlyExecuteSimpleAggregationPipelineUsingArguments'); + // Insert the docs + collection.insert(docs, {w: 1}, function(err, result) { + // Execute aggregate, notice the pipeline is expressed as function call parameters + // instead of an Array. + collection.aggregate( + { $project : { + author : 1, + tags : 1 + }}, + { $unwind : "$tags" }, + { $group : { + _id : {tags : "$tags"}, + authors : { $addToSet : "$author" } + }} + , function(err, result) { + assert.equal(null, err); + assert.equal('good', result[0]._id.tags); + assert.deepEqual(['bob'], result[0].authors); + assert.equal('fun', result[1]._id.tags); + assert.deepEqual(['bob'], result[1].authors); + + db.close(); + }); + }); + } else { + db.close(); + test.done(); + } + }); + }); + + +----- +stats +----- + + +Get all the collection statistics. + + + +Options + - **scale** {Number}, divide the returned sizes by scale value. + - **readPreference** {String}, the preferred read preference ((Server.PRIMARY, Server.PRIMARY_PREFERRED, Server.SECONDARY, Server.SECONDARY_PREFERRED, Server.NEAREST). + +.. js:function:: stats([options], callback) + + :param objects [options]: options for the stats command. + :param function callback: this will be called after executing this method. The first parameter will contain the Error object if an error occured, or null otherwise. While the second parameter will contain the results from the stats method or null if an error occured. + :returns: null + + +**Examples** + + + + Example of retrieving a collections stats + + .. 
code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + + // Crete the collection for the distinct example + db.createCollection('collection_stats_test', function(err, collection) { + + // Insert some documents + collection.insert([{a:1}, {hello:'world'}], {w: 1}, function(err, result) { + + // Retrieve the statistics for the collection + collection.stats(function(err, stats) { + assert.equal(2, stats.count); + + db.close(); + }); + }); + }); + }); + diff --git a/_sources/api-generated/cursor.txt b/_sources/api-generated/cursor.txt new file mode 100644 index 00000000000..23aae3db37a --- /dev/null +++ b/_sources/api-generated/cursor.txt @@ -0,0 +1,885 @@ +======== +Cursor() +======== + +------------------ +Constructor +------------------ + + +Constructor for a cursor object that handles all the operations on query result +using find. This cursor object is unidirectional and cannot traverse backwards. Clients should not be creating a cursor directly, +but use find to acquire a cursor. (INTERNAL TYPE) + + + .. js:class:: Cursor() + + :param db db: the database object to work with. + :param collection collection: the collection to query. + :param object selector: the query selector. + :param object fields: an object containing what fields to include or exclude from objects returned. + :param object [options]: additional options for the collection. + + +Options + - **skip** {Number} skip number of documents to skip. + - **limit** {Number}, limit the number of results to return. -1 has a special meaning and is used by Db.eval. A value of 1 will also be treated as if it were -1. + - **sort** {Array | Object}, set to sort the documents coming back from the query. Array of indexes, [['a', 1]] etc. + - **hint** {Object}, hint force the query to use a specific index. + - **explain** {Boolean}, explain return the explaination of the query. + - **snapshot** {Boolean}, snapshot Snapshot mode assures no duplicates are returned. + - **timeout** {Boolean}, timeout allow the query to timeout. + - **tailable** {Boolean}, tailable allow the cursor to be tailable. + - **awaitdata** {Boolean}, awaitdata allow the cursor to wait for data, only applicable for tailable cursor. + - **batchSize** {Number}, batchSize the number of the subset of results to request the database to return for every request. This should initially be greater than 1 otherwise the database will automatically close the cursor. The batch size can be set to 1 with cursorInstance.batchSize after performing the initial query to the database. + - **raw** {Boolean}, raw return all query documents as raw buffers (default false). + - **read** {Boolean}, read specify override of read from source (primary/secondary). + - **slaveOk** {Boolean}, slaveOk, sets the slaveOk flag on the query wire protocol for secondaries. + - **returnKey** {Boolean}, returnKey only return the index key. + - **maxScan** {Number}, maxScan limit the number of items to scan. + - **min** {Number}, min set index bounds. + - **max** {Number}, max set index bounds. 
+ - **showDiskLoc** {Boolean}, showDiskLoc show disk location of results. + - **comment** {String}, comment you can put a $comment field on a query to make looking in the profiler logs simpler. + - **numberOfRetries** {Number}, numberOfRetries if using awaidata specifies the number of times to retry on timeout. + - **dbName** {String}, dbName override the default dbName. + - **tailableRetryInterval** {Number}, tailableRetryInterval specify the miliseconds between getMores on tailable cursor. + - **exhaust** {Boolean}, exhaust have the server send all the documents at once as getMore packets. + - **partial** {Boolean}, partial have the sharded system return a partial result from mongos. + + +------------------ +Constants +------------------ + +.. csv-table:: + :header: "Constant Name", "Value", "Description" + :widths: 15, 10, 30 + + "Cursor.INIT", "0", "Init state" + "Cursor.OPEN", "1", "Cursor open" + "Cursor.CLOSED", "2", "Cursor closed" + "Cursor.GET_MORE", "3", "Cursor performing a get more" + +------ +rewind +------ + + +Resets this cursor to its initial state. All settings like the query string, +tailable, batchSizeValue, skipValue and limits are preserved. + +.. js:function:: rewind() + + :returns: cursor returns itself with rewind applied. + + +**Examples** + + + + An example showing the information returned by indexInformation + + .. code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + var docs = []; + + // Insert 100 documents with some data + for(var i = 0; i < 100; i++) { + var d = new Date().getTime() + i*1000; + docs[i] = {'a':i, createdAt:new Date(d)}; + } + + // Create collection + db.createCollection('Should_correctly_rewind_and_restart_cursor', function(err, collection) { + assert.equal(null, err); + + // insert all docs + collection.insert(docs, {w:1}, function(err, result) { + assert.equal(null, err); + + // Grab a cursor using the find + var cursor = collection.find({}); + // Fetch the first object off the cursor + cursor.nextObject(function(err, item) { + assert.equal(0, item.a) + // Rewind the cursor, resetting it to point to the start of the query + cursor.rewind(); + + // Grab the first object again + cursor.nextObject(function(err, item) { + assert.equal(0, item.a) + + db.close(); + }) + }) + }) + }); + }); + + +------- +toArray +------- + + +Returns an array of documents. The caller is responsible for making sure that there +is enough memory to store the results. Note that the array only contain partial +results when this cursor had been previouly accessed. In that case, +cursor.rewind() can be used to reset the cursor. + +.. js:function:: toArray(callback) + + :param function callback: This will be called after executing this method successfully. The first parameter will contain the Error object if an error occured, or null otherwise. The second parameter will contain an array of BSON deserialized objects as a result of the query. + :returns: null + + +**Examples** + + + + An example showing the information returned by indexInformation + + .. 
code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + + // Create a collection to hold our documents + db.createCollection('test_array', function(err, collection) { + + // Insert a test document + collection.insert({'b':[1, 2, 3]}, {w:1}, function(err, ids) { + + // Retrieve all the documents in the collection + collection.find().toArray(function(err, documents) { + assert.equal(1, documents.length); + assert.deepEqual([1, 2, 3], documents[0].b); + + db.close(); + }); + }); + }); + }); + + +---- +each +---- + + +Iterates over all the documents for this cursor. As with **{cursor.toArray}**, +not all of the elements will be iterated if this cursor had been previouly accessed. +In that case, **{cursor.rewind}** can be used to reset the cursor. However, unlike +**{cursor.toArray}**, the cursor will only hold a maximum of batch size elements +at any given time if batch size is specified. Otherwise, the caller is responsible +for making sure that the entire result can fit the memory. + +.. js:function:: each(callback) + + :param function callback: this will be called for while iterating every document of the query result. The first parameter will contain the Error object if an error occured, or null otherwise. While the second parameter will contain the document. + :returns: null + + +**Examples** + + + + A simple example iterating over a query using the each function of the cursor. + + .. code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + + // Create a collection + db.createCollection('test_to_a_after_each', function(err, collection) { + assert.equal(null, err); + + // Insert a document in the collection + collection.insert({'a':1}, {w:1}, function(err, ids) { + + // Grab a cursor + var cursor = collection.find(); + + // Execute the each command, triggers for each document + cursor.each(function(err, item) { + + // If the item is null then the cursor is exhausted/empty and closed + if(item == null) { + + // Show that the cursor is closed + cursor.toArray(function(err, items) { + assert.ok(err != null); + + // Let's close the db + db.close(); + }); + }; + }); + }); + }); + }); + + +----- +count +----- + + +Determines how many result the query for this cursor will return + +.. js:function:: count(applySkipLimit, callback) + + :param boolean applySkipLimit: if set to true will apply the skip and limits set on the cursor. Defaults to false. + :param function callback: this will be called after executing this method. The first parameter will contain the Error object if an error occured, or null otherwise. 
While the second parameter will contain the number of results or null if an error occured. + :returns: null + + +**Examples** + + + + A simple example showing the count function of the cursor. + + .. code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + + // Creat collection + db.createCollection('cursor_count_collection', function(err, collection) { + assert.equal(null, err); + + // Insert some docs + collection.insert([{a:1}, {a:2}], {w:1}, function(err, docs) { + assert.equal(null, err); + + // Do a find and get the cursor count + collection.find().count(function(err, count) { + assert.equal(null, err); + assert.equal(2, count); + + db.close(); + }) + }); + }); + }); + + +---- +sort +---- + + +Sets the sort parameter of this cursor to the given value. + +**This method has the following method signatures** + + + +(keyOrList, callback) +(keyOrList, direction, callback) + +.. js:function:: sort(keyOrList, direction, callback) + + :param string keyOrList: This can be a string or an array. If passed as a string, the string will be the field to sort. If passed an array, each element will represent a field to be sorted and should be an array that contains the format [string, direction]. + :param string direction: this determines how the results are sorted. "asc", "ascending" or 1 for asceding order while "desc", "desceding or -1 for descending order. Note that the strings are case insensitive. + :param function callback: this will be called after executing this method. The first parameter will contain an error object when the cursor is already closed while the second parameter will contain a reference to this object upon successful execution. + :returns: cursor an instance of this object. + + +**Examples** + + + + A simple example showing the use of sort on the cursor. + + .. 
code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + + // Create a collection + db.createCollection('simple_sort_collection', function(err, collection) { + assert.equal(null, err); + + // Insert some documents we can sort on + collection.insert([{a:1}, {a:2}, {a:3}], {w:1}, function(err, docs) { + assert.equal(null, err); + + // Do normal ascending sort + collection.find().sort([['a', 1]]).nextObject(function(err, item) { + assert.equal(null, err); + assert.equal(1, item.a); + + // Do normal descending sort + collection.find().sort([['a', -1]]).nextObject(function(err, item) { + assert.equal(null, err); + assert.equal(3, item.a); + + db.close(); + }); + }); + }); + }); + }); + + +----- +limit +----- + + +Sets the limit parameter of this cursor to the given value. + +.. js:function:: limit(limit[, callback]) + + :param number limit: the new limit. + :param function [callback]: this optional callback will be called after executing this method. The first parameter will contain an error object when the limit given is not a valid number or when the cursor is already closed while the second parameter will contain a reference to this object upon successful execution. + :returns: cursor an instance of this object. + + +**Examples** + + + + A simple example showing the use of limit on the cursor + + .. code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + + // Create a collection + db.createCollection('simple_limit_collection', function(err, collection) { + assert.equal(null, err); + + // Insert some documents we can sort on + collection.insert([{a:1}, {a:2}, {a:3}], {w:1}, function(err, docs) { + assert.equal(null, err); + + // Limit to only one document returned + collection.find().limit(1).toArray(function(err, items) { + assert.equal(null, err); + assert.equal(1, items.length); + + db.close(); + }); + }); + }); + }); + + +----------------- +setReadPreference +----------------- + + +Sets the read preference for the cursor + +.. js:function:: setReadPreference(the[, callback]) + + :param string the: read preference for the cursor, one of Server.READ_PRIMARY, Server.READ_SECONDARY, Server.READ_SECONDARY_ONLY + :param function [callback]: this optional callback will be called after executing this method. The first parameter will contain an error object when the read preference given is not a valid number or when the cursor is already closed while the second parameter will contain a reference to this object upon successful execution. + :returns: cursor an instance of this object. 
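+
+
+**Examples**
+
+
+
+  A minimal sketch of setting the read preference on a cursor, assuming a standalone server on localhost, so the cursor is pinned to the primary (with a replica set connection, Server.READ_SECONDARY could be passed instead). The collection name is illustrative.
+
+  .. code-block:: javascript
+
+    var Db = require('mongodb').Db,
+      Server = require('mongodb').Server,
+      assert = require('assert');
+
+    var db = new Db('test', new Server('localhost', 27017));
+    // Establish connection to db
+    db.open(function(err, db) {
+
+      // Create a collection to query against
+      db.createCollection('simple_read_preference_collection', function(err, collection) {
+        assert.equal(null, err);
+
+        // Insert a document we can read back
+        collection.insert([{a:1}], {w:1}, function(err, docs) {
+          assert.equal(null, err);
+
+          // Grab a cursor and pin its read preference to the primary
+          var cursor = collection.find({});
+          cursor.setReadPreference(Server.READ_PRIMARY);
+
+          // Execute the query with the chosen read preference
+          cursor.toArray(function(err, items) {
+            assert.equal(null, err);
+            assert.equal(1, items.length);
+
+            db.close();
+          });
+        });
+      });
+    });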
+ + +---- +skip +---- + + +Sets the skip parameter of this cursor to the given value. + +.. js:function:: skip(skip[, callback]) + + :param number skip: the new skip value. + :param function [callback]: this optional callback will be called after executing this method. The first parameter will contain an error object when the skip value given is not a valid number or when the cursor is already closed while the second parameter will contain a reference to this object upon successful execution. + :returns: cursor an instance of this object. + + +**Examples** + + + + A simple example showing the use of skip on the cursor + + .. code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + + // Create a collection + db.createCollection('simple_skip_collection', function(err, collection) { + assert.equal(null, err); + + // Insert some documents we can sort on + collection.insert([{a:1}, {a:2}, {a:3}], {w:1}, function(err, docs) { + assert.equal(null, err); + + // Skip one document + collection.find().skip(1).nextObject(function(err, item) { + assert.equal(null, err); + assert.equal(2, item.a); + + db.close(); + }); + }); + }); + }); + + +--------- +batchSize +--------- + + +Sets the batch size parameter of this cursor to the given value. + +.. js:function:: batchSize(batchSize[, callback]) + + :param number batchSize: the new batch size. + :param function [callback]: this optional callback will be called after executing this method. The first parameter will contain an error object when the batchSize given is not a valid number or when the cursor is already closed while the second parameter will contain a reference to this object upon successful execution. + :returns: cursor an instance of this object. + + +**Examples** + + + + A simple example showing the use of batchSize on the cursor, batchSize only regulates how many + documents are returned for each batch using the getMoreCommand against the MongoDB server + + .. code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + + // Create a collection + db.createCollection('simple_batch_size_collection', function(err, collection) { + assert.equal(null, err); + + // Insert some documents we can sort on + collection.insert([{a:1}, {a:2}, {a:3}], {w:1}, function(err, docs) { + assert.equal(null, err); + + // Do normal ascending sort + collection.find().batchSize(1).nextObject(function(err, item) { + assert.equal(null, err); + assert.equal(1, item.a); + + db.close(); + }); + }); + }); + }); + + +---------- +nextObject +---------- + + +Gets the next document from the cursor. + +.. 
js:function:: nextObject(callback) + + :param function callback: this will be called after executing this method. The first parameter will contain an error object on error while the second parameter will contain a document from the returned result or null if there are no more results. + +**Examples** + + + + A simple example showing the use of nextObject. + + .. code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('localhost', 27017)); + // Establish connection to db + db.open(function(err, db) { + + // Create a collection + db.createCollection('simple_next_object_collection', function(err, collection) { + assert.equal(null, err); + + // Insert some documents we can sort on + collection.insert([{a:1}, {a:2}, {a:3}], {w:1}, function(err, docs) { + assert.equal(null, err); + + // Fetch the first document from the cursor + collection.find().nextObject(function(err, item) { + assert.equal(null, err); + assert.equal(1, item.a); + + db.close(); + }); + }); + }); + }); + + +------- +explain +------- + + +Gets detailed information about how the query is performed on this cursor and how +long it took the database to process it. + +.. js:function:: explain(callback) + + :param function callback: this will be called after executing this method. The first parameter will always be null while the second parameter will be an object containing the details. + +**Examples** + + + + A simple example showing the use of the cursor explain function. + + .. code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('localhost', 27017)); + // Establish connection to db + db.open(function(err, db) { + + // Create a collection + db.createCollection('simple_explain_collection', function(err, collection) { + assert.equal(null, err); + + // Insert some documents we can sort on + collection.insert([{a:1}, {a:2}, {a:3}], {w:1}, function(err, docs) { + assert.equal(null, err); + + // Explain the query + collection.find().explain(function(err, explanation) { + assert.equal(null, err); + + db.close(); + }); + }); + }); + }); + + +------ +stream +------ + + +Returns a Node ReadStream interface for this cursor. + + + +Options + - **transform** {Function} function of type function(object) { return transformed }, allows for transformation of data before emitting. + +.. js:function:: stream() + + :returns: cursorstream returns a stream object. + + +**Examples** + + + + A simple example showing the use of the cursor stream function. + + ..
code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + + // Create a lot of documents to insert + var docs = [] + for(var i = 0; i < 100; i++) { + docs.push({'a':i}) + } + + // Create a collection + db.createCollection('test_stream_function', function(err, collection) { + assert.equal(null, err); + + // Insert documents into collection + collection.insert(docs, {w:1}, function(err, ids) { + // Peform a find to get a cursor + var stream = collection.find().stream(); + + // Execute find on all the documents + stream.on('close', function() { + db.close(); + }); + + stream.on('data', function(data) { + assert.ok(data != null); + }); + }); + }); + }); + + +----- +close +----- + + +Close the cursor. + +.. js:function:: close(callback) + + :param function callback: this will be called after executing this method. The first parameter will always contain null while the second parameter will contain a reference to this cursor. + :returns: null + + +**Examples** + + + + A simple example showing the use of the cursor close function. + + .. code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + + // Create a lot of documents to insert + var docs = [] + for(var i = 0; i < 100; i++) { + docs.push({'a':i}) + } + + // Create a collection + db.createCollection('test_close_function_on_cursor', function(err, collection) { + assert.equal(null, err); + + // Insert documents into collection + collection.insert(docs, {w:1}, function(err, ids) { + // Peform a find to get a cursor + var cursor = collection.find(); + + // Fetch the first object + cursor.nextObject(function(err, object) { + assert.equal(null, err); + + // Close the cursor, this is the same as reseting the query + cursor.close(function(err, result) { + assert.equal(null, err); + + db.close(); + }); + }); + }); + }); + }); + + +-------- +isClosed +-------- + + +Check if the cursor is closed or open. + +.. js:function:: isClosed() + + :returns: boolean returns the state of the cursor. + + +**Examples** + + + + A simple example showing the use of the cursor close function. + + .. 
code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('localhost', 27017)); + // Establish connection to db + db.open(function(err, db) { + + // Create a lot of documents to insert + var docs = [] + for(var i = 0; i < 100; i++) { + docs.push({'a':i}) + } + + // Create a collection + db.createCollection('test_is_close_function_on_cursor', function(err, collection) { + assert.equal(null, err); + + // Insert documents into collection + collection.insert(docs, {w:1}, function(err, ids) { + // Perform a find to get a cursor + var cursor = collection.find(); + + // Fetch the first object + cursor.nextObject(function(err, object) { + assert.equal(null, err); + + // Close the cursor, this is the same as resetting the query + cursor.close(function(err, result) { + assert.equal(null, err); + assert.equal(true, cursor.isClosed()); + + db.close(); + }); + }); + }); + }); + }); + diff --git a/_sources/api-generated/cursorstream.txt b/_sources/api-generated/cursorstream.txt new file mode 100644 index 00000000000..11bd4fa42dd --- /dev/null +++ b/_sources/api-generated/cursorstream.txt @@ -0,0 +1,251 @@ +============== +CursorStream() +============== + +------------------ +Constructor +------------------ + + +CursorStream + + + .. js:class:: CursorStream() + + :param cursor cursor: a cursor object that the stream wraps. + :returns: stream + + +Returns a stream interface for the **cursor**. + + + +Options + - **transform** {Function} function of type function(object) { return transformed }, allows for transformation of data before emitting. + + + +Events + - **data** {function(item) {}} the data event triggers when a document is ready. + - **error** {function(err) {}} the error event triggers if an error happens. + - **close** {function() {}} the close event triggers when there are no more documents available. + + +----- +pause +----- + + +Pauses the stream. + +.. js:function:: pause() + + +**Examples** + + + + A simple example showing the use of the cursorstream pause function. + + ..
code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('localhost', 27017)); + // Establish connection to db + db.open(function(err, db) { + + // Create a lot of documents to insert + var docs = [] + for(var i = 0; i < 1; i++) { + docs.push({'a':i}) + } + + // Create a collection + db.createCollection('test_cursorstream_pause', function(err, collection) { + assert.equal(null, err); + + // Insert documents into collection + collection.insert(docs, {w:1}, function(err, ids) { + // Perform a find to get a cursor + var stream = collection.find().stream(); + + // For each data item + stream.on("data", function(item) { + // Check if cursor is paused + assert.equal(false, stream.paused); + // Pause stream + stream.pause(); + // Check if cursor is paused + assert.equal(true, stream.paused); + + // Restart the stream after 1 millisecond + setTimeout(function() { + stream.resume(); + // Check if cursor is paused + process.nextTick(function() { + assert.equal(false, stream.paused); + }) + }, 1); + }); + + // When the stream is done + stream.on("close", function() { + db.close(); + }); + }); + }); + }); + + +------ +resume +------ + + +Resumes the stream. + +.. js:function:: resume() + + +**Examples** + + + + A simple example showing the use of the cursorstream resume function. + + .. code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('localhost', 27017)); + // Establish connection to db + db.open(function(err, db) { + + // Create a lot of documents to insert + var docs = [] + for(var i = 0; i < 1; i++) { + docs.push({'a':i}) + } + + // Create a collection + db.createCollection('test_cursorstream_resume', function(err, collection) { + assert.equal(null, err); + + // Insert documents into collection + collection.insert(docs, {w:1}, function(err, ids) { + // Perform a find to get a cursor + var stream = collection.find().stream(); + + // For each data item + stream.on("data", function(item) { + // Check if cursor is paused + assert.equal(false, stream.paused); + // Pause stream + stream.pause(); + // Check if cursor is paused + assert.equal(true, stream.paused); + + // Restart the stream after 1 millisecond + setTimeout(function() { + + // Resume the stream + stream.resume(); + + // Check if cursor is paused + process.nextTick(function() { + assert.equal(false, stream.paused); + }); + }, 1); + }); + + // When the stream is done + stream.on("close", function() { + db.close(); + }); + }); + }); + }); + + +------- +destroy +------- + + +Destroys the stream, closing the underlying +cursor. No more events will be emitted. + +.. js:function:: destroy() + + +**Examples** + + + + A simple example showing the use of the cursorstream destroy function. + + ..
code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('localhost', 27017)); + // Establish connection to db + db.open(function(err, db) { + + // Create a lot of documents to insert + var docs = [] + for(var i = 0; i < 1; i++) { + docs.push({'a':i}) + } + + // Create a collection + db.createCollection('test_cursorstream_destroy', function(err, collection) { + assert.equal(null, err); + + // Insert documents into collection + collection.insert(docs, {w:1}, function(err, ids) { + // Perform a find to get a cursor + var stream = collection.find().stream(); + + // For each data item + stream.on("data", function(item) { + // Destroy stream + stream.destroy(); + }); + + // When the stream is done + stream.on("close", function() { + db.close(); + }); + }); + }); + }); + diff --git a/_sources/api-generated/db.txt b/_sources/api-generated/db.txt new file mode 100644 index 00000000000..af3fd6e0292 --- /dev/null +++ b/_sources/api-generated/db.txt @@ -0,0 +1,2282 @@ +==== +Db() +==== + +------------------ +Constructor +------------------ + + +Create a new Db instance. + + + .. js:class:: Db() + + :param string databaseName: name of the database. + :param object serverConfig: server config object. + :param object [options]: additional options for the db instance. + + +Options + - **w**, {Number/String, > -1 || 'majority' || tag name} the write concern for the operation where < 1 is no acknowledgement of write and w >= 1, w = 'majority' or tag acknowledges the write + - **wtimeout**, {Number, 0} set the timeout for waiting for write concern to finish (combines with w option) + - **fsync**, (Boolean, default:false) write waits for fsync before returning + - **journal**, (Boolean, default:false) write waits for journal sync before returning + - **readPreference** {String}, the preferred read preference (ReadPreference.PRIMARY, ReadPreference.PRIMARY_PREFERRED, ReadPreference.SECONDARY, ReadPreference.SECONDARY_PREFERRED, ReadPreference.NEAREST). + - **native_parser** {Boolean, default:false}, use c++ bson parser. + - **forceServerObjectId** {Boolean, default:false}, force server to create _id fields instead of client. + - **pkFactory** {Object}, object overriding the basic ObjectID primary key generation. + - **serializeFunctions** {Boolean, default:false}, serialize functions. + - **raw** {Boolean, default:false}, perform operations using raw bson buffers. + - **recordQueryStats** {Boolean, default:false}, record query statistics during execution. + - **retryMiliSeconds** {Number, default:5000}, number of milliseconds between retries. + - **numberOfRetries** {Number, default:5}, number of connection retries. + - **logger** {Object, default:null}, an object representing a logger that you want to use, needs to support functions debug, log, error **({error:function(message, object) {}, log:function(message, object) {}, debug:function(message, object) {}})**. + + + +Deprecated Options + - **safe** {true | {w:n, wtimeout:n} | {fsync:true}, default:false}, executes with a getLastError command returning the results of the command on MongoDB. + + +------------------ +Constants +------------------ + +..
csv-table:: + :header: "Constant Name", "Value", "Description" + :widths: 15, 10, 30 + + "Db.DEFAULT_URL", "'mongodb://localhost:27017/default'", "Default URL" + +---- +open +---- + + +Initialize the database connection. + +.. js:function:: open(callback) + + :param function callback: this will be called after executing this method. The first parameter will contain the Error object if an error occured, or null otherwise. While the second parameter will contain the index information or null if an error occured. + :returns: null + + +**Examples** + + + + An example of a simple single server db connection + + .. code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + assert.equal(null, err); + + db.close(); + }); + + + + Simple replicaset connection setup, requires a running replicaset on the correct ports + + .. code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var replSet = new ReplSetServers([ + new Server('localhost', 30000), + new Server('localhost', 30001), + new Server('localhost', 30002) + ]); + var db = new Db('integration_test_', replSet, {w:0}); + db.open(function(err, p_db) { + assert.equal(null, err); + p_db.close(); + }); + + + + Example of Read Preference usage at the query level. + + .. 
code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var replSet = new ReplSetServers([ + new Server('localhost', 30000), + new Server('localhost', 30001), + new Server('localhost', 30002) + ]); + // Create db instance + var db = new Db('integration_test_', replSet, {w:0, native_parser: (process.env['TEST_NATIVE'] != null)}); + // Trigger test once whole set is up + db.on("fullsetup", function() { + // Rip out secondaries forcing an attempt to read from the primary + db.serverConfig._state.secondaries = {}; + + // Grab the collection + db.collection("read_preference_replicaset_test_0", function(err, collection) { + // Attempt to read (should fail due to the server not being a primary); + collection.find().setReadPreference(ReadPreference.SECONDARY).toArray(function(err, items) { + assert.ok(err != null); + assert.equal("No replica set secondary available for query with ReadPreference SECONDARY", err.message); + // Does not get called or we don't care + db.close(); + }); + }); + }); + + // Connect to the db + db.open(function(err, p_db) { + db = p_db; + }); + + + + A Simple example off connecting to Mongos with a list of alternative proxies. + + .. code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + // Set up mongos connection + var mongos = new Mongos([ + new Server("localhost", 50000, { auto_reconnect: true }), + new Server("localhost", 50001, { auto_reconnect: true }) + ]) + + // Connect using the mongos connections + var db = new Db('integration_test_', mongos, {w:0}); + db.open(function(err, db) { + assert.equal(null, err); + assert.ok(db != null); + + // Perform a simple insert into a collection + var collection = db.collection("shard_test"); + // Insert a simple doc + collection.insert({test:1}, {w:1}, function(err, result) { + assert.equal(null, err); + + collection.findOne({test:1}, {}, {readPreference:new ReadPreference(ReadPreference.SECONDARY)}, function(err, item) { + assert.equal(null, err); + assert.equal(1, item.test); + + db.close(); + }) + }); + }); + + +-- +db +-- + + +Create a new Db instance sharing the current socket connections. + +.. js:function:: db(dbName) + + :param string dbName: the name of the database we want to use. + :returns: db a db instance using the new database. + + +**Examples** + + + + Simple example connecting to two different databases sharing the socket connections below. + + .. 
code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + assert.equal(null, err); + + // Reference a different database sharing the same connections + // for the data transfer + var secondDb = db.db("integration_tests_2"); + + // Fetch the collections + var multipleColl1 = db.collection("multiple_db_instances"); + var multipleColl2 = secondDb.collection("multiple_db_instances"); + + // Write a record into each and then count the records stored + multipleColl1.insert({a:1}, {w:1}, function(err, result) { + multipleColl2.insert({a:1}, {w:1}, function(err, result) { + + // Count over the results ensuring only on record in each collection + multipleColl1.count(function(err, count) { + assert.equal(1, count); + + multipleColl2.count(function(err, count) { + assert.equal(1, count); + + db.close(); + }); + }); + }); + }); + }); + + +----- +close +----- + + +Close the current db connection, including all the child db instances. Emits close event if no callback is provided. + +.. js:function:: close([forceClose], callback) + + :param boolean [forceClose]: connection can never be reused. + :param function callback: this will be called after executing this method. The first parameter will contain the Error object if an error occured, or null otherwise. While the second parameter will contain the results or null if an error occured. + :returns: null + + +**Examples** + + + + An example that shows how to force close a db connection so it cannot be reused. + + .. code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + + // Fetch a collection + var collection = db.collection('shouldCorrectlyFailOnRetryDueToAppCloseOfDb'); + + // Insert a document + collection.insert({a:1}, {w:1}, function(err, result) { + assert.equal(null, err); + + // Force close the connection + db.close(true, function(err, result) { + + // Attemp to insert should fail now with correct message 'db closed by application' + collection.insert({a:2}, {w:1}, function(err, result) { + assert.equal('db closed by application', err.message); + }); + }); + }); + }); + + + + An example of a simple single server db connection and close function + + .. 
code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + assert.equal(null, err); + + // Close the connection with a callback that is optional + db.close(function(err, result) { + assert.equal(null, err); + + }); + }); + + +----- +admin +----- + + +Access the Admin database + +.. js:function:: admin([callback]) + + :param function [callback]: returns the results. + :returns: admin the admin db object. + + +**Examples** + + + + Example showing how to access the Admin database for admin level operations. + + .. code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + db.open(function(err, db) { + + // Use the admin database for the operation + var adminDb = db.admin() + assert.ok(adminDb != null); + + db.close(); + }); + + +--------------- +collectionsInfo +--------------- + + +Returns a cursor to all the collection information. + +.. js:function:: collectionsInfo([collectionName], callback) + + :param string [collectionName]: the collection name we wish to retrieve the information from. + :param function callback: this will be called after executing this method. The first parameter will contain the Error object if an error occured, or null otherwise. While the second parameter will contain the options or null if an error occured. + :returns: null + + +**Examples** + + + + An example of retrieveing the information of all the collections. + + .. 
code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + assert.equal(null, err); + + // Create a collection + db.createCollection('test_collections_info', function(err, r) { + assert.equal(null, err); + + // Return the information of a single collection name + db.collectionsInfo("test_collections_info").toArray(function(err, items) { + assert.equal(1, items.length); + + // Return the information of a all collections, using the callback format + db.collectionsInfo(function(err, cursor) { + + // Turn the cursor into an array of results + cursor.toArray(function(err, items) { + assert.ok(items.length > 0); + + db.close(); + }); + }) + }); + }); + }); + + +--------------- +collectionNames +--------------- + + +Get the list of all collection names for the specified db + + + +Options + - **namesOnly** {String, default:false}, Return only the full collection namespace. + +.. js:function:: collectionNames([collectionName][, options], callback) + + :param string [collectionName]: the collection name we wish to filter by. + :param object [options]: additional options during update. + :param function callback: this will be called after executing this method. The first parameter will contain the Error object if an error occured, or null otherwise. While the second parameter will contain the collection names or null if an error occured. + :returns: null + + +**Examples** + + + + An example of retrieveing the collection names for a database. + + .. code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + assert.equal(null, err); + + // Create a collection + db.createCollection('test_collections_info', function(err, r) { + assert.equal(null, err); + + // Return the information of a single collection name + db.collectionNames("test_collections_info", function(err, items) { + assert.equal(1, items.length); + + // Return the information of a all collections, using the callback format + db.collectionNames(function(err, items) { + assert.ok(items.length > 0); + + db.close(); + }); + }); + }); + }); + + +---------- +collection +---------- + + +Fetch a specific collection (containing the actual collection information). If the application does not use strict mode you can +can use it without a callback in the following way. 
var collection = db.collection('mycollection'); + + + +Options + - **w**, {Number/String, > -1 || 'majority' || tag name} the write concern for the operation where < 1 is no acknowlegement of write and w >= 1, w = 'majority' or tag acknowledges the write + - **wtimeout**, {Number, 0} set the timeout for waiting for write concern to finish (combines with w option) + - **fsync**, (Boolean, default:false) write waits for fsync before returning + - **journal**, (Boolean, default:false) write waits for journal sync before returning + - **serializeFunctions** {Boolean, default:false}, serialize functions on the document. + - **raw** {Boolean, default:false}, perform all operations using raw bson objects. + - **pkFactory** {Object}, object overriding the basic ObjectID primary key generation. + - **readPreference** {String}, the prefered read preference (ReadPreference.PRIMARY, ReadPreference.PRIMARY_PREFERRED, ReadPreference.SECONDARY, ReadPreference.SECONDARY_PREFERRED, ReadPreference.NEAREST). + - **strict**, (Boolean, default:false) throws and error if the collection does not exist + + + +Deprecated Options + - **safe** {true | {w:n, wtimeout:n} | {fsync:true}, default:false}, executes with a getLastError command returning the results of the command on MongoDB. + +.. js:function:: collection(collectionName[, options], callback) + + :param string collectionName: the collection name we wish to access. + :param object [options]: returns option results. + :param function callback: this will be called after executing this method. The first parameter will contain the Error object if an error occured, or null otherwise. While the second parameter will contain the collection or null if an error occured. + :returns: null + + +**Examples** + + + + An example of retrieving a collection from a db using the collection function. + + .. code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + assert.equal(null, err); + + // Grab a collection without a callback no safe mode + var col1 = db.collection('test_correctly_access_collections'); + + // Grab a collection with a callback but no safe operation + db.collection('test_correctly_access_collections', function(err, col2) { + assert.equal(null, err); + + // Grab a collection with a callback in safe mode, ensuring it exists (should fail as it's not created) + db.collection('test_correctly_access_collections', {strict:true}, function(err, col3) { + assert.ok(err != null); + + // Create the collection + db.createCollection('test_correctly_access_collections', function(err, result) { + + // Retry to get the collection, should work as it's now created + db.collection('test_correctly_access_collections', {strict:true}, function(err, col3) { + assert.equal(null, err); + + db.close(); + }); + }); + }); + }); + }); + + +----------- +collections +----------- + + +Fetch all collections for the current db. + +.. js:function:: collections(callback) + + :param function callback: this will be called after executing this method. 
The first parameter will contain the Error object if an error occured, or null otherwise. While the second parameter will contain the collections or null if an error occured. + :returns: null + + +**Examples** + + + + An example of retrieving all collections for a db as Collection objects + + .. code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + assert.equal(null, err); + + // Create the collection + db.createCollection('test_correctly_access_collections2', function(err, result) { + + // Retry to get the collection, should work as it's now created + db.collections(function(err, collections) { + assert.equal(null, err); + assert.ok(collections.length > 0); + + db.close(); + }); + }); + }); + + +---- +eval +---- + + +Evaluate javascript on the server + + + +Options + - **nolock** {Boolean, default:false}, Tell MongoDB not to block on the evaulation of the javascript. + +.. js:function:: eval(code[, parameters], [options], callback) + + :param code code: javascript to execute on server. + :param object [parameters]: the parameters for the call. + :param object [options]: the options + :param function callback: this will be called after executing this method. The first parameter will contain the Error object if an error occured, or null otherwise. While the second parameter will contain the results from eval or null if an error occured. + :returns: null + + +**Examples** + + + + A whole bunch of examples on how to use eval on the server. + + .. 
code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('localhost', 27017)); + // Establish connection to db + db.open(function(err, db) { + + // Evaluate a function on the server with the parameter 3 passed in + db.eval('function (x) {return x;}', [3], function(err, result) { + assert.equal(3, result); + }); + + // Evaluate a function on the server with the parameter 3 passed in, no lock acquired for eval + // on the server + db.eval('function (x) {return x;}', [3], {nolock:true}, function(err, result) { + assert.equal(3, result); + }); + + // Evaluate a function on the server that writes to a server collection + db.eval('function (x) {db.test_eval.save({y:x});}', [5], function(err, result) { + // Locate the entry + db.collection('test_eval', function(err, collection) { + collection.findOne(function(err, item) { + assert.equal(5, item.y); + }); + }); + }); + + // Evaluate a function with 2 parameters passed in + db.eval('function (x, y) {return x + y;}', [2, 3], function(err, result) { + assert.equal(5, result); + }); + + // Evaluate a function with no parameters passed in + db.eval('function () {return 5;}', function(err, result) { + assert.equal(5, result); + }); + + // Evaluate a statement + db.eval('2 + 3;', function(err, result) { + assert.equal(5, result); + }); + + // Evaluate a statement using the code object + db.eval(new Code("2 + 3;"), function(err, result) { + assert.equal(5, result); + }); + + // Evaluate a statement using the code object including a scope + db.eval(new Code("return i;", {'i':2}), function(err, result) { + assert.equal(2, result); + }); + + // Evaluate a statement using the code object including a scope + db.eval(new Code("i + 3;", {'i':2}), function(err, result) { + assert.equal(5, result); + }); + + // Evaluate an illegal statement + db.eval("5 ++ 5;", function(err, result) { + assert.ok(err instanceof Error); + assert.ok(err.message != null); + // Let's close the db + db.close(); + }); + }); + + + + Defining and calling a system level javascript function (NOT recommended, http://www.mongodb.org/display/DOCS/Server-side+Code+Execution) + + ..
code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + + // Clean out the collection + db.collection("system.js").remove({}, {w:1}, function(err, result) { + assert.equal(null, err); + + // Define a system level function + db.collection("system.js").insert({_id: "echo", value: new Code("function(x) { return x; }")}, {w:1}, function(err, result) { + assert.equal(null, err); + + db.eval("echo(5)", function(err, result) { + assert.equal(null, err); + assert.equal(5, result); + + db.close(); + }); + }); + }); + }); + + +----------- +dereference +----------- + + +Dereference a dbref, against a db + +.. js:function:: dereference(dbRef, callback) + + :param dbref dbRef: db reference object we wish to resolve. + :param function callback: this will be called after executing this method. The first parameter will contain the Error object if an error occured, or null otherwise. While the second parameter will contain the results from dereference or null if an error occured. + :returns: null + + +**Examples** + + + + An example of dereferencing values. + + .. code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + assert.equal(null, err); + + // Get a second db + var secondDb = db.db('integration_tests_2'); + + // Create a dereference example + secondDb.createCollection('test_deref_examples', function(err, collection) { + + // Insert a document in the collection + collection.insert({'a':1}, {w:1}, function(err, ids) { + + // Let's build a db reference and resolve it + var dbRef = new DBRef('test_deref_examples', ids[0]._id, 'integration_tests_2'); + + // Resolve it including a db resolve + db.dereference(dbRef, function(err, item) { + assert.equal(1, item.a); + + // Let's build a db reference and resolve it + var dbRef = new DBRef('test_deref_examples', ids[0]._id); + + // Simple local resolve + secondDb.dereference(dbRef, function(err, item) { + assert.equal(1, item.a); + + db.close(); + }); + }); + }); + }); + }); + + +------ +logout +------ + + +Logout user from server, fire off on all connections and remove all auth info + +.. js:function:: logout(callback) + + :param function callback: this will be called after executing this method. The first parameter will contain the Error object if an error occured, or null otherwise. While the second parameter will contain the results from logout or null if an error occured. + :returns: null + + +**Examples** + + + + An example of using the logout command for the database. + + .. 
code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + assert.equal(null, err); + + // Add a user to the database + db.addUser('user', 'name', function(err, result) { + assert.equal(null, err); + + // Authenticate + db.authenticate('user', 'name', function(err, result) { + assert.equal(true, result); + + // Logout the db + db.logout(function(err, result) { + assert.equal(true, result); + + db.close(); + }); + }); + }); + }); + + +------------ +authenticate +------------ + + +Authenticate a user against the server. + + + +Options + - **authSource** {String}, The database that the credentials are for, + different from the name of the current DB, for example admin + +.. js:function:: authenticate(username, password[, options], callback) + + :param string username: username. + :param string password: password. + :param object [options]: the options + :param function callback: this will be called after executing this method. The first parameter will contain the Error object if an error occured, or null otherwise. While the second parameter will contain the results from authentication or null if an error occured. + :returns: null + + +**Examples** + + + + An example of using the authenticate command. + + .. code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + assert.equal(null, err); + + // Add a user to the database + db.addUser('user', 'name', function(err, result) { + assert.equal(null, err); + + // Authenticate + db.authenticate('user', 'name', function(err, result) { + assert.equal(true, result); + + db.close(); + }); + }); + }); + + +------- +addUser +------- + + +Add a user to the database. + + + +Options + - **w**, {Number/String, > -1 || 'majority' || tag name} the write concern for the operation where < 1 is no acknowlegement of write and w >= 1, w = 'majority' or tag acknowledges the write + - **wtimeout**, {Number, 0} set the timeout for waiting for write concern to finish (combines with w option) + - **fsync**, (Boolean, default:false) write waits for fsync before returning + - **journal**, (Boolean, default:false) write waits for journal sync before returning + + + +Deprecated Options + - **safe** {true | {w:n, wtimeout:n} | {fsync:true}, default:false}, executes with a getLastError command returning the results of the command on MongoDB. + +.. js:function:: addUser(username, password[, options], callback) + + :param string username: username. + :param string password: password. + :param object [options]: additional options during update. 
+ + :param function callback: this will be called after executing this method. The first parameter will contain the Error object if an error occurred, or null otherwise. While the second parameter will contain the results from addUser or null if an error occurred. + :returns: null + + +**Examples** + + + + An example of adding a user to the database. + + .. code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('localhost', 27017)); + // Establish connection to db + db.open(function(err, db) { + assert.equal(null, err); + + // Add a user to the database + db.addUser('user', 'name', function(err, result) { + assert.equal(null, err); + + db.close(); + }); + }); + + +---------- +removeUser +---------- + + +Remove a user from a database. + + + +Options + - **w**, {Number/String, > -1 || 'majority' || tag name} the write concern for the operation where < 1 is no acknowledgement of write and w >= 1, w = 'majority' or tag acknowledges the write + - **wtimeout**, {Number, 0} set the timeout for waiting for write concern to finish (combines with w option) + - **fsync**, (Boolean, default:false) write waits for fsync before returning + - **journal**, (Boolean, default:false) write waits for journal sync before returning + + + +Deprecated Options + - **safe** {true | {w:n, wtimeout:n} | {fsync:true}, default:false}, executes with a getLastError command returning the results of the command on MongoDB. + +.. js:function:: removeUser(username[, options], callback) + + :param string username: username. + :param object [options]: additional options during update. + :param function callback: this will be called after executing this method. The first parameter will contain the Error object if an error occurred, or null otherwise. While the second parameter will contain the results from removeUser or null if an error occurred. + :returns: null + + +**Examples** + + + + An example of removing a user from the database. + + ..
code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + assert.equal(null, err); + + // Add a user to the database + db.addUser('user', 'name', function(err, result) { + assert.equal(null, err); + + // Authenticate + db.authenticate('user', 'name', function(err, result) { + assert.equal(true, result); + + // Logout the db + db.logout(function(err, result) { + assert.equal(true, result); + + // Remove the user from the db + db.removeUser('user', function(err, result) { + + // Authenticate + db.authenticate('user', 'name', function(err, result) { + assert.equal(false, result); + + db.close(); + }); + }); + }); + }); + }); + }); + + +---------------- +createCollection +---------------- + + +Creates a collection on a server pre-allocating space, need to create f.ex capped collections. + + + +Options + - **w**, {Number/String, > -1 || 'majority' || tag name} the write concern for the operation where < 1 is no acknowlegement of write and w >= 1, w = 'majority' or tag acknowledges the write + - **wtimeout**, {Number, 0} set the timeout for waiting for write concern to finish (combines with w option) + - **fsync**, (Boolean, default:false) write waits for fsync before returning + - **journal**, (Boolean, default:false) write waits for journal sync before returning + - **serializeFunctions** {Boolean, default:false}, serialize functions on the document. + - **raw** {Boolean, default:false}, perform all operations using raw bson objects. + - **pkFactory** {Object}, object overriding the basic ObjectID primary key generation. + - **capped** {Boolean, default:false}, create a capped collection. + - **size** {Number}, the size of the capped collection in bytes. + - **max** {Number}, the maximum number of documents in the capped collection. + - **autoIndexId** {Boolean, default:true}, create an index on the _id field of the document, True by default on MongoDB 2.2 or higher off for version < 2.2. + - **readPreference** {String}, the prefered read preference (ReadPreference.PRIMARY, ReadPreference.PRIMARY_PREFERRED, ReadPreference.SECONDARY, ReadPreference.SECONDARY_PREFERRED, ReadPreference.NEAREST). + - **strict**, (Boolean, default:false) throws and error if collection already exists + + + +Deprecated Options + - **safe** {true | {w:n, wtimeout:n} | {fsync:true}, default:false}, executes with a getLastError command returning the results of the command on MongoDB. + +.. js:function:: createCollection(collectionName[, options], callback) + + :param string collectionName: the collection name we wish to access. + :param object [options]: returns option results. + :param function callback: this will be called after executing this method. The first parameter will contain the Error object if an error occured, or null otherwise. While the second parameter will contain the results from createCollection or null if an error occured. + :returns: null + + +**Examples** + + + + A simple example showing the creation of a collection. + + .. 
code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + assert.equal(null, err); + + // Create a capped collection with a maximum of 1000 documents + db.createCollection("a_simple_collection", {capped:true, size:10000, max:1000, w:1}, function(err, collection) { + assert.equal(null, err); + + // Insert a document in the capped collection + collection.insert({a:1}, {w:1}, function(err, result) { + assert.equal(null, err); + + db.close(); + }); + }); + }); + + +------- +command +------- + + +Execute a command hash against MongoDB. This lets you acess any commands not available through the api on the server. + +.. js:function:: command(selector, callback) + + :param object selector: the command hash to send to the server, ex: {ping:1}. + :param function callback: this will be called after executing this method. The command always return the whole result of the command as the second parameter. + :returns: null + + +**Examples** + + + + A simple example creating, dropping a collection and then verifying that the collection is gone. + + .. code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + assert.equal(null, err); + + // Execute ping against the server + db.command({ping:1}, function(err, result) { + assert.equal(null, err); + + db.close(); + }); + }); + + +-------------- +dropCollection +-------------- + + +Drop a collection from the database, removing it permanently. New accesses will create a new collection. + +.. js:function:: dropCollection(collectionName, callback) + + :param string collectionName: the name of the collection we wish to drop. + :param function callback: this will be called after executing this method. The first parameter will contain the Error object if an error occured, or null otherwise. While the second parameter will contain the results from dropCollection or null if an error occured. + :returns: null + + +**Examples** + + + + A simple example executing a command against the server. + + .. 
code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + assert.equal(null, err); + + // Execute ping against the server + db.command({ping:1}, function(err, result) { + assert.equal(null, err); + + // Create a capped collection with a maximum of 1000 documents + db.createCollection("a_simple_create_drop_collection", {capped:true, size:10000, max:1000, w:1}, function(err, collection) { + assert.equal(null, err); + + // Insert a document in the capped collection + collection.insert({a:1}, {w:1}, function(err, result) { + assert.equal(null, err); + + // Drop the collection from this world + db.dropCollection("a_simple_create_drop_collection", function(err, result) { + assert.equal(null, err); + + // Verify that the collection is gone + db.collectionNames("a_simple_create_drop_collection", function(err, names) { + assert.equal(0, names.length); + + db.close(); + }); + }); + }); + }); + }); + }); + + +---------------- +renameCollection +---------------- + + +Rename a collection. + + + +Options + - **dropTarget** {Boolean, default:false}, drop the target name collection if it previously exists. + +.. js:function:: renameCollection(fromCollection, toCollection[, options], callback) + + :param string fromCollection: the name of the current collection we wish to rename. + :param string toCollection: the new name of the collection. + :param object [options]: returns option results. + :param function callback: this will be called after executing this method. The first parameter will contain the Error object if an error occured, or null otherwise. While the second parameter will contain the results from renameCollection or null if an error occured. + :returns: null + + +**Examples** + + + + A simple example creating, dropping a collection and then verifying that the collection is gone. + + .. 
code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + assert.equal(null, err); + + // Create a collection + db.createCollection("simple_rename_collection", {w:1}, function(err, collection) { + assert.equal(null, err); + + // Insert a document in the collection + collection.insert({a:1}, {w:1}, function(err, result) { + assert.equal(null, err); + + // Rename the collection + db.renameCollection("simple_rename_collection", "simple_rename_collection_2", function(err, collection2) { + assert.equal(null, err); + + // Retrieve the number of documents from the collection + collection2.count(function(err, count) { + assert.equal(1, count); + + // Verify that the collection is gone + db.collectionNames("simple_rename_collection", function(err, names) { + assert.equal(0, names.length); + + // Verify that the new collection exists + db.collectionNames("simple_rename_collection_2", function(err, names) { + assert.equal(1, names.length); + + db.close(); + }); + }); + }); + }); + }); + }); + }); + + +--------- +lastError +--------- + + +Return last error message for the given connection, note options can be combined. + + + +Options + - **fsync** {Boolean, default:false}, option forces the database to fsync all files before returning. + - **j** {Boolean, default:false}, awaits the journal commit before returning, > MongoDB 2.0. + - **w** {Number}, until a write operation has been replicated to N servers. + - **wtimeout** {Number}, number of miliseconds to wait before timing out. + + + +Connection Options + - **connection** {Connection}, fire the getLastError down a specific connection. + +.. js:function:: lastError([options][, connectionOptions], callback) + + :param object [options]: returns option results. + :param object [connectionOptions]: returns option results. + :param function callback: this will be called after executing this method. The first parameter will contain the Error object if an error occured, or null otherwise. While the second parameter will contain the results from lastError or null if an error occured. + :returns: null + + +**Examples** + + + + A simple example using lastError on a single connection with a pool of 1. + + .. 
code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + assert.equal(null, err); + + // Create a collection + db.createCollection("simple_rename_collection3", {w:1}, function(err, collection) { + assert.equal(null, err); + + // Insert a document in the collection + collection.insert({a:1}, function(err, result) { + assert.equal(null, err); + + // Execute lastError + db.lastError(function(err, result) { + assert.equal(null, err); + assert.equal(null, result[0].err); + + // Pick a specific connection and execute lastError against it + var connection = db.serverConfig.checkoutWriter(); + // Execute lastError + db.lastError({}, {connection:connection}, function(err, result) { + assert.equal(null, err); + assert.equal(null, result[0].err); + + db.close(); + }); + }); + }); + }); + }); + + +-------------- +previousErrors +-------------- + + +Return all errors up to the last time db reset_error_history was called. + + + +Options + - **connection** {Connection}, fire the getLastError down a specific connection. + +.. js:function:: previousErrors([options], callback) + + :param object [options]: returns option results. + :param function callback: this will be called after executing this method. The first parameter will contain the Error object if an error occured, or null otherwise. While the second parameter will contain the results from previousErrors or null if an error occured. + :returns: null + + +**Examples** + + + + A simple example using previousError to return the list of all errors, might be deprecated in the future. + + .. code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + assert.equal(null, err); + + // Create a collection + db.createCollection("simple_previous_error_coll", {w:1}, function(err, collection) { + assert.equal(null, err); + + // Force a unique index + collection.ensureIndex({a:1}, {unique:true, w:1}, function(err, result) { + assert.equal(null, err); + + // Force some errors + collection.insert([{a:1}, {a:1}, {a:1}, {a:2}], function(err, result) { + + // Pick a specific connection and execute lastError against it + var connection = db.serverConfig.checkoutWriter(); + + // Execute previousErrors + db.previousErrors({connection:connection}, function(err, result) { + assert.equal(null, err); + assert.equal(1, result.length); + assert.ok(result[0].err != null); + + db.close(); + }); + }); + }); + }); + }); + + +----------------- +resetErrorHistory +----------------- + + +Resets the error history of the mongo instance. 
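These error helpers are thin wrappers around MongoDB's legacy error-tracking machinery (the driver fires getLastError and related commands for you). As a rough sketch only, and assuming the legacy ``getlasterror``, ``getpreverror`` and ``reseterror`` commands of the MongoDB 2.x servers this driver targets, roughly equivalent information can be obtained by issuing those commands directly through ``command``:

.. code-block:: javascript

   var Db = require('mongodb').Db,
       Server = require('mongodb').Server,
       assert = require('assert');

   var db = new Db('test', new Server('localhost', 27017));
   // Establish connection to db
   db.open(function(err, db) {
     assert.equal(null, err);

     // Roughly what lastError does under the hood
     db.command({getlasterror:1}, function(err, result) {
       assert.equal(null, err);

       // Roughly what resetErrorHistory does under the hood
       db.command({reseterror:1}, function(err, result) {
         assert.equal(null, err);

         // Roughly what previousErrors does under the hood
         db.command({getpreverror:1}, function(err, result) {
           assert.equal(null, err);

           db.close();
         });
       });
     });
   });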
+ + + +Options + - **connection** {Connection}, fire the getLastError down a specific connection. + +.. js:function:: resetErrorHistory([options], callback) + + :param object [options]: returns option results. + :param function callback: this will be called after executing this method. The first parameter will contain the Error object if an error occured, or null otherwise. While the second parameter will contain the results from resetErrorHistory or null if an error occured. + :returns: null + + +**Examples** + + + + A simple example using resetErrorHistory to clean up the history of errors. + + .. code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + assert.equal(null, err); + + // Create a collection + db.createCollection("simple_reset_error_history_coll", {w:1}, function(err, collection) { + assert.equal(null, err); + + // Force a unique index + collection.ensureIndex({a:1}, {unique:true, w:1}, function(err, result) { + assert.equal(null, err); + + // Force some errors + collection.insert([{a:1}, {a:1}, {a:1}, {a:2}], function(err, result) { + // Pick a specific connection and execute lastError against it + var connection = db.serverConfig.checkoutWriter(); + + // Reset the error history + db.resetErrorHistory({connection:connection}, function(err, result) { + + // Execute previousErrors and validate that there are no errors left + db.previousErrors({connection:connection}, function(err, result) { + assert.equal(null, err); + assert.equal(1, result.length); + assert.equal(null, result[0].err); + + db.close(); + }); + }); + }); + }); + }); + }); + + +----------- +createIndex +----------- + + +Creates an index on the collection. + + + +Options + - **w**, {Number/String, > -1 || 'majority' || tag name} the write concern for the operation where < 1 is no acknowlegement of write and w >= 1, w = 'majority' or tag acknowledges the write + - **wtimeout**, {Number, 0} set the timeout for waiting for write concern to finish (combines with w option) + - **fsync**, (Boolean, default:false) write waits for fsync before returning + - **journal**, (Boolean, default:false) write waits for journal sync before returning + - **unique** {Boolean, default:false}, creates an unique index. + - **sparse** {Boolean, default:false}, creates a sparse index. + - **background** {Boolean, default:false}, creates the index in the background, yielding whenever possible. + - **dropDups** {Boolean, default:false}, a unique index cannot be created on a key that has pre-existing duplicate values. If you would like to create the index anyway, keeping the first document the database indexes and deleting all subsequent documents that have duplicate value + - **min** {Number}, for geospatial indexes set the lower bound for the co-ordinates. + - **max** {Number}, for geospatial indexes set the high bound for the co-ordinates. + - **v** {Number}, specify the format version of the indexes. 
+ - **expireAfterSeconds** {Number}, allows you to expire data on indexes applied to a data (MongoDB 2.2 or higher) + - **name** {String}, override the autogenerated index name (useful if the resulting name is larger than 128 bytes) + + + +Deprecated Options + - **safe** {true | {w:n, wtimeout:n} | {fsync:true}, default:false}, executes with a getLastError command returning the results of the command on MongoDB. + +.. js:function:: createIndex(collectionName, fieldOrSpec[, options], callback) + + :param string collectionName: name of the collection to create the index on. + :param object fieldOrSpec: fieldOrSpec that defines the index. + :param object [options]: additional options during update. + :param function callback: this will be called after executing this method. The first parameter will contain the Error object if an error occured, or null otherwise. While the second parameter will contain the results from createIndex or null if an error occured. + :returns: null + + +**Examples** + + + + A more complex createIndex using a compound unique index in the background and dropping duplicated documents + + .. code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + + // Create a collection we want to drop later + db.createCollection('more_complex_index_test', function(err, collection) { + assert.equal(null, err); + + // Insert a bunch of documents for the index + collection.insert([{a:1, b:1}, {a:1, b:1} + , {a:2, b:2}, {a:3, b:3}, {a:4, b:4}], {w:1}, function(err, result) { + assert.equal(null, err); + + // Create an index on the a field + db.createIndex('more_complex_index_test', {a:1, b:1} + , {unique:true, background:true, dropDups:true, w:1}, function(err, indexName) { + + // Show that duplicate records got dropped + collection.find({}).toArray(function(err, items) { + assert.equal(null, err); + assert.equal(4, items.length); + + // Peform a query, with explain to show we hit the query + collection.find({a:2}, {explain:true}).toArray(function(err, explanation) { + assert.equal(null, err); + assert.ok(explanation[0].indexBounds.a != null); + assert.ok(explanation[0].indexBounds.b != null); + + db.close(); + }); + }) + }); + }); + }); + }); + + +----------- +ensureIndex +----------- + + +Ensures that an index exists, if it does not it creates it + + + +Options + - **w**, {Number/String, > -1 || 'majority' || tag name} the write concern for the operation where < 1 is no acknowlegement of write and w >= 1, w = 'majority' or tag acknowledges the write + - **wtimeout**, {Number, 0} set the timeout for waiting for write concern to finish (combines with w option) + - **fsync**, (Boolean, default:false) write waits for fsync before returning + - **journal**, (Boolean, default:false) write waits for journal sync before returning + - **unique** {Boolean, default:false}, creates an unique index. + - **sparse** {Boolean, default:false}, creates a sparse index. + - **background** {Boolean, default:false}, creates the index in the background, yielding whenever possible. 
+ - **dropDups** {Boolean, default:false}, a unique index cannot be created on a key that has pre-existing duplicate values. If you would like to create the index anyway, keeping the first document the database indexes and deleting all subsequent documents that have duplicate value + - **min** {Number}, for geospatial indexes set the lower bound for the co-ordinates. + - **max** {Number}, for geospatial indexes set the high bound for the co-ordinates. + - **v** {Number}, specify the format version of the indexes. + - **expireAfterSeconds** {Number}, allows you to expire data on indexes applied to a data (MongoDB 2.2 or higher) + - **name** {String}, override the autogenerated index name (useful if the resulting name is larger than 128 bytes) + + + +Deprecated Options + - **safe** {true | {w:n, wtimeout:n} | {fsync:true}, default:false}, executes with a getLastError command returning the results of the command on MongoDB. + +.. js:function:: ensureIndex(collectionName, fieldOrSpec[, options], callback) + + :param string collectionName: name of the collection to create the index on. + :param object fieldOrSpec: fieldOrSpec that defines the index. + :param object [options]: additional options during update. + :param function callback: this will be called after executing this method. The first parameter will contain the Error object if an error occured, or null otherwise. While the second parameter will contain the results from ensureIndex or null if an error occured. + :returns: null + + +**Examples** + + + + A more complex ensureIndex using a compound unique index in the background and dropping duplicated documents. + + .. code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + + // Create a collection we want to drop later + db.createCollection('more_complex_ensure_index_test', function(err, collection) { + assert.equal(null, err); + + // Insert a bunch of documents for the index + collection.insert([{a:1, b:1}, {a:1, b:1} + , {a:2, b:2}, {a:3, b:3}, {a:4, b:4}], {w:1}, function(err, result) { + assert.equal(null, err); + + // Create an index on the a field + db.ensureIndex('more_complex_ensure_index_test', {a:1, b:1} + , {unique:true, background:true, dropDups:true, w:1}, function(err, indexName) { + + // Show that duplicate records got dropped + collection.find({}).toArray(function(err, items) { + assert.equal(null, err); + assert.equal(4, items.length); + + // Peform a query, with explain to show we hit the query + collection.find({a:2}, {explain:true}).toArray(function(err, explanation) { + assert.equal(null, err); + assert.ok(explanation[0].indexBounds.a != null); + assert.ok(explanation[0].indexBounds.b != null); + + db.close(); + }); + }) + }); + }); + }); + }); + + +---------- +cursorInfo +---------- + + +Returns the information available on allocated cursors. + + + +Options + - **readPreference** {String}, the prefered read preference (ReadPreference.PRIMARY, ReadPreference.PRIMARY_PREFERRED, ReadPreference.SECONDARY, ReadPreference.SECONDARY_PREFERRED, ReadPreference.NEAREST). + +.. 
js:function:: cursorInfo([options], callback) + + :param object [options]: additional options during update. + :param function callback: this will be called after executing this method. The first parameter will contain the Error object if an error occured, or null otherwise. While the second parameter will contain the results from cursorInfo or null if an error occured. + :returns: null + + +**Examples** + + + + A Simple example of returning current cursor information in MongoDB + + .. code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + + // Create a collection we want to drop later + db.createCollection('cursor_information_collection', function(err, collection) { + assert.equal(null, err); + + // Create a bunch of documents so we can force the creation of a cursor + var docs = []; + for(var i = 0; i < 1000; i++) { + docs.push({a:'hello world hello world hello world hello world hello world hello world hello world hello world'}); + } + + // Insert a bunch of documents for the index + collection.insert(docs, {w:1}, function(err, result) { + assert.equal(null, err); + + // Let's set a cursor + var cursor = collection.find({}, {batchSize:10}); + cursor.nextObject(function(err, item) { + assert.equal(null, err); + + // Let's grab the information about the cursors on the database + db.cursorInfo(function(err, cursorInformation) { + assert.ok(cursorInformation.totalOpen > 0); + + db.close(); + }); + }); + }); + }); + }); + + +--------- +dropIndex +--------- + + +Drop an index on a collection. + +.. js:function:: dropIndex(collectionName, indexName, callback) + + :param string collectionName: the name of the collection where the command will drop an index. + :param string indexName: name of the index to drop. + :param function callback: this will be called after executing this method. The first parameter will contain the Error object if an error occured, or null otherwise. While the second parameter will contain the results from dropIndex or null if an error occured. + :returns: null + + +**Examples** + + + + An examples showing the creation and dropping of an index + + .. 
code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + + // Create a collection we want to drop later + db.createCollection('create_and_drop_an_index', function(err, collection) { + assert.equal(null, err); + + // Insert a bunch of documents for the index + collection.insert([{a:1, b:1}, {a:1, b:1} + , {a:2, b:2}, {a:3, b:3}, {a:4, b:4}], {w:1}, function(err, result) { + assert.equal(null, err); + + // Create an index on the a field + collection.ensureIndex({a:1, b:1} + , {unique:true, background:true, dropDups:true, w:1}, function(err, indexName) { + + // Drop the index + db.dropIndex("create_and_drop_an_index", "a_1_b_1", function(err, result) { + assert.equal(null, err); + + // Verify that the index is gone + collection.indexInformation(function(err, indexInformation) { + assert.deepEqual([ [ '_id', 1 ] ], indexInformation._id_); + assert.equal(null, indexInformation.a_1_b_1); + + db.close(); + }); + }); + }); + }); + }); + }); + + +------- +reIndex +------- + + +Reindex all indexes on the collection +Warning: reIndex is a blocking operation (indexes are rebuilt in the foreground) and will be slow for large collections. + +.. js:function:: reIndex(collectionName, callback) + + :param string collectionName: the name of the collection. + :param function callback: this will be called after executing this method. The first parameter will contain the Error object if an error occured, or null otherwise. While the second parameter will contain the results from reIndex or null if an error occured. + +**Examples** + + + + An example showing how to force a reindex of a collection. + + .. 
code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + + // Create a collection we want to drop later + db.createCollection('create_and_drop_all_indexes', function(err, collection) { + assert.equal(null, err); + + // Insert a bunch of documents for the index + collection.insert([{a:1, b:1}, {a:1, b:1} + , {a:2, b:2}, {a:3, b:3}, {a:4, b:4, c:4}], {w:1}, function(err, result) { + assert.equal(null, err); + + // Create an index on the a field + collection.ensureIndex({a:1, b:1} + , {unique:true, background:true, dropDups:true, w:1}, function(err, indexName) { + + // Force a reindex of the collection + db.reIndex('create_and_drop_all_indexes', function(err, result) { + assert.equal(null, err); + assert.equal(true, result); + + // Verify that the index is gone + collection.indexInformation(function(err, indexInformation) { + assert.deepEqual([ [ '_id', 1 ] ], indexInformation._id_); + assert.deepEqual([ [ 'a', 1 ], [ 'b', 1 ] ], indexInformation.a_1_b_1); + + db.close(); + }); + }); + }); + }); + }); + }); + + +---------------- +indexInformation +---------------- + + +Retrieves this collections index info. + + + +Options + - **full** {Boolean, default:false}, returns the full raw index information. + - **readPreference** {String}, the preferred read preference ((Server.PRIMARY, Server.PRIMARY_PREFERRED, Server.SECONDARY, Server.SECONDARY_PREFERRED, Server.NEAREST). + +.. js:function:: indexInformation(collectionName[, options], callback) + + :param string collectionName: the name of the collection. + :param object [options]: additional options during update. + :param function callback: this will be called after executing this method. The first parameter will contain the Error object if an error occured, or null otherwise. While the second parameter will contain the results from indexInformation or null if an error occured. + :returns: null + + +**Examples** + + + + An example showing the information returned by indexInformation + + .. 
code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + + // Create a collection we want to drop later + db.createCollection('more_index_information_test', function(err, collection) { + assert.equal(null, err); + + // Insert a bunch of documents for the index + collection.insert([{a:1, b:1}, {a:1, b:1} + , {a:2, b:2}, {a:3, b:3}, {a:4, b:4}], {w:1}, function(err, result) { + assert.equal(null, err); + + // Create an index on the a field + collection.ensureIndex({a:1, b:1} + , {unique:true, background:true, dropDups:true, w:1}, function(err, indexName) { + + // Fetch basic indexInformation for collection + db.indexInformation('more_index_information_test', function(err, indexInformation) { + assert.deepEqual([ [ '_id', 1 ] ], indexInformation._id_); + assert.deepEqual([ [ 'a', 1 ], [ 'b', 1 ] ], indexInformation.a_1_b_1); + + // Fetch full index information + collection.indexInformation({full:true}, function(err, indexInformation) { + assert.deepEqual({ _id: 1 }, indexInformation[0].key); + assert.deepEqual({ a: 1, b: 1 }, indexInformation[1].key); + + db.close(); + }); + }); + }); + }); + }); + }); + + +------------ +dropDatabase +------------ + + +Drop a database. + +.. js:function:: dropDatabase(callback) + + :param function callback: this will be called after executing this method. The first parameter will contain the Error object if an error occured, or null otherwise. While the second parameter will contain the results from dropDatabase or null if an error occured. + :returns: null + + +**Examples** + + + + An examples showing the dropping of a database + + .. 
code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + + // Create a collection + db.createCollection('more_index_information_test', function(err, collection) { + assert.equal(null, err); + + // Insert a bunch of documents for the index + collection.insert([{a:1, b:1}, {a:1, b:1} + , {a:2, b:2}, {a:3, b:3}, {a:4, b:4}], {w:1}, function(err, result) { + assert.equal(null, err); + + // Let's drop the database + db.dropDatabase(function(err, result) { + assert.equal(null, err); + + // Wait to seconds to let it replicate across + setTimeout(function() { + // Get the admin database + db.admin().listDatabases(function(err, dbs) { + // Grab the databases + dbs = dbs.databases; + // Did we find the db + var found = false; + + // Check if we have the db in the list + for(var i = 0; i < dbs.length; i++) { + if(dbs[i].name == 'integration_tests_to_drop') found = true; + } + + // We should not find the databases + if(process.env['JENKINS'] == null) assert.equal(false, found); + + db.close(); + }); + }, 2000); + }); + }); + }); + }); + + +----- +stats +----- + + +Get all the db statistics. + + + +Options + - **scale** {Number}, divide the returned sizes by scale value. + - **readPreference** {String}, the preferred read preference ((Server.PRIMARY, Server.PRIMARY_PREFERRED, Server.SECONDARY, Server.SECONDARY_PREFERRED, Server.NEAREST). + +.. js:function:: stats([options], callback) + + :param objects [options]: options for the stats command + :param function callback: this will be called after executing this method. The first parameter will contain the Error object if an error occured, or null otherwise. While the second parameter will contain the results from stats or null if an error occured. + :returns: null + + +**Examples** + + + + An example showing how to retrieve the db statistics + + .. 
code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('localhost', 27017)); + // Establish connection to db + db.open(function(err, db) { + assert.equal(null, err); + + db.stats(function(err, stats) { + assert.equal(null, err); + assert.ok(stats != null); + + db.close(); + }) + }); + + +---------- +Db.connect +---------- + + +Connect to MongoDB using a url as documented at + + + +docs.mongodb.org/manual/reference/connection-string/ + + + +Options + - **uri_decode_auth** {Boolean, default:false} uri decode the user name and password for authentication + - **db** {Object, default: null} a hash of options to set on the db object, see **Db constructor** + - **server** {Object, default: null} a hash of options to set on the server objects, see **Server constructor** + - **replSet** {Object, default: null} a hash of options to set on the replSet object, see **ReplSet constructor** + - **mongos** {Object, default: null} a hash of options to set on the mongos object, see **Mongos constructor** + +.. js:function:: Db.connect(url[, options], callback) + + :param string url: connection url for MongoDB. + :param object [options]: optional settings for the connection. + :param function callback: this will be called after executing this method. The first parameter will contain the Error object if an error occurred, or null otherwise, while the second parameter will contain the db instance, or null if an error occurred. + :returns: null + + +**Examples** + + + + Example of a simple url connection string, with no acknowledgement of writes. + + .. code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + // Connect to the server using an unacknowledged-writes url (w=0) + Db.connect('mongodb://localhost:27017/test?w=0', function(err, db) { + assert.equal(null, err); + + db.close(); + }); + diff --git a/_sources/api-generated/grid.txt b/_sources/api-generated/grid.txt new file mode 100644 index 00000000000..5ca5956af1d --- /dev/null +++ b/_sources/api-generated/grid.txt @@ -0,0 +1,272 @@ +====== +Grid() +====== + +------------------ +Constructor +------------------ + + +A class representation of a simple Grid interface. + + + .. js:class:: Grid() + + :param db db: A database instance to interact with. + :param string [fsName]: optional different root collection for GridFS. + :returns: grid + + + +--- +put +--- + + +Puts binary data into the grid. + + + +Options + - **_id** {Any}, unique id for this file + - **root** {String}, root collection to use. Defaults to **{GridStore.DEFAULT_ROOT_COLLECTION}**. + - **content_type** {String}, mime type of the file. Defaults to **{GridStore.DEFAULT_CONTENT_TYPE}**. + - **chunk_size** {Number}, size for the chunk. Defaults to **{Chunk.DEFAULT_CHUNK_SIZE}**. + - **metadata** {Object}, arbitrary data the user wants to store. + +..
js:function:: put(data[, options], callback) + + :param buffer data: buffer with Binary Data. + :param object [options]: the options for the files. + :param function callback: this will be called after this method is executed. The first parameter will contain an Error object if an error occured or null otherwise. The second parameter will contain a reference to this object. + :returns: null + + +**Examples** + + + + A simple example showing the usage of the put method. + + .. code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + // Create a new grid instance + var grid = new Grid(db, 'fs'); + // Some data to write + var originalData = new Buffer('Hello world'); + // Write data to grid + grid.put(originalData, {}, function(err, result) { + // Fetch the content + grid.get(result._id, function(err, data) { + assert.deepEqual(originalData.toString('base64'), data.toString('base64')); + + db.close(); + }); + }); + }); + + + + A simple example showing the usage of the put method. + + .. code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + // Create a new grid instance + var grid = new Grid(db, 'fs'); + // Some data to write + var originalData = new Buffer('Hello world'); + // Write data to grid + var id = 123; + grid.put(originalData, {_id: id}, function(err, result) { + // Fetch the content + grid.get(id, function(err, data) { + assert.deepEqual(originalData.toString('base64'), data.toString('base64')); + + db.close(); + }); + }); + }); + + + + A simple example showing the usage of the put method. + + .. 
code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + // Create a new grid instance + var grid = new Grid(db, 'fs'); + // Some data to write + var originalData = new Buffer('Hello world'); + // Write data to grid + var id = 'test'; + grid.put(originalData, {_id: id}, function(err, result) { + assert.equal(result._id, id); + + // Fetch the content + grid.get(id, function(err, data) { + assert.deepEqual(originalData.toString('base64'), data.toString('base64')); + + db.close(); + }); + }); + }); + + +--- +get +--- + + +Get binary data to the grid + +.. js:function:: get(id, callback) + + :param any id: for file. + :param function callback: this will be called after this method is executed. The first parameter will contain an Error object if an error occured or null otherwise. The second parameter will contain a reference to this object. + :returns: null + + +**Examples** + + + + A simple example showing the usage of the get method. + + .. code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + // Create a new grid instance + var grid = new Grid(db, 'fs'); + // Some data to write + var originalData = new Buffer('Hello world'); + // Write data to grid + grid.put(originalData, {}, function(err, result) { + // Fetch the content + grid.get(result._id, function(err, data) { + assert.deepEqual(originalData.toString('base64'), data.toString('base64')); + + // Should fail due to illegal objectID + grid.get('not an id', function(err, result) { + assert.ok(err != null); + + db.close(); + }); + }); + }); + }); + + +------ +delete +------ + + +Delete file from grid + +.. js:function:: delete(id, callback) + + :param any id: for file. + :param function callback: this will be called after this method is executed. The first parameter will contain an Error object if an error occured or null otherwise. The second parameter will contain a reference to this object. + :returns: null + + +**Examples** + + + + A simple example showing the usage of the delete method. + + .. 
code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('localhost', 27017)); + // Establish connection to db + db.open(function(err, db) { + // Create a new grid instance + var grid = new Grid(db, 'fs'); + // Some data to write + var originalData = new Buffer('Hello world'); + // Write data to grid + grid.put(originalData, {}, function(err, result) { + + // Delete file + grid.delete(result._id, function(err, result2) { + assert.equal(null, err); + assert.equal(true, result2); + + // Fetch the content, showing that the file is gone + grid.get(result._id, function(err, data) { + assert.ok(err != null); + assert.equal(null, data); + + db.close(); + }); + }); + }); + }); + diff --git a/_sources/api-generated/gridstore.txt b/_sources/api-generated/gridstore.txt new file mode 100644 index 00000000000..bd10620e5dc --- /dev/null +++ b/_sources/api-generated/gridstore.txt @@ -0,0 +1,1883 @@ +=========== +GridStore() +=========== + +------------------ +Constructor +------------------ + + +A class representation of a file stored in GridFS. + + + .. js:class:: GridStore() + + :param db db: A database instance to interact with. + :param any [id]: optional unique id for this file + :param string [filename]: optional filename for this file, no unique constraint on the field + :param string mode: set the mode for this file. + :param object options: optional properties to specify. + :returns: gridstore + + +Modes + - **"r"** - read only. This is the default mode. + - **"w"** - write in truncate mode. Existing data will be overwritten. + - **"w+"** - write in edit mode. + + + +Options + - **root** {String}, root collection to use. Defaults to **{GridStore.DEFAULT_ROOT_COLLECTION}**. + - **content_type** {String}, mime type of the file. Defaults to **{GridStore.DEFAULT_CONTENT_TYPE}**. + - **chunk_size** {Number}, size for the chunk. Defaults to **{Chunk.DEFAULT_CHUNK_SIZE}**. + - **metadata** {Object}, arbitrary data the user wants to store. + + +------------------ +Constants +------------------ + +.. csv-table:: + :header: "Constant Name", "Value", "Description" + :widths: 15, 10, 30 + + "GridStore.DEFAULT_ROOT_COLLECTION", "'fs'", "The collection to be used for holding the files and chunks collection." + "GridStore.DEFAULT_CONTENT_TYPE", "'binary/octet-stream'", "Default file mime type" + "GridStore.IO_SEEK_SET", "0", "Seek mode where the given length is absolute." + "GridStore.IO_SEEK_CUR", "1", "Seek mode where the given length is an offset to the current read/write head." + "GridStore.IO_SEEK_END", "2", "Seek mode where the given length is an offset to the end of the file." + +------------------ +Properties +------------------ + + +Returns the current chunk size of the file. + +.. js:attribute:: chunkSize number [Getter|Setter] + + + +The md5 checksum for this file. + +.. js:attribute:: md5 number [Getter|Setter] + + +---- +open +---- + + +Opens the file from the database and initializes this object. Also creates a +new one if the file does not exist. + +.. js:function:: open(callback) + + :param function callback: this will be called after executing this method. 
The first parameter will contain an **{Error}** object and the second parameter will be null if an error occured. Otherwise, the first parameter will be null and the second will contain the reference to this object. + :returns: null + + +**Examples** + + + + A simple example showing how to save a file with a filename allowing for multiple files with the same name + + .. code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + // Create a file and open it + var gridStore = new GridStore(db, new ObjectID(), "test_gs_getc_file", "w"); + gridStore.open(function(err, gridStore) { + // Write some content to the file + gridStore.write(new Buffer("hello, world!", "utf8"), function(err, gridStore) { + // Flush the file to GridFS + gridStore.close(function(err, fileData) { + assert.equal(null, err); + + // Create another file with same name and and save content to it + gridStore = new GridStore(db, new ObjectID(), "test_gs_getc_file", "w"); + gridStore.open(function(err, gridStore) { + // Write some content to the file + gridStore.write(new Buffer("hello, world!", "utf8"), function(err, gridStore) { + // Flush the file to GridFS + gridStore.close(function(err, fileData) { + assert.equal(null, err); + + // Open the file in read mode using the filename + var gridStore2 = new GridStore(db, "test_gs_getc_file", "r"); + gridStore2.open(function(err, gridStore) { + + // Read first character and verify + gridStore.getc(function(err, chr) { + assert.equal('h', chr); + + // Open the file using an object id + gridStore2 = new GridStore(db, fileData._id, "r"); + gridStore2.open(function(err, gridStore) { + + // Read first character and verify + gridStore.getc(function(err, chr) { + assert.equal('h', chr); + + db.close(); + }) + }); + }); + }); + }); + }); + }); + }); + }); + }); + }); + + + + A simple example showing opening a file using a filename, writing to it and saving it. + + .. 
code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + // Create a new instance of the gridstore + var gridStore = new GridStore(db, 'ourexamplefiletowrite.txt', 'w'); + + // Open the file + gridStore.open(function(err, gridStore) { + + // Write some data to the file + gridStore.write('bar', function(err, gridStore) { + assert.equal(null, err); + + // Close (Flushes the data to MongoDB) + gridStore.close(function(err, result) { + assert.equal(null, err); + + // Verify that the file exists + GridStore.exist(db, 'ourexamplefiletowrite.txt', function(err, result) { + assert.equal(null, err); + assert.equal(true, result); + + db.close(); + }); + }); + }); + }); + }); + + + + A simple example showing opening a file using an ObjectID, writing to it and saving it. + + .. code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + // Our file ID + var fileId = new ObjectID(); + + // Create a new instance of the gridstore + var gridStore = new GridStore(db, fileId, 'w'); + + // Open the file + gridStore.open(function(err, gridStore) { + + // Write some data to the file + gridStore.write('bar', function(err, gridStore) { + assert.equal(null, err); + + // Close (Flushes the data to MongoDB) + gridStore.close(function(err, result) { + assert.equal(null, err); + + // Verify that the file exists + GridStore.exist(db, fileId, function(err, result) { + assert.equal(null, err); + assert.equal(true, result); + + db.close(); + }); + }); + }); + }); + }); + + +--------- +writeFile +--------- + + +Stores a file from the file system to the GridFS database. + +.. js:function:: writeFile(file, callback) + + :param string file: the file to store. + :param function callback: this will be called after this method is executed. The first parameter will be null and the the second will contain the reference to this object. + :returns: null + + +**Examples** + + + + A simple example showing how to write a file to Gridstore using file location path. + + .. 
code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + // Our file ID + var fileId = new ObjectID(); + + // Open a new file + var gridStore = new GridStore(db, fileId, 'w'); + + // Read the filesize of file on disk (provide your own) + var fileSize = fs.statSync('./test/tests/functional/gridstore/test_gs_weird_bug.png').size; + // Read the buffered data for comparision reasons + var data = fs.readFileSync('./test/tests/functional/gridstore/test_gs_weird_bug.png'); + + // Open the new file + gridStore.open(function(err, gridStore) { + + // Write the file to gridFS + gridStore.writeFile('./test/tests/functional/gridstore/test_gs_weird_bug.png', function(err, doc) { + + // Read back all the written content and verify the correctness + GridStore.read(db, fileId, function(err, fileData) { + assert.equal(data.toString('base64'), fileData.toString('base64')) + assert.equal(fileSize, fileData.length); + + db.close(); + }); + }); + }); + }); + + + + A simple example showing how to write a file to Gridstore using a file handle. + + .. code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + // Our file ID + var fileId = new ObjectID(); + + // Open a new file + var gridStore = new GridStore(db, fileId, 'w'); + + // Read the filesize of file on disk (provide your own) + var fileSize = fs.statSync('./test/tests/functional/gridstore/test_gs_weird_bug.png').size; + // Read the buffered data for comparision reasons + var data = fs.readFileSync('./test/tests/functional/gridstore/test_gs_weird_bug.png'); + + // Open a file handle for reading the file + var fd = fs.openSync('./test/tests/functional/gridstore/test_gs_weird_bug.png', 'r', 0666); + + // Open the new file + gridStore.open(function(err, gridStore) { + + // Write the file to gridFS using the file handle + gridStore.writeFile(fd, function(err, doc) { + + // Read back all the written content and verify the correctness + GridStore.read(db, fileId, function(err, fileData) { + assert.equal(data.toString('base64'), fileData.toString('base64')); + assert.equal(fileSize, fileData.length); + + db.close(); + }); + }); + }); + }); + + +----- +close +----- + + +Saves this file to the database. This will overwrite the old entry if it +already exists. This will work properly only if mode was initialized to +"w" or "w+". + +.. js:function:: close(callback) + + :param function callback: this will be called after executing this method. Passes an **{Error}** object to the first parameter and null to the second if an error occured. 
Otherwise, passes null to the first and a reference to this object to the second. + :returns: null + + +**Examples** + + + + A simple example showing how to use the write command with strings and Buffers. + + .. code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + // Our file ID + var fileId = new ObjectID(); + + // Open a new file + var gridStore = new GridStore(db, fileId, 'w'); + + // Open the new file + gridStore.open(function(err, gridStore) { + + // Write a text string + gridStore.write('Hello world', function(err, gridStore) { + + // Close the + gridStore.close(function(err, result) { + assert.equal(err, null); + + db.close(); + }); + }); + }); + }); + + +--------------- +chunkCollection +--------------- + + +Retrieve this file's chunks collection. + +.. js:function:: chunkCollection(callback) + + :param function callback: this will be called after executing this method. An exception object will be passed to the first parameter when an error occured or null otherwise. A new **{Collection}** object will be passed to the second parameter if no error occured. + :returns: null + + +**Examples** + + + + A simple example showing how to access the chunks collection object. + + .. code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + // Our file ID + var fileId = new ObjectID(); + + // Open a new file + var gridStore = new GridStore(db, fileId, 'w'); + + // Open the new file + gridStore.open(function(err, gridStore) { + + // Access the Chunk collection + gridStore.chunkCollection(function(err, collection) { + assert.equal(err, null); + + db.close(); + }); + }); + }); + + +------ +unlink +------ + + +Deletes all the chunks of this file in the database. + +.. js:function:: unlink(callback) + + :param function callback: this will be called after this method executes. Passes null to the first and true to the second argument. + :returns: null + + +**Examples** + + + + A simple example showing how to use the instance level unlink command to delete a gridstore item. + + .. 
code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + // Our file ID + var fileId = new ObjectID(); + + // Open a new file + var gridStore = new GridStore(db, fileId, 'w'); + + // Open the new file + gridStore.open(function(err, gridStore) { + + // Write a text string + gridStore.write('Hello world', function(err, gridStore) { + + // Close the + gridStore.close(function(err, result) { + assert.equal(err, null); + + // Open the file again and unlin it + new GridStore(db, fileId, 'r').open(function(err, gridStore) { + + // Unlink the file + gridStore.unlink(function(err, result) { + assert.equal(null, err); + + // Verify that the file no longer exists + GridStore.exist(db, fileId, function(err, result) { + assert.equal(null, err); + assert.equal(false, result); + + db.close(); + }); + }); + }); + }); + }); + }); + }); + + +---------- +collection +---------- + + +Retrieves the file collection associated with this object. + +.. js:function:: collection(callback) + + :param function callback: this will be called after executing this method. An exception object will be passed to the first parameter when an error occured or null otherwise. A new **{Collection}** object will be passed to the second parameter if no error occured. + :returns: null + + +**Examples** + + + + A simple example showing how to access the files collection object. + + .. code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + // Our file ID + var fileId = new ObjectID(); + + // Open a new file + var gridStore = new GridStore(db, fileId, 'w'); + + // Open the new file + gridStore.open(function(err, gridStore) { + + // Access the Chunk collection + gridStore.collection(function(err, collection) { + assert.equal(err, null); + + db.close(); + }); + }); + }); + + +--------- +readlines +--------- + + +Reads the data of this file. + +.. js:function:: readlines([separator], callback) + + :param string [separator]: the character to be recognized as the newline separator. + :param function callback: This will be called after this method is executed. The first parameter will be null and the second parameter will contain an array of strings representing the entire data, each element representing a line including the separator character. + :returns: null + + +**Examples** + + + + A simple example showing reading back using readlines to split the text into lines by the seperator provided. + + .. 
code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + // Our file ID + var fileId = new ObjectID(); + + // Open a new file + var gridStore = new GridStore(db, fileId, 'w'); + + // Open the new file + gridStore.open(function(err, gridStore) { + + // Write one line to gridStore + gridStore.puts("line one", function(err, gridStore) { + + // Write second line to gridStore + gridStore.puts("line two", function(err, gridStore) { + + // Write third line to gridStore + gridStore.puts("line three", function(err, gridStore) { + + // Flush file to disk + gridStore.close(function(err, result) { + + // Open file for reading + gridStore = new GridStore(db, fileId, 'r'); + gridStore.open(function(err, gridStore) { + + // Read all the lines and verify correctness + gridStore.readlines(function(err, lines) { + assert.deepEqual(["line one\n", "line two\n", "line three\n"], lines); + + db.close(); + }); + }); + }); + }); + }); + }); + }); + }); + + +------ +rewind +------ + + +Deletes all the chunks of this file in the database if mode was set to "w" or +"w+" and resets the read/write head to the initial position. + +.. js:function:: rewind(callback) + + :param function callback: this will be called after executing this method. The first parameter will contain null and the second one will contain a reference to this object. + :returns: null + + +**Examples** + + + + A simple example showing how to rewind and overwrite the file. + + .. 
code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + // Our file ID + var fileId = new ObjectID(); + + // Create a new file + var gridStore = new GridStore(db, fileId, "w"); + // Open the file + gridStore.open(function(err, gridStore) { + // Write to the file + gridStore.write("hello, world!", function(err, gridStore) { + // Flush the file to disk + gridStore.close(function(err, result) { + + // Reopen the file + gridStore = new GridStore(db, fileId, "w"); + gridStore.open(function(err, gridStore) { + // Write some more text to the file + gridStore.write('some text is inserted here', function(err, gridStore) { + + // Let's rewind to truncate the file + gridStore.rewind(function(err, gridStore) { + + // Write something from the start + gridStore.write('abc', function(err, gridStore) { + + // Flush the data to mongodb + gridStore.close(function(err, result) { + + // Verify that the new data was written + GridStore.read(db, fileId, function(err, data) { + assert.equal("abc", data); + + db.close(); + }); + }); + }); + }); + }); + }); + }); + }); + }); + }); + + +---- +read +---- + + +Retrieves the contents of this file and advances the read/write head. Works with Buffers only. + +**There are 3 signatures for this method** + + + +(callback) +(length, callback) +(length, buffer, callback) + +.. js:function:: read([length][, buffer], callback) + + :param number [length]: the number of characters to read. Reads all the characters from the read/write head to the EOF if not specified. + :param string [buffer]: a string to hold temporary data. This is used for storing the string data read so far when recursively calling this method. + :param function callback: this will be called after this method is executed. null will be passed to the first parameter and a string containing the contents of the buffer concatenated with the contents read from this file will be passed to the second. + :returns: null + + +**Examples** + + + + A simple example showing the usage of the read method. + + .. 
code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + fs = require('fs'), + assert = require('assert'); + + var db = new Db('test', new Server('localhost', 27017)); + // Establish connection to db + db.open(function(err, db) { + // Read in the content of a file + var data = fs.readFileSync('./test/tests/functional/gridstore/iya_logo_final_bw.jpg'); + // Create a new file + var gs = new GridStore(db, "test", "w"); + // Open the file + gs.open(function(err, gs) { + // Write the file to GridFS + gs.write(data, function(err, gs) { + // Flush to GridFS + gs.close(function(err, gs) { + + // Define the file we wish to read + var gs2 = new GridStore(db, "test", "r"); + // Open the file + gs2.open(function(err, gs) { + // Set the pointer of the read head to the start of the gridstored file + gs2.seek(0, function() { + // Read the entire file + gs2.read(function(err, data2) { + // Compare the file content against the original + assert.equal(data.toString('base64'), data2.toString('base64')); + + db.close(); + }); + }); + }); + }); + }); + }); + }); + + +---- +tell +---- + + +Retrieves the position of the read/write head of this file. + +.. js:function:: tell(callback) + + :param function callback: This gets called after this method terminates. null is passed to the first parameter and the position is passed to the second. + :returns: null + + +**Examples** + + + + A simple example showing the usage of the tell method. + + .. code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('localhost', 27017)); + // Establish connection to db + db.open(function(err, db) { + // Create a new file + var gridStore = new GridStore(db, "test_gs_tell", "w"); + // Open the file + gridStore.open(function(err, gridStore) { + // Write a string to the file + gridStore.write("hello, world!", function(err, gridStore) { + // Flush the file to GridFS + gridStore.close(function(err, result) { + + // Open the file in read only mode + var gridStore2 = new GridStore(db, "test_gs_tell", "r"); + gridStore2.open(function(err, gridStore) { + + // Read the first 5 characters + gridStore.read(5, function(err, data) { + assert.equal("hello", data); + + // Get the current position of the read head + gridStore.tell(function(err, position) { + assert.equal(5, position); + + db.close(); + }); + }); + }); + }); + }); + }); + }); + + +---- +seek +---- + + +Moves the read/write head to a new location. + + + +There are 3 signatures for this method + + + +Seek Location Modes + - **GridStore.IO_SEEK_SET**, **(default)** set the position from the start of the file. + - **GridStore.IO_SEEK_CUR**, set the position from the current position in the file. + - **GridStore.IO_SEEK_END**, set the position from the end of the file. + +..
js:function:: seek([position][, seekLocation], callback) + + :param number [position]: the position to seek to + :param number [seekLocation]: seek mode. Use one of the Seek Location modes. + :param function callback: this will be called after executing this method. The first parameter will contain null and the second one will contain a reference to this object. + :returns: null + + +**Examples** + + + + A simple example showing the usage of the seek method. + + .. code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + // Create a file and open it + var gridStore = new GridStore(db, "test_gs_seek_with_buffer", "w"); + gridStore.open(function(err, gridStore) { + // Write some content to the file + gridStore.write(new Buffer("hello, world!", "utf8"), function(err, gridStore) { + // Flush the file to GridFS + gridStore.close(function(result) { + + // Open the file in read mode + var gridStore2 = new GridStore(db, "test_gs_seek_with_buffer", "r"); + gridStore2.open(function(err, gridStore) { + // Seek to start + gridStore.seek(0, function(err, gridStore) { + // Read first character and verify + gridStore.getc(function(err, chr) { + assert.equal('h', chr); + }); + }); + }); + + // Open the file in read mode + var gridStore3 = new GridStore(db, "test_gs_seek_with_buffer", "r"); + gridStore3.open(function(err, gridStore) { + // Seek to 7 characters from the beginning off the file and verify + gridStore.seek(7, function(err, gridStore) { + gridStore.getc(function(err, chr) { + assert.equal('w', chr); + }); + }); + }); + + // Open the file in read mode + var gridStore5 = new GridStore(db, "test_gs_seek_with_buffer", "r"); + gridStore5.open(function(err, gridStore) { + // Seek to -1 characters from the end off the file and verify + gridStore.seek(-1, GridStore.IO_SEEK_END, function(err, gridStore) { + gridStore.getc(function(err, chr) { + assert.equal('!', chr); + }); + }); + }); + + // Open the file in read mode + var gridStore6 = new GridStore(db, "test_gs_seek_with_buffer", "r"); + gridStore6.open(function(err, gridStore) { + // Seek to -6 characters from the end off the file and verify + gridStore.seek(-6, GridStore.IO_SEEK_END, function(err, gridStore) { + gridStore.getc(function(err, chr) { + assert.equal('w', chr); + }); + }); + }); + + // Open the file in read mode + var gridStore7 = new GridStore(db, "test_gs_seek_with_buffer", "r"); + gridStore7.open(function(err, gridStore) { + + // Seek forward 7 characters from the current read position and verify + gridStore.seek(7, GridStore.IO_SEEK_CUR, function(err, gridStore) { + gridStore.getc(function(err, chr) { + assert.equal('w', chr); + + // Seek forward -1 characters from the current read position and verify + gridStore.seek(-1, GridStore.IO_SEEK_CUR, function(err, gridStore) { + gridStore.getc(function(err, chr) { + assert.equal('w', chr); + + // Seek forward -4 characters from the current read position and verify + gridStore.seek(-4, GridStore.IO_SEEK_CUR, function(err, gridStore) { + gridStore.getc(function(err, chr) { + assert.equal('o', chr); + + 
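+ // Note (explanatory comment): getc() advances the read/write head by one character, so each IO_SEEK_CUR offset below is relative to the position reached just after the previous read, as the asserts in this example show.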
// Seek forward 3 characters from the current read position and verify + gridStore.seek(3, GridStore.IO_SEEK_CUR, function(err, gridStore) { + gridStore.getc(function(err, chr) { + assert.equal('o', chr); + + db.close(); + }); + }); + }); + }); + }); + }); + }); + }); + }); + }); + }); + }); + }); + + +--- +eof +--- + + +Verify if the file is at EOF. + +.. js:function:: eof() + + :returns: boolean true if the read/write head is at the end of this file. + + +**Examples** + + + + A simple example showing the usage of the eof method. + + .. code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + + // Open the file in write mode + var gridStore = new GridStore(db, 'test_gs_empty_file_eof', "w"); + gridStore.open(function(err, gridStore) { + // Flush the empty file to GridFS + gridStore.close(function(err, gridStore) { + + // Open the file in read mode + var gridStore2 = new GridStore(db, 'test_gs_empty_file_eof', "r"); + gridStore2.open(function(err, gridStore) { + // Verify that we are at the end of the file + assert.equal(true, gridStore.eof()); + + db.close(); + }) + }); + }); + }); + + +---- +getc +---- + + +Retrieves a single character from this file. + +.. js:function:: getc(callback) + + :param function callback: this gets called after this method is executed. Passes null to the first parameter and the character read to the second or null to the second if the read/write head is at the end of the file. + :returns: null + + +**Examples** + + + + A simple example showing the usage of the seek method. + + .. code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + // Create a file and open it + var gridStore = new GridStore(db, "test_gs_getc_file", "w"); + gridStore.open(function(err, gridStore) { + // Write some content to the file + gridStore.write(new Buffer("hello, world!", "utf8"), function(err, gridStore) { + // Flush the file to GridFS + gridStore.close(function(result) { + + // Open the file in read mode + var gridStore2 = new GridStore(db, "test_gs_getc_file", "r"); + gridStore2.open(function(err, gridStore) { + + // Read first character and verify + gridStore.getc(function(err, chr) { + assert.equal('h', chr); + + db.close(); + }); + }); + }); + }); + }); + }); + + +---- +puts +---- + + +Writes a string to the file with a newline character appended at the end if +the given string does not have one. + +.. js:function:: puts(string, callback) + + :param string string: the string to write. + :param function callback: this will be called after executing this method. 
The first parameter will contain null and the second one will contain a reference to this object. + :returns: null + + +**Examples** + + + + A simple example showing the usage of the puts method. + + .. code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + // Open a file for writing + var gridStore = new GridStore(db, "test_gs_puts_and_readlines", "w"); + gridStore.open(function(err, gridStore) { + + // Write a line to the file using the puts method + gridStore.puts("line one", function(err, gridStore) { + + // Flush the file to GridFS + gridStore.close(function(err, result) { + + // Read in the entire contents + GridStore.read(db, 'test_gs_puts_and_readlines', function(err, data) { + assert.equal("line one\n", data.toString()); + + db.close(); + }); + }); + }); + }); + }); + + +------ +stream +------ + + +Returns read stream based on this GridStore file + + + +Events + - **data** {function(item) {}} the data event triggers when a document is ready. + - **end** {function() {}} the end event triggers when there is no more documents available. + - **close** {function() {}} the close event triggers when the stream is closed. + - **error** {function(err) {}} the error event triggers if an error happens. + +.. js:function:: stream(autoclose) + + :param boolean autoclose: if true current GridStore will be closed when EOF and 'close' event will be fired + :returns: null + + +**Examples** + + + + A simple example showing the usage of the stream method. + + .. 
code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + // Open a file for reading + var gridStoreR = new GridStore(db, "test_gs_read_stream", "r"); + // Open a file for writing + var gridStoreW = new GridStore(db, "test_gs_read_stream", "w"); + // Read in the data of a file + var data = fs.readFileSync("./test/tests/functional/gridstore/test_gs_weird_bug.png"); + + var readLen = 0; + var gotEnd = 0; + + // Open the file we are writting to + gridStoreW.open(function(err, gs) { + // Write the file content + gs.write(data, function(err, gs) { + // Flush the file to GridFS + gs.close(function(err, result) { + + // Open the read file + gridStoreR.open(function(err, gs) { + + // Create a stream to the file + var stream = gs.stream(true); + + // Register events + stream.on("data", function(chunk) { + // Record the length of the file + readLen += chunk.length; + }); + + stream.on("end", function() { + // Record the end was called + ++gotEnd; + }); + + stream.on("close", function() { + // Verify the correctness of the read data + assert.equal(data.length, readLen); + assert.equal(1, gotEnd); + + db.close(); + }); + }); + }); + }); + }); + }); + + + + A simple example showing how to pipe a file stream through from gridfs to a file + + .. code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + // Open a file for writing + var gridStoreWrite = new GridStore(db, "test_gs_read_stream_pipe", "w", {chunkSize:1024}); + gridStoreWrite.writeFile("./test/tests/functional/gridstore/test_gs_weird_bug.png", function(err, result) { + // Open the gridStore for reading and pipe to a file + var gridStore = new GridStore(db, "test_gs_read_stream_pipe", "r"); + gridStore.open(function(err, gridStore) { + // Grab the read stream + var stream = gridStore.stream(true); + // When the stream is finished close the database + stream.on("end", function(err) { + // Read the original content + var originalData = fs.readFileSync("./test/tests/functional/gridstore/test_gs_weird_bug.png"); + // Ensure we are doing writing before attempting to open the file + fs.readFile("./test_gs_weird_bug_streamed.tmp", function(err, streamedData) { + // Compare the data + assert.deepEqual(originalData, streamedData); + + // Close the database + db.close(); + }); + }) + + // Create a file write stream + var fileStream = fs.createWriteStream("./test_gs_weird_bug_streamed.tmp"); + // Pipe out the data + stream.pipe(fileStream); + }) + }) + }); + + +--------------- +GridStore.exist +--------------- + + +Checks if a file exists in the database. + +.. 
js:function:: GridStore.exist(db, name[, rootCollection], callback) + + :param db db: the database to query. + :param string name: the name of the file to look for. + :param string [rootCollection]: the root collection that holds the files and chunks collection. Defaults to **{GridStore.DEFAULT_ROOT_COLLECTION}**. + :param function callback: this will be called after this method executes. Passes null to the first and passes true to the second if the file exists and false otherwise. + :returns: null + + +**Examples** + + + + A simple example showing the usage of the Gridstore.exist method. + + .. code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + + // Open a file for writing + var gridStore = new GridStore(db, null, "w"); + gridStore.open(function(err, gridStore) { + assert.equal(null, err); + + // Writing some content to the file + gridStore.write("hello world!", function(err, gridStore) { + assert.equal(null, err); + + // Flush the file to GridFS + gridStore.close(function(err, result) { + assert.equal(null, err); + + // Check if the file exists using the id returned from the close function + GridStore.exist(db, result._id, function(err, result) { + assert.equal(null, err); + assert.equal(true, result); + }) + + // Show that the file does not exist for a random ObjectID + GridStore.exist(db, new ObjectID(), function(err, result) { + assert.equal(null, err); + assert.equal(false, result); + }); + + // Show that the file does not exist for a different file root + GridStore.exist(db, result._id, 'another_root', function(err, result) { + assert.equal(null, err); + assert.equal(false, result); + + db.close(); + }); + }); + }); + }); + }); + + +-------------- +GridStore.list +-------------- + + +Gets the list of files stored in the GridFS. + +.. js:function:: GridStore.list(db[, rootCollection], callback) + + :param db db: the database to query. + :param string [rootCollection]: the root collection that holds the files and chunks collection. Defaults to **{GridStore.DEFAULT_ROOT_COLLECTION}**. + :param function callback: this will be called after this method executes. Passes null to the first and passes an array of strings containing the names of the files. + :returns: null + + +**Examples** + + + + A simple example showing the usage of the eof method. + + .. 
code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + // Our file id + var fileId = new ObjectID(); + + // Open a file for writing + var gridStore = new GridStore(db, fileId, "foobar2", "w"); + gridStore.open(function(err, gridStore) { + + // Write some content to the file + gridStore.write("hello world!", function(err, gridStore) { + // Flush to GridFS + gridStore.close(function(err, result) { + + // List the existing files + GridStore.list(db, function(err, items) { + var found = false; + items.forEach(function(filename) { + if(filename == 'foobar2') found = true; + }); + + assert.ok(items.length >= 1); + assert.ok(found); + }); + + // List the existing files but return only the file ids + GridStore.list(db, {id:true}, function(err, items) { + var found = false; + items.forEach(function(id) { + assert.ok(typeof id == 'object'); + }); + + assert.ok(items.length >= 1); + }); + + // List the existing files in a specific root collection + GridStore.list(db, 'fs', function(err, items) { + var found = false; + items.forEach(function(filename) { + if(filename == 'foobar2') found = true; + }); + + assert.ok(items.length >= 1); + assert.ok(found); + }); + + // List the existing files in a different root collection where the file is not located + GridStore.list(db, 'my_fs', function(err, items) { + var found = false; + items.forEach(function(filename) { + if(filename == 'foobar2') found = true; + }); + + assert.ok(items.length >= 0); + assert.ok(!found); + + // Specify seperate id + var fileId2 = new ObjectID(); + // Write another file to GridFS + var gridStore2 = new GridStore(db, fileId2, "foobar3", "w"); + gridStore2.open(function(err, gridStore) { + // Write the content + gridStore2.write('my file', function(err, gridStore) { + // Flush to GridFS + gridStore.close(function(err, result) { + + // List all the available files and verify that our files are there + GridStore.list(db, function(err, items) { + var found = false; + var found2 = false; + + items.forEach(function(filename) { + if(filename == 'foobar2') found = true; + if(filename == 'foobar3') found2 = true; + }); + + assert.ok(items.length >= 2); + assert.ok(found); + assert.ok(found2); + + db.close(); + }); + }); + }); + }); + }); + }); + }); + }); + }); + + +-------------- +GridStore.read +-------------- + + +Reads the contents of a file. + + + +This method has the following signatures + + + +(db, name, callback) +(db, name, length, callback) +(db, name, length, offset, callback) +(db, name, length, offset, options, callback) + +.. js:function:: GridStore.read(db, name[, length], [offset], [options], callback) + + :param db db: the database to query. + :param string name: the name of the file. + :param number [length]: the size of data to read. + :param number [offset]: the offset from the head of the file of which to start reading from. + :param object [options]: the options for the file. + :param function callback: this will be called after this method executes. 
A string with an error message will be passed to the first parameter when the length and offset combination exceeds the length of the file while an Error object will be passed if other forms of error occured, otherwise, a string is passed. The second parameter will contain the data read if successful or null if an error occured. + :returns: null + + +**Examples** + + + + A simple example showing the usage of the read method. + + .. code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + // Create a new file + var gridStore = new GridStore(db, null, "w"); + // Read in the content from a file, replace with your own + var data = fs.readFileSync("./test/tests/functional/gridstore/test_gs_weird_bug.png"); + + // Open the file + gridStore.open(function(err, gridStore) { + // Write the binary file data to GridFS + gridStore.write(data, function(err, gridStore) { + // Flush the remaining data to GridFS + gridStore.close(function(err, result) { + + // Read in the whole file and check that it's the same content + GridStore.read(db, result._id, function(err, fileData) { + assert.equal(data.length, fileData.length); + + db.close(); + }); + }); + }); + }); + }); + + +------------------- +GridStore.readlines +------------------- + + +Reads the data of this file. + +.. js:function:: GridStore.readlines(db, name[, separator], [options], callback) + + :param db db: the database to query. + :param string name: the name of the file. + :param string [separator]: the character to be recognized as the newline separator. + :param object [options]: file options. + :param function callback: this will be called after this method is executed. The first parameter will be null and the second parameter will contain an array of strings representing the entire data, each element representing a line including the separator character. + :returns: null + + +**Examples** + + + + A simple example showing reading back using readlines to split the text into lines by the seperator provided. + + .. 
code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + // Our file ID + var fileId = new ObjectID(); + + // Open a new file + var gridStore = new GridStore(db, fileId, 'w'); + + // Open the new file + gridStore.open(function(err, gridStore) { + + // Write one line to gridStore + gridStore.puts("line one", function(err, gridStore) { + + // Write second line to gridStore + gridStore.puts("line two", function(err, gridStore) { + + // Write third line to gridStore + gridStore.puts("line three", function(err, gridStore) { + + // Flush file to disk + gridStore.close(function(err, result) { + + // Read back all the lines + GridStore.readlines(db, fileId, function(err, lines) { + assert.deepEqual(["line one\n", "line two\n", "line three\n"], lines); + + db.close(); + }); + }); + }); + }); + }); + }); + }); + + +---------------- +GridStore.unlink +---------------- + + +Deletes the chunks and metadata information of a file from GridFS. + +.. js:function:: GridStore.unlink(db, names[, options]) + + :param db db: the database to interact with. + :param string names: the name/names of the files to delete. + :param object [options]: the options for the files. + :returns: null + + +**Examples** + + + + A simple example showing the usage of the GridStore.unlink method. + + .. 
code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + + // Open a new file for writing + var gridStore = new GridStore(db, "test_gs_unlink", "w"); + gridStore.open(function(err, gridStore) { + + // Write some content + gridStore.write("hello, world!", function(err, gridStore) { + + // Flush file to GridFS + gridStore.close(function(err, result) { + + // Verify the existance of the fs.files document + db.collection('fs.files', function(err, collection) { + collection.count(function(err, count) { + assert.equal(1, count); + }) + }); + + // Verify the existance of the fs.chunks chunk document + db.collection('fs.chunks', function(err, collection) { + collection.count(function(err, count) { + assert.equal(1, count); + + // Unlink the file (removing it) + GridStore.unlink(db, 'test_gs_unlink', function(err, gridStore) { + + // Verify that fs.files document is gone + db.collection('fs.files', function(err, collection) { + collection.count(function(err, count) { + assert.equal(0, count); + }) + }); + + // Verify that fs.chunks chunk documents are gone + db.collection('fs.chunks', function(err, collection) { + collection.count(function(err, count) { + assert.equal(0, count); + + db.close(); + }) + }); + }); + }) + }); + }); + }); + }); + }); + + +----- +write +----- + + +Writes some data. This method will work properly only if initialized with mode "w" or "w+". + +.. js:function:: write(data[, close], callback) + + :param string data: the data to write. + :param boolean [close]: closes this file after writing if set to true. + :param function callback: this will be called after executing this method. The first parameter will contain null and the second one will contain a reference to this object. + :returns: null + + +**Examples** + + + + A simple example showing how to use the write command with strings and Buffers. + + .. 
code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + // Our file ID + var fileId = new ObjectID(); + + // Open a new file + var gridStore = new GridStore(db, fileId, 'w'); + + // Open the new file + gridStore.open(function(err, gridStore) { + + // Write a text string + gridStore.write('Hello world', function(err, gridStore) { + + // Write a buffer + gridStore.write(new Buffer('Buffer Hello world'), function(err, gridStore) { + + // Close the + gridStore.close(function(err, result) { + + // Read back all the written content and verify the correctness + GridStore.read(db, fileId, function(err, fileData) { + assert.equal('Hello worldBuffer Hello world', fileData.toString()); + + db.close(); + }); + }); + }); + }); + }); + }); + + +----- +pause +----- + + +pause + +.. js:function:: pause() + + +------ +resume +------ + + +resume + +.. js:function:: resume() + diff --git a/_sources/api-generated/index.txt b/_sources/api-generated/index.txt new file mode 100644 index 00000000000..fa5d737938a --- /dev/null +++ b/_sources/api-generated/index.txt @@ -0,0 +1,20 @@ +================== +Driver API +================== + +.. toctree:: + :maxdepth: 2 + + admin + collection + mongoclient + db + cursor + cursorstream + gridstore + readstream + grid + server + mongos + replset + readpreference diff --git a/_sources/api-generated/mongoclient.txt b/_sources/api-generated/mongoclient.txt new file mode 100644 index 00000000000..777f02c028a --- /dev/null +++ b/_sources/api-generated/mongoclient.txt @@ -0,0 +1,239 @@ +============= +MongoClient() +============= + +------------------ +Constructor +------------------ + + +Create a new MongoClient instance. + + + .. js:class:: MongoClient() + + :param object serverConfig: server config object. + :param object [options]: additional options for the collection. + + +Options + - **w**, {Number/String, > -1 || 'majority' || tag name} the write concern for the operation where < 1 is no acknowlegement of write and w >= 1, w = 'majority' or tag acknowledges the write + - **wtimeout**, {Number, 0} set the timeout for waiting for write concern to finish (combines with w option) + - **fsync**, (Boolean, default:false) write waits for fsync before returning + - **journal**, (Boolean, default:false) write waits for journal sync before returning + - **readPreference** {String}, the prefered read preference (ReadPreference.PRIMARY, ReadPreference.PRIMARY_PREFERRED, ReadPreference.SECONDARY, ReadPreference.SECONDARY_PREFERRED, ReadPreference.NEAREST). + - **native_parser** {Boolean, default:false}, use c++ bson parser. + - **forceServerObjectId** {Boolean, default:false}, force server to create _id fields instead of client. + - **pkFactory** {Object}, object overriding the basic ObjectID primary key generation. + - **serializeFunctions** {Boolean, default:false}, serialize functions. + - **raw** {Boolean, default:false}, peform operations using raw bson buffers. + - **recordQueryStats** {Boolean, default:false}, record query statistics during execution. 
+ - **retryMiliSeconds** {Number, default:5000}, number of milliseconds between retries. + - **numberOfRetries** {Number, default:5}, number of connection retries. + + + +Deprecated Options + - **safe** {true | {w:n, wtimeout:n} | {fsync:true}, default:false}, executes with a getLastError command returning the results of the command on MongoDB. + + +---- +open +---- + + +Initialize the database connection. + +.. js:function:: open(callback) + + :param function callback: this will be called after executing this method. The first parameter will contain the Error object if an error occurred, or null otherwise. The second parameter will contain the connected mongoclient, or null if an error occurred. + :returns: null + + +**Examples** + + + + A basic example using the MongoClient to connect using a Server instance, similar to the existing Db version + + .. code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + // Set up the connection to the local db + var mongoclient = new MongoClient(new Server("localhost", 27017, {native_parser: true})); + + // Open the connection to the server + mongoclient.open(function(err, mongoclient) { + + // Get the first db and update a document on it + var db = mongoclient.db("integration_tests"); + db.collection('mongoclient_test').update({a:1}, {b:1}, {upsert:true}, function(err, result) { + assert.equal(null, err); + assert.equal(1, result); + + // Get another db and update a document on it + var db2 = mongoclient.db("integration_tests2"); + db2.collection('mongoclient_test').update({a:1}, {b:1}, {upsert:true}, function(err, result) { + assert.equal(null, err); + assert.equal(1, result); + + // Close the connection + mongoclient.close(); + }); + }); + }); + + +----- +close +----- + + +Close the current db connection, including all the child db instances. Emits a close event if no callback is provided. + +.. js:function:: close(callback) + + :param function callback: this will be called after executing this method. The first parameter will contain the Error object if an error occurred, or null otherwise. The second parameter will contain the results from the close method, or null if an error occurred. + :returns: null + + +-- +db +-- + + +Create a new Db instance sharing the current socket connections. + +.. js:function:: db(dbName) + + :param string dbName: the name of the database we want to use. + :returns: db a db instance using the new database.
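+ + +**Examples** + + + + A minimal sketch (not taken from the driver's test suite) showing how db() hands back Db instances that reuse this MongoClient's socket connections; the database and collection names here are placeholders. + + .. code-block:: javascript + + var MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + assert = require('assert'); + + // Open a single underlying connection pool + var mongoclient = new MongoClient(new Server("localhost", 27017)); + mongoclient.open(function(err, mongoclient) { + assert.equal(null, err); + + // Both Db instances share the connections held by the MongoClient + var db1 = mongoclient.db("first_placeholder_db"); + var db2 = mongoclient.db("second_placeholder_db"); + + // Write one document through each logical database + db1.collection('example').insert({a:1}, function(err, result) { + assert.equal(null, err); + + db2.collection('example').insert({b:1}, function(err, result) { + assert.equal(null, err); + + // Closing the client closes the shared sockets for every child db + mongoclient.close(); + }); + }); + });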
+ + +------------------- +MongoClient.connect +------------------- + + +Connect to MongoDB using a url as documented at + + + +docs.mongodb.org/manual/reference/connection-string/ + + + +Options + - **uri_decode_auth** {Boolean, default:false} uri decode the user name and password for authentication + - **db** {Object, default: null} a hash off options to set on the db object, see **Db constructor** + - **server** {Object, default: null} a hash off options to set on the server objects, see **Server** constructor** + - **replSet** {Object, default: null} a hash off options to set on the replSet object, see **ReplSet** constructor** + - **mongos** {Object, default: null} a hash off options to set on the mongos object, see **Mongos** constructor** + +.. js:function:: MongoClient.connect(url[, options], callback) + + :param string url: connection url for MongoDB. + :param object [options]: optional options for insert command + :param function callback: this will be called after executing this method. The first parameter will contain the Error object if an error occured, or null otherwise. While the second parameter will contain the initialized db object or null if an error occured. + :returns: null + + +**Examples** + + + + Example of a simple url connection string for a single server connection + + .. code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + // Connect using the connection string + MongoClient.connect("mongodb://localhost:27017/integration_tests", {native_parser:true}, function(err, db) { + assert.equal(null, err); + + db.collection('mongoclient_test').update({a:1}, {b:1}, {upsert:true}, function(err, result) { + assert.equal(null, err); + assert.equal(1, result); + + db.close(); + }); + }); + + + + Example of a simple url connection string to a replicaset, with acknowledgement of writes. + + .. code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + MongoClient.connect("mongodb://localhost:30000,localhost:30001,localhost:30002/integration_test_?w=1", function(err, db) { + assert.equal(null, err); + assert.ok(db != null); + + db.collection("replicaset_mongo_client_collection").update({a:1}, {b:1}, {upsert:true}, function(err, result) { + assert.equal(null, err); + assert.equal(1, result); + + db.close(); + }); + }); + + + + Example of a simple url connection string to a shard, with acknowledgement of writes. + + .. 
code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + MongoClient.connect('mongodb://localhost:50000,localhost:50001/sharded_test_db?w=1', function(err, db) { + assert.equal(null, err); + assert.ok(db != null); + + db.collection("replicaset_mongo_client_collection").update({a:1}, {b:1}, {upsert:true}, function(err, result) { + assert.equal(null, err); + assert.equal(1, result); + + db.close(); + }); + }); + diff --git a/_sources/api-generated/mongos.txt b/_sources/api-generated/mongos.txt new file mode 100644 index 00000000000..5ca38cbaa6b --- /dev/null +++ b/_sources/api-generated/mongos.txt @@ -0,0 +1,33 @@ +======== +Mongos() +======== + +------------------ +Constructor +------------------ + + +Mongos constructor provides a connection to a mongos proxy including failover to additional servers + + + .. js:class:: Mongos() + + :param array list: of mongos server objects + :param object [options]: additional options for the mongos connection + + +Options + - **socketOptions** {Object, default:null}, an object containing socket options to use (noDelay:(boolean), keepAlive:(number), connectTimeoutMS:(number), socketTimeoutMS:(number)) + - **ha** {Boolean, default:true}, turn on high availability, attempts to reconnect to down proxies + - **haInterval** {Number, default:2000}, time between each replicaset status check. + + +----------------- +setReadPreference +----------------- + + +Always ourselves + +.. js:function:: setReadPreference() + diff --git a/_sources/api-generated/readpreference.txt b/_sources/api-generated/readpreference.txt new file mode 100644 index 00000000000..ac6110fb8de --- /dev/null +++ b/_sources/api-generated/readpreference.txt @@ -0,0 +1,26 @@ +================ +ReadPreference() +================ + +------------------ +Constructor +------------------ + + +A class representation of the Read Preference. + + + .. js:class:: ReadPreference() + + :param string the: read preference type + :param object tags: + :returns: readpreference + + +Read Preferences + - **ReadPreference.PRIMARY**, Read from primary only. All operations produce an error (throw an exception where applicable) if primary is unavailable. Cannot be combined with tags (This is the default.). + - **ReadPreference.PRIMARY_PREFERRED**, Read from primary if available, otherwise a secondary. + - **ReadPreference.SECONDARY**, Read from secondary if available, otherwise error. + - **ReadPreference.SECONDARY_PREFERRED**, Read from a secondary if available, otherwise read from the primary. + - **ReadPreference.NEAREST**, All modes read from among the nearest candidates, but unlike other modes, NEAREST will include both the primary and all secondaries in the random selection. + diff --git a/_sources/api-generated/readstream.txt b/_sources/api-generated/readstream.txt new file mode 100644 index 00000000000..4b7a22ee9c7 --- /dev/null +++ b/_sources/api-generated/readstream.txt @@ -0,0 +1,264 @@ +============ +ReadStream() +============ + +------------------ +Constructor +------------------ + + +ReadStream + + + .. js:class:: ReadStream() + + :param boolean autoclose: automatically close file when the stream reaches the end. 
+ :param gridstore cursor: a cursor object that the stream wraps. + :returns: readstream + + +Returns a stream interface for the **file**. + + + +Events + - **data** {function(item) {}} the data event triggers when a document is ready. + - **end** {function() {}} the end event triggers when there are no more documents available. + - **close** {function() {}} the close event triggers when the stream is closed. + - **error** {function(err) {}} the error event triggers if an error happens. + + +----- +pause +----- + + +Pauses this stream; no further events will be fired until the stream is resumed. + +.. js:function:: pause() + + +**Examples** + + + + A simple example showing the use of the readstream pause function. + + .. code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('localhost', 27017)); + // Establish connection to db + db.open(function(err, db) { + // File id + var fileId = new ObjectID(); + // Create a file + var file = new GridStore(db, fileId, "w", {chunk_size:5}); + file.open(function(err, file) { + // Write some content and flush to disk + file.write('Hello world', function(err, file) { + file.close(function(err, result) { + + // Let's create a read file + file = new GridStore(db, fileId, "r"); + // Open the file + file.open(function(err, file) { + // Grab a read stream for the file + var stream = file.stream(); + + // For each data item + stream.on("data", function(item) { + // Check if stream is paused + assert.equal(false, stream.paused); + // Pause stream + stream.pause(); + // Restart the stream after 100 milliseconds + setTimeout(function() { + stream.resume(); + // Check that the stream is no longer paused + assert.equal(false, stream.paused); + }, 100); + }); + + // For each data item + stream.on("end", function(item) { + stream.destroy(); + }); + // When the stream is done + stream.on("close", function() { + db.close(); + }); + }); + }); + }); + }); + }); + + +------- +destroy +------- + + +Destroys the stream; no further events will be fired. + +.. js:function:: destroy() + + +**Examples** + + + + A simple example showing the use of the readstream destroy function. + + ..
code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + // File id + var fileId = new ObjectID(); + // Create a file + var file = new GridStore(db, fileId, "w"); + file.open(function(err, file) { + // Write some content and flush to disk + file.write('Hello world', function(err, file) { + file.close(function(err, result) { + + // Let's create a read file + file = new GridStore(db, fileId, "r"); + // Open the file + file.open(function(err, file) { + // Peform a find to get a cursor + var stream = file.stream(); + + // For each data item + stream.on("data", function(item) { + // Destroy the stream + stream.destroy(); + }); + + // When the stream is done + stream.on("close", function() { + db.close(); + }); + }); + }); + }); + }); + }); + + +------ +resume +------ + + +Resumes this stream. + +.. js:function:: resume() + + +**Examples** + + + + A simple example showing the use of the readstream resume function. + + .. code-block:: javascript + + var Db = require('mongodb').Db, + MongoClient = require('mongodb').MongoClient, + Server = require('mongodb').Server, + ReplSetServers = require('mongodb').ReplSetServers, + ObjectID = require('mongodb').ObjectID, + Binary = require('mongodb').Binary, + GridStore = require('mongodb').GridStore, + Code = require('mongodb').Code, + BSON = require('mongodb').pure().BSON, + assert = require('assert'); + + var db = new Db('test', new Server('locahost', 27017)); + // Establish connection to db + db.open(function(err, db) { + // File id + var fileId = new ObjectID(); + // Create a file + var file = new GridStore(db, fileId, "w", {chunk_size:5}); + file.open(function(err, file) { + // Write some content and flush to disk + var fileBody = 'Hello world'; + file.write(fileBody, function(err, file) { + file.close(function(err, result) { + // Let's create a read file + file = new GridStore(db, fileId, "r"); + + // Open the file + file.open(function(err, file) { + // Peform a find to get a cursor + var stream = file.stream(true); + + // Pause the stream initially + stream.pause(); + + // Save read content here + var fileBuffer = ''; + + // For each data item + stream.on("data", function(item) { + // Check if stream is paused + assert.equal(false, stream.paused); + // Pause stream + stream.pause(); + // Check if cursor is paused + // assert.equal(true, stream.paused); + + fileBuffer += item.toString('utf8'); + + // Restart the stream after 1 miliscecond + setTimeout(function() { + stream.resume(); + // Check if cursor is paused + assert.equal(false, stream.paused); + }, 100); + }); + + // For each data item + stream.on("end", function(item) { + }); + // When the stream is done + stream.on("close", function() { + // Have we received the same file back? 
+ assert.equal(fileBuffer, fileBody); + db.close(); + }); + + // Resume the stream + stream.resume(); + }); + }); + }); + }); + }); + diff --git a/_sources/api-generated/replset.txt b/_sources/api-generated/replset.txt new file mode 100644 index 00000000000..76af7b84ab3 --- /dev/null +++ b/_sources/api-generated/replset.txt @@ -0,0 +1,78 @@ +======= +ReplSet +======= + +------------------ +Constructor +------------------ + + +ReplSet constructor provides replicaset functionality + + + .. js:class:: ReplSet + + :param array list: of server objects participating in the replicaset. + :param object [options]: additional options for the replicaset connection. + + +Options + - **ha** {Boolean, default:true}, turn on high availability. + - **haInterval** {Number, default:2000}, time between each replicaset status check. + - **reconnectWait** {Number, default:1000}, time to wait in miliseconds before attempting reconnect. + - **retries** {Number, default:30}, number of times to attempt a replicaset reconnect. + - **rs_name** {String}, the name of the replicaset to connect to. + - **socketOptions** {Object, default:null}, an object containing socket options to use (noDelay:(boolean), keepAlive:(number), connectTimeoutMS:(number), socketTimeoutMS:(number)) + - **readPreference** {String}, the prefered read preference (ReadPreference.PRIMARY, ReadPreference.PRIMARY_PREFERRED, ReadPreference.SECONDARY, ReadPreference.SECONDARY_PREFERRED, ReadPreference.NEAREST). + - **strategy** {String, default:'ping'}, selection strategy for reads choose between (ping, statistical and none, default is ping) + - **secondaryAcceptableLatencyMS** {Number, default:15}, sets the range of servers to pick when using NEAREST (lowest ping ms + the latency fence, ex: range of 1 to (1 + 15) ms) + - **connectArbiter** {Boolean, default:false}, sets if the driver should connect to arbiters or not. + - **logger** {Object, default:null}, an object representing a logger that you want to use, needs to support functions debug, log, error **({error:function(message, object) {}, log:function(message, object) {}, debug:function(message, object) {}})**. + - **ssl** {Boolean, default:false}, use ssl connection (needs to have a mongod server with ssl support) + - **sslValidate** {Boolean, default:false}, validate mongod server certificate against ca (needs to have a mongod server with ssl support, 2.4 or higher) + - **sslCA** {Array, default:null}, Array of valid certificates either as Buffers or Strings (needs to have a mongod server with ssl support, 2.4 or higher) + - **sslCert** {Buffer/String, default:null}, String or buffer containing the certificate we wish to present (needs to have a mongod server with ssl support, 2.4 or higher) + - **sslKey** {Buffer/String, default:null}, String or buffer containing the certificate private key we wish to present (needs to have a mongod server with ssl support, 2.4 or higher) + - **sslPass** {Buffer/String, default:null}, String or buffer containing the certificate password (needs to have a mongod server with ssl support, 2.4 or higher) + + +--------- +_enableHA +--------- + + +Enables high availability pings. + +.. js:function:: _enableHA() + + +------------- +ReplSet.State +------------- + + +Interval state object constructor + +.. js:function:: ReplSet.State() + + +------------------ +_handleOnFullSetup +------------------ + + +Handles the first fullsetup event of this ReplSet. + +.. 
js:function:: _handleOnFullSetup(parent) + + :param db parent: + +---------- +_disableHA +---------- + + +Disables high availability pings. + +.. js:function:: _disableHA() + diff --git a/_sources/api-generated/server.txt b/_sources/api-generated/server.txt new file mode 100644 index 00000000000..c8df01a9c52 --- /dev/null +++ b/_sources/api-generated/server.txt @@ -0,0 +1,76 @@ +======== +Server() +======== + +------------------ +Constructor +------------------ + + +Class representing a single MongoDB Server connection + + + .. js:class:: Server() + + :param string host: the server host + :param number port: the server port + :param object [options]: optional options for insert command + + +Options + - **readPreference** {String, default:null}, set's the read preference (ReadPreference.PRIMARY, ReadPreference.PRIMARY_PREFERRED, ReadPreference.SECONDARY, ReadPreference.SECONDARY_PREFERRED, ReadPreference.NEAREST) + - **ssl** {Boolean, default:false}, use ssl connection (needs to have a mongod server with ssl support) + - **sslValidate** {Boolean, default:false}, validate mongod server certificate against ca (needs to have a mongod server with ssl support, 2.4 or higher) + - **sslCA** {Array, default:null}, Array of valid certificates either as Buffers or Strings (needs to have a mongod server with ssl support, 2.4 or higher) + - **sslCert** {Buffer/String, default:null}, String or buffer containing the certificate we wish to present (needs to have a mongod server with ssl support, 2.4 or higher) + - **sslKey** {Buffer/String, default:null}, String or buffer containing the certificate private key we wish to present (needs to have a mongod server with ssl support, 2.4 or higher) + - **sslPass** {Buffer/String, default:null}, String or buffer containing the certificate password (needs to have a mongod server with ssl support, 2.4 or higher) + - **poolSize** {Number, default:5}, number of connections in the connection pool, set to 5 as default for legacy reasons. + - **socketOptions** {Object, default:null}, an object containing socket options to use (noDelay:(boolean), keepAlive:(number), connectTimeoutMS:(number), socketTimeoutMS:(number)) + - **logger** {Object, default:null}, an object representing a logger that you want to use, needs to support functions debug, log, error **({error:function(message, object) {}, log:function(message, object) {}, debug:function(message, object) {}})**. + - **auto_reconnect** {Boolean, default:false}, reconnect on error. + - **disableDriverBSONSizeCheck** {Boolean, default:false}, force the server to error if the BSON message is to big + + +----------------- +setReadPreference +----------------- + + +Always ourselves + +.. js:function:: setReadPreference() + + +---------------- +assignReplicaSet +---------------- + + +Assigns a replica set to this server. + +.. js:function:: assignReplicaSet(replset) + + :param replset replset: + +------------------------- +inheritReplSetOptionsFrom +------------------------- + + +Takes needed options from replset and overwrites +our own options. + +.. js:function:: inheritReplSetOptionsFrom(replset) + + :param replset replset: + +------- +connect +------- + + +Opens this server connection. + +.. 
js:function:: connect() + diff --git a/HISTORY b/_sources/changelog/changelog.txt similarity index 55% rename from HISTORY rename to _sources/changelog/changelog.txt index adfb2fcce5c..b54b7b4f944 100644 --- a/HISTORY +++ b/_sources/changelog/changelog.txt @@ -1,3 +1,277 @@ +========= +Changelog +========= + +1.2.14 2013-03-14 +----------------- +- Refactored test suite to speed up running of replicaset tests +- Fix of async error handling when error happens in callback (Issue #909, https://github.com/medikoo) +- Corrected a slaveOk setting issue (Issue #906, #905) +- Fixed HA issue where ping's would not go to correct server on HA server connection failure. +- Uses setImmediate if on 0.10 otherwise nextTick for cursor stream +- Fixed race condition in Cursor stream (NODE-31) +- Fixed issues related to node 0.10 and process.nextTick now correctly using setImmediate where needed on node 0.10 +- Added support for maxMessageSizeBytes if available (DRIVERS-1) +- Added support for authSource (2.4) to MongoClient URL and db.authenticate method (DRIVER-69/NODE-34) +- Fixed issue in GridStore seek and GridStore read to correctly work on multiple seeks (Issue #895) + +1.2.13 2013-02-22 +----------------- +- Allow strategy 'none' for repliaset if no strategy wanted (will default to round robin selection of servers on a set readPreference) +- Fixed missing MongoErrors on some cursor methods (Issue #882) +- Correctly returning a null for the db instance on MongoClient.connect when auth fails (Issue #890) +- Added dropTarget option support for renameCollection/rename (Issue #891, help from https://github.com/jbottigliero) +- Fixed issue where connection using MongoClient.connect would fail if first server did not exist (Issue #885) + +1.2.12 2013-02-13 +----------------- +- Added limit/skip options to Collection.count (Issue #870) +- Added applySkipLimit option to Cursor.count (Issue #870) +- Enabled ping strategy as default for Replicaset if none specified (Issue #876) +- Should correctly pick nearest server for SECONDARY/SECONDARY_PREFERRED/NEAREST (Issue #878) + +1.2.11 2013-01-29 +----------------- +- Added fixes for handling type 2 binary due to PHP driver (Issue #864) +- Moved callBackStore to Base class to have single unified store (Issue #866) +- Ping strategy now reuses sockets unless they are closed by the server to avoid overhead + +1.2.10 2013-01-25 +----------------- +- Merged in SSL support for 2.4 supporting certificate validation and presenting certificates to the server. 
+- Only open a new HA socket when previous one dead (Issue #859, #857) +- Minor fixes + +1.2.9 2013-01-15 +---------------- +- Fixed bug in SSL support for MongoClient/Db.connect when discovering servers (Issue #849) +- Connection string with no db specified should default to admin db (Issue #848) +- Support port passed as string to Server class (Issue #844) +- Removed noOpen support for MongoClient/Db.connect as auto discovery of servers for Mongod/Mongos makes it not possible (Issue #842) +- Included toError wrapper code moved to utils.js file (Issue #839, #840) +- Rewrote cursor handling to avoid process.nextTick using trampoline instead to avoid stack overflow, speedup about 40% + +1.2.8 2013-01-07 +---------------- +- Accept function in a Map Reduce scope object not only a function string (Issue #826, https://github.com/aheckmann) +- Typo in db.authenticate caused a check (for provided connection) to return false, causing a connection AND onAll=true to be passed into __executeQueryCommand downstream (Issue #831, https://github.com/m4tty) +- Allow gridfs objects to use non ObjectID ids (Issue #825, https://github.com/nailgun) +- Removed the double wrap, by not passing an Error object to the wrap function (Issue #832, https://github.com/m4tty) +- Fix connection leak (gh-827) for HA replicaset health checks (Issue #833, https://github.com/aheckmann) +- Modified findOne to use nextObject instead of toArray avoiding a nextTick operation (Issue #836) +- Fixes for cursor stream to avoid multiple getmore issues when one in progress (Issue #818) +- Fixes .open replaying all backed up commands correctly if called after operations performed, (Issue #829 and #823) + +1.2.7 2012-12-23 +---------------- +- Rolled back batches as they hang in certain situations +- Fixes for NODE-25, keep reading from secondaries when primary goes down + +1.2.6 2012-12-21 +---------------- +- domain sockets shouldn't require a port arg (Issue #815, https://github.com/aheckmann) +- Cannot read property 'info' of null (Issue #809, https://github.com/thesmart) +- Cursor.each should work in batches (Issue #804, https://github.com/Swatinem) +- Cursor readPreference bug for non-supported read preferences (Issue #817) + +1.2.5 2012-12-12 +---------------- +- Fixed ssl regression, added more test coverage (Issue #800) +- Added better error reporting to the Db.connect if no valid serverConfig setup found (Issue #798) + +1.2.4 2012-12-11 +---------------- +- Fix to ensure authentication is correctly applied across all secondaries when using MongoClient. + +1.2.3 2012-12-10 +---------------- +- Fix for new replicaset members correctly authenticating when being added (Issue #791, https://github.com/m4tty) +- Fixed seek issue in gridstore when using stream (Issue #790) + +1.2.2 2012-12-03 +---------------- +- Fix for journal write concern not correctly being passed under some circumstances. +- Fixed correct behavior and re-auth for servers that get stepped down (Issue #779). + +1.2.1 2012-11-30 +---------------- +- Fix for double callback on insert with w:0 specified (Issue #783) +- Small cleanup of urlparser. 
+ +1.2.0 2012-11-27 +---------------- +- Honor connectTimeoutMS option for replicasets (Issue #750, https://github.com/aheckmann) +- Fix ping strategy regression (Issue #738, https://github.com/aheckmann) +- Small cleanup of code (Issue #753, https://github.com/sokra/node-mongodb-native) +- Fixed index declaration using objects/arrays from other contexts (Issue #755, https://github.com/sokra/node-mongodb-native) +- Intermittent (and rare) null callback exception when using ReplicaSets (Issue #752) +- Force correct setting of read_secondary based on the read preference (Issue #741) +- If using read preferences with secondaries queries will not fail if primary is down (Issue #744) +- noOpen connection for Db.connect removed as not compatible with autodetection of Mongo type +- Mongos connection with auth not working (Issue #737) +- Use the connect method directly from the require. require('mongodb')("mongodb://localhost:27017/db") +- new MongoClient introduced as the point of connecting to MongoDB's instead of the Db + - open/close/db/connect methods implemented +- Implemented common URL connection format using MongoClient.connect allowing for simialar interface across all drivers. +- Fixed a bug with aggregation helper not properly accepting readPreference + +1.1.11 2012-10-10 +----------------- +- Removed strict mode and introduced normal handling of safe at DB level. + +1.1.10 2012-10-08 +----------------- +- fix Admin.serverStatus (Issue #723, https://github.com/Contra) +- logging on connection open/close(Issue #721, https://github.com/asiletto) +- more fixes for windows bson install (Issue #724) + +1.1.9 2012-10-05 +---------------- +- Updated bson to 0.1.5 to fix build problem on sunos/windows. + +1.1.8 2012-10-01 +---------------- +- Fixed db.eval to correctly handle system.js global javascript functions (Issue #709) +- Cleanup of non-closing connections (Issue #706) +- More cleanup of connections under replicaset (Issue #707, https://github.com/elbert3) +- Set keepalive on as default, override if not needed +- Cleanup of jsbon install to correctly build without install.js script (https://github.com/shtylman) +- Added domain socket support new Server("/tmp/mongodb.sock") style + +1.1.7 2012-09-10 +---------------- +- Protect against starting PingStrategy being called more than once (Issue #694, https://github.com/aheckmann) +- Make PingStrategy interval configurable (was 1 second, relaxed to 5) (Issue #693, https://github.com/aheckmann) +- Made PingStrategy api more consistant, callback to start/stop methods are optional (Issue #693, https://github.com/aheckmann) +- Proper stopping of strategy on replicaset stop +- Throw error when gridstore file is not found in read mode (Issue #702, https://github.com/jbrumwell) +- Cursor stream resume now using nextTick to avoid duplicated records (Issue #696) + +1.1.6 2012-09-01 +---------------- +- Fix for readPreference NEAREST for replicasets (Issue #693, https://github.com/aheckmann) +- Emit end correctly on stream cursor (Issue #692, https://github.com/Raynos) + +1.1.5 2012-08-29 +---------------- +- Fix for eval on replicaset Issue #684 +- Use helpful error msg when native parser not compiled (Issue #685, https://github.com/aheckmann) +- Arbiter connect hotfix (Issue #681, https://github.com/fengmk2) +- Upgraded bson parser to 0.1.2 using gyp, deprecated support for node 0.4.X +- Added name parameter to createIndex/ensureIndex to be able to override index names larger than 128 bytes +- Added exhaust option for find for feature completion 
(not recommended for normal use) +- Added tailableRetryInterval to find for tailable cursors to allow to control getMore retry time interval +- Fixes for read preferences when using MongoS to correctly handle no read preference set when iterating over a cursor (Issue #686) + +1.1.4 2012-08-12 +---------------- +- Added Mongos connection type with a fallback list for mongos proxies, supports ha (on by default) and will attempt to reconnect to failed proxies. +- Documents can now have a toBSON method that lets the user control the serialization behavior for documents being saved. +- Gridstore instance object now works as a readstream or writestream (thanks to code from Aaron heckmann (https://github.com/aheckmann/gridfs-stream)). +- Fix gridfs readstream (Issue #607, https://github.com/tedeh). +- Added disableDriverBSONSizeCheck property to Server.js for people who wish to push the inserts to the limit (Issue #609). +- Fixed bug where collection.group keyf given as Code is processed as a regular object (Issue #608, https://github.com/rrusso2007). +- Case mismatch between driver's ObjectID and mongo's ObjectId, allow both (Issue #618). +- Cleanup map reduce (Issue #614, https://github.com/aheckmann). +- Add proper error handling to gridfs (Issue #615, https://github.com/aheckmann). +- Ensure cursor is using same connection for all operations to avoid potential jump of servers when using replicasets. +- Date identification handled correctly in bson js parser when running in vm context. +- Documentation updates +- GridStore filename not set on read (Issue #621) +- Optimizations on the C++ bson parser to fix a potential memory leak and avoid non-needed calls +- Added support for awaitdata for tailable cursors (Issue #624) +- Implementing read preference setting at collection and cursor level + * collection.find().setReadPreference(Server.SECONDARY_PREFERRED) + * db.collection("some", {readPreference:Server.SECONDARY}) +- Replicaset now returns when the master is discovered on db.open and lets the rest of the connections happen asynchronous. + - ReplSet/ReplSetServers emits "fullsetup" when all servers have been connected to +- Prevent callback from executing more than once in getMore function (Issue #631, https://github.com/shankar0306) +- Corrupt bson messages now errors out to all callbacks and closes up connections correctly, Issue #634 +- Replica set member status update when primary changes bug (Issue #635, https://github.com/alinsilvian) +- Fixed auth to work better when multiple connections are involved. +- Default connection pool size increased to 5 connections. +- Fixes for the ReadStream class to work properly with 0.8 of Node.js +- Added explain function support to aggregation helper +- Added socketTimeoutMS and connectTimeoutMS to socket options for repl_set.js and server.js +- Fixed addUser to correctly handle changes in 2.2 for getLastError authentication required +- Added index to gridstore chunks on file_id (Issue #649, https://github.com/jacobbubu) +- Fixed Always emit db events (Issue #657) +- Close event not correctly resets DB openCalled variable to allow reconnect +- Added open event on connection established for replicaset, mongos and server +- Much faster BSON C++ parser thanks to Lucasfilm Singapore. +- Refactoring of replicaset connection logic to simplify the code. 
+- Add `options.connectArbiter` to decide connect arbiters or not (Issue #675) +- Minor optimization for findAndModify when not using j,w or fsync for safe + +1.0.2 2012-05-15 +---------------- +- Reconnect functionality for replicaset fix for mongodb 2.0.5 + +1.0.1 2012-05-12 +---------------- +- Passing back getLastError object as 3rd parameter on findAndModify command. +- Fixed a bunch of performance regressions in objectId and cursor. +- Fixed issue #600 allowing for single document delete to be passed in remove command. + +1.0.0 2012-04-25 +---------------- +- Fixes to handling of failover on server error +- Only emits error messages if there are error listeners to avoid uncaught events +- Server.isConnected using the server state variable not the connection pool state + +0.9.9.8 2012-04-12 +------------------ +- _id=0 is being turned into an ObjectID (Issue #551) +- fix for error in GridStore write method (Issue #559) +- Fix for reading a GridStore from arbitrary, non-chunk aligned offsets, added test (Issue #563, https://github.com/subroutine) +- Modified limitRequest to allow negative limits to pass through to Mongo, added test (Issue #561) +- Corrupt GridFS files when chunkSize < fileSize, fixed concurrency issue (Issue #555) +- Handle dead tailable cursors (Issue #568, https://github.com/aheckmann) +- Connection pools handles closing themselves down and clearing the state +- Check bson size of documents against maxBsonSize and throw client error instead of server error, (Issue #553) +- Returning update status document at the end of the callback for updates, (Issue #569) +- Refactor use of Arguments object to gain performance (Issue #574, https://github.com/AaronAsAChimp) + +0.9.9.7 2012-03-16 +------------------ +- Stats not returned from map reduce with inline results (Issue #542) +- Re-enable testing of whether or not the callback is called in the multi-chunk seek, fix small GridStore bug (Issue #543, https://github.com/pgebheim) +- Streaming large files from GridFS causes truncation (Issue #540) +- Make callback type checks agnostic to V8 context boundaries (Issue #545) +- Correctly throw error if an attempt is made to execute an insert/update/remove/createIndex/ensureIndex with safe enabled and no callback +- Db.open throws if the application attemps to call open again without calling close first + +0.9.9.6 2012-03-12 +------------------ +- BSON parser is externalized in it's own repository, currently using git master +- Fixes for Replicaset connectivity issue (Issue #537) +- Fixed issues with node 0.4.X vs 0.6.X (Issue #534) +- Removed SimpleEmitter and replaced with standard EventEmitter +- GridStore.seek fails to change chunks and call callback when in read mode (Issue #532) + +0.9.9.5 2012-03-07 +------------------ +- Merged in replSetGetStatus helper to admin class (Issue #515, https://github.com/mojodna) +- Merged in serverStatus helper to admin class (Issue #516, https://github.com/mojodna) +- Fixed memory leak in C++ bson parser (Issue #526) +- Fix empty MongoError "message" property (Issue #530, https://github.com/aheckmann) +- Cannot save files with the same file name to GridFS (Issue #531) + +0.9.9.4 2012-02-26 +------------------ +* bugfix for findAndModify: Error: corrupt bson message < 5 bytes long (Issue #519) + +0.9.9.3 2012-02-23 +------------------ +* document: save callback arguments are both undefined, (Issue #518) +* Native BSON parser install error with npm, (Issue #517) + +0.9.9.2 2012-02-17 +------------------ +* Improved detection of Buffers using 
Buffer.isBuffer instead of instanceof. +* Added wrap error around db.dropDatabase to catch all errors (Issue #512) +* Added aggregate helper to collection, only for MongoDB >= 2.1 + 0.9.9.1 2012-02-15 ------------------ * Better handling of safe when using some commands such as createIndex, ensureIndex, addUser, removeUser, createCollection. @@ -28,7 +302,7 @@ * Added stats command to collection to retrieve the statistics on a Collection. * Added listDatabases command to admin object to allow retrieval of all available dbs. * Changed createCreateIndexCommand to work better with options. -* Fixed dereference method on Db class to correctly dereference Db reference objects. +* Fixed dereference method on Db class to correctly dereference Db reference objects. * Moved connect object onto Db class(Db.connect) as well as keeping backward compatibility. * Removed writeBuffer method from gridstore, write handles switching automatically now. * Changed readBuffer to read on Gridstore, Gridstore now only supports Binary Buffers no Strings anymore. @@ -40,10 +314,10 @@ * Added support for $returnKey, $maxScan, $min, $max, $showDiskLoc, $comment to cursor and find/findOne methods. * Added dropDups and v option to createIndex and ensureIndex. * Added isCapped method to Collection. -* Added indexExists method to Collection. +* Added indexExists method to Collection. * Added findAndRemove method to Collection. * Fixed bug for replicaset connection when no active servers in the set. -* Fixed bug for replicaset connections when errors occur during connection. +* Fixed bug for replicaset connections when errors occur during connection. * Merged in patch for BSON Number handling from Lee Salzman, did some small fixes and added test coverage. 0.9.8-3 2012-01-21 @@ -66,7 +340,7 @@ * Fixes formattedOrderClause in collection.js to accept a plain hash as a parameter (Issue #469) https://github.com/tedeh * Removed duplicate code for formattedOrderClause and moved to utils module * Pass in poolSize for ReplSetServers to set default poolSize for new replicaset members -* Bug fix for BSON JS deserializer. Isolating the eval functions in separate functions to avoid V8 deoptimizations +* Bug fix for BSON JS deserializer. Isolating the eval functions in separate functions to avoid V8 deoptimizations * Correct handling of illegal BSON messages during deserialization * Fixed Infinite loop when reading GridFs file with no chunks (Issue #471) * Correctly update existing user password when using addUser (Issue #470) @@ -115,7 +389,7 @@ 0.9.7.2-3 2011-12-18 -------------------- * Fixed error handling for findAndModify while still working for mongodb 1.8.6 (Issue #450). -* Allow for force send query to primary, pass option (read:'primary') on find command. +* Allow for force send query to primary, pass option (read:'primary') on find command. 
* ``find({a:1}, {read:'primary'}).toArray(function(err, items) {});`` 0.9.7.2-2 2011-12-16 @@ -168,7 +442,7 @@ ------------------ * Better handling of dead server for single server instances * FindOne and find treats selector == null as {}, Issue #403 -* Possible to pass in a strategy for the replicaset to pick secondary reader node +* Possible to pass in a strategy for the replicaset to pick secondary reader node * parameter strategy * ping (default), pings the servers and picks the one with the lowest ping time * statistical, measures each request and pick the one with the lowest mean and std deviation @@ -234,7 +508,7 @@ * Reworked bson.cc to throw error when trying to serialize js bson types * Added MinKey, MaxKey and Double support for JS and C++ parser * Reworked socket handling code to emit errors on unparsable messages -* Added logger option for Db class, lets you pass in a function in the shape +* Added logger option for Db class, lets you pass in a function in the shape { log : function(message, object) {}, error : function(errorMessage, errorObject) {}, @@ -314,7 +588,7 @@ * Bug fix for bson parsing the key '':'' correctly without crashing 0.9.6-8 -------- +------- * Changed to using node.js crypto library MD5 digest * Connect method support documented mongodb: syntax by (https://github.com/sethml) * Support Symbol type for BSON, serializes to it's own type Symbol, Issue #302, #288 @@ -387,7 +661,7 @@ * Replicaset support (failover and reading from secondary servers) * Removed ServerPair and ServerCluster * Added connection pool functionality -* Fixed serious bug in C++ bson parser where bytes > 127 would generate 2 byte sequences +* Fixed serious bug in C++ bson parser where bytes > 127 would generate 2 byte sequences * Allows for forcing the server to assign ObjectID's using the option {forceServerObjectId: true} 0.6.8 diff --git a/_sources/content/awesomeappsvideo.txt b/_sources/content/awesomeappsvideo.txt new file mode 100644 index 00000000000..bacbaf554ac --- /dev/null +++ b/_sources/content/awesomeappsvideo.txt @@ -0,0 +1,14 @@ +====================================== +Awesome Node.js + MongoDB Applications +====================================== + +Node.js blackboard - socket.io +------------------------------ +Nodejs test using express, jade, stylus, socket.io and Mongodb +for persist the data +Source: http://github.com/gotik/nodejs-blackboard +http://node.kamikazepanda.com + +.. raw:: html + + \ No newline at end of file diff --git a/_sources/content/nodejsvideo.txt b/_sources/content/nodejsvideo.txt new file mode 100644 index 00000000000..c8f01a5ce89 --- /dev/null +++ b/_sources/content/nodejsvideo.txt @@ -0,0 +1,160 @@ +============================== +Node.JS Specific Presentations +============================== + +An Introduction to the node.js MongoDB Driver +--------------------------------------------- +Christian Kvalheim gives an introduction to the node.js MongoDB driver. + +.. raw:: html + +
+ + + + + + + + + + + + +
+
+Node.js and MongoDB, a Panel Discussion
+---------------------------------------
+Working with Node’s JavaScript means that MongoDB documents get their most natural representation - as JSON - right in the application layer. This session introduces the open source tools available for using MongoDB with Node.js through a panel discussion with the main contributors to these projects. Tom Hughes-Croucher, Node.js Chief Evangelist at Joyent, moderates the discussion. Towards the end, Tom fields questions from the attendees.
+
+.. raw:: html
+
+ +
+
+Schemas for Real Time Analytics with Node.js
+--------------------------------------------
+SimpleReach powers the Slide, a recommendation powered content discovery technology for websites. Eric Lubow will discuss how SimpleReach builds schemas in MongoDB and Node.js for powerful, real-time data delivery. Topics will include:
+
+* Dynamic collection creation
+* Updating (Increments over sets)
+* Schema
+* Next gen schemas for dynamic filters
+
+.. raw:: html
+
+ + + + + + + + + + + + +
+ +Happy Mongo-ing in Node.js +-------------------------- +Node.js is JavaScript. MongoDB speaks JSON. So why bother with an in-between "ORM" layer like Mongoose? Join us for a look at the object modeling... tool designed and maintained by the team at LearnBoost. Recent updates have made Mongoose even more compelling. We'll share the pros and cons we've experienced in using it day-to-day on our current project. + +.. raw:: html + +
+ + + + + + + + + + + + +
+ +NYC MongoDB User Group - Schemas for Real Time Analytics with Node.js +--------------------------------------------------------------------- +Schemas for Real Time Analytics with Node.js - Eric Lubow and Russell Bradberry, SimpleReach + +SimpleReach powers the Slide, a recommendation powered content discovery technology for websites. Eric Lublow will discuss how SimpleReach builds schemas in MongoDB and Node.js for powerful, real-time data delivery. Topics will include: + +* Dynamic collection creation +* Updating (Increments over sets) +* Schema +* Next gen schemas for dynamic filters + +.. raw:: html + + + +NYC MongoDB User Group - What's in the Trello +--------------------------------------------- +What's in the Trello - Brett Kiefer, Fog Creek + +Trello is a collaboration tool that organizes your projects into boards. Brett Kiefer will discuss how Trello uses Node.js, Socket.io, MongoDB, Redis, and Backbone to create a responsive single-page app. + +.. raw:: html + + + +NYC Node JS Meetup - August 2011 - Mongoose & other MongoDB Connectors +---------------------------------------------------------------------- +Mongoose Meetup August 2011 + +.. raw:: html + +
+
+A beautiful marriage: MongoDB and node.js
+-----------------------------------------
+What does it take to handle a million requests a day with sub-400ms response times? Not much when you marry the appropriate technologies. Learn how Proxlet.com leverages MongoDB & NodeJS to serve over a million daily requests... on a $10/month server. After briefly describing the product context, I will discuss why we chose NodeJS & MongoDB to form the backbone of Proxlet, and how the technologies play nicely together and are poised for scale.
+
+.. raw:: html
+
+ +
+ +Rapid Realtime App Development with Node.JS & MongoDB +----------------------------------------------------- +Jump on board to learn about combining two of the most exciting technologies to quickly build realtime apps yourself. This talk will introduce the popular Node.js library, Mongoose, which is a MongoDB "ORM" for Node.js. First, the speaker will deliver a quick primer on Node.js. Then, he'll walk you through Mongoose's schema api, powerful query builder, middleware capabilities, and exciting plugin ecosystem. Finally, he'll demonstrate some realtime capabilities using Node.js and Mongoose. + +.. raw:: html + +
+ + + + + + + + + + + +
+ + diff --git a/_sources/content/tutorials.txt b/_sources/content/tutorials.txt new file mode 100644 index 00000000000..d9f84d8df3d --- /dev/null +++ b/_sources/content/tutorials.txt @@ -0,0 +1,33 @@ +=================================== +Tutorials using Node.JS and MongoDB +=================================== + +Node.js and MongoDB Tutorial - Using CoffeeScript and Mongoose (ODM) +-------------------------------------------------------------------- +This is a brief tutorial on how to use Node.js (using CoffeeScript) to connect to a MongoDB (using Mongoose) for a small project I am working on with a group of awesome students. + +.. raw:: html + + + + + + + + + + + + + + + + + + + + + + + +
Part 1/6 of the tutorialPart 2/6 of the tutorial
Part 3/6 of the tutorialPart 4/6 of the tutorial
Part 5/6 of the tutorialPart 6/6 of the tutorial
diff --git a/docs/sphinx-docs/source/contents.rst b/_sources/contents.txt similarity index 95% rename from docs/sphinx-docs/source/contents.rst rename to _sources/contents.txt index 52a2786da8e..2165ad3cf23 100644 --- a/docs/sphinx-docs/source/contents.rst +++ b/_sources/contents.txt @@ -5,6 +5,7 @@ Node.JS MongoDB Driver Manual Contents .. toctree:: :maxdepth: 3 + api-generated/mongoclient api-generated/db api-generated/collection api-generated/admin diff --git a/_sources/driver-articles/anintroductionto1_1and2_2.txt b/_sources/driver-articles/anintroductionto1_1and2_2.txt new file mode 100644 index 00000000000..25d3e06d6ba --- /dev/null +++ b/_sources/driver-articles/anintroductionto1_1and2_2.txt @@ -0,0 +1,361 @@ +====================================== +Mongo Driver and Mongo DB 2.2 Features +====================================== + + For Mongo DB there are multiple new features and improvements in the driver. This include Mongos failover support, authentication, replicaset support, read preferences and aggregation. Let's move throught the different new features starting with. + + +---------------- +Read preferences +---------------- + + Read preferences is now backed by a specification and is more consistent across drivers. With read preferences you can control from where your Reads are happing in a Replicaset and from Mongo DB also in a shard. Let's go through the different types of read Preferences that are available and what they mean. + + + * **ReadPreference.PRIMARY:** Read from primary only. All operations produce an error (throw an exception where applicable) if primary is unavailable. Cannot be combined with tags **(This is the default.)** + * **ReadPreference.PRIMARY_PREFERRED:** Read from primary if available, otherwise a secondary. + * **ReadPreference.SECONDARY:** Read from secondary if available, otherwise error. + * **ReadPreference.SECONDARY_PREFERRED:** Read from a secondary if available, otherwise read from the primary. + * **ReadPreference.NEAREST:** All modes read from among the nearest candidates, but unlike other modes, NEAREST will include both the primary and all secondaries in the random selection. The name NEAREST is chosen to emphasize its use, when latency is most important. For I/O-bound users who want to distribute reads across all members evenly regardless of ping time, set secondaryAcceptableLatencyMS very high. See "Ping Times" below. **A strategy must be enabled on the ReplSet instance to use NEAREST as it requires intermittent setTimeout events, see Db class documentation** + + + + Additionally you can now use tags with all the read preferences to actively choose specific sets of servers in a replicatset or sharded system located in different data centers. The rules are fairly simple as outline below. A server member matches a tag set if its tags match all the tags in the set. For example, a member tagged **{ dc: 'ny', rack: 2, size: 'large' }** matches the tag set **{ dc: 'ny', rack: 2 }** . A member's extra tags don't affect whether it's a match. + + + + Using a read preference is very simple. Below are some examples using it at the db level, collection level and individual query level as well as an example using tags. + + + + Below is a simple example using readpreferences at the db level. + + + .. 
code-block:: javascript
+
+    var MongoClient = require('mongodb').MongoClient
+      , format = require('util').format;
+
+    var url = format("mongodb://%s,%s,%s/%s?replicaSet=%s&readPreference=%s"
+      , "localhost:27017"
+      , "localhost:27018"
+      , "localhost:27019"
+      , "exampleDb"
+      , "foo"
+      , "secondaryPreferred");
+
+    MongoClient.connect(url, function(err, db) {
+      if(!err) {
+        console.log("We are connected");
+      }
+    });
+
+
+
+  Below is a simple example using read preferences at the collection level.
+
+
+  .. code-block:: javascript
+
+    var MongoClient = require('mongodb').MongoClient
+      , ReadPreference = require('mongodb').ReadPreference
+      , format = require('util').format;
+
+    var url = format("mongodb://%s,%s,%s/%s?replicaSet=%s&readPreference=%s"
+      , "localhost:27017"
+      , "localhost:27018"
+      , "localhost:27019"
+      , "exampleDb"
+      , "foo"
+      , "secondaryPreferred");
+
+    MongoClient.connect(url, function(err, db) {
+      if(!err) {
+        console.log("We are connected");
+
+        var collection = db.collection('somecollection', {readPreference: ReadPreference.SECONDARY_PREFERRED});
+        collection.find({}).toArray(function(err, items) {
+          // Done reading from secondary if available
+        })
+      }
+    });
+
+
+
+  Below is a simple example using read preferences at the query level.
+
+
+  .. code-block:: javascript
+
+    var MongoClient = require('mongodb').MongoClient
+      , ReadPreference = require('mongodb').ReadPreference
+      , format = require('util').format;
+
+    var url = format("mongodb://%s,%s,%s/%s?replicaSet=%s&readPreference=%s"
+      , "localhost:27017"
+      , "localhost:27018"
+      , "localhost:27019"
+      , "exampleDb"
+      , "foo"
+      , "secondaryPreferred");
+
+    MongoClient.connect(url, function(err, db) {
+      if(!err) {
+        console.log("We are connected");
+
+        var collection = db.collection('somecollection');
+        collection.find({}).setReadPreference(new ReadPreference(ReadPreference.SECONDARY_PREFERRED)).toArray(function(err, items) {
+          // Done reading from secondary if available
+        })
+      }
+    });
+
+
+
+  Below is a simple example using a read preference with tags at the query level. This example will pick from the set of servers tagged with **dc1:ny** .
+
+
+  .. code-block:: javascript
+
+    var MongoClient = require('mongodb').MongoClient
+      , ReadPreference = require('mongodb').ReadPreference
+      , format = require('util').format;
+
+    var url = format("mongodb://%s,%s,%s/%s?replicaSet=%s&readPreference=%s"
+      , "localhost:27017"
+      , "localhost:27018"
+      , "localhost:27019"
+      , "exampleDb"
+      , "foo"
+      , "secondaryPreferred");
+
+    MongoClient.connect(url, function(err, db) {
+      if(!err) {
+        console.log("We are connected");
+
+        var collection = db.collection('somecollection');
+        collection.find({}).setReadPreference(new ReadPreference(ReadPreference.SECONDARY_PREFERRED, {"dc1":"ny"})).toArray(function(err, items) {
+          // Done reading from secondary if available
+        })
+      }
+    });
+
+
+------
+Mongos
+------
+
+  There is now a separate Server type for Mongos that handles not only read preferences but also failover and picking the nearest Mongos proxy to your application. To use it, simply do the following:
+
+
+  .. code-block:: javascript
+
+    var MongoClient = require('mongodb').MongoClient
+      , format = require('util').format;
+
+    var url = format("mongodb://%s,%s/%s"
+      , "localhost:50000"
+      , "localhost:50001"
+      , "exampleDb");
+
+    MongoClient.connect(url, function(err, db) {
+      if(!err) {
+        console.log("We are connected");
+      }
+
+      db.close();
+    });
+
+
+
+  Read preferences also work with Mongos from MongoDB 2.2 or higher, allowing you to create more complex deployment setups.
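+
+  Putting the two together, below is a brief sketch (an illustration only; the two mongos processes on localhost:50000 and localhost:50001 and the exampleDb database are assumed, adjust them to your own deployment) that passes the read preference to a Mongos setup directly in the connection string:
+
+
+  .. code-block:: javascript
+
+    var MongoClient = require('mongodb').MongoClient
+      , format = require('util').format;
+
+    // Assumed mongos hosts and database name, change these as needed
+    var url = format("mongodb://%s,%s/%s?readPreference=%s"
+      , "localhost:50000"
+      , "localhost:50001"
+      , "exampleDb"
+      , "secondaryPreferred");
+
+    MongoClient.connect(url, function(err, db) {
+      if(err) return console.error(err);
+
+      // Reads issued through this db instance are routed by mongos
+      // according to the requested read preference
+      db.collection('somecollection').find({}).toArray(function(err, items) {
+        console.dir(items);
+        db.close();
+      });
+    });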
+ + +---------------------------- +Aggregation framework helper +---------------------------- + + The MongoDB aggregation framework provides a means to calculate aggregate values without having to use map-reduce. While map-reduce is powerful, using map-reduce is more difficult than necessary for many simple aggregation tasks, such as totaling or averaging field values. + + + + The driver supports the aggregation framework by adding a helper at the collection level to execute an aggregation pipeline against the documents in that collection. Below is a simple example of using the aggregation framework to perform a group by tags. + + + .. code-block:: javascript + + var MongoClient = require('mongodb').MongoClient; + + // Some docs for insertion + var docs = [{ + title : "this is my title", author : "bob", posted : new Date() , + pageViews : 5, tags : [ "fun" , "good" , "fun" ], other : { foo : 5 }, + comments : [ + { author :"joe", text : "this is cool" }, { author :"sam", text : "this is bad" } + ]}]; + + MongoClient.connect("mongodb://localhost:27017/exampleDb", function(err, db) { + // Create a collection + db.createCollection('test', function(err, collection) { + // Insert the docs + collection.insert(docs, {safe:true}, function(err, result) { + + // Execute aggregate, notice the pipeline is expressed as an Array + collection.aggregate([ + { $project : { + author : 1, + tags : 1 + }}, + { $unwind : "$tags" }, + { $group : { + _id : {tags : "$tags"}, + authors : { $addToSet : "$author" } + }} + ], function(err, result) { + console.dir(result); + db.close(); + }); + }); + }); + }); + + +----------------------------------- +Replicaset improvements and changes +----------------------------------- + + Replicasets now return to the driver when a primary has been identified allowing for faster connect time meaning the application does not have to wait for the whole set to be identified before being able to run. That said any secondary queries using read preference **ReadPreference.SECONDARY** might fail until at least one secondary is up. To aid in development of layers above the driver now emits to new events. + + + * **open** is emitted when the driver is ready to be used. + * **fullsetup** is emitted once the whole replicaset is up and running + + + + To ensure better control over timeouts when attempting to connect to replicaset members that might be down there is now two timeout settings. + + + * **connectTimeoutMS:** set the timeout for the intial connect to the mongod or mongos instance. + * **socketTimeoutMS:** set the timeout for established connections to the mongod or mongos instance. + + +--------------------------------- +High availability "on" by default +--------------------------------- + + The high availability code has been rewritten to run outside a setTimeout allowing for better control and handling. It's also on by default now. It can be disabled using the following settings on the ReplSet class. + + + * **ha** {Boolean, default:true}, turn on high availability. + * **haInterval** {Number, default:2000}, time between each replicaset status check. + + This allows the driver to discover new replicaset members or replicaset members who left the set and then returned. + + +-------------------------------- +Better stream support for GridFS +-------------------------------- + + GridFS now supports the streaming api's for node allowing you to pipe content either into or out of a Gridstore object making it easy to work with other streaming api's available. 
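+
+  Because a GridStore behaves like an ordinary node stream, it can be piped anywhere a stream is accepted. As a small sketch (illustrative only; it assumes a file named test_stream_write already exists in GridFS, as created by the first example below, and that port 8080 is free), a stored file can be streamed straight into an HTTP response:
+
+
+  .. code-block:: javascript
+
+    var mongo = require('mongodb')
+      , http = require('http')
+      , MongoClient = mongo.MongoClient
+      , GridStore = mongo.GridStore;
+
+    MongoClient.connect("mongodb://localhost:27017/exampleDb", function(err, db) {
+      if(err) return console.error(err);
+
+      http.createServer(function(req, res) {
+        // Open the (assumed existing) GridFS file in read mode and pipe it out
+        var gridStore = new GridStore(db, "test_stream_write", "r");
+        gridStore.pipe(res);
+      }).listen(8080);
+    });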
+ + + + A simple example is shown below for how to stream from a file on disk to a gridstore object. + + + .. code-block:: javascript + + var MongoClient = require('mongodb').MongoClient + , fs = require('fs') + , GridStore = mongo.GridStore; + + // Some docs for insertion + var docs = [{ + title : "this is my title", author : "bob", posted : new Date() , + pageViews : 5, tags : [ "fun" , "good" , "fun" ], other : { foo : 5 }, + comments : [ + { author :"joe", text : "this is cool" }, { author :"sam", text : "this is bad" } + ]}]; + + MongoClient.connect("mongodb://localhost:27017/exampleDb", function(err, db) { + // Set up gridStore + var gridStore = new GridStore(client, "test_stream_write", "w"); + // Create a file reader stream to an object + var fileStream = fs.createReadStream("./test/gridstore/test_gs_working_field_read.pdf"); + gridStore.on("close", function(err) { + // Just read the content and compare to the raw binary + GridStore.read(client, "test_stream_write", function(err, gridData) { + var fileData = fs.readFileSync("./test/gridstore/test_gs_working_field_read.pdf"); + test.deepEqual(fileData, gridData); + test.done(); + }) + }); + + // Pipe it through to the gridStore + fileStream.pipe(gridStore); + }) + + + + A simple example is shown below for how to stream from a gridfs file to a file on disk. + + + .. code-block:: javascript + + var MongoClient = require('mongodb').MongoClient + , fs = require('fs') + , GridStore = mongo.GridStore; + + MongoClient.connect("mongodb://localhost:27017/exampleDb", function(err, db) { + // Set up gridStore + var gridStore = new GridStore(client, "test_stream_write_2", "w"); + gridStore.writeFile("./test/gridstore/test_gs_working_field_read.pdf", function(err, result) { + // Open a readable gridStore + gridStore = new GridStore(client, "test_stream_write_2", "r"); + // Create a file write stream + var fileStream = fs.createWriteStream("./test_stream_write_2.tmp"); + fileStream.on("close", function(err) { + // Read the temp file and compare + var compareData = fs.readFileSync("./test_stream_write_2.tmp"); + var originalData = fs.readFileSync("./test/gridstore/test_gs_working_field_read.pdf"); + test.deepEqual(originalData, compareData); + test.done(); + }) + // Pipe out the data + gridStore.pipe(fileStream); + }); + }) + + +------------- +toBSON method +------------- + + If in an object now has a toBSON function it will be called to for custom serialization of the object instance. This can be used to just serialize wanted fields. Deserializing is not affected by this and the application is responsible for deflating objects again. + + + + A simple example below + + + .. code-block:: javascript + + var customObject = { + a:1 + b:2 + toBSON: function() { + return {a:this.a} + } + } + + +--------------------------- +Much faster BSON C++ parser +--------------------------- + + Thanks to the awesome people at Lucasfilm Singapore we have a new BSON C++ serializer/deserializer that performs on average 40-50% faster than the current implementation. + + +------------------- +Other minor changes +------------------- + * Connection pool is now set to 5 by default. Override if there is need for either a bigger or smaller pool per node process. + * Gridfs now ensures an index on the chunks collection on file_id. 
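+
+  Rounding off the toBSON section above, here is a small self-contained sketch (illustrative only; it assumes a local mongod and uses an arbitrary collection name, tobson_example) showing the custom serializer being applied on insert:
+
+
+  .. code-block:: javascript
+
+    var MongoClient = require('mongodb').MongoClient;
+
+    MongoClient.connect("mongodb://localhost:27017/exampleDb", function(err, db) {
+      if(err) return console.error(err);
+
+      // Only the fields returned from toBSON end up in the stored document
+      var customObject = {
+        a: 1,
+        b: 2,
+        toBSON: function() {
+          return {a: this.a};
+        }
+      };
+
+      var collection = db.collection('tobson_example');
+      collection.insert(customObject, {safe:true}, function(err, result) {
+        // The stored document contains a but not b
+        collection.findOne({a: 1}, function(err, doc) {
+          console.dir(doc);
+          db.close();
+        });
+      });
+    });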
+
diff --git a/_sources/driver-articles/index.txt b/_sources/driver-articles/index.txt
new file mode 100644
index 00000000000..ecf95659ccb
--- /dev/null
+++ b/_sources/driver-articles/index.txt
@@ -0,0 +1,9 @@
+==================
+Updates
+==================
+
+.. toctree::
+   :maxdepth: 2
+
+   mongoclient
+   anintroductionto1_1and2_2
diff --git a/_sources/driver-articles/mongoclient.txt b/_sources/driver-articles/mongoclient.txt
new file mode 100644
index 00000000000..f86ae80641c
--- /dev/null
+++ b/_sources/driver-articles/mongoclient.txt
@@ -0,0 +1,273 @@
+=====================================================
+MongoClient or how to connect in a new and better way
+=====================================================
+
+  From driver version **1.2** we are introducing a new connection class that has the same name across all our official drivers. This is to ensure that we present a recognizable front for all our APIs. This does not mean your existing application will break, but we encourage you to use the new connection API to simplify your application development.
+
+
+
+  Furthermore, the new connection class **MongoClient** acknowledges all writes to MongoDB, in contrast to the existing connection class Db, which has acknowledgements turned off. Let's take a tour of the MongoClient functions.
+
+
+  .. code-block:: javascript
+
+    MongoClient = function(server, options);
+
+    MongoClient.prototype.open
+
+    MongoClient.prototype.close
+
+    MongoClient.prototype.db
+
+    MongoClient.connect
+
+
+
+  Outlined above is the complete MongoClient interface. The methods **open** , **close** and **db** work very much like the existing methods on the **Db** class. The main difference, as you may have noticed, is that the constructor is missing the **database name** required by Db. Let's show a simple connection using **open** , as a code example speaks a thousand words.
+
+
+  .. code-block:: javascript
+
+    var MongoClient = require('mongodb').MongoClient
+      , Server = require('mongodb').Server;
+
+    var mongoClient = new MongoClient(new Server('localhost', 27017));
+    mongoClient.open(function(err, mongoClient) {
+      var db1 = mongoClient.db("mydb");
+
+      mongoClient.close();
+    });
+
+
+
+  Notice that you configure the MongoClient just as you would have configured the Db object. The main difference is that you access the db instances using the **db** method on the MongoClient object instead of using the Db instance directly as you would previously. Doesn't that seem more intuitive than the previous API? MongoClient also supports the same options as the Db instance you would previously have created.
+
+
+
+  So with a minimal change in our app we can apply the new MongoClient connection code. But there is more, and one direction you might want to consider in the future is the MongoDB connection string.
+
+
+-------------------------
+The URL connection format
+-------------------------
+  .. code-block:: javascript
+
+    mongodb://[username:password@]host1[:port1][,host2[:port2],...[,hostN[:portN]]][/[database][?options]]
+
+
+
+  The URL format is unified across the official drivers from 10gen, with some options not supported by some drivers for natural reasons. The ones not supported by the Node.js driver are left out for simplicity's sake.
+
+
+  * **mongodb://** is a required prefix to identify that this is a string in the standard connection format.
+  * **username:password@** are optional. If given, the driver will attempt to log in to a database after connecting to a database server.
+
+ * **host1** is the only required part of the URI. It identifies either a hostname, IP address, or unix domain socket + * **:portX** is optional and defaults to :27017 if not provided. + * **/database** is the name of the database to login to and thus is only relevant if the username:password@ syntax is used. If not specified the "admin" database will be used by default. + * **?options** are connection options. Note that if database is absent there is still a / required between the last host and the ? introducing the options. Options are name=value pairs and the pairs are separated by "&". For any unrecognized or unsupported option, a driver should log a warning and continue processing. A driver should not support any options that are not explicitly defined in this specification. This is in order to reduce the likelihood that different drivers will support overlapping that differ in small but incompatible ways (like different name, different values, or different default value). + + + * **replicaSet=name** * The driver verifies that the name of the replica set it connects to matches this name. Implies that the hosts given are a seed list, and the driver will attempt to find all members of the set. * No default value. + + + * **ssl=true|false|prefer** + * true: the driver initiates each connections with SSL * false: the driver initiates each connection without SSL * prefer: the driver tries to initiate each connection with SSL, and falls back to without SSL if it fails. * Default value is false. + * **connectTimeoutMS=ms** + * How long a connection can take to be opened before timing out. * Current driver behavior already differs on this, so default must be left to each driver. For new implementations, the default should be to never timeout. + * **socketTimeoutMS=ms** + * How long a send or receive on a socket can take before timing out. * Current driver behavior already differs on this, so default must be left to each driver. For new implementations, the default should be to never timeout. + + + * **maxPoolSize=n:** The maximum number of connections in the connection pool * Default value is 5 + + + + More detailed information about write concerns can be found at `http://www.mongodb.org/display/DOCS/getLastError+Command `_ + + + * **w=wValue** + * For numeric values above 1, the driver adds { w : wValue } to the getLastError command. * wValue is typically a number, but can be any string in order to allow for specifications like "majority" * Default value is 1. + * wValue == -1 ignore network errors * wValue == 0 no write acknowledgement * wValue == 1 perform a write acknowledgement * wValue == 2 perform a write acknowledgement across primary and one secondary * wValue == 'majority' perform a write acknowledgement across the majority of servers in the replicaset * wValue == 'tag name' perform a write acknowledgement against the replicaset tag name + * **wtimeoutMS=ms** + * The driver adds { wtimeout : ms } to the getlasterror command. * Used in combination with w * No default value + * **journal=true|false** + * true: Sync to journal. * false: the driver does not add j to the getlasterror command * Default value is false + * **fsync=true|false** + * true: Sync to disk. * false: the driver does not add fsync to the getlasterror command * Default value is false * If conflicting values for fireAndForget, and any write concern are passed the driver should raise an exception about the conflict. 
+ + + * **authSource=string:** Used when the user for authentication is stored in another database using indirect authentication. * Default value is null + + + * **slaveOk=true|false:** Whether a driver connected to a replica set will send reads to slaves/secondaries. + * Default value is false + * **readPreference=enum:** The read preference for this connection. If set, it overrides any slaveOk value. + * Enumerated values: * primary * primaryPreferred * secondary * secondaryPreferred * nearest * Default value is primary + * **readPreferenceTags=string.** A representation of a tag set as a comma-separated list of colon-separated key-value pairs, e.g. **dc:ny,rack:1** . Spaces should be stripped from beginning and end of all keys and values. To specify a list of tag sets, using multiple readPreferenceTags, e.g. **readPreferenceTags=dc:ny,rack:1&readPreferenceTags=dc:ny&readPreferenceTags=** + * Note the empty value, it provides for fallback to any other secondary server if none is available * Order matters when using multiple readPreferenceTags * There is no default value + + +------------------- +MongoClient.connect +------------------- + + The url format can be used with MongoClient.connect. Where possible MongoClient will pick the best possible default values for options but they can be overridden. This includes setting **auto_reconnect to true** and **native_parser to true if it's available** . Below are some example on how to connect to a single server a replicaset and a sharded system using **MongoClient.connect** + + + .. code-block:: javascript + + var MongoClient = require('mongodb').MongoClient; + + MongoClient.connect("mongodb://localhost:27017/integration_test", function(err, db) { + test.equal(null, err); + test.ok(db != null); + + db.collection("replicaset_mongo_client_collection").update({a:1}, {b:1}, {upsert:true}, function(err, result) { + test.equal(null, err); + test.equal(1, result); + + db.close(); + test.done(); + }); + }); + + + .. code-block:: javascript + + var MongoClient = require('mongodb').MongoClient; + + MongoClient.connect("mongodb://localhost:30000,localhost:30001/integration_test_?w=0&readPreference=secondary", function(err, db) { + test.equal(null, err); + test.ok(db != null); + + db.collection("replicaset_mongo_client_collection").update({a:1}, {b:1}, {upsert:true}, function(err, result) { + test.equal(null, err); + test.equal(1, result); + + db.close(); + test.done(); + }); + }); + + + .. code-block:: javascript + + var MongoClient = require('mongodb').MongoClient; + + MongoClient.connect("mongodb://localhost:50000,localhost:50001/integration_test_?w=0&readPreference=secondary", function(err, db) { + test.equal(null, err); + test.ok(db != null); + + db.collection("replicaset_mongo_client_collection").update({a:1}, {b:1}, {upsert:true}, function(err, result) { + test.equal(null, err); + test.equal(1, result); + + db.close(); + test.done(); + }); + }); + + + + Notice that when connecting to the sharded system it's pretty much the same url as for connecting to the replicaset. This is because the driver itself figures out if it's a replicaset or a set of Mongos proxies it's connecting to. No special care is needed to specify if it's one or the other. This is in contrast to having to use the **ReplSet** or **Mongos** instances when using the **open** command. 
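+
+  The write concern options from the URL format section can be combined in the same connection string. Below is a brief sketch (illustrative only; it reuses the example replica set hosts above and an arbitrary collection name) requesting majority-acknowledged, journaled writes with a five second timeout:
+
+
+  .. code-block:: javascript
+
+    var MongoClient = require('mongodb').MongoClient;
+
+    MongoClient.connect("mongodb://localhost:30000,localhost:30001/integration_test_?w=majority&journal=true&wtimeoutMS=5000", function(err, db) {
+      if(err) return console.error(err);
+
+      db.collection("write_concern_example").insert({a:1}, function(err, result) {
+        // The callback fires once a majority of the members have acknowledged the
+        // write and it has been journaled, or err is set if wtimeoutMS expires first
+        console.log(err, result);
+        db.close();
+      });
+    });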
+ + +--------------------------- +MongoClient.connect options +--------------------------- + + The connect function also takes a hash of options divided into db/server/replset/mongos allowing you to tweak options not directly supported by the unified url string format. To use these options you do pass in a has like this. + + + .. code-block:: javascript + + var MongoClient = require('mongodb').MongoClient; + + MongoClient.connect("mongodb://localhost:27017/integration_test_?", { + db: { + native_parser: false + }, + server: { + socketOptions: { + connectTimeoutMS: 500 + } + }, + replSet: {}, + mongos: {} + }, function(err, db) { + test.equal(null, err); + test.ok(db != null); + + db.collection("replicaset_mongo_client_collection").update({a:1}, {b:1}, {upsert:true}, function(err, result) { + test.equal(null, err); + test.equal(1, result); + + db.close(); + test.done(); + }); + }); + + + + Below are all the options supported for db/server/replset/mongos. + + +---------------------------------------------------------------------------------------------------- +db: A hash of options at the db level overriding or adjusting functionality not supported by the url +---------------------------------------------------------------------------------------------------- + * **w** , {Number/String, > -1 || 'majority'} the write concern for the operation where < 1 is no acknowledgment of write and w >= 1 or w = 'majority' acknowledges the write + * **wtimeout** , {Number, 0} set the timeout for waiting for write concern to finish (combines with w option) + * **fsync** , (Boolean, default:false) write waits for fsync before returning + * **journal** , (Boolean, default:false) write waits for journal sync before returning + * **readPreference** {String}, the preferred read preference (ReadPreference.PRIMARY, ReadPreference.PRIMARY *PREFERRED, ReadPreference.SECONDARY, ReadPreference.SECONDARY* PREFERRED, ReadPreference.NEAREST). + * **native_parser** {Boolean, default:false}, use c++ bson parser. + * **forceServerObjectId** {Boolean, default:false}, force server to create _id fields instead of client. + * **pkFactory** {Object}, object overriding the basic ObjectID primary key generation. + * **serializeFunctions** {Boolean, default:false}, serialize functions. + * **raw** {Boolean, default:false}, perform operations using raw bson buffers. + * **recordQueryStats** {Boolean, default:false}, record query statistics during execution. + * **retryMiliSeconds** {Number, default:5000}, number of milliseconds between retries. + * **numberOfRetries** {Number, default:5}, number of retries off connection. + + +----------------------------------------------------------------------- +server: A hash of options at the server level not supported by the url. +----------------------------------------------------------------------- + .. code-block:: javascript + + * **readPreference** {String, default:null}, set's the read preference (ReadPreference.PRIMARY, ReadPreference.PRIMARY_PREFERRED, ReadPreference.SECONDARY, ReadPreference.SECONDARY_PREFERRED, ReadPreference.NEAREST) + * **ssl** {Boolean, default:false}, use ssl connection (needs to have a mongod server with ssl support) + * **slaveOk** {Boolean, default:false}, legacy option allowing reads from secondary, use **readPrefrence** instead. + * **poolSize** {Number, default:1}, number of connections in the connection pool, set to 1 as default for legacy reasons. 
+ * **socketOptions** {Object, default:null}, an object containing socket options to use (noDelay:(boolean), keepAlive:(number), connectTimeoutMS:(number), socketTimeoutMS:(number)) + * **logger** {Object, default:null}, an object representing a logger that you want to use, needs to support functions debug, log, error **({error:function(message, object) {}, log:function(message, object) {}, debug:function(message, object) {}})**. + * **auto_reconnect** {Boolean, default:false}, reconnect on error. + * **disableDriverBSONSizeCheck** {Boolean, default:false}, force the server to error if the BSON message is to big + + +------------------------------------------------------------------------- +replSet: A hash of options at the replSet level not supported by the url. +------------------------------------------------------------------------- + .. code-block:: javascript + + * **ha** {Boolean, default:true}, turn on high availability. + * **haInterval** {Number, default:2000}, time between each replicaset status check. + * **reconnectWait** {Number, default:1000}, time to wait in milliseconds before attempting reconnect. + * **retries** {Number, default:30}, number of times to attempt a replicaset reconnect. + * **rs_name** {String}, the name of the replicaset to connect to. + * **socketOptions** {Object, default:null}, an object containing socket options to use (noDelay:(boolean), keepAlive:(number), connectTimeoutMS:(number), socketTimeoutMS:(number)) + * **readPreference** {String}, the preferred read preference (ReadPreference.PRIMARY, ReadPreference.PRIMARY_PREFERRED, ReadPreference.SECONDARY, ReadPreference.SECONDARY_PREFERRED, ReadPreference.NEAREST). + * **strategy** {String, default:null}, selection strategy for reads choose between (ping and statistical, default is round-robin) + * **secondaryAcceptableLatencyMS** {Number, default:15}, sets the range of servers to pick when using NEAREST (lowest ping ms + the latency fence, ex: range of 1 to (1 + 15) ms) + * **connectArbiter** {Boolean, default:false}, sets if the driver should connect to arbiters or not. + + +----------------------------------------------------------------------- +mongos: A hash of options at the mongos level not supported by the url. +----------------------------------------------------------------------- + .. code-block:: javascript + + * **socketOptions** {Object, default:null}, an object containing socket options to use (noDelay:(boolean), keepAlive:(number), connectTimeoutMS:(number), socketTimeoutMS:(number)) + * **ha** {Boolean, default:true}, turn on high availability, attempts to reconnect to down proxies + * **haInterval** {Number, default:2000}, time between each replicaset status check. + diff --git a/_sources/github/github.txt b/_sources/github/github.txt new file mode 100644 index 00000000000..7d5bcce231d --- /dev/null +++ b/_sources/github/github.txt @@ -0,0 +1,1044 @@ +============================================== +Github libraries and projects using the driver +============================================== + +Web frameworks using MongoDB +---------------------------- + +.. topic:: nwt + + Node Web Toolkit - nwt is a modern javascript framework. + + .. list-table:: + :widths: 25 85 + :header-rows: 0 + + * - **Activity Level** + - .. image:: ../static/active.png + * - **Last push** + - 21:2:6 on 11/10/2012 + * - **Homepage** + - http:// + * - **Url** + - https://github.com/nwtjs/nwt + * - **Clone Url** + - https://github.com/nwtjs/nwt.git + * - **Forks** + - 4 + * - **Watchers** + - 16 + +.. 
topic:: caboose + + Coffeescript-happy express-based server-side MVC framework loosely based on rails + + .. list-table:: + :widths: 25 85 + :header-rows: 0 + + * - **Activity Level** + - .. image:: ../static/active.png + * - **Last push** + - 6:56:54 on 16/11/2012 + * - **Homepage** + - http:// + * - **Url** + - https://github.com/mattinsler/caboose + * - **Clone Url** + - https://github.com/mattinsler/caboose.git + * - **Forks** + - 6 + * - **Watchers** + - 17 + +.. topic:: node-bread + + file based static website/blog generator + + .. list-table:: + :widths: 25 85 + :header-rows: 0 + + * - **Activity Level** + - .. image:: ../static/nonactive.png + * - **Last push** + - 23:50:52 on 7/7/2012 + * - **Homepage** + - http://https://npmjs.org/package/bread + * - **Url** + - https://github.com/pvorb/node-bread + * - **Clone Url** + - https://github.com/pvorb/node-bread.git + * - **Forks** + - 3 + * - **Watchers** + - 3 + +.. topic:: arrjs + + HTTP and WebSocket application routing + + .. list-table:: + :widths: 25 85 + :header-rows: 0 + + * - **Activity Level** + - .. image:: ../static/nonactive.png + * - **Last push** + - 17:2:2 on 8/1/2012 + * - **Homepage** + - http:// + * - **Url** + - https://github.com/tjanczuk/arrjs + * - **Clone Url** + - https://github.com/tjanczuk/arrjs.git + * - **Forks** + - 2 + * - **Watchers** + - 25 + +Object Document Modeling Libraries +---------------------------------- + +.. topic:: noid + + ODM for MongoDB/Node.js with CoffeeScript + + .. list-table:: + :widths: 25 85 + :header-rows: 0 + + * - **Activity Level** + - .. image:: ../static/nonactive.png + * - **Last push** + - 1:20:59 on 22/11/2010 + * - **Homepage** + - http:// + * - **Url** + - https://github.com/chrisgibson/noid + * - **Clone Url** + - https://github.com/chrisgibson/noid.git + * - **Forks** + - 1 + * - **Watchers** + - 8 + +.. topic:: mongoose + + Node.JS MongoDB utility library with ORM-like functionality + + .. list-table:: + :widths: 25 85 + :header-rows: 0 + + * - **Activity Level** + - .. image:: ../static/active.png + * - **Last push** + - 3:40:3 on 14/2/2013 + * - **Homepage** + - http:// + * - **Url** + - https://github.com/LearnBoost/mongoose + * - **Clone Url** + - https://github.com/LearnBoost/mongoose.git + * - **Forks** + - 378 + * - **Watchers** + - 2883 + +.. topic:: mongo-model + + Model for MongoDB (Node.JS) + + .. list-table:: + :widths: 25 85 + :header-rows: 0 + + * - **Activity Level** + - .. image:: ../static/nonactive.png + * - **Last push** + - 11:37:19 on 1/6/2012 + * - **Homepage** + - http://http://alexeypetrushin.github.com/mongo-model + * - **Url** + - https://github.com/alexeypetrushin/mongo-model + * - **Clone Url** + - https://github.com/alexeypetrushin/mongo-model.git + * - **Forks** + - 5 + * - **Watchers** + - 51 + +.. topic:: leaf + + A MongoDB ORM for Node + + .. list-table:: + :widths: 25 85 + :header-rows: 0 + + * - **Activity Level** + - .. image:: ../static/nonactive.png + * - **Last push** + - 20:2:4 on 29/7/2010 + * - **Homepage** + - http:// + * - **Url** + - https://github.com/colladow/leaf + * - **Clone Url** + - https://github.com/colladow/leaf.git + * - **Forks** + - 1 + * - **Watchers** + - 6 + +Content Management Systems +-------------------------- + +.. topic:: nocr-mongo + + nodejs Content Repository implementation for mongodb + + .. list-table:: + :widths: 25 85 + :header-rows: 0 + + * - **Activity Level** + - .. 
image:: ../static/nonactive.png + * - **Last push** + - 6:25:45 on 18/3/2012 + * - **Homepage** + - http:// + * - **Url** + - https://github.com/karacos/nocr-mongo + * - **Clone Url** + - https://github.com/karacos/nocr-mongo.git + * - **Forks** + - 0 + * - **Watchers** + - 4 + +.. topic:: calipso + + Calipso is a simple NodeJS content management system based on Express, Connect & Mongoose. + + .. list-table:: + :widths: 25 85 + :header-rows: 0 + + * - **Activity Level** + - .. image:: ../static/active.png + * - **Last push** + - 3:51:41 on 25/0/2013 + * - **Homepage** + - http://calip.so + * - **Url** + - https://github.com/cliftonc/calipso + * - **Clone Url** + - https://github.com/cliftonc/calipso.git + * - **Forks** + - 183 + * - **Watchers** + - 1045 + +Grid FS libraries or tools +-------------------------- + +.. topic:: nettle + + On-the-fly processing framework for Node.js and MongoDB + + .. list-table:: + :widths: 25 85 + :header-rows: 0 + + * - **Activity Level** + - .. image:: ../static/nonactive.png + * - **Last push** + - 20:36:16 on 11/6/2012 + * - **Homepage** + - http:// + * - **Url** + - https://github.com/scttnlsn/nettle + * - **Clone Url** + - https://github.com/scttnlsn/nettle.git + * - **Forks** + - 0 + * - **Watchers** + - 9 + +Wrapper libraries to ease the use of development or provide simple ODM like behaviours +-------------------------------------------------------------------------------------- + +.. topic:: node-mongoskin + + The future layer for node-mongodb-native. + + .. list-table:: + :widths: 25 85 + :header-rows: 0 + + * - **Activity Level** + - .. image:: ../static/nonactive.png + * - **Last push** + - 3:50:53 on 4/7/2012 + * - **Homepage** + - http:// + * - **Url** + - https://github.com/guileen/node-mongoskin + * - **Clone Url** + - https://github.com/guileen/node-mongoskin.git + * - **Forks** + - 7 + * - **Watchers** + - 41 + +.. topic:: mongoq + + Use mongoDB like this: mongoq('mongodb://localhost/db').collection('users').find().toArray(function(error, docs){}); + + .. list-table:: + :widths: 25 85 + :header-rows: 0 + + * - **Activity Level** + - .. image:: ../static/nonactive.png + * - **Last push** + - 7:17:31 on 19/11/2011 + * - **Homepage** + - http://http://zzdhidden.github.com/mongoq + * - **Url** + - https://github.com/zzdhidden/mongoq + * - **Clone Url** + - https://github.com/zzdhidden/mongoq.git + * - **Forks** + - 2 + * - **Watchers** + - 54 + +.. topic:: mongolia + + Flexible non-magical layer for the nodejs MongoDB driver + + .. list-table:: + :widths: 25 85 + :header-rows: 0 + + * - **Activity Level** + - .. image:: ../static/active.png + * - **Last push** + - 14:3:17 on 3/11/2012 + * - **Homepage** + - http:// + * - **Url** + - https://github.com/masylum/mongolia + * - **Clone Url** + - https://github.com/masylum/mongolia.git + * - **Forks** + - 12 + * - **Watchers** + - 105 + +.. topic:: mongojs + + a simple mongo module that implements the mongo api + + .. list-table:: + :widths: 25 85 + :header-rows: 0 + + * - **Activity Level** + - .. image:: ../static/active.png + * - **Last push** + - 0:14:0 on 14/2/2013 + * - **Homepage** + - http:// + * - **Url** + - https://github.com/gett/mongojs + * - **Clone Url** + - https://github.com/gett/mongojs.git + * - **Forks** + - 39 + * - **Watchers** + - 326 + +.. topic:: mongode + + Thin wrapper around node-mongodb-native that provides a simpler alternative calling style and provides collection binding. + + .. list-table:: + :widths: 25 85 + :header-rows: 0 + + * - **Activity Level** + - .. 
image:: ../static/nonactive.png + * - **Last push** + - 14:17:30 on 29/7/2012 + * - **Homepage** + - http:// + * - **Url** + - https://github.com/milewise/mongode + * - **Clone Url** + - https://github.com/milewise/mongode.git + * - **Forks** + - 7 + * - **Watchers** + - 56 + +.. topic:: node-mongodb-wrapper + + A wrapper for node-mongodb-native as close as possible to the command-line javascript driver. Why learn two interfaces? + + .. list-table:: + :widths: 25 85 + :header-rows: 0 + + * - **Activity Level** + - .. image:: ../static/active.png + * - **Last push** + - 19:38:37 on 24/0/2013 + * - **Homepage** + - http://http://i.tv + * - **Url** + - https://github.com/idottv/node-mongodb-wrapper + * - **Clone Url** + - https://github.com/idottv/node-mongodb-wrapper.git + * - **Forks** + - 9 + * - **Watchers** + - 45 + +.. topic:: mdoq-mongodb + + simplified mongodb-native api w/ middleware + + .. list-table:: + :widths: 25 85 + :header-rows: 0 + + * - **Activity Level** + - .. image:: ../static/nonactive.png + * - **Last push** + - 17:56:15 on 29/4/2012 + * - **Homepage** + - http:// + * - **Url** + - https://github.com/ritch/mdoq-mongodb + * - **Clone Url** + - https://github.com/ritch/mdoq-mongodb.git + * - **Forks** + - 0 + * - **Watchers** + - 3 + +.. topic:: ferret + + Adorable mongodb library for node.js + + .. list-table:: + :widths: 25 85 + :header-rows: 0 + + * - **Activity Level** + - .. image:: ../static/nonactive.png + * - **Last push** + - 15:2:55 on 2/7/2011 + * - **Homepage** + - http:// + * - **Url** + - https://github.com/Coreh/ferret + * - **Clone Url** + - https://github.com/Coreh/ferret.git + * - **Forks** + - 3 + * - **Watchers** + - 9 + +.. topic:: easymongo + + Easy MongoDB + + .. list-table:: + :widths: 25 85 + :header-rows: 0 + + * - **Activity Level** + - .. image:: ../static/active.png + * - **Last push** + - 16:28:18 on 8/0/2013 + * - **Homepage** + - http://http://simonenko.su/projects/easymongo + * - **Url** + - https://github.com/meritt/easymongo + * - **Clone Url** + - https://github.com/meritt/easymongo.git + * - **Forks** + - 3 + * - **Watchers** + - 12 + +REST api's around MongoDB or resource based libraries +----------------------------------------------------- + +.. topic:: mongodb-rest + + REST Server for MongoDB (using node.js) + + .. list-table:: + :widths: 25 85 + :header-rows: 0 + + * - **Activity Level** + - .. image:: ../static/active.png + * - **Last push** + - 21:12:16 on 20/11/2012 + * - **Homepage** + - http:// + * - **Url** + - https://github.com/tdegrunt/mongodb-rest + * - **Clone Url** + - https://github.com/tdegrunt/mongodb-rest.git + * - **Forks** + - 50 + * - **Watchers** + - 212 + +.. topic:: lazyBum + + Node.js RESTful web framework. + + .. list-table:: + :widths: 25 85 + :header-rows: 0 + + * - **Activity Level** + - .. image:: ../static/nonactive.png + * - **Last push** + - 3:35:54 on 10/11/2011 + * - **Homepage** + - http://http://streetsaheadllc.com + * - **Url** + - https://github.com/streets-ahead/lazyBum + * - **Clone Url** + - https://github.com/streets-ahead/lazyBum.git + * - **Forks** + - 1 + * - **Watchers** + - 2 + +.. topic:: GECK + + A lightweight, resource-based API framework for node.js + + .. list-table:: + :widths: 25 85 + :header-rows: 0 + + * - **Activity Level** + - .. 
image:: ../static/nonactive.png + * - **Last push** + - 17:22:29 on 23/7/2012 + * - **Homepage** + - http:// + * - **Url** + - https://github.com/Qard/GECK + * - **Clone Url** + - https://github.com/Qard/GECK.git + * - **Forks** + - 1 + * - **Watchers** + - 10 + +Test helpers and libraries +-------------------------- + +.. topic:: mongodb-fixtures + + Fixtures for mongodb + + .. list-table:: + :widths: 25 85 + :header-rows: 0 + + * - **Activity Level** + - .. image:: ../static/nonactive.png + * - **Last push** + - 9:59:58 on 10/5/2012 + * - **Homepage** + - http:// + * - **Url** + - https://github.com/tdegrunt/mongodb-fixtures + * - **Clone Url** + - https://github.com/tdegrunt/mongodb-fixtures.git + * - **Forks** + - 1 + * - **Watchers** + - 9 + +.. topic:: node-database-cleaner + + The simplest way to clean your database after tests + + .. list-table:: + :widths: 25 85 + :header-rows: 0 + + * - **Activity Level** + - .. image:: ../static/nonactive.png + * - **Last push** + - 12:35:16 on 7/4/2012 + * - **Homepage** + - http:// + * - **Url** + - https://github.com/emerleite/node-database-cleaner + * - **Clone Url** + - https://github.com/emerleite/node-database-cleaner.git + * - **Forks** + - 8 + * - **Watchers** + - 37 + +Tools or applications to manage your MongoDB's +---------------------------------------------- + +.. topic:: mon4mongo + + MongoDB management web server boosted with nodejs & express. + + .. list-table:: + :widths: 25 85 + :header-rows: 0 + + * - **Activity Level** + - .. image:: ../static/active.png + * - **Last push** + - 7:35:50 on 13/2/2013 + * - **Homepage** + - http:// + * - **Url** + - https://github.com/erhangundogan/mon4mongo + * - **Clone Url** + - https://github.com/erhangundogan/mon4mongo.git + * - **Forks** + - 0 + * - **Watchers** + - 6 + +Queue libraries using MongoDB +----------------------------- + +.. topic:: mojo + + Node.js job queue backed by MongoDB + + .. list-table:: + :widths: 25 85 + :header-rows: 0 + + * - **Activity Level** + - .. image:: ../static/active.png + * - **Last push** + - 17:16:38 on 24/1/2013 + * - **Homepage** + - http://http://wereHamster.github.com/mojo + * - **Url** + - https://github.com/wereHamster/mojo + * - **Clone Url** + - https://github.com/wereHamster/mojo.git + * - **Forks** + - 3 + * - **Watchers** + - 10 + +.. topic:: karait + + A ridiculously simple queuing system, with clients in various languages, built on top of MongoDB. + + .. list-table:: + :widths: 25 85 + :header-rows: 0 + + * - **Activity Level** + - .. image:: ../static/nonactive.png + * - **Last push** + - 20:21:36 on 4/11/2011 + * - **Homepage** + - http:// + * - **Url** + - https://github.com/bcoe/karait + * - **Clone Url** + - https://github.com/bcoe/karait.git + * - **Forks** + - 6 + * - **Watchers** + - 77 + +Logging libraries or applications +--------------------------------- + +.. topic:: logmeup-server + + LogMeUp Server - View any log files real-time in your web browser. + + .. list-table:: + :widths: 25 85 + :header-rows: 0 + + * - **Activity Level** + - .. image:: ../static/nonactive.png + * - **Last push** + - 21:53:23 on 17/4/2012 + * - **Homepage** + - http://http://logmeup.com + * - **Url** + - https://github.com/jprichardson/logmeup-server + * - **Clone Url** + - https://github.com/jprichardson/logmeup-server.git + * - **Forks** + - 0 + * - **Watchers** + - 6 + +.. topic:: nodeEventStore + + EventStore Implementation in node.js + + .. list-table:: + :widths: 25 85 + :header-rows: 0 + + * - **Activity Level** + - .. 
image:: ../static/nonactive.png + * - **Last push** + - 10:14:54 on 10/4/2012 + * - **Homepage** + - http://http:/jamuhl.github.com/nodeEventStore/ + * - **Url** + - https://github.com/KABA-CCEAC/nodeEventStore + * - **Clone Url** + - https://github.com/KABA-CCEAC/nodeEventStore.git + * - **Forks** + - 2 + * - **Watchers** + - 4 + +.. topic:: devnull + + dev/null, a powerful logging module for Node.js.. Because logging to dev/null is fast! <3 + + .. list-table:: + :widths: 25 85 + :header-rows: 0 + + * - **Activity Level** + - .. image:: ../static/active.png + * - **Last push** + - 9:53:54 on 2/0/2013 + * - **Homepage** + - http://dev/null + * - **Url** + - https://github.com/observing/devnull + * - **Clone Url** + - https://github.com/observing/devnull.git + * - **Forks** + - 0 + * - **Watchers** + - 21 + +Monitoring applications or libraries +------------------------------------ + +.. topic:: Informant + + A node based server monitor to get real-time status of your ops... + + .. list-table:: + :widths: 25 85 + :header-rows: 0 + + * - **Activity Level** + - .. image:: ../static/nonactive.png + * - **Last push** + - 21:20:40 on 31/0/2012 + * - **Homepage** + - http:// + * - **Url** + - https://github.com/base698/Informant + * - **Clone Url** + - https://github.com/base698/Informant.git + * - **Forks** + - 0 + * - **Watchers** + - 16 + +General frameworks over MongoDB +------------------------------- + +.. topic:: DJS + + A media sharing framework. + + .. list-table:: + :widths: 25 85 + :header-rows: 0 + + * - **Activity Level** + - .. image:: ../static/nonactive.png + * - **Last push** + - 22:48:0 on 14/2/2012 + * - **Homepage** + - http:// + * - **Url** + - https://github.com/khwang/DJS + * - **Clone Url** + - https://github.com/khwang/DJS.git + * - **Forks** + - 4 + * - **Watchers** + - 23 + +.. topic:: databank + + NoSQL abstraction layer for Node.js + + .. list-table:: + :widths: 25 85 + :header-rows: 0 + + * - **Activity Level** + - .. image:: ../static/active.png + * - **Last push** + - 15:40:19 on 14/0/2013 + * - **Homepage** + - http:// + * - **Url** + - https://github.com/evanp/databank + * - **Clone Url** + - https://github.com/evanp/databank.git + * - **Forks** + - 2 + * - **Watchers** + - 48 + +Translation libraries or frameworks +----------------------------------- + +.. topic:: dialect-http + + http client to manage your dialect translations + + .. list-table:: + :widths: 25 85 + :header-rows: 0 + + * - **Activity Level** + - .. image:: ../static/nonactive.png + * - **Last push** + - 1:43:11 on 8/11/2011 + * - **Homepage** + - http:// + * - **Url** + - https://github.com/masylum/dialect-http + * - **Clone Url** + - https://github.com/masylum/dialect-http.git + * - **Forks** + - 8 + * - **Watchers** + - 27 + +.. topic:: dialect + + Translations for nodejs + + .. list-table:: + :widths: 25 85 + :header-rows: 0 + + * - **Activity Level** + - .. image:: ../static/nonactive.png + * - **Last push** + - 9:21:6 on 22/2/2012 + * - **Homepage** + - http:// + * - **Url** + - https://github.com/masylum/dialect + * - **Clone Url** + - https://github.com/masylum/dialect.git + * - **Forks** + - 22 + * - **Watchers** + - 102 + +Libraries or Applications for analytics +--------------------------------------- + +.. topic:: cube + + Cube: A system for time series visualization. + + .. list-table:: + :widths: 25 85 + :header-rows: 0 + + * - **Activity Level** + - .. 
image:: ../static/active.png + * - **Last push** + - 2:33:34 on 8/1/2013 + * - **Homepage** + - http://http://square.github.com/cube + * - **Url** + - https://github.com/square/cube + * - **Clone Url** + - https://github.com/square/cube.git + * - **Forks** + - 243 + * - **Watchers** + - 2458 + +Libraries for the connect middleware +------------------------------------ + +.. topic:: connect-session-mongo + + MongoDB Session Storage for Connect Middleware + + .. list-table:: + :widths: 25 85 + :header-rows: 0 + + * - **Activity Level** + - .. image:: ../static/nonactive.png + * - **Last push** + - 5:6:21 on 24/2/2012 + * - **Homepage** + - http://https://github.com/bartt/connect-session-mongo + * - **Url** + - https://github.com/bartt/connect-session-mongo + * - **Clone Url** + - https://github.com/bartt/connect-session-mongo.git + * - **Forks** + - 8 + * - **Watchers** + - 14 + +.. topic:: connect-mongodb + + SessionStorage for connect's session middleware + + .. list-table:: + :widths: 25 85 + :header-rows: 0 + + * - **Activity Level** + - .. image:: ../static/active.png + * - **Last push** + - 17:52:14 on 21/0/2013 + * - **Homepage** + - http:// + * - **Url** + - https://github.com/masylum/connect-mongodb + * - **Clone Url** + - https://github.com/masylum/connect-mongodb.git + * - **Forks** + - 60 + * - **Watchers** + - 178 + +.. topic:: connect-mongo + + MongoDB session store for Connect. + + .. list-table:: + :widths: 25 85 + :header-rows: 0 + + * - **Activity Level** + - .. image:: ../static/active.png + * - **Last push** + - 15:2:26 on 21/0/2013 + * - **Homepage** + - http://http://kcbanner.github.com/connect-mongo/ + * - **Url** + - https://github.com/kcbanner/connect-mongo + * - **Clone Url** + - https://github.com/kcbanner/connect-mongo.git + * - **Forks** + - 67 + * - **Watchers** + - 207 + +Libraries or applications for continous integration +--------------------------------------------------- + +.. topic:: concrete + + Simple continuous integration server written with NodeJS and CoffeeScript + + .. list-table:: + :widths: 25 85 + :header-rows: 0 + + * - **Activity Level** + - .. image:: ../static/nonactive.png + * - **Last push** + - 20:18:30 on 9/6/2012 + * - **Homepage** + - http://http://ryankee.github.com/concrete + * - **Url** + - https://github.com/ryankee/concrete + * - **Clone Url** + - https://github.com/ryankee/concrete.git + * - **Forks** + - 39 + * - **Watchers** + - 248 + +Exampe applications +------------------- + +.. topic:: nodejs-blackboard + + Testing nodejs (with socket.io) + + .. list-table:: + :widths: 25 85 + :header-rows: 0 + + * - **Activity Level** + - .. image:: ../static/nonactive.png + * - **Last push** + - 3:50:52 on 4/6/2012 + * - **Homepage** + - http://node.kamikazepanda.com + * - **Url** + - https://github.com/gotik/nodejs-blackboard + * - **Clone Url** + - https://github.com/gotik/nodejs-blackboard.git + * - **Forks** + - 8 + * - **Watchers** + - 27 + +.. topic:: node-mongo-cms + + standard blog written in nodejs (express) and mongodb + + .. list-table:: + :widths: 25 85 + :header-rows: 0 + + * - **Activity Level** + - .. 
image:: ../static/nonactive.png + * - **Last push** + - 19:8:38 on 30/9/2011 + * - **Homepage** + - http:// + * - **Url** + - https://github.com/dynamicguy/node-mongo-cms + * - **Clone Url** + - https://github.com/dynamicguy/node-mongo-cms.git + * - **Forks** + - 0 + * - **Watchers** + - 5 + diff --git a/_sources/index.txt b/_sources/index.txt new file mode 100644 index 00000000000..686a0d0e5b3 --- /dev/null +++ b/_sources/index.txt @@ -0,0 +1,97 @@ +================================= +The Node.JS MongoDB Driver Manual +================================= + +Usage +----- + +.. toctree:: + :maxdepth: 1 + + markdown-docs/index + +Driver Updates +-------------- + +.. toctree:: + :maxdepth: 3 + + driver-articles/index + + +Tutorials +--------- + +.. toctree:: + :maxdepth: 3 + + api-articles/index + +API Documentation +----------------- + +.. toctree:: + :maxdepth: 2 + + api-generated/index + api-bson-generated/index + + +External Tutorials +------------------ + + * http://howtonode.org/express-mongodb, Blog rolling with mongoDB, express and Node.js + * http://nodetuts.com/tutorials/18-mongodb-and-mongoose.html, Node Tuts - episode 18 - Node.js, MongoDB and Mongoose from Pedro Teixeira on Vimeo. + * http://howtonode.org/node-js-and-mongodb-getting-started-with-mongojs, Node.js and MongoDB - Getting started with MongoJS + +Video Presentations about different topics around the MongoDB and Node.js +------------------------------------------------------------------------- + +.. toctree:: + :maxdepth: 3 + + content/nodejsvideo + content/awesomeappsvideo + content/tutorials + +Projects and Libraries using the driver +--------------------------------------- + +.. toctree:: + :maxdepth: 1 + + github/github + +MongoDB in General +------------------ +There are two main places to learn about MongoDB itself. I've included links below. + + * http://www.mongodb.org, all the documentation for the database + * http://www.10gen.com/presentations, lots of presentations and slides about using the database. + +There are also some very good books out these days + + * `MongoDB: The Definitive Guide `_ + * `MongoDB in Action `_ + * `The Definitive Guide to MongoDB: The NoSQL Database for Cloud and Desktop Computing `_ + * `50 Tips and Tricks for MongoDB Developers `_ + * `Scaling MongoDB `_ + * `MongoDB and Python: Patterns and processes for the popular document-oriented database `_ + * `MongoDB and PHP `_ + * `PHP and MongoDB Web Development Beginner's Guide `_ + * `Node Web Development `_ + * `Document Design for MongoDB `_ + +Changelog +--------- + +.. toctree:: + :maxdepth: 1 + + changelog/changelog + +Indices +------- + +- :ref:`genindex` +- :ref:`search` \ No newline at end of file diff --git a/_sources/markdown-docs/collections.txt b/_sources/markdown-docs/collections.txt new file mode 100644 index 00000000000..e34d7f622fd --- /dev/null +++ b/_sources/markdown-docs/collections.txt @@ -0,0 +1,222 @@ +=========== +Collections +=========== + + See also: + + + * `Database `_ + * `Queries `_ + + +------------------ +Collection objects +------------------ + + Collection object is a pointer to a specific collection in the `database `_ . If you want to `insert `_ new records or + `query `_ existing ones then you need to have a valid collection object. + + + + **NB** Collection names can't start or end with a period nor contain a dollar sign! ( ``.tes$t`` is not allowed) + + +-------------------- +Creating collections +-------------------- + + Collections can be created with ``createCollection`` + + + .. 
code-block:: javascript

+    db.createCollection(name[, options], callback)
+
+
+
+ where ``name`` is the name of the collection, ``options`` is a set of configuration parameters and ``callback`` is a callback function. ``db`` is the database object.
+
+
+
+ The first parameter for the callback is the error object (null if no error) and the second one is the pointer to the newly created collection. If strict mode is on and the collection already exists, the operation yields an error. With strict mode off (default) the function simply returns the pointer to the existing collection and does not truncate it.
+
+
+ .. code-block:: javascript
+
+    db.createCollection("test", function(err, collection){
+        collection.insert({"test":"value"});
+    });
+
+
+----------------------------
+Creating collections options
+----------------------------
+
+ Several options can be passed to the ``createCollection`` function with the ``options`` parameter.
+
+
+ * ``raw`` - driver returns documents as bson binary Buffer objects, ``default:false``
+
+
+ The returned collection object also has the following properties:
+
+ * ``collectionName`` is the name of the collection (not including the database name as a prefix)
+ * ``db`` is the pointer to the corresponding database object
+
+
+
+ Example of usage:
+
+
+ .. code-block:: javascript
+
+    console.log("Collection name: "+collection.collectionName)
+
+
+-------------------------
+List existing collections
+-------------------------
+
+ Collections can be listed with ``collectionNames``
+
+
+ .. code-block:: javascript
+
+    db.collectionNames(callback);
+
+
+
+ ``callback`` gets two parameters - an error object (if an error occurred) and an array of collection names as strings.
+
+
+
+ Collection names also include the database name, so a collection named ``posts`` in a database ``blog`` will be listed as ``blog.posts`` .
+
+
+
+ Additionally there are system collections which should not be altered without knowing exactly what you are doing; these collections can be identified by the ``system`` prefix. For example ``posts.system.indexes`` .
+
+
+
+ Example:
+
+
+ .. code-block:: javascript
+
+    var mongodb = require("mongodb"),
+        mongoserver = new mongodb.Server("localhost"),
+        db_connector = new mongodb.Db("blog", mongoserver);
+
+    db_connector.open(function(err, db){
+        db.collectionNames(function(err, collections){
+            console.log(collections); // ["blog.posts", "blog.system.indexes"]
+        });
+    });
+
+
+----------------
+List collections
+----------------
+
+ Collection objects can be listed with the database method ``collections``
+
+
+ .. code-block:: javascript
+
+    db.collections(callback)
+
+
+
+ Where ``callback`` gets two parameters - an error object (if an error occurred) and an array of collection objects.
+
+
+---------------------
+Selecting collections
+---------------------
+
+ Existing collections can be opened with ``collection``
+
+
+ .. code-block:: javascript
+
+    db.collection(name[, options], callback);
+
+
+
+ If strict mode is off, then a new collection is created if not already present.
+
+
+-----------------------------
+Selecting collections options
+-----------------------------
+
+ Several options can be passed to the ``collection`` function with the ``options`` parameter.
+
+
+ * ``raw`` - driver returns documents as bson binary Buffer objects, ``default:false``
+
+
+--------------------
+Renaming collections
+--------------------
+
+ A collection can be renamed with the collection method ``rename``
+
+
+ ..
code-block:: javascript

+    collection.rename(new_name, callback);
+
+
+
+ Passing the optional dropTarget boolean as the third parameter will allow overwriting of existing collections
+
+
+ .. code-block:: javascript
+
+    collection.rename(new_name, {dropTarget:true}, callback);
+
+
+---------------------------------
+Removing records from collections
+---------------------------------
+
+ Records can be erased from a collection with ``remove``
+
+
+ .. code-block:: javascript
+
+    collection.remove([query[, options]][, callback]);
+
+
+
+ Where
+
+
+ * ``query`` is the query that records to be removed need to match. If not set, all records will be removed
+ * ``options`` indicates advanced options. For example use ``{safe: true}`` when using callbacks
+ * ``callback`` is a callback function that gets two parameters - an error object (if an error occurred) and the count of removed records
+
+
+--------------------
+Removing collections
+--------------------
+
+ A collection can be dropped with ``drop``
+
+
+ .. code-block:: javascript
+
+    collection.drop(callback);
+
+
+
+ or with ``dropCollection``
+
+
+ .. code-block:: javascript
+
+    db.dropCollection(collection_name, callback)
+
diff --git a/_sources/markdown-docs/database.txt b/_sources/markdown-docs/database.txt
new file mode 100644
index 00000000000..316f6993cdd
--- /dev/null
+++ b/_sources/markdown-docs/database.txt
@@ -0,0 +1,228 @@
+========
+Database
+========
+
+ The first thing to do in order to make queries to the database is to open one. This can be done with the ``Db`` constructor.
+
+
+ .. code-block:: javascript
+
+    var mongodb = require("mongodb"),
+        mongoserver = new mongodb.Server(host, port, server_options),
+        db_connector = new mongodb.Db(name, mongoserver, db_options);
+
+    db_connector.open(callback);
+
+
+ * ``host`` is a server hostname or IP
+ * ``port`` is a MongoDB port, use ``mongodb.Connection.DEFAULT_PORT`` for default (27017)
+ * ``server_options`` see *Server options*
+ * ``name`` is the database name that needs to be opened, the database will be created automatically if it doesn't yet exist
+ * ``db_options`` see *DB options*
+
+
+--------------
+Server options
+--------------
+
+ Several options can be passed to the ``Server`` constructor with the ``options`` parameter.
+
+
+ * ``auto_reconnect`` - to reconnect automatically, ``default:false``
+ * ``poolSize`` - specify the number of connections in the pool ``default:5``
+ * ``socketOptions`` - a collection of per-socket settings
+
+
+--------------
+Socket options
+--------------
+
+ Several options can be set for the ``socketOptions`` .
+
+
+ * ``timeout`` = set seconds before connection times out ``default:0``
+ * ``noDelay`` = Disables the Nagle algorithm ``default:true``
+ * ``keepAlive`` = Set if keepAlive is used ``default:0`` , which means no keepAlive, set higher than 0 for keepAlive
+ * ``encoding`` = 'ascii'|'utf8'|'base64' ``default:null``
+
+
+----------
+DB options
+----------
+
+ Several options can be passed to the ``Db`` constructor with the ``options`` parameter.
+
+
+ * ``native_parser`` - if true, use the native BSON parser
+ * ``strict`` - sets *strict mode* , if true then existing collections can't be "recreated" etc.
+ * ``pk`` - custom primary key factory to generate ``_id`` values (see Custom primary keys).
+ * ``forceServerObjectId`` - generation of the ObjectID is delegated to the MongoDB server instead of the driver.
default is false + * ``retryMiliSeconds`` - specify the number of milliseconds between connection attempts ``default:5000`` + * ``numberOfRetries`` - specify the number of retries for connection attempts ``default:3`` + * ``reaper`` - enable/disable reaper (true/false) ``default:false`` + * ``reaperInterval`` - specify the number of milliseconds between each reaper attempt ``default:10000`` + * ``reaperTimeout`` - specify the number of milliseconds for timing out callbacks that don't return ``default:30000`` + * ``raw`` - driver expects Buffer raw bson document, ``default:false`` + * ``logger`` - object specifying error(), debug() and log() functions + + +------------------ +Opening a database +------------------ + + Database can be opened with Db method ``open`` . + + + .. code-block:: javascript + + db_connector.open(callback); + + + + ``callback`` is a callback function which gets 2 parameters - an error object (or null, if no errors occured) and a database object. + + + + Resulting database object can be used for creating and selecting `collections `_ . + + + .. code-block:: javascript + + db_connector.open(function(err, db){ + db.collection(...); + }); + + + * ``databaseName`` is the name of the database + * ``serverConfig`` includes information about the server ( ``serverConfig.host`` , ``serverConfig.port`` etc.) + * ``state`` indicates if the database is connected or not + * ``strict`` indicates if *strict mode* is on (true) or off (false, default) + * ``version`` indicates the version of the MongoDB database + + + * ``close`` to indicate that the connection to the database was closed + + + + For example + + + .. code-block:: javascript + + db.on("close", function(error){ + console.log("Connection to the database was closed!"); + }); + + + + NB! If ``auto_reconnect`` was set to true when creating the server, then the connection will be automatically reopened on next database operation. Nevertheless the ``close`` event will be fired. + + +----------------------------------------- +Sharing the connections over multiple dbs +----------------------------------------- + + To share the connection pool across multiple databases you database instance has method ``db`` + + + .. code-block:: javascript + + db_connector.db(name) + + + + this returns a new ``db`` instance that shares the connections off the previous instance but will send all commands to the database ``name`` . This allows for better control of resource usage in a multiple database scenario. + + +------------------- +Deleting a database +------------------- + + To delete a database you need a pointer to it first. Deletion can be done with method ``dropDatabase`` . + + + .. code-block:: javascript + + db_connector.open(function(err, db){ + if (err) { throw err; } + db.dropDatabase(function(err) { + if (err) { throw err; } + console.log("database has been dropped!"); + }); + }); + + +------------------- +Custom primary keys +------------------- + + Every record in the database has an unique primary key called ``_id`` . Default primary keys are 12 byte hashes but a custom key generator can be used for something else. If you set ``_id`` "by hand" when inserting records then you can use whatever you want, primary key factory generates ``_id`` values only for records without ones. + + + + Example 1: No need to generate primary key, as its already defined: + + + .. code-block:: javascript + + collection.insert({name:"Daniel", _id:"12345"}); + + + + Example 2: No primary key, so it needs to be generated before save: + + + .. 
code-block:: javascript

+    collection.insert({name:"Daniel"});
+
+
+
+ A custom primary key factory is actually an object with a method ``createPk`` which returns a primary key. The context (value for ``this`` ) for ``createPk`` is left untouched.
+
+
+ .. code-block:: javascript
+
+    var CustomPKFactory = {
+        counter:0,
+        createPk: function() {
+            return ++this.counter;
+        }
+    }
+
+    db_connector = new mongodb.Db(name, mongoserver, {pk: CustomPKFactory});
+
+
+--------------
+debug commands
+--------------
+
+ In order to debug the commands sent to the database you can add a ``logger`` object to the ``DB options`` . Make sure the property ``doDebug`` is also set.
+
+
+
+ Example
+
+
+ .. code-block:: javascript
+
+    options = {}
+    options.logger = {};
+    options.logger.doDebug = true;
+    options.logger.debug = function (message, object) {
+        // print the mongo command:
+        // "writing command to mongodb"
+        console.log(message);
+
+        // print the collection name
+        console.log(object.json.collectionName)
+
+        // print the json query sent to MongoDB
+        console.log(object.json.query)
+
+        // print the binary object
+        console.log(object.binary)
+    }
+
+    var db = new Db('some_database', new Server(...), options);
+
diff --git a/_sources/markdown-docs/gridfs.txt b/_sources/markdown-docs/gridfs.txt
new file mode 100644
index 00000000000..c0378bdaa55
--- /dev/null
+++ b/_sources/markdown-docs/gridfs.txt
@@ -0,0 +1,235 @@
+=========
+GridStore
+=========
+
+ GridFS is a scalable MongoDB *filesystem* for storing and retrieving large files. The default limit for a MongoDB record is 16MB, so to store data that is larger than this limit, GridFS can be used. GridFS splits the data into smaller chunks automatically. See `MongoDB documentation `_ for details.
+
+
+
+ GridStore is a single file inside GridFS that can be managed by the script.
+
+
+--------------
+Open GridStore
+--------------
+
+ Opening a GridStore (a single file in GridFS) is a bit similar to opening a database. At first you need to create a GridStore object and then ``open`` it.
+
+
+ .. code-block:: javascript
+
+    var gs = new mongodb.GridStore(db, filename, mode[, options])
+
+
+
+ Where
+
+
+ * ``db`` is the database object
+ * ``filename`` is the name of the file in GridFS that needs to be accessed/created
+ * ``mode`` indicates the operation, can be one of:
+
+   * "r" (Read): Looks for the file information in fs.files collection, or creates a new id for this object.
+   * "w" (Write): Erases all chunks if the file already exists.
+   * "w+" (Append): Finds the last chunk, and keeps writing after it.
+
+ * ``options`` can be used to specify some metadata for the file, for example ``content_type`` , ``metadata`` and ``chunk_size``
+
+
+
+ Example:
+
+
+ .. code-block:: javascript
+
+    var gs = new mongodb.GridStore(db, "test.png", "w", {
+        "content_type": "image/png",
+        "metadata":{
+            "author": "Daniel"
+        },
+        "chunk_size": 1024*4
+    });
+
+
+
+ When the GridStore object is created, it needs to be opened.
+
+
+ .. code-block:: javascript
+
+    gs.open(callback);
+
+
+
+ ``callback`` gets two parameters - an error object (if an error occurred) and the GridStore object.
+
+
+
+ An opened GridStore object has a set of useful properties
+
+
+ * ``gs.length`` - length of the file in bytes
+ * ``gs.contentType`` - the content type for the file
+ * ``gs.uploadDate`` - when the file was uploaded
+ * ``gs.metadata`` - metadata that was saved with the file
+ * ``gs.chunkSize`` - chunk size
+
+
+
+ Example
+
+
+ ..
code-block:: javascript + + gs.open(function(err, gs){ + console.log("this file was uploaded at "+gs.uploadDate); + }); + + +-------------------- +Writing to GridStore +-------------------- + + Writing can be done with ``write`` + + + .. code-block:: javascript + + gs.write(data, callback) + + + + where ``data`` is a ``Buffer`` or a string, callback gets two parameters - an error object (if error occured) and result value which indicates if the write was successful or not. + + + + While the GridStore is not closed, every write is appended to the opened GridStore. + + +--------------------------- +Writing a file to GridStore +--------------------------- + + This function opens the GridStore, streams the contents of the file into GridStore, and closes the GridStore. + + + .. code-block:: javascript + + gs.writeFile( file, callback ) + + + + where + + + * ``file`` is a file descriptor, or a string file path + * ``callback`` is a function with two parameters - error object (if error occured) and the GridStore object. + + +---------------------- +Reading from GridStore +---------------------- + + Reading from GridStore can be done with ``read`` + + + .. code-block:: javascript + + gs.read([size], callback) + + + + where + + + * ``size`` is the length of the data to be read + * ``callback`` is a callback function with two parameters - error object (if an error occured) and data (binary string) + + +------------------------ +Streaming from GridStore +------------------------ + + You can stream data as it comes from the database using ``stream`` + + + .. code-block:: javascript + + gs.stream([autoclose=false]) + + + + where + + + * ``autoclose`` If true current GridStore will be closed when EOF and 'close' event will be fired + + + + The function returns `read stream `_ based on this GridStore file. It supports the events 'read', 'error', 'close' and 'end'. + + +------------------ +Delete a GridStore +------------------ + + GridStore files can be unlinked with ``unlink`` + + + .. code-block:: javascript + + mongodb.GridStore.unlink(db, name, callback) + + + + Where + + + * ``db`` is the database object + * ``name`` is either the name of a GridStore object or an array of GridStore object names + * ``callback`` is the callback function + + +--------------------- +Closing the GridStore +--------------------- + + GridStore needs to be closed after usage. This can be done with ``close`` + + + .. code-block:: javascript + + gs.close(callback) + + +--------------------------------------- +Check the existance of a GridStore file +--------------------------------------- + + Checking if a file exists in GridFS can be done with ``exist`` + + + .. code-block:: javascript + + mongodb.GridStore.exist(db, filename, callback) + + + + Where + + + * ``db`` is the database object + * ``filename`` is the name of the file to be checked or a regular expression + * ``callback`` is a callback function with two parameters - an error object (if an error occured) and a boolean value indicating if the file exists or not + + +---------------------- +Seeking in a GridStore +---------------------- + + Seeking can be done with ``seek`` + + + .. code-block:: javascript + + gs.seek(position); + + + + This function moves the internal pointer to the specified position. + diff --git a/_sources/markdown-docs/index.txt b/_sources/markdown-docs/index.txt new file mode 100644 index 00000000000..fb4c72b563b --- /dev/null +++ b/_sources/markdown-docs/index.txt @@ -0,0 +1,10 @@ +.. 
toctree:: + :maxdepth: 1 + + collections + database + gridfs + indexes + insert + queries + replicaset diff --git a/_sources/markdown-docs/indexes.txt b/_sources/markdown-docs/indexes.txt new file mode 100644 index 00000000000..88da01ff89c --- /dev/null +++ b/_sources/markdown-docs/indexes.txt @@ -0,0 +1,143 @@ +======= +Indexes +======= + + Indexes are needed to make queries faster. For example if you need to find records by a field named *username* and the field has a related index set, then the query will be a lot faster compared to if the index was not present. + + + + See `MongoDB documentation `_ for details. + + +--------------------------------- +Create indexes with createIndex() +--------------------------------- + + ``createIndex`` adds a new index to a collection. For checking if the index was already set, use ``ensureIndex`` instead. + + + .. code-block:: javascript + + collection.createIndex(index[, options], callback) + + + + or + + + .. code-block:: javascript + + db.createIndex(collectionname, index[, options], callback) + + + + where + + + * ``index`` is the field or fields to be indexed. See *index field* + * ``options`` are options, for example ``{sparse: true}`` to include only records that have indexed field set or ``{unique: true}`` for unique indexes. If the ``options`` is a boolean value, then it indicates if it's an unique index or not. + * ``callback`` gets two parameters - an error object (if an error occured) and the name for the newly created index + + +--------------------------------- +Ensure indexes with ensureIndex() +--------------------------------- + + Same as ``createIndex`` with the difference that the index is checked for existence before adding to avoid duplicate indexes. + + +----------- +Index field +----------- + + Index field can be a simple string like ``"username"`` to index certain field (in this case, a field named as *username* ). + + + .. code-block:: javascript + + collection.ensureIndex("username",callback) + + + + It is possible to index fields inside nested objects, for example ``"user.firstname"`` to index field named *firstname* inside a document named *user* . + + + .. code-block:: javascript + + collection.ensureIndex("user.firstname",callback) + + + + It is also possible to create mixed indexes to include several fields at once. + + + .. code-block:: javascript + + collection.ensureIndex({firstname:1, lastname:1}, callback) + + + + or with tuples + + + .. code-block:: javascript + + collection.ensureIndex([["firstname", 1], ["lastname", 1]], callback) + + + + The number value indicates direction - if it's 1, then it is an ascending value, if it's -1 then it's descending. For example if you have documents with a field *date* and you want to sort these records in descending order then you might want to add corresponding index + + + .. code-block:: javascript + + collection.ensureIndex({date:-1}, callback) + + +------------------------------- +Remove indexes with dropIndex() +------------------------------- + + All indexes can be dropped at once with ``dropIndexes`` + + + .. code-block:: javascript + + collection.dropIndexes(callback) + + + + ``callback`` gets two parameters - an error object (if an error occured) and a boolean value true if operation succeeded. + + +--------------------------------------------- +Get index information with indexInformation() +--------------------------------------------- + + ``indexInformation`` can be used to fetch some useful information about collection indexes. + + + .. 
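code-block:: javascript

+    // Usage sketch (assumes an open `collection`); the exact signature and the
+    // shape of the returned object are described below.
+    collection.ensureIndex("username", function(err, indexName) {
+        collection.indexInformation(function(err, info) {
+            // logs an object mapping index names to the indexed fields
+            console.log(info);
+        });
+    });
+
+
+ ..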
code-block:: javascript + + collection.indexInformation(callback) + + + + Where ``callback`` gets two parameters - an error object (if an error occured) and an index information object. + + + + The keys in the index object are the index names and the values are tuples of included fields. + + + + For example if a collection has two indexes - as a default an ascending index for the ``_id`` field and an additonal descending index for ``"username"`` field, then the index information object would look like the following + + + .. code-block:: javascript + + { + "_id":[["_id", 1]], + "username_-1":[["username", -1]] + } + diff --git a/_sources/markdown-docs/insert.txt b/_sources/markdown-docs/insert.txt new file mode 100644 index 00000000000..b96e5a0e61c --- /dev/null +++ b/_sources/markdown-docs/insert.txt @@ -0,0 +1,190 @@ +====================== +Inserting and updating +====================== + + See also: + + + * `Database `_ + * `Collections `_ + + +------ +Insert +------ + + Records can be inserted to a collection with ``insert`` + + + .. code-block:: javascript + + collection.insert(docs[, options, callback]) + + + + Where + + + * ``docs`` is a single document object or an array of documents + * ``options`` is an object of parameters, if you use a callback, set ``safe`` to true - this way the callback is executed *after* the record is saved to the database, if ``safe`` is false (default) callback is fired immediately and thus doesn't make much sense. + * ``callback`` - callback function to run after the record is inserted. Set ``safe`` to true in ``options`` when using callback. First parameter for callback is the error object (if an error occured) and the second is an array of records inserted. + + + + For example + + + .. code-block:: javascript + + var document = {name:"David", title:"About MongoDB"}; + collection.insert(document, {safe: true}, function(err, records){ + console.log("Record added as "+records[0]._id); + }); + + + + If trying to insert a record with an existing ``_id`` value, then the operation yields in error. + + + .. code-block:: javascript + + collection.insert({_id:1}, {safe:true}, function(err, doc){ + // no error, inserted new document, with _id=1 + collection.insert({_id:1}, {safe:true}, function(err, doc){ + // error occured since _id=1 already existed + }); + }); + + +---- +Save +---- + + Shorthand for insert/update is ``save`` - if ``_id`` value set, the record is updated if it exists or inserted if it does not; if the ``_id`` value is not set, then the record is inserted as a new one. + + + .. code-block:: javascript + + collection.save({_id:"abc", user:"David"},{safe:true}, callback) + + + + ``callback`` gets two parameters - an error object (if an error occured) and the record if it was inserted or ``1`` if the record was updated. + + +------ +Update +------ + + Updates can be done with ``update`` + + + .. code-block:: javascript + + collection.update(criteria, update[, options[, callback]]); + + + + Where + + + * ``criteria`` is a query object to find records that need to be updated (see `Queries `_ + * ``update`` is the replacement object + * ``options`` is an options object (see below) + * ``callback`` is the callback to be run after the records are updated. Has two parameters, the first is an error object (if error occured), the second is the count of records that were modified. 
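+
+
+ For example, a minimal sketch (assuming an open ``collection``; the available option values are listed just below):
+
+
+ .. code-block:: javascript
+
+    // Set the author on the matching record, inserting it if it does not exist yet.
+    collection.update({title:"Mongo facts"},
+                      {$set: {author:"Jessica"}},
+                      {safe:true, upsert:true},
+                      function(err, count) {
+        if (err) { console.warn(err.message); }
+        console.log(count + " record(s) updated");
+    });
+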
+ + + + There are several option values that can be used with an update + + + * ``safe`` - run callback only after the update is done, defaults to false + * ``multi`` - update all records that match the query object, default is false (only the first one found is updated) + * ``upsert`` - if true and no records match the query, insert ``update`` as a new record + * ``raw`` - driver returns updated document as bson binary Buffer, ``default:false`` + + + + If the replacement object is a document, the matching documents will be replaced (except the ``_id`` values if no ``_id`` is set). + + + .. code-block:: javascript + + collection.update({_id:"123"}, {author:"Jessica", title:"Mongo facts"}); + + + + The example above will replace the document contents of id=123 with the replacement object. + + + + To update only selected fields, ``$set`` operator needs to be used. Following replacement object replaces author value but leaves everything else intact. + + + .. code-block:: javascript + + collection.update({_id:"123"}, {$set: {author:"Jessica"}}); + + + + See `MongoDB documentation `_ for all possible operators. + + +--------------- +Find and Modify +--------------- + + To update and retrieve the contents for one single record you can use ``findAndModify`` . + + + .. code-block:: javascript + + collection.findAndModify(criteria, sort, update[, options, callback]) + + + + Where + + + * ``criteria`` is the query object to find the record + * ``sort`` indicates the order of the matches if there's more than one matching record. The first record on the result set will be used. See `Queries->find->options->sort `_ for the format. + * ``update`` is the replacement object + * ``options`` define the behavior of the function + * ``callback`` is the function to run after the update is done. Has two parameters - error object (if error occured) and the record that was updated. + + + + Options object can be used for the following options: + + + * ``remove`` - if set to true (default is false), removes the record from the collection. Callback function still gets the object but it doesn't exist in the collection any more. + * ``new`` - if set to true, callback function returns the modified record. Default is false (original record is returned) + * ``upsert`` - if set to true and no record matched to the query, replacement object is inserted as a new record + + + .. code-block:: javascript + + var mongodb = require('mongodb'), + server = new mongodb.Server("127.0.0.1", 27017, {}); + + new mongodb.Db('test', server, {}).open(function (error, client) { + if (error) throw error; + var collection = new mongodb.Collection(client, 'test_collection'); + collection.findAndModify( + {hello: 'world'}, // query + [['_id','asc']], // sort order + {$set: {hi: 'there'}}, // replacement, replaces only the field "hi" + {}, // options + function(err, object) { + if (err){ + console.warn(err.message); // returns error if no matching object found + }else{ + console.dir(object); + } + }); + }); + + + + + diff --git a/_sources/markdown-docs/queries.txt b/_sources/markdown-docs/queries.txt new file mode 100644 index 00000000000..7d2d380a797 --- /dev/null +++ b/_sources/markdown-docs/queries.txt @@ -0,0 +1,436 @@ +======= +Queries +======= + + See also: + + + * `Database `_ + * `Collections `_ + + +-------------------------- +Making queries with find() +-------------------------- + + `Collections `_ can be queried with ``find`` . + + + .. 
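code-block:: javascript

+    // A quick usage sketch (assumes an open `collection` of blog posts); the full
+    // signature and each parameter are described below.
+    collection.find({author:"Daniel"}, {title:true}, {limit:10}).toArray(function(err, docs) {
+        if (err) { console.warn(err.message); }
+        console.log(docs); // up to 10 documents, each with `title` (and `_id`)
+    });
+
+
+ ..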
code-block:: javascript + + collection.find(query[[[, fields], options], callback]); + + + + Where + + + * ``query`` - is a query object, defining the conditions the documents need to apply + * ``fields`` - indicates which fields should be included in the response (default is all) + * ``options`` - defines extra logic (sorting options, paging etc.) + * ``raw`` - driver returns documents as bson binary Buffer objects, ``default:false`` + + + + The result for the query is actually a cursor object. This can be used directly or converted to an array. + + + .. code-block:: javascript + + var cursor = collection.find({}); + cursor.each(...); + + + + To indicate which fields must or must no be returned ``fields`` value can be used. For example the following ``fields`` value + + + .. code-block:: javascript + + { + "name": true, + "title": true + } + + + + retrieves fields ``name`` and ``title`` (and as a default also ``_id`` ) but not any others. + + +----------------------------------- +Find first occurence with findOne() +----------------------------------- + + ``findOne`` is a convinence method finding and returning the first match of a query while regular ``find`` returns a cursor object instead. Use it when you expect only one record, for example when querying with ``_id`` or another unique property. + + + .. code-block:: javascript + + collection.findOne([query], callback) + + + + Where + + + * ``query`` is a query object or an ``_id`` value + * ``callback`` has two parameters - an error object (if an error occured) and the document object. + + + + Example: + + + .. code-block:: javascript + + collection.findOne({_id: doc_id}, function(err, document) { + console.log(document.name); + }); + + +---------- +_id values +---------- + + Default ``_id`` values are 12 byte binary hashes. You can alter the format with custom Primary Key factories (see *Custom Primarky Keys* in `Database `_ . + + + + In order to treat these binary _id values as strings it would be wise to convert binary values to hex strings. This can be done with ``toHexString`` property. + + + .. code-block:: javascript + + var idHex = document._id.toHexString(); + + + + Hex strings can be reverted back to binary (for example to perform queries) with ``ObjectID.createFromHexString`` + + + .. code-block:: javascript + + {_id: ObjectID.createFromHexString(idHex)} + + + + When inserting new records it is possible to use custom ``_id`` values as well which do not need to be binary hashes, for example strings. + + + .. code-block:: javascript + + collection.insert({_id: "abc", ...}); + collection.findOne({_id: "abc"},...); + + + + This way it is not necessary to convert ``_id`` values to hex strings and back. + + +------------ +Query object +------------ + + The simplest query object is an empty one ``{}`` which matches every record in the database. + + + + To make a simple query where one field must match to a defined value, one can do it as simply as + + + .. code-block:: javascript + + {fieldname: "fieldvalue"} + + + + This query matches all the records that a) have fields called *fieldname* and b) its value is *"fieldvalue"* . + + + + For example if we have a collection of blog posts where the structure of the records is ``{title, author, contents}`` and we want to retrieve all the posts for a specific author then we can do it like this: + + + .. 
code-block:: javascript

+    posts = pointer_to_collection;
+    posts.find({author:"Daniel"}).toArray(function(err, results){
+        console.log(results); // output all records
+    });
+
+
+
+ If the queried field is inside an object then that can be queried also. For example if we have a record with the following structure:
+
+
+ .. code-block:: javascript
+
+    {
+        user: {
+            name: "Daniel"
+        }
+    }
+
+
+
+ Then we can query the "name" field like this: ``{"user.name":"Daniel"}``
+
+
+
+ If more than one fieldname is specified, then it's an AND query
+
+
+ .. code-block:: javascript
+
+    {
+        key1: "value1",
+        key2: "value2"
+    }
+
+
+
+ This query matches all records where *key1* is *"value1"* and *key2* is *"value2"*
+
+
+
+ OR queries are a bit trickier but doable with the ``$or`` operator. The ``$or`` operator takes an array which includes a set of query objects and at least one of these must match a document before it is retrieved
+
+
+ .. code-block:: javascript
+
+    {
+        $or:[
+            {author:"Daniel"},
+            {author:"Jessica"}
+        ]
+    }
+
+
+
+ This query matches all the documents where author is Daniel or Jessica.
+
+
+
+ To mix AND and OR queries, you just need to use $or as one of the regular query fields.
+
+
+ .. code-block:: javascript
+
+    {
+        title:"MongoDB",
+        $or:[
+            {author:"Daniel"},
+            {author:"Jessica"}
+        ]
+    }
+
+
+
+ Conditional operators ``<`` , ``<=`` , ``>`` , ``>=`` and ``!=`` can't be used directly, as the query object format doesn't support them, but the same can be achieved with their aliases ``$lt`` , ``$lte`` , ``$gt`` , ``$gte`` and ``$ne`` . When a field value needs to match a conditional, the value must be wrapped into a separate object.
+
+
+ .. code-block:: javascript
+
+    {"fieldname":{$gte:100}}
+
+
+
+ This query defines that *fieldname* must be greater than or equal to ``100`` .
+
+
+
+ Conditionals can also be mixed to create ranges.
+
+
+ .. code-block:: javascript
+
+    {"fieldname": {$gte:10, $lte:100}}
+
+
+
+ Queried field values can also be matched with regular expressions
+
+
+ .. code-block:: javascript
+
+    {author:/^Daniel/}
+
+
+
+ In addition to OR and conditional operators there are some more:
+
+
+ * ``$in`` - specifies an array of possible matches, ``{"name":{$in:[1,2,3]}}``
+ * ``$nin`` - specifies an array of unwanted matches
+ * ``$all`` - the array value must contain all of the listed values ``{"name":{$all:[1,2,3]}}``
+ * ``$exists`` - checks for existence of a field ``{"name":{$exists:true}}``
+ * ``$mod`` - checks for a modulo ``{"name":{$mod:[3,2]}}`` is the same as ``"name" % 3 == 2``
+ * ``$size`` - checks the size of an array value ``{"name": {$size:2}}`` matches arrays *name* with 2 elements
+
+
+---------------------------------
+Queries inside objects and arrays
+---------------------------------
+
+ If you have a document with nested objects/arrays then the keys inside these nested objects can still be used for queries.
+
+
+
+ For example with the following document
+
+
+ .. code-block:: javascript
+
+    {
+        "_id": idvalue,
+        "author":{
+            "firstname":"Daniel",
+            "lastname": "Defoe"
+        },
+        "books":[
+            {
+                "title":"Robinson Crusoe",
+                "year": 1714
+            }
+        ]
+    }
+
+
+
+ not only the ``_id`` field can be used as a query field - also the ``firstname`` and even ``title`` can be used. This can be done when using nested field names as strings, concatenated with periods.
+
+
+ .. code-block:: javascript
+
+    collection.find({"author.firstname":"Daniel"})
+
+
+
+ Works even inside arrays
+
+
+ ..
+
+
+-------------
+Query options
+-------------
+
+Query options define the behavior of the query.
+
+.. code-block:: javascript
+
+    var options = {
+      "limit": 20,
+      "skip": 10,
+      "sort": "title"
+    }
+
+    collection.find({}, options).toArray(...);
+
+
+Paging can be achieved with the option parameters ``limit`` and ``skip``
+
+.. code-block:: javascript
+
+    {
+      "limit": 20,
+      "skip": 10
+    }
+
+
+retrieves up to 20 elements, skipping the first 10.
+
+
+Sorting can be achieved with the option parameter ``sort`` which takes an array of sort preferences
+
+.. code-block:: javascript
+
+    {
+      "sort": [['field1','asc'], ['field2','desc']]
+    }
+
+
+With a single ascending field the array can be replaced with just the name of the field.
+
+.. code-block:: javascript
+
+    {
+      "sort": "name"
+    }
+
+
+The option parameter ``explain`` turns the query into an explain query.
+
+
+-------
+Cursors
+-------
+
+Cursor objects are the results of queries and can be used to fetch individual records from the database.
+
+
+``cursor.nextObject(function(err, doc){})`` retrieves the next record from the database. If doc is null, then there weren't any more records.
+
+
+``cursor.each(function(err, doc){})`` retrieves all matching records one by one.
+
+
+``cursor.toArray(function(err, docs){})`` converts the cursor object into an array of all the matching records. It is probably the most convenient way to retrieve results, but be careful with large datasets as every record is loaded into memory.
+
+.. code-block:: javascript
+
+    collection.find().toArray(function(err, docs){
+      console.log("retrieved records:");
+      console.log(docs);
+    });
+
+
+``cursor.rewind()`` resets the internal pointer in the cursor to the beginning.
+
+
+----------------
+Counting matches
+----------------
+
+Counting the total number of found matches can be done against cursors with the method ``count``.
+
+.. code-block:: javascript
+
+    cursor.count(callback)
+
+
+Where
+
+* ``callback`` is the callback function with two parameters - an error object (if an error occurred) and the number of matches as an integer.
+
+
+Example
+
+.. code-block:: javascript
+
+    cursor.count(function(err, count){
+      console.log("Total matches: "+count);
+    });
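+
+
+Putting the options, cursors and ``count`` together gives simple paging. The following is only a sketch, assuming ``collection`` is an opened collection and ``page`` is a zero-based page number:
+
+.. code-block:: javascript
+
+    var pageSize = 20;
+    var query = {author: "Daniel"};
+
+    // total number of matches, ignoring limit and skip
+    collection.find(query).count(function(err, total) {
+      if (err) return console.log(err);
+
+      // fetch one page, sorted by title
+      var options = {
+        "limit": pageSize,
+        "skip": page * pageSize,
+        "sort": "title"
+      };
+
+      collection.find(query, options).toArray(function(err, docs) {
+        if (err) return console.log(err);
+        console.log("page " + (page + 1) + " of " + Math.ceil(total / pageSize));
+        console.log(docs);
+      });
+    });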
+
diff --git a/_sources/markdown-docs/replicaset.txt b/_sources/markdown-docs/replicaset.txt
new file mode 100644
index 00000000000..74047572057
--- /dev/null
+++ b/_sources/markdown-docs/replicaset.txt
@@ -0,0 +1,83 @@
+===========
+Replicasets
+===========
+------------
+Introduction
+------------
+
+Replica sets are the asynchronous master/slave replication added to MongoDB that takes care of all the failover and recovery for the member nodes. According to the MongoDB documentation a replicaset is
+
+* Two or more nodes that are copies of each other
+* Automatic assignment of a primary (master) node if none is available
+* Drivers that automatically detect the new master and send writes to it
+
+
+More information at `Replicasets `_
+
+
+------------
+Driver usage
+------------
+
+To create a new replicaset, follow the instructions on the MongoDB site to set up the config and the replicaset instances. Then, using the driver:
+
+.. code-block:: javascript
+
+    var replSet = new ReplSetServers( [
+        new Server("127.0.0.1", 30000),
+        new Server("127.0.0.1", 30001),
+        new Server("127.0.0.1", 30002)
+      ],
+      {rs_name:RS.name}
+    );
+
+    var db = new Db('integration_test_', replSet);
+    db.open(function(err, p_db) {
+      // Do your app stuff :)
+    })
+
+
+The ReplSetServers object has the following parameters
+
+.. code-block:: javascript
+
+    var replSet = new ReplSetServers(servers, options)
+
+
+Where
+
+* ``servers`` is an array of ``Server`` objects
+* ``options`` can contain the following options
+
+
+------------------
+Replicaset options
+------------------
+
+Several options can be passed to the ``ReplSetServers`` constructor with the ``options`` parameter.
+
+* ``rs_name`` is the name of the replicaset you configured when you started the server; you can have multiple replicasets running on your servers.
+* ``read_secondary`` sets the driver to read from secondary servers (slaves) instead of only from the primary (master) server.
+* ``socketOptions`` - a collection of per-socket settings
+
+
+--------------
+Socket options
+--------------
+
+Several options can be set for the ``socketOptions``.
+
+* ``timeout`` = set seconds before the connection times out, ``default:0``
+* ``noDelay`` = disables the Nagle algorithm, ``default:true``
+* ``keepAlive`` = set if keepAlive is used, ``default:0``, which means no keepAlive; set higher than 0 to enable keepAlive
+* ``encoding`` = 'ascii'|'utf8'|'base64', ``default:null``
diff --git a/_static/active.png b/_static/active.png
new file mode 100644
index 00000000000..2373e4ffb42
Binary files /dev/null and b/_static/active.png differ
diff --git a/_static/ajax-loader.gif b/_static/ajax-loader.gif
new file mode 100644
index 00000000000..61faf8cab23
Binary files /dev/null and b/_static/ajax-loader.gif differ
diff --git a/_static/basic.css b/_static/basic.css
new file mode 100644
index 00000000000..43e8bafaf35
--- /dev/null
+++ b/_static/basic.css
@@ -0,0 +1,540 @@
+/*
+ * basic.css
+ * ~~~~~~~~~
+ *
+ * Sphinx stylesheet -- basic theme.
+ *
+ * :copyright: Copyright 2007-2011 by the Sphinx team, see AUTHORS.
+ * :license: BSD, see LICENSE for details.
+ * + */ + +/* -- main layout ----------------------------------------------------------- */ + +div.clearer { + clear: both; +} + +/* -- relbar ---------------------------------------------------------------- */ + +div.related { + width: 100%; + font-size: 90%; +} + +div.related h3 { + display: none; +} + +div.related ul { + margin: 0; + padding: 0 0 0 10px; + list-style: none; +} + +div.related li { + display: inline; +} + +div.related li.right { + float: right; + margin-right: 5px; +} + +/* -- sidebar --------------------------------------------------------------- */ + +div.sphinxsidebarwrapper { + padding: 10px 5px 0 10px; +} + +div.sphinxsidebar { + float: left; + width: 230px; + margin-left: -100%; + font-size: 90%; +} + +div.sphinxsidebar ul { + list-style: none; +} + +div.sphinxsidebar ul ul, +div.sphinxsidebar ul.want-points { + margin-left: 20px; + list-style: square; +} + +div.sphinxsidebar ul ul { + margin-top: 0; + margin-bottom: 0; +} + +div.sphinxsidebar form { + margin-top: 10px; +} + +div.sphinxsidebar input { + border: 1px solid #98dbcc; + font-family: sans-serif; + font-size: 1em; +} + +div.sphinxsidebar #searchbox input[type="text"] { + width: 170px; +} + +div.sphinxsidebar #searchbox input[type="submit"] { + width: 30px; +} + +img { + border: 0; +} + +/* -- search page ----------------------------------------------------------- */ + +ul.search { + margin: 10px 0 0 20px; + padding: 0; +} + +ul.search li { + padding: 5px 0 5px 20px; + background-image: url(file.png); + background-repeat: no-repeat; + background-position: 0 7px; +} + +ul.search li a { + font-weight: bold; +} + +ul.search li div.context { + color: #888; + margin: 2px 0 0 30px; + text-align: left; +} + +ul.keywordmatches li.goodmatch a { + font-weight: bold; +} + +/* -- index page ------------------------------------------------------------ */ + +table.contentstable { + width: 90%; +} + +table.contentstable p.biglink { + line-height: 150%; +} + +a.biglink { + font-size: 1.3em; +} + +span.linkdescr { + font-style: italic; + padding-top: 5px; + font-size: 90%; +} + +/* -- general index --------------------------------------------------------- */ + +table.indextable { + width: 100%; +} + +table.indextable td { + text-align: left; + vertical-align: top; +} + +table.indextable dl, table.indextable dd { + margin-top: 0; + margin-bottom: 0; +} + +table.indextable tr.pcap { + height: 10px; +} + +table.indextable tr.cap { + margin-top: 10px; + background-color: #f2f2f2; +} + +img.toggler { + margin-right: 3px; + margin-top: 3px; + cursor: pointer; +} + +div.modindex-jumpbox { + border-top: 1px solid #ddd; + border-bottom: 1px solid #ddd; + margin: 1em 0 1em 0; + padding: 0.4em; +} + +div.genindex-jumpbox { + border-top: 1px solid #ddd; + border-bottom: 1px solid #ddd; + margin: 1em 0 1em 0; + padding: 0.4em; +} + +/* -- general body styles --------------------------------------------------- */ + +a.headerlink { + visibility: hidden; +} + +h1:hover > a.headerlink, +h2:hover > a.headerlink, +h3:hover > a.headerlink, +h4:hover > a.headerlink, +h5:hover > a.headerlink, +h6:hover > a.headerlink, +dt:hover > a.headerlink { + visibility: visible; +} + +div.body p.caption { + text-align: inherit; +} + +div.body td { + text-align: left; +} + +.field-list ul { + padding-left: 1em; +} + +.first { + margin-top: 0 !important; +} + +p.rubric { + margin-top: 30px; + font-weight: bold; +} + +img.align-left, .figure.align-left, object.align-left { + clear: left; + float: left; + margin-right: 1em; +} + +img.align-right, 
.figure.align-right, object.align-right { + clear: right; + float: right; + margin-left: 1em; +} + +img.align-center, .figure.align-center, object.align-center { + display: block; + margin-left: auto; + margin-right: auto; +} + +.align-left { + text-align: left; +} + +.align-center { + text-align: center; +} + +.align-right { + text-align: right; +} + +/* -- sidebars -------------------------------------------------------------- */ + +div.sidebar { + margin: 0 0 0.5em 1em; + border: 1px solid #ddb; + padding: 7px 7px 0 7px; + background-color: #ffe; + width: 40%; + float: right; +} + +p.sidebar-title { + font-weight: bold; +} + +/* -- topics ---------------------------------------------------------------- */ + +div.topic { + border: 1px solid #ccc; + padding: 7px 7px 0 7px; + margin: 10px 0 10px 0; +} + +p.topic-title { + font-size: 1.1em; + font-weight: bold; + margin-top: 10px; +} + +/* -- admonitions ----------------------------------------------------------- */ + +div.admonition { + margin-top: 10px; + margin-bottom: 10px; + padding: 7px; +} + +div.admonition dt { + font-weight: bold; +} + +div.admonition dl { + margin-bottom: 0; +} + +p.admonition-title { + margin: 0px 10px 5px 0px; + font-weight: bold; +} + +div.body p.centered { + text-align: center; + margin-top: 25px; +} + +/* -- tables ---------------------------------------------------------------- */ + +table.docutils { + border: 0; + border-collapse: collapse; +} + +table.docutils td, table.docutils th { + padding: 1px 8px 1px 5px; + border-top: 0; + border-left: 0; + border-right: 0; + border-bottom: 1px solid #aaa; +} + +table.field-list td, table.field-list th { + border: 0 !important; +} + +table.footnote td, table.footnote th { + border: 0 !important; +} + +th { + text-align: left; + padding-right: 5px; +} + +table.citation { + border-left: solid 1px gray; + margin-left: 1px; +} + +table.citation td { + border-bottom: none; +} + +/* -- other body styles ----------------------------------------------------- */ + +ol.arabic { + list-style: decimal; +} + +ol.loweralpha { + list-style: lower-alpha; +} + +ol.upperalpha { + list-style: upper-alpha; +} + +ol.lowerroman { + list-style: lower-roman; +} + +ol.upperroman { + list-style: upper-roman; +} + +dl { + margin-bottom: 15px; +} + +dd p { + margin-top: 0px; +} + +dd ul, dd table { + margin-bottom: 10px; +} + +dd { + margin-top: 3px; + margin-bottom: 10px; + margin-left: 30px; +} + +dt:target, .highlighted { + background-color: #fbe54e; +} + +dl.glossary dt { + font-weight: bold; + font-size: 1.1em; +} + +.field-list ul { + margin: 0; + padding-left: 1em; +} + +.field-list p { + margin: 0; +} + +.refcount { + color: #060; +} + +.optional { + font-size: 1.3em; +} + +.versionmodified { + font-style: italic; +} + +.system-message { + background-color: #fda; + padding: 5px; + border: 3px solid red; +} + +.footnote:target { + background-color: #ffa; +} + +.line-block { + display: block; + margin-top: 1em; + margin-bottom: 1em; +} + +.line-block .line-block { + margin-top: 0; + margin-bottom: 0; + margin-left: 1.5em; +} + +.guilabel, .menuselection { + font-family: sans-serif; +} + +.accelerator { + text-decoration: underline; +} + +.classifier { + font-style: oblique; +} + +abbr, acronym { + border-bottom: dotted 1px; + cursor: help; +} + +/* -- code displays --------------------------------------------------------- */ + +pre { + overflow: auto; + overflow-y: hidden; /* fixes display issues on Chrome browsers */ +} + +td.linenos pre { + padding: 5px 0px; + border: 0; + 
background-color: transparent; + color: #aaa; +} + +table.highlighttable { + margin-left: 0.5em; +} + +table.highlighttable td { + padding: 0 0.5em 0 0.5em; +} + +tt.descname { + background-color: transparent; + font-weight: bold; + font-size: 1.2em; +} + +tt.descclassname { + background-color: transparent; +} + +tt.xref, a tt { + background-color: transparent; + font-weight: bold; +} + +h1 tt, h2 tt, h3 tt, h4 tt, h5 tt, h6 tt { + background-color: transparent; +} + +.viewcode-link { + float: right; +} + +.viewcode-back { + float: right; + font-family: sans-serif; +} + +div.viewcode-block:target { + margin: -1px -10px; + padding: 0 10px; +} + +/* -- math display ---------------------------------------------------------- */ + +img.math { + vertical-align: middle; +} + +div.body div.math p { + text-align: center; +} + +span.eqno { + float: right; +} + +/* -- printout stylesheet --------------------------------------------------- */ + +@media print { + div.document, + div.documentwrapper, + div.bodywrapper { + margin: 0 !important; + width: 100%; + } + + div.sphinxsidebar, + div.related, + div.footer, + #top-link { + display: none; + } +} \ No newline at end of file diff --git a/_static/comment-bright.png b/_static/comment-bright.png new file mode 100644 index 00000000000..551517b8c83 Binary files /dev/null and b/_static/comment-bright.png differ diff --git a/_static/comment-close.png b/_static/comment-close.png new file mode 100644 index 00000000000..09b54be46da Binary files /dev/null and b/_static/comment-close.png differ diff --git a/_static/comment.png b/_static/comment.png new file mode 100644 index 00000000000..92feb52b882 Binary files /dev/null and b/_static/comment.png differ diff --git a/_static/doctools.js b/_static/doctools.js new file mode 100644 index 00000000000..d4619fdfb10 --- /dev/null +++ b/_static/doctools.js @@ -0,0 +1,247 @@ +/* + * doctools.js + * ~~~~~~~~~~~ + * + * Sphinx JavaScript utilities for all documentation. + * + * :copyright: Copyright 2007-2011 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. + * + */ + +/** + * select a different prefix for underscore + */ +$u = _.noConflict(); + +/** + * make the code below compatible with browsers without + * an installed firebug like debugger +if (!window.console || !console.firebug) { + var names = ["log", "debug", "info", "warn", "error", "assert", "dir", + "dirxml", "group", "groupEnd", "time", "timeEnd", "count", "trace", + "profile", "profileEnd"]; + window.console = {}; + for (var i = 0; i < names.length; ++i) + window.console[names[i]] = function() {}; +} + */ + +/** + * small helper function to urldecode strings + */ +jQuery.urldecode = function(x) { + return decodeURIComponent(x).replace(/\+/g, ' '); +} + +/** + * small helper function to urlencode strings + */ +jQuery.urlencode = encodeURIComponent; + +/** + * This function returns the parsed url parameters of the + * current request. Multiple values per key are supported, + * it will always return arrays of strings for the value parts. 
+ */ +jQuery.getQueryParameters = function(s) { + if (typeof s == 'undefined') + s = document.location.search; + var parts = s.substr(s.indexOf('?') + 1).split('&'); + var result = {}; + for (var i = 0; i < parts.length; i++) { + var tmp = parts[i].split('=', 2); + var key = jQuery.urldecode(tmp[0]); + var value = jQuery.urldecode(tmp[1]); + if (key in result) + result[key].push(value); + else + result[key] = [value]; + } + return result; +}; + +/** + * small function to check if an array contains + * a given item. + */ +jQuery.contains = function(arr, item) { + for (var i = 0; i < arr.length; i++) { + if (arr[i] == item) + return true; + } + return false; +}; + +/** + * highlight a given string on a jquery object by wrapping it in + * span elements with the given class name. + */ +jQuery.fn.highlightText = function(text, className) { + function highlight(node) { + if (node.nodeType == 3) { + var val = node.nodeValue; + var pos = val.toLowerCase().indexOf(text); + if (pos >= 0 && !jQuery(node.parentNode).hasClass(className)) { + var span = document.createElement("span"); + span.className = className; + span.appendChild(document.createTextNode(val.substr(pos, text.length))); + node.parentNode.insertBefore(span, node.parentNode.insertBefore( + document.createTextNode(val.substr(pos + text.length)), + node.nextSibling)); + node.nodeValue = val.substr(0, pos); + } + } + else if (!jQuery(node).is("button, select, textarea")) { + jQuery.each(node.childNodes, function() { + highlight(this); + }); + } + } + return this.each(function() { + highlight(this); + }); +}; + +/** + * Small JavaScript module for the documentation. + */ +var Documentation = { + + init : function() { + this.fixFirefoxAnchorBug(); + this.highlightSearchWords(); + this.initIndexTable(); + }, + + /** + * i18n support + */ + TRANSLATIONS : {}, + PLURAL_EXPR : function(n) { return n == 1 ? 0 : 1; }, + LOCALE : 'unknown', + + // gettext and ngettext don't access this so that the functions + // can safely bound to a different name (_ = Documentation.gettext) + gettext : function(string) { + var translated = Documentation.TRANSLATIONS[string]; + if (typeof translated == 'undefined') + return string; + return (typeof translated == 'string') ? translated : translated[0]; + }, + + ngettext : function(singular, plural, n) { + var translated = Documentation.TRANSLATIONS[singular]; + if (typeof translated == 'undefined') + return (n == 1) ? singular : plural; + return translated[Documentation.PLURALEXPR(n)]; + }, + + addTranslations : function(catalog) { + for (var key in catalog.messages) + this.TRANSLATIONS[key] = catalog.messages[key]; + this.PLURAL_EXPR = new Function('n', 'return +(' + catalog.plural_expr + ')'); + this.LOCALE = catalog.locale; + }, + + /** + * add context elements like header anchor links + */ + addContextElements : function() { + $('div[id] > :header:first').each(function() { + $('\u00B6'). + attr('href', '#' + this.id). + attr('title', _('Permalink to this headline')). + appendTo(this); + }); + $('dt[id]').each(function() { + $('\u00B6'). + attr('href', '#' + this.id). + attr('title', _('Permalink to this definition')). 
+ appendTo(this); + }); + }, + + /** + * workaround a firefox stupidity + */ + fixFirefoxAnchorBug : function() { + if (document.location.hash && $.browser.mozilla) + window.setTimeout(function() { + document.location.href += ''; + }, 10); + }, + + /** + * highlight the search words provided in the url in the text + */ + highlightSearchWords : function() { + var params = $.getQueryParameters(); + var terms = (params.highlight) ? params.highlight[0].split(/\s+/) : []; + if (terms.length) { + var body = $('div.body'); + window.setTimeout(function() { + $.each(terms, function() { + body.highlightText(this.toLowerCase(), 'highlighted'); + }); + }, 10); + $('') + .appendTo($('#searchbox')); + } + }, + + /** + * init the domain index toggle buttons + */ + initIndexTable : function() { + var togglers = $('img.toggler').click(function() { + var src = $(this).attr('src'); + var idnum = $(this).attr('id').substr(7); + $('tr.cg-' + idnum).toggle(); + if (src.substr(-9) == 'minus.png') + $(this).attr('src', src.substr(0, src.length-9) + 'plus.png'); + else + $(this).attr('src', src.substr(0, src.length-8) + 'minus.png'); + }).css('display', ''); + if (DOCUMENTATION_OPTIONS.COLLAPSE_INDEX) { + togglers.click(); + } + }, + + /** + * helper function to hide the search marks again + */ + hideSearchWords : function() { + $('#searchbox .highlight-link').fadeOut(300); + $('span.highlighted').removeClass('highlighted'); + }, + + /** + * make the url absolute + */ + makeURL : function(relativeURL) { + return DOCUMENTATION_OPTIONS.URL_ROOT + '/' + relativeURL; + }, + + /** + * get the current relative url + */ + getCurrentURL : function() { + var path = document.location.pathname; + var parts = path.split(/\//); + $.each(DOCUMENTATION_OPTIONS.URL_ROOT.split(/\//), function() { + if (this == '..') + parts.pop(); + }); + var url = parts.join('/'); + return path.substring(url.lastIndexOf('/') + 1, path.length - 1); + } +}; + +// quick alias for translations +_ = Documentation.gettext; + +$(document).ready(function() { + Documentation.init(); +}); diff --git a/_static/down-pressed.png b/_static/down-pressed.png new file mode 100644 index 00000000000..6f7ad782782 Binary files /dev/null and b/_static/down-pressed.png differ diff --git a/_static/down.png b/_static/down.png new file mode 100644 index 00000000000..3003a88770d Binary files /dev/null and b/_static/down.png differ diff --git a/_static/file.png b/_static/file.png new file mode 100644 index 00000000000..d18082e397e Binary files /dev/null and b/_static/file.png differ diff --git a/_static/jquery.js b/_static/jquery.js new file mode 100644 index 00000000000..7c243080233 --- /dev/null +++ b/_static/jquery.js @@ -0,0 +1,154 @@ +/*! + * jQuery JavaScript Library v1.4.2 + * http://jquery.com/ + * + * Copyright 2010, John Resig + * Dual licensed under the MIT or GPL Version 2 licenses. + * http://jquery.org/license + * + * Includes Sizzle.js + * http://sizzlejs.com/ + * Copyright 2010, The Dojo Foundation + * Released under the MIT, BSD, and GPL Licenses. 
+ * + * Date: Sat Feb 13 22:33:48 2010 -0500 + */ +(function(A,w){function ma(){if(!c.isReady){try{s.documentElement.doScroll("left")}catch(a){setTimeout(ma,1);return}c.ready()}}function Qa(a,b){b.src?c.ajax({url:b.src,async:false,dataType:"script"}):c.globalEval(b.text||b.textContent||b.innerHTML||"");b.parentNode&&b.parentNode.removeChild(b)}function X(a,b,d,f,e,j){var i=a.length;if(typeof b==="object"){for(var o in b)X(a,o,b[o],f,e,d);return a}if(d!==w){f=!j&&f&&c.isFunction(d);for(o=0;o)[^>]*$|^#([\w-]+)$/,Ua=/^.[^:#\[\.,]*$/,Va=/\S/, +Wa=/^(\s|\u00A0)+|(\s|\u00A0)+$/g,Xa=/^<(\w+)\s*\/?>(?:<\/\1>)?$/,P=navigator.userAgent,xa=false,Q=[],L,$=Object.prototype.toString,aa=Object.prototype.hasOwnProperty,ba=Array.prototype.push,R=Array.prototype.slice,ya=Array.prototype.indexOf;c.fn=c.prototype={init:function(a,b){var d,f;if(!a)return this;if(a.nodeType){this.context=this[0]=a;this.length=1;return this}if(a==="body"&&!b){this.context=s;this[0]=s.body;this.selector="body";this.length=1;return this}if(typeof a==="string")if((d=Ta.exec(a))&& +(d[1]||!b))if(d[1]){f=b?b.ownerDocument||b:s;if(a=Xa.exec(a))if(c.isPlainObject(b)){a=[s.createElement(a[1])];c.fn.attr.call(a,b,true)}else a=[f.createElement(a[1])];else{a=sa([d[1]],[f]);a=(a.cacheable?a.fragment.cloneNode(true):a.fragment).childNodes}return c.merge(this,a)}else{if(b=s.getElementById(d[2])){if(b.id!==d[2])return T.find(a);this.length=1;this[0]=b}this.context=s;this.selector=a;return this}else if(!b&&/^\w+$/.test(a)){this.selector=a;this.context=s;a=s.getElementsByTagName(a);return c.merge(this, +a)}else return!b||b.jquery?(b||T).find(a):c(b).find(a);else if(c.isFunction(a))return T.ready(a);if(a.selector!==w){this.selector=a.selector;this.context=a.context}return c.makeArray(a,this)},selector:"",jquery:"1.4.2",length:0,size:function(){return this.length},toArray:function(){return R.call(this,0)},get:function(a){return a==null?this.toArray():a<0?this.slice(a)[0]:this[a]},pushStack:function(a,b,d){var f=c();c.isArray(a)?ba.apply(f,a):c.merge(f,a);f.prevObject=this;f.context=this.context;if(b=== +"find")f.selector=this.selector+(this.selector?" ":"")+d;else if(b)f.selector=this.selector+"."+b+"("+d+")";return f},each:function(a,b){return c.each(this,a,b)},ready:function(a){c.bindReady();if(c.isReady)a.call(s,c);else Q&&Q.push(a);return this},eq:function(a){return a===-1?this.slice(a):this.slice(a,+a+1)},first:function(){return this.eq(0)},last:function(){return this.eq(-1)},slice:function(){return this.pushStack(R.apply(this,arguments),"slice",R.call(arguments).join(","))},map:function(a){return this.pushStack(c.map(this, +function(b,d){return a.call(b,d,b)}))},end:function(){return this.prevObject||c(null)},push:ba,sort:[].sort,splice:[].splice};c.fn.init.prototype=c.fn;c.extend=c.fn.extend=function(){var a=arguments[0]||{},b=1,d=arguments.length,f=false,e,j,i,o;if(typeof a==="boolean"){f=a;a=arguments[1]||{};b=2}if(typeof a!=="object"&&!c.isFunction(a))a={};if(d===b){a=this;--b}for(;b
a"; +var e=d.getElementsByTagName("*"),j=d.getElementsByTagName("a")[0];if(!(!e||!e.length||!j)){c.support={leadingWhitespace:d.firstChild.nodeType===3,tbody:!d.getElementsByTagName("tbody").length,htmlSerialize:!!d.getElementsByTagName("link").length,style:/red/.test(j.getAttribute("style")),hrefNormalized:j.getAttribute("href")==="/a",opacity:/^0.55$/.test(j.style.opacity),cssFloat:!!j.style.cssFloat,checkOn:d.getElementsByTagName("input")[0].value==="on",optSelected:s.createElement("select").appendChild(s.createElement("option")).selected, +parentNode:d.removeChild(d.appendChild(s.createElement("div"))).parentNode===null,deleteExpando:true,checkClone:false,scriptEval:false,noCloneEvent:true,boxModel:null};b.type="text/javascript";try{b.appendChild(s.createTextNode("window."+f+"=1;"))}catch(i){}a.insertBefore(b,a.firstChild);if(A[f]){c.support.scriptEval=true;delete A[f]}try{delete b.test}catch(o){c.support.deleteExpando=false}a.removeChild(b);if(d.attachEvent&&d.fireEvent){d.attachEvent("onclick",function k(){c.support.noCloneEvent= +false;d.detachEvent("onclick",k)});d.cloneNode(true).fireEvent("onclick")}d=s.createElement("div");d.innerHTML="";a=s.createDocumentFragment();a.appendChild(d.firstChild);c.support.checkClone=a.cloneNode(true).cloneNode(true).lastChild.checked;c(function(){var k=s.createElement("div");k.style.width=k.style.paddingLeft="1px";s.body.appendChild(k);c.boxModel=c.support.boxModel=k.offsetWidth===2;s.body.removeChild(k).style.display="none"});a=function(k){var n= +s.createElement("div");k="on"+k;var r=k in n;if(!r){n.setAttribute(k,"return;");r=typeof n[k]==="function"}return r};c.support.submitBubbles=a("submit");c.support.changeBubbles=a("change");a=b=d=e=j=null}})();c.props={"for":"htmlFor","class":"className",readonly:"readOnly",maxlength:"maxLength",cellspacing:"cellSpacing",rowspan:"rowSpan",colspan:"colSpan",tabindex:"tabIndex",usemap:"useMap",frameborder:"frameBorder"};var G="jQuery"+J(),Ya=0,za={};c.extend({cache:{},expando:G,noData:{embed:true,object:true, +applet:true},data:function(a,b,d){if(!(a.nodeName&&c.noData[a.nodeName.toLowerCase()])){a=a==A?za:a;var f=a[G],e=c.cache;if(!f&&typeof b==="string"&&d===w)return null;f||(f=++Ya);if(typeof b==="object"){a[G]=f;e[f]=c.extend(true,{},b)}else if(!e[f]){a[G]=f;e[f]={}}a=e[f];if(d!==w)a[b]=d;return typeof b==="string"?a[b]:a}},removeData:function(a,b){if(!(a.nodeName&&c.noData[a.nodeName.toLowerCase()])){a=a==A?za:a;var d=a[G],f=c.cache,e=f[d];if(b){if(e){delete e[b];c.isEmptyObject(e)&&c.removeData(a)}}else{if(c.support.deleteExpando)delete a[c.expando]; +else a.removeAttribute&&a.removeAttribute(c.expando);delete f[d]}}}});c.fn.extend({data:function(a,b){if(typeof a==="undefined"&&this.length)return c.data(this[0]);else if(typeof a==="object")return this.each(function(){c.data(this,a)});var d=a.split(".");d[1]=d[1]?"."+d[1]:"";if(b===w){var f=this.triggerHandler("getData"+d[1]+"!",[d[0]]);if(f===w&&this.length)f=c.data(this[0],a);return f===w&&d[1]?this.data(d[0]):f}else return this.trigger("setData"+d[1]+"!",[d[0],b]).each(function(){c.data(this, +a,b)})},removeData:function(a){return this.each(function(){c.removeData(this,a)})}});c.extend({queue:function(a,b,d){if(a){b=(b||"fx")+"queue";var f=c.data(a,b);if(!d)return f||[];if(!f||c.isArray(d))f=c.data(a,b,c.makeArray(d));else f.push(d);return f}},dequeue:function(a,b){b=b||"fx";var 
d=c.queue(a,b),f=d.shift();if(f==="inprogress")f=d.shift();if(f){b==="fx"&&d.unshift("inprogress");f.call(a,function(){c.dequeue(a,b)})}}});c.fn.extend({queue:function(a,b){if(typeof a!=="string"){b=a;a="fx"}if(b=== +w)return c.queue(this[0],a);return this.each(function(){var d=c.queue(this,a,b);a==="fx"&&d[0]!=="inprogress"&&c.dequeue(this,a)})},dequeue:function(a){return this.each(function(){c.dequeue(this,a)})},delay:function(a,b){a=c.fx?c.fx.speeds[a]||a:a;b=b||"fx";return this.queue(b,function(){var d=this;setTimeout(function(){c.dequeue(d,b)},a)})},clearQueue:function(a){return this.queue(a||"fx",[])}});var Aa=/[\n\t]/g,ca=/\s+/,Za=/\r/g,$a=/href|src|style/,ab=/(button|input)/i,bb=/(button|input|object|select|textarea)/i, +cb=/^(a|area)$/i,Ba=/radio|checkbox/;c.fn.extend({attr:function(a,b){return X(this,a,b,true,c.attr)},removeAttr:function(a){return this.each(function(){c.attr(this,a,"");this.nodeType===1&&this.removeAttribute(a)})},addClass:function(a){if(c.isFunction(a))return this.each(function(n){var r=c(this);r.addClass(a.call(this,n,r.attr("class")))});if(a&&typeof a==="string")for(var b=(a||"").split(ca),d=0,f=this.length;d-1)return true;return false},val:function(a){if(a===w){var b=this[0];if(b){if(c.nodeName(b,"option"))return(b.attributes.value||{}).specified?b.value:b.text;if(c.nodeName(b,"select")){var d=b.selectedIndex,f=[],e=b.options;b=b.type==="select-one";if(d<0)return null;var j=b?d:0;for(d=b?d+1:e.length;j=0;else if(c.nodeName(this,"select")){var u=c.makeArray(r);c("option",this).each(function(){this.selected= +c.inArray(c(this).val(),u)>=0});if(!u.length)this.selectedIndex=-1}else this.value=r}})}});c.extend({attrFn:{val:true,css:true,html:true,text:true,data:true,width:true,height:true,offset:true},attr:function(a,b,d,f){if(!a||a.nodeType===3||a.nodeType===8)return w;if(f&&b in c.attrFn)return c(a)[b](d);f=a.nodeType!==1||!c.isXMLDoc(a);var e=d!==w;b=f&&c.props[b]||b;if(a.nodeType===1){var j=$a.test(b);if(b in a&&f&&!j){if(e){b==="type"&&ab.test(a.nodeName)&&a.parentNode&&c.error("type property can't be changed"); +a[b]=d}if(c.nodeName(a,"form")&&a.getAttributeNode(b))return a.getAttributeNode(b).nodeValue;if(b==="tabIndex")return(b=a.getAttributeNode("tabIndex"))&&b.specified?b.value:bb.test(a.nodeName)||cb.test(a.nodeName)&&a.href?0:w;return a[b]}if(!c.support.style&&f&&b==="style"){if(e)a.style.cssText=""+d;return a.style.cssText}e&&a.setAttribute(b,""+d);a=!c.support.hrefNormalized&&f&&j?a.getAttribute(b,2):a.getAttribute(b);return a===null?w:a}return c.style(a,b,d)}});var O=/\.(.*)$/,db=function(a){return a.replace(/[^\w\s\.\|`]/g, +function(b){return"\\"+b})};c.event={add:function(a,b,d,f){if(!(a.nodeType===3||a.nodeType===8)){if(a.setInterval&&a!==A&&!a.frameElement)a=A;var e,j;if(d.handler){e=d;d=e.handler}if(!d.guid)d.guid=c.guid++;if(j=c.data(a)){var i=j.events=j.events||{},o=j.handle;if(!o)j.handle=o=function(){return typeof c!=="undefined"&&!c.event.triggered?c.event.handle.apply(o.elem,arguments):w};o.elem=a;b=b.split(" ");for(var k,n=0,r;k=b[n++];){j=e?c.extend({},e):{handler:d,data:f};if(k.indexOf(".")>-1){r=k.split("."); +k=r.shift();j.namespace=r.slice(0).sort().join(".")}else{r=[];j.namespace=""}j.type=k;j.guid=d.guid;var u=i[k],z=c.event.special[k]||{};if(!u){u=i[k]=[];if(!z.setup||z.setup.call(a,f,r,o)===false)if(a.addEventListener)a.addEventListener(k,o,false);else 
a.attachEvent&&a.attachEvent("on"+k,o)}if(z.add){z.add.call(a,j);if(!j.handler.guid)j.handler.guid=d.guid}u.push(j);c.event.global[k]=true}a=null}}},global:{},remove:function(a,b,d,f){if(!(a.nodeType===3||a.nodeType===8)){var e,j=0,i,o,k,n,r,u,z=c.data(a), +C=z&&z.events;if(z&&C){if(b&&b.type){d=b.handler;b=b.type}if(!b||typeof b==="string"&&b.charAt(0)==="."){b=b||"";for(e in C)c.event.remove(a,e+b)}else{for(b=b.split(" ");e=b[j++];){n=e;i=e.indexOf(".")<0;o=[];if(!i){o=e.split(".");e=o.shift();k=new RegExp("(^|\\.)"+c.map(o.slice(0).sort(),db).join("\\.(?:.*\\.)?")+"(\\.|$)")}if(r=C[e])if(d){n=c.event.special[e]||{};for(B=f||0;B=0){a.type= +e=e.slice(0,-1);a.exclusive=true}if(!d){a.stopPropagation();c.event.global[e]&&c.each(c.cache,function(){this.events&&this.events[e]&&c.event.trigger(a,b,this.handle.elem)})}if(!d||d.nodeType===3||d.nodeType===8)return w;a.result=w;a.target=d;b=c.makeArray(b);b.unshift(a)}a.currentTarget=d;(f=c.data(d,"handle"))&&f.apply(d,b);f=d.parentNode||d.ownerDocument;try{if(!(d&&d.nodeName&&c.noData[d.nodeName.toLowerCase()]))if(d["on"+e]&&d["on"+e].apply(d,b)===false)a.result=false}catch(j){}if(!a.isPropagationStopped()&& +f)c.event.trigger(a,b,f,true);else if(!a.isDefaultPrevented()){f=a.target;var i,o=c.nodeName(f,"a")&&e==="click",k=c.event.special[e]||{};if((!k._default||k._default.call(d,a)===false)&&!o&&!(f&&f.nodeName&&c.noData[f.nodeName.toLowerCase()])){try{if(f[e]){if(i=f["on"+e])f["on"+e]=null;c.event.triggered=true;f[e]()}}catch(n){}if(i)f["on"+e]=i;c.event.triggered=false}}},handle:function(a){var b,d,f,e;a=arguments[0]=c.event.fix(a||A.event);a.currentTarget=this;b=a.type.indexOf(".")<0&&!a.exclusive; +if(!b){d=a.type.split(".");a.type=d.shift();f=new RegExp("(^|\\.)"+d.slice(0).sort().join("\\.(?:.*\\.)?")+"(\\.|$)")}e=c.data(this,"events");d=e[a.type];if(e&&d){d=d.slice(0);e=0;for(var j=d.length;e-1?c.map(a.options,function(f){return f.selected}).join("-"):"";else if(a.nodeName.toLowerCase()==="select")d=a.selectedIndex;return d},fa=function(a,b){var d=a.target,f,e;if(!(!da.test(d.nodeName)||d.readOnly)){f=c.data(d,"_change_data");e=Fa(d);if(a.type!=="focusout"||d.type!=="radio")c.data(d,"_change_data", +e);if(!(f===w||e===f))if(f!=null||e){a.type="change";return c.event.trigger(a,b,d)}}};c.event.special.change={filters:{focusout:fa,click:function(a){var b=a.target,d=b.type;if(d==="radio"||d==="checkbox"||b.nodeName.toLowerCase()==="select")return fa.call(this,a)},keydown:function(a){var b=a.target,d=b.type;if(a.keyCode===13&&b.nodeName.toLowerCase()!=="textarea"||a.keyCode===32&&(d==="checkbox"||d==="radio")||d==="select-multiple")return fa.call(this,a)},beforeactivate:function(a){a=a.target;c.data(a, +"_change_data",Fa(a))}},setup:function(){if(this.type==="file")return false;for(var a in ea)c.event.add(this,a+".specialChange",ea[a]);return da.test(this.nodeName)},teardown:function(){c.event.remove(this,".specialChange");return da.test(this.nodeName)}};ea=c.event.special.change.filters}s.addEventListener&&c.each({focus:"focusin",blur:"focusout"},function(a,b){function d(f){f=c.event.fix(f);f.type=b;return c.event.handle.call(this,f)}c.event.special[b]={setup:function(){this.addEventListener(a, +d,true)},teardown:function(){this.removeEventListener(a,d,true)}}});c.each(["bind","one"],function(a,b){c.fn[b]=function(d,f,e){if(typeof d==="object"){for(var j in d)this[b](j,f,d[j],e);return this}if(c.isFunction(f)){e=f;f=w}var i=b==="one"?c.proxy(e,function(k){c(this).unbind(k,i);return 
e.apply(this,arguments)}):e;if(d==="unload"&&b!=="one")this.one(d,f,e);else{j=0;for(var o=this.length;j0){y=t;break}}t=t[g]}m[q]=y}}}var f=/((?:\((?:\([^()]+\)|[^()]+)+\)|\[(?:\[[^[\]]*\]|['"][^'"]*['"]|[^[\]'"]+)+\]|\\.|[^ >+~,(\[\\]+)+|[>+~])(\s*,\s*)?((?:.|\r|\n)*)/g, +e=0,j=Object.prototype.toString,i=false,o=true;[0,0].sort(function(){o=false;return 0});var k=function(g,h,l,m){l=l||[];var q=h=h||s;if(h.nodeType!==1&&h.nodeType!==9)return[];if(!g||typeof g!=="string")return l;for(var p=[],v,t,y,S,H=true,M=x(h),I=g;(f.exec(""),v=f.exec(I))!==null;){I=v[3];p.push(v[1]);if(v[2]){S=v[3];break}}if(p.length>1&&r.exec(g))if(p.length===2&&n.relative[p[0]])t=ga(p[0]+p[1],h);else for(t=n.relative[p[0]]?[h]:k(p.shift(),h);p.length;){g=p.shift();if(n.relative[g])g+=p.shift(); +t=ga(g,t)}else{if(!m&&p.length>1&&h.nodeType===9&&!M&&n.match.ID.test(p[0])&&!n.match.ID.test(p[p.length-1])){v=k.find(p.shift(),h,M);h=v.expr?k.filter(v.expr,v.set)[0]:v.set[0]}if(h){v=m?{expr:p.pop(),set:z(m)}:k.find(p.pop(),p.length===1&&(p[0]==="~"||p[0]==="+")&&h.parentNode?h.parentNode:h,M);t=v.expr?k.filter(v.expr,v.set):v.set;if(p.length>0)y=z(t);else H=false;for(;p.length;){var D=p.pop();v=D;if(n.relative[D])v=p.pop();else D="";if(v==null)v=h;n.relative[D](y,v,M)}}else y=[]}y||(y=t);y||k.error(D|| +g);if(j.call(y)==="[object Array]")if(H)if(h&&h.nodeType===1)for(g=0;y[g]!=null;g++){if(y[g]&&(y[g]===true||y[g].nodeType===1&&E(h,y[g])))l.push(t[g])}else for(g=0;y[g]!=null;g++)y[g]&&y[g].nodeType===1&&l.push(t[g]);else l.push.apply(l,y);else z(y,l);if(S){k(S,q,l,m);k.uniqueSort(l)}return l};k.uniqueSort=function(g){if(B){i=o;g.sort(B);if(i)for(var h=1;h":function(g,h){var l=typeof h==="string";if(l&&!/\W/.test(h)){h=h.toLowerCase();for(var m=0,q=g.length;m=0))l||m.push(v);else if(l)h[p]=false;return false},ID:function(g){return g[1].replace(/\\/g,"")},TAG:function(g){return g[1].toLowerCase()}, +CHILD:function(g){if(g[1]==="nth"){var h=/(-?)(\d*)n((?:\+|-)?\d*)/.exec(g[2]==="even"&&"2n"||g[2]==="odd"&&"2n+1"||!/\D/.test(g[2])&&"0n+"+g[2]||g[2]);g[2]=h[1]+(h[2]||1)-0;g[3]=h[3]-0}g[0]=e++;return g},ATTR:function(g,h,l,m,q,p){h=g[1].replace(/\\/g,"");if(!p&&n.attrMap[h])g[1]=n.attrMap[h];if(g[2]==="~=")g[4]=" "+g[4]+" ";return g},PSEUDO:function(g,h,l,m,q){if(g[1]==="not")if((f.exec(g[3])||"").length>1||/^\w/.test(g[3]))g[3]=k(g[3],null,null,h);else{g=k.filter(g[3],h,l,true^q);l||m.push.apply(m, +g);return false}else if(n.match.POS.test(g[0])||n.match.CHILD.test(g[0]))return true;return g},POS:function(g){g.unshift(true);return g}},filters:{enabled:function(g){return g.disabled===false&&g.type!=="hidden"},disabled:function(g){return g.disabled===true},checked:function(g){return g.checked===true},selected:function(g){return g.selected===true},parent:function(g){return!!g.firstChild},empty:function(g){return!g.firstChild},has:function(g,h,l){return!!k(l[3],g).length},header:function(g){return/h\d/i.test(g.nodeName)}, +text:function(g){return"text"===g.type},radio:function(g){return"radio"===g.type},checkbox:function(g){return"checkbox"===g.type},file:function(g){return"file"===g.type},password:function(g){return"password"===g.type},submit:function(g){return"submit"===g.type},image:function(g){return"image"===g.type},reset:function(g){return"reset"===g.type},button:function(g){return"button"===g.type||g.nodeName.toLowerCase()==="button"},input:function(g){return/input|select|textarea|button/i.test(g.nodeName)}}, +setFilters:{first:function(g,h){return h===0},last:function(g,h,l,m){return 
h===m.length-1},even:function(g,h){return h%2===0},odd:function(g,h){return h%2===1},lt:function(g,h,l){return hl[3]-0},nth:function(g,h,l){return l[3]-0===h},eq:function(g,h,l){return l[3]-0===h}},filter:{PSEUDO:function(g,h,l,m){var q=h[1],p=n.filters[q];if(p)return p(g,l,h,m);else if(q==="contains")return(g.textContent||g.innerText||a([g])||"").indexOf(h[3])>=0;else if(q==="not"){h= +h[3];l=0;for(m=h.length;l=0}},ID:function(g,h){return g.nodeType===1&&g.getAttribute("id")===h},TAG:function(g,h){return h==="*"&&g.nodeType===1||g.nodeName.toLowerCase()===h},CLASS:function(g,h){return(" "+(g.className||g.getAttribute("class"))+" ").indexOf(h)>-1},ATTR:function(g,h){var l=h[1];g=n.attrHandle[l]?n.attrHandle[l](g):g[l]!=null?g[l]:g.getAttribute(l);l=g+"";var m=h[2];h=h[4];return g==null?m==="!=":m=== +"="?l===h:m==="*="?l.indexOf(h)>=0:m==="~="?(" "+l+" ").indexOf(h)>=0:!h?l&&g!==false:m==="!="?l!==h:m==="^="?l.indexOf(h)===0:m==="$="?l.substr(l.length-h.length)===h:m==="|="?l===h||l.substr(0,h.length+1)===h+"-":false},POS:function(g,h,l,m){var q=n.setFilters[h[2]];if(q)return q(g,l,h,m)}}},r=n.match.POS;for(var u in n.match){n.match[u]=new RegExp(n.match[u].source+/(?![^\[]*\])(?![^\(]*\))/.source);n.leftMatch[u]=new RegExp(/(^(?:.|\r|\n)*?)/.source+n.match[u].source.replace(/\\(\d+)/g,function(g, +h){return"\\"+(h-0+1)}))}var z=function(g,h){g=Array.prototype.slice.call(g,0);if(h){h.push.apply(h,g);return h}return g};try{Array.prototype.slice.call(s.documentElement.childNodes,0)}catch(C){z=function(g,h){h=h||[];if(j.call(g)==="[object Array]")Array.prototype.push.apply(h,g);else if(typeof g.length==="number")for(var l=0,m=g.length;l";var l=s.documentElement;l.insertBefore(g,l.firstChild);if(s.getElementById(h)){n.find.ID=function(m,q,p){if(typeof q.getElementById!=="undefined"&&!p)return(q=q.getElementById(m[1]))?q.id===m[1]||typeof q.getAttributeNode!=="undefined"&& +q.getAttributeNode("id").nodeValue===m[1]?[q]:w:[]};n.filter.ID=function(m,q){var p=typeof m.getAttributeNode!=="undefined"&&m.getAttributeNode("id");return m.nodeType===1&&p&&p.nodeValue===q}}l.removeChild(g);l=g=null})();(function(){var g=s.createElement("div");g.appendChild(s.createComment(""));if(g.getElementsByTagName("*").length>0)n.find.TAG=function(h,l){l=l.getElementsByTagName(h[1]);if(h[1]==="*"){h=[];for(var m=0;l[m];m++)l[m].nodeType===1&&h.push(l[m]);l=h}return l};g.innerHTML=""; +if(g.firstChild&&typeof g.firstChild.getAttribute!=="undefined"&&g.firstChild.getAttribute("href")!=="#")n.attrHandle.href=function(h){return h.getAttribute("href",2)};g=null})();s.querySelectorAll&&function(){var g=k,h=s.createElement("div");h.innerHTML="

";if(!(h.querySelectorAll&&h.querySelectorAll(".TEST").length===0)){k=function(m,q,p,v){q=q||s;if(!v&&q.nodeType===9&&!x(q))try{return z(q.querySelectorAll(m),p)}catch(t){}return g(m,q,p,v)};for(var l in g)k[l]=g[l];h=null}}(); +(function(){var g=s.createElement("div");g.innerHTML="
";if(!(!g.getElementsByClassName||g.getElementsByClassName("e").length===0)){g.lastChild.className="e";if(g.getElementsByClassName("e").length!==1){n.order.splice(1,0,"CLASS");n.find.CLASS=function(h,l,m){if(typeof l.getElementsByClassName!=="undefined"&&!m)return l.getElementsByClassName(h[1])};g=null}}})();var E=s.compareDocumentPosition?function(g,h){return!!(g.compareDocumentPosition(h)&16)}: +function(g,h){return g!==h&&(g.contains?g.contains(h):true)},x=function(g){return(g=(g?g.ownerDocument||g:0).documentElement)?g.nodeName!=="HTML":false},ga=function(g,h){var l=[],m="",q;for(h=h.nodeType?[h]:h;q=n.match.PSEUDO.exec(g);){m+=q[0];g=g.replace(n.match.PSEUDO,"")}g=n.relative[g]?g+"*":g;q=0;for(var p=h.length;q=0===d})};c.fn.extend({find:function(a){for(var b=this.pushStack("","find",a),d=0,f=0,e=this.length;f0)for(var j=d;j0},closest:function(a,b){if(c.isArray(a)){var d=[],f=this[0],e,j= +{},i;if(f&&a.length){e=0;for(var o=a.length;e-1:c(f).is(e)){d.push({selector:i,elem:f});delete j[i]}}f=f.parentNode}}return d}var k=c.expr.match.POS.test(a)?c(a,b||this.context):null;return this.map(function(n,r){for(;r&&r.ownerDocument&&r!==b;){if(k?k.index(r)>-1:c(r).is(a))return r;r=r.parentNode}return null})},index:function(a){if(!a||typeof a=== +"string")return c.inArray(this[0],a?c(a):this.parent().children());return c.inArray(a.jquery?a[0]:a,this)},add:function(a,b){a=typeof a==="string"?c(a,b||this.context):c.makeArray(a);b=c.merge(this.get(),a);return this.pushStack(qa(a[0])||qa(b[0])?b:c.unique(b))},andSelf:function(){return this.add(this.prevObject)}});c.each({parent:function(a){return(a=a.parentNode)&&a.nodeType!==11?a:null},parents:function(a){return c.dir(a,"parentNode")},parentsUntil:function(a,b,d){return c.dir(a,"parentNode", +d)},next:function(a){return c.nth(a,2,"nextSibling")},prev:function(a){return c.nth(a,2,"previousSibling")},nextAll:function(a){return c.dir(a,"nextSibling")},prevAll:function(a){return c.dir(a,"previousSibling")},nextUntil:function(a,b,d){return c.dir(a,"nextSibling",d)},prevUntil:function(a,b,d){return c.dir(a,"previousSibling",d)},siblings:function(a){return c.sibling(a.parentNode.firstChild,a)},children:function(a){return c.sibling(a.firstChild)},contents:function(a){return c.nodeName(a,"iframe")? +a.contentDocument||a.contentWindow.document:c.makeArray(a.childNodes)}},function(a,b){c.fn[a]=function(d,f){var e=c.map(this,b,d);eb.test(a)||(f=d);if(f&&typeof f==="string")e=c.filter(f,e);e=this.length>1?c.unique(e):e;if((this.length>1||gb.test(f))&&fb.test(a))e=e.reverse();return this.pushStack(e,a,R.call(arguments).join(","))}});c.extend({filter:function(a,b,d){if(d)a=":not("+a+")";return c.find.matches(a,b)},dir:function(a,b,d){var f=[];for(a=a[b];a&&a.nodeType!==9&&(d===w||a.nodeType!==1||!c(a).is(d));){a.nodeType=== +1&&f.push(a);a=a[b]}return f},nth:function(a,b,d){b=b||1;for(var f=0;a;a=a[d])if(a.nodeType===1&&++f===b)break;return a},sibling:function(a,b){for(var d=[];a;a=a.nextSibling)a.nodeType===1&&a!==b&&d.push(a);return d}});var Ja=/ jQuery\d+="(?:\d+|null)"/g,V=/^\s+/,Ka=/(<([\w:]+)[^>]*?)\/>/g,hb=/^(?:area|br|col|embed|hr|img|input|link|meta|param)$/i,La=/<([\w:]+)/,ib=/"},F={option:[1,""],legend:[1,"
","
"],thead:[1,"","
"],tr:[2,"","
"],td:[3,"","
"],col:[2,"","
"],area:[1,"",""],_default:[0,"",""]};F.optgroup=F.option;F.tbody=F.tfoot=F.colgroup=F.caption=F.thead;F.th=F.td;if(!c.support.htmlSerialize)F._default=[1,"div
","
"];c.fn.extend({text:function(a){if(c.isFunction(a))return this.each(function(b){var d= +c(this);d.text(a.call(this,b,d.text()))});if(typeof a!=="object"&&a!==w)return this.empty().append((this[0]&&this[0].ownerDocument||s).createTextNode(a));return c.text(this)},wrapAll:function(a){if(c.isFunction(a))return this.each(function(d){c(this).wrapAll(a.call(this,d))});if(this[0]){var b=c(a,this[0].ownerDocument).eq(0).clone(true);this[0].parentNode&&b.insertBefore(this[0]);b.map(function(){for(var d=this;d.firstChild&&d.firstChild.nodeType===1;)d=d.firstChild;return d}).append(this)}return this}, +wrapInner:function(a){if(c.isFunction(a))return this.each(function(b){c(this).wrapInner(a.call(this,b))});return this.each(function(){var b=c(this),d=b.contents();d.length?d.wrapAll(a):b.append(a)})},wrap:function(a){return this.each(function(){c(this).wrapAll(a)})},unwrap:function(){return this.parent().each(function(){c.nodeName(this,"body")||c(this).replaceWith(this.childNodes)}).end()},append:function(){return this.domManip(arguments,true,function(a){this.nodeType===1&&this.appendChild(a)})}, +prepend:function(){return this.domManip(arguments,true,function(a){this.nodeType===1&&this.insertBefore(a,this.firstChild)})},before:function(){if(this[0]&&this[0].parentNode)return this.domManip(arguments,false,function(b){this.parentNode.insertBefore(b,this)});else if(arguments.length){var a=c(arguments[0]);a.push.apply(a,this.toArray());return this.pushStack(a,"before",arguments)}},after:function(){if(this[0]&&this[0].parentNode)return this.domManip(arguments,false,function(b){this.parentNode.insertBefore(b, +this.nextSibling)});else if(arguments.length){var a=this.pushStack(this,"after",arguments);a.push.apply(a,c(arguments[0]).toArray());return a}},remove:function(a,b){for(var d=0,f;(f=this[d])!=null;d++)if(!a||c.filter(a,[f]).length){if(!b&&f.nodeType===1){c.cleanData(f.getElementsByTagName("*"));c.cleanData([f])}f.parentNode&&f.parentNode.removeChild(f)}return this},empty:function(){for(var a=0,b;(b=this[a])!=null;a++)for(b.nodeType===1&&c.cleanData(b.getElementsByTagName("*"));b.firstChild;)b.removeChild(b.firstChild); +return this},clone:function(a){var b=this.map(function(){if(!c.support.noCloneEvent&&!c.isXMLDoc(this)){var d=this.outerHTML,f=this.ownerDocument;if(!d){d=f.createElement("div");d.appendChild(this.cloneNode(true));d=d.innerHTML}return c.clean([d.replace(Ja,"").replace(/=([^="'>\s]+\/)>/g,'="$1">').replace(V,"")],f)[0]}else return this.cloneNode(true)});if(a===true){ra(this,b);ra(this.find("*"),b.find("*"))}return b},html:function(a){if(a===w)return this[0]&&this[0].nodeType===1?this[0].innerHTML.replace(Ja, +""):null;else if(typeof a==="string"&&!ta.test(a)&&(c.support.leadingWhitespace||!V.test(a))&&!F[(La.exec(a)||["",""])[1].toLowerCase()]){a=a.replace(Ka,Ma);try{for(var b=0,d=this.length;b0||e.cacheable||this.length>1?k.cloneNode(true):k)}o.length&&c.each(o,Qa)}return this}});c.fragments={};c.each({appendTo:"append",prependTo:"prepend",insertBefore:"before",insertAfter:"after",replaceAll:"replaceWith"},function(a,b){c.fn[a]=function(d){var f=[];d=c(d);var e=this.length===1&&this[0].parentNode;if(e&&e.nodeType===11&&e.childNodes.length===1&&d.length===1){d[b](this[0]); +return this}else{e=0;for(var j=d.length;e0?this.clone(true):this).get();c.fn[b].apply(c(d[e]),i);f=f.concat(i)}return this.pushStack(f,a,d.selector)}}});c.extend({clean:function(a,b,d,f){b=b||s;if(typeof b.createElement==="undefined")b=b.ownerDocument||b[0]&&b[0].ownerDocument||s;for(var 
e=[],j=0,i;(i=a[j])!=null;j++){if(typeof i==="number")i+="";if(i){if(typeof i==="string"&&!jb.test(i))i=b.createTextNode(i);else if(typeof i==="string"){i=i.replace(Ka,Ma);var o=(La.exec(i)||["", +""])[1].toLowerCase(),k=F[o]||F._default,n=k[0],r=b.createElement("div");for(r.innerHTML=k[1]+i+k[2];n--;)r=r.lastChild;if(!c.support.tbody){n=ib.test(i);o=o==="table"&&!n?r.firstChild&&r.firstChild.childNodes:k[1]===""&&!n?r.childNodes:[];for(k=o.length-1;k>=0;--k)c.nodeName(o[k],"tbody")&&!o[k].childNodes.length&&o[k].parentNode.removeChild(o[k])}!c.support.leadingWhitespace&&V.test(i)&&r.insertBefore(b.createTextNode(V.exec(i)[0]),r.firstChild);i=r.childNodes}if(i.nodeType)e.push(i);else e= +c.merge(e,i)}}if(d)for(j=0;e[j];j++)if(f&&c.nodeName(e[j],"script")&&(!e[j].type||e[j].type.toLowerCase()==="text/javascript"))f.push(e[j].parentNode?e[j].parentNode.removeChild(e[j]):e[j]);else{e[j].nodeType===1&&e.splice.apply(e,[j+1,0].concat(c.makeArray(e[j].getElementsByTagName("script"))));d.appendChild(e[j])}return e},cleanData:function(a){for(var b,d,f=c.cache,e=c.event.special,j=c.support.deleteExpando,i=0,o;(o=a[i])!=null;i++)if(d=o[c.expando]){b=f[d];if(b.events)for(var k in b.events)e[k]? +c.event.remove(o,k):Ca(o,k,b.handle);if(j)delete o[c.expando];else o.removeAttribute&&o.removeAttribute(c.expando);delete f[d]}}});var kb=/z-?index|font-?weight|opacity|zoom|line-?height/i,Na=/alpha\([^)]*\)/,Oa=/opacity=([^)]*)/,ha=/float/i,ia=/-([a-z])/ig,lb=/([A-Z])/g,mb=/^-?\d+(?:px)?$/i,nb=/^-?\d/,ob={position:"absolute",visibility:"hidden",display:"block"},pb=["Left","Right"],qb=["Top","Bottom"],rb=s.defaultView&&s.defaultView.getComputedStyle,Pa=c.support.cssFloat?"cssFloat":"styleFloat",ja= +function(a,b){return b.toUpperCase()};c.fn.css=function(a,b){return X(this,a,b,true,function(d,f,e){if(e===w)return c.curCSS(d,f);if(typeof e==="number"&&!kb.test(f))e+="px";c.style(d,f,e)})};c.extend({style:function(a,b,d){if(!a||a.nodeType===3||a.nodeType===8)return w;if((b==="width"||b==="height")&&parseFloat(d)<0)d=w;var f=a.style||a,e=d!==w;if(!c.support.opacity&&b==="opacity"){if(e){f.zoom=1;b=parseInt(d,10)+""==="NaN"?"":"alpha(opacity="+d*100+")";a=f.filter||c.curCSS(a,"filter")||"";f.filter= +Na.test(a)?a.replace(Na,b):b}return f.filter&&f.filter.indexOf("opacity=")>=0?parseFloat(Oa.exec(f.filter)[1])/100+"":""}if(ha.test(b))b=Pa;b=b.replace(ia,ja);if(e)f[b]=d;return f[b]},css:function(a,b,d,f){if(b==="width"||b==="height"){var e,j=b==="width"?pb:qb;function i(){e=b==="width"?a.offsetWidth:a.offsetHeight;f!=="border"&&c.each(j,function(){f||(e-=parseFloat(c.curCSS(a,"padding"+this,true))||0);if(f==="margin")e+=parseFloat(c.curCSS(a,"margin"+this,true))||0;else e-=parseFloat(c.curCSS(a, +"border"+this+"Width",true))||0})}a.offsetWidth!==0?i():c.swap(a,ob,i);return Math.max(0,Math.round(e))}return c.curCSS(a,b,d)},curCSS:function(a,b,d){var f,e=a.style;if(!c.support.opacity&&b==="opacity"&&a.currentStyle){f=Oa.test(a.currentStyle.filter||"")?parseFloat(RegExp.$1)/100+"":"";return f===""?"1":f}if(ha.test(b))b=Pa;if(!d&&e&&e[b])f=e[b];else if(rb){if(ha.test(b))b="float";b=b.replace(lb,"-$1").toLowerCase();e=a.ownerDocument.defaultView;if(!e)return null;if(a=e.getComputedStyle(a,null))f= +a.getPropertyValue(b);if(b==="opacity"&&f==="")f="1"}else if(a.currentStyle){d=b.replace(ia,ja);f=a.currentStyle[b]||a.currentStyle[d];if(!mb.test(f)&&nb.test(f)){b=e.left;var 
j=a.runtimeStyle.left;a.runtimeStyle.left=a.currentStyle.left;e.left=d==="fontSize"?"1em":f||0;f=e.pixelLeft+"px";e.left=b;a.runtimeStyle.left=j}}return f},swap:function(a,b,d){var f={};for(var e in b){f[e]=a.style[e];a.style[e]=b[e]}d.call(a);for(e in b)a.style[e]=f[e]}});if(c.expr&&c.expr.filters){c.expr.filters.hidden=function(a){var b= +a.offsetWidth,d=a.offsetHeight,f=a.nodeName.toLowerCase()==="tr";return b===0&&d===0&&!f?true:b>0&&d>0&&!f?false:c.curCSS(a,"display")==="none"};c.expr.filters.visible=function(a){return!c.expr.filters.hidden(a)}}var sb=J(),tb=//gi,ub=/select|textarea/i,vb=/color|date|datetime|email|hidden|month|number|password|range|search|tel|text|time|url|week/i,N=/=\?(&|$)/,ka=/\?/,wb=/(\?|&)_=.*?(&|$)/,xb=/^(\w+:)?\/\/([^\/?#]+)/,yb=/%20/g,zb=c.fn.load;c.fn.extend({load:function(a,b,d){if(typeof a!== +"string")return zb.call(this,a);else if(!this.length)return this;var f=a.indexOf(" ");if(f>=0){var e=a.slice(f,a.length);a=a.slice(0,f)}f="GET";if(b)if(c.isFunction(b)){d=b;b=null}else if(typeof b==="object"){b=c.param(b,c.ajaxSettings.traditional);f="POST"}var j=this;c.ajax({url:a,type:f,dataType:"html",data:b,complete:function(i,o){if(o==="success"||o==="notmodified")j.html(e?c("
").append(i.responseText.replace(tb,"")).find(e):i.responseText);d&&j.each(d,[i.responseText,o,i])}});return this}, +serialize:function(){return c.param(this.serializeArray())},serializeArray:function(){return this.map(function(){return this.elements?c.makeArray(this.elements):this}).filter(function(){return this.name&&!this.disabled&&(this.checked||ub.test(this.nodeName)||vb.test(this.type))}).map(function(a,b){a=c(this).val();return a==null?null:c.isArray(a)?c.map(a,function(d){return{name:b.name,value:d}}):{name:b.name,value:a}}).get()}});c.each("ajaxStart ajaxStop ajaxComplete ajaxError ajaxSuccess ajaxSend".split(" "), +function(a,b){c.fn[b]=function(d){return this.bind(b,d)}});c.extend({get:function(a,b,d,f){if(c.isFunction(b)){f=f||d;d=b;b=null}return c.ajax({type:"GET",url:a,data:b,success:d,dataType:f})},getScript:function(a,b){return c.get(a,null,b,"script")},getJSON:function(a,b,d){return c.get(a,b,d,"json")},post:function(a,b,d,f){if(c.isFunction(b)){f=f||d;d=b;b={}}return c.ajax({type:"POST",url:a,data:b,success:d,dataType:f})},ajaxSetup:function(a){c.extend(c.ajaxSettings,a)},ajaxSettings:{url:location.href, +global:true,type:"GET",contentType:"application/x-www-form-urlencoded",processData:true,async:true,xhr:A.XMLHttpRequest&&(A.location.protocol!=="file:"||!A.ActiveXObject)?function(){return new A.XMLHttpRequest}:function(){try{return new A.ActiveXObject("Microsoft.XMLHTTP")}catch(a){}},accepts:{xml:"application/xml, text/xml",html:"text/html",script:"text/javascript, application/javascript",json:"application/json, text/javascript",text:"text/plain",_default:"*/*"}},lastModified:{},etag:{},ajax:function(a){function b(){e.success&& +e.success.call(k,o,i,x);e.global&&f("ajaxSuccess",[x,e])}function d(){e.complete&&e.complete.call(k,x,i);e.global&&f("ajaxComplete",[x,e]);e.global&&!--c.active&&c.event.trigger("ajaxStop")}function f(q,p){(e.context?c(e.context):c.event).trigger(q,p)}var e=c.extend(true,{},c.ajaxSettings,a),j,i,o,k=a&&a.context||e,n=e.type.toUpperCase();if(e.data&&e.processData&&typeof e.data!=="string")e.data=c.param(e.data,e.traditional);if(e.dataType==="jsonp"){if(n==="GET")N.test(e.url)||(e.url+=(ka.test(e.url)? 
+"&":"?")+(e.jsonp||"callback")+"=?");else if(!e.data||!N.test(e.data))e.data=(e.data?e.data+"&":"")+(e.jsonp||"callback")+"=?";e.dataType="json"}if(e.dataType==="json"&&(e.data&&N.test(e.data)||N.test(e.url))){j=e.jsonpCallback||"jsonp"+sb++;if(e.data)e.data=(e.data+"").replace(N,"="+j+"$1");e.url=e.url.replace(N,"="+j+"$1");e.dataType="script";A[j]=A[j]||function(q){o=q;b();d();A[j]=w;try{delete A[j]}catch(p){}z&&z.removeChild(C)}}if(e.dataType==="script"&&e.cache===null)e.cache=false;if(e.cache=== +false&&n==="GET"){var r=J(),u=e.url.replace(wb,"$1_="+r+"$2");e.url=u+(u===e.url?(ka.test(e.url)?"&":"?")+"_="+r:"")}if(e.data&&n==="GET")e.url+=(ka.test(e.url)?"&":"?")+e.data;e.global&&!c.active++&&c.event.trigger("ajaxStart");r=(r=xb.exec(e.url))&&(r[1]&&r[1]!==location.protocol||r[2]!==location.host);if(e.dataType==="script"&&n==="GET"&&r){var z=s.getElementsByTagName("head")[0]||s.documentElement,C=s.createElement("script");C.src=e.url;if(e.scriptCharset)C.charset=e.scriptCharset;if(!j){var B= +false;C.onload=C.onreadystatechange=function(){if(!B&&(!this.readyState||this.readyState==="loaded"||this.readyState==="complete")){B=true;b();d();C.onload=C.onreadystatechange=null;z&&C.parentNode&&z.removeChild(C)}}}z.insertBefore(C,z.firstChild);return w}var E=false,x=e.xhr();if(x){e.username?x.open(n,e.url,e.async,e.username,e.password):x.open(n,e.url,e.async);try{if(e.data||a&&a.contentType)x.setRequestHeader("Content-Type",e.contentType);if(e.ifModified){c.lastModified[e.url]&&x.setRequestHeader("If-Modified-Since", +c.lastModified[e.url]);c.etag[e.url]&&x.setRequestHeader("If-None-Match",c.etag[e.url])}r||x.setRequestHeader("X-Requested-With","XMLHttpRequest");x.setRequestHeader("Accept",e.dataType&&e.accepts[e.dataType]?e.accepts[e.dataType]+", */*":e.accepts._default)}catch(ga){}if(e.beforeSend&&e.beforeSend.call(k,x,e)===false){e.global&&!--c.active&&c.event.trigger("ajaxStop");x.abort();return false}e.global&&f("ajaxSend",[x,e]);var g=x.onreadystatechange=function(q){if(!x||x.readyState===0||q==="abort"){E|| +d();E=true;if(x)x.onreadystatechange=c.noop}else if(!E&&x&&(x.readyState===4||q==="timeout")){E=true;x.onreadystatechange=c.noop;i=q==="timeout"?"timeout":!c.httpSuccess(x)?"error":e.ifModified&&c.httpNotModified(x,e.url)?"notmodified":"success";var p;if(i==="success")try{o=c.httpData(x,e.dataType,e)}catch(v){i="parsererror";p=v}if(i==="success"||i==="notmodified")j||b();else c.handleError(e,x,i,p);d();q==="timeout"&&x.abort();if(e.async)x=null}};try{var h=x.abort;x.abort=function(){x&&h.call(x); +g("abort")}}catch(l){}e.async&&e.timeout>0&&setTimeout(function(){x&&!E&&g("timeout")},e.timeout);try{x.send(n==="POST"||n==="PUT"||n==="DELETE"?e.data:null)}catch(m){c.handleError(e,x,null,m);d()}e.async||g();return x}},handleError:function(a,b,d,f){if(a.error)a.error.call(a.context||a,b,d,f);if(a.global)(a.context?c(a.context):c.event).trigger("ajaxError",[b,a,f])},active:0,httpSuccess:function(a){try{return!a.status&&location.protocol==="file:"||a.status>=200&&a.status<300||a.status===304||a.status=== +1223||a.status===0}catch(b){}return false},httpNotModified:function(a,b){var d=a.getResponseHeader("Last-Modified"),f=a.getResponseHeader("Etag");if(d)c.lastModified[b]=d;if(f)c.etag[b]=f;return a.status===304||a.status===0},httpData:function(a,b,d){var f=a.getResponseHeader("content-type")||"",e=b==="xml"||!b&&f.indexOf("xml")>=0;a=e?a.responseXML:a.responseText;e&&a.documentElement.nodeName==="parsererror"&&c.error("parsererror");if(d&&d.dataFilter)a=d.dataFilter(a,b);if(typeof 
a==="string")if(b=== +"json"||!b&&f.indexOf("json")>=0)a=c.parseJSON(a);else if(b==="script"||!b&&f.indexOf("javascript")>=0)c.globalEval(a);return a},param:function(a,b){function d(i,o){if(c.isArray(o))c.each(o,function(k,n){b||/\[\]$/.test(i)?f(i,n):d(i+"["+(typeof n==="object"||c.isArray(n)?k:"")+"]",n)});else!b&&o!=null&&typeof o==="object"?c.each(o,function(k,n){d(i+"["+k+"]",n)}):f(i,o)}function f(i,o){o=c.isFunction(o)?o():o;e[e.length]=encodeURIComponent(i)+"="+encodeURIComponent(o)}var e=[];if(b===w)b=c.ajaxSettings.traditional; +if(c.isArray(a)||a.jquery)c.each(a,function(){f(this.name,this.value)});else for(var j in a)d(j,a[j]);return e.join("&").replace(yb,"+")}});var la={},Ab=/toggle|show|hide/,Bb=/^([+-]=)?([\d+-.]+)(.*)$/,W,va=[["height","marginTop","marginBottom","paddingTop","paddingBottom"],["width","marginLeft","marginRight","paddingLeft","paddingRight"],["opacity"]];c.fn.extend({show:function(a,b){if(a||a===0)return this.animate(K("show",3),a,b);else{a=0;for(b=this.length;a").appendTo("body");f=e.css("display");if(f==="none")f="block";e.remove();la[d]=f}c.data(this[a],"olddisplay",f)}}a=0;for(b=this.length;a=0;f--)if(d[f].elem===this){b&&d[f](true);d.splice(f,1)}});b||this.dequeue();return this}});c.each({slideDown:K("show",1),slideUp:K("hide",1),slideToggle:K("toggle",1),fadeIn:{opacity:"show"},fadeOut:{opacity:"hide"}},function(a,b){c.fn[a]=function(d,f){return this.animate(b,d,f)}});c.extend({speed:function(a,b,d){var f=a&&typeof a==="object"?a:{complete:d||!d&&b||c.isFunction(a)&&a,duration:a,easing:d&&b||b&&!c.isFunction(b)&&b};f.duration=c.fx.off?0:typeof f.duration=== +"number"?f.duration:c.fx.speeds[f.duration]||c.fx.speeds._default;f.old=f.complete;f.complete=function(){f.queue!==false&&c(this).dequeue();c.isFunction(f.old)&&f.old.call(this)};return f},easing:{linear:function(a,b,d,f){return d+f*a},swing:function(a,b,d,f){return(-Math.cos(a*Math.PI)/2+0.5)*f+d}},timers:[],fx:function(a,b,d){this.options=b;this.elem=a;this.prop=d;if(!b.orig)b.orig={}}});c.fx.prototype={update:function(){this.options.step&&this.options.step.call(this.elem,this.now,this);(c.fx.step[this.prop]|| +c.fx.step._default)(this);if((this.prop==="height"||this.prop==="width")&&this.elem.style)this.elem.style.display="block"},cur:function(a){if(this.elem[this.prop]!=null&&(!this.elem.style||this.elem.style[this.prop]==null))return this.elem[this.prop];return(a=parseFloat(c.css(this.elem,this.prop,a)))&&a>-10000?a:parseFloat(c.curCSS(this.elem,this.prop))||0},custom:function(a,b,d){function f(j){return e.step(j)}this.startTime=J();this.start=a;this.end=b;this.unit=d||this.unit||"px";this.now=this.start; +this.pos=this.state=0;var e=this;f.elem=this.elem;if(f()&&c.timers.push(f)&&!W)W=setInterval(c.fx.tick,13)},show:function(){this.options.orig[this.prop]=c.style(this.elem,this.prop);this.options.show=true;this.custom(this.prop==="width"||this.prop==="height"?1:0,this.cur());c(this.elem).show()},hide:function(){this.options.orig[this.prop]=c.style(this.elem,this.prop);this.options.hide=true;this.custom(this.cur(),0)},step:function(a){var b=J(),d=true;if(a||b>=this.options.duration+this.startTime){this.now= +this.end;this.pos=this.state=1;this.update();this.options.curAnim[this.prop]=true;for(var f in 
this.options.curAnim)if(this.options.curAnim[f]!==true)d=false;if(d){if(this.options.display!=null){this.elem.style.overflow=this.options.overflow;a=c.data(this.elem,"olddisplay");this.elem.style.display=a?a:this.options.display;if(c.css(this.elem,"display")==="none")this.elem.style.display="block"}this.options.hide&&c(this.elem).hide();if(this.options.hide||this.options.show)for(var e in this.options.curAnim)c.style(this.elem, +e,this.options.orig[e]);this.options.complete.call(this.elem)}return false}else{e=b-this.startTime;this.state=e/this.options.duration;a=this.options.easing||(c.easing.swing?"swing":"linear");this.pos=c.easing[this.options.specialEasing&&this.options.specialEasing[this.prop]||a](this.state,e,0,1,this.options.duration);this.now=this.start+(this.end-this.start)*this.pos;this.update()}return true}};c.extend(c.fx,{tick:function(){for(var a=c.timers,b=0;b
"; +a.insertBefore(b,a.firstChild);d=b.firstChild;f=d.firstChild;e=d.nextSibling.firstChild.firstChild;this.doesNotAddBorder=f.offsetTop!==5;this.doesAddBorderForTableAndCells=e.offsetTop===5;f.style.position="fixed";f.style.top="20px";this.supportsFixedPosition=f.offsetTop===20||f.offsetTop===15;f.style.position=f.style.top="";d.style.overflow="hidden";d.style.position="relative";this.subtractsBorderForOverflowNotVisible=f.offsetTop===-5;this.doesNotIncludeMarginInBodyOffset=a.offsetTop!==j;a.removeChild(b); +c.offset.initialize=c.noop},bodyOffset:function(a){var b=a.offsetTop,d=a.offsetLeft;c.offset.initialize();if(c.offset.doesNotIncludeMarginInBodyOffset){b+=parseFloat(c.curCSS(a,"marginTop",true))||0;d+=parseFloat(c.curCSS(a,"marginLeft",true))||0}return{top:b,left:d}},setOffset:function(a,b,d){if(/static/.test(c.curCSS(a,"position")))a.style.position="relative";var f=c(a),e=f.offset(),j=parseInt(c.curCSS(a,"top",true),10)||0,i=parseInt(c.curCSS(a,"left",true),10)||0;if(c.isFunction(b))b=b.call(a, +d,e);d={top:b.top-e.top+j,left:b.left-e.left+i};"using"in b?b.using.call(a,d):f.css(d)}};c.fn.extend({position:function(){if(!this[0])return null;var a=this[0],b=this.offsetParent(),d=this.offset(),f=/^body|html$/i.test(b[0].nodeName)?{top:0,left:0}:b.offset();d.top-=parseFloat(c.curCSS(a,"marginTop",true))||0;d.left-=parseFloat(c.curCSS(a,"marginLeft",true))||0;f.top+=parseFloat(c.curCSS(b[0],"borderTopWidth",true))||0;f.left+=parseFloat(c.curCSS(b[0],"borderLeftWidth",true))||0;return{top:d.top- +f.top,left:d.left-f.left}},offsetParent:function(){return this.map(function(){for(var a=this.offsetParent||s.body;a&&!/^body|html$/i.test(a.nodeName)&&c.css(a,"position")==="static";)a=a.offsetParent;return a})}});c.each(["Left","Top"],function(a,b){var d="scroll"+b;c.fn[d]=function(f){var e=this[0],j;if(!e)return null;if(f!==w)return this.each(function(){if(j=wa(this))j.scrollTo(!a?f:c(j).scrollLeft(),a?f:c(j).scrollTop());else this[d]=f});else return(j=wa(e))?"pageXOffset"in j?j[a?"pageYOffset": +"pageXOffset"]:c.support.boxModel&&j.document.documentElement[d]||j.document.body[d]:e[d]}});c.each(["Height","Width"],function(a,b){var d=b.toLowerCase();c.fn["inner"+b]=function(){return this[0]?c.css(this[0],d,false,"padding"):null};c.fn["outer"+b]=function(f){return this[0]?c.css(this[0],d,false,f?"margin":"border"):null};c.fn[d]=function(f){var e=this[0];if(!e)return f==null?null:this;if(c.isFunction(f))return this.each(function(j){var i=c(this);i[d](f.call(this,j,i[d]()))});return"scrollTo"in +e&&e.document?e.document.compatMode==="CSS1Compat"&&e.document.documentElement["client"+b]||e.document.body["client"+b]:e.nodeType===9?Math.max(e.documentElement["client"+b],e.body["scroll"+b],e.documentElement["scroll"+b],e.body["offset"+b],e.documentElement["offset"+b]):f===w?c.css(e,d):this.css(d,typeof f==="string"?f:f+"px")}});A.jQuery=A.$=c})(window); diff --git a/docs/sphinx-docs/source/static/logo-mongodb.png b/_static/logo-mongodb.png similarity index 100% rename from docs/sphinx-docs/source/static/logo-mongodb.png rename to _static/logo-mongodb.png diff --git a/_static/minus.png b/_static/minus.png new file mode 100644 index 00000000000..da1c5620d10 Binary files /dev/null and b/_static/minus.png differ diff --git a/docs/sphinx-docs/themes/mongodb/static/mongodb-docs.css_t b/_static/mongodb-docs.css similarity index 57% rename from docs/sphinx-docs/themes/mongodb/static/mongodb-docs.css_t rename to _static/mongodb-docs.css index 8797db98daa..6ab1fccf0ed 100644 --- 
a/docs/sphinx-docs/themes/mongodb/static/mongodb-docs.css_t +++ b/_static/mongodb-docs.css @@ -14,10 +14,10 @@ /* -- page layout ----------------------------------------------------------- */ body { - font-family: Arial, sans-serif; + font-family: helvetica,arial,sans-serif; font-size: 100%; - background-color: #111; - color: #555; + background-color: #402817; + color: black; margin: 0; padding: 0; } @@ -25,6 +25,7 @@ body { div.documentwrapper { float: left; width: 100%; + background-color: #F3F4EB; } div.bodywrapper { @@ -36,64 +37,69 @@ hr { } div.document { - background-color: #eee; + background-color: white; + text-shadow: none; } div.body { - background-color: #ffffff; - color: #3E4349; + background-color: white; + color: black; padding: 0 30px 30px 30px; font-size: 0.9em; } div.footer { - color: #555; + color: white; width: 100%; padding: 13px 0; text-align: center; font-size: 75%; + border-top: 1px solid black; } div.footer a { - color: #444; + color: #989898; text-decoration: underline; } div.related { - background-color: #6BA81E; - line-height: 32px; + background-color: #402817; + margin-bottom: 10px; color: #fff; - text-shadow: 0px 1px 0 #444; font-size: 0.9em; + } div.related a { color: #E2F3CC; } +div.related ul { + padding: 0 10px 0; +} + +div.footer-nav div.related ul { + padding: 5px 10px 0; +} + div.sphinxsidebar { - font-size: 0.75em; - line-height: 1.5em; + font-size: 0.72em; + line-height: 1.4em; + width: 230px; } div.sphinxsidebarwrapper{ - padding: 20px 0; + background-color: #F3F4EB; + padding: 12px 5px; } -div.sphinxsidebar h3, -div.sphinxsidebar h4 { - font-family: Arial, sans-serif; - color: #222; - font-size: 1.2em; - font-weight: normal; +div.sphinxsidebar h3 { + font-family: helvetica,arial,sans-serif; + color: black; + font-size: 1.5em; + font-weight: bold; margin: 0; - padding: 5px 10px; - background-color: #ddd; - text-shadow: 1px 1px 0 white -} - -div.sphinxsidebar h4{ - font-size: 1.1em; + padding: 10px 0 0 10px; } div.sphinxsidebar h3 a { @@ -101,21 +107,31 @@ div.sphinxsidebar h3 a { } div.sphinxsidebar p { - color: #888; - padding: 5px 20px; + color: #333333; + margin: 12px 0 5px 12px; + padding: 0 12p; +} + +div.sphinxsidebar form { + margin-top: 5px; } div.sphinxsidebar p.logo { color: #888; padding: 0px; - margin-top: -10px; + margin: -18px 5px 5px; } div.sphinxsidebar p.topless { } +div.sphinxsidebar ul ul, div.sphinxsidebar ul.want-points { + list-style: square outside none; + margin-left: 15px; +} + div.sphinxsidebar ul { - margin: 10px 20px; + margin: 10px; padding: 0; color: #000; } @@ -129,9 +145,17 @@ div.sphinxsidebar input { font-family: sans-serif; font-size: 1em; } - div.sphinxsidebar input[type=text]{ margin-left: 20px; + width: 11em !important; +} +div.sphinxsidebar input[type="submit"] { + text-align: center; + width: 4.5em !important; +} + +p.searchtip { + font-size: 93%; } /* -- body styles ----------------------------------------------------------- */ @@ -152,21 +176,18 @@ div.body h3, div.body h4, div.body h5, div.body h6 { - font-family: Arial, sans-serif; - background-color: #BED4EB; - font-weight: normal; - color: #212224; - margin: 30px 0px 10px 0px; - padding: 5px 0 5px 10px; - text-shadow: 0px 1px 0 white -} - -div.body h1 { border-top: 20px solid white; margin-top: 0; font-size: 200%; } -div.body h2 { font-size: 150%; background-color: #C8D5E3; } -div.body h3 { font-size: 120%; background-color: #D8DEE3; } -div.body h4 { font-size: 110%; background-color: #D8DEE3; } -div.body h5 { font-size: 100%; background-color: #D8DEE3; } 
-div.body h6 { font-size: 100%; background-color: #D8DEE3; } + font-family: helvetica,arial,sans-serif; + font-weight: bold; + margin: 30px 0px 0px 0px; + padding: 0; +} + +div.body h1 { border-top: 15px solid white; margin-top: 0; font-size: 200%; } +div.body h2 { font-size: 150%; } +div.body h3 { font-size: 120%; } +div.body h4 { font-size: 110%; } +div.body h5 { font-size: 100%; } +div.body h6 { font-size: 100%; } a.headerlink { color: #c60f0f; @@ -192,23 +213,45 @@ div.highlight{ background-color: white; } -div.note { +dl.binary { + display: none; +} + +div.topic { background-color: #eee; +} + +div.warning { + background-color: #ffe4e4; + border: 1px solid #f66; +} +div.admonition { + margin: 15px; + padding: 7px; +} + +div.note { + background-color: #F3F4EB; border: 1px solid #ccc; } div.seealso { background-color: #ffc; border: 1px solid #ff6; + padding-bottom: 7px; } -div.topic { - background-color: #eee; +div.admonition-optional { + background-color: #ffffee; + border: 1px solid #dfdfdf; + color: #666666; + margin: 10px 25px; } -div.warning { - background-color: #ffe4e4; - border: 1px solid #f66; +div.admonition-example { + background-color: #F3F4EB; + border: 1px solid #ccc; + color: #3E4349; } p.admonition-title { @@ -219,6 +262,10 @@ p.admonition-title:after { content: ":"; } +div.admonition pre { + margin: 10px 0; +} + pre { padding: 10px; background-color: White; @@ -226,7 +273,7 @@ pre { line-height: 1.2em; border: 1px solid #C6C9CB; font-size: 1.1em; - margin: 1.5em 0 1.5em 0; + margin: 20px; -webkit-box-shadow: 1px 1px 1px #d8d8d8; -moz-box-shadow: 1px 1px 1px #d8d8d8; } @@ -240,7 +287,7 @@ tt { } .viewcode-back { - font-family: Arial, sans-serif; + font-family: helvetica,arial,sans-serif; } div.viewcode-block:target { @@ -248,3 +295,8 @@ div.viewcode-block:target { border-top: 1px solid #ac9; border-bottom: 1px solid #ac9; } + +p.first { + margin-bottom: 0px !important; + margin-top: 10px !important; +} \ No newline at end of file diff --git a/_static/nonactive.png b/_static/nonactive.png new file mode 100644 index 00000000000..4097e0b1f07 Binary files /dev/null and b/_static/nonactive.png differ diff --git a/_static/plus.png b/_static/plus.png new file mode 100644 index 00000000000..b3cb37425ea Binary files /dev/null and b/_static/plus.png differ diff --git a/_static/pygments.css b/_static/pygments.css new file mode 100644 index 00000000000..1a14f2ae1ab --- /dev/null +++ b/_static/pygments.css @@ -0,0 +1,62 @@ +.highlight .hll { background-color: #ffffcc } +.highlight { background: #eeffcc; } +.highlight .c { color: #408090; font-style: italic } /* Comment */ +.highlight .err { border: 1px solid #FF0000 } /* Error */ +.highlight .k { color: #007020; font-weight: bold } /* Keyword */ +.highlight .o { color: #666666 } /* Operator */ +.highlight .cm { color: #408090; font-style: italic } /* Comment.Multiline */ +.highlight .cp { color: #007020 } /* Comment.Preproc */ +.highlight .c1 { color: #408090; font-style: italic } /* Comment.Single */ +.highlight .cs { color: #408090; background-color: #fff0f0 } /* Comment.Special */ +.highlight .gd { color: #A00000 } /* Generic.Deleted */ +.highlight .ge { font-style: italic } /* Generic.Emph */ +.highlight .gr { color: #FF0000 } /* Generic.Error */ +.highlight .gh { color: #000080; font-weight: bold } /* Generic.Heading */ +.highlight .gi { color: #00A000 } /* Generic.Inserted */ +.highlight .go { color: #303030 } /* Generic.Output */ +.highlight .gp { color: #c65d09; font-weight: bold } /* Generic.Prompt */ +.highlight .gs { 
font-weight: bold } /* Generic.Strong */ +.highlight .gu { color: #800080; font-weight: bold } /* Generic.Subheading */ +.highlight .gt { color: #0040D0 } /* Generic.Traceback */ +.highlight .kc { color: #007020; font-weight: bold } /* Keyword.Constant */ +.highlight .kd { color: #007020; font-weight: bold } /* Keyword.Declaration */ +.highlight .kn { color: #007020; font-weight: bold } /* Keyword.Namespace */ +.highlight .kp { color: #007020 } /* Keyword.Pseudo */ +.highlight .kr { color: #007020; font-weight: bold } /* Keyword.Reserved */ +.highlight .kt { color: #902000 } /* Keyword.Type */ +.highlight .m { color: #208050 } /* Literal.Number */ +.highlight .s { color: #4070a0 } /* Literal.String */ +.highlight .na { color: #4070a0 } /* Name.Attribute */ +.highlight .nb { color: #007020 } /* Name.Builtin */ +.highlight .nc { color: #0e84b5; font-weight: bold } /* Name.Class */ +.highlight .no { color: #60add5 } /* Name.Constant */ +.highlight .nd { color: #555555; font-weight: bold } /* Name.Decorator */ +.highlight .ni { color: #d55537; font-weight: bold } /* Name.Entity */ +.highlight .ne { color: #007020 } /* Name.Exception */ +.highlight .nf { color: #06287e } /* Name.Function */ +.highlight .nl { color: #002070; font-weight: bold } /* Name.Label */ +.highlight .nn { color: #0e84b5; font-weight: bold } /* Name.Namespace */ +.highlight .nt { color: #062873; font-weight: bold } /* Name.Tag */ +.highlight .nv { color: #bb60d5 } /* Name.Variable */ +.highlight .ow { color: #007020; font-weight: bold } /* Operator.Word */ +.highlight .w { color: #bbbbbb } /* Text.Whitespace */ +.highlight .mf { color: #208050 } /* Literal.Number.Float */ +.highlight .mh { color: #208050 } /* Literal.Number.Hex */ +.highlight .mi { color: #208050 } /* Literal.Number.Integer */ +.highlight .mo { color: #208050 } /* Literal.Number.Oct */ +.highlight .sb { color: #4070a0 } /* Literal.String.Backtick */ +.highlight .sc { color: #4070a0 } /* Literal.String.Char */ +.highlight .sd { color: #4070a0; font-style: italic } /* Literal.String.Doc */ +.highlight .s2 { color: #4070a0 } /* Literal.String.Double */ +.highlight .se { color: #4070a0; font-weight: bold } /* Literal.String.Escape */ +.highlight .sh { color: #4070a0 } /* Literal.String.Heredoc */ +.highlight .si { color: #70a0d0; font-style: italic } /* Literal.String.Interpol */ +.highlight .sx { color: #c65d09 } /* Literal.String.Other */ +.highlight .sr { color: #235388 } /* Literal.String.Regex */ +.highlight .s1 { color: #4070a0 } /* Literal.String.Single */ +.highlight .ss { color: #517918 } /* Literal.String.Symbol */ +.highlight .bp { color: #007020 } /* Name.Builtin.Pseudo */ +.highlight .vc { color: #bb60d5 } /* Name.Variable.Class */ +.highlight .vg { color: #bb60d5 } /* Name.Variable.Global */ +.highlight .vi { color: #bb60d5 } /* Name.Variable.Instance */ +.highlight .il { color: #208050 } /* Literal.Number.Integer.Long */ \ No newline at end of file diff --git a/_static/reset.css b/_static/reset.css new file mode 100644 index 00000000000..78d0b6e37c5 --- /dev/null +++ b/_static/reset.css @@ -0,0 +1,46 @@ +/* http://meyerweb.com/eric/tools/css/reset/ + v2.0 | 20110126 + License: none (public domain) +*/ + +html, body, div, span, applet, object, iframe, +h1, h2, h3, h4, h5, h6, p, blockquote, pre, +a, abbr, acronym, address, big, cite, code, +del, dfn, em, img, ins, kbd, q, s, samp, +small, strike, strong, sub, sup, tt, var, +b, u, i, center, +dl, dt, dd, ol, ul, li, +fieldset, form, label, legend, +table, caption, tbody, tfoot, thead, tr, th, td, 
+article, aside, canvas, details, embed, +figure, figcaption, footer, header, hgroup, +menu, nav, output, ruby, section, summary, +time, mark, audio, video { + font-size: 100%; + font: inherit; + vertical-align: baseline; +} +/* HTML5 display-role reset for older browsers */ +article, aside, details, figcaption, figure, +footer, header, hgroup, menu, nav, section { + display: block; +} +body { + line-height: 1; +} +ol, ul { + list-style: none; +} +blockquote, q { + quotes: none; +} +blockquote:before, blockquote:after, +q:before, q:after { + content: ''; + content: none; +} +table { + border-collapse: collapse; + border-spacing: 0; +} + diff --git a/_static/searchtools.js b/_static/searchtools.js new file mode 100644 index 00000000000..663be4c909b --- /dev/null +++ b/_static/searchtools.js @@ -0,0 +1,560 @@ +/* + * searchtools.js_t + * ~~~~~~~~~~~~~~~~ + * + * Sphinx JavaScript utilties for the full-text search. + * + * :copyright: Copyright 2007-2011 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. + * + */ + +/** + * helper function to return a node containing the + * search summary for a given text. keywords is a list + * of stemmed words, hlwords is the list of normal, unstemmed + * words. the first one is used to find the occurance, the + * latter for highlighting it. + */ + +jQuery.makeSearchSummary = function(text, keywords, hlwords) { + var textLower = text.toLowerCase(); + var start = 0; + $.each(keywords, function() { + var i = textLower.indexOf(this.toLowerCase()); + if (i > -1) + start = i; + }); + start = Math.max(start - 120, 0); + var excerpt = ((start > 0) ? '...' : '') + + $.trim(text.substr(start, 240)) + + ((start + 240 - text.length) ? '...' : ''); + var rv = $('
').text(excerpt); + $.each(hlwords, function() { + rv = rv.highlightText(this, 'highlighted'); + }); + return rv; +} + + +/** + * Porter Stemmer + */ +var Stemmer = function() { + + var step2list = { + ational: 'ate', + tional: 'tion', + enci: 'ence', + anci: 'ance', + izer: 'ize', + bli: 'ble', + alli: 'al', + entli: 'ent', + eli: 'e', + ousli: 'ous', + ization: 'ize', + ation: 'ate', + ator: 'ate', + alism: 'al', + iveness: 'ive', + fulness: 'ful', + ousness: 'ous', + aliti: 'al', + iviti: 'ive', + biliti: 'ble', + logi: 'log' + }; + + var step3list = { + icate: 'ic', + ative: '', + alize: 'al', + iciti: 'ic', + ical: 'ic', + ful: '', + ness: '' + }; + + var c = "[^aeiou]"; // consonant + var v = "[aeiouy]"; // vowel + var C = c + "[^aeiouy]*"; // consonant sequence + var V = v + "[aeiou]*"; // vowel sequence + + var mgr0 = "^(" + C + ")?" + V + C; // [C]VC... is m>0 + var meq1 = "^(" + C + ")?" + V + C + "(" + V + ")?$"; // [C]VC[V] is m=1 + var mgr1 = "^(" + C + ")?" + V + C + V + C; // [C]VCVC... is m>1 + var s_v = "^(" + C + ")?" + v; // vowel in stem + + this.stemWord = function (w) { + var stem; + var suffix; + var firstch; + var origword = w; + + if (w.length < 3) + return w; + + var re; + var re2; + var re3; + var re4; + + firstch = w.substr(0,1); + if (firstch == "y") + w = firstch.toUpperCase() + w.substr(1); + + // Step 1a + re = /^(.+?)(ss|i)es$/; + re2 = /^(.+?)([^s])s$/; + + if (re.test(w)) + w = w.replace(re,"$1$2"); + else if (re2.test(w)) + w = w.replace(re2,"$1$2"); + + // Step 1b + re = /^(.+?)eed$/; + re2 = /^(.+?)(ed|ing)$/; + if (re.test(w)) { + var fp = re.exec(w); + re = new RegExp(mgr0); + if (re.test(fp[1])) { + re = /.$/; + w = w.replace(re,""); + } + } + else if (re2.test(w)) { + var fp = re2.exec(w); + stem = fp[1]; + re2 = new RegExp(s_v); + if (re2.test(stem)) { + w = stem; + re2 = /(at|bl|iz)$/; + re3 = new RegExp("([^aeiouylsz])\\1$"); + re4 = new RegExp("^" + C + v + "[^aeiouwxy]$"); + if (re2.test(w)) + w = w + "e"; + else if (re3.test(w)) { + re = /.$/; + w = w.replace(re,""); + } + else if (re4.test(w)) + w = w + "e"; + } + } + + // Step 1c + re = /^(.+?)y$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + re = new RegExp(s_v); + if (re.test(stem)) + w = stem + "i"; + } + + // Step 2 + re = /^(.+?)(ational|tional|enci|anci|izer|bli|alli|entli|eli|ousli|ization|ation|ator|alism|iveness|fulness|ousness|aliti|iviti|biliti|logi)$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + suffix = fp[2]; + re = new RegExp(mgr0); + if (re.test(stem)) + w = stem + step2list[suffix]; + } + + // Step 3 + re = /^(.+?)(icate|ative|alize|iciti|ical|ful|ness)$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + suffix = fp[2]; + re = new RegExp(mgr0); + if (re.test(stem)) + w = stem + step3list[suffix]; + } + + // Step 4 + re = /^(.+?)(al|ance|ence|er|ic|able|ible|ant|ement|ment|ent|ou|ism|ate|iti|ous|ive|ize)$/; + re2 = /^(.+?)(s|t)(ion)$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + re = new RegExp(mgr1); + if (re.test(stem)) + w = stem; + } + else if (re2.test(w)) { + var fp = re2.exec(w); + stem = fp[1] + fp[2]; + re2 = new RegExp(mgr1); + if (re2.test(stem)) + w = stem; + } + + // Step 5 + re = /^(.+?)e$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + re = new RegExp(mgr1); + re2 = new RegExp(meq1); + re3 = new RegExp("^" + C + v + "[^aeiouwxy]$"); + if (re.test(stem) || (re2.test(stem) && !(re3.test(stem)))) + w = stem; + } + re = /ll$/; + re2 = new RegExp(mgr1); + if (re.test(w) && re2.test(w)) { + 
re = /.$/; + w = w.replace(re,""); + } + + // and turn initial Y back to y + if (firstch == "y") + w = firstch.toLowerCase() + w.substr(1); + return w; + } +} + + +/** + * Search Module + */ +var Search = { + + _index : null, + _queued_query : null, + _pulse_status : -1, + + init : function() { + var params = $.getQueryParameters(); + if (params.q) { + var query = params.q[0]; + $('input[name="q"]')[0].value = query; + this.performSearch(query); + } + }, + + loadIndex : function(url) { + $.ajax({type: "GET", url: url, data: null, success: null, + dataType: "script", cache: true}); + }, + + setIndex : function(index) { + var q; + this._index = index; + if ((q = this._queued_query) !== null) { + this._queued_query = null; + Search.query(q); + } + }, + + hasIndex : function() { + return this._index !== null; + }, + + deferQuery : function(query) { + this._queued_query = query; + }, + + stopPulse : function() { + this._pulse_status = 0; + }, + + startPulse : function() { + if (this._pulse_status >= 0) + return; + function pulse() { + Search._pulse_status = (Search._pulse_status + 1) % 4; + var dotString = ''; + for (var i = 0; i < Search._pulse_status; i++) + dotString += '.'; + Search.dots.text(dotString); + if (Search._pulse_status > -1) + window.setTimeout(pulse, 500); + }; + pulse(); + }, + + /** + * perform a search for something + */ + performSearch : function(query) { + // create the required interface elements + this.out = $('#search-results'); + this.title = $('
<h2>' + _('Searching') + '</h2>').appendTo(this.out);
+    this.dots = $('<span></span>').appendTo(this.title);
+    this.status = $('<p style="display: none"></p>').appendTo(this.out);
+    this.output = $('