
Attempting reformatting

commit a0105bbf6989f866b7c2394262388c83a52387b1 (1 parent: 7403712)
@christkv authored
168 dev/tools/build-docs.js
@@ -51,109 +51,93 @@ var testClasses = [
// Read all the templates
var templates = [
{tag:'index', path:'./dev/tools/doc-templates/index.ejs'},
+ {tag:'index_no_header', path:'./dev/tools/doc-templates/index_no_header.ejs'},
{tag:'class', path:'./dev/tools/doc-templates/class.ejs'},
{tag:'function', path:'./dev/tools/doc-templates/function.ejs'}
]
-// Output directory
-var outputDirectory = "./docs/sphinx-docs/source/api-generated"
-
-// Force create the directory for the generated docs
-exec('rm -rf ' + outputDirectory, function (error, stdout, stderr) {});
-exec('mkdir ' + outputDirectory, function (error, stdout, stderr) {});
-
-// ----------------------------------------------------------------------------
-// PROCESS Driver API
-// ----------------------------------------------------------------------------
-// Extract meta data from source files
-var dataObjects = docs.extractLibraryMetaData(apiClasses);
-// Filter out and prepare the test Objects hash
-var testObjects = docs.buildTestHash(docs.extractLibraryMetaData(testClasses));
-// Read all the templates
-var templates = docs.readAllTemplates(templates);
-// Render all the classes that are decorated
-docs.renderAllTemplates(outputDirectory, templates, dataObjects, testObjects, {index_title:'Driver API'});
-
-// ----------------------------------------------------------------------------
-// PROCESS BSON API
-// ----------------------------------------------------------------------------
-// Output directory
-var outputDirectory2 = "./docs/sphinx-docs/source/api-bson-generated"
-// Force create the directory for the generated docs
-exec('rm -rf ' + outputDirectory2, function (error, stdout, stderr) {});
-exec('mkdir ' + outputDirectory2, function (error, stdout, stderr) {});
-
-var apiClasses2 = [
- // {tag:"objectid", path:"./lib/mongodb/bson/objectid.js"},
- // {tag:"binary", path:"./lib/mongodb/bson/binary.js"},
- // {tag:"code", path:"./lib/mongodb/bson/code.js"},
- // {tag:"code", path:"./lib/mongodb/bson/db_ref.js"},
- // {tag:"double", path:"./lib/mongodb/bson/double.js"},
- // {tag:"maxkey", path:"./lib/mongodb/bson/max_key.js"},
- // {tag:"symbol", path:"./lib/mongodb/bson/symbol.js"},
- // {tag:"timestamp", path:"./lib/mongodb/bson/timestamp.js"},
- // {tag:"long", path:"./lib/mongodb/bson/long.js"},
- {tag:"bson", path:"./lib/mongodb/bson/bson.js"}
- ];
-
-// Read all the templates
-var templates2 = [
- {tag:'index', path:'./dev/tools/doc-templates/index.ejs'},
- {tag:'class', path:'./dev/tools/doc-templates/class.ejs'},
- {tag:'function', path:'./dev/tools/doc-templates/function.ejs'}
-]
-
-// Extract meta data from source files
-var dataObjects2 = docs.extractLibraryMetaData(apiClasses2);
-// Filter out and prepare the test Objects hash
-var testObjects2 = docs.buildTestHash(docs.extractLibraryMetaData(testClasses));
-// Render all the classes that are decorated
-docs.renderAllTemplates(outputDirectory2, templates, dataObjects2, testObjects2, {index_title:'Binary JSON API'});
-
-// ----------------------------------------------------------------------------
-// PROCESS MARKDOWN DOCUMENTS TO STRUCTURED TEXT
-// ----------------------------------------------------------------------------
-var articles = [
- {name:"NodeKOArticle1", output:"NodeKOArticle1.rst", path:"./docs/articles/NodeKOArticle1.md"},
- {name:"NodeKOArticle2", output:"NodeKOArticle2.rst", path:"./docs/articles/NodeKOArticle2.md"}
- ];
-
-// Output directory
-var outputDirectory = "./docs/sphinx-docs/source/api-articles"
-docs.writeMarkDownFile(outputDirectory, articles, templates);
-
-
+// // Output directory
+// var outputDirectory = "./docs/sphinx-docs/source/api-generated"
+//
// // Force create the directory for the generated docs
// exec('rm -rf ' + outputDirectory, function (error, stdout, stderr) {});
// exec('mkdir ' + outputDirectory, function (error, stdout, stderr) {});
//
-// var names = [];
+// // ----------------------------------------------------------------------------
+// // PROCESS Driver API
+// // ----------------------------------------------------------------------------
+// // Extract meta data from source files
+// var dataObjects = docs.extractLibraryMetaData(apiClasses);
+// // Filter out and prepare the test Objects hash
+// var testObjects = docs.buildTestHash(docs.extractLibraryMetaData(testClasses));
+// Read all the templates
+var templates = docs.readAllTemplates(templates);
+// // Render all the classes that are decorated
+// docs.renderAllTemplates(outputDirectory, templates, dataObjects, testObjects, {index_title:'Driver API'});
+//
+// // ----------------------------------------------------------------------------
+// // PROCESS BSON API
+// // ----------------------------------------------------------------------------
+// // Output directory
+// var outputDirectory2 = "./docs/sphinx-docs/source/api-bson-generated"
+// // Force create the directory for the generated docs
+// exec('rm -rf ' + outputDirectory2, function (error, stdout, stderr) {});
+// exec('mkdir ' + outputDirectory2, function (error, stdout, stderr) {});
//
-// // Process all the articles
-// for(var i = 0 ; i < articles.length; i++) {
-// // Fetch the article markdown content
-// var article = fs.readFileSync(articles[i].path).toString();
-// // Convert the text into restructured text for sphinx
-// var text = docs.transformMarkdownToStructuredText(article);
-// // Write out the content
-// fs.writeFileSync(format("%s/%s", outputDirectory, articles[i].output.toLowerCase()), text);
-// names.push(articles[i].name.toLowerCase());
-//
-// // var _markdown = new markdown.Markdown.parse(article);
-// // console.dir(markdown)
-// // console.dir(markdown.parse(article))
-// // var markdownDoc = _markdown.parse(article);
+// var apiClasses2 = [
+// // {tag:"objectid", path:"./lib/mongodb/bson/objectid.js"},
+// // {tag:"binary", path:"./lib/mongodb/bson/binary.js"},
+// // {tag:"code", path:"./lib/mongodb/bson/code.js"},
+// // {tag:"code", path:"./lib/mongodb/bson/db_ref.js"},
+// // {tag:"double", path:"./lib/mongodb/bson/double.js"},
+// // {tag:"maxkey", path:"./lib/mongodb/bson/max_key.js"},
+// // {tag:"symbol", path:"./lib/mongodb/bson/symbol.js"},
+// // {tag:"timestamp", path:"./lib/mongodb/bson/timestamp.js"},
+// // {tag:"long", path:"./lib/mongodb/bson/long.js"},
+// {tag:"bson", path:"./lib/mongodb/bson/bson.js"}
+// ];
//
-//
-// // console.log("--------------------------------------------------------------------------------")
-// // console.dir(markdownDoc)
-//
-// // console.log(article.toString())
-// }
+// // Read all the templates
+// var templates2 = [
+// {tag:'index', path:'./dev/tools/doc-templates/index.ejs'},
+// {tag:'index_no_header', path:'./dev/tools/doc-templates/index_no_header.ejs'},
+// {tag:'class', path:'./dev/tools/doc-templates/class.ejs'},
+// {tag:'function', path:'./dev/tools/doc-templates/function.ejs'}
+// ]
//
-// // Just write out the index
-// var indexContent = ejs.render(templates['index'], {entries:names, format:format, title:'Articles'});
-// fs.writeFileSync(format("%s/%s", outputDirectory, 'index.rst'), indexContent);
+// // Extract meta data from source files
+// var dataObjects2 = docs.extractLibraryMetaData(apiClasses2);
+// // Filter out and prepare the test Objects hash
+// var testObjects2 = docs.buildTestHash(docs.extractLibraryMetaData(testClasses));
+// // Render all the classes that are decorated
+// docs.renderAllTemplates(outputDirectory2, templates, dataObjects2, testObjects2, {index_title:'Binary JSON API'});
+//
+// // ----------------------------------------------------------------------------
+// // PROCESS MARKDOWN DOCUMENTS TO STRUCTURED TEXT
+// // ----------------------------------------------------------------------------
+//
+// // Transform the tutorials
+// var articles = [
+// {name:"NodeKOArticle1", output:"NodeKOArticle1.rst", path:"./docs/articles/NodeKOArticle1.md"},
+// {name:"NodeKOArticle2", output:"NodeKOArticle2.rst", path:"./docs/articles/NodeKOArticle2.md"}
+// ];
+// // Transform the markdown to restructured text
+// docs.writeMarkDownFile("./docs/sphinx-docs/source/api-articles", articles, templates,
+// {title:'Articles', template:'index'});
+
+// Transform the tutorials
+var articles = [
+ // {name:"collections", output:"collections.rst", path:"./docs/collections.md"},
+ {name:"database", output:"database.rst", path:"./docs/database.md"},
+ // {name:"gridfs", output:"gridfs.rst", path:"./docs/gridfs.md"},
+ // {name:"indexes", output:"indexes.rst", path:"./docs/indexes.md"},
+ // {name:"insert", output:"insert.rst", path:"./docs/insert.md"},
+ // {name:"queries", output:"queries.rst", path:"./docs/queries.md"},
+ // {name:"replicaset", output:"replicaset.rst", path:"./docs/replicaset.md"}
+ ];
+// Transform the markdown to restructured text
+docs.writeMarkDownFile("./docs/sphinx-docs/source/markdown-docs", articles, templates,
+ {title:'Using the driver', template:'index_no_header'});
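
The reworked script funnels everything through `docs.readAllTemplates` and later renders index pages with `ejs.render(templates[options.template], ...)` (see docs.js below), which implies the template hash maps tags to raw EJS source. A minimal sketch of that assumption, not the actual implementation:

    // Hedged sketch: what readAllTemplates is assumed to do, given that
    // docs.js later calls ejs.render(templates[options.template], ...).
    var fs = require('fs');
    exports.readAllTemplates = function(entries) {
      var templates = {};
      // Replace each {tag, path} entry with the template source, keyed by tag
      for(var i = 0; i < entries.length; i++) {
        templates[entries[i].tag] = fs.readFileSync(entries[i].path).toString();
      }
      return templates;
    }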
14 dev/tools/docs.js
@@ -31,6 +31,7 @@ var convert_tree_to_rs = function(nodes, documentLines) {
// Go through all the tags and render
for(var i = 0; i < nodes.length; i++) {
var line = nodes[i];
+ // console.dir(line)
if(Array.isArray(line)) {
switch(line[0]) {
@@ -69,7 +70,10 @@ var convert_tree_to_rs = function(nodes, documentLines) {
documentLines.push('\n');
break;
case 'link':
- documentLines.push(format("`%s <%s>`_", line[2], line[1].href));
+ documentLines.push(format("`%s <%s>`_", line[2], line[1].href.replace(".md", ".html")));
+ break;
+ case 'inlinecode':
+ documentLines.push(format("``%s``", line[1]));
break;
case 'code_block':
// Unpack code block
@@ -115,10 +119,14 @@ var convert_tree_to_rs = function(nodes, documentLines) {
// Merge the docs in
documentLines.push(format(" * %s", listitemLines.join(' ').trim()));
break;
+ case 'em':
+ documentLines.push(format("*%s*", line[1]));
+ break;
case 'strong':
documentLines.push(format("**%s**", line[1]));
break;
default:
+ console.dir(line)
break;
}
}
@@ -127,7 +135,7 @@ var convert_tree_to_rs = function(nodes, documentLines) {
return documentLines;
}
-exports.writeMarkDownFile = function(outputDirectory, articles, templates) {
+exports.writeMarkDownFile = function(outputDirectory, articles, templates, options) {
// Force create the directory for the generated docs
exec('rm -rf ' + outputDirectory, function (error, stdout, stderr) {});
exec('mkdir ' + outputDirectory, function (error, stdout, stderr) {});
@@ -147,7 +155,7 @@ exports.writeMarkDownFile = function(outputDirectory, articles, templates) {
}
// Just write out the index
- var indexContent = ejs.render(templates['index'], {entries:names, format:format, title:'Articles'});
+ var indexContent = ejs.render(templates[options.template], {entries:names, format:format, title:options.title});
fs.writeFileSync(format("%s/%s", outputDirectory, 'index.rst'), indexContent);
}
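
The two new cases (`inlinecode`, `em`) extend the JsonML-style tree walk. An illustrative call, assuming the node shapes visible in the switch above (`convert_tree_to_rs` is file-local, so this is a within-module sketch):

    // Illustrative input for the converter; node shapes match the cases above.
    var documentLines = convert_tree_to_rs([
      ["em", "emphasised"],                              // -> *emphasised*
      ["inlinecode", "db.open()"],                       // -> ``db.open()``
      ["link", {href:"database.md"}, "database docs"]    // -> `database docs <database.html>`_
    ], []);

Note that `href.replace(".md", ".html")` replaces only the first occurrence, which is fine for a single file extension, and keeps intra-doc links working once Sphinx renders the pages to HTML.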
10 docs/collections.md
@@ -21,11 +21,7 @@ Collections can be created with `createCollection`
where `name` is the name of the collection, `options` is a set of configuration parameters and `callback` is a callback function. `db` is the database object.
-
-The first parameter for
-the callback is the error object (null if no error) and the second one is the pointer to the newly created
-collection. If strict mode is on and the table exists, the operation yields in error. With strict mode off (default)
-the function simple returns the pointer to the existing collection and does not truncate it.
+The first parameter for the callback is the error object (null if no error) and the second one is the pointer to the newly created collection. If strict mode is on and the collection already exists, the operation yields an error. With strict mode off (default) the function simply returns the pointer to the existing collection and does not truncate it.
db.createCollection("test", function(err, collection){
collection.insert({"test":"value"});
@@ -57,11 +53,9 @@ Collections can be listed with `collectionNames`
Collection names also include database name, so a collection named `posts` in a database `blog` will be listed as `blog.posts`.
-Additionally there's system collections which should not be altered without knowing exactly what you are doing, these sollections
-can be identified with `system` prefix. For example `posts.system.indexes`.
+Additionally, there are system collections which should not be altered without knowing exactly what you are doing; these collections can be identified by the `system` prefix. For example, `posts.system.indexes`.
Example:
-
var mongodb = require("mongodb"),
mongoserver = new mongodb.Server("localhost"),
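
A short hedged example of the strict-mode behaviour described in this file (the option name `strict` is an assumption based on the driver docs of this era, not something shown in the diff):

    // Hedged sketch: with strict mode on, creating an existing collection
    // yields an error instead of silently returning the existing one.
    db.createCollection("test", {strict:true}, function(err, collection) {
      if(err) return console.log("already exists: " + err.message);
      collection.insert({"test":"value"});
    });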
25 docs/database.md
@@ -18,13 +18,17 @@ The first thing to do in order to make queries to the database is to open one. T
## Server options
Several options can be passed to the `Server` constructor with `options` parameter.
- * `auto_reconnect` - to reconnect automatically, `default:false`
- * `poolSize` - specify the number of connections in the pool `default:1`
- * `socketOptions` - a collection of pr socket settings
- * `timeout` = set seconds before connection times out `default:0`
- * `noDelay` = Disables the Nagle algorithm `default:true`
- * `keepAlive` = Set if keepAlive is used `default:0`, which means no keepAlive, set higher than 0 for keepAlive
- * `encoding` = ['ascii', 'utf8', or 'base64'] `default:null`
+* `auto_reconnect` - to reconnect automatically, `default:false`
+* `poolSize` - specify the number of connections in the pool `default:1`
+* `socketOptions` - a collection of per-socket settings
+
+## Socket options
+Several options can be set for the `socketOptions`.
+
+* `timeout` - set seconds before the connection times out, `default:0`
+* `noDelay` - disables the Nagle algorithm, `default:true`
+* `keepAlive` - set if keepAlive is used; `default:0` means no keepAlive, set it higher than 0 to enable keepAlive
+* `encoding` - 'ascii'|'utf8'|'base64', `default:null`
## DB options
@@ -41,7 +45,6 @@ Several options can be passed to the `Db` constructor with `options` parameter.
* `reaperTimeout` - specify the number of milliseconds for timing out callbacks that don't return `default:30000`
* `raw` - driver expects Buffer raw bson document, `default:false`
-
## Opening a database
Database can be opened with Db method `open`.
@@ -94,8 +97,7 @@ To delete a database you need a pointer to it first. Deletion can be done with m
## Custom primary keys
-Every record in the database has an unique primary key called `_id`. Default primary keys are 12 byte hashes but a custom key generator can be used for something else. If you set `_id` "by hand" when
-inserting records then you can use whatever you want, primary key factory generates `_id` values only for records without ones.
+Every record in the database has a unique primary key called `_id`. Default primary keys are 12-byte hashes, but a custom key generator can be used for something else. If you set `_id` "by hand" when inserting records then you can use whatever you want; the primary key factory generates `_id` values only for records without one.
Example 1: No need to generate a primary key, as it's already defined:
@@ -105,8 +107,7 @@ Example 2: No primary key, so it needs to be generated before save:
collection.insert({name:"Daniel"});
-Custom primary key factory is actually an object with method `createPK` which returns a primary key.
-The context (value for `this`) for `createPK` is left untouched.
+A custom primary key factory is simply an object with a method `createPK` which returns a primary key. The context (the value of `this`) for `createPK` is left untouched.
var CustomPKFactory = {
counter:0,
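
The hunk cuts off mid-example; a hedged completion of the `CustomPKFactory` sketch above, with the `pk` option name assumed from the driver documentation of this period:

    // Hedged completion: an object with a createPK method that returns the
    // next key; `this` is preserved, so the counter persists across calls.
    var CustomPKFactory = {
      counter: 0,
      createPK: function() {
        return ++this.counter;
      }
    };
    // Assumed wiring via the Db constructor's `pk` option:
    var db = new mongodb.Db("blog", new mongodb.Server("localhost", 27017), {pk: CustomPKFactory});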
4 docs/gridfs.md
@@ -1,9 +1,7 @@
GridStore
======
-GridFS is a scalable MongoDB *filesystem* for storing and retrieving large files. The default limit for a MongoDB record is
-16MB, so to store data that is larger than this limit, GridFS can be used. GridFS shards the data into smaller chunks automatically.
-See [MongoDB documentation](http://www.mongodb.org/display/DOCS/GridFS+Specification) for details.
+GridFS is a scalable MongoDB *filesystem* for storing and retrieving large files. The default limit for a MongoDB record is 16MB, so to store data that is larger than this limit, GridFS can be used. GridFS shards the data into smaller chunks automatically. See [MongoDB documentation](http://www.mongodb.org/display/DOCS/GridFS+Specification) for details.
GridStore is a single file inside GridFS that can be managed by the script.
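
A brief hedged GridStore sketch to accompany this paragraph (API names taken from the driver of this era; treat as illustrative, not canonical):

    // Hedged sketch: write a small file into GridFS and close it.
    var gridStore = new mongodb.GridStore(db, "test.txt", "w");
    gridStore.open(function(err, gs) {
      gs.write("hello world", function(err, gs) {
        gs.close(function(err, result) {
          // the data is now stored in GridFS, chunked automatically
        });
      });
    });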
6 docs/indexes.md
@@ -1,8 +1,7 @@
Indexes
=======
-Indexes are needed to make queries faster. For example if you need to find records by a field named *username* and
-the field has a related index set, then the query will be a lot faster compared to if the index was not present.
+Indexes are needed to make queries faster. For example, if you need to find records by a field named *username* and the field has a related index set, then the query will be a lot faster than if the index were not present.
See [MongoDB documentation](http://www.mongodb.org/display/DOCS/Indexes) for details.
@@ -44,8 +43,7 @@ or with tuples
collection.ensureIndex([["firstname", 1], ["lastname", 1]], callback)
-The number value indicates direction - if it's 1, then it is an ascending value,
-if it's -1 then it's descending. For example if you have documents with a field *date* and you want to sort these records in descending order then you might want to add corresponding index
+The number value indicates direction: if it's 1 the index is ascending, if it's -1 it's descending. For example, if you have documents with a field *date* and you want to sort these records in descending order, then you might want to add the corresponding index
collection.ensureIndex({date:-1}, callback)
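
A hedged illustration of the direction value in use, pairing the descending index with a matching sort (array-form `sort` per the cursor API of this era):

    // Hedged sketch: a descending index on `date` with a matching sort.
    collection.ensureIndex({date:-1}, function(err, indexName) {
      collection.find({}).sort([["date", -1]]).toArray(function(err, docs) {
        // docs arrive newest first
      });
    });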
3 docs/insert.md
@@ -73,8 +73,7 @@ If the replacement object is a document, the matching documents will be replaced
The example above will replace the document contents of id=123 with the replacement object.
-To update only selected fields, `$set` operator needs to be used. Following replacement object
-replaces author value but leaves everything else intact.
+To update only selected fields, the `$set` operator needs to be used. The following replacement object replaces the author value but leaves everything else intact.
collection.update({_id:"123"}, {$set: {author:"Jessica"}});
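
A hedged side-by-side of the two update forms this section contrasts (values are illustrative):

    // Without $set the matching document is replaced wholesale; with $set
    // only the named field changes and the rest of the document is kept.
    collection.update({_id:"123"}, {author:"Jessica"});          // document becomes {_id:"123", author:"Jessica"}
    collection.update({_id:"123"}, {$set: {author:"Jessica"}});  // only `author` changes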
14 docs/queries.md
@@ -80,9 +80,7 @@ To make a simple query where one field must match to a defined value, one can do
This query matches all the records that a) have a field called *fieldname* and b) whose value is *"fieldvalue"*.
-For example if we have a collection of blog posts where the structure of the
-records is `{title, author, contents}` and we want
-to retrieve all the posts for a specific author then we can do it like this:
+For example, if we have a collection of blog posts where the structure of the records is `{title, author, contents}` and we want to retrieve all the posts for a specific author, then we can do it like this:
posts = pointer_to_collection;
posts.find({author:"Daniel"}).toArray(function(err, results){
@@ -136,9 +134,7 @@ To mix AND and OR queries, you just need to use $or as one of regular query fiel
### Conditionals
-Conditional operators `<`, `<=`, `>`, `>=` and `!=` can't be used directly, as the query object format doesn't support it but the same
-can be achieved with their aliases `$lt`, `$lte`, `$gt`, `$gte` and `$ne`. When a field value needs to match a conditional, the value
-must be wrapped into a separate object.
+Conditional operators `<`, `<=`, `>`, `>=` and `!=` can't be used directly, as the query object format doesn't support them, but the same can be achieved with their aliases `$lt`, `$lte`, `$gt`, `$gte` and `$ne`. When a field value needs to match a conditional, the value must be wrapped into a separate object.
{"fieldname":{$gte:100}}
@@ -186,8 +182,7 @@ For example with the following document
]
}
-not only the `_id` field can be used as a query field - also the `firstname` and even `title` can be used. This can be done when
-using nested field names as strings, concated with periods.
+not only can the `_id` field be used as a query field; `firstname` and even `title` can be used as well. This can be done by using nested field names as strings, concatenated with periods.
collection.find({"author.firstname":"Daniel})
@@ -250,8 +245,7 @@ Cursor objects are the results for queries and can be used to fetch individual f
### toArray
-`cursor.toArray(function(err, docs){})` converts the cursor object into an array of all the matching records. Probably the
-most convenient way to retrieve results but be careful with large datasets as every record is loaded into memory.
+`cursor.toArray(function(err, docs){})` converts the cursor object into an array of all the matching records. It is probably the most convenient way to retrieve results, but be careful with large datasets, as every record is loaded into memory.
collection.find().toArray(function(err, docs){
console.log("retrieved records:");
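
A hedged example combining the pieces above: a conditional alias, a nested field name, and `toArray` (the `views` field is hypothetical):

    // Hedged sketch: posts by Daniel with at least 100 views, fully loaded.
    posts.find({"author.firstname":"Daniel", "views":{$gte:100}}).toArray(function(err, results) {
      console.log("retrieved records: " + results.length);
    });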
8 docs/sphinx-docs/source/index.rst
@@ -2,6 +2,14 @@
The Node.JS MongoDB Driver Manual
=================================
+Usage
+-----
+
+.. toctree::
+ :maxdepth: 1
+
+ markdown-docs/index
+
Tutorials
---------