diff --git a/Makefile b/Makefile index cb27a33104f..231de812839 100644 --- a/Makefile +++ b/Makefile @@ -159,36 +159,15 @@ apidoc_sources = $(wildcard doc/api/*.markdown) apidocs = $(addprefix out/,$(apidoc_sources:.markdown=.html)) \ $(addprefix out/,$(apidoc_sources:.markdown=.json)) -apidoc_dirs = out/doc out/doc/api/ out/doc/api/assets out/doc/about out/doc/community out/doc/download out/doc/logos out/doc/images +apidoc_dirs = out/doc out/doc/api/ out/doc/api/assets apiassets = $(subst api_assets,api/assets,$(addprefix out/,$(wildcard doc/api_assets/*))) -doc_images = $(addprefix out/,$(wildcard doc/images/* doc/*.jpg doc/*.png)) - website_files = \ - out/doc/index.html \ - out/doc/v0.4_announcement.html \ - out/doc/cla.html \ out/doc/sh_main.js \ - out/doc/sh_javascript.min.js \ - out/doc/sh_vim-dark.css \ - out/doc/sh.css \ - out/doc/favicon.ico \ - out/doc/pipe.css \ - out/doc/about/index.html \ - out/doc/community/index.html \ - out/doc/download/index.html \ - out/doc/logos/index.html \ - out/doc/changelog.html \ - $(doc_images) - -doc: $(apidoc_dirs) $(website_files) $(apiassets) $(apidocs) tools/doc/ blog node - -blogclean: - rm -rf out/blog - -blog: doc/blog out/Release/node tools/blog - out/Release/node tools/blog/generate.js doc/blog/ out/blog/ doc/blog.html doc/rss.xml + out/doc/sh_javascript.min.js + +doc: $(apidoc_dirs) $(website_files) $(apiassets) $(apidocs) tools/doc/ node $(apidoc_dirs): mkdir -p $@ @@ -199,9 +178,6 @@ out/doc/api/assets/%: doc/api_assets/% out/doc/api/assets/ out/doc/changelog.html: ChangeLog doc/changelog-head.html doc/changelog-foot.html tools/build-changelog.sh node bash tools/build-changelog.sh -out/doc/%.html: doc/%.html node - cat $< | sed -e 's|__VERSION__|'$(VERSION)'|g' > $@ - out/doc/%: doc/% cp -r $< $@ @@ -218,9 +194,6 @@ email.md: ChangeLog tools/email-footer.md blog.html: email.md cat $< | ./node tools/doc/node_modules/.bin/marked > $@ -blog-upload: blog - rsync -r out/blog/ node@nodejs.org:~/web/nodejs.org/blog/ - website-upload: doc rsync -r out/doc/ node@nodejs.org:~/web/nodejs.org/ ssh node@nodejs.org '\ diff --git a/deps/npm/CONTRIBUTING.md b/deps/npm/CONTRIBUTING.md new file mode 100644 index 00000000000..7a60ed2a2bb --- /dev/null +++ b/deps/npm/CONTRIBUTING.md @@ -0,0 +1,5 @@ +## Before you submit a new issue + +* Check if there's a simple solution in the [Troubleshooting](https://github.com/npm/npm/wiki/Troubleshooting) wiki. +* [Search for similar issues](https://github.com/npm/npm/search?q=Similar%20issues&type=Issues). +* Ensure your new issue conforms to the [Contributing Guidelines](https://github.com/npm/npm/wiki/Contributing-Guidelines). diff --git a/deps/npm/LICENSE b/deps/npm/LICENSE index 764bb871843..2c0baba4017 100644 --- a/deps/npm/LICENSE +++ b/deps/npm/LICENSE @@ -1,4 +1,4 @@ -Copyright (c) Isaac Z. Schlueter +Copyright (c) npm, Inc. and Contributors All rights reserved. npm is released under the Artistic License 2.0. @@ -214,7 +214,7 @@ ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -------- -"Node.js" and "node" trademark Joyent, Inc. npm is not officially +"Node.js" and "node" trademark Joyent, Inc. npm is not officially part of the Node.js project, and is neither owned by nor officially affiliated with Joyent, Inc. diff --git a/deps/npm/README.md b/deps/npm/README.md index d01f7a28884..e2f0f79b5be 100644 --- a/deps/npm/README.md +++ b/deps/npm/README.md @@ -220,7 +220,7 @@ If this concerns you, inspect the source before using packages. 
When you find issues, please report them: * web: - + * email: diff --git a/deps/npm/doc/cli/npm-install.md b/deps/npm/doc/cli/npm-install.md index 618ebab0f86..f9d2104cd55 100644 --- a/deps/npm/doc/cli/npm-install.md +++ b/deps/npm/doc/cli/npm-install.md @@ -146,9 +146,9 @@ after packing it up into a tarball (b). Examples: - git+ssh://git@github.com:isaacs/npm.git#v1.0.27 - git+https://isaacs@github.com/isaacs/npm.git - git://github.com/isaacs/npm.git#v1.0.27 + git+ssh://git@github.com:npm/npm.git#v1.0.27 + git+https://isaacs@github.com/npm/npm.git + git://github.com/npm/npm.git#v1.0.27 You may combine multiple arguments, and even multiple types of arguments. For example: diff --git a/deps/npm/doc/cli/npm-ls.md b/deps/npm/doc/cli/npm-ls.md index d80a2cc27a0..21f54264c7f 100644 --- a/deps/npm/doc/cli/npm-ls.md +++ b/deps/npm/doc/cli/npm-ls.md @@ -61,6 +61,12 @@ Show parseable output instead of tree view. List packages in the global install prefix instead of in the current project. +### depth + +* Type: Int + +Max display depth of the dependency tree. + ## SEE ALSO * npm-config(1) diff --git a/deps/npm/doc/cli/npm-outdated.md b/deps/npm/doc/cli/npm-outdated.md index 4d583537873..aa2a7d5dd16 100644 --- a/deps/npm/doc/cli/npm-outdated.md +++ b/deps/npm/doc/cli/npm-outdated.md @@ -14,6 +14,43 @@ The resulting field 'wanted' shows the latest version according to the version specified in the package.json, the field 'latest' the very latest version of the package. +## CONFIGURATION + +### json + +* Default: false +* Type: Boolean + +Show information in JSON format. + +### long + +* Default: false +* Type: Boolean + +Show extended information. + +### parseable + +* Default: false +* Type: Boolean + +Show parseable output instead of tree view. + +### global + +* Default: false +* Type: Boolean + +Check packages in the global install prefix instead of in the current +project. + +### depth + +* Type: Int + +Max depth for checking dependency tree. + ## SEE ALSO * npm-update(1) diff --git a/deps/npm/doc/cli/npm.md b/deps/npm/doc/cli/npm.md index 0f9b30b226e..903aa1079e2 100644 --- a/deps/npm/doc/cli/npm.md +++ b/deps/npm/doc/cli/npm.md @@ -114,7 +114,7 @@ easily by doing `npm view npm contributors`. If you would like to contribute, but don't know what to work on, check the issues list or ask on the mailing list. -* +* * ## BUGS @@ -122,7 +122,7 @@ the issues list or ask on the mailing list. When you find issues, please report them: * web: - + * email: diff --git a/deps/npm/doc/files/package.json.md b/deps/npm/doc/files/package.json.md index 8e6f97de8f8..09f0bec7021 100644 --- a/deps/npm/doc/files/package.json.md +++ b/deps/npm/doc/files/package.json.md @@ -257,7 +257,7 @@ Do it like this: "repository" : { "type" : "git" - , "url" : "http://github.com/isaacs/npm.git" + , "url" : "http://github.com/npm/npm.git" } "repository" : diff --git a/deps/npm/doc/misc/npm-coding-style.md b/deps/npm/doc/misc/npm-coding-style.md index e9333d0ecc2..b6a4a620fb6 100644 --- a/deps/npm/doc/misc/npm-coding-style.md +++ b/deps/npm/doc/misc/npm-coding-style.md @@ -133,7 +133,7 @@ string message to the callback. Stack traces are handy. ## Logging -Logging is done using the [npmlog](https://github.com/isaacs/npmlog) +Logging is done using the [npmlog](https://github.com/npm/npmlog) utility. Please clean up logs when they are no longer helpful. 
In particular, diff --git a/deps/npm/doc/misc/npm-config.md b/deps/npm/doc/misc/npm-config.md index 485d4ca4500..2df31f3a732 100644 --- a/deps/npm/doc/misc/npm-config.md +++ b/deps/npm/doc/misc/npm-config.md @@ -401,7 +401,7 @@ The value `npm init` should use by default for the package author's homepage. ### init.license -* Default: "BSD-2-Clause" +* Default: "ISC" * Type: String The value `npm init` should use by default for the package license. @@ -466,7 +466,7 @@ The default is "http", which shows http, warn, and error output. * Type: Stream This is the stream that is passed to the -[npmlog](https://github.com/isaacs/npmlog) module at run time. +[npmlog](https://github.com/npm/npmlog) module at run time. It cannot be set from the command line, but if you are using npm programmatically, you may wish to send logs to somewhere other than diff --git a/deps/npm/doc/misc/npm-faq.md b/deps/npm/doc/misc/npm-faq.md index d27c7e73540..c2288389c5b 100644 --- a/deps/npm/doc/misc/npm-faq.md +++ b/deps/npm/doc/misc/npm-faq.md @@ -315,12 +315,12 @@ in a web browser. This will also tell you if you are just unable to access the internet for some reason. If the registry IS down, let me know by emailing or posting -an issue at . We'll have +an issue at . We'll have someone kick it or something. ## Why no namespaces? -Please see this discussion: +Please see this discussion: tl;dr - It doesn't actually make things better, and can make them worse. @@ -338,7 +338,7 @@ There is not sufficient need to impose namespace rules on everyone. Post an issue on the github project: -* +* ## Why does npm hate me? diff --git a/deps/npm/doc/misc/npm-registry.md b/deps/npm/doc/misc/npm-registry.md index 514bce2f835..a8c4b0200d3 100644 --- a/deps/npm/doc/misc/npm-registry.md +++ b/deps/npm/doc/misc/npm-registry.md @@ -14,7 +14,7 @@ account information. The official public npm registry is at . It is powered by a CouchDB database at . The code for the couchapp is -available at . npm user accounts +available at . npm user accounts are CouchDB users, stored in the database. diff --git a/deps/npm/html/doc/README.html b/deps/npm/html/doc/README.html index 7bfa82592c6..d457ec95ebe 100644 --- a/deps/npm/html/doc/README.html +++ b/deps/npm/html/doc/README.html @@ -226,7 +226,7 @@

BUGS

When you find issues, please report them:

Be sure to include all of the output from the npm command that didn't work @@ -239,7 +239,7 @@

SEE ALSO

- + - - - - - - - diff --git a/doc/api/timers.markdown b/doc/api/timers.markdown index 8395ac6779b..7ba209e5ee9 100644 --- a/doc/api/timers.markdown +++ b/doc/api/timers.markdown @@ -8,7 +8,7 @@ this module in order to use them. ## setTimeout(callback, delay, [arg], [...]) To schedule execution of a one-time `callback` after `delay` milliseconds. Returns a -`timeoutId` for possible use with `clearTimeout()`. Optionally you can +`timeoutObject` for possible use with `clearTimeout()`. Optionally you can also pass arguments to the callback. It is important to note that your callback will probably not be called in exactly @@ -16,17 +16,17 @@ It is important to note that your callback will probably not be called in exactl the callback will fire, nor of the ordering things will fire in. The callback will be called as close as possible to the time specified. -## clearTimeout(timeoutId) +## clearTimeout(timeoutObject) Prevents a timeout from triggering. ## setInterval(callback, delay, [arg], [...]) To schedule the repeated execution of `callback` every `delay` milliseconds. -Returns a `intervalId` for possible use with `clearInterval()`. Optionally +Returns a `intervalObject` for possible use with `clearInterval()`. Optionally you can also pass arguments to the callback. -## clearInterval(intervalId) +## clearInterval(intervalObject) Stops a interval from triggering. @@ -51,7 +51,7 @@ request the timer hold the program open. If the timer is already `ref`d calling To schedule the "immediate" execution of `callback` after I/O events callbacks and before `setTimeout` and `setInterval` . Returns an -`immediateId` for possible use with `clearImmediate()`. Optionally you +`immediateObject` for possible use with `clearImmediate()`. Optionally you can also pass arguments to the callback. Callbacks for immediates are queued in the order in which they were created. @@ -59,6 +59,6 @@ The entire callback queue is processed every event loop iteration. If you queue an immediate from a inside an executing callback that immediate won't fire until the next event loop iteration. -## clearImmediate(immediateId) +## clearImmediate(immediateObject) Stops an immediate from triggering. diff --git a/doc/blog.html b/doc/blog.html deleted file mode 100644 index f6d9cf805b1..00000000000 --- a/doc/blog.html +++ /dev/null @@ -1,241 +0,0 @@ - - - - - - - - - - <%= title || "Node.js Blog" %> - - - - -
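The timers documentation above is easiest to read next to a small usage sketch. The following is illustrative only (the delays, messages, and variable names are invented); it simply shows that each `set*` call returns an object which the matching `clear*` call accepts:

```js
// Illustrative sketch only (delays, messages, and names are invented).
// Each set* call returns an object; the matching clear* call takes that
// object rather than a numeric id.
var timeoutObject = setTimeout(function (msg) {
  console.log(msg);               // runs once, roughly 100ms from now
}, 100, 'hello');                 // extra arguments are passed to the callback

var intervalObject = setInterval(function () {
  console.log('tick');            // would repeat every 500ms if not cleared
}, 500);

var immediateObject = setImmediate(function () {
  console.log('immediate');       // would run after I/O callbacks, before timers
});

// Cancel the repeating timer and the immediate; only 'hello' is printed.
clearInterval(intervalObject);
clearImmediate(immediateObject);
```

The timeout and interval objects are also what `unref()`/`ref()`, described above, are called on.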
- -
- -
-
- - - -
- -
-

<%- title %>

- <% if (typeof post !== 'undefined') { - // just one post on this page - %> -

<%- - post.date.toUTCString().replace(/ GMT$/, '') + ' UTC' + - (post.author ? ' - ' + post.author : '') + - (post.category ? ' - ' + - post.category + '' : '') - %>

- - <%- post.content %> - -

Please post feedback and comments on - the Node.JS - user mailing list.
- Please post bugs and feature requests on - the Node.JS - github repository.

- - <% - if (post.next || post.prev) { - if (post.prev) { - %>

← <%= - post.prev.title - %>

- <% - } - if (post.next) { - %>

<%= - post.next.title - %> →

- <% - } - } - } else { // not single post page - if (paginated && total > 1 ) { - if (page > 0) { - // add 1 to all of the displayed numbers, because - // humans are not zero-indexed like they ought to be. - %> -

- ← Page <%- page %> -

- <% - } - if (page < total - 1) { %> -

- Page <%- page + 2 %> → -

- <% - } - } - - posts.forEach(function(post) { - %> -
-

- -

<%- - post.date.toUTCString().replace(/ GMT$/, '') + ' UTC' + - (post.author ? ' - ' + post.author : '') + - (post.category ? ' - ' + - post.category + '' : '') - %>

- - <%- post.content %> -
- <% - }); - - if (paginated && total > 1 ) { - if (page > 0) { - // add 1 to all of the displayed numbers, because - // humans are not zero-indexed like they ought to be. - %> -

- ← Page <%- page %> -

- <% - } - if (page < total - 1) { %> -

- Page <%- page + 2 %> → -

- <% - } - } // pagination - } // not a single post - %> -
-
- - - - - - - - - diff --git a/doc/blog/README.md b/doc/blog/README.md deleted file mode 100644 index 24fba493440..00000000000 --- a/doc/blog/README.md +++ /dev/null @@ -1,28 +0,0 @@ -title: README.md -status: private - -# How This Blog Works - -Each `.md` file in this folder structure is a blog post. It has a -few headers and a markdown body. (HTML is allowed in the body as well.) - -The relevant headers are: - -1. title -2. author -3. status: Only posts with a status of "publish" are published. -4. category: The "release" category is treated a bit specially. -5. version: Only relevant for "release" category. -6. date -7. slug: The bit that goes on the url. Must be unique, will be - generated from the title and date if missing. - -Posts in the "release" category are only shown in the main lists when -they are the most recent release for that version family. The stable -branch supersedes its unstable counterpart, so the presence of a `0.8.2` -release notice will cause `0.7.10` to be hidden, but `0.6.19` would -be unaffected. - -The folder structure in the blog source does not matter. Organize files -here however makes sense. The metadata will be sorted out in the build -later. diff --git a/doc/blog/Uncategorized/an-easy-way-to-build-scalable-network-programs.md b/doc/blog/Uncategorized/an-easy-way-to-build-scalable-network-programs.md deleted file mode 100644 index dc0980b4b83..00000000000 --- a/doc/blog/Uncategorized/an-easy-way-to-build-scalable-network-programs.md +++ /dev/null @@ -1,16 +0,0 @@ -title: An Easy Way to Build Scalable Network Programs -author: ryandahl -date: Tue Oct 04 2011 15:39:56 GMT-0700 (PDT) -status: publish -category: Uncategorized -slug: an-easy-way-to-build-scalable-network-programs - -Suppose you're writing a web server which does video encoding on each file upload. Video encoding is very much compute bound. Some recent blog posts suggest that Node.js would fail miserably at this. - -Using Node does not mean that you have to write a video encoding algorithm in JavaScript (a language without even 64 bit integers) and crunch away in the main server event loop. The suggested approach is to separate the I/O bound task of receiving uploads and serving downloads from the compute bound task of video encoding. In the case of video encoding this is accomplished by forking out to ffmpeg. Node provides advanced means of asynchronously controlling subprocesses for work like this. - -It has also been suggested that Node does not take advantage of multicore machines. Node has long supported load-balancing connections over multiple processes in just a few lines of code - in this way a Node server will use the available cores. In coming releases we'll make it even easier: just pass --balance on the command line and Node will manage the cluster of processes. - -Node has a clear purpose: provide an easy way to build scalable network programs. It is not a tool for every problem. Do not write a ray tracer with Node. Do not write a web browser with Node. Do however reach for Node if tasked with writing a DNS server, DHCP server, or even a video encoding server. - -By relying on the kernel to schedule and preempt computationally expensive tasks and to load balance incoming connections, Node appears less magical than server platforms that employ userland scheduling. So far, our focus on simplicity and transparency has paid off: the number of success stories from developers and corporations who are adopting the technology continues to grow. 
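The "few lines of code" the post above alludes to is the multi-process pattern that shipped as the `cluster` module. A rough sketch follows; the port number and the one-worker-per-CPU choice are illustrative, not from the post:

```js
// Rough sketch of the multi-process pattern described above, using the
// cluster module; the port number and one-worker-per-CPU choice are
// illustrative, not from the post.
var cluster = require('cluster');
var http = require('http');
var numCPUs = require('os').cpus().length;

if (cluster.isMaster) {
  // The master only forks and supervises workers.
  for (var i = 0; i < numCPUs; i++) {
    cluster.fork();
  }
} else {
  // Every worker listens on the same port; incoming connections are
  // balanced across the worker processes.
  http.createServer(function (req, res) {
    res.writeHead(200);
    res.end('handled by pid ' + process.pid + '\n');
  }).listen(8000);
}
```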
diff --git a/doc/blog/Uncategorized/bnoordhuis-departure.md b/doc/blog/Uncategorized/bnoordhuis-departure.md deleted file mode 100644 index e49baf7a675..00000000000 --- a/doc/blog/Uncategorized/bnoordhuis-departure.md +++ /dev/null @@ -1,17 +0,0 @@ -title: Ben Noordhuis's Departure -date: Tue Dec 3 14:13:57 PST 2013 -slug: bnoordhuis-departure - -As of this past weekend, Ben Noordhuis has decided to step away from -Node.js and libuv, and is no longer acting as a core committer. - -Ben has done a tremendous amount of great work in the past. We're sad -to lose the benefit of his continued hard work and expertise, and -extremely grateful for what he has added to Node.js and libuv over the -years. - -Many of you already have expressed your opinion regarding recent -drama, and I'd like to ask that you please respect our wishes to let -this issue rest, so that we can all focus on the road forward. - -Thanks. diff --git a/doc/blog/Uncategorized/development-environment.md b/doc/blog/Uncategorized/development-environment.md deleted file mode 100644 index e6015e04cd1..00000000000 --- a/doc/blog/Uncategorized/development-environment.md +++ /dev/null @@ -1,25 +0,0 @@ -title: Development Environment -author: ryandahl -date: Mon Apr 04 2011 20:16:27 GMT-0700 (PDT) -status: publish -category: Uncategorized -slug: development-environment - -If you're compiling a software package because you need a particular version (e.g. the latest), then it requires a little bit more maintenance than using a package manager like dpkg. Software that you compile yourself should *not* go into /usr, it should go into your home directory. This is part of being a software developer. - -One way of doing this is to install everything into $HOME/local/$PACKAGE. Here is how I install node on my machine:
./configure --prefix=$HOME/local/node-v0.4.5 && make install
- -To have my paths automatically set I put this inside my $HOME/.zshrc:
PATH="$HOME/local/bin:/opt/local/bin:/usr/bin:/sbin:/bin"
-LD_LIBRARY_PATH="/opt/local/lib:/usr/local/lib:/usr/lib"
-for i in $HOME/local/*; do
-  [ -d $i/bin ] && PATH="${i}/bin:${PATH}"
-  [ -d $i/sbin ] && PATH="${i}/sbin:${PATH}"
-  [ -d $i/include ] && CPATH="${i}/include:${CPATH}"
-  [ -d $i/lib ] && LD_LIBRARY_PATH="${i}/lib:${LD_LIBRARY_PATH}"
-  [ -d $i/lib/pkgconfig ] && PKG_CONFIG_PATH="${i}/lib/pkgconfig:${PKG_CONFIG_PATH}"
-  [ -d $i/share/man ] && MANPATH="${i}/share/man:${MANPATH}"
-done
- -Node is under sufficiently rapid development that everyone should be compiling it themselves. A corollary of this is that npm (which should be installed alongside Node) does not require root to install packages. - -CPAN and RubyGems have blurred the lines between development tools and system package managers. With npm we wish to draw a clear line: it is not a system package manager. It is not for installing firefox or ffmpeg or OpenSSL; it is for rapidly downloading, building, and setting up Node packages. npm is a development tool. When a program written in Node becomes sufficiently mature it should be distributed as a tarball, .deb, .rpm, or other package system. It should not be distributed to end users with npm. diff --git a/doc/blog/Uncategorized/evolving-the-node-js-brand.md b/doc/blog/Uncategorized/evolving-the-node-js-brand.md deleted file mode 100644 index e631a227027..00000000000 --- a/doc/blog/Uncategorized/evolving-the-node-js-brand.md +++ /dev/null @@ -1,34 +0,0 @@ -title: Evolving the Node.js Brand -author: Emily Tanaka-Delgado -date: Mon Jul 11 2011 12:02:45 GMT-0700 (PDT) -status: publish -category: Uncategorized -slug: evolving-the-node-js-brand - -To echo Node’s evolutionary nature, we have refreshed the identity to help mark an exciting time for developers, businesses and users who benefit from the pioneering technology. - -Building a brand - -We began exploring elements to express Node.js and jettisoned preconceived notions about what we thought Node should look like, and focused on what Node is: kinetic,connectedscalablemodularmechanical and organic. Working with designer Chris Glass, our explorations emphasized Node's dynamism and formed a visual language based on structure, relationships and interconnectedness. - - - -Inspired by process visualization, we discovered pattern, form, and by relief, the hex shape. The angled infrastructure encourages energy to move through the letterforms. - - - -This language can expand into the organic network topography of Node or distill down into a single hex connection point. - -This scaling represents the dynamic nature of Node in a simple, distinct manner. - - - -We look forward to exploring this visual language as the technology charges into a very promising future. - - - -We hope you'll have fun using it. - -To download the new logo, visit nodejs.org/logos. - - diff --git a/doc/blog/Uncategorized/growing-up.md b/doc/blog/Uncategorized/growing-up.md deleted file mode 100644 index 10e1d58d006..00000000000 --- a/doc/blog/Uncategorized/growing-up.md +++ /dev/null @@ -1,12 +0,0 @@ -title: Growing up -author: ryandahl -date: Thu Dec 15 2011 11:59:15 GMT-0800 (PST) -status: publish -category: Uncategorized -slug: growing-up - -This week Microsoft announced support for Node in Windows Azure, their cloud computing platform. For the Node core team and the community, this is an important milestone. We've worked hard over the past six months reworking Node's machinery to support IO completion ports and Visual Studio to provide a good native port to Windows. The overarching goal of the port was to expand our user base to the largest number of developers. Happily, this has paid off in the form of being a first class citizen on Azure. Many users who would have never used Node as a pure unix tool are now up and running on the Windows platform. More users translates into a deeper and better ecosystem of modules, which makes for a better experience for everyone. 
- -We also redesigned our website - something that we've put off for a long time because we felt that Node was too nascent to dedicate marketing to it. But now that we have binary distributions for Macintosh and Windows, have bundled npm, and are serving millions of users at various companies, we felt ready to indulge in a new website and share of a few of our success stories on the home page. - -Work is on-going. We continue to improve the software, making performance improvements and adding isolate support, but Node is growing up. diff --git a/doc/blog/Uncategorized/jobs-nodejs-org.md b/doc/blog/Uncategorized/jobs-nodejs-org.md deleted file mode 100644 index fe20efe911a..00000000000 --- a/doc/blog/Uncategorized/jobs-nodejs-org.md +++ /dev/null @@ -1,14 +0,0 @@ -title: jobs.nodejs.org -author: ryandahl -date: Thu Mar 24 2011 23:05:22 GMT-0700 (PDT) -status: publish -category: Uncategorized -slug: jobs-nodejs-org - -We are starting an official jobs board for Node. There are two goals for this - -1. Promote the small emerging economy around this platform by having a central space for employers to find Node programmers. - -2. Make some money. We work hard to build this platform and taking a small tax for job posts seems a like reasonable "tip jar". - -jobs.nodejs.org diff --git a/doc/blog/Uncategorized/ldapjs-a-reprise-of-ldap.md b/doc/blog/Uncategorized/ldapjs-a-reprise-of-ldap.md deleted file mode 100644 index 7174e2b7cc6..00000000000 --- a/doc/blog/Uncategorized/ldapjs-a-reprise-of-ldap.md +++ /dev/null @@ -1,84 +0,0 @@ -title: ldapjs: A reprise of LDAP -author: mcavage -date: Thu Sep 08 2011 14:25:43 GMT-0700 (PDT) -status: publish -category: Uncategorized -slug: ldapjs-a-reprise-of-ldap - -This post has been about 10 years in the making. My first job out of college was at IBM working on the Tivoli Directory Server, and at the time I had a preconceived notion that working on anything related to Internet RFCs was about as hot as you could get. I spent a lot of time back then getting "down and dirty" with everything about LDAP: the protocol, performance, storage engines, indexing and querying, caching, customer use cases and patterns, general network server patterns, etc. Basically, I soaked up as much as I possibly could while I was there. On top of that, I listened to all the "gray beards" tell me about the history of LDAP, which was a bizarre marriage of telecommunications conglomerates and graduate students. The point of this blog post is to give you a crash course in LDAP, and explain what makes ldapjs different. Allow me to be the gray beard for a bit... -

What is LDAP and where did it come from?

- -Directory services were largely pioneered by the telecommunications companies (e.g., AT&T) to allow fast information retrieval of all the crap you'd expect would be in a telephone book and directory. That is, given a name, or an address, or an area code, or a number, or a foo support looking up customer records, billing information, routing information, etc. The efforts of several telcos came to exist in the X.500 standard(s). An X.500 directory is one of the most complicated beasts you can possibly imagine, but on a high note, there's -probably not a thing you can imagine in a directory service that wasn't thought of in there. It is literally the kitchen sink. Oh, and it doesn't run over IP (it's actually on the OSI model). - -Several years after X.500 had been deployed (at telcos, academic institutions, etc.), it became clear that the Internet was "for real." LDAP, the "Lightweight Directory Access Protocol," was invented to act purely as an IP-accessible gateway to an X.500 directory. - -At some point in the early 90's, a graduate student at the University of Michigan (with some help) cooked up the "grandfather" implementation of the LDAP protocol, which wasn't actually a "gateway," but rather a stand-alone implementation of LDAP. Said implementation, like many things at the time, was a process-per-connection concurrency model, and had "backends" (aka storage engine) for the file system and the Unix DB API. At some point the Berkeley Database (BDB) was put in, and still remains the de facto storage engine for most LDAP directories. - -Ok, so some a graduate student at UM wrote an LDAP server that wasn't a gateway. So what? Well, that UM code base turns out to be the thing that pretty much every vendor did a source license for. Those graduate students went off to Netscape later in the 90's, and largely dominated the market of LDAP middleware until Active Directory came along many years later (as far as I know, Active Directory is "from scratch", since while it's "almost" LDAP, it's different in a lot of ways). That Netscape code base was further bought and sold over the years to iPlanet, Sun Microsystems, and Red Hat (I'm probably missing somebody in that chain). It now lives in the Fedora umbrella as '389 Directory Server.' Probably the most popular fork of that code base now is OpenLDAP. - -IBM did the same thing, and the Directory Server I worked on was a fork of the UM code too, but it heavily diverged from the Netscape branches. The divergence was primarily due to: (1) backing to DB2 as opposed to BDB, and (2) needing to run on IBM's big iron like OS/400 and Z series mainframes. - -Macro point is that there have actually been very few "fresh" implementations of LDAP, and it gets a pretty bad reputation because at the end of the day you've got 20 years of "bolt-ons" to grad student code. Oh, and it was born out of ginormous telcos, so of course the protocol is overly complex. - -That said, while there certainly is some wacky stuff in the LDAP protocol itself, it really suffered from poor and buggy implementations more than the fact that LDAP itself was fundamentally flawed. As engine yard pointed out a few years back, you can think of LDAP as the original NoSQL store. -

LDAP: The Good Parts

- -So what's awesome about LDAP? Since it's a directory system it maintains a hierarchy of your data, which as an information management pattern aligns -with _a lot_ of use case (the quintessential example is white pages for people in your company, but subscriptions to SaaS applications, "host groups" -for tracking machines/instances, physical goods tracking, etc., all have use cases that fit that organization scheme). For example, presumably at your job -you have a "reporting chain." Let's say a given record in LDAP (I'll use myself as a guinea pig here) looks like: -
    firstName: Mark
-    lastName: Cavage
-    city: Seattle
-    uid: markc
-    state: Washington
-    mail: mcavagegmailcom
-    phone: (206) 555-1212
-    title: Software Engineer
-    department: 123456
-    objectclass: joyentPerson
-The record for me would live under the tree of engineers I report to; with some other popular engineers under said vice president added as an example, the tree would look like: -
                   uid=david
-                    /
-               uid=bryan
-            /      |      \
-      uid=markc  uid=ryah  uid=isaacs
-Ok, so we've got a tree. It's not tremendously different from your filesystem, but how do we find people? LDAP has a rich search filter syntax that makes a lot of sense for key/value data (far more than tacking Map Reduce jobs on does, imo), and all search queries take a "start point" in the tree. Here's an example: let's say I wanted to find all "Software Engineers" in the entire company, a filter would look like: -
     (title="Software Engineer")
-And I'd just start my search from 'uid=david' in the example above. Let's say I wanted to find all software engineers who worked in Seattle: -
     (&(title="Software Engineer")(city=Seattle))
-I could keep going, but the gist is that LDAP has "full" boolean predicate logic, wildcard filters, etc. It's really rich. - -Oh, and on top of the technical merits, better or worse, it's an established standard for both administrators and applications (i.e., most "shipped" intranet software has either a local user repository or the ability to leverage an LDAP server somewhere). So there's a lot of compelling reasons to look at leveraging LDAP. -
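As a hedged illustration of how a filter like the ones above is used from code, here is a sketch with the ldapjs client API; the server URL, base DN, and attribute names are invented for this example:

```js
// Hedged sketch: running a search with a filter like the one above via the
// ldapjs client. The URL, base DN, and attribute names are invented here.
var ldap = require('ldapjs');

var client = ldap.createClient({ url: 'ldap://127.0.0.1:1389' });

var opts = {
  scope: 'sub',                                         // search the whole subtree
  filter: '(&(title=Software Engineer)(city=Seattle))'  // boolean predicate logic
};

client.search('uid=david, o=example', opts, function (err, res) {
  if (err) throw err;
  res.on('searchEntry', function (entry) {
    console.log(entry.object);    // each match arrives as a plain JS object
  });
  res.on('end', function () {
    client.unbind();
  });
});
```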

ldapjs: Why do I care?

- -As I said earlier, I spent a lot of time at IBM observing how customers used LDAP, and the real items I took away from that experience were: -
    -
  • LDAP implementations have suffered a lot from never having been designed from the ground up for a large number of concurrent connections with asynchronous operations.
  • -
  • There are use cases for LDAP that just don't always fit the traditional "here's my server and storage engine" model. A lot of simple customer use cases wanted an LDAP access point, but did not want to be forced into taking the heavy backends that came with it (they wanted the original gateway model!). There was an entire "sub" industry for this known as "meta directories" back in the late 90's and early 2000's.
  • -
  • Replication was always a sticking point. LDAP vendors all tried to offer a big multi-master, multi-site replication model. It was a lot of "bolt-on" complexity, done before the CAP theorem was written, and certainly before it was accepted as "truth."
  • -
  • Nobody uses all of the protocol. In fact, 20% of the features solve 80% of the use cases (I'm making that number up, but you get the idea).
  • -
- -For all the good parts of LDAP, those are really damned big failing points, and even I eventually abandoned LDAP for the greener pastures of NoSQL somewhere along the way. But it always nagged at me that LDAP didn't get its due because of a lot of implementation problems (to be clear, if I could, I'd change some aspects of the protocol itself too, but that's a lot harder). - -Well, in the last year, I went to work for Joyent, and like everyone else, we have several use cases that are classic directory service problems. If you break down the list I outlined above: -
    -
  • Connection-oriented and asynchronous: Holy smokes batman, node.js is a completely kick-ass event-driven asynchronous server platform that manages connections like a boss. Check!
  • -
  • Lots of use cases: Yeah, we've got some. Man, the sinatra/express paradigm is so easy to slap over anything. How about we just do that and leave as many use cases open as we can. Check!
  • -
  • Replication is hard. CAP is right: There are a lot of distributed databases out vying to solve exactly this problem. At Joyent we went with Riak. Check!
  • -
  • Don't need all of the protocol: I'm lazy. Let's just skip the stupid things most people don't need. Check!
  • -
- -So that's the crux of ldapjs right there. Giving you the ability to put LDAP back into your application while nailing those 4 fundamental problems that plague most existing LDAP deployments. - -The obvious question is how it turned out, and the answer is, honestly, better than I thought it would. When I set out to do this, I actually assumed I'd be shipping a much smaller percentage of the RFC than is there. There's actually about 95% of the core RFC implemented. I wasn't sure if the marriage of this protocol to node/JavaScript would work out, but if you've used express ever, this should be _really_ familiar. And I tried to make it as natural as possible to use "pure" JavaScript objects, rather than requiring the developer to understand ASN.1 (the binary wire protocol) or the LDAP RFC in detail (this one mostly worked out; ldap_modify is still kind of a PITA). - -Within 24 hours of releasing ldapjs on Twitter, there was an implementation of an address book that works with Thunderbird/Evolution, by the end of that weekend there was some slick integration with CouchDB, and ldapjs even got used in one of the node knockout apps. Off to a pretty good start! - -
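To make the express comparison concrete, here is a minimal ldapjs server sketch; the `o=example` base and the single hard-coded entry are invented, and a real server would back `search` with whatever store it fronts:

```js
// Minimal, illustrative ldapjs server in the express-like style the post
// describes. The o=example base and the single hard-coded entry are invented.
var ldap = require('ldapjs');

var server = ldap.createServer();

server.search('o=example', function (req, res, next) {
  var entry = {
    dn: 'uid=markc, o=example',
    attributes: {
      objectclass: ['joyentPerson'],
      uid: 'markc',
      title: 'Software Engineer',
      city: 'Seattle'
    }
  };
  // Only send the entry when it matches the filter the client asked for.
  if (req.filter.matches(entry.attributes))
    res.send(entry);
  res.end();
  return next();
});

server.listen(1389, function () {
  console.log('ldapjs listening at ' + server.url);
});
```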

The Road Ahead

- -Hopefully you've been motivated to learn a little bit more about LDAP and try out ldapjs. The best place to start is probably the guide. After that you'll probably need to pick up a book from back in the day. ldapjs itself is still in its infancy; there's quite a bit of room to add some slick client-side logic (e.g., connection pools, automatic reconnects), easy to use schema validation, backends, etc. By the time this post is live, there will be experimental dtrace support if you're running on Mac OS X or preferably Joyent's SmartOS (shameless plug). And that nagging percentage of the protocol I didn't do will get filled in over time I suspect. If you've got an interest in any of this, send me some pull requests, but most importantly, I just want to see LDAP not just be a skeleton in the closet and get used in places where you should be using it. So get out there and write you some LDAP. diff --git a/doc/blog/Uncategorized/libuv-status-report.md b/doc/blog/Uncategorized/libuv-status-report.md deleted file mode 100644 index 37c8227fe25..00000000000 --- a/doc/blog/Uncategorized/libuv-status-report.md +++ /dev/null @@ -1,45 +0,0 @@ -title: libuv status report -author: ryandahl -date: Fri Sep 23 2011 12:45:50 GMT-0700 (PDT) -status: publish -category: Uncategorized -slug: libuv-status-report - -We announced back in July that with Microsoft's support Joyent would be porting Node to Windows. This effort is ongoing but I thought it would be nice to make a status report post about the new platform library libuv which has resulted from porting Node to Windows. - -libuv's purpose is to abstract platform-dependent code in Node into one place where it can be tested for correctness and performance before bindings to V8 are added. Since Node is totally non-blocking, libuv turns out to be a rather useful library itself: a BSD-licensed, minimal, high-performance, cross-platform networking library. - -We attempt to not reinvent the wheel where possible. The entire Unix backend sits heavily on Marc Lehmann's beautiful libraries libev and libeio. For DNS we integrated with Daniel Stenberg's C-Ares. For cross-platform build-system support we're relying on Chrome's GYP meta-build system. - -The current implemented features are: -
    -
  • Non-blocking TCP sockets (using IOCP on Windows)
  • -
  • Non-blocking named pipes
  • -
  • UDP
  • -
  • Timers
  • -
  • Child process spawning
  • -
  • Asynchronous DNS via c-ares or uv_getaddrinfo.
  • -
  • Asynchronous file system APIs uv_fs_*
  • -
  • High resolution time uv_hrtime
  • -
  • Current executable path look up uv_exepath
  • -
  • Thread pool scheduling uv_queue_work
  • -
-The features we are still working on are: -
    -
  • File system events (Currently supports inotify, ReadDirectoryChangesW and will support kqueue and event ports in the near future.) uv_fs_event_t
  • -
  • VT100 TTY uv_tty_t
  • -
  • Socket sharing between processes uv_ipc_t (planned API)
  • -
-For complete documentation see the header file: include/uv.h. There are a number of tests in the test directory which demonstrate the API. - -libuv supports Microsoft Windows operating systems since Windows XP SP2. It can be built with either Visual Studio or MinGW. Solaris 121 and later using GCC toolchain. Linux 2.6 or better using the GCC toolchain. Macinotsh Darwin using the GCC or XCode toolchain. It is known to work on the BSDs but we do not check the build regularly. - -In addition to Node v0.5, a number of projects have begun to use libuv: - -We hope to see more people contributing and using libuv in the future! diff --git a/doc/blog/Uncategorized/node-meetup-this-thursday.md b/doc/blog/Uncategorized/node-meetup-this-thursday.md deleted file mode 100644 index 6f38e99988c..00000000000 --- a/doc/blog/Uncategorized/node-meetup-this-thursday.md +++ /dev/null @@ -1,11 +0,0 @@ -title: Node Meetup this Thursday -author: ryandahl -date: Tue Aug 02 2011 21:37:02 GMT-0700 (PDT) -status: publish -category: Uncategorized -slug: node-meetup-this-thursday - -http://nodejs.org/meetup/ -http://nodemeetup.eventbrite.com/ - -Three companies will describe their distributed Node applications. Sign up soon, space is limited! diff --git a/doc/blog/Uncategorized/node-office-hours-cut-short.md b/doc/blog/Uncategorized/node-office-hours-cut-short.md deleted file mode 100644 index 743394460cc..00000000000 --- a/doc/blog/Uncategorized/node-office-hours-cut-short.md +++ /dev/null @@ -1,12 +0,0 @@ -title: Node Office Hours Cut Short -author: ryandahl -date: Thu Apr 28 2011 09:04:35 GMT-0700 (PDT) -status: publish -category: Uncategorized -slug: node-office-hours-cut-short - -This week office hours are only from 4pm to 6pm. Isaac will be in the Joyent office in SF - everyone else is out of town. Sign up at http://nodeworkup.eventbrite.com/ if you would like to come. - -The week after, Thursday May 5th, we will all be at NodeConf in Portland. - -Normal office hours resume Thursday May 12th. diff --git a/doc/blog/Uncategorized/office-hours.md b/doc/blog/Uncategorized/office-hours.md deleted file mode 100644 index fc2769095f4..00000000000 --- a/doc/blog/Uncategorized/office-hours.md +++ /dev/null @@ -1,12 +0,0 @@ -title: Office Hours -author: ryandahl -date: Wed Mar 23 2011 21:42:47 GMT-0700 (PDT) -status: publish -category: Uncategorized -slug: office-hours - -Starting next Thursday Isaac, Tom, and I will be holding weekly office hours at Joyent HQ in San Francisco. Office hours are meant to be subdued working time - there are no talks and no alcohol. Bring your bugs or just come and hack with us. - -Our building requires that everyone attending be on a list so you must sign up at Event Brite. - -We start at 4p and end promptly at 8p. diff --git a/doc/blog/Uncategorized/porting-node-to-windows-with-microsoft%e2%80%99s-help.md b/doc/blog/Uncategorized/porting-node-to-windows-with-microsoft%e2%80%99s-help.md deleted file mode 100644 index cea3e880272..00000000000 --- a/doc/blog/Uncategorized/porting-node-to-windows-with-microsoft%e2%80%99s-help.md +++ /dev/null @@ -1,12 +0,0 @@ -title: Porting Node to Windows With Microsoft’s Help -author: ryandahl -date: Thu Jun 23 2011 15:22:58 GMT-0700 (PDT) -status: publish -category: Uncategorized -slug: porting-node-to-windows-with-microsoft%e2%80%99s-help - -I'm pleased to announce that Microsoft is partnering with Joyent in formally contributing resources towards porting Node to Windows. 
As you may have heard in a talk we gave earlier this year, we have started the undertaking of a native port to Windows - targeting the high-performance IOCP API. - -This requires a rather large modification of the core structure, and we're very happy to have official guidance and engineering resources from Microsoft. Rackspace is also contributing Bert Belder's time to this undertaking. - -The result will be an official binary node.exe releases on nodejs.org, which will work on Windows Azure and other Windows versions as far back as Server 2003. diff --git a/doc/blog/Uncategorized/profiling-node-js.md b/doc/blog/Uncategorized/profiling-node-js.md deleted file mode 100644 index 6f041af3b9f..00000000000 --- a/doc/blog/Uncategorized/profiling-node-js.md +++ /dev/null @@ -1,60 +0,0 @@ -title: Profiling Node.js -author: Dave Pacheco -date: Wed Apr 25 2012 13:48:58 GMT-0700 (PDT) -status: publish -category: Uncategorized -slug: profiling-node-js - -It's incredibly easy to visualize where your Node program spends its time using DTrace and node-stackvis (a Node port of Brendan Gregg's FlameGraph tool): - -
    -
  1. Run your Node.js program as usual.
  2. -
  3. In another terminal, run: -
    -$ dtrace -n 'profile-97/execname == "node" && arg1/{
    -    @[jstack(150, 8000)] = count(); } tick-60s { exit(0); }' > stacks.out
    - This will sample about 100 times per second for 60 seconds and emit results to stacks.out. Note that this will sample all running programs called "node". If you want a specific process, replace execname == "node" with pid == 12345 (the process id). -
  4. -
  5. Use the "stackvis" tool to transform this directly into a flame graph. First, install it: -
    $ npm install -g stackvis
    - then use stackvis to convert the DTrace output to a flamegraph: -
    $ stackvis dtrace flamegraph-svg < stacks.out > stacks.svg
    -
  6. -
  7. Open stacks.svg in your favorite browser.
  8. -
- -You'll be looking at something like this: - - - -This is a visualization of all of the profiled call stacks. This example is from the "hello world" HTTP server on the Node.js home page under load. Start at the bottom, where you have "main", which is present in most Node stacks because Node spends most on-CPU time in the main thread. Above each row, you have the functions called by the frame beneath it. As you move up, you'll see actual JavaScript function names. The boxes in each row are not in chronological order, but their width indicates how much time was spent there. When you hover over each box, you can see exactly what percentage of time is spent in each function. This lets you see at a glance where your program spends its time. - -That's the summary. There are a few prerequisites: - -
    -
  • You must gather data on a system that supports DTrace with the Node.js ustack helper. For now, this pretty much means illumos-based systems like SmartOS, including the Joyent Cloud. MacOS users: OS X supports DTrace, but not ustack helpers. The way to get this changed is to contact your Apple developer liaison (if you're lucky enough to have one) or file a bug report at bugreport.apple.com. I'd suggest referencing existing bugs 5273057 and 11206497. More bugs filed (even if closed as dups) show more interest and make it more likely Apple will choose to fix this.
  • -
  • You must be on 32-bit Node.js 0.6.7 or later, built --with-dtrace. The helper doesn't work with 64-bit Node yet. On illumos (including SmartOS), development releases (the 0.7.x train) include DTrace support by default.
  • -
- -There are a few other notes: - -
    -
  • You can absolutely profile apps in production, not just development, since compiling with DTrace support has very minimal overhead. You can start and stop profiling without restarting your program.
  • -
  • You may want to run the stacks.out output through c++filt to demangle C++ symbols. Be sure to use the c++filt that came with the compiler you used to build Node. For example: -
    c++filt < stacks.out > demangled.out
    - then you can use demangled.out to create the flamegraph. -
  • -
  • If you want, you can filter stacks containing a particular function. The best way to do this is to first collapse the original DTrace output, then grep out what you want: -
    -$ stackvis dtrace collapsed < stacks.out | grep SomeFunction > collapsed.out
    -$ stackvis collapsed flamegraph-svg < collapsed.out > stacks.svg
    -
  • -
  • If you've used Brendan's FlameGraph tools, you'll notice the coloring is a little different in the above flamegraph. I ported his tools to Node first so I could incorporate it more easily into other Node programs, but I've also been playing with different coloring options. The current default uses hue to denote stack depth and saturation to indicate time spent. (These are also indicated by position and size.) Other ideas include coloring by module (so V8, JavaScript, libc, etc. show up as different colors.) -
  • -
- -For more on the underlying pieces, see my previous post on Node.js profiling and Brendan's post on Flame Graphs. - -
- -Dave Pacheco blogs at dtrace.org diff --git a/doc/blog/Uncategorized/some-new-node-projects.md b/doc/blog/Uncategorized/some-new-node-projects.md deleted file mode 100644 index 2590f547a0c..00000000000 --- a/doc/blog/Uncategorized/some-new-node-projects.md +++ /dev/null @@ -1,13 +0,0 @@ -title: Some New Node Projects -author: ryandahl -date: Mon Aug 29 2011 08:30:41 GMT-0700 (PDT) -status: publish -category: Uncategorized -slug: some-new-node-projects - -