Skip to content
This repository has been archived by the owner on Dec 17, 2018. It is now read-only.

Commit

Permalink
initial release
Browse files Browse the repository at this point in the history
  • Loading branch information
radekstepan committed Jun 9, 2013
0 parents commit 8fab22b
Show file tree
Hide file tree
Showing 19 changed files with 6,271 additions and 0 deletions.
5 changes: 5 additions & 0 deletions .gitignore
@@ -0,0 +1,5 @@
*.log
node_modules/
config.json
example/downloads/
example/nzbs/
26 changes: 26 additions & 0 deletions README.md
@@ -0,0 +1,26 @@
# nzb-grabber-js

Download/grab binary posts from NNTP (Usenet) servers using Node.js.

```bash
$ npm install nzb-grabber-js
```

Pass an NZB file buffer; it will be parsed and all of its files downloaded in chunks. Chunks arrive in order, so you can append each one to an existing file. Once every file has been downloaded, the `done` flag is set to `true`.

```coffee-script
NzbGrabber = require 'nzb-grabber-js'

client = new NzbGrabber
'host': 'news.usenetserver.com'
'port': 119
'user': 'username'
'pass': 'password'
'conn': 4

client.grab nzbFile, (err, filename, chunk, done) ->
fs.appendFile './downloads/' + filename, chunk, (err) ->
console.log 'All files downloaded' if done
```

Have a look at `./example/watch.coffee` for a complete working example.
29 changes: 29 additions & 0 deletions example/watch.coffee
@@ -0,0 +1,29 @@
#!/usr/bin/env coffee
fs = require 'fs'
watchr = require 'watchr'
log = require 'node-logging'

NzbGrabber = require '../src/grabber.coffee'

# Init: a single grabber instance configured from the repo-level config file
# (host/port/user/pass/conn — see README for the expected keys).
grabber = new NzbGrabber require '../config.json'

# Watching a directory for new nzb files: the `nzbs` folder next to this script.
watchr.watch
    'path': __dirname + '/nzbs'
    'listeners':
        'error': log.err
        # Fires on any filesystem event in the watched tree; we only react to
        # newly created, non-directory paths ending in `.nzb` (case-insensitive).
        'change': (type, path, stat) ->
            if type is 'create' and not stat.isDirectory()
                if path.match /\.nzb$/i
                    # Use the basename of the path as the job name in log output.
                    name = path.split('/').pop()
                    # NOTE(review): the `err` from readFile is never checked — a
                    # failed read would pass `undefined` on to `grab`; confirm.
                    fs.readFile path, 'utf-8', (err, nzb) ->
                        log.inf 'Job ' + name.bold + ' queued'
                        # Grab them.
                        grabber.grab nzb, (err, filename, chunk, done) ->
                            return log.bad err if err

                            # And write them (they arrive in order), appending
                            # each decoded chunk to its target file in `downloads/`.
                            fs.appendFile __dirname + '/downloads/' + filename, chunk, (err) ->
                                return log.bad err if err
                                log.inf 'Job ' + name.bold + ' done \u2713' if done
3 changes: 3 additions & 0 deletions index.js
@@ -0,0 +1,3 @@
#!/usr/bin/env node
// Register the CoffeeScript require hook so `.coffee` sources can be
// required directly from plain Node.
require('coffee-script');
// Re-export the grabber class as this package's public entry point.
module.exports = require('./src/grabber.coffee');
23 changes: 23 additions & 0 deletions package.json
@@ -0,0 +1,23 @@
{
"name": "nzb-grabber-js",
"version": "0.1.0",
"dependencies": {
"coffee-script": "1.6.2",
"async": "~0.2.9",
"buffer-crc32": "~0.2.1",
"buffertools": "1.1.1",
"xml2js": "~0.2.7",
"yenc": "~0.9.1",
"node-logging": "0.1.6",
"watchr": "2.4.3"
},
"devDependencies": {
"mocha": "1.8.1"
},
"scripts": {
"test": "./node_modules/.bin/mocha --compilers coffee:coffee-script --reporter spec --ui exports --timeout 5000 --slow 2000 --bail"
},
"author": "Radek <dev@radekstepan.com>",
"readmeFilename": "README.md",
"main": "./index.js"
}
121 changes: 121 additions & 0 deletions src/grabber.coffee
@@ -0,0 +1,121 @@
#!/usr/bin/env coffee
async = require 'async'
log = require 'node-logging'
NNTPWorker = require './nntp.coffee'
nzb = require './nzb.coffee'
yenc = require './yenc.coffee'
buffertools = require 'buffertools'

# Downloads the binary posts referenced by an NZB file over a pool of
# concurrent NNTP connections, yielding decoded chunks in file order.
class NzbGrabber

    # NOTE(review): this prototype property is never assigned per instance —
    # the constructor builds its own local `workers` list — so it appears to
    # be dead; kept for interface compatibility.
    workers: []

    # Master queue of chunks to grab; created per instance in the constructor.
    queue: null

    # Save the opts and spin up the connection pool.
    #
    # @param {Object} opts Connection options: `host`, `port`, `user`, `pass`
    #   and `conn` (the number of concurrent NNTP connections, required).
    # @throws {Error} When `opts.conn` is missing or zero.
    constructor: (@opts) ->
        # Throw a real Error (not a bare string) so callers get a stack trace.
        throw new Error 'No connections' if !@opts.conn

        # Create workers of n concurrent connections.
        workers = ( new NNTPWorker(@opts) for i in [0...@opts.conn] )

        # Create an async queue running a chunk job. Its concurrency equals
        # the worker count, so a non-BUSY worker should always exist below.
        @queue = async.queue ({ group, article }, cb) ->
            # Get the first ready worker.
            for worker in workers
                unless worker.state is 'BUSY'
                    # Over to you monkey.
                    return worker.getArticle group, article, cb

        # Concurrency of n.
        , @opts.conn

    ###
    Grab files specified in the input NZB package.
    @param {String} NZB file as a string from `fs.readFile`
    @param {Function} A callback called on an error or on a chunk processed.
      @param {String} Error
      @param {String} Filename as specified in the article received.
      @param {Buffer} A decoded chunk for you to deal with.
      @param {Boolean} Are we finished with the package?
    @return {void}
    ###
    grab: (input, cb) ->
        self = @

        # Parse the input file.
        async.waterfall [ (cb) ->
            nzb input, cb

        # For each file to download (series).
        , (files, cb) ->
            # How many files to do still.
            todo = files.length

            files.forEach (file) ->
                # This will be our filename (from the article).
                filename = null
                # Cache of processed chunks (to preserve order and when we do
                # not have a filename yet).
                cache = []
                # How many chunks to do still.
                chunks = file.length

                # For each chunk.
                file.forEach (chunk, i) ->
                    # Schedule to download this chunk in parallel. Need to
                    # return them in order so as to easily append to files.
                    self.queue.push chunk, (err, code, buffer) ->
                        log.inf 'Article ' + chunk.article.bold + ' received'

                        # If not found...
                        if err or not buffer
                            log.err chunk.subject.bold + ' (' + (i + 1) + '/' + file.length + ') missing'
                            # Create a buffer of size and fill with zeroes so
                            # par2 repair can deal with the gap downstream.
                            # NOTE(review): `new Buffer` is deprecated in
                            # modern Node; `Buffer.alloc` zero-fills — confirm
                            # before upgrading the runtime.
                            decoded = (new Buffer(chunk.bytes)).fill 0
                        else
                            # yEnc decode (sync, no complaints, errors fixed by par2).
                            [ filename, decoded ] = yenc buffer

                        # Push to cache at a position either way.
                        cache[i] = decoded
                        chunks -= 1 # one less to do

                        # Cache it if we do not have a filename yet.
                        unless filename
                            # Completely useless file? No article ever yielded
                            # a filename and nothing is left to try.
                            if !chunks
                                cache = null
                                cb 'Useless file ' + chunk.subject
                        else
                            # Call back with an unbroken sequence of chunks from the start.
                            j = 0
                            while j <= file.length
                                # Get the item.
                                item = cache[j]
                                # Return if no chunks to return (break in the chain).
                                return unless item
                                # Move the index.
                                j += 1
                                # Continue if something was here (still unbroken chain).
                                continue if typeof(item) is 'boolean'

                                # Logging of the chunk returned.
                                # Which chunk is this?
                                seg = ''
                                if file.length isnt 1 then seg = ' (' + j + '/' + file.length + ')'
                                # Is the size unexpected?
                                if file[j - 1].bytes isnt item.length
                                    log.err 'File ' + filename.bold + seg + ' done ' + item.length + ' bytes, expected ' + file[j - 1].bytes + ' bytes'
                                else
                                    log.inf 'File ' + filename.bold + seg + ' done ' + item.length + ' bytes'

                                # Done = no more chunks cached and no more files.
                                cb null, filename, item, !chunks and !(todo -= 1)
                                # Say this part was already returned.
                                cache[j - 1] = yes

        ], cb

module.exports = NzbGrabber

0 comments on commit 8fab22b

Please sign in to comment.