
Converted to CoffeeScript.

bigeasy committed Sep 20, 2010
1 parent 6540d94 commit 3e9f2885d7258ea19831b023e9d8cfee6fcc963a
Showing with 374 additions and 20 deletions.
  1. +65 −0 Cakefile
  2. 0 lib/{__internal → }/ieee754.js
  3. +6 −0 package.json
  4. +22 −0 packet.idl
  5. +204 −0 src/packet__.coffee
  6. +57 −0 src/pattern.coffee
  7. +17 −17 vows/packet-test.js
  8. +3 −3 vows/pattern-test.js
Cakefile
@@ -0,0 +1,65 @@
+fs = require("fs")
+{exec, spawn} = require("child_process")
+path = require("path")
+idl = require("idl")
+
+compile = (sources) ->
+  coffee = spawn "coffee", "-c -o lib".split(/\s/).concat(sources)
+  coffee.stderr.on "data", (buffer) -> console.log buffer.toString()
+  coffee.on "exit", (status) -> process.exit(1) if status != 0
+
+currentBranch = (callback) ->
+  branches = ""
+  git = spawn "git", [ "branch" ]
+  git.stdout.on "data", (buffer) -> branches += buffer.toString()
+  git.stderr.on "data", (buffer) -> console.log buffer.toString()
+  git.on "exit", (status) ->
+    process.exit(1) if status != 0
+    branch = /\*\s+(.*)/.exec(branches)[1]
+    callback(branch)
+
+task "gitignore", "create a .gitignore for node-packet based on git branch", ->
+  currentBranch (branch) ->
+    gitignore = '''
+      .gitignore
+      lib-cov
+      .DS_Store
+      **/.DS_Store
+
+      '''
+
+    if branch is "gh-pages"
+      gitignore += '''
+        lib/packet.js
+        lib/pattern.js
+        '''
+    else if branch is "master"
+      gitignore += '''
+        documentation
+        index.html
+        lib/packet.js
+        lib/pattern.js
+        '''
+    fs.writeFile(".gitignore", gitignore)
+
+task "index", "rebuild the Node IDL landing page.", ->
+  pkg = JSON.parse fs.readFileSync "package.json", "utf8"
+  console.log(pkg)
+  idl.generate "#{pkg.name}.idl", "index.html"
+
+task "docco", "rebuild the CoffeeScript docco documentation.", ->
+  exec "rm -rf documentation && docco src/*.coffee && cp -rf docs documentation && rm -r docs", (err) ->
+    throw err if err
+
+task "compile", "compile the CoffeeScript into JavaScript", ->
+  path.exists "./lib", (exists) ->
+    fs.mkdirSync("./lib", parseInt("755", 8)) if not exists
+    sources = fs.readdirSync("src")
+    sources = ("src/" + source for source in sources when source.match(/\.coffee$/))
+    compile sources
+
+task "clean", "remove the generated JavaScript and documentation.", ->
+  currentBranch (branch) ->
+    if branch is "master"
+      exec "rm -rf documentation lib lib/packet.js lib/pattern.js", (err) ->
+        throw err if err
lib/{__internal → }/ieee754.js
File renamed without changes.
package.json
@@ -0,0 +1,6 @@
+{ "name": "packet"
+, "version": "0.0.1"
+, "author": "Alan Gutierrez"
+, "directories": { "lib" : "./lib" }
+, "main": "./lib/packet"
+}
packet.idl
@@ -0,0 +1,22 @@
+# Node Packet
+
+An evented binary packet and structure parser for Node.js.
+
+## Synopsis
+
+Use tar as an example?
+
+## Installing
+
+## Reference
+
+namespace: packet
+
+Node Packet exports the packet namespace, which provides the {{Structure}},
+{{Parser}}, and {{Serializer}} classes.
+
+class: Structure
+
+class: Parser
+
+class: Serializer
src/packet__.coffee
@@ -0,0 +1,204 @@
+parsePattern = require('./pattern').parse
+ieee754 = require('./ieee754')
+
+shiftify = (arrayish, start, end) ->
+  a = []
+  while start < end
+    a.push arrayish[start++]
+  return a
+
+hex = (bytes) ->
+  h = bytes.map (b) ->
+    if b < 0x10
+      "0" + b.toString(16)
+    else
+      b.toString(16)
+  h.join("")
+
+pack = (pattern, value) ->
+  if pattern.type == "f"
+    if pattern.bits == 32
+      ieee754.toIEEE754Single value
+    else
+      ieee754.toIEEE754Double value
+  else
+    value
+
+unpack = (bytes, pattern) ->
+  if pattern.type == "h"
+    return hex bytes.reverse()
+  else if pattern.type == "f"
+    if pattern.bits == 32
+      ieee754.fromIEEE754Single(bytes)
+    else
+      ieee754.fromIEEE754Double(bytes)
+  else if pattern.signed
+    value = 0
+    if (bytes[bytes.length - 1] & 0x80) == 0x80
+      top = bytes.length - 1
+      for i in [0...top]
+        value += (~bytes[i] & 0xff) * Math.pow(256, i)
+      # ~1 == -2.
+      # To get the two's complement as a positive value you use ~1 & 0xff == 254.
+      value += (~(bytes[top] & 0x7f) & 0xff & 0x7f) * Math.pow(256, top)
+      value += 1
+      value *= -1
+    return value
+
+instance = (packets) ->
+  machine = null
+  user = shiftify arguments, 1, arguments.length
+
+  mechanize = (definition, index, value) ->
+    reading = arguments.length == 2
+    pattern = definition.pattern[index]
+    little = pattern.endianness == 'l'
+    bytes = pattern.bytes
+    if pattern.arrayed
+      if reading
+        value = []
+      else
+        value = pack pattern, value
+    else if reading
+      value = 0
+    machine =
+      value: value
+      unpack: if pattern.arrayed then unpack else noop
+      definition: definition
+      index: index
+      offset: if little then 0 else bytes - 1
+      increment: if little then 1 else -1
+      terminal: if little then bytes else -1
+    return machine
+
+  clone = ->
+    args = shiftify arguments, 0, arguments.length
+    args.unshift Object.create(packets)
+    return instance.apply null, args
+
+  noop = (value) -> value
+
+  # Like packet, but no ability to define new named patterns.
+  next = ->
+    shiftable = shiftify arguments, 0, arguments.length
+    nameOrPattern = shiftable.shift()
+    if shiftable.length == 0
+      machine = mechanize packets[nameOrPattern], 0
+    else
+      definition =
+        pattern: packets[nameOrPattern] && packets[nameOrPattern].pattern || parsePattern(nameOrPattern)
+        callback: shiftable.shift()
+      machine = mechanize(definition, 0)
+    packet.apply this, arguments
+
+  packet = ->
+    shiftable = shiftify arguments, 0, arguments.length
+    nameOrPattern = shiftable.shift()
+    if shiftable.length == 0
+      machine = mechanize(packets[nameOrPattern], 0)
+    else
+      patternOrCallback = shiftable.shift()
+      if typeof(patternOrCallback) == 'function'
+        definition =
+          pattern: parsePattern(nameOrPattern)
+          callback: patternOrCallback
+        machine = mechanize definition, 0
+      else
+        packets[nameOrPattern] =
+          pattern: parsePattern(patternOrCallback)
+          callback: shiftable.shift() || noop
+
+  outgoing = null
+  send = () ->
+    shiftable = shiftify arguments, 0, arguments.length
+    nameOrPattern = shiftable.shift()
+    if typeof shiftable[shiftable.length - 1] == 'function'
+      definition =
+        pattern: parsePattern(nameOrPattern)
+        callback: shiftable.pop()
+      machine = mechanize(definition, 0, shiftable[0])
+    else
+      machine = mechanize(packets[nameOrPattern], 0, shiftable[0])
+    outgoing = shiftable
+
+  write = (buffer, offset, length) ->
+    offset or= 0
+    length or= buffer.length
+    while machine and offset < length
+      pattern = machine.definition.pattern[machine.index]
+      if pattern.arrayed
+        loop
+          buffer[offset] = machine.value[machine.offset]
+          machine.offset += machine.increment
+          bytesWritten++
+          offset++
+          break if machine.offset is machine.terminal
+          return true if offset is length
+      else
+        loop
+          buffer[offset] = Math.floor(machine.value / Math.pow(256, machine.offset)) & 0xff
+          machine.offset += machine.increment
+          bytesWritten++
+          offset++
+          break if machine.offset is machine.terminal
+          return true if offset is length
+      if ++machine.index is machine.definition.pattern.length
+        machine.definition.callback.apply null, [ engine ]
+        machine = null
+      else
+        machine = mechanize machine.definition, machine.index, outgoing[machine.index]
+    true
+
+  fields = []
+  bytesRead = 0
+  bytesWritten = 0
+  reset = () ->
+    bytesRead = 0
+    bytesWritten = 0
+    machine = null
+
+  engine =
+    next: next
+    getBytesRead: ->
+      bytesRead
+    getBytesWritten: -> bytesWritten
+
+  read = (buffer, offset, length) ->
+    offset or= 0
+    length or= buffer.length
+    b
+    while machine != null and offset < length
+      if machine.definition.pattern[machine.index].arrayed
+        loop
+          b = buffer[offset]
+          bytesRead++
+          offset++
+          machine.value[machine.offset] = b
+          machine.offset += machine.increment
+          break if machine.offset is machine.terminal
+          return true if offset is length
+      else
+        loop
+          b = buffer[offset]
+          bytesRead++
+          offset++
+          machine.value += Math.pow(256, machine.offset) * b
+          machine.offset += machine.increment
+          break if machine.offset == machine.terminal
+          return true if offset == length
+      fields.push(machine.unpack(machine.value, machine.definition.pattern[machine.index]))
+      if ++machine.index == machine.definition.pattern.length
+        fields.push(engine)
+        for p in user
+          fields.push(p)
+        machine.definition.callback.apply null, fields
+        machine = null
+        fields.length = 0
+      else
+        machine = mechanize machine.definition, machine.index
+    true
+
+  { clone, packet, reset, send, write, read }
+
+module.exports.create = () ->
+  instance({})
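To make the engine above concrete, here is a minimal usage sketch. It assumes the compiled module is reachable as ./lib/packet, the path package.json's main points at; the pattern string and byte values are invented for illustration.

    # Hypothetical example: parse a big-endian 16-bit length followed by a
    # little-endian 32-bit identifier from an array of bytes.
    {create} = require "./lib/packet"

    parser = create()
    parser.packet "b16l32", (length, id, engine) ->
      console.log length, id, engine.getBytesRead()

    # Feed bytes; the callback fires once both fields have been read.
    parser.read [ 0x00, 0x05, 0x04, 0x03, 0x02, 0x01 ]

The callback receives one argument per parsed field followed by the engine itself, which matches the way read pushes the fields and then the engine onto the argument list before applying the callback.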
src/pattern.coffee
@@ -0,0 +1,57 @@
+# This module is separated for isolation during testing. It is meant to be
+# exposed as part of the public API.
+
+# Parse the `pattern` and add it to the list of `fields`.
+#
+# The `pattern` points to the first character of a field in the packet pattern.
+# Each recursive call to `field` is passed the remaining unmatched part. The
+# `index` is the current position in the string, used to report errors, but not
+# to index into the string.
+field = (fields, pattern, index) ->
+
+  # Match a packet pattern.
+  match = /^(-?)([snbl])(\d+)([fha]?)(.*)$/.exec(pattern)
+  if !match
+    throw new Error "invalid pattern at #{index}"
+
+  # Convert the match into an object.
+  f =
+    signed: !!match[1] || match[4] == "f"
+    endianness: if match[2] == 'n' then 'b' else match[2]
+    bits: parseInt(match[3], 10)
+    type: match[4] || 'n'
+
+  # The remainder of the pattern, if any.
+  rest = match[5]
+
+  # Move the character position up to the bit count.
+  index++ if f.signed
+  index += f.endianness.length
+
+  # Check for a valid bit count.
+  if f.bits == 0 or f.bits % 8
+    throw new Error("bits must be divisible by 8 at " + index)
+  if f.type == "f" and !(f.bits == 32 || f.bits == 64)
+    throw Error("floats can only be 32 or 64 bits at " + index)
+
+  # Move the character position up to the rest of the pattern.
+  index += match[3].length
+
+  # Set the implicit fields.
+  if f.bits > 64 && f.type == "n"
+    f.type = "a"
+  f.bytes = f.bits / 8
+  f.arrayed = f.signed || f.bytes > 8 || "ha".indexOf(f.type) != -1
+
+  # Record the new field pattern object.
+  fields.push(f)
+
+  # If we have more characters to parse, recurse.
+  if rest.length != 0
+    field fields, rest, index
+  else
+    fields
+
+# Export the pattern method.
+module.exports.parse = (pattern) ->
+  return field [], pattern, 0
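As a quick illustration of the parser above (the pattern string here is an arbitrary example; the module compiles to ./lib/pattern.js per the Cakefile):

    # Two fields: a little-endian 16-bit number and a big-endian 64-bit value
    # that will be gathered into an array and rendered as a hex string.
    {parse} = require "./lib/pattern"

    fields = parse "l16b64h"
    # fields[0] is { signed: false, endianness: 'l', bits: 16, type: 'n', bytes: 2, arrayed: false }
    # fields[1] is { signed: false, endianness: 'b', bits: 64, type: 'h', bytes: 8, arrayed: true }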
vows/packet-test.js, vows/pattern-test.js (diffs not shown)
