Add synchronous API/tests/docs, general cleanup
This makes the API simpler/lighter to use in non-node/browserify
environments, or when dealing with synchronous-only APIs.
hughsk committed Nov 7, 2014
1 parent 86fe741 commit 1e1196d
Showing 12 changed files with 1,021 additions and 71 deletions.
3 changes: 3 additions & 0 deletions .gitignore
@@ -0,0 +1,3 @@
node_modules
npm-debug.log
.DS_Store
5 changes: 4 additions & 1 deletion .npmignore
@@ -1 +1,4 @@
node_modules/
node_modules
npm-debug.log
.DS_Store
test/
10 changes: 10 additions & 0 deletions LICENSE.md
@@ -0,0 +1,10 @@
The MIT License (MIT)
=====================

Copyright (c) 2014 [Chris Dickinson](http://github.com/chrisdickinson)

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
56 changes: 31 additions & 25 deletions README.md
@@ -1,36 +1,42 @@
# glsl-tokenizer

a [readable / writable stream](https://github.com/dominictarr/stream-spec#through-sync-writable-and-readable-aka-filter) that maps input to glsl tokens, if possible.

```javascript

var fs = require('fs')
, tokens = require('glsl-tokenizer')()

fs.createReadStream('some.glsl')
.pipe(tokens)
.on('data', function(token) {
console.log(token.data, token.position, token.type)
})

Maps GLSL string data into GLSL tokens, either synchronously or using a
streaming API.

``` javascript
var tokenString = require('glsl-tokenizer/string')
var tokenStream = require('glsl-tokenizer/stream')
var fs = require('fs')

// Synchronously:
var tokens = tokenString(fs.readFileSync('some.glsl'))

// Streaming API:
fs.createReadStream('some.glsl')
.pipe(tokenStream())
.on('data', function(token) {
console.log(token.data, token.position, token.type)
})
```
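Because the string tokenizer has no stream dependencies, it also works with an inline shader string, e.g. inside a browserify bundle. A minimal sketch (the shader source below is made up for the example):

```javascript
var tokenString = require('glsl-tokenizer/string')

// A tiny, invented fragment shader used purely for illustration.
var src = [
  'precision mediump float;'
, 'void main() {'
, '  gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);'
, '}'
].join('\n')

var tokens = tokenString(src)
console.log(tokens.length + ' tokens')
```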

# API

### tokens = require('glsl-tokenizer')()
## tokens = require('glsl-tokenizer/string')(src)

return a tokenizer stream instance.
Returns an array of `tokens` given the GLSL source string `src`

emits 'data' events whenever a token is parsed with a token object as output.
## stream = require('glsl-tokenizer/stream')()

# tokens
Emits 'data' events whenever a token is parsed with a token object as output.

```javascript
# Tokens

```javascript
{ 'type': TOKEN_TYPE
, 'data': "string of constituent data"
, 'position': integer position within the data stream }

, 'position': integer position within the GLSL source
, 'line': line number within the GLSL source
, 'column': column number within the GLSL source }
```
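For instance, a token produced for the `precision` keyword at the very start of a shader might look roughly like this (the field values are illustrative, not output copied from the tokenizer):

```javascript
{ type: 'keyword'
, data: 'precision'
, position: 0
, line: 1
, column: 0 }
```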

The available token types are:
@@ -39,14 +45,14 @@ The available token types are:
* `line-comment`: `// ... \n`
* `preprocessor`: `# ... \n`
* `operator`: Any operator. If it looks like punctuation, it's an operator.
* `integer`
* `float`: Optionally suffixed with `f`
* `ident`: User defined identifier.
* `builtin`: Builtin function
* `keyword`
* `builtin`: Builtin function.
* `eof`: Emitted on `end`; data will === `'(eof)'`.
* `integer`
* `whitespace`
* `eof`: emitted on `end`; data will === `'(eof)'`.
* `keyword`
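
As a small usage sketch building on the synchronous API above (not part of the module's own examples), these types make it easy to filter a shader's tokens, e.g. to collect every user-defined identifier:

```javascript
var tokenString = require('glsl-tokenizer/string')

// Returns the `data` of every user-defined identifier in a GLSL source string.
function identifiers(src) {
  return tokenString(src)
    .filter(function(token) { return token.type === 'ident' })
    .map(function(token) { return token.data })
}
```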

# License

MIT
MIT, see [LICENSE.md](LICENSE.md) for further information.
39 changes: 20 additions & 19 deletions index.js
@@ -1,14 +1,12 @@
module.exports = tokenize

var through = require('through')

var literals = require('./lib/literals')
, operators = require('./lib/operators')
, builtins = require('./lib/builtins')

var NORMAL = 999 // <-- never emitted
, TOKEN = 9999 // <-- never emitted
, BLOCK_COMMENT = 0
, TOKEN = 9999 // <-- never emitted
, BLOCK_COMMENT = 0
, LINE_COMMENT = 1
, PREPROCESSOR = 2
, OPERATOR = 3
@@ -18,7 +16,7 @@ var NORMAL = 999 // <-- never emitted
, BUILTIN = 7
, KEYWORD = 8
, WHITESPACE = 9
, EOF = 10
, EOF = 10
, HEX = 11

var map = [
@@ -37,14 +35,13 @@ var map = [
]

function tokenize() {
var stream = through(write, end)

var i = 0
, total = 0
, mode = NORMAL
, mode = NORMAL
, c
, last
, content = []
, tokens = []
, token_idx = 0
, token_offs = 0
, line = 1
@@ -55,11 +52,15 @@ function tokenize() {
, input = ''
, len

return stream
return function(data) {
tokens = []
if (data !== null) return write(data)
return end()
}

function token(data) {
if(data.length) {
stream.queue({
if (data.length) {
tokens.push({
type: map[mode]
, data: data
, position: start
@@ -71,7 +72,7 @@

function write(chunk) {
i = 0
input += chunk.toString()
input += chunk
len = input.length

var last
@@ -102,7 +103,8 @@

total += i
input = input.slice(i)
}
return tokens
}

function end(chunk) {
if(content.length) {
@@ -111,8 +113,7 @@

mode = EOF
token('(eof)')

stream.queue(null)
return tokens
}

function normal() {
@@ -209,7 +210,7 @@

if(c === '.' && content.length) {
while(determine_operator(content));

mode = FLOAT
return i
}
@@ -239,11 +240,11 @@

do {
idx = operators.indexOf(buf.slice(0, buf.length + j).join(''))
if(idx === -1) {
if(idx === -1) {
j -= 1
continue
}

token(operators[idx])

start += operators[idx].length
@@ -261,7 +262,7 @@

content.push(c)
last = c
return i + 1
return i + 1
}

function integer() {
10 changes: 7 additions & 3 deletions package.json
@@ -2,7 +2,7 @@
"name": "glsl-tokenizer",
"version": "1.1.1",
"description": "r/w stream of glsl tokens",
"main": "index.js",
"main": "stream.js",
"directories": {
"test": "test"
},
@@ -12,7 +12,7 @@
"Chris Dickinson <chris@neversaw.us> (http://neversaw.us)"
],
"scripts": {
"test": "node test/index.js"
"test": "node test/index.js | tap-spec"
},
"repository": {
"type": "git",
@@ -26,6 +26,10 @@
"author": "Chris Dickinson <chris@neversaw.us>",
"license": "MIT",
"dependencies": {
"through": "X.X.X"
"through2": "^0.6.3"
},
"devDependencies": {
"tap-spec": "^1.0.1",
"tape": "^3.0.2"
}
}
27 changes: 27 additions & 0 deletions stream.js
@@ -0,0 +1,27 @@
var through = require('through2').obj
var tokenize = require('./index')

module.exports = createStream

function createStream() {
var generator = tokenize()

return through(write, end)

function write(chunk, _, next) {
flush(this, chunk)
next()
}

function end() {
flush(this, null)
this.push(null)
}

function flush(stream, chunk) {
var tokens = generator(chunk)
for (var i = 0; i < tokens.length; i++) {
stream.push(tokens[i])
}
}
}
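A quick usage sketch for the new stream entry point (the filename `shader.glsl` is assumed for the example):

```javascript
var createStream = require('glsl-tokenizer/stream')
var fs = require('fs')

var count = 0

fs.createReadStream('shader.glsl')
  .pipe(createStream())
  .on('data', function (token) { count += 1 })
  .on('end', function () { console.log('saw ' + count + ' tokens') })
```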
13 changes: 13 additions & 0 deletions string.js
@@ -0,0 +1,13 @@
var tokenize = require('./index')

module.exports = tokenizeString

function tokenizeString(str) {
var generator = tokenize()
var tokens = []

tokens = tokens.concat(generator(str))
tokens = tokens.concat(generator(null))

return tokens
}
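For reference, the generator returned by `./index` can also be driven directly: each call returns the tokens completed by that chunk, and passing `null` flushes whatever is pending plus the trailing `(eof)` token. A rough sketch (the relative require mirrors string.js and assumes you are inside the package; the shader snippet is invented):

```javascript
var tokenize = require('./index')

var generator = tokenize()
var head = generator('void main() { gl_FragColor')  // tokens completed by this chunk
var tail = generator(null)                          // pending tokens plus '(eof)'

console.log(head.length + tail.length + ' tokens in total')
```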
8 changes: 0 additions & 8 deletions test.js

This file was deleted.
