# The main entry functions for
# [tokenizing](#lexer), [parsing](#grammar), and [compiling](#ast)
# Coco source into JavaScript.
# Load the lexer that turns Coco source text into a token stream.
lexer = require \./lexer
# Override Jison's default lexer, so that it can accept
# the generic stream of tokens our lexer produces.
{parser} = require \./parser
parser import
  # AST node constructors used by the grammar's semantic actions.
  yy : require \./ast
  lexer :
    # Advance the cursor and return the next token's tag, exposing its
    # text/line via `@yytext`/`@yylineno` as Jison expects.
    # Past the end of input, falls back to `['']` (empty tag).
    lex : -> [tag, @yytext, @yylineno] = @tokens[++@pos] or ['']; tag
    # Receive the pre-lexed token array (`it`) and reset the cursor.
    setInput : -> @pos = -1; @tokens = it
    # Stub for Jison's error reporting; we have no raw source text here.
    upcomingInput : -> ''
exports import
  VERSION: \0.8.2
  # Compiles a string of Coco code to JavaScript.
  compile: (code, options) ->
    try parser.parse(lexer.lex code)compileRoot options catch
      # `e` is Coco's implicit catch variable; `that` holds the truthy
      # `options.filename` tested by the postfix `if` below.
      e.message += "\nat #that" if options?filename
      throw e
  # Parses a string or tokens of Coco code,
  # returning the [AST](#ast).
  ast: -> parser.parse if typeof it is \string then lexer.lex it else it
  # Tokenizes a string of Coco code, returning the array of tokens.
  tokens: lexer.lex
  # Same as `tokens`, except that this skips rewriting.
  lex: -> lexer.lex it, {+raw}
  # Runs Coco code directly.
  run: (code, options) -> do Function exports.compile code, {...options, +bare}
# Expose the token rewriter on `tokens` via object destructuring
# (equivalent to `exports.tokens.rewrite = lexer.rewrite`).
exports.tokens{rewrite} = lexer
# Export AST constructors.
exports.ast import all parser.yy
# `require.extensions` is present under node.js; mix in the
# file/CLI helpers there.
if require.extensions
  # -> [node](#node)
  require(\./node) exports
  # Attach `require` for debugging.
  exports import {require}
# Support Gecko JS Module.
@EXPORTED_SYMBOLS = [\Coco] if ''+this is '[object BackstagePass]'