This repository has been archived by the owner on Nov 7, 2019. It is now read-only.

refactor: rewrite to TypeScript
zkochan committed Jun 2, 2016
1 parent 7140647 commit 9e176cd
Showing 95 changed files with 410 additions and 256 deletions.
1 change: 1 addition & 0 deletions .gitignore
@@ -33,3 +33,4 @@ node_modules
.node_repl_history

lib
lib-esnext
1 change: 1 addition & 0 deletions README.md
@@ -68,6 +68,7 @@ const mosCore = require('mos-core')
- [mos](https://github.com/mosjs/mos): A pluggable module that injects content into your markdown files via hidden JavaScript snippets
- [mos-plugin-readme](https://github.com/mosjs/mos-plugin-readme): A mos plugin for generating README
- [semantic-release](https://github.com/semantic-release/semantic-release): automated semver compliant package publishing
- [typescript](https://github.com/Microsoft/TypeScript): TypeScript is a language for application scale JavaScript development
- [validate-commit-msg](https://github.com/kentcdodds/validate-commit-msg): Script to validate a commit message follows the conventional changelog standard

<!--/@-->
12 changes: 7 additions & 5 deletions package.json
@@ -3,23 +3,24 @@
"version": "0.0.0-placeholder",
"description": "Markdown parser",
"files": [
"lib/",
"lib-esnext/",
"src/"
],
"main": "lib/index.js",
"jsnext:main": "src/index.js",
"jsnext:main": "lib-esnext/index.js",
"scripts": {
"test:code": "mocha --compilers js:babel-register",
"test:code": "tsc && mocha --compilers js:babel-register",
"test": "npm run test:code && npm run lint && mos test",
"lint": "eslint {src,test}/**/*.js",
"lint": "eslint test/",
"commit": "git-cz",
"coverage": "istanbul cover -x \"**/*.spec.js\" node_modules/mocha/bin/_mocha -- --compilers js:babel-register -R spec",
"precoveralls": "istanbul cover -x \"**/*.spec.js\" node_modules/mocha/bin/_mocha --report lcovonly -- --compilers js:babel-register -R spec && npm i coveralls@2",
"coveralls": "cat ./coverage/lcov.info | ./node_modules/coveralls/bin/coveralls.js",
"postcoveralls": "rm -rf ./coverage",
"md": "mos",
"semantic-release": "semantic-release pre && npm publish && semantic-release post",
"transpile": "babel src --out-dir lib --copy-files",
"tsc": "tsc",
"transpile": "tsc && babel lib-esnext --out-dir lib --copy-files",
"prepublish": "npm run transpile"
},
"repository": {
@@ -85,6 +86,7 @@
"mos": "^1.3.0",
"mos-plugin-readme": "^1.0.2",
"semantic-release": "^4.3.5",
"typescript": "^1.8.10",
"validate-commit-msg": "^2.6.1"
},
"engines": {
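The reworked scripts chain the two compilers: `tsc` emits the `src/` TypeScript into `lib-esnext/` (the new `jsnext:main` entry), and Babel then transpiles `lib-esnext/` into the CommonJS build in `lib/` that `main` points at. The compiler settings themselves are not part of this excerpt, so the `tsconfig.json` below is only a sketch of options consistent with that pipeline, not the file from the commit.

```json
{
  // Hypothetical tsconfig.json matching the scripts above; the real
  // configuration in the commit may differ.
  "compilerOptions": {
    "target": "es6",        // leave ES2015 syntax for Babel to handle
    "module": "es6",        // keep ES modules for the jsnext:main build
    "outDir": "lib-esnext", // matches the new "files" and "jsnext:main" entries
    "rootDir": "src",
    "allowJs": true         // not-yet-converted .js sources pass through
  },
  "exclude": ["node_modules", "lib", "lib-esnext"]
}
```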
File renamed without changes.
2 changes: 1 addition & 1 deletion src/escape.json → src/escape.ts
@@ -1,4 +1,4 @@
{
export default {
"default": [
"\\",
"`",
File renamed without changes.
@@ -1,4 +1,4 @@
[
export default [
"article",
"header",
"aside",
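The pure data files follow one pattern: `src/escape.json` and `src/block-elements.json` become `.ts` modules whose only change is an `export default` in front of the literal. TypeScript 1.8 cannot resolve `.json` imports on its own, so turning the data into a module lets `tsc` treat it like any other source file. A minimal sketch of the result (abbreviated; the real file lists every block-level element name):

```ts
// src/block-elements.ts -- the former JSON array, now a default export
export default [
  'article',
  'header',
  'aside',
]
```

Consumers then drop the `.json` extension, as the `eat-html-*` hunks further down show: `import blockElements from '../block-elements'`.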
File renamed without changes.
@@ -11,7 +11,7 @@ import {normalizeIdentifier as normalize} from '../../utilities'
* @return {boolean} - Whether `character` can be inside
* an enclosed URI.
*/
function isEnclosedURLCharacter (character) {
const isEnclosedURLCharacter: any = function (character) {
return character !== '>' &&
character !== '[' &&
character !== ']'
@@ -47,7 +47,7 @@ function isUnclosedURLCharacter (character) {
* @param {boolean?} [silent] - Whether this is a dry run.
* @return {Node?|boolean} - `definition` node.
*/
export default function tokenizeDefinition (parser, value, silent) {
const tokenizeDefinition: any = function (parser, value, silent) {
const commonmark = parser.options.commonmark
let index = 0
const length = value.length
@@ -288,3 +288,5 @@ export default function tokenizeDefinition (parser, value, silent) {

tokenizeDefinition.onlyAtTop = true
tokenizeDefinition.notInBlockquote = true

export default tokenizeDefinition
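This hunk shows the conversion pattern applied to every tokenizer that carries extra flags (`onlyAtTop`, `notInBlockquote`, and later `locator`): the `export default function` becomes a `const …: any = function`, the flags are attached afterwards, and the identifier is exported at the end. The `any` is needed because TypeScript rejects assignments to properties that are not part of a function's declared type. A more explicit alternative, not used in the commit, would describe that shape with an interface; a hedged sketch with placeholder parameter types:

```ts
// Hypothetical typed version of the pattern above. The parser and node
// types are placeholders; the project's real types are not shown here.
interface Tokenizer {
  (parser: any, value: string, silent?: boolean): any
  onlyAtTop?: boolean
  notInBlockquote?: boolean
  locator?: (parser: any, value: string, fromIndex: number) => number
}

const tokenizeDefinition: Tokenizer = function (parser, value, silent) {
  // ...the original tokenizing logic goes here unchanged
  return false
}

tokenizeDefinition.onlyAtTop = true
tokenizeDefinition.notInBlockquote = true

export default tokenizeDefinition
```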
File renamed without changes.
@@ -16,7 +16,7 @@ const EXPRESSION_INITIAL_TAB = /^( {4}|\t)?/gm
* @param {boolean?} [silent] - Whether this is a dry run.
* @return {Node?|boolean} - `footnoteDefinition` node.
*/
export default function tokenizeFootnoteDefinition (parser, value, silent) {
const tokenizeFootnoteDefinition: any = function (parser, value, silent) {
let index
let length
let subvalue
@@ -172,3 +172,5 @@ export default function tokenizeFootnoteDefinition (parser, value, silent) {

tokenizeFootnoteDefinition.onlyAtTop = true
tokenizeFootnoteDefinition.notInBlockquote = true

export default tokenizeFootnoteDefinition
File renamed without changes.
File renamed without changes.
@@ -5,10 +5,10 @@ const MAX_LINE_HEADING_INDENT = 3
* headers, mapping to their corresponding depth.
*/

const SETEXT_MARKERS = {}

SETEXT_MARKERS['='] = 1
SETEXT_MARKERS['-'] = 2
const SETEXT_MARKERS = {
'=': 1,
'-': 2,
}

/**
* Tokenise a Setext-style heading.
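The marker maps that used to be built with one property assignment per key are collapsed into object literals, which is shorter and gives the compiler a single expression to infer from. If stricter typing were wanted, an index signature would make the intent explicit; a hypothetical variant, not part of the commit:

```ts
// Illustrative index signature for the Setext marker map.
const SETEXT_MARKERS: { [marker: string]: number } = {
  '=': 1,
  '-': 2,
}
```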
@@ -10,30 +10,30 @@ import {TAB_SIZE, RULE_MARKERS} from '../shared-constants'
* list-items.
*/

const LIST_UNORDERED_MARKERS = {}

LIST_UNORDERED_MARKERS['*'] = true
LIST_UNORDERED_MARKERS['+'] = true
LIST_UNORDERED_MARKERS['-'] = true
const LIST_UNORDERED_MARKERS = {
'*': true,
'+': true,
'-': true,
}

/*
* A map of characters which can be used to mark
* list-items after a digit.
*/

const LIST_ORDERED_MARKERS = {}

LIST_ORDERED_MARKERS['.'] = true
const LIST_ORDERED_MARKERS = {
'.': true,
}

/*
* A map of characters which can be used to mark
* list-items after a digit.
*/

const LIST_ORDERED_COMMONMARK_MARKERS = {}

LIST_ORDERED_COMMONMARK_MARKERS['.'] = true
LIST_ORDERED_COMMONMARK_MARKERS[')'] = true
const LIST_ORDERED_COMMONMARK_MARKERS = {
'.': true,
')': true,
}

/**
* Tokenise a list.
@@ -12,7 +12,7 @@ import trimTrailingLines from 'trim-trailing-lines'
* @param {Function} eat - Eater.
* @return {Object} - `code` node.
*/
export default function renderCodeBlock (value, language) {
export default function renderCodeBlock (value, language?) {
return {
type: nodeTypes.CODE,
lang: language || null,
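`renderCodeBlock` gains a `?` on its second parameter: JavaScript callers are free to omit `language`, but TypeScript checks arity at every call site, so a parameter that may be left out has to be marked optional. A small sketch with illustrative annotations (the commit itself leaves the parameters untyped and keeps the full node shape):

```ts
// Optional parameter: both one- and two-argument calls type-check.
function renderCodeBlock (value: string, language?: string) {
  return {
    type: 'code',           // nodeTypes.CODE in the real module
    lang: language || null, // a missing language is normalised to null
    value,
  }
}

renderCodeBlock('console.log(1)')        // language omitted
renderCodeBlock('console.log(1)', 'js')  // language supplied
```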
@@ -25,7 +25,7 @@ const TABLE_ALIGN_NONE = null
* @param {boolean?} [silent] - Whether this is a dry run.
* @return {Node?|boolean} - `table` node.
*/
export default function tokenizeTable (parser, value, silent) {
const tokenizeTable: any = function (parser, value, silent) {
/*
* Exit when not in gfm-mode.
*/
@@ -78,7 +78,7 @@ export default function tokenizeTable (parser, value, silent) {
const alignments = lines.splice(1, 1)[0] || []
index = 0
lineCount--
let alignment = false
let alignment: any = false
const align = []
let hasDash
let first
@@ -282,3 +282,5 @@ export default function tokenizeTable (parser, value, silent) {
}

tokenizeTable.onlyAtTop = true

export default tokenizeTable
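The `let alignment: any = false` change is a different case from the tokenizer exports: the variable is initialised to `false` and later reassigned one of the `TABLE_ALIGN_*` values while the delimiter row is scanned, and TypeScript would otherwise infer the type `boolean` from the initializer and reject the string assignment. A union type would state the intent more precisely; a hypothetical alternative:

```ts
// Illustrative alternative to ": any" for the alignment accumulator;
// the commit keeps "any". TABLE_ALIGN_NONE (null) remains assignable
// here because strict null checks did not exist in TypeScript 1.8.
let alignment: string | boolean = false
```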
@@ -14,7 +14,7 @@ import nodeTypes from '../node-types'
* @param {boolean?} [silent] - Whether this is a dry run.
* @return {Node?|boolean} - `yaml` node.
*/
export default function tokenizeYAMLFrontMatter (parser, value, silent) {
const tokenizeYAMLFrontMatter: any = function (parser, value, silent) {
let subvalue
let content
let index
@@ -70,3 +70,5 @@ export default function tokenizeYAMLFrontMatter (parser, value, silent) {
}

tokenizeYAMLFrontMatter.onlyAtStart = true

export default tokenizeYAMLFrontMatter
File renamed without changes.
@@ -1,7 +1,7 @@
import isAlphabetic from '../is-alphabetic'
import isNumeric from '../is-numeric'
import isWhiteSpace from '../is-white-space'
import blockElements from '../block-elements.json'
import blockElements from '../block-elements'

/**
* Try to match a closing tag.
@@ -12,7 +12,7 @@ import blockElements from '../block-elements.json'
* @return {string?} - When applicable, the closing tag at
* the start of `value`.
*/
export default function eatHTMLClosingTag (value, isBlock) {
export default function eatHTMLClosingTag (value, isBlock?) {
let index = 0
const length = value.length
let queue = ''
@@ -13,7 +13,7 @@ const COMMENT_END_LENGTH = COMMENT_END.length
* @return {string?} - When applicable, the comment at the
* start of `value`.
*/
export default function eatHTMLComment (value, settings) {
function eatHTMLComment (value, settings) {
let index = COMMENT_START_LENGTH
let queue = COMMENT_START
const length = value.length
File renamed without changes.
@@ -1,7 +1,7 @@
import isAlphabetic from '../is-alphabetic'
import isNumeric from '../is-numeric'
import isWhiteSpace from '../is-white-space'
import blockElements from '../block-elements.json'
import blockElements from '../block-elements'

/**
* Check whether `character` can be inside an unquoted
@@ -29,7 +29,7 @@ function isUnquotedAttributeCharacter (character) {
* @return {boolean} - Whether `character` can be inside
* a double-quoted attribute value.
*/
function isDoubleQuotedAttributeCharacter (character) {
const isDoubleQuotedAttributeCharacter: any = function (character) {
return character !== '"'
}

@@ -44,7 +44,7 @@ isDoubleQuotedAttributeCharacter.delimiter = '"'
* @return {boolean} - Whether `character` can be inside
* a single-quoted attribute value.
*/
function isSingleQuotedAttributeCharacter (character) {
const isSingleQuotedAttributeCharacter: any = function (character) {
return character !== '\''
}

@@ -59,7 +59,7 @@ isSingleQuotedAttributeCharacter.delimiter = '\''
* @return {string?} - When applicable, the opening tag at
* the start of `value`.
*/
export default function eatHTMLOpeningTag (value, isBlock) {
export default function eatHTMLOpeningTag (value, isBlock?) {
let index = 0
const length = value.length
let queue = ''
22 changes: 11 additions & 11 deletions src/parse/index.js → src/parse/index.ts
@@ -32,10 +32,10 @@ import nodeTypes from './node-types'
* which can be used as indentation.
*/

const INDENTATION_CHARACTERS = {}

INDENTATION_CHARACTERS[' '] = SPACE_SIZE
INDENTATION_CHARACTERS['\t'] = TAB_SIZE
const INDENTATION_CHARACTERS = {
' ': SPACE_SIZE,
'\t': TAB_SIZE,
}

/**
* Factory to create an entity decoder.
@@ -81,7 +81,7 @@ function decodeFactory (context) {
* @param {Position} position - Position to start parsing at.
* @param {Function} handler - Node handler.
*/
function decoder (value, position, handler) {
const decoder: any = function (value, position, handler) {
decode(value, {
position: normalize(position),
warning: handleWarning,
@@ -311,10 +311,10 @@ function parserFactory (processor) {
* A map of two functions which can create list items.
*/

const LIST_ITEM_MAP = {}

LIST_ITEM_MAP.true = renderPedanticListItem
LIST_ITEM_MAP.false = renderNormalListItem
const LIST_ITEM_MAP = {
true: renderPedanticListItem,
false: renderNormalListItem,
}

/**
* Create a list-item using overly simple mechanics.
@@ -430,7 +430,7 @@ function parserFactory (processor) {
return trimmedLines.join('\n')
}

var parser = {
var parser: any = {
/**
* Set options. Does not overwrite previously set
* options.
@@ -551,7 +551,7 @@

return parser.tokenizeBlock(value)
.then(children => {
const node = {
const node: any = {
type: nodeTypes.ROOT,
children,
position: {
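The parser in `src/parse/index.ts` gets the same escape hatch: both the `parser` object and the root `node` it returns are annotated `any` so that the existing dynamic property access keeps compiling. The hunk above shows the root node's shape (`type`, the tokenized `children`, and a `position`), so a typed version would be a natural follow-up; a hypothetical sketch, with the `start`/`end` layout assumed from the mdast convention rather than taken from this diff:

```ts
// Illustrative interfaces for the values annotated ": any" above.
interface Point {
  line: number
  column: number
}

interface Node {
  type: string
  children?: Node[]
  position?: {
    start: Point
    end: Point
  }
}
```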
@@ -28,7 +28,7 @@ function locateAutoLink (parser, value, fromIndex) {
* @param {boolean?} [silent] - Whether this is a dry run.
* @return {Node?|boolean} - `link` node.
*/
function tokenizeAutoLink (parser, value, silent) {
const tokenizeAutoLink: any = function (parser, value, silent) {
if (value.charAt(0) !== '<') {
return
}
@@ -1,4 +1,3 @@
export default tokenizeBreak
import nodeTypes from '../node-types'

const MIN_BREAK_LENGTH = 2
@@ -39,7 +38,7 @@ function locateBreak (parser, value, fromIndex) {
* @param {boolean?} [silent] - Whether this is a dry run.
* @return {Node?|boolean} - `break` node.
*/
function tokenizeBreak (parser, value, silent) {
const tokenizeBreak: any = function (parser, value, silent) {
const breaks = parser.options.breaks
let index = -1
let queue = ''
@@ -72,3 +71,5 @@ function tokenizeBreak (parser, value, silent) {
}

tokenizeBreak.locator = locateBreak

export default tokenizeBreak
@@ -27,7 +27,7 @@ function locateInlineCode (parser, value, fromIndex) {
* @param {boolean?} [silent] - Whether this is a dry run.
* @return {Node?|boolean} - `inlineCode` node.
*/
export default function tokenizeInlineCode (parser, value, silent) {
const tokenizeInlineCode: any = function (parser, value, silent) {
let index = 0
let queue = ''
let tickQueue = ''
@@ -121,3 +121,5 @@ export default function tokenizeInlineCode (parser, value, silent) {
}

tokenizeInlineCode.locator = locateInlineCode

export default tokenizeInlineCode
@@ -27,7 +27,7 @@ function locateDeletion (parser, value, fromIndex) {
* @param {boolean?} [silent] - Whether this is a dry run.
* @return {Node?|boolean} - `delete` node.
*/
export default function tokenizeDeletion (parser, value, silent) {
const tokenizeDeletion: any = function (parser, value, silent) {
let character = ''
let previous = ''
let preceding = ''
@@ -72,3 +72,5 @@ export default function tokenizeDeletion (parser, value, silent) {
}

tokenizeDeletion.locator = locateDeletion

export default tokenizeDeletion