fix: #546
Jocs committed Nov 7, 2018
1 parent 79ee58e commit 2e133e2
Showing 4 changed files with 27 additions and 7 deletions.
1 change: 1 addition & 0 deletions .github/CHANGELOG.md
@@ -13,6 +13,7 @@
 - fix: #534
 - fix: #535 Application menu is not updated when switching windows
 - fix #216 and #311 key binding issues on Linux and Windows
+- fix #546 paste issue in table
 
 ### 0.13.50
 
12 changes: 12 additions & 0 deletions src/muya/lib/contentState/pasteCtrl.js
@@ -131,6 +131,18 @@ const pasteCtrl = ContentState => {
       return this.partialRender()
     }
 
+    if (/th|td/.test(startBlock.type)) {
+      const pendingText = text.trim().replace(/\n/g, '<br/>')
+      startBlock.text += pendingText
+      const { key } = startBlock
+      const offset = start.offset + pendingText.length
+      this.cursor = {
+        start: { key, offset },
+        end: { key, offset }
+      }
+      return this.partialRender()
+    }
+
     // handle copyAsHtml
     if (copyType === 'copyAsHtml') {
       switch (type) {
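In short, pasting plain text into a table cell (a th or td block) now flattens newlines into <br/> tags, appends the result to the cell's text, and moves the cursor past the appended fragment. A minimal sketch of that transformation, using made-up sample values for the clipboard text and the starting offset:

    // Sketch only: mirrors the string handling in the new th/td branch above.
    const text = 'one\ntwo\nthree'   // hypothetical clipboard plain text
    const startOffset = 4            // hypothetical caret offset inside the cell

    const pendingText = text.trim().replace(/\n/g, '<br/>')
    // -> 'one<br/>two<br/>three'

    const newOffset = startOffset + pendingText.length
    // both cursor.start.offset and cursor.end.offset are set to newOffset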
4 changes: 2 additions & 2 deletions src/muya/lib/parser/parse.js
@@ -508,8 +508,8 @@ const tokenizerFac = (src, beginRules, inlineRules, pos = 0, top) => {
   return tokens
 }
 
-export const tokenizer = (src, highlights = []) => {
-  const tokens = tokenizerFac(src, beginRules, inlineRules, 0, true)
+export const tokenizer = (src, highlights = [], hasBeginRules = true) => {
+  const tokens = tokenizerFac(src, hasBeginRules ? beginRules : null, inlineRules, 0, true)
   const postTokenizer = tokens => {
     for (const token of tokens) {
       for (const light of highlights) {
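tokenizer gains a third parameter, hasBeginRules, defaulting to true so existing call sites keep their behavior. When it is false, tokenizerFac receives null in place of beginRules, so only the inline rules run, which is what table-cell content needs. A hedged usage sketch (the import path and sample strings are illustrative):

    // Illustrative import path; in the repo this would be a relative import.
    import { tokenizer } from './src/muya/lib/parser/parse'

    // Default behavior: begin rules are still applied.
    const full = tokenizer('**bold** and *emphasis*', [])

    // New option: skip begin rules and keep inline rules only,
    // e.g. for content inside a table cell.
    const inlineOnly = tokenizer('**bold** and *emphasis*', [], false)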
17 changes: 12 additions & 5 deletions src/muya/lib/parser/render/renderBlock/renderLeafBlock.js
@@ -1,7 +1,7 @@
 import katex from 'katex'
 import mermaid from 'mermaid'
 import prism, { loadedCache } from '../../../prism/'
-import { CLASS_OR_ID, DEVICE_MEMORY, isInElectron, PREVIEW_DOMPURIFY_CONFIG } from '../../../config'
+import { CLASS_OR_ID, DEVICE_MEMORY, isInElectron, PREVIEW_DOMPURIFY_CONFIG, HAS_TEXT_BLOCK_REG } from '../../../config'
 import { tokenizer } from '../../parse'
 import { snakeToCamel, sanitize, escapeHtml, getLongUniqueId } from '../../../utils'
 import { h, htmlToVNode } from '../snabbdom'
@@ -70,12 +70,19 @@ export default function renderLeafBlock (block, cursor, activeBlocks, matches, u
   }
   let children = ''
   if (text) {
-    let tokens = null
+    let tokens = []
     if (highlights.length === 0 && this.tokenCache.has(text)) {
       tokens = this.tokenCache.get(text)
-    } else {
-      tokens = tokenizer(text, highlights)
-      if (highlights.length === 0 && useCache && DEVICE_MEMORY >= 4) this.tokenCache.set(text, tokens)
+    } else if (
+      HAS_TEXT_BLOCK_REG.test(type) &&
+      functionType !== 'codeLine' &&
+      functionType !== 'languageInput'
+    ) {
+      const hasBeginRules = /^(h\d|span|hr)/.test(type)
+      tokens = tokenizer(text, highlights, hasBeginRules)
+      if (highlights.length === 0 && useCache && DEVICE_MEMORY >= 4) {
+        this.tokenCache.set(text, tokens)
+      }
     }
     children = tokens.reduce((acc, token) => [...acc, ...this[snakeToCamel(token.type)](h, cursor, block, token)], [])
   }
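The render-side change gates tokenization more tightly: a leaf block is tokenized only if its type matches HAS_TEXT_BLOCK_REG and it is neither a codeLine nor a languageInput, and begin rules are enabled only for heading (h1-h6), span, and hr blocks. Other text blocks, presumably including table cells, are parsed with inline rules alone. A small sketch of that decision logic, assuming the same regular expressions the diff uses:

    // Sketch of the gating logic in renderLeafBlock above; HAS_TEXT_BLOCK_REG
    // is the regex imported from ../../../config.
    const shouldTokenize = (type, functionType) =>
      HAS_TEXT_BLOCK_REG.test(type) &&
      functionType !== 'codeLine' &&
      functionType !== 'languageInput'

    const useBeginRules = type => /^(h\d|span|hr)/.test(type)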
