Skip to content

Commit

Permalink
fix: return values from walkTokens
Browse files Browse the repository at this point in the history
  • Loading branch information
UziTech committed Jul 4, 2022
1 parent 0273ab8 commit 9e7ab80
Show file tree
Hide file tree
Showing 4 changed files with 123 additions and 172 deletions.
3 changes: 2 additions & 1 deletion src/Lexer.js
Original file line number Diff line number Diff line change
Expand Up @@ -316,8 +316,9 @@ export class Lexer {
return tokens;
}

- inline(src, tokens) {
+ inline(src, tokens = []) {
this.inlineQueue.push({ src, tokens });
+ return tokens;
}

/**
Expand Down
51 changes: 21 additions & 30 deletions src/Tokenizer.js
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ function outputLink(cap, link, raw, lexer) {
href,
title,
text,
- tokens: lexer.inlineTokens(text, [])
+ tokens: lexer.inlineTokens(text)
};
lexer.state.inLink = false;
return token;
Expand Down Expand Up @@ -125,15 +125,13 @@ export class Tokenizer {
}
}

- const token = {
+ return {
type: 'heading',
raw: cap[0],
depth: cap[1].length,
text,
- tokens: []
+ tokens: this.lexer.inline(text)
};
- this.lexer.inline(token.text, token.tokens);
- return token;
}
}

Expand Down Expand Up @@ -354,10 +352,10 @@ export class Tokenizer {
text: cap[0]
};
if (this.options.sanitize) {
+ const text = this.options.sanitizer ? this.options.sanitizer(cap[0]) : escape(cap[0]);
token.type = 'paragraph';
- token.text = this.options.sanitizer ? this.options.sanitizer(cap[0]) : escape(cap[0]);
- token.tokens = [];
- this.lexer.inline(token.text, token.tokens);
+ token.text = text;
+ token.tokens = this.lexer.inline(text);
}
return token;
}
Expand Down Expand Up @@ -415,17 +413,15 @@ export class Tokenizer {
// header child tokens
l = item.header.length;
for (j = 0; j < l; j++) {
- item.header[j].tokens = [];
- this.lexer.inline(item.header[j].text, item.header[j].tokens);
+ item.header[j].tokens = this.lexer.inline(item.header[j].text);
}

// cell child tokens
l = item.rows.length;
for (j = 0; j < l; j++) {
row = item.rows[j];
for (k = 0; k < row.length; k++) {
- row[k].tokens = [];
- this.lexer.inline(row[k].text, row[k].tokens);
+ row[k].tokens = this.lexer.inline(row[k].text);
}
}

Expand All @@ -437,45 +433,40 @@ export class Tokenizer {
lheading(src) {
const cap = this.rules.block.lheading.exec(src);
if (cap) {
- const token = {
+ return {
type: 'heading',
raw: cap[0],
depth: cap[2].charAt(0) === '=' ? 1 : 2,
text: cap[1],
- tokens: []
+ tokens: this.lexer.inline(cap[1])
};
- this.lexer.inline(token.text, token.tokens);
- return token;
}
}

paragraph(src) {
const cap = this.rules.block.paragraph.exec(src);
if (cap) {
- const token = {
+ const text = cap[1].charAt(cap[1].length - 1) === '\n'
+ ? cap[1].slice(0, -1)
+ : cap[1];
+ return {
type: 'paragraph',
raw: cap[0],
- text: cap[1].charAt(cap[1].length - 1) === '\n'
- ? cap[1].slice(0, -1)
- : cap[1],
- tokens: []
+ text,
+ tokens: this.lexer.inline(text)
};
- this.lexer.inline(token.text, token.tokens);
- return token;
}
}

text(src) {
const cap = this.rules.block.text.exec(src);
if (cap) {
- const token = {
+ return {
type: 'text',
raw: cap[0],
text: cap[0],
- tokens: []
+ tokens: this.lexer.inline(cap[0])
};
- this.lexer.inline(token.text, token.tokens);
- return token;
}
}

Expand Down Expand Up @@ -644,7 +635,7 @@ export class Tokenizer {
type: 'em',
raw: src.slice(0, lLength + match.index + rLength + 1),
text,
- tokens: this.lexer.inlineTokens(text, [])
+ tokens: this.lexer.inlineTokens(text)
};
}

Expand All @@ -654,7 +645,7 @@ export class Tokenizer {
type: 'strong',
raw: src.slice(0, lLength + match.index + rLength + 1),
text,
- tokens: this.lexer.inlineTokens(text, [])
+ tokens: this.lexer.inlineTokens(text)
};
}
}
Expand Down Expand Up @@ -695,7 +686,7 @@ export class Tokenizer {
type: 'del',
raw: cap[0],
text: cap[2],
- tokens: this.lexer.inlineTokens(cap[2], [])
+ tokens: this.lexer.inlineTokens(cap[2])
};
}
}
Expand Down
45 changes: 30 additions & 15 deletions src/marked.js
Original file line number Diff line number Diff line change
Expand Up @@ -105,13 +105,7 @@ export function marked(src, opt, callback) {
return;
}

- try {
- const tokens = Lexer.lex(src, opt);
- if (opt.walkTokens) {
- marked.walkTokens(tokens, opt.walkTokens);
- }
- return Parser.parse(tokens, opt);
- } catch (e) {
+ function onError(e) {
e.message += '\nPlease report this to https://github.com/markedjs/marked.';
if (opt.silent) {
return '<p>An error occurred:</p><pre>'
Expand All @@ -120,6 +114,23 @@ export function marked(src, opt, callback) {
}
throw e;
}

+ try {
+ const tokens = Lexer.lex(src, opt);
+ if (opt.walkTokens) {
+ if (opt.async) {
+ return Promise.all(marked.walkTokens(tokens, opt.walkTokens))
+ .then(() => {
+ return Parser.parse(tokens, opt);
+ })
+ .catch(onError);
+ }
+ marked.walkTokens(tokens, opt.walkTokens);
+ }
+ return Parser.parse(tokens, opt);
+ } catch (e) {
+ onError(e);
+ }
}

/**
Expand Down Expand Up @@ -236,10 +247,12 @@ marked.use = function(...args) {
if (pack.walkTokens) {
const walkTokens = marked.defaults.walkTokens;
opts.walkTokens = function(token) {
- pack.walkTokens.call(this, token);
+ let values = [];
+ values.push(pack.walkTokens.call(this, token));
if (walkTokens) {
- walkTokens.call(this, token);
+ values = values.concat(walkTokens.call(this, token));
}
+ return values;
};
}

Expand All @@ -256,35 +269,37 @@ marked.use = function(...args) {
*/

marked.walkTokens = function(tokens, callback) {
+ let values = [];
for (const token of tokens) {
- callback.call(marked, token);
+ values = values.concat(callback.call(marked, token));
switch (token.type) {
case 'table': {
for (const cell of token.header) {
- marked.walkTokens(cell.tokens, callback);
+ values = values.concat(marked.walkTokens(cell.tokens, callback));
}
for (const row of token.rows) {
for (const cell of row) {
- marked.walkTokens(cell.tokens, callback);
+ values = values.concat(marked.walkTokens(cell.tokens, callback));
}
}
break;
}
case 'list': {
- marked.walkTokens(token.items, callback);
+ values = values.concat(marked.walkTokens(token.items, callback));
break;
}
default: {
if (marked.defaults.extensions && marked.defaults.extensions.childTokens && marked.defaults.extensions.childTokens[token.type]) { // Walk any extensions
marked.defaults.extensions.childTokens[token.type].forEach(function(childTokens) {
- marked.walkTokens(token[childTokens], callback);
+ values = values.concat(marked.walkTokens(token[childTokens], callback));
});
} else if (token.tokens) {
- marked.walkTokens(token.tokens, callback);
+ values = values.concat(marked.walkTokens(token.tokens, callback));
}
}
}
}
+ return values;
};

/**
Expand Down
Loading

0 comments on commit 9e7ab80

Please sign in to comment.