Skip to content

Commit

Permalink
feat: add async option (#2474)
Browse files Browse the repository at this point in the history
* fix: return values from walkTokens

* docs: add async docs

* test: add async test

* docs: add nav to async

* Update docs/USING_PRO.md

Co-authored-by: Steven <steven@ceriously.com>

* test: expect promise

* Update docs/USING_ADVANCED.md

Co-authored-by: Steven <steven@ceriously.com>

Co-authored-by: Steven <steven@ceriously.com>
  • Loading branch information
UziTech and styfle authored Aug 30, 2022
1 parent 33724a3 commit 994b2e6
Show file tree
Hide file tree
Showing 8 changed files with 215 additions and 171 deletions.
1 change: 1 addition & 0 deletions docs/USING_ADVANCED.md
Original file line number Diff line number Diff line change
Expand Up @@ -44,6 +44,7 @@ console.log(marked.parse(markdownString));

|Member |Type |Default |Since |Notes |
|:-----------|:---------|:--------|:--------|:-------------|
|async |`boolean` |`false` |4.1.0 |If true, `walkTokens` functions can be async and `marked.parse` will return a promise that resolves when all `walkTokens` functions resolve.|
|baseUrl |`string` |`null` |0.3.9 |A prefix url for any relative link. |
|breaks |`boolean` |`false` |v0.2.7 |If true, add `<br>` on a single line break (copies GitHub behavior on comments, but not on rendered markdown files). Requires `gfm` to be `true`.|
|gfm |`boolean` |`true` |v0.2.1 |If true, use approved [GitHub Flavored Markdown (GFM) specification](https://github.github.com/gfm/).|
Expand Down
72 changes: 72 additions & 0 deletions docs/USING_PRO.md
Original file line number Diff line number Diff line change
Expand Up @@ -438,6 +438,78 @@ console.log(marked.parse('A Description List:\n'
***
<h2 id="async">Async Marked : <code>async</code></h2>
Marked will return a promise if the `async` option is true. The `async` option will tell marked to await any `walkTokens` functions before parsing the tokens and returning an HTML string.
Simple Example:
```js
// Validate each link token by fetching its target; mark unreachable links.
const walkTokens = async (token) => {
  if (token.type !== 'link') {
    return;
  }
  try {
    await fetch(token.href);
  } catch (err) {
    // Network/DNS failure: flag the link so the renderer can show it as broken.
    token.title = 'invalid';
  }
};

marked.use({ walkTokens, async: true });

const markdown = `
[valid link](https://example.com)
[invalid link](https://invalidurl.com)
`;

const html = await marked.parse(markdown);
```
Custom Extension Example:
```js
// Marked extension: `:https://url:` on its own block fetches the URL and
// inlines the response body as raw HTML.
const importUrl = {
  extensions: [{
    name: 'importUrl',
    level: 'block',
    // Hint to marked where a potential match begins.
    start(src) { return src.indexOf('\n:'); },
    tokenizer(src) {
      const pattern = /^:(https?:\/\/.+?):/;
      const found = pattern.exec(src);
      if (!found) {
        return;
      }
      return {
        type: 'importUrl',
        raw: found[0],
        url: found[1],
        html: '' // will be replaced in walkTokens
      };
    },
    renderer(token) {
      return token.html;
    }
  }],
  async: true, // needed to tell marked to return a promise
  async walkTokens(token) {
    if (token.type !== 'importUrl') {
      return;
    }
    const res = await fetch(token.url);
    token.html = await res.text();
  }
};

marked.use(importUrl);

const markdown = `
# example.com
:https://example.com:
`;

const html = await marked.parse(markdown);
```
<h2 id="lexer">The Lexer</h2>
The lexer takes a markdown string and calls the tokenizer functions.
Expand Down
1 change: 1 addition & 0 deletions docs/_document.html
Original file line number Diff line number Diff line change
Expand Up @@ -51,6 +51,7 @@ <h1>Marked Documentation</h1>
<li><a href="/using_pro#tokenizer">Tokenizer</a></li>
<li><a href="/using_pro#walk-tokens">Walk Tokens</a></li>
<li><a href="/using_pro#extensions">Custom Extensions</a></li>
<li><a href="/using_pro#async">Async Marked</a></li>
<li><a href="/using_pro#lexer">Lexer</a></li>
<li><a href="/using_pro#parser">Parser</a></li>
</ul>
Expand Down
51 changes: 21 additions & 30 deletions src/Tokenizer.js
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ function outputLink(cap, link, raw, lexer) {
href,
title,
text,
tokens: lexer.inlineTokens(text, [])
tokens: lexer.inlineTokens(text)
};
lexer.state.inLink = false;
return token;
Expand Down Expand Up @@ -125,15 +125,13 @@ export class Tokenizer {
}
}

const token = {
return {
type: 'heading',
raw: cap[0],
depth: cap[1].length,
text,
tokens: []
tokens: this.lexer.inline(text)
};
this.lexer.inline(token.text, token.tokens);
return token;
}
}

Expand Down Expand Up @@ -355,10 +353,10 @@ export class Tokenizer {
text: cap[0]
};
if (this.options.sanitize) {
const text = this.options.sanitizer ? this.options.sanitizer(cap[0]) : escape(cap[0]);
token.type = 'paragraph';
token.text = this.options.sanitizer ? this.options.sanitizer(cap[0]) : escape(cap[0]);
token.tokens = [];
this.lexer.inline(token.text, token.tokens);
token.text = text;
token.tokens = this.lexer.inline(text);
}
return token;
}
Expand Down Expand Up @@ -416,17 +414,15 @@ export class Tokenizer {
// header child tokens
l = item.header.length;
for (j = 0; j < l; j++) {
item.header[j].tokens = [];
this.lexer.inline(item.header[j].text, item.header[j].tokens);
item.header[j].tokens = this.lexer.inline(item.header[j].text);
}

// cell child tokens
l = item.rows.length;
for (j = 0; j < l; j++) {
row = item.rows[j];
for (k = 0; k < row.length; k++) {
row[k].tokens = [];
this.lexer.inline(row[k].text, row[k].tokens);
row[k].tokens = this.lexer.inline(row[k].text);
}
}

Expand All @@ -438,45 +434,40 @@ export class Tokenizer {
lheading(src) {
const cap = this.rules.block.lheading.exec(src);
if (cap) {
const token = {
return {
type: 'heading',
raw: cap[0],
depth: cap[2].charAt(0) === '=' ? 1 : 2,
text: cap[1],
tokens: []
tokens: this.lexer.inline(cap[1])
};
this.lexer.inline(token.text, token.tokens);
return token;
}
}

paragraph(src) {
const cap = this.rules.block.paragraph.exec(src);
if (cap) {
const token = {
const text = cap[1].charAt(cap[1].length - 1) === '\n'
? cap[1].slice(0, -1)
: cap[1];
return {
type: 'paragraph',
raw: cap[0],
text: cap[1].charAt(cap[1].length - 1) === '\n'
? cap[1].slice(0, -1)
: cap[1],
tokens: []
text,
tokens: this.lexer.inline(text)
};
this.lexer.inline(token.text, token.tokens);
return token;
}
}

text(src) {
const cap = this.rules.block.text.exec(src);
if (cap) {
const token = {
return {
type: 'text',
raw: cap[0],
text: cap[0],
tokens: []
tokens: this.lexer.inline(cap[0])
};
this.lexer.inline(token.text, token.tokens);
return token;
}
}

Expand Down Expand Up @@ -645,7 +636,7 @@ export class Tokenizer {
type: 'em',
raw: src.slice(0, lLength + match.index + rLength + 1),
text,
tokens: this.lexer.inlineTokens(text, [])
tokens: this.lexer.inlineTokens(text)
};
}

Expand All @@ -655,7 +646,7 @@ export class Tokenizer {
type: 'strong',
raw: src.slice(0, lLength + match.index + rLength + 1),
text,
tokens: this.lexer.inlineTokens(text, [])
tokens: this.lexer.inlineTokens(text)
};
}
}
Expand Down Expand Up @@ -696,7 +687,7 @@ export class Tokenizer {
type: 'del',
raw: cap[0],
text: cap[2],
tokens: this.lexer.inlineTokens(cap[2], [])
tokens: this.lexer.inlineTokens(cap[2])
};
}
}
Expand Down
1 change: 1 addition & 0 deletions src/defaults.js
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
export function getDefaults() {
return {
async: false,
baseUrl: null,
breaks: false,
extensions: null,
Expand Down
45 changes: 30 additions & 15 deletions src/marked.js
Original file line number Diff line number Diff line change
Expand Up @@ -105,13 +105,7 @@ export function marked(src, opt, callback) {
return;
}

try {
const tokens = Lexer.lex(src, opt);
if (opt.walkTokens) {
marked.walkTokens(tokens, opt.walkTokens);
}
return Parser.parse(tokens, opt);
} catch (e) {
function onError(e) {
e.message += '\nPlease report this to https://github.com/markedjs/marked.';
if (opt.silent) {
return '<p>An error occurred:</p><pre>'
Expand All @@ -120,6 +114,23 @@ export function marked(src, opt, callback) {
}
throw e;
}

try {
const tokens = Lexer.lex(src, opt);
if (opt.walkTokens) {
if (opt.async) {
return Promise.all(marked.walkTokens(tokens, opt.walkTokens))
.then(() => {
return Parser.parse(tokens, opt);
})
.catch(onError);
}
marked.walkTokens(tokens, opt.walkTokens);
}
return Parser.parse(tokens, opt);
} catch (e) {
onError(e);
}
}

/**
Expand Down Expand Up @@ -236,10 +247,12 @@ marked.use = function(...args) {
if (pack.walkTokens) {
const walkTokens = marked.defaults.walkTokens;
opts.walkTokens = function(token) {
pack.walkTokens.call(this, token);
let values = [];
values.push(pack.walkTokens.call(this, token));
if (walkTokens) {
walkTokens.call(this, token);
values = values.concat(walkTokens.call(this, token));
}
return values;
};
}

Expand All @@ -256,35 +269,37 @@ marked.use = function(...args) {
*/

marked.walkTokens = function(tokens, callback) {
let values = [];
for (const token of tokens) {
callback.call(marked, token);
values = values.concat(callback.call(marked, token));
switch (token.type) {
case 'table': {
for (const cell of token.header) {
marked.walkTokens(cell.tokens, callback);
values = values.concat(marked.walkTokens(cell.tokens, callback));
}
for (const row of token.rows) {
for (const cell of row) {
marked.walkTokens(cell.tokens, callback);
values = values.concat(marked.walkTokens(cell.tokens, callback));
}
}
break;
}
case 'list': {
marked.walkTokens(token.items, callback);
values = values.concat(marked.walkTokens(token.items, callback));
break;
}
default: {
if (marked.defaults.extensions && marked.defaults.extensions.childTokens && marked.defaults.extensions.childTokens[token.type]) { // Walk any extensions
marked.defaults.extensions.childTokens[token.type].forEach(function(childTokens) {
marked.walkTokens(token[childTokens], callback);
values = values.concat(marked.walkTokens(token[childTokens], callback));
});
} else if (token.tokens) {
marked.walkTokens(token.tokens, callback);
values = values.concat(marked.walkTokens(token.tokens, callback));
}
}
}
}
return values;
};

/**
Expand Down
Loading

1 comment on commit 994b2e6

@vercel
Copy link

@vercel vercel bot commented on 994b2e6 Aug 30, 2022

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Please sign in to comment.