Skip to content

Commit

Permalink
Merge pull request #1627 from UziTech/inline-tokens
Browse files Browse the repository at this point in the history
Inline tokens
  • Loading branch information
UziTech authored Apr 8, 2020
2 parents 35bcb7d + 3ca62b3 commit 743ec55
Show file tree
Hide file tree
Showing 17 changed files with 4,045 additions and 1,801 deletions.
1 change: 1 addition & 0 deletions .eslintrc.json
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@
"one-var": "off",
"no-control-regex": "off",
"no-prototype-builtins": "off",
"no-extra-semi": "error",

"prefer-const": "error",
"no-var": "error"
Expand Down
109 changes: 83 additions & 26 deletions docs/USING_PRO.md
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ To champion the single-responsibility and open/closed principles, we have tried

<h2 id="renderer">The renderer</h2>

The renderer is...
The renderer defines the output of the parser.

**Example:** Overriding default heading token by adding an embedded anchor tag like on GitHub.

Expand All @@ -29,7 +29,7 @@ renderer.heading = function (text, level) {
};

// Run marked
console.log(marked('# heading+', { renderer: renderer }));
console.log(marked('# heading+', { renderer }));
```

**Output:**
Expand Down Expand Up @@ -91,12 +91,11 @@ slugger.slug('foo-1') // foo-1-2

<h2 id="lexer">The lexer</h2>

The lexer is...

The lexer turns a markdown string into tokens.

<h2 id="parser">The parser</h2>

The parser is...
The parser takes tokens as input and calls the renderer functions.

***

Expand All @@ -105,30 +104,46 @@ The parser is...
You also have direct access to the lexer and parser if you so desire.

``` js
const tokens = marked.lexer(text, options);
const tokens = marked.lexer(markdown, options);
console.log(marked.parser(tokens, options));
```

``` js
const lexer = new marked.Lexer(options);
const tokens = lexer.lex(text);
const tokens = lexer.lex(markdown);
console.log(tokens);
console.log(lexer.rules);
console.log(lexer.rules.block); // block level rules
console.log(lexer.rules.inline); // inline level rules
```

``` bash
$ node
> require('marked').lexer('> i am using marked.')
[ { type: 'blockquote_start' },
{ type: 'paragraph',
text: 'i am using marked.' },
{ type: 'blockquote_end' },
links: {} ]
> require('marked').lexer('> I am using marked.')
[
{
type: "blockquote",
raw: "> I am using marked.",
tokens: [
{
type: "paragraph",
raw: "I am using marked.",
text: "I am using marked.",
tokens: [
{
type: "text",
raw: "I am using marked.",
text: "I am using marked."
}
]
}
]
},
links: {}
]
```

The Lexers build an array of tokens, which will be passed to their respective
Parsers. The Parsers process each token in the token arrays,
which are removed from the array of tokens:
The Lexer builds an array of tokens, which will be passed to the Parser.
The Parser processes each token in the token array:

``` js
const marked = require('marked');
Expand All @@ -146,18 +161,60 @@ console.log(tokens);

const html = marked.parser(tokens);
console.log(html);

console.log(tokens);
```

``` bash
[ { type: 'heading', depth: 1, text: 'heading' },
{ type: 'paragraph', text: ' [link][1]' },
{ type: 'space' },
links: { '1': { href: '#heading', title: 'heading' } } ]

[
{
type: "heading",
raw: " # heading\n\n",
depth: 1,
text: "heading",
tokens: [
{
type: "text",
raw: "heading",
text: "heading"
}
]
},
{
type: "paragraph",
raw: " [link][1]",
text: " [link][1]",
tokens: [
{
type: "text",
raw: " ",
text: " "
},
{
type: "link",
raw: "[link][1]",
text: "link",
href: "#heading",
title: "heading",
tokens: [
{
type: "text",
raw: "link",
text: "link"
}
]
}
]
},
{
type: "space",
raw: "\n\n"
},
links: {
"1": {
href: "#heading",
title: "heading"
}
}
]
<h1 id="heading">heading</h1>
<p> <a href="#heading" title="heading">link</a></p>

[ links: { '1': { href: '#heading', title: 'heading' } } ]
```
14 changes: 7 additions & 7 deletions docs/demo/demo.js
Original file line number Diff line number Diff line change
Expand Up @@ -183,7 +183,7 @@ function handleIframeLoad() {

function handleInput() {
inputDirty = true;
};
}

function handleVersionChange() {
if ($markedVerElem.value === 'commit' || $markedVerElem.value === 'pr') {
Expand Down Expand Up @@ -256,7 +256,7 @@ function handleChange(panes, visiblePane) {
}
}
return active;
};
}

function addCommitVersion(value, text, commit) {
if (markedVersions[value]) {
Expand Down Expand Up @@ -331,13 +331,13 @@ function jsonString(input) {
.replace(/[\\"']/g, '\\$&')
.replace(/\u0000/g, '\\0');
return '"' + output + '"';
};
}

function getScrollSize() {
var e = $activeOutputElem;

return e.scrollHeight - e.clientHeight;
};
}

function getScrollPercent() {
var size = getScrollSize();
Expand All @@ -347,11 +347,11 @@ function getScrollPercent() {
}

return $activeOutputElem.scrollTop / size;
};
}

function setScrollPercent(percent) {
$activeOutputElem.scrollTop = percent * getScrollSize();
};
}

function updateLink() {
var outputType = '';
Expand Down Expand Up @@ -446,7 +446,7 @@ function checkForChanges() {
}
}
checkChangeTimeout = window.setTimeout(checkForChanges, delayTime);
};
}

function setResponseTime(ms) {
var amount = ms;
Expand Down
41 changes: 29 additions & 12 deletions docs/demo/worker.js
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
/* globals marked, unfetch, ES6Promise, Promise */ // eslint-disable-line no-redeclare

if (!self.Promise) {
self.importScripts('https://cdn.jsdelivr.net/npm/es6-promise/dist/es6-promise.js');
self.Promise = ES6Promise;
Expand Down Expand Up @@ -48,28 +49,44 @@ function parse(e) {
case 'parse':
var startTime = new Date();
var lexed = marked.lexer(e.data.markdown, e.data.options);
var lexedList = [];
for (var i = 0; i < lexed.length; i++) {
var lexedLine = [];
for (var j in lexed[i]) {
lexedLine.push(j + ':' + jsonString(lexed[i][j]));
}
lexedList.push('{' + lexedLine.join(', ') + '}');
}
var lexedList = getLexedList(lexed);
var parsed = marked.parser(lexed, e.data.options);
var endTime = new Date();
// setTimeout(function () {
postMessage({
task: e.data.task,
lexed: lexedList.join('\n'),
lexed: lexedList,
parsed: parsed,
time: endTime - startTime
});
// }, 10000);
break;
}
}

// Render an array of marked tokens as human-readable text, one `{...}`
// line per token. Nested `tokens`/`items` arrays are expanded recursively,
// indented two extra spaces per nesting level. `level` defaults to 0.
function getLexedList(lexed, level) {
  var depth = level || 0;
  var indent = stringRepeat(' ', 2 * depth);
  var lines = [];
  for (var index = 0; index < lexed.length; index++) {
    var token = lexed[index];
    var parts = [];
    for (var key in token) {
      var value = token[key];
      if (key === 'tokens' || key === 'items') {
        // Child token arrays get their own recursive block, one level deeper.
        parts.push(key + ': [\n' + getLexedList(value, depth + 1) + '\n]');
      } else {
        parts.push(key + ':' + jsonString(value));
      }
    }
    lines.push(indent + '{' + parts.join(', ') + '}');
  }
  return lines.join('\n');
}

// Concatenate `times` copies of `char`. ES5-compatible stand-in for
// String.prototype.repeat, which this worker avoids for old-browser support
// (the file also polyfills Promise for the same reason).
function stringRepeat(char, times) {
  var result = '';
  var remaining = times;
  while (remaining > 0) {
    result += char;
    remaining--;
  }
  return result;
}

function jsonString(input) {
var output = (input + '')
.replace(/\n/g, '\\n')
Expand All @@ -79,7 +96,7 @@ function jsonString(input) {
.replace(/[\\"']/g, '\\$&')
.replace(/\u0000/g, '\\0');
return '"' + output + '"';
};
}

function loadVersion(ver) {
var promise;
Expand Down
Loading

1 comment on commit 743ec55

@vercel
Copy link

@vercel vercel bot commented on 743ec55 Apr 8, 2020

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Please sign in to comment.