fix: Refactor table tokens (#2166)
BREAKING CHANGE:

- The `table` token's `header` property now contains an array of objects, one per header cell, each with `text` and `tokens` properties.
- The `table` token's `cells` property is renamed to `rows` and is an array of rows, where each row is an array of objects, one per cell, each with `text` and `tokens` properties.

v2:

```json
{
  "type": "table",
  "align": [null, null],
  "raw": "| a | b |\n|---|---|\n| 1 | 2 |\n",
  "header": ["a", "b"],
  "cells": [["1", "2"]],
  "tokens": {
    "header": [
      [{ "type": "text", "raw": "a", "text": "a" }],
      [{ "type": "text", "raw": "b", "text": "b" }]
    ],
    "cells": [[
      [{ "type": "text", "raw": "1", "text": "1" }],
      [{ "type": "text", "raw": "2", "text": "2" }]
    ]]
  }
}
```

v3:

```json
{
  "type": "table",
  "align": [null, null],
  "raw": "| a | b |\n|---|---|\n| 1 | 2 |\n",
  "header": [
    {
      "text": "a",
      "tokens": [{ "type": "text", "raw": "a", "text": "a" }]
    },
    {
      "text": "b",
      "tokens": [{ "type": "text", "raw": "b", "text": "b" }]
    }
  ],
  "rows": [
    {
      "text": "1",
      "tokens": [{ "type": "text", "raw": "1", "text": "1" }]
    },
    {
      "text": "2",
      "tokens": [{ "type": "text", "raw": "2", "text": "2" }]
    }
  ]
}
```
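For code that consumes table tokens directly, the shape change amounts to swapping the parallel `tokens.header`/`tokens.cells` lookups for per-cell objects. A minimal before/after sketch (the helper names are illustrative, not part of marked's API):

```js
// Hypothetical helpers, illustrative only — not part of marked's API.

// v2: cell text lived in `cells` (an array of string arrays); inline child
// tokens lived in the parallel `token.tokens.cells` structure.
function collectCellTextV2(table) {
  const texts = [];
  for (const row of table.cells) {
    for (const text of row) texts.push(text);
  }
  return texts;
}

// v3: each cell is an object that carries its own `text` and `tokens`.
function collectCellTextV3(table) {
  const texts = [];
  for (const row of table.rows) {
    for (const cell of row) texts.push(cell.text);
  }
  return texts;
}
```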
calculuschild authored Aug 16, 2021
1 parent eb33d3b commit bc400ac
Showing 5 changed files with 113 additions and 74 deletions.
8 changes: 4 additions & 4 deletions src/Parser.js
```diff
@@ -103,22 +103,22 @@ module.exports = class Parser {
           l2 = token.header.length;
           for (j = 0; j < l2; j++) {
             cell += this.renderer.tablecell(
-              this.parseInline(token.tokens.header[j]),
+              this.parseInline(token.header[j].tokens),
               { header: true, align: token.align[j] }
             );
           }
           header += this.renderer.tablerow(cell);
 
           body = '';
-          l2 = token.cells.length;
+          l2 = token.rows.length;
           for (j = 0; j < l2; j++) {
-            row = token.tokens.cells[j];
+            row = token.rows[j];
 
             cell = '';
             l3 = row.length;
             for (k = 0; k < l3; k++) {
               cell += this.renderer.tablecell(
-                this.parseInline(row[k]),
+                this.parseInline(row[k].tokens),
                 { header: false, align: token.align[k] }
               );
             }
```
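The renderer-facing contract is unchanged by this refactor: `tablecell` still receives already-parsed inline content plus a flags object, as the calls above show, so renderer overrides keep working. A hedged sketch of such an override (the CSS class names are illustrative):

```js
const marked = require('marked'); // assuming the npm package

// Override tablecell to emit alignment as a CSS class; `header` and
// `align` come from the flags object the Parser passes in above.
marked.use({
  renderer: {
    tablecell(content, flags) {
      const tag = flags.header ? 'th' : 'td';
      const cls = flags.align ? ` class="align-${flags.align}"` : '';
      return `<${tag}${cls}>${content}</${tag}>\n`;
    }
  }
});
```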
25 changes: 10 additions & 15 deletions src/Tokenizer.js
```diff
@@ -352,9 +352,9 @@ module.exports = class Tokenizer {
     if (cap) {
       const item = {
         type: 'table',
-        header: splitCells(cap[1].replace(/^ *| *\| *$/g, '')),
+        header: splitCells(cap[1]).map(c => { return { text: c }; }),
         align: cap[2].replace(/^ *|\| *$/g, '').split(/ *\| */),
-        cells: cap[3] ? cap[3].replace(/\n$/, '').split('\n') : []
+        rows: cap[3] ? cap[3].replace(/\n$/, '').split('\n') : []
       };
 
       if (item.header.length === item.align.length) {
@@ -374,32 +374,27 @@
           }
         }
 
-        l = item.cells.length;
+        l = item.rows.length;
         for (i = 0; i < l; i++) {
-          item.cells[i] = splitCells(item.cells[i], item.header.length);
+          item.rows[i] = splitCells(item.rows[i], item.header.length).map(c => { return { text: c }; });
         }
 
         // parse child tokens inside headers and cells
-        item.tokens = {
-          header: [],
-          cells: []
-        };
 
         // header child tokens
         l = item.header.length;
         for (j = 0; j < l; j++) {
-          item.tokens.header[j] = [];
-          this.lexer.inlineTokens(item.header[j], item.tokens.header[j]);
+          item.header[j].tokens = [];
+          this.lexer.inlineTokens(item.header[j].text, item.header[j].tokens);
         }
 
         // cell child tokens
-        l = item.cells.length;
+        l = item.rows.length;
         for (j = 0; j < l; j++) {
-          row = item.cells[j];
-          item.tokens.cells[j] = [];
+          row = item.rows[j];
           for (k = 0; k < row.length; k++) {
-            item.tokens.cells[j][k] = [];
-            this.lexer.inlineTokens(row[k], item.tokens.cells[j][k]);
+            row[k].tokens = [];
+            this.lexer.inlineTokens(row[k].text, row[k].tokens);
           }
         }
```
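To see the new token shape the tokenizer produces, it is enough to lex a small table and inspect the result (a sketch assuming the standard `marked.lexer` entry point; the expected values mirror the unit tests below):

```js
const marked = require('marked'); // assuming the npm package

const tokens = marked.lexer('| a | b |\n|---|---|\n| 1 | 2 |\n');
const table = tokens[0];

console.log(table.header[0].text);    // 'a'
console.log(table.header[0].tokens);  // [ { type: 'text', raw: 'a', text: 'a' } ]
console.log(table.rows[0][1].text);   // '2'
console.log(table.rows[0][1].tokens); // [ { type: 'text', raw: '2', text: '2' } ]
```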
8 changes: 4 additions & 4 deletions src/marked.js
```diff
@@ -260,12 +260,12 @@ marked.walkTokens = function(tokens, callback) {
     callback(token);
     switch (token.type) {
       case 'table': {
-        for (const cell of token.tokens.header) {
-          marked.walkTokens(cell, callback);
+        for (const cell of token.header) {
+          marked.walkTokens(cell.tokens, callback);
         }
-        for (const row of token.tokens.cells) {
+        for (const row of token.rows) {
           for (const cell of row) {
-            marked.walkTokens(cell, callback);
+            marked.walkTokens(cell.tokens, callback);
           }
         }
         break;
```
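With the per-cell `tokens` arrays, `walkTokens` reaches every inline token inside table headers and rows. A small usage sketch (modeled on the uppercase example in marked's docs; the callable `marked(...)` entry point assumes the marked v3 API of this era):

```js
const marked = require('marked'); // assuming the npm package

// Uppercase every text token, including those nested inside table
// headers and rows — walkTokens now descends into each cell's tokens.
marked.use({
  walkTokens(token) {
    if (token.type === 'text') {
      token.text = token.text.toUpperCase();
    }
  }
});

console.log(marked('| a | b |\n|---|---|\n| 1 | 2 |'));
```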
112 changes: 74 additions & 38 deletions test/unit/Lexer-spec.js
```diff
@@ -176,20 +176,30 @@ lheading 2
 `,
       tokens: [{
         type: 'table',
-        header: ['a', 'b'],
         align: [null, null],
-        cells: [['1', '2']],
         raw: '| a | b |\n|---|---|\n| 1 | 2 |\n',
-        tokens: {
-          header: [
-            [{ type: 'text', raw: 'a', text: 'a' }],
-            [{ type: 'text', raw: 'b', text: 'b' }]
-          ],
-          cells: [[
-            [{ type: 'text', raw: '1', text: '1' }],
-            [{ type: 'text', raw: '2', text: '2' }]
-          ]]
-        }
+        header: [
+          {
+            text: 'a',
+            tokens: [{ type: 'text', raw: 'a', text: 'a' }]
+          },
+          {
+            text: 'b',
+            tokens: [{ type: 'text', raw: 'b', text: 'b' }]
+          }
+        ],
+        rows: [
+          [
+            {
+              text: '1',
+              tokens: [{ type: 'text', raw: '1', text: '1' }]
+            },
+            {
+              text: '2',
+              tokens: [{ type: 'text', raw: '2', text: '2' }]
+            }
+          ]
+        ]
       }]
     });
   });
@@ -203,22 +213,38 @@ lheading 2
 `,
       tokens: [{
         type: 'table',
-        header: ['a', 'b', 'c'],
         align: ['left', 'center', 'right'],
-        cells: [['1', '2', '3']],
         raw: '| a | b | c |\n|:--|:-:|--:|\n| 1 | 2 | 3 |\n',
-        tokens: {
-          header: [
-            [{ type: 'text', raw: 'a', text: 'a' }],
-            [{ type: 'text', raw: 'b', text: 'b' }],
-            [{ type: 'text', raw: 'c', text: 'c' }]
-          ],
-          cells: [[
-            [{ type: 'text', raw: '1', text: '1' }],
-            [{ type: 'text', raw: '2', text: '2' }],
-            [{ type: 'text', raw: '3', text: '3' }]
-          ]]
-        }
+        header: [
+          {
+            text: 'a',
+            tokens: [{ type: 'text', raw: 'a', text: 'a' }]
+          },
+          {
+            text: 'b',
+            tokens: [{ type: 'text', raw: 'b', text: 'b' }]
+          },
+          {
+            text: 'c',
+            tokens: [{ type: 'text', raw: 'c', text: 'c' }]
+          }
+        ],
+        rows: [
+          [
+            {
+              text: '1',
+              tokens: [{ type: 'text', raw: '1', text: '1' }]
+            },
+            {
+              text: '2',
+              tokens: [{ type: 'text', raw: '2', text: '2' }]
+            },
+            {
+              text: '3',
+              tokens: [{ type: 'text', raw: '3', text: '3' }]
+            }
+          ]
+        ]
       }]
     });
   });
@@ -232,20 +258,30 @@ a | b
 `,
       tokens: [{
         type: 'table',
-        header: ['a', 'b'],
         align: [null, null],
-        cells: [['1', '2']],
         raw: 'a | b\n--|--\n1 | 2\n',
-        tokens: {
-          header: [
-            [{ type: 'text', raw: 'a', text: 'a' }],
-            [{ type: 'text', raw: 'b', text: 'b' }]
-          ],
-          cells: [[
-            [{ type: 'text', raw: '1', text: '1' }],
-            [{ type: 'text', raw: '2', text: '2' }]
-          ]]
-        }
+        header: [
+          {
+            text: 'a',
+            tokens: [{ type: 'text', raw: 'a', text: 'a' }]
+          },
+          {
+            text: 'b',
+            tokens: [{ type: 'text', raw: 'b', text: 'b' }]
+          }
+        ],
+        rows: [
+          [
+            {
+              text: '1',
+              tokens: [{ type: 'text', raw: '1', text: '1' }]
+            },
+            {
+              text: '2',
+              tokens: [{ type: 'text', raw: '2', text: '2' }]
+            }
+          ]
+        ]
       }]
     });
   });
```
34 changes: 21 additions & 13 deletions test/unit/Parser-spec.js
```diff
@@ -68,21 +68,29 @@ describe('Parser', () => {
     await expectHtml({
       tokens: [{
         type: 'table',
-        header: ['a', 'b'],
         align: ['left', 'right'],
-        cells: [['1', '2']],
-        tokens: {
-          header: [
-            [{ type: 'text', text: 'a' }],
-            [{ type: 'text', text: 'b' }]
-          ],
-          cells: [
-            [
-              [{ type: 'text', text: '1' }],
-              [{ type: 'text', text: '2' }]
-            ]
+        header: [
+          {
+            text: 'a',
+            tokens: [{ type: 'text', raw: 'a', text: 'a' }]
+          },
+          {
+            text: 'b',
+            tokens: [{ type: 'text', raw: 'b', text: 'b' }]
+          }
+        ],
+        rows: [
+          [
+            {
+              text: '1',
+              tokens: [{ type: 'text', raw: '1', text: '1' }]
+            },
+            {
+              text: '2',
+              tokens: [{ type: 'text', raw: '2', text: '2' }]
+            }
+          ]
-        }
         ]
       }],
       html: `
         <table>
```
