Skip to content

Commit

Permalink
fix: proposed token important fix
Browse files Browse the repository at this point in the history
  • Loading branch information
shellscape committed Nov 13, 2018
1 parent 3016597 commit d96075d
Show file tree
Hide file tree
Showing 2 changed files with 51 additions and 15 deletions.
42 changes: 27 additions & 15 deletions lib/LessParser.js
Original file line number Diff line number Diff line change
Expand Up @@ -82,6 +82,9 @@ module.exports = class LessParser extends Parser {
}

unknownWord(tokens) {
// NOTE: keep commented for examining unknown structures
// console.log('unknown', tokens);

const [first] = tokens;

// TODO: move this into a util function/file
Expand Down Expand Up @@ -110,6 +113,29 @@ module.exports = class LessParser extends Parser {
tokens = tokens.concat(tokensAfter);
}

// Collect the run of tokens starting at the first `!` and ending at the
// `important` word. Whitespace tokens between them are kept so the merged
// token covers the full original source range (e.g. `! important`).
const importantTokens = [];

for (const token of tokens) {
  if (token[1] === '!' || importantTokens.length) {
    importantTokens.push(token);
  }

  if (token[1] === 'important') {
    break;
  }
}

// Only merge when the run actually terminates in `important`; a stray `!`
// with no following `important` would otherwise swallow every remaining
// token into one bogus word.
if (importantTokens.length && importantTokens[importantTokens.length - 1][1] === 'important') {
  const [bangToken] = importantTokens;
  const bangIndex = tokens.indexOf(bangToken);
  const last = importantTokens[importantTokens.length - 1];
  // [2]/[3] are the start line/column of a token, [4]/[5] the end — keep
  // the merged token anchored to the original span.
  const start = [bangToken[2], bangToken[3]];
  const end = [last[4], last[5]];
  // join('') reads better than reduce for plain concatenation, and has no
  // missing-initial-value pitfall.
  const combined = importantTokens.map((t) => t[1]).join('');
  const newToken = ['word', combined].concat(start, end);

  // Replace the `! ... important` run with a single word token so the
  // downstream importantPattern check can match it.
  tokens.splice(bangIndex, importantTokens.length, newToken);
}

const importantIndex = tokens.findIndex((t) => importantPattern.test(t[1]));

if (importantIndex > 0) {
Expand All @@ -125,28 +151,14 @@ module.exports = class LessParser extends Parser {
this.lastNode.mixin = true;
this.lastNode.raws.identifier = identifier;

// const importantIndex = tokens.findIndex((t) => importantPattern.test(t[1]));

if (important) {
this.lastNode.important = true;
this.lastNode.raws.important = important;
}

// if (importantIndex > 0) {
// nodes.splice(importantIndex, 1);
// [this.lastNode.raws.important] = this.lastNode.params.match(importantPattern);

// this.lastNode.params = this.lastNode.params.replace(importantPattern, '');

// const [spaces] = this.lastNode.params.match(/\s+$/) || [''];
// this.lastNode.raws.between = spaces;
// this.lastNode.params = this.lastNode.params.trim();
// }

return;
}
// NOTE: keep commented for examining unknown structures
// console.log('unknown', tokens);

super.unknownWord(tokens);
}
};
24 changes: 24 additions & 0 deletions test/parser/mixins.test.js
Original file line number Diff line number Diff line change
Expand Up @@ -175,6 +175,30 @@ test('!important, no whitespace', (t) => {
t.is(nodeToString(root), less);
});

// Verifies a mixin call with whitespace inside the bang form (`! important`)
// is still recognized: the `!`/`important` tokens are merged, the node is
// flagged important, and stringification round-trips the original input.
test('!important, whitespace between', (t) => {
const less = `
.foo()! important;
`;
const root = parse(less);
const { first } = root;
t.is(first.name, 'foo');
t.is(first.params, '()');
t.is(first.important, true);
// Round-trip: output must match the source byte-for-byte.
t.is(nodeToString(root), less);
});

// Same as the previous case but with whitespace BOTH before the bang and
// between `!` and `important` (`.foo() ! important;`) — the merged token
// must still parse as important and round-trip unchanged.
test('!important, whitespace before and between', (t) => {
const less = `
.foo() ! important;
`;
const root = parse(less);
const { first } = root;
t.is(first.name, 'foo');
t.is(first.params, '()');
t.is(first.important, true);
// Round-trip: output must match the source byte-for-byte.
t.is(nodeToString(root), less);
});

test('parses nested mixins with the rule set', (t) => {
const params = '({background-color: red;})';
const ruleSet = `.desktop-and-old-ie ${params}`;
Expand Down

0 comments on commit d96075d

Please sign in to comment.