From 721e9df5bf4f0e5ad29d6a9c3af99239c972122d Mon Sep 17 00:00:00 2001
From: Jakub Pawlowicz
Date: Tue, 6 Dec 2016 09:06:48 +0100
Subject: [PATCH] Fixes tokenization on extra close brace.

Why:

* An extra close brace should be ignored when at the end of the
  tokenized string, but preserved when in the middle.
---
 lib/optimizer/basic.js                      |  2 +-
 lib/optimizer/merge-adjacent.js             |  2 +-
 lib/optimizer/merge-non-adjacent-by-body.js |  2 +-
 lib/optimizer/tidy-rules.js                 | 42 ++++++++++++++++++++-
 lib/tokenizer/tokenize.js                   |  8 ++--
 test/integration-test.js                    |  4 +-
 test/tokenizer/tokenize-test.js             |  4 +-
 7 files changed, 52 insertions(+), 12 deletions(-)

diff --git a/lib/optimizer/basic.js b/lib/optimizer/basic.js
index 248106a2..7a42daad 100644
--- a/lib/optimizer/basic.js
+++ b/lib/optimizer/basic.js
@@ -546,7 +546,7 @@ function basicOptimize(tokens, context) {
         optimizeComment(token, options);
         break;
       case Token.RULE:
-        token[1] = tidyRules(token[1], !ie7Hack, adjacentSpace);
+        token[1] = tidyRules(token[1], !ie7Hack, adjacentSpace, context.warnings);
         optimizeBody(token[2], context);
         afterRules = true;
         break;
diff --git a/lib/optimizer/merge-adjacent.js b/lib/optimizer/merge-adjacent.js
index 622902b9..988e376e 100644
--- a/lib/optimizer/merge-adjacent.js
+++ b/lib/optimizer/merge-adjacent.js
@@ -27,7 +27,7 @@ function mergeAdjacent(tokens, context) {
       token[2] = [];
     } else if (lastToken[0] == Token.RULE && stringifyBody(token[2]) == stringifyBody(lastToken[2]) &&
         !isSpecial(options, stringifyRules(token[1])) && !isSpecial(options, stringifyRules(lastToken[1]))) {
-      lastToken[1] = tidyRules(lastToken[1].concat(token[1]), false, adjacentSpace);
+      lastToken[1] = tidyRules(lastToken[1].concat(token[1]), false, adjacentSpace, context.warnings);
       token[2] = [];
     } else {
       lastToken = token;
diff --git a/lib/optimizer/merge-non-adjacent-by-body.js b/lib/optimizer/merge-non-adjacent-by-body.js
index 2549cce9..b5eda742 100644
--- a/lib/optimizer/merge-non-adjacent-by-body.js
+++ b/lib/optimizer/merge-non-adjacent-by-body.js
@@ -50,7 +50,7 @@ function mergeNonAdjacentByBody(tokens, context) {
     var oldToken = candidates[candidateBody];
     if (oldToken && !isSpecial(options, stringifyRules(token[1])) && !isSpecial(options, stringifyRules(oldToken[1]))) {
       token[1] = token[2].length > 0 ?
-        tidyRules(oldToken[1].concat(token[1]), false, adjacentSpace) :
+        tidyRules(oldToken[1].concat(token[1]), false, adjacentSpace, context.warnings) :
         oldToken[1].concat(token[1]);
 
       oldToken[2] = [];
diff --git a/lib/optimizer/tidy-rules.js b/lib/optimizer/tidy-rules.js
index 5a38f14c..d5c8a6d1 100644
--- a/lib/optimizer/tidy-rules.js
+++ b/lib/optimizer/tidy-rules.js
@@ -6,6 +6,41 @@ var WHITESPACE_PATTERN = /\s/;
 var STAR_PLUS_HTML_HACK = '*+html ';
 var STAR_FIRST_CHILD_PLUS_HTML_HACK = '*:first-child+html ';
 
+function hasInvalidCharacters(value) {
+  return value.indexOf(Marker.SINGLE_QUOTE) > -1 || value.indexOf(Marker.DOUBLE_QUOTE) > -1 ?
+    hasInvalidCharactersWithQuotes(value) :
+    hasInvalidCharactersWithoutQuotes(value);
+}
+
+function hasInvalidCharactersWithQuotes(value) {
+  var isEscaped;
+  var isInvalid = false;
+  var character;
+  var isQuote = false;
+  var i, l;
+
+  for (i = 0, l = value.length; i < l; i++) {
+    character = value[i];
+
+    if (isEscaped) {
+      // skip the escaped character
+    } else if (character == Marker.SINGLE_QUOTE || character == Marker.DOUBLE_QUOTE) {
+      isQuote = !isQuote;
+    } else if (character == Marker.CLOSE_BRACE && !isQuote) {
+      isInvalid = true;
+      break;
+    }
+
+    isEscaped = character == Marker.BACK_SLASH;
+  }
+
+  return isInvalid;
+}
+
+function hasInvalidCharactersWithoutQuotes(value) {
+  return value.indexOf(Marker.CLOSE_BRACE) > -1;
+}
+
 function removeWhitespace(value) {
   var stripped = [];
   var character;
@@ -93,7 +128,7 @@ function ruleSorter(s1, s2) {
   return s1[0] > s2[0] ? 1 : -1;
 }
 
-function tidyRules(rules, removeUnsupported, adjacentSpace) {
+function tidyRules(rules, removeUnsupported, adjacentSpace, warnings) {
   var list = [];
   var repeated = [];
 
@@ -101,6 +136,11 @@ function tidyRules(rules, removeUnsupported, adjacentSpace) {
     var rule = rules[i];
     var reduced = rule[0];
 
+    if (hasInvalidCharacters(reduced)) {
+      warnings.push('Invalid selector \'' + rule[0] + '\' at line ' + rule[1][0][0] + ', column ' + rule[1][0][1] + '. Ignoring.');
+      continue;
+    }
+
     reduced = removeWhitespace(reduced);
     reduced = removeQuotes(reduced);
 
diff --git a/lib/tokenizer/tokenize.js b/lib/tokenizer/tokenize.js
index ed9b73ae..c652c7ab 100644
--- a/lib/tokenizer/tokenize.js
+++ b/lib/tokenizer/tokenize.js
@@ -290,10 +290,10 @@ function intoTokens(source, externalContext, internalContext, isNested) {
       level = levels.pop();
       seekingValue = false;
-    } else if (character == Marker.CLOSE_BRACE && level == Level.BLOCK && !isNested && position.index < source.length - 1) {
-      // stray close brace at block level, e.g. @media screen {...}}<--
-      externalContext.warnings.push('Extra \'}\' at line ' + position.line + ', column ' + position.column);
-      // noop
+    } else if (character == Marker.CLOSE_BRACE && level == Level.BLOCK && !isNested && position.index <= source.length - 1) {
+      // stray close brace at block level, e.g. a{color:red}color:blue}<--
+      externalContext.warnings.push('Unexpected \'}\' at line ' + position.line + ', column ' + position.column + '.');
+      buffer.push(character);
     } else if (character == Marker.CLOSE_BRACE && level == Level.BLOCK) {
       // close brace at block level, e.g. @media screen {...}<--
       break;
diff --git a/test/integration-test.js b/test/integration-test.js
index 28bfe7d5..199c87a7 100644
--- a/test/integration-test.js
+++ b/test/integration-test.js
@@ -2277,11 +2277,11 @@ vows.describe('integration tests')
   optimizerContext('invalid data tokenization', {
     'extra top-level closing brace': [
       'a{color:red}}p{width:auto}',
-      'a{color:red}p{width:auto}'
+      'a{color:red}'
     ],
     'extra top-level closing braces': [
       'a{color:red}}}}p{width:auto}',
-      'a{color:red}p{width:auto}'
+      'a{color:red}'
     ]
   })
 )
diff --git a/test/tokenizer/tokenize-test.js b/test/tokenizer/tokenize-test.js
index e8a11f44..e69a08c0 100644
--- a/test/tokenizer/tokenize-test.js
+++ b/test/tokenizer/tokenize-test.js
@@ -3335,9 +3335,9 @@ vows.describe(tokenize)
           'rule',
           [
             [
-              'a',
+              '}a',
               [
-                [1, 16, undefined]
+                [1, 15, undefined]
              ]
            ]
          ],
-- 
2.34.1
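
Note on the resulting behaviour, end to end. A minimal sketch via the public
API; the input/output pair is taken from the integration test above, while the
warning texts are only what the new messages in tokenize.js and tidy-rules.js
should produce (expectations, not captured output):

  var CleanCSS = require('clean-css');

  // A '}' in the middle of the input is now kept by the tokenizer, so it is
  // glued onto the next selector ('}p'), which tidy-rules then rejects as
  // invalid and drops together with its body.
  var output = new CleanCSS().minify('a{color:red}}p{width:auto}');

  console.log(output.styles);   // 'a{color:red}' (as asserted in the test)
  console.log(output.warnings); // expected: an "Unexpected '}'" warning from
                                // the tokenizer plus an "Invalid selector '}p'"
                                // warning from tidy-rules

A '}' at the very end of the input still disappears from the output: it is
pushed into the buffer too, but no rule can complete after it, so nothing is
emitted. That is the "ignored at the end, preserved in the middle" split
described in the commit message.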
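
Note on why hasInvalidCharacters branches on quotes instead of using a plain
indexOf: a close brace is legal inside a quoted attribute-selector value, so
only an unquoted '}' marks a selector as invalid. A standalone sketch of the
same scan (the function name is mine and the Marker constants are inlined;
plain indexOf covers the no-quotes fast path):

  function hasStrayCloseBrace(selector) {
    var isEscaped = false;
    var isQuote = false;
    var character;

    for (var i = 0, l = selector.length; i < l; i++) {
      character = selector[i];

      if (isEscaped) {
        // an escaped character neither toggles quote state nor counts as stray
      } else if (character == '\'' || character == '"') {
        isQuote = !isQuote;
      } else if (character == '}' && !isQuote) {
        return true;
      }

      isEscaped = character == '\\';
    }

    return false;
  }

  hasStrayCloseBrace('a[title="}"]'); // false - the brace is quoted,
                                      // so the selector is kept
  hasStrayCloseBrace('}p');           // true - stray brace, so tidyRules
                                      // warns and drops the selector

As in the patch, either kind of quote toggles a single isQuote flag, and a
backslash always flags the next character as escaped; both are deliberate
simplifications that are cheap and accurate enough for selector validation.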