From d00b7c1074eb147d9db41dbbd9c57dcfd837ed3f Mon Sep 17 00:00:00 2001 From: Jakub Pawlowicz Date: Tue, 21 Apr 2015 20:54:35 +0100 Subject: [PATCH] Cleans up tokenizer code getting rid of unnecessary state. --- History.md | 1 + lib/selectors/metadata.js | 0 lib/selectors/optimizer.js | 4 +- lib/selectors/tokenizer.js | 69 ++++++++++--------- lib/utils/extractors.js | 6 +- lib/utils/source-maps.js | 10 +-- test/properties/longhand-overriding-test.js | 6 +- test/properties/optimizer-test.js | 6 +- test/properties/override-compacting-test.js | 6 +- .../shorthand-compacting-source-maps-test.js | 8 +-- test/properties/shorthand-compacting-test.js | 6 +- test/selectors/extractor-test.js | 4 +- test/selectors/optimizers/simple-test.js | 6 +- test/selectors/reorderable-test.js | 4 +- test/selectors/tokenizer-source-maps-test.js | 40 +++++++---- test/selectors/tokenizer-test.js | 10 +-- 16 files changed, 102 insertions(+), 84 deletions(-) delete mode 100644 lib/selectors/metadata.js diff --git a/History.md b/History.md index 92fbe548..ab15fbd2 100644 --- a/History.md +++ b/History.md @@ -2,6 +2,7 @@ ================== * Cleans up url rebase code getting rid of unnecessary state. +* Cleans up tokenizer code getting rid of unnecessary state. * Moves URL rebasing & rewriting into lib/urls. * Fixed issue [#436](https://github.com/jakubpawlowicz/clean-css/issues/436) - refactors URI rewriting. diff --git a/lib/selectors/metadata.js b/lib/selectors/metadata.js deleted file mode 100644 index e69de29b..00000000 diff --git a/lib/selectors/optimizer.js b/lib/selectors/optimizer.js index 3cfc6f8e..e17d9a59 100644 --- a/lib/selectors/optimizer.js +++ b/lib/selectors/optimizer.js @@ -1,4 +1,4 @@ -var Tokenizer = require('./tokenizer'); +var tokenize = require('./tokenizer'); var SimpleOptimizer = require('./optimizers/simple'); var AdvancedOptimizer = require('./optimizers/advanced'); var addOptimizationMetadata = require('./optimization-metadata'); @@ -9,7 +9,7 @@ function SelectorsOptimizer(options, context) { } SelectorsOptimizer.prototype.process = function (data, stringify, restoreCallback, sourceMapTracker) { - var tokens = new Tokenizer(this.context, this.options.sourceMap).toTokens(data); + var tokens = tokenize(data, this.context); addOptimizationMetadata(tokens); diff --git a/lib/selectors/tokenizer.js b/lib/selectors/tokenizer.js index c83c06c6..ef8a2137 100644 --- a/lib/selectors/tokenizer.js +++ b/lib/selectors/tokenizer.js @@ -6,42 +6,43 @@ var path = require('path'); var flatBlock = /(^@(font\-face|page|\-ms\-viewport|\-o\-viewport|viewport|counter\-style)|\\@.+?)/; -function Tokenizer(minifyContext, sourceMaps) { - this.minifyContext = minifyContext; - this.sourceMaps = sourceMaps; -} - -Tokenizer.prototype.toTokens = function (data) { - data = data.replace(/\r\n/g, '\n'); - - var chunker = new Chunker(data, '}', 128); +function tokenize(data, outerContext) { + var chunker = new Chunker(normalize(data), '}', 128); if (chunker.isEmpty()) return []; var context = { + chunk: chunker.next(), + chunker: chunker, + column: 0, cursor: 0, + line: 1, mode: 'top', - chunker: chunker, - chunk: chunker.next(), - outer: this.minifyContext, - track: this.sourceMaps ? + resolvePath: outerContext.options.explicitTarget ? 
+ relativePathResolver(outerContext.options.root, outerContext.options.target) : + null, + source: undefined, + sourceMap: outerContext.options.sourceMap, + sourceMapInlineSources: outerContext.options.sourceMapInlineSources, + sourceMapTracker: outerContext.inputSourceMapTracker, + sourceReader: outerContext.sourceReader, + sourceTracker: outerContext.sourceTracker, + state: [], + track: outerContext.options.sourceMap ? function (data, snapshotMetadata, fallbacks) { return [[track(data, context, snapshotMetadata, fallbacks)]]; } : function () { return []; }, - sourceMaps: this.sourceMaps, - state: [], - line: 1, - column: 0, - source: undefined + warnings: outerContext.warnings }; - if (this.minifyContext.options.explicitTarget) - context.resolvePath = relativePathResolver(context); + return intoTokens(context); +} - return tokenize(context); -}; +function normalize(data) { + return data.replace(/\r\n/g, '\n'); +} -function relativePathResolver(context) { - var rebaseTo = path.relative(context.outer.options.root, context.outer.options.target); +function relativePathResolver(root, target) { + var rebaseTo = path.relative(root, target); return function (relativeTo, sourcePath) { return relativeTo != sourcePath ? @@ -98,7 +99,7 @@ function whatsNext(context) { return [closest, 'special']; } -function tokenize(context) { +function intoTokens(context) { var chunk = context.chunk; var tokenized = []; var newToken; @@ -110,7 +111,7 @@ function tokenize(context) { var whatsLeft = context.chunk.substring(context.cursor); if (whatsLeft.trim().length > 0) { if (context.mode == 'body') { - context.outer.warnings.push('Missing \'}\' after \'' + whatsLeft + '\'. Ignoring.'); + context.warnings.push('Missing \'}\' after \'' + whatsLeft + '\'. Ignoring.'); } else { tokenized.push(['text', [whatsLeft]]); } @@ -142,7 +143,7 @@ function tokenize(context) { var isSingle = firstSemicolonAt > -1 && (firstOpenBraceAt == -1 || firstSemicolonAt < firstOpenBraceAt); var isBroken = firstOpenBraceAt == -1 && firstSemicolonAt == -1; if (isBroken) { - context.outer.warnings.push('Broken declaration: \'' + chunk.substring(context.cursor) + '\'.'); + context.warnings.push('Broken declaration: \'' + chunk.substring(context.cursor) + '\'.'); context.cursor = chunk.length; } else if (isSingle) { nextEnd = chunk.indexOf(';', nextSpecial + 1); @@ -171,7 +172,7 @@ function tokenize(context) { newToken.push([trimmedValue].concat(context.track(value, true))); context.track('{'); - newToken.push(tokenize(context)); + newToken.push(intoTokens(context)); if (typeof newToken[2] == 'string') newToken[2] = Extract.properties(newToken[2], [[trimmedValue]], context); @@ -184,8 +185,8 @@ function tokenize(context) { } else if (what == 'escape') { nextEnd = chunk.indexOf('__', nextSpecial + 1); var escaped = chunk.substring(context.cursor, nextEnd + 2); - var isStartSourceMarker = !!context.outer.sourceTracker.nextStart(escaped); - var isEndSourceMarker = !!context.outer.sourceTracker.nextEnd(escaped); + var isStartSourceMarker = !!context.sourceTracker.nextStart(escaped); + var isEndSourceMarker = !!context.sourceTracker.nextEnd(escaped); if (isStartSourceMarker) { context.track(escaped); @@ -194,7 +195,7 @@ function tokenize(context) { line: context.line, column: context.column }); - context.source = context.outer.sourceTracker.nextStart(escaped).filename; + context.source = context.sourceTracker.nextStart(escaped).filename; context.line = 1; context.column = 0; } else if (isEndSourceMarker) { @@ -218,7 +219,7 @@ function 
tokenize(context) { context.cursor = nextSpecial + 1; context.mode = 'body'; - var body = Extract.properties(tokenize(context), selectors, context); + var body = Extract.properties(intoTokens(context), selectors, context); context.track('{'); context.mode = oldMode; @@ -236,7 +237,7 @@ function tokenize(context) { 'Unexpected \'}\' in \'' + chunk.substring(at - 20, at + 20) + '\'. Ignoring.' : 'Unexpected content: \'' + chunk.substring(at, nextSpecial + 1) + '\'. Ignoring.'; - context.outer.warnings.push(warning); + context.warnings.push(warning); context.cursor = nextSpecial + 1; continue; } @@ -255,4 +256,4 @@ function tokenize(context) { return tokenized; } -module.exports = Tokenizer; +module.exports = tokenize; diff --git a/lib/utils/extractors.js b/lib/utils/extractors.js index ad0529f2..e4b6cdc6 100644 --- a/lib/utils/extractors.js +++ b/lib/utils/extractors.js @@ -19,10 +19,10 @@ var Extractors = { string = string.replace(/(__ESCAPED_COMMENT_(SPECIAL_)?CLEAN_CSS[^_]+?__)/g, ';$1;'); if (string.indexOf(')') > -1) - string = string.replace(/\)([^\s_;:,\)])/g, context.sourceMaps ? ') __ESCAPED_COMMENT_CLEAN_CSS(0,-1)__ $1' : ') $1'); + string = string.replace(/\)([^\s_;:,\)])/g, context.sourceMap ? ') __ESCAPED_COMMENT_CLEAN_CSS(0,-1)__ $1' : ') $1'); if (string.indexOf('ESCAPED_URL_CLEAN_CSS') > -1) - string = string.replace(/(ESCAPED_URL_CLEAN_CSS[^_]+?__)/g, context.sourceMaps ? '$1 __ESCAPED_COMMENT_CLEAN_CSS(0,-1)__ ' : '$1 '); + string = string.replace(/(ESCAPED_URL_CLEAN_CSS[^_]+?__)/g, context.sourceMap ? '$1 __ESCAPED_COMMENT_CLEAN_CSS(0,-1)__ ' : '$1 '); var candidates = string.split(';'); @@ -50,7 +50,7 @@ var Extractors = { var values = splitter.split(candidate.substring(firstColonAt + 1), true); if (values.length == 1 && values[0] === '') { - context.outer.warnings.push('Empty property \'' + name + '\' inside \'' + selectors.filter(selectorName).join(',') + '\' selector. Ignoring.'); + context.warnings.push('Empty property \'' + name + '\' inside \'' + selectors.filter(selectorName).join(',') + '\' selector. Ignoring.'); continue; } diff --git a/lib/utils/source-maps.js b/lib/utils/source-maps.js index f9a7d319..735a58dc 100644 --- a/lib/utils/source-maps.js +++ b/lib/utils/source-maps.js @@ -50,8 +50,8 @@ function snapshot(data, context, fallbacks) { source: context.source }; var sourceContent = null; - var sourceMetadata = context.outer.inputSourceMapTracker.isTracking(metadata.source) ? - context.outer.inputSourceMapTracker.originalPositionFor(metadata, data, fallbacks || 0) : + var sourceMetadata = context.sourceMapTracker.isTracking(metadata.source) ? + context.sourceMapTracker.originalPositionFor(metadata, data, fallbacks || 0) : {}; metadata.line = sourceMetadata.line || metadata.line; @@ -60,11 +60,11 @@ function snapshot(data, context, fallbacks) { sourceMetadata.source : sourceFor(sourceMetadata, metadata, context); - if (context.outer.options.sourceMapInlineSources) { - var sourceMapSourcesContent = context.outer.inputSourceMapTracker.sourcesContentFor(context.source); + if (context.sourceMapInlineSources) { + var sourceMapSourcesContent = context.sourceMapTracker.sourcesContentFor(context.source); sourceContent = sourceMapSourcesContent && sourceMapSourcesContent[metadata.source] ? sourceMapSourcesContent : - context.outer.sourceReader.sourceAt(context.source); + context.sourceReader.sourceAt(context.source); } return sourceContent ? 
diff --git a/test/properties/longhand-overriding-test.js b/test/properties/longhand-overriding-test.js index 001311eb..cabc8c3a 100644 --- a/test/properties/longhand-overriding-test.js +++ b/test/properties/longhand-overriding-test.js @@ -3,18 +3,18 @@ var assert = require('assert'); var optimize = require('../../lib/properties/optimizer'); -var Tokenizer = require('../../lib/selectors/tokenizer'); +var tokenize = require('../../lib/selectors/tokenizer'); var SourceTracker = require('../../lib/utils/source-tracker'); var Compatibility = require('../../lib/utils/compatibility'); var Validator = require('../../lib/properties/validator'); var addOptimizationMetadata = require('../../lib/selectors/optimization-metadata'); function _optimize(source) { - var tokens = new Tokenizer({ + var tokens = tokenize(source, { options: {}, sourceTracker: new SourceTracker(), warnings: [] - }).toTokens(source); + }); addOptimizationMetadata(tokens); diff --git a/test/properties/optimizer-test.js b/test/properties/optimizer-test.js index fbff5b7f..cb66aa29 100644 --- a/test/properties/optimizer-test.js +++ b/test/properties/optimizer-test.js @@ -3,7 +3,7 @@ var assert = require('assert'); var optimize = require('../../lib/properties/optimizer'); -var Tokenizer = require('../../lib/selectors/tokenizer'); +var tokenize = require('../../lib/selectors/tokenizer'); var SourceTracker = require('../../lib/utils/source-tracker'); var Compatibility = require('../../lib/utils/compatibility'); var Validator = require('../../lib/properties/validator'); @@ -13,11 +13,11 @@ var compatibility = new Compatibility().toOptions(); var validator = new Validator(compatibility); function _optimize(source, mergeAdjacent, aggressiveMerging) { - var tokens = new Tokenizer({ + var tokens = tokenize(source, { options: {}, sourceTracker: new SourceTracker(), warnings: [] - }).toTokens(source); + }); addOptimizationMetadata(tokens); optimize(tokens[0][1], tokens[0][2], mergeAdjacent, true, { compatibility: compatibility, aggressiveMerging: aggressiveMerging }, validator); diff --git a/test/properties/override-compacting-test.js b/test/properties/override-compacting-test.js index 4ccb4156..bdbbef32 100644 --- a/test/properties/override-compacting-test.js +++ b/test/properties/override-compacting-test.js @@ -3,18 +3,18 @@ var assert = require('assert'); var optimize = require('../../lib/properties/optimizer'); -var Tokenizer = require('../../lib/selectors/tokenizer'); +var tokenize = require('../../lib/selectors/tokenizer'); var SourceTracker = require('../../lib/utils/source-tracker'); var Compatibility = require('../../lib/utils/compatibility'); var Validator = require('../../lib/properties/validator'); var addOptimizationMetadata = require('../../lib/selectors/optimization-metadata'); function _optimize(source, compatibility, aggressiveMerging) { - var tokens = new Tokenizer({ + var tokens = tokenize(source, { options: {}, sourceTracker: new SourceTracker(), warnings: [] - }).toTokens(source); + }); compatibility = new Compatibility(compatibility).toOptions(); var validator = new Validator(compatibility); diff --git a/test/properties/shorthand-compacting-source-maps-test.js b/test/properties/shorthand-compacting-source-maps-test.js index b55b8d7b..28a6a984 100644 --- a/test/properties/shorthand-compacting-source-maps-test.js +++ b/test/properties/shorthand-compacting-source-maps-test.js @@ -3,7 +3,7 @@ var assert = require('assert'); var optimize = require('../../lib/properties/optimizer'); -var Tokenizer = 
require('../../lib/selectors/tokenizer'); +var tokenize = require('../../lib/selectors/tokenizer'); var SourceTracker = require('../../lib/utils/source-tracker'); var SourceReader = require('../../lib/utils/source-reader'); var InputSourceMapTracker = require('../../lib/utils/input-source-map-tracker'); @@ -18,13 +18,13 @@ function _optimize(source) { errors: {}, sourceTracker: new SourceTracker() }); - var tokens = new Tokenizer({ - options: {}, + var tokens = tokenize(source, { + options: { sourceMap: true }, inputSourceMapTracker: inputSourceMapTracker, sourceReader: new SourceReader(), sourceTracker: new SourceTracker(), warnings: [] - }, true).toTokens(source); + }); var compatibility = new Compatibility().toOptions(); var validator = new Validator(compatibility); diff --git a/test/properties/shorthand-compacting-test.js b/test/properties/shorthand-compacting-test.js index b3bf0cbd..2225caa9 100644 --- a/test/properties/shorthand-compacting-test.js +++ b/test/properties/shorthand-compacting-test.js @@ -3,18 +3,18 @@ var assert = require('assert'); var optimize = require('../../lib/properties/optimizer'); -var Tokenizer = require('../../lib/selectors/tokenizer'); +var tokenize = require('../../lib/selectors/tokenizer'); var SourceTracker = require('../../lib/utils/source-tracker'); var Compatibility = require('../../lib/utils/compatibility'); var Validator = require('../../lib/properties/validator'); var addOptimizationMetadata = require('../../lib/selectors/optimization-metadata'); function _optimize(source) { - var tokens = new Tokenizer({ + var tokens = tokenize(source, { options: {}, sourceTracker: new SourceTracker(), warnings: [] - }).toTokens(source); + }); var compatibility = new Compatibility(compatibility).toOptions(); var validator = new Validator(compatibility); diff --git a/test/selectors/extractor-test.js b/test/selectors/extractor-test.js index b24c94c2..117b2621 100644 --- a/test/selectors/extractor-test.js +++ b/test/selectors/extractor-test.js @@ -1,10 +1,10 @@ var vows = require('vows'); var assert = require('assert'); -var SelectorTokenizer = require('../../lib/selectors/tokenizer'); +var tokenize = require('../../lib/selectors/tokenizer'); var extractor = require('../../lib/selectors/extractor'); function buildToken(source) { - return new SelectorTokenizer({ options: {} }).toTokens(source)[0]; + return tokenize(source, { options: {} })[0]; } vows.describe(extractor) diff --git a/test/selectors/optimizers/simple-test.js b/test/selectors/optimizers/simple-test.js index 5c38f8d5..5eb062ca 100644 --- a/test/selectors/optimizers/simple-test.js +++ b/test/selectors/optimizers/simple-test.js @@ -1,7 +1,7 @@ var vows = require('vows'); var assert = require('assert'); -var Tokenizer = require('../../../lib/selectors/tokenizer'); +var tokenize = require('../../../lib/selectors/tokenizer'); var SimpleOptimizer = require('../../../lib/selectors/optimizers/simple'); var Compatibility = require('../../../lib/utils/compatibility'); var addOptimizationMetadata = require('../../../lib/selectors/optimization-metadata'); @@ -13,7 +13,7 @@ function selectorContext(group, specs, options) { function optimized(selectors) { return function (source) { - var tokens = new Tokenizer({ options: {} }).toTokens(source); + var tokens = tokenize(source, { options: {} }); new SimpleOptimizer(options).optimize(tokens); assert.deepEqual(tokens[0] ? 
tokens[0][1] : null, selectors); @@ -37,7 +37,7 @@ function propertyContext(group, specs, options) { function optimized(selectors) { return function (source) { - var tokens = new Tokenizer({ options: {} }).toTokens(source); + var tokens = tokenize(source, { options: {} }); addOptimizationMetadata(tokens); new SimpleOptimizer(options).optimize(tokens); diff --git a/test/selectors/reorderable-test.js b/test/selectors/reorderable-test.js index 8edb3479..1ecdb7cd 100644 --- a/test/selectors/reorderable-test.js +++ b/test/selectors/reorderable-test.js @@ -1,13 +1,13 @@ var vows = require('vows'); var assert = require('assert'); -var SelectorTokenizer = require('../../lib/selectors/tokenizer'); +var tokenize = require('../../lib/selectors/tokenizer'); var extractProperties = require('../../lib/selectors/extractor'); var canReorder = require('../../lib/selectors/reorderable').canReorder; var canReorderSingle = require('../../lib/selectors/reorderable').canReorderSingle; function propertiesIn(source) { - return extractProperties(new SelectorTokenizer({ options: {} }, false).toTokens(source)[0]); + return extractProperties(tokenize(source, { options: {} })[0]); } vows.describe(canReorder) diff --git a/test/selectors/tokenizer-source-maps-test.js b/test/selectors/tokenizer-source-maps-test.js index fc41da74..96f66818 100644 --- a/test/selectors/tokenizer-source-maps-test.js +++ b/test/selectors/tokenizer-source-maps-test.js @@ -1,6 +1,6 @@ var vows = require('vows'); var assert = require('assert'); -var Tokenizer = require('../../lib/selectors/tokenizer'); +var tokenize = require('../../lib/selectors/tokenizer'); var SourceTracker = require('../../lib/utils/source-tracker'); var SourceReader = require('../../lib/utils/source-reader'); var InputSourceMapTracker = require('../../lib/utils/input-source-map-tracker'); @@ -21,12 +21,12 @@ function sourceMapContext(group, specs) { function toTokens(source) { return function () { - return new Tokenizer({ + return tokenize(source, { sourceTracker: sourceTracker, sourceReader: sourceReader, inputSourceMapTracker: inputSourceMapTracker, - options: {} - }, true).toTokens(source); + options: { sourceMap: true } + }); }; } @@ -471,10 +471,14 @@ vows.describe('source-maps/analyzer') var tracker = new SourceTracker(); var reader = new SourceReader(); var inputTracker = new InputSourceMapTracker({ options: { inliner: {} }, errors: {}, sourceTracker: tracker }); - var tokenizer = new Tokenizer({ sourceTracker: tracker, sourceReader: reader, inputSourceMapTracker: inputTracker, options: {} }, true); var data = tracker.store('one.css', 'a{}'); - return tokenizer.toTokens(data); + return tokenize(data, { + sourceTracker: tracker, + sourceReader: reader, + inputSourceMapTracker: inputTracker, + options: { sourceMap: true } + }); }, [ [ @@ -489,11 +493,15 @@ vows.describe('source-maps/analyzer') var tracker = new SourceTracker(); var reader = new SourceReader(); var inputTracker = new InputSourceMapTracker({ options: { inliner: {} }, errors: {}, sourceTracker: tracker }); - var tokenizer = new Tokenizer({ sourceTracker: tracker, sourceReader: reader, inputSourceMapTracker: inputTracker, options: {} }, true); var data1 = tracker.store('one.css', 'a{}'); var data2 = tracker.store('two.css', '\na{color:red}'); - return tokenizer.toTokens(data1 + data2); + return tokenize(data1 + data2, { + sourceTracker: tracker, + sourceReader: reader, + inputSourceMapTracker: inputTracker, + options: { sourceMap: true } + }); }, [ [ @@ -519,8 +527,12 @@ 
vows.describe('source-maps/analyzer') var inputTracker = new InputSourceMapTracker({ options: { inliner: {}, sourceMap: inputMap, options: {} }, errors: {}, sourceTracker: tracker }); inputTracker.track('', function () {}); - var tokenizer = new Tokenizer({ sourceTracker: tracker, sourceReader: reader, inputSourceMapTracker: inputTracker, options: {} }, true); - return tokenizer.toTokens('div > a {\n color: red;\n}'); + return tokenize('div > a {\n color: red;\n}', { + sourceTracker: tracker, + sourceReader: reader, + inputSourceMapTracker: inputTracker, + options: { sourceMap: true } + }); }, [ [ @@ -537,8 +549,12 @@ vows.describe('source-maps/analyzer') var inputTracker = new InputSourceMapTracker({ options: { inliner: {}, sourceMap: inputMap, options: {} }, errors: {}, sourceTracker: tracker }); inputTracker.track('', function () {}); - var tokenizer = new Tokenizer({ sourceTracker: tracker, sourceReader: reader, inputSourceMapTracker: inputTracker, options: {} }, true); - return tokenizer.toTokens('div > a {\n color: red red;\n}'); + return tokenize('div > a {\n color: red red;\n}', { + sourceTracker: tracker, + sourceReader: reader, + inputSourceMapTracker: inputTracker, + options: { sourceMap: true } + }); }, [ [ diff --git a/test/selectors/tokenizer-test.js b/test/selectors/tokenizer-test.js index 15fef2cf..decbe91a 100644 --- a/test/selectors/tokenizer-test.js +++ b/test/selectors/tokenizer-test.js @@ -1,6 +1,6 @@ var vows = require('vows'); var assert = require('assert'); -var Tokenizer = require('../../lib/selectors/tokenizer'); +var tokenize = require('../../lib/selectors/tokenizer'); var SourceTracker = require('../../lib/utils/source-tracker'); function tokenizerContext(name, specs) { @@ -8,13 +8,13 @@ function tokenizerContext(name, specs) { function tokenized(target) { return function (source) { - var tokenized = new Tokenizer({ + var tokens = tokenize(source, { options: {}, sourceTracker: new SourceTracker(), warnings: [] - }).toTokens(source); + }); - assert.deepEqual(tokenized, target); + assert.deepEqual(tokens, target); }; } @@ -28,7 +28,7 @@ function tokenizerContext(name, specs) { return ctx; } -vows.describe(Tokenizer) +vows.describe(tokenize) .addBatch( tokenizerContext('basic', { 'no content': [ -- 2.34.1