From 244c81eb1d2a32ed57ecb591c994826a9cce33a9 Mon Sep 17 00:00:00 2001
From: Jakub Pawlowicz
Date: Sun, 1 Mar 2015 13:22:49 +0000
Subject: [PATCH] Fixes #396 - better input source maps tracking.

We do it in the tokenization step now instead of during stringification.
That aids compatibility, as we get the raw, not the processed, content.
---
 History.md                                   |  5 ++
 lib/properties/optimizer.js                  |  2 +-
 lib/properties/token.js                      |  2 +-
 lib/selectors/optimizers/simple.js           |  2 +-
 lib/selectors/source-map-stringifier.js      | 64 +++-----------------
 lib/selectors/tokenizer.js                   | 29 +++++++++
 lib/utils/extractors.js                      |  3 +-
 lib/utils/input-source-map-tracker.js        |  6 +-
 lib/utils/source-maps.js                     | 26 +++++++-
 test/properties/extractor-test.js            |  2 +-
 test/properties/reorderable-test.js          |  2 +-
 test/selectors/optimizer-test.js             |  1 +
 test/selectors/optimizers/simple-test.js     |  4 +-
 test/selectors/tokenizer-source-maps-test.js | 37 ++++++++++-
 test/selectors/tokenizer-test.js             |  2 +-
 15 files changed, 113 insertions(+), 74 deletions(-)

diff --git a/History.md b/History.md
index 69424f76..4aabe32d 100644
--- a/History.md
+++ b/History.md
@@ -1,3 +1,8 @@
+[3.2.0 / 2015-xx-xx](https://github.com/jakubpawlowicz/clean-css/compare/v3.1.1...HEAD)
+==================
+
+* Fixed issue [#396](https://github.com/jakubpawlowicz/clean-css/issues/396) - better input source maps tracking.
+
 [3.1.1 / 2015-02-27](https://github.com/jakubpawlowicz/clean-css/compare/v3.1.0...v3.1.1)
 ==================
 
diff --git a/lib/properties/optimizer.js b/lib/properties/optimizer.js
index 5e604cba..4ffe9166 100644
--- a/lib/properties/optimizer.js
+++ b/lib/properties/optimizer.js
@@ -237,7 +237,7 @@ module.exports = function Optimizer(options, context) {
       if (!eligibleForCompacting && processableInfo.implementedFor.test(tokens[i][0]))
         eligibleForCompacting = true;
 
-      // FIXME: the check should be gone with #396
+      // FIXME: the check should be gone with #407
       var property = !tokens[i][0] && tokens[i][1].indexOf('__ESCAPED_') === 0 ?
         tokens[i][1] :
         tokens[i][0] + ':' + tokens[i][1];
diff --git a/lib/properties/token.js b/lib/properties/token.js
index c5f8ad16..13c5b7bb 100644
--- a/lib/properties/token.js
+++ b/lib/properties/token.js
@@ -128,7 +128,7 @@ module.exports = (function() {
         continue;
       }
 
-      // FIXME: the check should be gone with #396
+      // FIXME: the check should be gone with #407
       var property = t.prop === '' && t.value.indexOf('__ESCAPED_') === 0 ?
         t.value :
         t.prop + ':' + t.value + (t.isImportant ? ' !important' : '');
diff --git a/lib/selectors/optimizers/simple.js b/lib/selectors/optimizers/simple.js
index af3acb62..e822be64 100644
--- a/lib/selectors/optimizers/simple.js
+++ b/lib/selectors/optimizers/simple.js
@@ -190,7 +190,7 @@ function reduce(body, options) {
   for (var i = 0, l = body.length; i < l; i++) {
     var token = body[i];
 
-    // FIXME: the check should be gone with #396
+    // FIXME: the check should be gone with #407
    if (token.value.indexOf('__ESCAPED_') === 0) {
      reduced.push(token);
      properties.push(token.value);
diff --git a/lib/selectors/source-map-stringifier.js b/lib/selectors/source-map-stringifier.js
index 35258a1b..367edf68 100644
--- a/lib/selectors/source-map-stringifier.js
+++ b/lib/selectors/source-map-stringifier.js
@@ -1,7 +1,3 @@
-var path = require('path');
-var fs = require('fs');
-var url = require('url');
-
 var SourceMapGenerator = require('source-map').SourceMapGenerator;
 var SourceMap = require('../utils/source-maps');
 
@@ -15,30 +11,9 @@ function Rebuilder(options, restoreCallback, inputMapTracker) {
   this.restore = restoreCallback;
   this.inputMapTracker = inputMapTracker;
   this.outputMap = new SourceMapGenerator();
-
-  if (options.root) {
-    this.rebaseTo = path.resolve(options.root);
-    this.resolvePath = this.rootPathResolver;
-  } else if (options.target) {
-    this.rebaseTo = path.resolve(process.cwd(), options.target);
-    if (!fs.existsSync(this.rebaseTo) || fs.statSync(this.rebaseTo).isFile())
-      this.rebaseTo = path.dirname(this.rebaseTo);
-    this.resolvePath = this.relativePathResolver;
-  }
 }
 
-Rebuilder.prototype.rootPathResolver = function (sourcePath) {
-  return sourcePath.replace(this.rebaseTo, '');
-};
-
-Rebuilder.prototype.relativePathResolver = function (sourcePath, sourceRelativeTo) {
-  if (sourceRelativeTo)
-    sourcePath = path.resolve(path.dirname(sourceRelativeTo), sourcePath);
-
-  return path.relative(this.rebaseTo, sourcePath);
-};
-
-Rebuilder.prototype.rebuildValue = function (list, separator, isSelector) {
+Rebuilder.prototype.rebuildValue = function (list, separator) {
   var escaped = 0;
 
   for (var i = 0, l = list.length; i < l; i++) {
@@ -51,19 +26,19 @@ Rebuilder.prototype.rebuildValue = function (list, separator, isSelector) {
       if (i === l - 1 && escaped > 0)
         this.output.splice(this.output.length - escaped - 1, 1);
     } else {
-      this.store(el, isSelector ? l : 0);
+      this.store(el);
       this.store(i < l - 1 ? separator : '');
       escaped = 0;
     }
   }
 };
 
-Rebuilder.prototype.store = function (token, allowNFallbacks) {
+Rebuilder.prototype.store = function (token) {
   var value = typeof token == 'string' ?
     token :
     token.value.indexOf('_') > -1 ? this.restore(token.value) : token.value;
 
-  this.track(value, token.metadata, allowNFallbacks);
+  this.track(value, token.metadata);
   this.output.push(value);
 };
 
@@ -93,7 +68,7 @@ Rebuilder.prototype.rebuildList = function (tokens, isFlatBlock) {
         this.store('}');
       }
     } else {
-      this.rebuildValue(token.value, ',', true);
+      this.rebuildValue(token.value, ',');
       this.store('{');
       this.rebuildValue(token.body, ';');
       this.store('}');
@@ -103,45 +78,26 @@
     }
   }
 };
 
-Rebuilder.prototype.track = function (value, metadata, allowNFallbacks) {
+Rebuilder.prototype.track = function (value, metadata) {
   if (metadata)
-    this.trackMetadata(metadata, value, allowNFallbacks);
+    this.trackMetadata(metadata);
 
   var parts = value.split('\n');
   this.line += parts.length - 1;
   this.column = parts.length > 1 ? 0 : (this.column + parts.pop().length);
 };
 
-Rebuilder.prototype.trackMetadata = function (metadata, value, allowNFallbacks) {
-  var original = this.inputMapTracker.isTracking(metadata.source) ?
-    this.inputMapTracker.originalPositionFor(metadata, value, allowNFallbacks) :
-    {};
-
+Rebuilder.prototype.trackMetadata = function (metadata) {
   this.outputMap.addMapping({
     generated: {
       line: this.line,
       column: this.column
     },
-    source: this.stylingSourceFor(original, metadata) || SourceMap.unknownSource,
-    original: {
-      line: original.line || metadata.original.line,
-      column: original.column || metadata.original.column
-    }
+    source: metadata.source || SourceMap.unknownSource,
+    original: metadata.original
   });
 };
 
-Rebuilder.prototype.stylingSourceFor = function (original, metadata) {
-  var source = original.source || metadata.source;
-
-  if (source && metadata.source && (/^https?:\/\//.test(metadata.source) || /^\/\//.test(metadata.source)) && source != metadata.source)
-    return url.resolve(metadata.source, source);
-  else if (source && this.resolvePath)
-    return this.resolvePath(source, metadata.source);
-  else
-    return source;
-};
-
-
 function SourceMapStringifier(options, restoreCallback, inputMapTracker) {
   this.rebuilder = new Rebuilder(options, restoreCallback, inputMapTracker);
 }
diff --git a/lib/selectors/tokenizer.js b/lib/selectors/tokenizer.js
index af80009b..39a0205a 100644
--- a/lib/selectors/tokenizer.js
+++ b/lib/selectors/tokenizer.js
@@ -2,6 +2,9 @@ var Chunker = require('../utils/chunker');
 var Extract = require('../utils/extractors');
 var SourceMaps = require('../utils/source-maps');
 
+var path = require('path');
+var fs = require('fs');
+
 var flatBlock = /(^@(font\-face|page|\-ms\-viewport|\-o\-viewport|viewport|counter\-style)|\\@.+?)/;
 
 function Tokenizer(minifyContext, addMetadata, addSourceMap) {
@@ -33,9 +36,35 @@ Tokenizer.prototype.toTokens = function (data) {
     source: undefined
   };
 
+  if (this.minifyContext.options.root)
+    context.resolvePath = absolutePathResolver(context);
+  else if (this.minifyContext.options.target)
+    context.resolvePath = relativePathResolver(context);
+
   return tokenize(context);
 };
 
+function absolutePathResolver(context) {
+  var rebaseTo = path.resolve(context.outer.options.root);
+
+  return function (_, sourcePath) {
+    return sourcePath.replace(rebaseTo, '');
+  };
+}
+
+function relativePathResolver(context) {
+  var rebaseTo = path.resolve(process.cwd(), context.outer.options.target);
+  if (!fs.existsSync(rebaseTo) || fs.statSync(rebaseTo).isFile())
+    rebaseTo = path.dirname(rebaseTo);
+
+  return function (relativeTo, sourcePath) {
+    if (relativeTo)
+      sourcePath = path.resolve(path.dirname(relativeTo), sourcePath);
+
+    return path.relative(rebaseTo, sourcePath);
+  };
+}
+
 function whatsNext(context) {
   var mode = context.mode;
   var chunk = context.chunk;
diff --git a/lib/utils/extractors.js b/lib/utils/extractors.js
index 0cc78700..f3d08041 100644
--- a/lib/utils/extractors.js
+++ b/lib/utils/extractors.js
@@ -116,7 +116,6 @@ var Extractors = {
     var list = [];
     var selectors = new Splitter(',').split(string);
     var addSourceMap = context.addSourceMap;
-
     for (var i = 0, l = selectors.length; i < l; i++) {
       var selector = selectors[i];
 
@@ -126,7 +125,7 @@ var Extractors = {
       tokenized.push(token);
 
       if (addSourceMap)
-        token.metadata = SourceMaps.saveAndTrack(selector, context, true);
+        token.metadata = SourceMaps.saveAndTrack(selector, context, true, i);
     }
 
     return {
diff --git a/lib/utils/input-source-map-tracker.js b/lib/utils/input-source-map-tracker.js
index d7bee48c..286b1091 100644
--- a/lib/utils/input-source-map-tracker.js
+++ b/lib/utils/input-source-map-tracker.js
@@ -20,7 +20,7 @@ function InputSourceMapStore(outerContext) {
   this.maps = {};
 }
 
-function fromString(self, data, whenDone) {
+function fromString(self, _, whenDone) {
   self.maps[undefined] = new SourceMapConsumer(self.options.sourceMap);
   return whenDone();
 }
@@ -108,10 +108,8 @@ function fetchMapFile(self, mapSource, context, done) {
 }
 
 function originalPositionIn(trackedSource, sourceInfo, token, allowNFallbacks) {
-  // FIXME: we should rather track original positions in tokenizer
-  // here it is a bit too late do do it reliably hance the hack
   var originalPosition;
-  var maxRange = token.replace(/[>\+~]/g, ' $1 ').length;
+  var maxRange = token.length;
   var position = {
     line: sourceInfo.line,
     column: sourceInfo.column + maxRange
diff --git a/lib/utils/source-maps.js b/lib/utils/source-maps.js
index 4b3f41df..ae06b08e 100644
--- a/lib/utils/source-maps.js
+++ b/lib/utils/source-maps.js
@@ -1,3 +1,5 @@
+var url = require('url');
+
 function trimLeft(value, context) {
   var withoutContent;
   var total;
@@ -18,26 +20,44 @@ function trimLeft(value, context) {
   return value.substring(shift).trimLeft();
 }
 
+function sourceFor(originalMetadata, contextMetadata, context) {
+  var source = originalMetadata.source || contextMetadata.source;
+
+  if (source && contextMetadata.source && (/^https?:\/\//.test(contextMetadata.source) || /^\/\//.test(contextMetadata.source)) && source != contextMetadata.source)
+    return url.resolve(contextMetadata.source, source);
+  else if (source && context.resolvePath)
+    return context.resolvePath(contextMetadata.source, source);
+  else
+    return source;
+}
+
 var SourceMaps = {
   unknownSource: '$stdin',
 
-  saveAndTrack: function (data, context, hasSuffix) {
+  saveAndTrack: function (data, context, hasSuffix, allowNFallbacks) {
     var trimmedValue = trimLeft(data, context);
-    var metadata = {
+    var contextMetadata = {
       original: {
         line: context.line,
         column: context.column
       },
       source: context.source
     };
+    var sourceMetadata = context.outer.inputSourceMapTracker.isTracking(contextMetadata.source) ?
+      context.outer.inputSourceMapTracker.originalPositionFor(contextMetadata, trimmedValue, allowNFallbacks || 0) :
+      {};
+
+    contextMetadata.original.line = sourceMetadata.line || contextMetadata.original.line;
+    contextMetadata.original.column = sourceMetadata.column || contextMetadata.original.column;
+    contextMetadata.source = sourceFor(sourceMetadata, contextMetadata, context);
 
     this.track(trimmedValue, context);
 
     if (hasSuffix)
      context.column++;
 
-    return metadata;
+    return contextMetadata;
   },
 
   suffix: function (context) {
diff --git a/test/properties/extractor-test.js b/test/properties/extractor-test.js
index 781a39a9..cd2db25d 100644
--- a/test/properties/extractor-test.js
+++ b/test/properties/extractor-test.js
@@ -4,7 +4,7 @@ var SelectorTokenizer = require('../../lib/selectors/tokenizer');
 var extractor = require('../../lib/properties/extractor');
 
 function buildToken(source) {
-  return new SelectorTokenizer(null, true, false).toTokens(source)[0];
+  return new SelectorTokenizer({ options: {} }, true, false).toTokens(source)[0];
 }
 
 vows.describe(extractor)
diff --git a/test/properties/reorderable-test.js b/test/properties/reorderable-test.js
index 3cf424ba..4450b250 100644
--- a/test/properties/reorderable-test.js
+++ b/test/properties/reorderable-test.js
@@ -7,7 +7,7 @@ var canReorder = require('../../lib/properties/reorderable').canReorder;
 var canReorderSingle = require('../../lib/properties/reorderable').canReorderSingle;
 
 function propertiesIn(source) {
-  return extractProperties(new SelectorTokenizer(null, true, false).toTokens(source)[0]);
+  return extractProperties(new SelectorTokenizer({ options: {} }, true, false).toTokens(source)[0]);
 }
 
 vows.describe(canReorder)
diff --git a/test/selectors/optimizer-test.js b/test/selectors/optimizer-test.js
index 3cb85b46..2fa55905 100644
--- a/test/selectors/optimizer-test.js
+++ b/test/selectors/optimizer-test.js
@@ -14,6 +14,7 @@ function optimizerContext(group, specs, options) {
   options.restructuring = true;
   options.compatibility = new Compatibility(options.compatibility).toOptions();
   var outerContext = {
+    options: {},
     sourceTracker: new SourceTracker()
   };
 
diff --git a/test/selectors/optimizers/simple-test.js b/test/selectors/optimizers/simple-test.js
index 648f8d5b..e6122ef4 100644
--- a/test/selectors/optimizers/simple-test.js
+++ b/test/selectors/optimizers/simple-test.js
@@ -12,7 +12,7 @@ function selectorContext(group, specs, options) {
 
   function optimized(selectors) {
     return function (source) {
-      var tokens = new Tokenizer().toTokens(source);
+      var tokens = new Tokenizer({ options: {} }).toTokens(source);
       new SimpleOptimizer(options).optimize(tokens);
 
       assert.deepEqual(tokens[0] ? tokens[0].value : null, selectors);
@@ -36,7 +36,7 @@ function propertyContext(group, specs, options) {
 
   function optimized(selectors) {
     return function (source) {
-      var tokens = new Tokenizer().toTokens(source);
+      var tokens = new Tokenizer({ options: {} }).toTokens(source);
       new SimpleOptimizer(options).optimize(tokens);
 
       var value = tokens[0].body.map(function (property) { return property.value; });
diff --git a/test/selectors/tokenizer-source-maps-test.js b/test/selectors/tokenizer-source-maps-test.js
index f8dd6c9d..b38ff754 100644
--- a/test/selectors/tokenizer-source-maps-test.js
+++ b/test/selectors/tokenizer-source-maps-test.js
@@ -2,6 +2,12 @@ var vows = require('vows');
 var assert = require('assert');
 var Tokenizer = require('../../lib/selectors/tokenizer');
 var SourceTracker = require('../../lib/utils/source-tracker');
+var InputSourceMapTracker = require('../../lib/utils/input-source-map-tracker');
+
+var fs = require('fs');
+var path = require('path');
+var inputMapPath = path.join('test', 'fixtures', 'source-maps', 'styles.css.map');
+var inputMap = fs.readFileSync(inputMapPath, 'utf-8');
 
 function sourceMapContext(group, specs) {
   var ctx = {};
@@ -15,11 +21,13 @@ function sourceMapContext(group, specs) {
   for (var test in specs) {
     for (var i = 0; i < specs[test][1].length; i++) {
       var target = specs[test][1][i];
+      var sourceTracker = new SourceTracker();
+      var inputSourceMapTracker = new InputSourceMapTracker({ options: { inliner: {} }, errors: {}, sourceTracker: sourceTracker });
       ctx[group + ' ' + test + ' - #' + (i + 1)] = {
         topic: typeof specs[test][0] == 'function' ?
           specs[test][0]() :
-          new Tokenizer({ sourceTracker: new SourceTracker() }, false, true).toTokens(specs[test][0]),
+          new Tokenizer({ sourceTracker: sourceTracker, inputSourceMapTracker: inputSourceMapTracker, options: {} }, false, true).toTokens(specs[test][0]),
         tokenized: tokenizedContext(target, i)
       };
     }
   }
@@ -436,7 +444,9 @@ vows.describe('source-maps/analyzer')
       'one': [
        function () {
          var tracker = new SourceTracker();
-          var tokenizer = new Tokenizer({ sourceTracker: tracker }, false, true);
+          var inputTracker = new InputSourceMapTracker({ options: { inliner: {} }, errors: {}, sourceTracker: tracker });
+          var tokenizer = new Tokenizer({ sourceTracker: tracker, inputSourceMapTracker: inputTracker, options: {} }, false, true);
+
          var data = tracker.store('one.css', 'a{}');
          return tokenizer.toTokens(data);
        },
@@ -449,7 +459,9 @@
      'two': [
        function () {
          var tracker = new SourceTracker();
-          var tokenizer = new Tokenizer({ sourceTracker: tracker }, false, true);
+          var inputTracker = new InputSourceMapTracker({ options: { inliner: {} }, errors: {}, sourceTracker: tracker });
+          var tokenizer = new Tokenizer({ sourceTracker: tracker, inputSourceMapTracker: inputTracker, options: {} }, false, true);
+
          var data1 = tracker.store('one.css', 'a{}');
          var data2 = tracker.store('two.css', '\na{color:red}');
          return tokenizer.toTokens(data1 + data2);
@@ -473,4 +485,23 @@
      ]
    })
  )
+  .addBatch(
+    sourceMapContext('input source maps', {
+      'one': [
+        function () {
+          var tracker = new SourceTracker();
+          var inputTracker = new InputSourceMapTracker({ options: { inliner: {}, sourceMap: inputMap, options: {} }, errors: {}, sourceTracker: tracker });
+          inputTracker.track('', function () {});
+
+          var tokenizer = new Tokenizer({ sourceTracker: tracker, inputSourceMapTracker: inputTracker, options: {} }, false, true);
+          return tokenizer.toTokens('div > a {\n  color:red;\n}');
+        },
+        [{
+          kind: 'selector',
+          value: [{ value: 'div > a ', metadata: { original: { line: 1, column: 4 }, source: 'styles.less' } }],
+          body: [{ value: 'color:red', metadata: { original: { line: 2, column: 2 }, source: 'styles.less' } }]
+        }]
+      ]
+    })
+  )
   .export(module);
diff --git a/test/selectors/tokenizer-test.js b/test/selectors/tokenizer-test.js
index 7c34885f..32d44806 100644
--- a/test/selectors/tokenizer-test.js
+++ b/test/selectors/tokenizer-test.js
@@ -8,7 +8,7 @@ function tokenizerContext(name, specs, addMetadata) {
 
   function tokenized(target) {
     return function (source) {
-      var tokenized = new Tokenizer({ sourceTracker: new SourceTracker(), warnings: [] }, addMetadata).toTokens(source);
+      var tokenized = new Tokenizer({ options: {}, sourceTracker: new SourceTracker(), warnings: [] }, addMetadata).toTokens(source);
       assert.deepEqual(target, tokenized);
     };
   }
-- 
2.34.1
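
Appendix (not part of the patch): a minimal, self-contained sketch of the path rebasing that the new relativePathResolver() in lib/selectors/tokenizer.js performs while input source map positions are tracked during tokenization. It assumes Node.js; the target and source paths below are hypothetical and used only for illustration.

    var fs = require('fs');
    var path = require('path');

    // Mirrors the shape of relativePathResolver() added by this patch:
    // rebase source paths onto the directory of the --target path.
    function relativePathResolver(target) {
      var rebaseTo = path.resolve(process.cwd(), target);
      if (!fs.existsSync(rebaseTo) || fs.statSync(rebaseTo).isFile())
        rebaseTo = path.dirname(rebaseTo);

      return function (relativeTo, sourcePath) {
        // A source taken from an input source map is relative to the stylesheet
        // it was referenced from, so resolve it against that stylesheet first.
        if (relativeTo)
          sourcePath = path.resolve(path.dirname(relativeTo), sourcePath);

        return path.relative(rebaseTo, sourcePath);
      };
    }

    // Hypothetical usage: a styles.less source referenced from
    // test/fixtures/source-maps/styles.css, rebased onto build/styles.min.css.
    var resolve = relativePathResolver('build/styles.min.css');
    console.log(resolve('test/fixtures/source-maps/styles.css', 'styles.less'));
    // -> '../test/fixtures/source-maps/styles.less' (on POSIX systems)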