Moves tokenizer code into lib/tokenizer.
author Jakub Pawlowicz <contact@jakubpawlowicz.com>
Tue, 21 Apr 2015 20:36:01 +0000 (21:36 +0100)
committer Jakub Pawlowicz <contact@jakubpawlowicz.com>
Tue, 21 Apr 2015 20:37:11 +0000 (21:37 +0100)
19 files changed:
History.md
lib/selectors/optimizer.js
lib/source-maps/track.js [moved from lib/utils/source-maps.js with 100% similarity]
lib/tokenizer/chunker.js [moved from lib/utils/chunker.js with 100% similarity]
lib/tokenizer/extract-properties.js [new file with mode: 0644]
lib/tokenizer/extract-selectors.js [new file with mode: 0644]
lib/tokenizer/tokenize.js [moved from lib/selectors/tokenizer.js with 94% similarity]
lib/utils/extractors.js [deleted file]
test/properties/longhand-overriding-test.js
test/properties/optimizer-test.js
test/properties/override-compacting-test.js
test/properties/shorthand-compacting-source-maps-test.js
test/properties/shorthand-compacting-test.js
test/selectors/extractor-test.js
test/selectors/optimizers/simple-test.js
test/selectors/reorderable-test.js
test/tokenizer/chunker-test.js [moved from test/utils/chunker-test.js with 95% similarity]
test/tokenizer/tokenizer-source-maps-test.js [moved from test/selectors/tokenizer-source-maps-test.js with 99% similarity]
test/tokenizer/tokenizer-test.js [moved from test/selectors/tokenizer-test.js with 99% similarity]

index ab15fbd..da124b9 100644 (file)
@@ -3,6 +3,8 @@
 
 * Cleans up url rebase code getting rid of unnecessary state.
 * Cleans up tokenizer code getting rid of unnecessary state.
+* Moves source maps tracker into lib/source-maps/track.
+* Moves tokenizer code into lib/tokenizer.
 * Moves URL rebasing & rewriting into lib/urls.
 * Fixed issue [#436](https://github.com/jakubpawlowicz/clean-css/issues/436) - refactors URI rewriting.
 
index e17d9a5..c804230 100644 (file)
@@ -1,4 +1,4 @@
-var tokenize = require('./tokenizer');
+var tokenize = require('../tokenizer/tokenize');
 var SimpleOptimizer = require('./optimizers/simple');
 var AdvancedOptimizer = require('./optimizers/advanced');
 var addOptimizationMetadata = require('./optimization-metadata');
diff --git a/lib/tokenizer/extract-properties.js b/lib/tokenizer/extract-properties.js
new file mode 100644 (file)
index 0000000..10ea3d8
--- /dev/null
@@ -0,0 +1,105 @@
+var Splitter = require('../utils/splitter');
+
+var COMMA = ',';
+var FORWARD_SLASH = '/';
+
+function selectorName(value) {
+  return value[0];
+}
+
+function extractProperties(string, selectors, context) {
+  var list = [];
+  var splitter = new Splitter(/[ ,\/]/);
+
+  if (typeof string != 'string')
+    return [];
+
+  if (string.indexOf('__ESCAPED_COMMENT_') > -1)
+    string = string.replace(/(__ESCAPED_COMMENT_(SPECIAL_)?CLEAN_CSS[^_]+?__)/g, ';$1;');
+
+  if (string.indexOf(')') > -1)
+    string = string.replace(/\)([^\s_;:,\)])/g, context.sourceMap ? ') __ESCAPED_COMMENT_CLEAN_CSS(0,-1)__ $1' : ') $1');
+
+  if (string.indexOf('ESCAPED_URL_CLEAN_CSS') > -1)
+    string = string.replace(/(ESCAPED_URL_CLEAN_CSS[^_]+?__)/g, context.sourceMap ? '$1 __ESCAPED_COMMENT_CLEAN_CSS(0,-1)__ ' : '$1 ');
+
+  var candidates = string.split(';');
+
+  for (var i = 0, l = candidates.length; i < l; i++) {
+    var candidate = candidates[i];
+    var firstColonAt = candidate.indexOf(':');
+
+    if (firstColonAt == -1) {
+      context.track(candidate);
+      if (candidate.indexOf('__ESCAPED_COMMENT_SPECIAL') > -1)
+        list.push(candidate);
+      continue;
+    }
+
+    if (candidate.indexOf('{') > 0) {
+      context.track(candidate);
+      continue;
+    }
+
+    var body = [];
+    var name = candidate.substring(0, firstColonAt);
+    body.push([name.trim()].concat(context.track(name, true)));
+    context.track(':');
+
+    var values = splitter.split(candidate.substring(firstColonAt + 1), true);
+
+    if (values.length == 1 && values[0] === '') {
+      context.warnings.push('Empty property \'' + name + '\' inside \'' + selectors.filter(selectorName).join(',') + '\' selector. Ignoring.');
+      continue;
+    }
+
+    for (var j = 0, m = values.length; j < m; j++) {
+      var value = values[j];
+      var trimmed = value.trim();
+
+      if (trimmed.length === 0)
+        continue;
+
+      var lastCharacter = trimmed[trimmed.length - 1];
+      var endsWithNonSpaceSeparator = trimmed.length > 1 && (lastCharacter == COMMA || lastCharacter == FORWARD_SLASH);
+
+      if (endsWithNonSpaceSeparator)
+        trimmed = trimmed.substring(0, trimmed.length - 1);
+
+      if (trimmed.indexOf('__ESCAPED_COMMENT_CLEAN_CSS(0,-') > -1) {
+        context.track(trimmed);
+        continue;
+      }
+
+      var pos = body.length - 1;
+      if (trimmed == 'important' && body[pos][0] == '!') {
+        context.track(trimmed);
+        body[pos - 1][0] += '!important';
+        body.pop();
+        continue;
+      }
+
+      if (trimmed == '!important' || (trimmed == 'important' && body[pos][0][body[pos][0].length - 1] == '!')) {
+        context.track(trimmed);
+        body[pos][0] += trimmed;
+        continue;
+      }
+
+      body.push([trimmed].concat(context.track(value, true)));
+
+      if (endsWithNonSpaceSeparator) {
+        body.push([lastCharacter]);
+        context.track(lastCharacter);
+      }
+    }
+
+    if (i < l - 1)
+      context.track(';');
+
+    list.push(body);
+  }
+
+  return list;
+}
+
+module.exports = extractProperties;
diff --git a/lib/tokenizer/extract-selectors.js b/lib/tokenizer/extract-selectors.js
new file mode 100644 (file)
index 0000000..cbdf367
--- /dev/null
@@ -0,0 +1,17 @@
+var Splitter = require('../utils/splitter');
+
+function extractSelectors(string, context) {
+  var list = [];
+  var metadata;
+  var selectors = new Splitter(',').split(string);
+
+  for (var i = 0, l = selectors.length; i < l; i++) {
+    metadata = context.track(selectors[i], true, i);
+    context.track(',');
+    list.push([selectors[i].trim()].concat(metadata));
+  }
+
+  return list;
+}
+
+module.exports = extractSelectors;
similarity index 94%
rename from lib/selectors/tokenizer.js
rename to lib/tokenizer/tokenize.js
index ef8a213..b779a9c 100644 (file)
@@ -1,6 +1,7 @@
-var Chunker = require('../utils/chunker');
-var Extract = require('../utils/extractors');
-var track = require('../utils/source-maps');
+var Chunker = require('./chunker');
+var extractProperties = require('./extract-properties');
+var extractSelectors = require('./extract-selectors');
+var track = require('../source-maps/track');
 
 var path = require('path');
 
@@ -175,7 +176,7 @@ function intoTokens(context) {
         newToken.push(intoTokens(context));
 
         if (typeof newToken[2] == 'string')
-          newToken[2] = Extract.properties(newToken[2], [[trimmedValue]], context);
+          newToken[2] = extractProperties(newToken[2], [[trimmedValue]], context);
 
         context.mode = oldMode;
         context.track('}');
@@ -213,13 +214,13 @@ function intoTokens(context) {
 
       context.cursor = nextEnd + 2;
     } else if (what == 'bodyStart') {
-      var selectors = Extract.selectors(chunk.substring(context.cursor, nextSpecial), context);
+      var selectors = extractSelectors(chunk.substring(context.cursor, nextSpecial), context);
 
       oldMode = context.mode;
       context.cursor = nextSpecial + 1;
       context.mode = 'body';
 
-      var body = Extract.properties(intoTokens(context), selectors, context);
+      var body = extractProperties(intoTokens(context), selectors, context);
 
       context.track('{');
       context.mode = oldMode;
diff --git a/lib/utils/extractors.js b/lib/utils/extractors.js
deleted file mode 100644 (file)
index e4b6cdc..0000000
+++ /dev/null
@@ -1,121 +0,0 @@
-var Splitter = require('./splitter');
-
-var COMMA = ',';
-var FORWARD_SLASH = '/';
-
-function selectorName(value) {
-  return value[0];
-}
-
-var Extractors = {
-  properties: function (string, selectors, context) {
-    var list = [];
-    var splitter = new Splitter(/[ ,\/]/);
-
-    if (typeof string != 'string')
-      return [];
-
-    if (string.indexOf('__ESCAPED_COMMENT_') > -1)
-      string = string.replace(/(__ESCAPED_COMMENT_(SPECIAL_)?CLEAN_CSS[^_]+?__)/g, ';$1;');
-
-    if (string.indexOf(')') > -1)
-      string = string.replace(/\)([^\s_;:,\)])/g, context.sourceMap ? ') __ESCAPED_COMMENT_CLEAN_CSS(0,-1)__ $1' : ') $1');
-
-    if (string.indexOf('ESCAPED_URL_CLEAN_CSS') > -1)
-      string = string.replace(/(ESCAPED_URL_CLEAN_CSS[^_]+?__)/g, context.sourceMap ? '$1 __ESCAPED_COMMENT_CLEAN_CSS(0,-1)__ ' : '$1 ');
-
-    var candidates = string.split(';');
-
-    for (var i = 0, l = candidates.length; i < l; i++) {
-      var candidate = candidates[i];
-      var firstColonAt = candidate.indexOf(':');
-
-      if (firstColonAt == -1) {
-        context.track(candidate);
-        if (candidate.indexOf('__ESCAPED_COMMENT_SPECIAL') > -1)
-          list.push(candidate);
-        continue;
-      }
-
-      if (candidate.indexOf('{') > 0) {
-        context.track(candidate);
-        continue;
-      }
-
-      var body = [];
-      var name = candidate.substring(0, firstColonAt);
-      body.push([name.trim()].concat(context.track(name, true)));
-      context.track(':');
-
-      var values = splitter.split(candidate.substring(firstColonAt + 1), true);
-
-      if (values.length == 1 && values[0] === '') {
-        context.warnings.push('Empty property \'' + name + '\' inside \'' + selectors.filter(selectorName).join(',') + '\' selector. Ignoring.');
-        continue;
-      }
-
-      for (var j = 0, m = values.length; j < m; j++) {
-        var value = values[j];
-        var trimmed = value.trim();
-
-        if (trimmed.length === 0)
-          continue;
-
-        var lastCharacter = trimmed[trimmed.length - 1];
-        var endsWithNonSpaceSeparator = trimmed.length > 1 && (lastCharacter == COMMA || lastCharacter == FORWARD_SLASH);
-
-        if (endsWithNonSpaceSeparator)
-          trimmed = trimmed.substring(0, trimmed.length - 1);
-
-        if (trimmed.indexOf('__ESCAPED_COMMENT_CLEAN_CSS(0,-') > -1) {
-          context.track(trimmed);
-          continue;
-        }
-
-        var pos = body.length - 1;
-        if (trimmed == 'important' && body[pos][0] == '!') {
-          context.track(trimmed);
-          body[pos - 1][0] += '!important';
-          body.pop();
-          continue;
-        }
-
-        if (trimmed == '!important' || (trimmed == 'important' && body[pos][0][body[pos][0].length - 1] == '!')) {
-          context.track(trimmed);
-          body[pos][0] += trimmed;
-          continue;
-        }
-
-        body.push([trimmed].concat(context.track(value, true)));
-
-        if (endsWithNonSpaceSeparator) {
-          body.push([lastCharacter]);
-          context.track(lastCharacter);
-        }
-      }
-
-      if (i < l - 1)
-        context.track(';');
-
-      list.push(body);
-    }
-
-    return list;
-  },
-
-  selectors: function (string, context) {
-    var list = [];
-    var metadata;
-    var selectors = new Splitter(',').split(string);
-
-    for (var i = 0, l = selectors.length; i < l; i++) {
-      metadata = context.track(selectors[i], true, i);
-      context.track(',');
-      list.push([selectors[i].trim()].concat(metadata));
-    }
-
-    return list;
-  }
-};
-
-module.exports = Extractors;
index cabc8c3..fc3d01f 100644 (file)
@@ -3,7 +3,7 @@ var assert = require('assert');
 
 var optimize = require('../../lib/properties/optimizer');
 
-var tokenize = require('../../lib/selectors/tokenizer');
+var tokenize = require('../../lib/tokenizer/tokenize');
 var SourceTracker = require('../../lib/utils/source-tracker');
 var Compatibility = require('../../lib/utils/compatibility');
 var Validator = require('../../lib/properties/validator');
index cb66aa2..e09c7fe 100644 (file)
@@ -3,7 +3,7 @@ var assert = require('assert');
 
 var optimize = require('../../lib/properties/optimizer');
 
-var tokenize = require('../../lib/selectors/tokenizer');
+var tokenize = require('../../lib/tokenizer/tokenize');
 var SourceTracker = require('../../lib/utils/source-tracker');
 var Compatibility = require('../../lib/utils/compatibility');
 var Validator = require('../../lib/properties/validator');
index bdbbef3..6266a29 100644 (file)
@@ -3,7 +3,7 @@ var assert = require('assert');
 
 var optimize = require('../../lib/properties/optimizer');
 
-var tokenize = require('../../lib/selectors/tokenizer');
+var tokenize = require('../../lib/tokenizer/tokenize');
 var SourceTracker = require('../../lib/utils/source-tracker');
 var Compatibility = require('../../lib/utils/compatibility');
 var Validator = require('../../lib/properties/validator');
index 28a6a98..341ec67 100644 (file)
@@ -3,7 +3,7 @@ var assert = require('assert');
 
 var optimize = require('../../lib/properties/optimizer');
 
-var tokenize = require('../../lib/selectors/tokenizer');
+var tokenize = require('../../lib/tokenizer/tokenize');
 var SourceTracker = require('../../lib/utils/source-tracker');
 var SourceReader = require('../../lib/utils/source-reader');
 var InputSourceMapTracker = require('../../lib/utils/input-source-map-tracker');
index 2225caa..52fa421 100644 (file)
@@ -3,7 +3,7 @@ var assert = require('assert');
 
 var optimize = require('../../lib/properties/optimizer');
 
-var tokenize = require('../../lib/selectors/tokenizer');
+var tokenize = require('../../lib/tokenizer/tokenize');
 var SourceTracker = require('../../lib/utils/source-tracker');
 var Compatibility = require('../../lib/utils/compatibility');
 var Validator = require('../../lib/properties/validator');
index 117b262..ab4eb39 100644 (file)
@@ -1,6 +1,6 @@
 var vows = require('vows');
 var assert = require('assert');
-var tokenize = require('../../lib/selectors/tokenizer');
+var tokenize = require('../../lib/tokenizer/tokenize');
 var extractor = require('../../lib/selectors/extractor');
 
 function buildToken(source) {
index 5eb062c..302c331 100644 (file)
@@ -1,7 +1,7 @@
 var vows = require('vows');
 var assert = require('assert');
 
-var tokenize = require('../../../lib/selectors/tokenizer');
+var tokenize = require('../../../lib/tokenizer/tokenize');
 var SimpleOptimizer = require('../../../lib/selectors/optimizers/simple');
 var Compatibility = require('../../../lib/utils/compatibility');
 var addOptimizationMetadata = require('../../../lib/selectors/optimization-metadata');
index 1ecdb7c..389a33d 100644 (file)
@@ -1,7 +1,7 @@
 var vows = require('vows');
 var assert = require('assert');
 
-var tokenize = require('../../lib/selectors/tokenizer');
+var tokenize = require('../../lib/tokenizer/tokenize');
 var extractProperties = require('../../lib/selectors/extractor');
 var canReorder = require('../../lib/selectors/reorderable').canReorder;
 var canReorderSingle = require('../../lib/selectors/reorderable').canReorderSingle;
similarity index 95%
rename from test/utils/chunker-test.js
rename to test/tokenizer/chunker-test.js
index 38966bb..3c4861d 100644 (file)
@@ -1,6 +1,6 @@
 var vows = require('vows');
 var assert = require('assert');
-var Chunker = require('../../lib/utils/chunker');
+var Chunker = require('../../lib/tokenizer/chunker');
 
 vows.describe(Chunker)
   .addBatch({
similarity index 99%
rename from test/selectors/tokenizer-source-maps-test.js
rename to test/tokenizer/tokenizer-source-maps-test.js
index 96f6681..97c2304 100644 (file)
@@ -1,6 +1,6 @@
 var vows = require('vows');
 var assert = require('assert');
-var tokenize = require('../../lib/selectors/tokenizer');
+var tokenize = require('../../lib/tokenizer/tokenize');
 var SourceTracker = require('../../lib/utils/source-tracker');
 var SourceReader = require('../../lib/utils/source-reader');
 var InputSourceMapTracker = require('../../lib/utils/input-source-map-tracker');
similarity index 99%
rename from test/selectors/tokenizer-test.js
rename to test/tokenizer/tokenizer-test.js
index decbe91..7a69f55 100644 (file)
@@ -1,6 +1,6 @@
 var vows = require('vows');
 var assert = require('assert');
-var tokenize = require('../../lib/selectors/tokenizer');
+var tokenize = require('../../lib/tokenizer/tokenize');
 var SourceTracker = require('../../lib/utils/source-tracker');
 
 function tokenizerContext(name, specs) {