mirror of
https://github.com/mgerb/mywebsite
synced 2026-01-11 18:32:50 +00:00
updated bunch of file paths and changed the way posts are loaded
2  node_modules/transformers/.npmignore (generated, vendored, new file)
@@ -0,0 +1,2 @@
test/
.travis.yml
141  node_modules/transformers/README.md (generated, vendored, new file)
@@ -0,0 +1,141 @@
[Build status](https://travis-ci.org/ForbesLindesay/transformers)
# transformers

String/Data transformations for use in templating libraries, static site generators and web frameworks. This gathers the most useful transformations you can apply to text or data into one library with a consistent API. Transformations can be pretty much anything, but most are either compilers or templating engines.

## Supported transforms

To use each of these transforms you will also need to install the associated npm module for that transformer.

### Template engines

- [atpl](http://documentup.com/soywiz/atpl.js) - Compatible with twig templates
- [coffeecup](http://documentup.com/gradus/coffeecup) - pure coffee-script templates (fork of coffeekup)
- [dot](http://documentup.com/olado/doT) [(website)](https://github.com/Katahdin/dot-packer) - focused on speed
- [dust](http://documentup.com/akdubya/dustjs) [(website)](http://akdubya.github.com/dustjs/) - asynchronous templates
- [eco](http://documentup.com/sstephenson/eco) - Embedded CoffeeScript templates
- [ect](http://documentup.com/baryshev/ect) [(website)](http://ectjs.com/) - Embedded CoffeeScript templates
- [ejs](http://documentup.com/visionmedia/ejs) - Embedded JavaScript templates
- [haml](http://documentup.com/visionmedia/haml.js) [(website)](http://haml-lang.com/) - dry indented markup
- [haml-coffee](http://documentup.com/netzpirat/haml-coffee/) [(website)](http://haml-lang.com/) - haml with embedded CoffeeScript
- [handlebars](http://documentup.com/wycats/handlebars.js/) [(website)](http://handlebarsjs.com/) - extension of mustache templates
- [hogan](http://documentup.com/twitter/hogan.js) [(website)](http://twitter.github.com/hogan.js/) - Mustache templates
- [jade](http://documentup.com/visionmedia/jade) [(website)](http://jade-lang.com/) - robust, elegant, feature rich template engine
- [jazz](http://documentup.com/shinetech/jazz)
- [jqtpl](http://documentup.com/kof/jqtpl) [(website)](http://api.jquery.com/category/plugins/templates/) - extensible logic-less templates
- [JUST](http://documentup.com/baryshev/just) - EJS style template with some special syntax for layouts/partials etc.
- [liquor](http://documentup.com/chjj/liquor) - extended EJS with significant white space
- [mustache](http://documentup.com/janl/mustache.js) - logic-less templates
- [QEJS](http://documentup.com/jepso/QEJS) - Promises + EJS for async templating
- [swig](http://documentup.com/paularmstrong/swig) [(website)](http://paularmstrong.github.com/swig/) - Django-like templating engine
- [templayed](http://documentup.com/archan937/templayed.js/) [(website)](http://archan937.github.com/templayed.js/) - Mustache focused on performance
- [toffee](http://documentup.com/malgorithms/toffee) - templating language based on coffeescript
- [underscore](http://documentup.com/documentcloud/underscore) [(website)](http://documentcloud.github.com/underscore/)
- [walrus](http://documentup.com/jeremyruppel/walrus) - A bolder kind of mustache
- [whiskers](http://documentup.com/gsf/whiskers.js/tree/) - logic-less focused on readability

### Stylesheet Languages

- [less](http://documentup.com/cloudhead/less.js) [(website)](http://lesscss.org/) - LESS extends CSS with dynamic behavior such as variables, mixins, operations and functions.
- [stylus](http://documentup.com/learnboost/stylus) [(website)](http://learnboost.github.com/stylus/) - revolutionary CSS generator making braces optional
- [sass](http://documentup.com/visionmedia/sass.js) [(website)](http://sass-lang.com/) - Sassy CSS

### Minifiers

- [uglify-js](http://documentup.com/mishoo/UglifyJS2) - No need to install anything, just minifies/beautifies JavaScript
- [uglify-css](https://github.com/visionmedia/css) - No need to install anything, just minifies/beautifies CSS
- uglify-json - No need to install anything, just minifies/beautifies JSON

### Other

- cdata - No need to install anything, just wraps input as `<![CDATA[${INPUT_STRING}]]>` with the standard escape for `]]>` (`]]]]><![CDATA[>`); see the sketch after this list.
- cdata-js - as `cdata`, but with surrounding comments suitable for inclusion into an HTML/JavaScript `<script>` block: `//<![CDATA[\n${INPUT_STRING}\n//]]>`.
- cdata-css - as `cdata`, but with surrounding comments suitable for inclusion into an HTML/CSS `<style>` block: `/*<![CDATA[*/\n${INPUT_STRING}\n/*]]>*/`.
- verbatim - No need to install anything, acts as a verbatim passthrough `${INPUT_STRING}`
- [coffee-script](http://coffeescript.org/) - `npm install coffee-script`
- [cson](https://github.com/bevry/cson) - coffee-script based JSON format
- markdown - You can use `marked`, `supermarked`, `markdown-js` or `markdown`
- [component-js](http://documentup.com/component/component) [(website)](http://component.io) - `npm install component-builder` options: `{development: false}`
- [component-css](http://documentup.com/component/component) [(website)](http://component.io) - `npm install component-builder` options: `{development: false}`
- [html2jade](http://documentup.com/donpark/html2jade) [(website)](http://html2jade.aaron-powell.com/) - `npm install html2jade` - Converts HTML back into jade

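As an illustrative sketch (not part of the original list), the dependency-free `cdata` wrappers described above can be used like this:

```javascript
// Sketch of the built-in cdata transformers; they need no extra npm install.
var transformers = require('transformers');

transformers.cdata.renderSync('if (a < b) { run(); }');
//=> '<![CDATA[if (a < b) { run(); }]]>'

transformers['cdata-js'].renderSync('var x = 1;');
//=> '//<![CDATA[\nvar x = 1;\n//]]>'
```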

Pull requests to add more transforms will always be accepted providing they are open-source, come with unit tests, and don't cause any of the tests to fail.

## API

The exported object `transformers` is a collection of named transformers. To access an individual transformer just do:

```javascript
var transformer = require('transformers')['transformer-name']
```

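As a minimal sketch (assuming only that `transformers` itself is installed; the input string is invented for illustration), you can then call the transformer directly:

```javascript
// Look up a bundled transformer by name and use it synchronously.
// uglify-js ships with transformers, so no extra engine install is needed.
var uglify = require('transformers')['uglify-js'];

var js = 'var answer = 40 + 2;   console.log( answer );';
console.log(uglify.renderSync(js, {})); // minified JavaScript
console.log(uglify.outputFormat);       // 'js'
```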

### Transformer

The following options are given special meaning by `transformers`:

- `filename` is set by transformers automatically if using the `renderFile` APIs. It is used if `cache` is enabled.
- `cache` if true, the template function will be cached where possible (templates are still updated if you provide new options, so this can be used in most live applications).
- `sudoSync` used internally to put some asynchronous transformers into "sudo synchronous" mode. Don't touch this.
- `minify` if set to true on a transformer that isn't a minifier, it will cause the output to be minified, e.g. `coffeeScript.renderSync(str, {minify: true})` will result in minified JavaScript (see the sketch after this list).

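A sketch of how these options might be combined, assuming the `coffee-script` engine is installed and that `./app.coffee` exists (both are illustrative):

```javascript
// renderFile sets options.filename automatically, which `cache` then uses;
// minify: true minifies the compiled output because coffee-script is not a minifier.
var coffee = require('transformers')['coffee-script'];

coffee.renderFile('./app.coffee', {cache: true, minify: true}, function (err, js) {
  if (err) throw err;
  console.log(js); // compiled, minified JavaScript
});
```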

#### Transformer.engines

Returns an array of engines that can be used to power this transformer. The first of these that's installed will be used for the transformation.

To enable a transformation just take `[engine] = Transformer.engines[0]` and then do `npm install [engine]`. If `[engine]` is `.` there is no need to install an engine from npm to use the transformer.

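For example (illustrative only; the exact engine list depends on the transformer):

```javascript
// Inspect which engines can power the jade transformer.
var transformers = require('transformers');

console.log(transformers.jade.engines); // e.g. ['jade', 'then-jade']
// The first installed entry is used, so `npm install jade` enables this transformer.
```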

#### Transformer.render(str, options, cb)

Transform the string `str` using the `Transformer` with the provided options and call the callback `cb(err, res)`.

If no `cb` is provided, this method returns a [promises/A+](http://promises-aplus.github.com/promises-spec/) promise.

#### Transformer.renderSync(str, options)

Synchronous version of `Transformer.render`.

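A sketch of the two asynchronous call styles described above, using the markdown transformer and assuming one of its engines (such as `marked`) is installed:

```javascript
var markdown = require('transformers').markdown;

// Callback style
markdown.render('# hello', {}, function (err, html) {
  if (err) throw err;
  console.log(html);
});

// Promise style (no callback supplied)
markdown.render('*hello*', {}).then(function (html) {
  console.log(html);
}, function (err) {
  console.error(err);
});
```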

#### Transformer.renderFile(filename, options, cb)

Reads the file at `filename` into `str`, sets `options.filename = filename`, then calls `Transformer.render(str, options, cb)`.

If no `cb` is provided, this method returns a [promises/A+](http://promises-aplus.github.com/promises-spec/) promise.

#### Transformer.renderFileSync(filename, options)

Synchronous version of `Transformer.renderFile`.

#### Transformer.outputFormat

A string, one of:

- `'xml'`
- `'css'`
- `'js'`
- `'json'`
- `'text'`

Adding to this list will **not** result in a major version change, so you should handle unexpected types gracefully (I'd suggest defaulting to `'text'`).

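A sketch of that advice, choosing an HTML wrapper from `outputFormat` and treating anything unrecognised as plain text:

```javascript
function wrapOutput(transformer, rendered) {
  switch (transformer.outputFormat) {
    case 'js':  return '<script>' + rendered + '</script>';
    case 'css': return '<style>' + rendered + '</style>';
    default:    return rendered; // 'xml', 'json', 'text' and any future formats
  }
}
```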

#### Transformer.sync

`true` if the transformer can be used synchronously, `false` otherwise.

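A sketch of using this flag to pick a call style (the `dust` transformer is async-only, so its engine would need to be installed for this to run):

```javascript
var dust = require('transformers').dust;
var locals = {name: 'world'};

if (dust.sync) {
  console.log(dust.renderSync('Hello {name}!', locals));
} else {
  dust.render('Hello {name}!', locals).then(console.log, console.error);
}
```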

## Libraries that don't work synchronously

The following transformations will always throw an exception if you attempt to run them synchronously:

1. dust
2. qejs
3. html2jade

The following transformations sometimes throw an exception when run synchronously; typically they only throw if you are doing something like including another file. If you are not doing the things that cause them to fail, they are consistently safe to use synchronously.

- jade (only when using `then-jade` instead of `jade`)
- less (when `@import` is used with a url instead of a filename)
- jazz (when one of the functions passed as locals is asynchronous)

The following libraries look like they might sometimes throw exceptions when used synchronously (if you read the source), but they never actually do so:

- just
- ect
- stylus
77  node_modules/transformers/history.md (generated, vendored, new file)
@@ -0,0 +1,77 @@
## 2.1.0 - 2013-07-13

- **verbatim**
- **cdata-js**
- **cdata-css**
- fix escaping in **cdata**
- fix **less** errors

## 2.0.1 - 2013-04-22

- Fix global leak of `exportscoffeeScript` (test still fails because `jade` requires an out of date version of `transformers`)

## 2.0.0 - 2013-03-31

- Add `minify` support to all transformers
- Bundle minifiers in package

## 1.8.3 - 2013-03-19

- Update promise dependency

## 1.8.2 - 2013-02-10

- Support `sourceURLs` in **component**

## 1.8.1 - 2013-02-10

- Add travis-ci
- FIX **toffee** support which was broken by their latest update
- FIX lookup paths for **component** weren't set, so you couldn't build components with dependencies

## 1.8.0 - 2013-01-30

- **highlight** (needs tests)

## 1.7.0 - 2013-01-30

- **component-js**
- **component-css**
- **html2jade** - must be `v0.0.7` because of a bug in later versions
- Much more extensive tests

## 1.6.0 - 2013-01-29

- **uglify-css**
- **uglify-json**
- Rename **uglify** to **uglify-js**

## 1.5.0 - 2013-01-27

- **dot**

## 1.4.0 - 2013-01-25

- Support sync `@import` statements in **less**
- Report real errors from **less** (rather than objects)

## 1.3.0 - 2013-01-25

- **templayed**
- **plates**

## 1.2.1 - 2013-01-09

- make **markdown** work when using **markdown** as the engine (**marked** is the recommended engine).

## 1.2.0 - 2013-01-09

- **js** (pass through)
- **css** (pass through)
- rename **coffee** to **coffee-script**, keeping **coffee** as an alias

## 1.1.0 - 2013-01-08

- **coffeecup**
- **cson**
- FIX: disabling **dust** cache
162  node_modules/transformers/lib/shared.js (generated, vendored, new file)
@@ -0,0 +1,162 @@
var Promise = require('promise');
var fs = require('fs');
var path = require('path');
var normalize = path.normalize;


Promise.prototype.nodeify = function (cb) {
  if (typeof cb === 'function') {
    this.then(function (res) { process.nextTick(function () { cb(null, res); }); },
              function (err) { process.nextTick(function () { cb(err); }); });
    return undefined;
  } else {
    return this;
  }
}

var minifiers = {};

module.exports = Transformer;
function Transformer(obj) {
  this.name = obj.name;
  this.engines = obj.engines;
  this.isBinary = obj.isBinary || false;
  this.isMinifier = obj.isMinifier || false;
  this.outputFormat = obj.outputFormat;
  this._cache = {};
  if (typeof obj.async === 'function') {
    this._renderAsync = obj.async;
    this.sudoSync = obj.sudoSync || false;
  }
  if (typeof obj.sync === 'function') {
    this._renderSync = obj.sync;
    this.sync = true;
  } else {
    this.sync = obj.sudoSync || false;
  }

  if (this.isMinifier)
    minifiers[this.outputFormat] = this;
  else {
    var minifier = minifiers[this.outputFormat];
    if (minifier) {
      this.minify = function(str, options) {
        if (options && options.minify)
          return minifier.renderSync(str, typeof options.minify === 'object' && options.minify || {});
        return str;
      }
    }
  }
}

Transformer.prototype.cache = function (options, data) {
  if (options.cache && options.filename) {
    if (data) return this.cache[options.filename] = data;
    else return this.cache[options.filename];
  } else {
    return data;
  }
};
Transformer.prototype.loadModule = function () {
  if (this.engine) return this.engine;
  for (var i = 0; i < this.engines.length; i++) {
    try {
      var res = this.engines[i] === '.' ? null : (this.engine = require(this.engines[i]));
      this.engineName = this.engines[i];
      return res;
    } catch (ex) {
      if (this.engines.length === 1) {
        throw ex;
      }
    }
  }
  throw new Error('In order to apply the transform ' + this.name + ' you must install one of ' + this.engines.map(function (e) { return '"' + e + '"'; }).join());
};
Transformer.prototype.minify = function(str, options) {
  return str;
}
Transformer.prototype.renderSync = function (str, options) {
  options = options || {};
  options = clone(options);
  this.loadModule();
  if (this._renderSync) {
    return this.minify(this._renderSync((this.isBinary ? str : fixString(str)), options), options);
  } else if (this.sudoSync) {
    options.sudoSync = true;
    var res, err;
    this._renderAsync((this.isBinary ? str : fixString(str)), options, function (e, val) {
      if (e) err = e;
      else res = val;
    });
    if (err) throw err;
    else if (res != undefined) return this.minify(res, options);
    else if (typeof this.sudoSync === 'string') throw new Error(this.sudoSync.replace(/FILENAME/g, options.filename || ''));
    else throw new Error('There was a problem transforming ' + (options.filename || '') + ' syncronously using ' + this.name);
  } else {
    throw new Error(this.name + ' does not support transforming syncronously.');
  }
};
Transformer.prototype.render = function (str, options, cb) {
  options = options || {};
  var self = this;
  return new Promise(function (resolve, reject) {
    self.loadModule();
    if (self._renderAsync) {
      self._renderAsync((self.isBinary ? str : fixString(str)), clone(options), function (err, val) {
        if (err) reject(err);
        else resolve(self.minify(val, options));
      })
    } else {
      resolve(self.renderSync(str, options));
    }
  })
  .nodeify(cb);
};
Transformer.prototype.renderFile = function (path, options, cb) {
  options = options || {};
  var self = this;
  return new Promise(function (resolve, reject) {
    options.filename = (path = normalize(path));
    if (self._cache[path])
      resolve(null);
    else
      fs.readFile(path, function (err, data) {
        if (err) reject(err);
        else resolve(data);
      })
  })
  .then(function (str) {
    return self.render(str, options);
  })
  .nodeify(cb);
};
Transformer.prototype.renderFileSync = function (path, options) {
  options = options || {};
  options.filename = (path = normalize(path));
  return this.renderSync((this._cache[path] ? null : fs.readFileSync(path)), options);
};
function fixString(str) {
  if (str == null) return str;
  //convert buffer to string
  str = str.toString();
  // Strip UTF-8 BOM if it exists
  str = (0xFEFF == str.charCodeAt(0)
    ? str.substring(1)
    : str);
  //remove `\r` added by windows
  return str.replace(/\r/g, '');
}

function clone(obj) {
  if (Array.isArray(obj)) {
    return obj.map(clone);
  } else if (obj && typeof obj === 'object') {
    var res = {};
    for (var key in obj) {
      res[key] = clone(obj[key]);
    }
    return res;
  } else {
    return obj;
  }
}
604  node_modules/transformers/lib/transformers.js (generated, vendored, new file)
@@ -0,0 +1,604 @@
|
||||
var dirname = require('path').dirname;
|
||||
var Transformer = require('./shared');
|
||||
|
||||
/**
|
||||
* minifiers must be first in order to be incorporated inside instances of respective output formats
|
||||
*/
|
||||
var uglifyJS = require('uglify-js');
|
||||
exports.uglify = exports.uglifyJS = exports['uglify-js'] = new Transformer({
|
||||
name: 'uglify-js',
|
||||
engines: ['.'],
|
||||
outputFormat: 'js',
|
||||
isMinifier: true,
|
||||
sync: function (str, options) {
|
||||
options.fromString = true;
|
||||
return this.cache(options) || this.cache(options, uglifyJS.minify(str, options).code);
|
||||
}
|
||||
});
|
||||
var uglifyCSS = require('css');
|
||||
exports.uglifyCSS = exports['uglify-css'] = new Transformer({
|
||||
name: 'uglify-css',
|
||||
engines: ['.'],
|
||||
outputFormat: 'css',
|
||||
isMinifier: true,
|
||||
sync: function (str, options) {
|
||||
options.compress = options.compress != false && options.beautify != true;
|
||||
return this.cache(options) || this.cache(options, uglifyCSS.stringify(uglifyCSS.parse(str), options));
|
||||
}
|
||||
});
|
||||
|
||||
exports.uglifyJSON = exports['uglify-json'] = new Transformer({
|
||||
name: 'uglify-json',
|
||||
engines: ['.'],
|
||||
outputFormat: 'json',
|
||||
isMinifier: true,
|
||||
sync: function (str, options) {
|
||||
return JSON.stringify(JSON.parse(str), null, options.beautify);
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
/**
|
||||
* Syncronous Templating Languages
|
||||
*/
|
||||
|
||||
function sync(str, options) {
|
||||
var tmpl = this.cache(options) || this.cache(options, this.engine.compile(str, options));
|
||||
return tmpl(options);
|
||||
}
|
||||
|
||||
exports.swig = new Transformer({
|
||||
name: 'swig',
|
||||
engines: ['swig'],
|
||||
outputFormat: 'xml',
|
||||
sync: sync
|
||||
});
|
||||
|
||||
exports.atpl = new Transformer({
|
||||
name: 'atpl',
|
||||
engines: ['atpl'],
|
||||
outputFormat: 'xml',
|
||||
sync: function sync(str, options) {
|
||||
var tmpl = this.cache(options);
|
||||
if (!tmpl) {
|
||||
var cInfo = {cache: options.cache, filename: options.filename};
|
||||
if (options.filename) {
|
||||
delete options.filename; //atpl can't handle absolute windows file paths properly
|
||||
}
|
||||
tmpl = this.cache(cInfo, this.engine.compile(str, options));
|
||||
}
|
||||
return tmpl(options);
|
||||
}
|
||||
});
|
||||
|
||||
exports.dot = new Transformer({
|
||||
name: 'dot',
|
||||
engines: ['dot'],
|
||||
outputFormat: 'xml',
|
||||
sync: function sync(str, options) {
|
||||
var tmpl = this.cache(options) || this.cache(options, this.engine.template(str));
|
||||
return tmpl(options);
|
||||
}
|
||||
});
|
||||
|
||||
exports.liquor = new Transformer({
|
||||
name: 'liquor',
|
||||
engines: ['liquor'],
|
||||
outputFormat: 'xml',
|
||||
sync: sync
|
||||
});
|
||||
|
||||
exports.ejs = new Transformer({
|
||||
name: 'ejs',
|
||||
engines: ['ejs'],
|
||||
outputFormat: 'xml',
|
||||
sync: sync
|
||||
});
|
||||
|
||||
exports.eco = new Transformer({
|
||||
name: 'eco',
|
||||
engines: ['eco'],
|
||||
outputFormat: 'xml',
|
||||
sync: sync//N.B. eco's internal this.cache isn't quite right but this bypasses it
|
||||
});
|
||||
|
||||
exports.jqtpl = new Transformer({
|
||||
name: 'jqtpl',
|
||||
engines: ['jqtpl'],
|
||||
outputFormat: 'xml',
|
||||
sync: function (str, options) {
|
||||
var engine = this.engine;
|
||||
var key = (options.cache && options.filename) ? options.filename : '@';
|
||||
engine.compile(str, key);
|
||||
var res = this.engine.render(key, options);
|
||||
if (!(options.cache && options.filename)) {
|
||||
delete engine.cache[key];
|
||||
}
|
||||
this.cache(options, true); // caching handled internally
|
||||
return res;
|
||||
}
|
||||
});
|
||||
|
||||
exports.haml = new Transformer({
|
||||
name: 'haml',
|
||||
engines: ['hamljs'],
|
||||
outputFormat: 'xml',
|
||||
sync: sync
|
||||
});
|
||||
|
||||
exports['haml-coffee'] = new Transformer({
|
||||
name: 'haml-coffee',
|
||||
engines: ['haml-coffee'],
|
||||
outputFormat: 'xml',
|
||||
sync: sync
|
||||
});
|
||||
|
||||
exports.whiskers = new Transformer({
|
||||
name: 'whiskers',
|
||||
engines: ['whiskers'],
|
||||
outputFormat: 'xml',
|
||||
sync: sync
|
||||
});
|
||||
|
||||
exports.hogan = new Transformer({
|
||||
name: 'hogan',
|
||||
engines: ['hogan.js'],
|
||||
outputFormat: 'xml',
|
||||
sync: function(str, options){
|
||||
var tmpl = this.cache(options) || this.cache(options, this.engine.compile(str, options));
|
||||
return tmpl.render(options, options.partials);
|
||||
}
|
||||
});
|
||||
|
||||
exports.handlebars = new Transformer({
|
||||
name: 'handlebars',
|
||||
engines: ['handlebars'],
|
||||
outputFormat: 'xml',
|
||||
sync: function(str, options){
|
||||
for (var partial in options.partials) {
|
||||
this.engine.registerPartial(partial, options.partials[partial]);
|
||||
}
|
||||
var tmpl = this.cache(options) || this.cache(options, this.engine.compile(str, options));
|
||||
return tmpl(options);
|
||||
}
|
||||
});
|
||||
|
||||
exports.underscore = new Transformer({
|
||||
name: 'underscore',
|
||||
engines: ['underscore'],
|
||||
outputFormat: 'xml',
|
||||
sync: function(str, options){
|
||||
var tmpl = this.cache(options) || this.cache(options, this.engine.template(str));
|
||||
return tmpl(options);
|
||||
}
|
||||
});
|
||||
|
||||
exports.walrus = new Transformer({
|
||||
name: 'walrus',
|
||||
engines: ['walrus'],
|
||||
outputFormat: 'xml',
|
||||
sync: function(str, options){
|
||||
var tmpl = this.cache(options) || this.cache(options, this.engine.parse(str));
|
||||
return tmpl.compile(options);
|
||||
}
|
||||
});
|
||||
|
||||
exports.mustache = new Transformer({
|
||||
name: 'mustache',
|
||||
engines: ['mustache'],
|
||||
outputFormat: 'xml',
|
||||
sync: function(str, options){
|
||||
var tmpl = this.cache(options) || this.cache(options, this.engine.compile(str));
|
||||
return tmpl(options, options.partials);
|
||||
}
|
||||
});
|
||||
|
||||
exports.templayed = new Transformer({
|
||||
name: 'templayed',
|
||||
engines: ['templayed'],
|
||||
outputFormat: 'xml',
|
||||
sync: function(str, options){
|
||||
var tmpl = this.cache(options) || this.cache(options, this.engine(str));
|
||||
return tmpl(options);
|
||||
}
|
||||
});
|
||||
|
||||
exports.plates = new Transformer({
|
||||
name: 'plates',
|
||||
engines: ['plates'],
|
||||
outputFormat: 'xml',
|
||||
sync: function(str, options){
|
||||
str = this.cache(options) || this.cache(options, str);
|
||||
return this.engine.bind(str, options, options.map);
|
||||
}
|
||||
});
|
||||
|
||||
exports.mote = new Transformer({
|
||||
name: 'mote',
|
||||
engines: ['mote'],
|
||||
outputFormat: 'xml',
|
||||
sync: sync
|
||||
});
|
||||
|
||||
exports.toffee = new Transformer({
|
||||
name: 'toffee',
|
||||
engines: ['toffee'],
|
||||
outputFormat: 'xml',
|
||||
sync: function (str, options) {
|
||||
var View = this.engine.view;
|
||||
var v = this.cache(options) || this.cache(options, new View(str, options));
|
||||
var res = v.run(options, require('vm').createContext({}));
|
||||
if (res[0]) throw res[0];
|
||||
else return res[1];
|
||||
}
|
||||
});
|
||||
|
||||
exports.coffeekup = exports.coffeecup = new Transformer({
|
||||
name: 'coffeecup',
|
||||
engines: ['coffeecup', 'coffeekup'],
|
||||
outputFormat: 'xml',
|
||||
sync: function (str, options) {
|
||||
var compiled = this.cache(options) || this.cache(options, this.engine.compile(str, options));
|
||||
return compiled(options);
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* Asyncronous Templating Languages
|
||||
*/
|
||||
|
||||
exports.just = new Transformer({
|
||||
name: 'just',
|
||||
engines: ['just'],
|
||||
outputFormat: 'xml',
|
||||
sudoSync: true,
|
||||
async: function (str, options, cb) {
|
||||
var JUST = this.engine;
|
||||
var tmpl = this.cache(options) || this.cache(options, new JUST({ root: { page: str }}));
|
||||
tmpl.render('page', options, cb);
|
||||
}
|
||||
});
|
||||
|
||||
exports.ect = new Transformer({
|
||||
name: 'ect',
|
||||
engines: ['ect'],
|
||||
outputFormat: 'xml',
|
||||
sudoSync: true, // Always runs syncronously
|
||||
async: function (str, options, cb) {
|
||||
var ECT = this.engine;
|
||||
var tmpl = this.cache(options) || this.cache(options, new ECT({ root: { page: str }}));
|
||||
tmpl.render('page', options, cb);
|
||||
}
|
||||
});
|
||||
|
||||
exports.jade = new Transformer({
|
||||
name: 'jade',
|
||||
engines: ['jade', 'then-jade'],
|
||||
outputFormat: 'xml',
|
||||
sudoSync: 'The jade file FILENAME could not be rendered syncronously. N.B. then-jade does not support syncronous rendering.',
|
||||
async: function (str, options, cb) {
|
||||
this.cache(options, true);//jade handles this.cache internally
|
||||
this.engine.render(str, options, cb);
|
||||
}
|
||||
})
|
||||
|
||||
exports.dust = new Transformer({
|
||||
name: 'dust',
|
||||
engines: ['dust', 'dustjs-linkedin'],
|
||||
outputFormat: 'xml',
|
||||
sudoSync: false,
|
||||
async: function (str, options, cb) {
|
||||
var ext = 'dust'
|
||||
, views = '.';
|
||||
|
||||
if (options) {
|
||||
if (options.ext) ext = options.ext;
|
||||
if (options.views) views = options.views;
|
||||
if (options.settings && options.settings.views) views = options.settings.views;
|
||||
}
|
||||
|
||||
this.engine.onLoad = function(path, callback){
|
||||
if ('' == extname(path)) path += '.' + ext;
|
||||
if ('/' !== path[0]) path = views + '/' + path;
|
||||
read(path, options, callback);
|
||||
};
|
||||
|
||||
var tmpl = this.cache(options) || this.cache(options, this.engine.compileFn(str));
|
||||
if (options && !options.cache) this.engine.cache = {};//invalidate dust's internal cache
|
||||
tmpl(options, cb);
|
||||
}
|
||||
});
|
||||
|
||||
exports.jazz = new Transformer({
|
||||
name: 'jazz',
|
||||
engines: ['jazz'],
|
||||
outputFormat: 'xml',
|
||||
sudoSync: true, // except when an async function is passed to locals
|
||||
async: function (str, options, cb) {
|
||||
var tmpl = this.cache(options) || this.cache(options, this.engine.compile(str, options));
|
||||
tmpl.eval(options, function(str){
|
||||
cb(null, str);
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
exports.qejs = new Transformer({
|
||||
name: 'qejs',
|
||||
engines: ['qejs'],
|
||||
outputFormat: 'xml',
|
||||
sudoSync: false,
|
||||
async: function (str, options, cb) {
|
||||
var tmpl = this.cache(options) || this.cache(options, this.engine.compile(str, options));
|
||||
tmpl(options).done(function (result) {
|
||||
cb(null, result);
|
||||
}, function (err) {
|
||||
cb(err);
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* Stylsheet Languages
|
||||
*/
|
||||
|
||||
exports.less = new Transformer({
|
||||
name: 'less',
|
||||
engines: ['less'],
|
||||
outputFormat: 'css',
|
||||
sudoSync: 'The less file FILENAME could not be rendered syncronously. This is usually because the file contains `@import url` statements.',
|
||||
async: function (str, options, cb) {
|
||||
var self = this;
|
||||
if (self.cache(options)) return cb(null, self.cache(options));
|
||||
if (options.filename) {
|
||||
options.paths = options.paths || [dirname(options.filename)];
|
||||
}
|
||||
//If this.cache is enabled, compress by default
|
||||
if (options.compress !== true && options.compress !== false) {
|
||||
options.compress = options.cache || false;
|
||||
}
|
||||
if (options.sudoSync) {
|
||||
options.syncImport = true;
|
||||
}
|
||||
var parser = new(this.engine.Parser)(options);
|
||||
parser.parse(str, function (err, tree) {
|
||||
try {
|
||||
if (err) throw err;
|
||||
var res = tree.toCSS(options);
|
||||
self.cache(options, res);
|
||||
cb(null, res);
|
||||
} catch (ex) {
|
||||
if (ex.constructor.name === 'LessError' && typeof ex === 'object') {
|
||||
ex.filename = ex.filename || '"Unkown Source"';
|
||||
var err = new Error(self.engine.formatError(ex, options).replace(/^[^:]+:/, ''), ex.filename, ex.line);
|
||||
err.name = ex.type;
|
||||
ex = err;
|
||||
}
|
||||
return cb(ex);
|
||||
}
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
exports.styl = exports.stylus = new Transformer({
|
||||
name: 'stylus',
|
||||
engines: ['stylus'],
|
||||
outputFormat: 'css',
|
||||
sudoSync: true,// always runs syncronously
|
||||
async: function (str, options, cb) {
|
||||
var self = this;
|
||||
if (self.cache(options)) return cb(null, self.cache(options));
|
||||
if (options.filename) {
|
||||
options.paths = options.paths || [dirname(options.filename)];
|
||||
}
|
||||
//If this.cache is enabled, compress by default
|
||||
if (options.compress !== true && options.compress !== false) {
|
||||
options.compress = options.cache || false;
|
||||
}
|
||||
this.engine.render(str, options, function (err, res) {
|
||||
if (err) return cb(err);
|
||||
self.cache(options, res);
|
||||
cb(null, res);
|
||||
});
|
||||
}
|
||||
})
|
||||
|
||||
exports.sass = new Transformer({
|
||||
name: 'sass',
|
||||
engines: ['sass'],
|
||||
outputFormat: 'css',
|
||||
sync: function (str, options) {
|
||||
try {
|
||||
return this.cache(options) || this.cache(options, this.engine.render(str));
|
||||
} catch (ex) {
|
||||
if (options.filename) ex.message += ' in ' + options.filename;
|
||||
throw ex;
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* Miscelaneous
|
||||
*/
|
||||
|
||||
exports.md = exports.markdown = new Transformer({
|
||||
name: 'markdown',
|
||||
engines: ['marked', 'supermarked', 'markdown-js', 'markdown'],
|
||||
outputFormat: 'html',
|
||||
sync: function (str, options) {
|
||||
var arg = options;
|
||||
if (this.engineName === 'markdown') arg = options.dialect; //even if undefined
|
||||
return this.cache(options) || this.cache(options, this.engine.parse(str, arg));
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
exports.coffee = exports['coffee-script'] = exports.coffeescript = exports.coffeeScript = new Transformer({
|
||||
name: 'coffee-script',
|
||||
engines: ['coffee-script'],
|
||||
outputFormat: 'js',
|
||||
sync: function (str, options) {
|
||||
return this.cache(options) || this.cache(options, this.engine.compile(str, options));
|
||||
}
|
||||
});
|
||||
|
||||
exports.cson = new Transformer({
|
||||
name: 'cson',
|
||||
engines: ['cson'],
|
||||
outputFormat: 'json',
|
||||
sync: function (str, options) {
|
||||
//todo: remove once https://github.com/rstacruz/js2coffee/pull/174 accepted & released
|
||||
if (global.Narcissus) delete global.Narcissus; //prevent global leak
|
||||
return this.cache(options) || this.cache(options, JSON.stringify(this.engine.parseSync(str)));
|
||||
}
|
||||
});
|
||||
|
||||
exports.cdata = new Transformer({
|
||||
name: 'cdata',
|
||||
engines: ['.'],// `.` means "no dependency"
|
||||
outputFormat: 'xml',
|
||||
sync: function (str, options) {
|
||||
var escaped = str.replace(/\]\]>/g, "]]]]><![CDATA[>");
|
||||
return this.cache(options) || this.cache(options, '<![CDATA[' + escaped + ']]>');
|
||||
}
|
||||
});
|
||||
|
||||
exports["cdata-js"] = new Transformer({
|
||||
name: 'cdata-js',
|
||||
engines: ['.'],// `.` means "no dependency"
|
||||
outputFormat: 'xml',
|
||||
sync: function (str, options) {
|
||||
var escaped = str.replace(/\]\]>/g, "]]]]><![CDATA[>");
|
||||
return this.cache(options) || this.cache(options, '//<![CDATA[\n' + escaped + '\n//]]>');
|
||||
}
|
||||
});
|
||||
|
||||
exports["cdata-css"] = new Transformer({
|
||||
name: 'cdata-css',
|
||||
engines: ['.'],// `.` means "no dependency"
|
||||
outputFormat: 'xml',
|
||||
sync: function (str, options) {
|
||||
var escaped = str.replace(/\]\]>/g, "]]]]><![CDATA[>");
|
||||
return this.cache(options) || this.cache(options, '/*<![CDATA[*/\n' + escaped + '\n/*]]>*/');
|
||||
}
|
||||
});
|
||||
|
||||
exports.verbatim = new Transformer({
|
||||
name: 'verbatim',
|
||||
engines: ['.'],// `.` means "no dependency"
|
||||
outputFormat: 'xml',
|
||||
sync: function (str, options) {
|
||||
return this.cache(options) || this.cache(options, str);
|
||||
}
|
||||
});
|
||||
|
||||
exports.component = exports['component-js'] = new Transformer({
|
||||
name: 'component-js',
|
||||
engines: ['component-builder'],
|
||||
outputFormat: 'js',
|
||||
async: function (str, options, cb) {
|
||||
if (this.cache(options)) return this.cache(options);
|
||||
var self = this;
|
||||
var builder = new this.engine(dirname(options.filename));
|
||||
if (options.development) {
|
||||
builder.development();
|
||||
}
|
||||
if (options.sourceURLs === true || (options.sourceURLs !== false && options.development)) {
|
||||
builder.addSourceURLs();
|
||||
}
|
||||
var path = require('path');
|
||||
builder.paths = (options.paths || ['components']).map(function (p) {
|
||||
if (path.resolve(p) === p) {
|
||||
return p;
|
||||
} else {
|
||||
return path.join(dirname(options.filename), p);
|
||||
}
|
||||
});
|
||||
builder.build(function (err, obj) {
|
||||
if (err) return cb(err);
|
||||
else return cb(null, self.cache(options, obj.require + obj.js));
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
exports['component-css'] = new Transformer({
|
||||
name: 'component-css',
|
||||
engines: ['component-builder'],
|
||||
outputFormat: 'css',
|
||||
async: function (str, options, cb) {
|
||||
if (this.cache(options)) return this.cache(options);
|
||||
var self = this;
|
||||
var builder = new this.engine(dirname(options.filename));
|
||||
if (options.development) {
|
||||
builder.development();
|
||||
}
|
||||
if (options.sourceURLs === true || (options.sourceURLs !== false && options.development)) {
|
||||
builder.addSourceURLs();
|
||||
}
|
||||
var path = require('path');
|
||||
builder.paths = (options.paths || ['components']).map(function (p) {
|
||||
if (path.resolve(p) === p) {
|
||||
return p;
|
||||
} else {
|
||||
return path.join(dirname(options.filename), p);
|
||||
}
|
||||
});
|
||||
builder.build(function (err, obj) {
|
||||
if (err) return cb(err);
|
||||
else return cb(null, self.cache(options, obj.css));
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
exports['html2jade'] = new Transformer({
|
||||
name: 'html2jade',
|
||||
engines: ['html2jade'],
|
||||
outputFormat: 'jade',
|
||||
async: function (str, options, cb) {
|
||||
return this.cache(options) || this.cache(options, this.engine.convertHtml(str, options, cb));
|
||||
}
|
||||
});
|
||||
|
||||
exports['highlight'] = new Transformer({
|
||||
name: 'highlight',
|
||||
engines: ['highlight.js'],
|
||||
outputFormat: 'xml',
|
||||
sync: function (str, options, cb) {
|
||||
if (this.cache(options)) return this.cache(options);
|
||||
if (options.lang) {
|
||||
try {
|
||||
return this.cache(options, this.engine.highlight(options.lang, str).value);
|
||||
} catch (ex) {}
|
||||
}
|
||||
if (options.auto || !options.lang) {
|
||||
try {
|
||||
return this.cache(options, this.engine.highlightAuto(str).value);
|
||||
} catch (ex) {}
|
||||
}
|
||||
return this.cache(options, str);
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
/**
|
||||
* Marker transformers (they don't actually apply a transformation, but let you declare the 'outputFormat')
|
||||
*/
|
||||
|
||||
exports.css = new Transformer({
|
||||
name: 'css',
|
||||
engines: ['.'],// `.` means "no dependency"
|
||||
outputFormat: 'css',
|
||||
sync: function (str, options) {
|
||||
return this.cache(options) || this.cache(options, str);
|
||||
}
|
||||
});
|
||||
|
||||
exports.js = new Transformer({
|
||||
name: 'js',
|
||||
engines: ['.'],// `.` means "no dependency"
|
||||
outputFormat: 'js',
|
||||
sync: function (str, options) {
|
||||
return this.cache(options) || this.cache(options, str);
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
1  node_modules/transformers/node_modules/.bin/uglifyjs (generated, vendored, new symbolic link)
@@ -0,0 +1 @@
../uglify-js/bin/uglifyjs
6  node_modules/transformers/node_modules/is-promise/.npmignore (generated, vendored, new file)
@@ -0,0 +1,6 @@
component
build
node_modules
test.js
component.json
.gitignore
3  node_modules/transformers/node_modules/is-promise/.travis.yml (generated, vendored, new file)
@@ -0,0 +1,3 @@
language: node_js
node_js:
  - "0.10"
19  node_modules/transformers/node_modules/is-promise/LICENSE (generated, vendored, new file)
@@ -0,0 +1,19 @@
Copyright (c) 2014 Forbes Lindesay

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
5  node_modules/transformers/node_modules/is-promise/index.js (generated, vendored, new file)
@@ -0,0 +1,5 @@
module.exports = isPromise;

function isPromise(obj) {
  return obj && typeof obj.then === 'function';
}
72  node_modules/transformers/node_modules/is-promise/package.json (generated, vendored, new file)
@@ -0,0 +1,72 @@
|
||||
{
|
||||
"_args": [
|
||||
[
|
||||
"is-promise@~1",
|
||||
"/home/mitchell/Desktop/test-mywebsite/mywebsite/node_modules/transformers/node_modules/promise"
|
||||
]
|
||||
],
|
||||
"_from": "is-promise@>=1.0.0 <2.0.0",
|
||||
"_id": "is-promise@1.0.1",
|
||||
"_inCache": true,
|
||||
"_installable": true,
|
||||
"_location": "/transformers/is-promise",
|
||||
"_npmUser": {
|
||||
"email": "forbes@lindeay.co.uk",
|
||||
"name": "forbeslindesay"
|
||||
},
|
||||
"_npmVersion": "1.4.3",
|
||||
"_phantomChildren": {},
|
||||
"_requested": {
|
||||
"name": "is-promise",
|
||||
"raw": "is-promise@~1",
|
||||
"rawSpec": "~1",
|
||||
"scope": null,
|
||||
"spec": ">=1.0.0 <2.0.0",
|
||||
"type": "range"
|
||||
},
|
||||
"_requiredBy": [
|
||||
"/transformers/promise"
|
||||
],
|
||||
"_resolved": "https://registry.npmjs.org/is-promise/-/is-promise-1.0.1.tgz",
|
||||
"_shasum": "31573761c057e33c2e91aab9e96da08cefbe76e5",
|
||||
"_shrinkwrap": null,
|
||||
"_spec": "is-promise@~1",
|
||||
"_where": "/home/mitchell/Desktop/test-mywebsite/mywebsite/node_modules/transformers/node_modules/promise",
|
||||
"author": {
|
||||
"name": "ForbesLindesay"
|
||||
},
|
||||
"bugs": {
|
||||
"url": "https://github.com/then/is-promise/issues"
|
||||
},
|
||||
"dependencies": {},
|
||||
"description": "Test whether an object looks like a promises-a+ promise",
|
||||
"devDependencies": {
|
||||
"better-assert": "~0.1.0",
|
||||
"mocha": "~1.7.4"
|
||||
},
|
||||
"directories": {},
|
||||
"dist": {
|
||||
"shasum": "31573761c057e33c2e91aab9e96da08cefbe76e5",
|
||||
"tarball": "http://registry.npmjs.org/is-promise/-/is-promise-1.0.1.tgz"
|
||||
},
|
||||
"homepage": "https://github.com/then/is-promise",
|
||||
"license": "MIT",
|
||||
"main": "index.js",
|
||||
"maintainers": [
|
||||
{
|
||||
"name": "forbeslindesay",
|
||||
"email": "forbes@lindesay.co.uk"
|
||||
}
|
||||
],
|
||||
"name": "is-promise",
|
||||
"optionalDependencies": {},
|
||||
"readme": "ERROR: No README data found!",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/then/is-promise.git"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "mocha -R spec"
|
||||
},
|
||||
"version": "1.0.1"
|
||||
}
|
||||
29  node_modules/transformers/node_modules/is-promise/readme.md (generated, vendored, new file)
@@ -0,0 +1,29 @@
<a href="http://promises-aplus.github.com/promises-spec"><img src="http://promises-aplus.github.com/promises-spec/assets/logo-small.png" align="right" /></a>
# is-promise

Test whether an object looks like a Promises/A+ promise

[Build status](https://travis-ci.org/then/is-promise)
[Dependency status](https://gemnasium.com/then/is-promise)
[NPM version](https://www.npmjs.org/package/is-promise)

## Installation

    $ npm install is-promise

You can also use it client side via npm.

## API

```javascript
var isPromise = require('is-promise');

isPromise({then:function () {...}});//=>true
isPromise(null);//=>false
isPromise({});//=>false
isPromise({then: true})//=>false
```

## License

MIT
6  node_modules/transformers/node_modules/promise/.npmignore (generated, vendored, new file)
@@ -0,0 +1,6 @@
components
node_modules
test
.gitignore
.travis.yml
component.json
85  node_modules/transformers/node_modules/promise/Readme.md (generated, vendored, new file)
@@ -0,0 +1,85 @@
[Build status](https://travis-ci.org/then/promise)
<a href="http://promises-aplus.github.com/promises-spec"><img src="http://promises-aplus.github.com/promises-spec/assets/logo-small.png" align="right" /></a>
# promise

This is a bare bones [Promises/A+](http://promises-aplus.github.com/promises-spec/) implementation.

It is designed to get the basics spot on, so that you can build extended promise implementations on top of it.

## Installation

Server:

    $ npm install promise

Client:

    $ component install then/promise

## API

The example below shows how you can load the promise library (in a way that works on both client and server). It then demonstrates creating a promise from scratch: you simply call `new Promise(fn)`. There is a complete specification for what is returned by this method in [Promises/A+](http://promises-aplus.github.com/promises-spec/).

```javascript
var Promise = require('promise');

var promise = new Promise(function (resolve, reject) {
  get('http://www.google.com', function (err, res) {
    if (err) reject(err);
    else resolve(res);
  });
});
```
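As a follow-up sketch (not part of the original README), the returned promise is then consumed with `then`; `get` is the same hypothetical HTTP helper used in the example above:

```javascript
promise.then(function (res) {
  console.log('got response:', res);
}, function (err) {
  console.error('request failed:', err);
});
```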

## Extending Promises

There are three options for extending the promises created by this library.

### Inheritance

You can use inheritance if you want to create your own complete promise library with this as your basic starting point, perfect if you have lots of cool features you want to add. Here is an example of a promise library called `Awesome`, which is built on top of `Promise` correctly.

```javascript
var Promise = require('promise');
function Awesome(fn) {
  if (!(this instanceof Awesome)) return new Awesome(fn);
  Promise.call(this, fn);
}
Awesome.prototype = Object.create(Promise.prototype);
Awesome.prototype.constructor = Awesome;

//Awesome extension
Awesome.prototype.spread = function (cb) {
  return this.then(function (arr) {
    return cb.apply(this, arr);
  })
};
```

N.B. if you fail to set the prototype and constructor properly, or fail to call `Promise.call`, things can fail in really subtle ways.

### Wrap

This is the nuclear option, for when you want to start from scratch. It ensures you won't be impacted by anyone who is extending the prototype (see below).

```javascript
function Uber(fn) {
  if (!(this instanceof Uber)) return new Uber(fn);
  var _prom = new Promise(fn);
  this.then = _prom.then;
}

Uber.prototype.spread = function (cb) {
  return this.then(function (arr) {
    return cb.apply(this, arr);
  })
};
```

### Extending the Prototype

In general, you should never extend the prototype of this promise implementation because your extensions could easily conflict with someone else's extensions. However, this organisation will host a library of extensions which do not conflict with each other, so you can safely enable any of those. If you think of an extension that we don't provide and you want to write it, submit an issue on this repository and (if I agree) I'll set you up with a repository and give you permission to commit to it.

## License

MIT
164  node_modules/transformers/node_modules/promise/index.js (generated, vendored, new file)
@@ -0,0 +1,164 @@
|
||||
var isPromise = require('is-promise')
|
||||
|
||||
var nextTick
|
||||
if (typeof setImediate === 'function') nextTick = setImediate
|
||||
else if (typeof process === 'object' && process && process.nextTick) nextTick = process.nextTick
|
||||
else nextTick = function (cb) { setTimeout(cb, 0) }
|
||||
|
||||
var extensions = []
|
||||
|
||||
module.exports = Promise
|
||||
function Promise(fn) {
|
||||
if (!(this instanceof Promise)) {
|
||||
return fn ? new Promise(fn) : defer()
|
||||
}
|
||||
if (typeof fn !== 'function') {
|
||||
throw new TypeError('fn is not a function')
|
||||
}
|
||||
|
||||
var state = {
|
||||
isResolved: false,
|
||||
isSettled: false,
|
||||
isFulfilled: false,
|
||||
value: null,
|
||||
waiting: [],
|
||||
running: false
|
||||
}
|
||||
|
||||
function _resolve(val) {
|
||||
resolve(state, val);
|
||||
}
|
||||
function _reject(err) {
|
||||
reject(state, err);
|
||||
}
|
||||
this.then = function _then(onFulfilled, onRejected) {
|
||||
return then(state, onFulfilled, onRejected);
|
||||
}
|
||||
|
||||
_resolve.fulfill = deprecate(_resolve, 'resolver.fulfill(x)', 'resolve(x)')
|
||||
_resolve.reject = deprecate(_reject, 'resolver.reject', 'reject(x)')
|
||||
|
||||
try {
|
||||
fn(_resolve, _reject)
|
||||
} catch (ex) {
|
||||
_reject(ex)
|
||||
}
|
||||
}
|
||||
|
||||
function resolve(promiseState, value) {
|
||||
if (promiseState.isResolved) return
|
||||
if (isPromise(value)) {
|
||||
assimilate(promiseState, value)
|
||||
} else {
|
||||
settle(promiseState, true, value)
|
||||
}
|
||||
}
|
||||
|
||||
function reject(promiseState, reason) {
|
||||
if (promiseState.isResolved) return
|
||||
settle(promiseState, false, reason)
|
||||
}
|
||||
|
||||
function then(promiseState, onFulfilled, onRejected) {
|
||||
return new Promise(function (resolve, reject) {
|
||||
function done(next, skipTimeout) {
|
||||
var callback = promiseState.isFulfilled ? onFulfilled : onRejected
|
||||
if (typeof callback === 'function') {
|
||||
function timeoutDone() {
|
||||
var val
|
||||
try {
|
||||
val = callback(promiseState.value)
|
||||
} catch (ex) {
|
||||
reject(ex)
|
||||
return next(true)
|
||||
}
|
||||
resolve(val)
|
||||
next(true)
|
||||
}
|
||||
if (skipTimeout) timeoutDone()
|
||||
else nextTick(timeoutDone)
|
||||
} else if (promiseState.isFulfilled) {
|
||||
resolve(promiseState.value)
|
||||
next(skipTimeout)
|
||||
} else {
|
||||
reject(promiseState.value)
|
||||
next(skipTimeout)
|
||||
}
|
||||
}
|
||||
promiseState.waiting.push(done)
|
||||
if (promiseState.isSettled && !promiseState.running) processQueue(promiseState)
|
||||
})
|
||||
}
|
||||
|
||||
function processQueue(promiseState) {
|
||||
function next(skipTimeout) {
|
||||
if (promiseState.waiting.length) {
|
||||
promiseState.running = true
|
||||
promiseState.waiting.shift()(next, skipTimeout)
|
||||
} else {
|
||||
promiseState.running = false
|
||||
}
|
||||
}
|
||||
next(false)
|
||||
}
|
||||
|
||||
function settle(promiseState, isFulfilled, value) {
|
||||
if (promiseState.isSettled) return
|
||||
|
||||
promiseState.isResolved = promiseState.isSettled = true
|
||||
promiseState.value = value
|
||||
promiseState.isFulfilled = isFulfilled
|
||||
|
||||
processQueue(promiseState)
|
||||
}
|
||||
|
||||
function assimilate(promiseState, thenable) {
|
||||
try {
|
||||
promiseState.isResolved = true
|
||||
thenable.then(function (res) {
|
||||
if (isPromise(res)) {
|
||||
assimilate(promiseState, res)
|
||||
} else {
|
||||
settle(promiseState, true, res)
|
||||
}
|
||||
}, function (err) {
|
||||
settle(promiseState, false, err)
|
||||
})
|
||||
} catch (ex) {
|
||||
settle(promiseState, false, ex)
|
||||
}
|
||||
}
|
||||
|
||||
Promise.use = function (extension) {
|
||||
extensions.push(extension)
|
||||
}
|
||||
|
||||
|
||||
function deprecate(method, name, alternative) {
|
||||
return function () {
|
||||
var err = new Error(name + ' is deprecated use ' + alternative)
|
||||
if (typeof console !== 'undefined' && console && typeof console.warn === 'function') {
|
||||
console.warn(name + ' is deprecated use ' + alternative)
|
||||
if (err.stack) console.warn(err.stack)
|
||||
} else {
|
||||
nextTick(function () {
|
||||
throw err
|
||||
})
|
||||
}
|
||||
method.apply(this, arguments)
|
||||
}
|
||||
}
|
||||
function defer() {
|
||||
var err = new Error('promise.defer() is deprecated')
|
||||
if (typeof console !== 'undefined' && console && typeof console.warn === 'function') {
|
||||
console.warn('promise.defer() is deprecated')
|
||||
if (err.stack) console.warn(err.stack)
|
||||
} else {
|
||||
nextTick(function () {
|
||||
throw err
|
||||
})
|
||||
}
|
||||
var resolver
|
||||
var promise = new Promise(function (res) { resolver = res })
|
||||
return {resolver: resolver, promise: promise}
|
||||
}
|
||||
75  node_modules/transformers/node_modules/promise/package.json (generated, vendored, new file)
@@ -0,0 +1,75 @@
|
||||
{
|
||||
"_args": [
|
||||
[
|
||||
"promise@~2.0",
|
||||
"/home/mitchell/Desktop/test-mywebsite/mywebsite/node_modules/transformers"
|
||||
]
|
||||
],
|
||||
"_from": "promise@>=2.0.0 <2.1.0",
|
||||
"_id": "promise@2.0.0",
|
||||
"_inCache": true,
|
||||
"_installable": true,
|
||||
"_location": "/transformers/promise",
|
||||
"_npmUser": {
|
||||
"email": "forbes@lindesay.co.uk",
|
||||
"name": "forbeslindesay"
|
||||
},
|
||||
"_npmVersion": "1.2.10",
|
||||
"_phantomChildren": {},
|
||||
"_requested": {
|
||||
"name": "promise",
|
||||
"raw": "promise@~2.0",
|
||||
"rawSpec": "~2.0",
|
||||
"scope": null,
|
||||
"spec": ">=2.0.0 <2.1.0",
|
||||
"type": "range"
|
||||
},
|
||||
"_requiredBy": [
|
||||
"/transformers"
|
||||
],
|
||||
"_resolved": "https://registry.npmjs.org/promise/-/promise-2.0.0.tgz",
|
||||
"_shasum": "46648aa9d605af5d2e70c3024bf59436da02b80e",
|
||||
"_shrinkwrap": null,
|
||||
"_spec": "promise@~2.0",
|
||||
"_where": "/home/mitchell/Desktop/test-mywebsite/mywebsite/node_modules/transformers",
|
||||
"author": {
|
||||
"name": "ForbesLindesay"
|
||||
},
|
||||
"bugs": {
|
||||
"url": "https://github.com/then/promise/issues"
|
||||
},
|
||||
"dependencies": {
|
||||
"is-promise": "~1"
|
||||
},
|
||||
"description": "Bare bones Promises/A+ implementation",
|
||||
"devDependencies": {
|
||||
"better-assert": "~1.0.0",
|
||||
"mocha-as-promised": "~1.2.1",
|
||||
"promises-aplus-tests": "*"
|
||||
},
|
||||
"directories": {},
|
||||
"dist": {
|
||||
"shasum": "46648aa9d605af5d2e70c3024bf59436da02b80e",
|
||||
"tarball": "http://registry.npmjs.org/promise/-/promise-2.0.0.tgz"
|
||||
},
|
||||
"homepage": "https://github.com/then/promise#readme",
|
||||
"license": "MIT",
|
||||
"main": "index.js",
|
||||
"maintainers": [
|
||||
{
|
||||
"name": "forbeslindesay",
|
||||
"email": "forbes@lindesay.co.uk"
|
||||
}
|
||||
],
|
||||
"name": "promise",
|
||||
"optionalDependencies": {},
|
||||
"readme": "ERROR: No README data found!",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/then/promise.git"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "mocha -R spec --timeout 200 --slow 99999"
|
||||
},
|
||||
"version": "2.0.0"
|
||||
}
|
||||
2  node_modules/transformers/node_modules/source-map/.npmignore (generated, vendored, new file)
@@ -0,0 +1,2 @@
dist/*
node_modules/*
4  node_modules/transformers/node_modules/source-map/.travis.yml (generated, vendored, new file)
@@ -0,0 +1,4 @@
language: node_js
node_js:
  - 0.8
  - "0.10"
194  node_modules/transformers/node_modules/source-map/CHANGELOG.md (generated, vendored, new file)
@@ -0,0 +1,194 @@
|
||||
# Change Log
|
||||
|
||||
## 0.1.43
|
||||
|
||||
* Performance improvements for `SourceMapGenerator` and `SourceNode`. See issue
|
||||
#148 for some discussion and issues #150, #151, and #152 for implementations.
|
||||
|
||||
## 0.1.42
|
||||
|
||||
* Fix an issue where `SourceNode`s from different versions of the source-map
|
||||
library couldn't be used in conjunction with each other. See issue #142.
|
||||
|
||||
## 0.1.41
|
||||
|
||||
* Fix a bug with getting the source content of relative sources with a "./"
|
||||
prefix. See issue #145 and [Bug 1090768](bugzil.la/1090768).
|
||||
|
||||
* Add the `SourceMapConsumer.prototype.computeColumnSpans` method to compute the
|
||||
column span of each mapping.
|
||||
|
||||
* Add the `SourceMapConsumer.prototype.allGeneratedPositionsFor` method to find
|
||||
all generated positions associated with a given original source and line.
|
||||
|
||||
## 0.1.40
|
||||
|
||||
* Performance improvements for parsing source maps in SourceMapConsumer.
|
||||
|
||||
## 0.1.39
|
||||
|
||||
* Fix a bug where setting a source's contents to null before any source content
|
||||
had been set before threw a TypeError. See issue #131.
|
||||
|
||||
## 0.1.38
|
||||
|
||||
* Fix a bug where finding relative paths from an empty path were creating
|
||||
absolute paths. See issue #129.
|
||||
|
||||
## 0.1.37
|
||||
|
||||
* Fix a bug where if the source root was an empty string, relative source paths
|
||||
would turn into absolute source paths. Issue #124.
|
||||
|
||||
## 0.1.36
|
||||
|
||||
* Allow the `names` mapping property to be an empty string. Issue #121.
|
||||
|
||||
## 0.1.35
|
||||
|
||||
* A third optional parameter was added to `SourceNode.fromStringWithSourceMap`
|
||||
to specify a path that relative sources in the second parameter should be
|
||||
relative to. Issue #105.
|
||||
|
||||
* If no file property is given to a `SourceMapGenerator`, then the resulting
|
||||
source map will no longer have a `null` file property. The property will
|
||||
simply not exist. Issue #104.
|
||||
|
||||
* Fixed a bug where consecutive newlines were ignored in `SourceNode`s.
|
||||
Issue #116.
|
||||
|
||||
## 0.1.34
|
||||
|
||||
* Make `SourceNode` work with windows style ("\r\n") newlines. Issue #103.
|
||||
|
||||
* Fix bug involving source contents and the
|
||||
`SourceMapGenerator.prototype.applySourceMap`. Issue #100.
|
||||
|
||||
## 0.1.33
|
||||
|
||||
* Fix some edge cases surrounding path joining and URL resolution.
|
||||
|
||||
* Add a third parameter for relative path to
|
||||
`SourceMapGenerator.prototype.applySourceMap`.
|
||||
|
||||
* Fix issues with mappings and EOLs.
|
||||
|
||||
## 0.1.32
|
||||
|
||||
* Fixed a bug where SourceMapConsumer couldn't handle negative relative columns
|
||||
(issue 92).
|
||||
|
||||
* Fixed test runner to actually report number of failed tests as its process
|
||||
exit code.
|
||||
|
||||
* Fixed a typo when reporting bad mappings (issue 87).
|
||||
|
||||
## 0.1.31
|
||||
|
||||
* Delay parsing the mappings in SourceMapConsumer until queried for a source
|
||||
location.
|
||||
|
||||
* Support Sass source maps (which at the time of writing deviate from the spec
|
||||
in small ways) in SourceMapConsumer.
|
||||
|
||||
## 0.1.30
|
||||
|
||||
* Do not join source root with a source, when the source is a data URI.
|
||||
|
||||
* Extend the test runner to allow running single specific test files at a time.
|
||||
|
||||
* Performance improvements in `SourceNode.prototype.walk` and
|
||||
`SourceMapConsumer.prototype.eachMapping`.
|
||||
|
||||
* Source map browser builds will now work inside Workers.
|
||||
|
||||
* Better error messages when attempting to add an invalid mapping to a
|
||||
`SourceMapGenerator`.
|
||||
|
||||
## 0.1.29
|
||||
|
||||
* Allow duplicate entries in the `names` and `sources` arrays of source maps
|
||||
(usually from TypeScript) we are parsing. Fixes github issue 72.
|
||||
|
||||
## 0.1.28
|
||||
|
||||
* Skip duplicate mappings when creating source maps from SourceNode; github
|
||||
issue 75.
|
||||
|
||||
## 0.1.27
|
||||
|
||||
* Don't throw an error when the `file` property is missing in SourceMapConsumer,
|
||||
we don't use it anyway.
|
||||
|
||||
## 0.1.26
|
||||
|
||||
* Fix SourceNode.fromStringWithSourceMap for empty maps. Fixes github issue 70.
|
||||
|
||||
## 0.1.25
|
||||
|
||||
* Make compatible with browserify
|
||||
|
||||
## 0.1.24
|
||||
|
||||
* Fix issue with absolute paths and `file://` URIs. See
|
||||
https://bugzilla.mozilla.org/show_bug.cgi?id=885597
|
||||
|
||||
## 0.1.23
|
||||
|
||||
* Fix issue with absolute paths and sourcesContent, github issue 64.
|
||||
|
||||
## 0.1.22
|
||||
|
||||
* Ignore duplicate mappings in SourceMapGenerator. Fixes github issue 21.
|
||||
|
||||
## 0.1.21
|
||||
|
||||
* Fixed handling of sources that start with a slash so that they are relative to
|
||||
the source root's host.
|
||||
|
||||
## 0.1.20
|
||||
|
||||
* Fixed github issue #43: absolute URLs aren't joined with the source root
|
||||
anymore.
|
||||
|
||||
## 0.1.19
|
||||
|
||||
* Using Travis CI to run tests.
|
||||
|
||||
## 0.1.18
|
||||
|
||||
* Fixed a bug in the handling of sourceRoot.
|
||||
|
||||
## 0.1.17
|
||||
|
||||
* Added SourceNode.fromStringWithSourceMap.
|
||||
|
||||
## 0.1.16
|
||||
|
||||
* Added missing documentation.
|
||||
|
||||
* Fixed the generating of empty mappings in SourceNode.
|
||||
|
||||
## 0.1.15
|
||||
|
||||
* Added SourceMapGenerator.applySourceMap.
|
||||
|
||||
## 0.1.14
|
||||
|
||||
* The sourceRoot is now handled consistently.
|
||||
|
||||
## 0.1.13
|
||||
|
||||
* Added SourceMapGenerator.fromSourceMap.
|
||||
|
||||
## 0.1.12
|
||||
|
||||
* SourceNode now generates empty mappings too.
|
||||
|
||||
## 0.1.11
|
||||
|
||||
* Added name support to SourceNode.
|
||||
|
||||
## 0.1.10
|
||||
|
||||
* Added sourcesContent support to the customer and generator.
|
||||
28
node_modules/transformers/node_modules/source-map/LICENSE
generated
vendored
Normal file
28
node_modules/transformers/node_modules/source-map/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,28 @@
|
||||
|
||||
Copyright (c) 2009-2011, Mozilla Foundation and contributors
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are met:
|
||||
|
||||
* Redistributions of source code must retain the above copyright notice, this
|
||||
list of conditions and the following disclaimer.
|
||||
|
||||
* Redistributions in binary form must reproduce the above copyright notice,
|
||||
this list of conditions and the following disclaimer in the documentation
|
||||
and/or other materials provided with the distribution.
|
||||
|
||||
* Neither the names of the Mozilla Foundation nor the names of project
|
||||
contributors may be used to endorse or promote products derived from this
|
||||
software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
|
||||
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
|
||||
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
||||
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
||||
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
|
||||
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
166
node_modules/transformers/node_modules/source-map/Makefile.dryice.js
generated
vendored
Normal file
166
node_modules/transformers/node_modules/source-map/Makefile.dryice.js
generated
vendored
Normal file
@@ -0,0 +1,166 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*/
|
||||
var path = require('path');
|
||||
var fs = require('fs');
|
||||
var copy = require('dryice').copy;
|
||||
|
||||
function removeAmdefine(src) {
|
||||
src = String(src).replace(
|
||||
/if\s*\(typeof\s*define\s*!==\s*'function'\)\s*{\s*var\s*define\s*=\s*require\('amdefine'\)\(module,\s*require\);\s*}\s*/g,
|
||||
'');
|
||||
src = src.replace(
|
||||
/\b(define\(.*)('amdefine',?)/gm,
|
||||
'$1');
|
||||
return src;
|
||||
}
|
||||
removeAmdefine.onRead = true;
|
||||
|
||||
function makeNonRelative(src) {
|
||||
return src
|
||||
.replace(/require\('.\//g, 'require(\'source-map/')
|
||||
.replace(/\.\.\/\.\.\/lib\//g, '');
|
||||
}
|
||||
makeNonRelative.onRead = true;
|
||||
|
||||
function buildBrowser() {
|
||||
console.log('\nCreating dist/source-map.js');
|
||||
|
||||
var project = copy.createCommonJsProject({
|
||||
roots: [ path.join(__dirname, 'lib') ]
|
||||
});
|
||||
|
||||
copy({
|
||||
source: [
|
||||
'build/mini-require.js',
|
||||
{
|
||||
project: project,
|
||||
require: [ 'source-map/source-map-generator',
|
||||
'source-map/source-map-consumer',
|
||||
'source-map/source-node']
|
||||
},
|
||||
'build/suffix-browser.js'
|
||||
],
|
||||
filter: [
|
||||
copy.filter.moduleDefines,
|
||||
removeAmdefine
|
||||
],
|
||||
dest: 'dist/source-map.js'
|
||||
});
|
||||
}
|
||||
|
||||
function buildBrowserMin() {
|
||||
console.log('\nCreating dist/source-map.min.js');
|
||||
|
||||
copy({
|
||||
source: 'dist/source-map.js',
|
||||
filter: copy.filter.uglifyjs,
|
||||
dest: 'dist/source-map.min.js'
|
||||
});
|
||||
}
|
||||
|
||||
function buildFirefox() {
|
||||
console.log('\nCreating dist/SourceMap.jsm');
|
||||
|
||||
var project = copy.createCommonJsProject({
|
||||
roots: [ path.join(__dirname, 'lib') ]
|
||||
});
|
||||
|
||||
copy({
|
||||
source: [
|
||||
'build/prefix-source-map.jsm',
|
||||
{
|
||||
project: project,
|
||||
require: [ 'source-map/source-map-consumer',
|
||||
'source-map/source-map-generator',
|
||||
'source-map/source-node' ]
|
||||
},
|
||||
'build/suffix-source-map.jsm'
|
||||
],
|
||||
filter: [
|
||||
copy.filter.moduleDefines,
|
||||
removeAmdefine,
|
||||
makeNonRelative
|
||||
],
|
||||
dest: 'dist/SourceMap.jsm'
|
||||
});
|
||||
|
||||
// Create dist/test/Utils.jsm
|
||||
console.log('\nCreating dist/test/Utils.jsm');
|
||||
|
||||
project = copy.createCommonJsProject({
|
||||
roots: [ __dirname, path.join(__dirname, 'lib') ]
|
||||
});
|
||||
|
||||
copy({
|
||||
source: [
|
||||
'build/prefix-utils.jsm',
|
||||
'build/assert-shim.js',
|
||||
{
|
||||
project: project,
|
||||
require: [ 'test/source-map/util' ]
|
||||
},
|
||||
'build/suffix-utils.jsm'
|
||||
],
|
||||
filter: [
|
||||
copy.filter.moduleDefines,
|
||||
removeAmdefine,
|
||||
makeNonRelative
|
||||
],
|
||||
dest: 'dist/test/Utils.jsm'
|
||||
});
|
||||
|
||||
function isTestFile(f) {
|
||||
return /^test\-.*?\.js/.test(f);
|
||||
}
|
||||
|
||||
var testFiles = fs.readdirSync(path.join(__dirname, 'test', 'source-map')).filter(isTestFile);
|
||||
|
||||
testFiles.forEach(function (testFile) {
|
||||
console.log('\nCreating', path.join('dist', 'test', testFile.replace(/\-/g, '_')));
|
||||
|
||||
copy({
|
||||
source: [
|
||||
'build/test-prefix.js',
|
||||
path.join('test', 'source-map', testFile),
|
||||
'build/test-suffix.js'
|
||||
],
|
||||
filter: [
|
||||
removeAmdefine,
|
||||
makeNonRelative,
|
||||
function (input, source) {
|
||||
return input.replace('define(',
|
||||
'define("'
|
||||
+ path.join('test', 'source-map', testFile.replace(/\.js$/, ''))
|
||||
+ '", ["require", "exports", "module"], ');
|
||||
},
|
||||
function (input, source) {
|
||||
return input.replace('{THIS_MODULE}', function () {
|
||||
return "test/source-map/" + testFile.replace(/\.js$/, '');
|
||||
});
|
||||
}
|
||||
],
|
||||
dest: path.join('dist', 'test', testFile.replace(/\-/g, '_'))
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
function ensureDir(name) {
|
||||
var dirExists = false;
|
||||
try {
|
||||
dirExists = fs.statSync(name).isDirectory();
|
||||
} catch (err) {}
|
||||
|
||||
if (!dirExists) {
|
||||
fs.mkdirSync(name, 0777);
|
||||
}
|
||||
}
|
||||
|
||||
ensureDir("dist");
|
||||
ensureDir("dist/test");
|
||||
buildFirefox();
|
||||
buildBrowser();
|
||||
buildBrowserMin();
|
||||
475
node_modules/transformers/node_modules/source-map/README.md
generated
vendored
Normal file
475
node_modules/transformers/node_modules/source-map/README.md
generated
vendored
Normal file
@@ -0,0 +1,475 @@
|
||||
# Source Map
|
||||
|
||||
This is a library to generate and consume the source map format
|
||||
[described here][format].
|
||||
|
||||
This library is written in the Asynchronous Module Definition format, and works
|
||||
in the following environments:
|
||||
|
||||
* Modern Browsers supporting ECMAScript 5 (either after the build, or with an
|
||||
AMD loader such as RequireJS)
|
||||
|
||||
* Inside Firefox (as a JSM file, after the build)
|
||||
|
||||
* With NodeJS versions 0.8.X and higher
|
||||
|
||||
## Node
|
||||
|
||||
$ npm install source-map
|
||||
|
||||
## Building from Source (for everywhere else)
|
||||
|
||||
Install Node and then run
|
||||
|
||||
$ git clone https://fitzgen@github.com/mozilla/source-map.git
|
||||
$ cd source-map
|
||||
$ npm link .
|
||||
|
||||
Next, run
|
||||
|
||||
$ node Makefile.dryice.js
|
||||
|
||||
This should spew a bunch of stuff to stdout, and create the following files:
|
||||
|
||||
* `dist/source-map.js` - The unminified browser version.
|
||||
|
||||
* `dist/source-map.min.js` - The minified browser version.
|
||||
|
||||
* `dist/SourceMap.jsm` - The JavaScript Module for inclusion in Firefox source.
|
||||
|
||||
## Examples
|
||||
|
||||
### Consuming a source map
|
||||
|
||||
var rawSourceMap = {
|
||||
version: 3,
|
||||
file: 'min.js',
|
||||
names: ['bar', 'baz', 'n'],
|
||||
sources: ['one.js', 'two.js'],
|
||||
sourceRoot: 'http://example.com/www/js/',
|
||||
mappings: 'CAAC,IAAI,IAAM,SAAUA,GAClB,OAAOC,IAAID;CCDb,IAAI,IAAM,SAAUE,GAClB,OAAOA'
|
||||
};
|
||||
|
||||
var smc = new SourceMapConsumer(rawSourceMap);
|
||||
|
||||
console.log(smc.sources);
|
||||
// [ 'http://example.com/www/js/one.js',
|
||||
// 'http://example.com/www/js/two.js' ]
|
||||
|
||||
console.log(smc.originalPositionFor({
|
||||
line: 2,
|
||||
column: 28
|
||||
}));
|
||||
// { source: 'http://example.com/www/js/two.js',
|
||||
// line: 2,
|
||||
// column: 10,
|
||||
// name: 'n' }
|
||||
|
||||
console.log(smc.generatedPositionFor({
|
||||
source: 'http://example.com/www/js/two.js',
|
||||
line: 2,
|
||||
column: 10
|
||||
}));
|
||||
// { line: 2, column: 28 }
|
||||
|
||||
smc.eachMapping(function (m) {
|
||||
// ...
|
||||
});
|
||||
|
||||
### Generating a source map
|
||||
|
||||
In depth guide:
|
||||
[**Compiling to JavaScript, and Debugging with Source Maps**](https://hacks.mozilla.org/2013/05/compiling-to-javascript-and-debugging-with-source-maps/)
|
||||
|
||||
#### With SourceNode (high level API)
|
||||
|
||||
function compile(ast) {
|
||||
switch (ast.type) {
|
||||
case 'BinaryExpression':
|
||||
return new SourceNode(
|
||||
ast.location.line,
|
||||
ast.location.column,
|
||||
ast.location.source,
|
||||
[compile(ast.left), " + ", compile(ast.right)]
|
||||
);
|
||||
case 'Literal':
|
||||
return new SourceNode(
|
||||
ast.location.line,
|
||||
ast.location.column,
|
||||
ast.location.source,
|
||||
String(ast.value)
|
||||
);
|
||||
// ...
|
||||
default:
|
||||
throw new Error("Bad AST");
|
||||
}
|
||||
}
|
||||
|
||||
var ast = parse("40 + 2", "add.js");
|
||||
console.log(compile(ast).toStringWithSourceMap({
|
||||
file: 'add.js'
|
||||
}));
|
||||
// { code: '40 + 2',
|
||||
// map: [object SourceMapGenerator] }
|
||||
|
||||
#### With SourceMapGenerator (low level API)
|
||||
|
||||
var map = new SourceMapGenerator({
|
||||
file: "source-mapped.js"
|
||||
});
|
||||
|
||||
map.addMapping({
|
||||
generated: {
|
||||
line: 10,
|
||||
column: 35
|
||||
},
|
||||
source: "foo.js",
|
||||
original: {
|
||||
line: 33,
|
||||
column: 2
|
||||
},
|
||||
name: "christopher"
|
||||
});
|
||||
|
||||
console.log(map.toString());
|
||||
// '{"version":3,"file":"source-mapped.js","sources":["foo.js"],"names":["christopher"],"mappings":";;;;;;;;;mCAgCEA"}'
|
||||
|
||||
## API
|
||||
|
||||
Get a reference to the module:
|
||||
|
||||
// NodeJS
|
||||
var sourceMap = require('source-map');
|
||||
|
||||
// Browser builds
|
||||
var sourceMap = window.sourceMap;
|
||||
|
||||
// Inside Firefox
|
||||
let sourceMap = {};
|
||||
Components.utils.import('resource:///modules/devtools/SourceMap.jsm', sourceMap);
|
||||
|
||||
### SourceMapConsumer
|
||||
|
||||
A SourceMapConsumer instance represents a parsed source map which we can query
|
||||
for information about the original file positions by giving it a file position
|
||||
in the generated source.
|
||||
|
||||
#### new SourceMapConsumer(rawSourceMap)
|
||||
|
||||
The only parameter is the raw source map (either as a string which can be
|
||||
`JSON.parse`'d, or an object). According to the spec, source maps have the
|
||||
following attributes:
|
||||
|
||||
* `version`: Which version of the source map spec this map is following.
|
||||
|
||||
* `sources`: An array of URLs to the original source files.
|
||||
|
||||
* `names`: An array of identifiers which can be referrenced by individual
|
||||
mappings.
|
||||
|
||||
* `sourceRoot`: Optional. The URL root from which all sources are relative.
|
||||
|
||||
* `sourcesContent`: Optional. An array of contents of the original source files.
|
||||
|
||||
* `mappings`: A string of base64 VLQs which contain the actual mappings.
|
||||
|
||||
* `file`: Optional. The generated filename this source map is associated with.
|
||||
|
||||
#### SourceMapConsumer.prototype.computeColumnSpans()
|
||||
|
||||
Compute the last column for each generated mapping. The last column is
|
||||
inclusive.
|
||||
|
||||
#### SourceMapConsumer.prototype.originalPositionFor(generatedPosition)
|
||||
|
||||
Returns the original source, line, and column information for the generated
|
||||
source's line and column positions provided. The only argument is an object with
|
||||
the following properties:
|
||||
|
||||
* `line`: The line number in the generated source.
|
||||
|
||||
* `column`: The column number in the generated source.
|
||||
|
||||
and an object is returned with the following properties:
|
||||
|
||||
* `source`: The original source file, or null if this information is not
|
||||
available.
|
||||
|
||||
* `line`: The line number in the original source, or null if this information is
|
||||
not available.
|
||||
|
||||
* `column`: The column number in the original source, or null or null if this
|
||||
information is not available.
|
||||
|
||||
* `name`: The original identifier, or null if this information is not available.
|
||||
|
||||
#### SourceMapConsumer.prototype.generatedPositionFor(originalPosition)
|
||||
|
||||
Returns the generated line and column information for the original source,
|
||||
line, and column positions provided. The only argument is an object with
|
||||
the following properties:
|
||||
|
||||
* `source`: The filename of the original source.
|
||||
|
||||
* `line`: The line number in the original source.
|
||||
|
||||
* `column`: The column number in the original source.
|
||||
|
||||
and an object is returned with the following properties:
|
||||
|
||||
* `line`: The line number in the generated source, or null.
|
||||
|
||||
* `column`: The column number in the generated source, or null.
|
||||
|
||||
#### SourceMapConsumer.prototype.allGeneratedPositionsFor(originalPosition)
|
||||
|
||||
Returns all generated line and column information for the original source
|
||||
and line provided. The only argument is an object with the following
|
||||
properties:
|
||||
|
||||
* `source`: The filename of the original source.
|
||||
|
||||
* `line`: The line number in the original source.
|
||||
|
||||
and an array of objects is returned, each with the following properties:
|
||||
|
||||
* `line`: The line number in the generated source, or null.
|
||||
|
||||
* `column`: The column number in the generated source, or null.
|
||||
|
||||
#### SourceMapConsumer.prototype.sourceContentFor(source)
|
||||
|
||||
Returns the original source content for the source provided. The only
|
||||
argument is the URL of the original source file.
|
||||
|
||||
#### SourceMapConsumer.prototype.eachMapping(callback, context, order)
|
||||
|
||||
Iterate over each mapping between an original source/line/column and a
|
||||
generated line/column in this source map.
|
||||
|
||||
* `callback`: The function that is called with each mapping. Mappings have the
|
||||
form `{ source, generatedLine, generatedColumn, originalLine, originalColumn,
|
||||
name }`
|
||||
|
||||
* `context`: Optional. If specified, this object will be the value of `this`
|
||||
every time that `callback` is called.
|
||||
|
||||
* `order`: Either `SourceMapConsumer.GENERATED_ORDER` or
|
||||
`SourceMapConsumer.ORIGINAL_ORDER`. Specifies whether you want to iterate over
|
||||
the mappings sorted by the generated file's line/column order or the
|
||||
original's source/line/column order, respectively. Defaults to
|
||||
`SourceMapConsumer.GENERATED_ORDER`.
|
||||
|
||||
### SourceMapGenerator
|
||||
|
||||
An instance of the SourceMapGenerator represents a source map which is being
|
||||
built incrementally.
|
||||
|
||||
#### new SourceMapGenerator([startOfSourceMap])
|
||||
|
||||
You may pass an object with the following properties:
|
||||
|
||||
* `file`: The filename of the generated source that this source map is
|
||||
associated with.
|
||||
|
||||
* `sourceRoot`: A root for all relative URLs in this source map.
|
||||
|
||||
* `skipValidation`: Optional. When `true`, disables validation of mappings as
|
||||
they are added. This can improve performance but should be used with
|
||||
discretion, as a last resort. Even then, one should avoid using this flag when
|
||||
running tests, if possible.
|
||||
|
||||
#### SourceMapGenerator.fromSourceMap(sourceMapConsumer)
|
||||
|
||||
Creates a new SourceMapGenerator based on a SourceMapConsumer
|
||||
|
||||
* `sourceMapConsumer` The SourceMap.
|
||||
|
||||
#### SourceMapGenerator.prototype.addMapping(mapping)
|
||||
|
||||
Add a single mapping from original source line and column to the generated
|
||||
source's line and column for this source map being created. The mapping object
|
||||
should have the following properties:
|
||||
|
||||
* `generated`: An object with the generated line and column positions.
|
||||
|
||||
* `original`: An object with the original line and column positions.
|
||||
|
||||
* `source`: The original source file (relative to the sourceRoot).
|
||||
|
||||
* `name`: An optional original token name for this mapping.
|
||||
|
||||
#### SourceMapGenerator.prototype.setSourceContent(sourceFile, sourceContent)
|
||||
|
||||
Set the source content for an original source file.
|
||||
|
||||
* `sourceFile` the URL of the original source file.
|
||||
|
||||
* `sourceContent` the content of the source file.
|
||||
|
||||
#### SourceMapGenerator.prototype.applySourceMap(sourceMapConsumer[, sourceFile[, sourceMapPath]])
|
||||
|
||||
Applies a SourceMap for a source file to the SourceMap.
|
||||
Each mapping to the supplied source file is rewritten using the
|
||||
supplied SourceMap. Note: The resolution for the resulting mappings
|
||||
is the minimium of this map and the supplied map.
|
||||
|
||||
* `sourceMapConsumer`: The SourceMap to be applied.
|
||||
|
||||
* `sourceFile`: Optional. The filename of the source file.
|
||||
If omitted, sourceMapConsumer.file will be used, if it exists.
|
||||
Otherwise an error will be thrown.
|
||||
|
||||
* `sourceMapPath`: Optional. The dirname of the path to the SourceMap
|
||||
to be applied. If relative, it is relative to the SourceMap.
|
||||
|
||||
This parameter is needed when the two SourceMaps aren't in the same
|
||||
directory, and the SourceMap to be applied contains relative source
|
||||
paths. If so, those relative source paths need to be rewritten
|
||||
relative to the SourceMap.
|
||||
|
||||
If omitted, it is assumed that both SourceMaps are in the same directory,
|
||||
thus not needing any rewriting. (Supplying `'.'` has the same effect.)
|
||||
|
||||
#### SourceMapGenerator.prototype.toString()
|
||||
|
||||
Renders the source map being generated to a string.
|
||||
|
||||
### SourceNode
|
||||
|
||||
SourceNodes provide a way to abstract over interpolating and/or concatenating
|
||||
snippets of generated JavaScript source code, while maintaining the line and
|
||||
column information associated between those snippets and the original source
|
||||
code. This is useful as the final intermediate representation a compiler might
|
||||
use before outputting the generated JS and source map.
|
||||
|
||||
#### new SourceNode([line, column, source[, chunk[, name]]])
|
||||
|
||||
* `line`: The original line number associated with this source node, or null if
|
||||
it isn't associated with an original line.
|
||||
|
||||
* `column`: The original column number associated with this source node, or null
|
||||
if it isn't associated with an original column.
|
||||
|
||||
* `source`: The original source's filename; null if no filename is provided.
|
||||
|
||||
* `chunk`: Optional. Is immediately passed to `SourceNode.prototype.add`, see
|
||||
below.
|
||||
|
||||
* `name`: Optional. The original identifier.
|
||||
|
||||
#### SourceNode.fromStringWithSourceMap(code, sourceMapConsumer[, relativePath])
|
||||
|
||||
Creates a SourceNode from generated code and a SourceMapConsumer.
|
||||
|
||||
* `code`: The generated code
|
||||
|
||||
* `sourceMapConsumer` The SourceMap for the generated code
|
||||
|
||||
* `relativePath` The optional path that relative sources in `sourceMapConsumer`
|
||||
should be relative to.
|
||||
|
||||
#### SourceNode.prototype.add(chunk)
|
||||
|
||||
Add a chunk of generated JS to this source node.
|
||||
|
||||
* `chunk`: A string snippet of generated JS code, another instance of
|
||||
`SourceNode`, or an array where each member is one of those things.
|
||||
|
||||
#### SourceNode.prototype.prepend(chunk)
|
||||
|
||||
Prepend a chunk of generated JS to this source node.
|
||||
|
||||
* `chunk`: A string snippet of generated JS code, another instance of
|
||||
`SourceNode`, or an array where each member is one of those things.
|
||||
|
||||
#### SourceNode.prototype.setSourceContent(sourceFile, sourceContent)
|
||||
|
||||
Set the source content for a source file. This will be added to the
|
||||
`SourceMap` in the `sourcesContent` field.
|
||||
|
||||
* `sourceFile`: The filename of the source file
|
||||
|
||||
* `sourceContent`: The content of the source file
|
||||
|
||||
#### SourceNode.prototype.walk(fn)
|
||||
|
||||
Walk over the tree of JS snippets in this node and its children. The walking
|
||||
function is called once for each snippet of JS and is passed that snippet and
|
||||
the its original associated source's line/column location.
|
||||
|
||||
* `fn`: The traversal function.
|
||||
|
||||
#### SourceNode.prototype.walkSourceContents(fn)
|
||||
|
||||
Walk over the tree of SourceNodes. The walking function is called for each
|
||||
source file content and is passed the filename and source content.
|
||||
|
||||
* `fn`: The traversal function.
|
||||
|
||||
#### SourceNode.prototype.join(sep)
|
||||
|
||||
Like `Array.prototype.join` except for SourceNodes. Inserts the separator
|
||||
between each of this source node's children.
|
||||
|
||||
* `sep`: The separator.
|
||||
|
||||
#### SourceNode.prototype.replaceRight(pattern, replacement)
|
||||
|
||||
Call `String.prototype.replace` on the very right-most source snippet. Useful
|
||||
for trimming whitespace from the end of a source node, etc.
|
||||
|
||||
* `pattern`: The pattern to replace.
|
||||
|
||||
* `replacement`: The thing to replace the pattern with.
|
||||
|
||||
#### SourceNode.prototype.toString()
|
||||
|
||||
Return the string representation of this source node. Walks over the tree and
|
||||
concatenates all the various snippets together to one string.
|
||||
|
||||
#### SourceNode.prototype.toStringWithSourceMap([startOfSourceMap])
|
||||
|
||||
Returns the string representation of this tree of source nodes, plus a
|
||||
SourceMapGenerator which contains all the mappings between the generated and
|
||||
original sources.
|
||||
|
||||
The arguments are the same as those to `new SourceMapGenerator`.
|
||||
|
||||
## Tests
|
||||
|
||||
[](https://travis-ci.org/mozilla/source-map)
|
||||
|
||||
Install NodeJS version 0.8.0 or greater, then run `node test/run-tests.js`.
|
||||
|
||||
To add new tests, create a new file named `test/test-<your new test name>.js`
|
||||
and export your test functions with names that start with "test", for example
|
||||
|
||||
exports["test doing the foo bar"] = function (assert, util) {
|
||||
...
|
||||
};
|
||||
|
||||
The new test will be located automatically when you run the suite.
|
||||
|
||||
The `util` argument is the test utility module located at `test/source-map/util`.
|
||||
|
||||
The `assert` argument is a cut down version of node's assert module. You have
|
||||
access to the following assertion functions:
|
||||
|
||||
* `doesNotThrow`
|
||||
|
||||
* `equal`
|
||||
|
||||
* `ok`
|
||||
|
||||
* `strictEqual`
|
||||
|
||||
* `throws`
|
||||
|
||||
(The reason for the restricted set of test functions is because we need the
|
||||
tests to run inside Firefox's test suite as well and so the assert module is
|
||||
shimmed in that environment. See `build/assert-shim.js`.)
|
||||
|
||||
[format]: https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit
|
||||
[feature]: https://wiki.mozilla.org/DevTools/Features/SourceMap
|
||||
[Dryice]: https://github.com/mozilla/dryice
|
||||
56
node_modules/transformers/node_modules/source-map/build/assert-shim.js
generated
vendored
Normal file
56
node_modules/transformers/node_modules/source-map/build/assert-shim.js
generated
vendored
Normal file
@@ -0,0 +1,56 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*/
|
||||
define('test/source-map/assert', ['exports'], function (exports) {
|
||||
|
||||
let do_throw = function (msg) {
|
||||
throw new Error(msg);
|
||||
};
|
||||
|
||||
exports.init = function (throw_fn) {
|
||||
do_throw = throw_fn;
|
||||
};
|
||||
|
||||
exports.doesNotThrow = function (fn) {
|
||||
try {
|
||||
fn();
|
||||
}
|
||||
catch (e) {
|
||||
do_throw(e.message);
|
||||
}
|
||||
};
|
||||
|
||||
exports.equal = function (actual, expected, msg) {
|
||||
msg = msg || String(actual) + ' != ' + String(expected);
|
||||
if (actual != expected) {
|
||||
do_throw(msg);
|
||||
}
|
||||
};
|
||||
|
||||
exports.ok = function (val, msg) {
|
||||
msg = msg || String(val) + ' is falsey';
|
||||
if (!Boolean(val)) {
|
||||
do_throw(msg);
|
||||
}
|
||||
};
|
||||
|
||||
exports.strictEqual = function (actual, expected, msg) {
|
||||
msg = msg || String(actual) + ' !== ' + String(expected);
|
||||
if (actual !== expected) {
|
||||
do_throw(msg);
|
||||
}
|
||||
};
|
||||
|
||||
exports.throws = function (fn) {
|
||||
try {
|
||||
fn();
|
||||
do_throw('Expected an error to be thrown, but it wasn\'t.');
|
||||
}
|
||||
catch (e) {
|
||||
}
|
||||
};
|
||||
|
||||
});
|
||||
152
node_modules/transformers/node_modules/source-map/build/mini-require.js
generated
vendored
Normal file
152
node_modules/transformers/node_modules/source-map/build/mini-require.js
generated
vendored
Normal file
@@ -0,0 +1,152 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*/
|
||||
|
||||
/**
|
||||
* Define a module along with a payload.
|
||||
* @param {string} moduleName Name for the payload
|
||||
* @param {ignored} deps Ignored. For compatibility with CommonJS AMD Spec
|
||||
* @param {function} payload Function with (require, exports, module) params
|
||||
*/
|
||||
function define(moduleName, deps, payload) {
|
||||
if (typeof moduleName != "string") {
|
||||
throw new TypeError('Expected string, got: ' + moduleName);
|
||||
}
|
||||
|
||||
if (arguments.length == 2) {
|
||||
payload = deps;
|
||||
}
|
||||
|
||||
if (moduleName in define.modules) {
|
||||
throw new Error("Module already defined: " + moduleName);
|
||||
}
|
||||
define.modules[moduleName] = payload;
|
||||
};
|
||||
|
||||
/**
|
||||
* The global store of un-instantiated modules
|
||||
*/
|
||||
define.modules = {};
|
||||
|
||||
|
||||
/**
|
||||
* We invoke require() in the context of a Domain so we can have multiple
|
||||
* sets of modules running separate from each other.
|
||||
* This contrasts with JSMs which are singletons, Domains allows us to
|
||||
* optionally load a CommonJS module twice with separate data each time.
|
||||
* Perhaps you want 2 command lines with a different set of commands in each,
|
||||
* for example.
|
||||
*/
|
||||
function Domain() {
|
||||
this.modules = {};
|
||||
this._currentModule = null;
|
||||
}
|
||||
|
||||
(function () {
|
||||
|
||||
/**
|
||||
* Lookup module names and resolve them by calling the definition function if
|
||||
* needed.
|
||||
* There are 2 ways to call this, either with an array of dependencies and a
|
||||
* callback to call when the dependencies are found (which can happen
|
||||
* asynchronously in an in-page context) or with a single string an no callback
|
||||
* where the dependency is resolved synchronously and returned.
|
||||
* The API is designed to be compatible with the CommonJS AMD spec and
|
||||
* RequireJS.
|
||||
* @param {string[]|string} deps A name, or names for the payload
|
||||
* @param {function|undefined} callback Function to call when the dependencies
|
||||
* are resolved
|
||||
* @return {undefined|object} The module required or undefined for
|
||||
* array/callback method
|
||||
*/
|
||||
Domain.prototype.require = function(deps, callback) {
|
||||
if (Array.isArray(deps)) {
|
||||
var params = deps.map(function(dep) {
|
||||
return this.lookup(dep);
|
||||
}, this);
|
||||
if (callback) {
|
||||
callback.apply(null, params);
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
else {
|
||||
return this.lookup(deps);
|
||||
}
|
||||
};
|
||||
|
||||
function normalize(path) {
|
||||
var bits = path.split('/');
|
||||
var i = 1;
|
||||
while (i < bits.length) {
|
||||
if (bits[i] === '..') {
|
||||
bits.splice(i-1, 1);
|
||||
} else if (bits[i] === '.') {
|
||||
bits.splice(i, 1);
|
||||
} else {
|
||||
i++;
|
||||
}
|
||||
}
|
||||
return bits.join('/');
|
||||
}
|
||||
|
||||
function join(a, b) {
|
||||
a = a.trim();
|
||||
b = b.trim();
|
||||
if (/^\//.test(b)) {
|
||||
return b;
|
||||
} else {
|
||||
return a.replace(/\/*$/, '/') + b;
|
||||
}
|
||||
}
|
||||
|
||||
function dirname(path) {
|
||||
var bits = path.split('/');
|
||||
bits.pop();
|
||||
return bits.join('/');
|
||||
}
|
||||
|
||||
/**
|
||||
* Lookup module names and resolve them by calling the definition function if
|
||||
* needed.
|
||||
* @param {string} moduleName A name for the payload to lookup
|
||||
* @return {object} The module specified by aModuleName or null if not found.
|
||||
*/
|
||||
Domain.prototype.lookup = function(moduleName) {
|
||||
if (/^\./.test(moduleName)) {
|
||||
moduleName = normalize(join(dirname(this._currentModule), moduleName));
|
||||
}
|
||||
|
||||
if (moduleName in this.modules) {
|
||||
var module = this.modules[moduleName];
|
||||
return module;
|
||||
}
|
||||
|
||||
if (!(moduleName in define.modules)) {
|
||||
throw new Error("Module not defined: " + moduleName);
|
||||
}
|
||||
|
||||
var module = define.modules[moduleName];
|
||||
|
||||
if (typeof module == "function") {
|
||||
var exports = {};
|
||||
var previousModule = this._currentModule;
|
||||
this._currentModule = moduleName;
|
||||
module(this.require.bind(this), exports, { id: moduleName, uri: "" });
|
||||
this._currentModule = previousModule;
|
||||
module = exports;
|
||||
}
|
||||
|
||||
// cache the resulting module object for next time
|
||||
this.modules[moduleName] = module;
|
||||
|
||||
return module;
|
||||
};
|
||||
|
||||
}());
|
||||
|
||||
define.Domain = Domain;
|
||||
define.globalDomain = new Domain();
|
||||
var require = define.globalDomain.require.bind(define.globalDomain);
|
||||
20
node_modules/transformers/node_modules/source-map/build/prefix-source-map.jsm
generated
vendored
Normal file
20
node_modules/transformers/node_modules/source-map/build/prefix-source-map.jsm
generated
vendored
Normal file
@@ -0,0 +1,20 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*/
|
||||
|
||||
/*
|
||||
* WARNING!
|
||||
*
|
||||
* Do not edit this file directly, it is built from the sources at
|
||||
* https://github.com/mozilla/source-map/
|
||||
*/
|
||||
|
||||
///////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
|
||||
this.EXPORTED_SYMBOLS = [ "SourceMapConsumer", "SourceMapGenerator", "SourceNode" ];
|
||||
|
||||
Components.utils.import('resource://gre/modules/devtools/Require.jsm');
|
||||
18
node_modules/transformers/node_modules/source-map/build/prefix-utils.jsm
generated
vendored
Normal file
18
node_modules/transformers/node_modules/source-map/build/prefix-utils.jsm
generated
vendored
Normal file
@@ -0,0 +1,18 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*/
|
||||
|
||||
/*
|
||||
* WARNING!
|
||||
*
|
||||
* Do not edit this file directly, it is built from the sources at
|
||||
* https://github.com/mozilla/source-map/
|
||||
*/
|
||||
|
||||
Components.utils.import('resource://gre/modules/devtools/Require.jsm');
|
||||
Components.utils.import('resource://gre/modules/devtools/SourceMap.jsm');
|
||||
|
||||
this.EXPORTED_SYMBOLS = [ "define", "runSourceMapTests" ];
|
||||
8
node_modules/transformers/node_modules/source-map/build/suffix-browser.js
generated
vendored
Normal file
8
node_modules/transformers/node_modules/source-map/build/suffix-browser.js
generated
vendored
Normal file
@@ -0,0 +1,8 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
///////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
this.sourceMap = {
|
||||
SourceMapConsumer: require('source-map/source-map-consumer').SourceMapConsumer,
|
||||
SourceMapGenerator: require('source-map/source-map-generator').SourceMapGenerator,
|
||||
SourceNode: require('source-map/source-node').SourceNode
|
||||
};
|
||||
6
node_modules/transformers/node_modules/source-map/build/suffix-source-map.jsm
generated
vendored
Normal file
6
node_modules/transformers/node_modules/source-map/build/suffix-source-map.jsm
generated
vendored
Normal file
@@ -0,0 +1,6 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
///////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
this.SourceMapConsumer = require('source-map/source-map-consumer').SourceMapConsumer;
|
||||
this.SourceMapGenerator = require('source-map/source-map-generator').SourceMapGenerator;
|
||||
this.SourceNode = require('source-map/source-node').SourceNode;
|
||||
21
node_modules/transformers/node_modules/source-map/build/suffix-utils.jsm
generated
vendored
Normal file
21
node_modules/transformers/node_modules/source-map/build/suffix-utils.jsm
generated
vendored
Normal file
@@ -0,0 +1,21 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*/
|
||||
function runSourceMapTests(modName, do_throw) {
|
||||
let mod = require(modName);
|
||||
let assert = require('test/source-map/assert');
|
||||
let util = require('test/source-map/util');
|
||||
|
||||
assert.init(do_throw);
|
||||
|
||||
for (let k in mod) {
|
||||
if (/^test/.test(k)) {
|
||||
mod[k](assert, util);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
this.runSourceMapTests = runSourceMapTests;
|
||||
8
node_modules/transformers/node_modules/source-map/build/test-prefix.js
generated
vendored
Normal file
8
node_modules/transformers/node_modules/source-map/build/test-prefix.js
generated
vendored
Normal file
@@ -0,0 +1,8 @@
|
||||
/*
|
||||
* WARNING!
|
||||
*
|
||||
* Do not edit this file directly, it is built from the sources at
|
||||
* https://github.com/mozilla/source-map/
|
||||
*/
|
||||
|
||||
Components.utils.import('resource://test/Utils.jsm');
|
||||
3
node_modules/transformers/node_modules/source-map/build/test-suffix.js
generated
vendored
Normal file
3
node_modules/transformers/node_modules/source-map/build/test-suffix.js
generated
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
function run_test() {
|
||||
runSourceMapTests('{THIS_MODULE}', do_throw);
|
||||
}
|
||||
8
node_modules/transformers/node_modules/source-map/lib/source-map.js
generated
vendored
Normal file
8
node_modules/transformers/node_modules/source-map/lib/source-map.js
generated
vendored
Normal file
@@ -0,0 +1,8 @@
|
||||
/*
|
||||
* Copyright 2009-2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE.txt or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*/
|
||||
exports.SourceMapGenerator = require('./source-map/source-map-generator').SourceMapGenerator;
|
||||
exports.SourceMapConsumer = require('./source-map/source-map-consumer').SourceMapConsumer;
|
||||
exports.SourceNode = require('./source-map/source-node').SourceNode;
|
||||
97
node_modules/transformers/node_modules/source-map/lib/source-map/array-set.js
generated
vendored
Normal file
97
node_modules/transformers/node_modules/source-map/lib/source-map/array-set.js
generated
vendored
Normal file
@@ -0,0 +1,97 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*/
|
||||
if (typeof define !== 'function') {
|
||||
var define = require('amdefine')(module, require);
|
||||
}
|
||||
define(function (require, exports, module) {
|
||||
|
||||
var util = require('./util');
|
||||
|
||||
/**
|
||||
* A data structure which is a combination of an array and a set. Adding a new
|
||||
* member is O(1), testing for membership is O(1), and finding the index of an
|
||||
* element is O(1). Removing elements from the set is not supported. Only
|
||||
* strings are supported for membership.
|
||||
*/
|
||||
function ArraySet() {
|
||||
this._array = [];
|
||||
this._set = {};
|
||||
}
|
||||
|
||||
/**
|
||||
* Static method for creating ArraySet instances from an existing array.
|
||||
*/
|
||||
ArraySet.fromArray = function ArraySet_fromArray(aArray, aAllowDuplicates) {
|
||||
var set = new ArraySet();
|
||||
for (var i = 0, len = aArray.length; i < len; i++) {
|
||||
set.add(aArray[i], aAllowDuplicates);
|
||||
}
|
||||
return set;
|
||||
};
|
||||
|
||||
/**
|
||||
* Add the given string to this set.
|
||||
*
|
||||
* @param String aStr
|
||||
*/
|
||||
ArraySet.prototype.add = function ArraySet_add(aStr, aAllowDuplicates) {
|
||||
var isDuplicate = this.has(aStr);
|
||||
var idx = this._array.length;
|
||||
if (!isDuplicate || aAllowDuplicates) {
|
||||
this._array.push(aStr);
|
||||
}
|
||||
if (!isDuplicate) {
|
||||
this._set[util.toSetString(aStr)] = idx;
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Is the given string a member of this set?
|
||||
*
|
||||
* @param String aStr
|
||||
*/
|
||||
ArraySet.prototype.has = function ArraySet_has(aStr) {
|
||||
return Object.prototype.hasOwnProperty.call(this._set,
|
||||
util.toSetString(aStr));
|
||||
};
|
||||
|
||||
/**
|
||||
* What is the index of the given string in the array?
|
||||
*
|
||||
* @param String aStr
|
||||
*/
|
||||
ArraySet.prototype.indexOf = function ArraySet_indexOf(aStr) {
|
||||
if (this.has(aStr)) {
|
||||
return this._set[util.toSetString(aStr)];
|
||||
}
|
||||
throw new Error('"' + aStr + '" is not in the set.');
|
||||
};
|
||||
|
||||
/**
|
||||
* What is the element at the given index?
|
||||
*
|
||||
* @param Number aIdx
|
||||
*/
|
||||
ArraySet.prototype.at = function ArraySet_at(aIdx) {
|
||||
if (aIdx >= 0 && aIdx < this._array.length) {
|
||||
return this._array[aIdx];
|
||||
}
|
||||
throw new Error('No element indexed by ' + aIdx);
|
||||
};
|
||||
|
||||
/**
|
||||
* Returns the array representation of this set (which has the proper indices
|
||||
* indicated by indexOf). Note that this is a copy of the internal array used
|
||||
* for storing the members so that no one can mess with internal state.
|
||||
*/
|
||||
ArraySet.prototype.toArray = function ArraySet_toArray() {
|
||||
return this._array.slice();
|
||||
};
|
||||
|
||||
exports.ArraySet = ArraySet;
|
||||
|
||||
});
|
||||
142
node_modules/transformers/node_modules/source-map/lib/source-map/base64-vlq.js
generated
vendored
Normal file
142
node_modules/transformers/node_modules/source-map/lib/source-map/base64-vlq.js
generated
vendored
Normal file
@@ -0,0 +1,142 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*
|
||||
* Based on the Base 64 VLQ implementation in Closure Compiler:
|
||||
* https://code.google.com/p/closure-compiler/source/browse/trunk/src/com/google/debugging/sourcemap/Base64VLQ.java
|
||||
*
|
||||
* Copyright 2011 The Closure Compiler Authors. All rights reserved.
|
||||
* Redistribution and use in source and binary forms, with or without
|
||||
* modification, are permitted provided that the following conditions are
|
||||
* met:
|
||||
*
|
||||
* * Redistributions of source code must retain the above copyright
|
||||
* notice, this list of conditions and the following disclaimer.
|
||||
* * Redistributions in binary form must reproduce the above
|
||||
* copyright notice, this list of conditions and the following
|
||||
* disclaimer in the documentation and/or other materials provided
|
||||
* with the distribution.
|
||||
* * Neither the name of Google Inc. nor the names of its
|
||||
* contributors may be used to endorse or promote products derived
|
||||
* from this software without specific prior written permission.
|
||||
*
|
||||
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
*/
|
||||
if (typeof define !== 'function') {
|
||||
var define = require('amdefine')(module, require);
|
||||
}
|
||||
define(function (require, exports, module) {
|
||||
|
||||
var base64 = require('./base64');
|
||||
|
||||
// A single base 64 digit can contain 6 bits of data. For the base 64 variable
|
||||
// length quantities we use in the source map spec, the first bit is the sign,
|
||||
// the next four bits are the actual value, and the 6th bit is the
|
||||
// continuation bit. The continuation bit tells us whether there are more
|
||||
// digits in this value following this digit.
|
||||
//
|
||||
// Continuation
|
||||
// | Sign
|
||||
// | |
|
||||
// V V
|
||||
// 101011
|
||||
|
||||
var VLQ_BASE_SHIFT = 5;
|
||||
|
||||
// binary: 100000
|
||||
var VLQ_BASE = 1 << VLQ_BASE_SHIFT;
|
||||
|
||||
// binary: 011111
|
||||
var VLQ_BASE_MASK = VLQ_BASE - 1;
|
||||
|
||||
// binary: 100000
|
||||
var VLQ_CONTINUATION_BIT = VLQ_BASE;
|
||||
|
||||
/**
|
||||
* Converts from a two-complement value to a value where the sign bit is
|
||||
* placed in the least significant bit. For example, as decimals:
|
||||
* 1 becomes 2 (10 binary), -1 becomes 3 (11 binary)
|
||||
* 2 becomes 4 (100 binary), -2 becomes 5 (101 binary)
|
||||
*/
|
||||
function toVLQSigned(aValue) {
|
||||
return aValue < 0
|
||||
? ((-aValue) << 1) + 1
|
||||
: (aValue << 1) + 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts to a two-complement value from a value where the sign bit is
|
||||
* placed in the least significant bit. For example, as decimals:
|
||||
* 2 (10 binary) becomes 1, 3 (11 binary) becomes -1
|
||||
* 4 (100 binary) becomes 2, 5 (101 binary) becomes -2
|
||||
*/
|
||||
function fromVLQSigned(aValue) {
|
||||
var isNegative = (aValue & 1) === 1;
|
||||
var shifted = aValue >> 1;
|
||||
return isNegative
|
||||
? -shifted
|
||||
: shifted;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the base 64 VLQ encoded value.
|
||||
*/
|
||||
exports.encode = function base64VLQ_encode(aValue) {
|
||||
var encoded = "";
|
||||
var digit;
|
||||
|
||||
var vlq = toVLQSigned(aValue);
|
||||
|
||||
do {
|
||||
digit = vlq & VLQ_BASE_MASK;
|
||||
vlq >>>= VLQ_BASE_SHIFT;
|
||||
if (vlq > 0) {
|
||||
// There are still more digits in this value, so we must make sure the
|
||||
// continuation bit is marked.
|
||||
digit |= VLQ_CONTINUATION_BIT;
|
||||
}
|
||||
encoded += base64.encode(digit);
|
||||
} while (vlq > 0);
|
||||
|
||||
return encoded;
|
||||
};
|
||||
|
||||
/**
|
||||
* Decodes the next base 64 VLQ value from the given string and returns the
|
||||
* value and the rest of the string via the out parameter.
|
||||
*/
|
||||
exports.decode = function base64VLQ_decode(aStr, aOutParam) {
|
||||
var i = 0;
|
||||
var strLen = aStr.length;
|
||||
var result = 0;
|
||||
var shift = 0;
|
||||
var continuation, digit;
|
||||
|
||||
do {
|
||||
if (i >= strLen) {
|
||||
throw new Error("Expected more digits in base 64 VLQ value.");
|
||||
}
|
||||
digit = base64.decode(aStr.charAt(i++));
|
||||
continuation = !!(digit & VLQ_CONTINUATION_BIT);
|
||||
digit &= VLQ_BASE_MASK;
|
||||
result = result + (digit << shift);
|
||||
shift += VLQ_BASE_SHIFT;
|
||||
} while (continuation);
|
||||
|
||||
aOutParam.value = fromVLQSigned(result);
|
||||
aOutParam.rest = aStr.slice(i);
|
||||
};
|
||||
|
||||
});
|
||||
42
node_modules/transformers/node_modules/source-map/lib/source-map/base64.js
generated
vendored
Normal file
42
node_modules/transformers/node_modules/source-map/lib/source-map/base64.js
generated
vendored
Normal file
@@ -0,0 +1,42 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*/
|
||||
if (typeof define !== 'function') {
|
||||
var define = require('amdefine')(module, require);
|
||||
}
|
||||
define(function (require, exports, module) {
|
||||
|
||||
var charToIntMap = {};
|
||||
var intToCharMap = {};
|
||||
|
||||
'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'
|
||||
.split('')
|
||||
.forEach(function (ch, index) {
|
||||
charToIntMap[ch] = index;
|
||||
intToCharMap[index] = ch;
|
||||
});
|
||||
|
||||
/**
|
||||
* Encode an integer in the range of 0 to 63 to a single base 64 digit.
|
||||
*/
|
||||
exports.encode = function base64_encode(aNumber) {
|
||||
if (aNumber in intToCharMap) {
|
||||
return intToCharMap[aNumber];
|
||||
}
|
||||
throw new TypeError("Must be between 0 and 63: " + aNumber);
|
||||
};
|
||||
|
||||
/**
|
||||
* Decode a single base 64 digit to an integer.
|
||||
*/
|
||||
exports.decode = function base64_decode(aChar) {
|
||||
if (aChar in charToIntMap) {
|
||||
return charToIntMap[aChar];
|
||||
}
|
||||
throw new TypeError("Not a valid base 64 digit: " + aChar);
|
||||
};
|
||||
|
||||
});
|
||||
80
node_modules/transformers/node_modules/source-map/lib/source-map/binary-search.js
generated
vendored
Normal file
80
node_modules/transformers/node_modules/source-map/lib/source-map/binary-search.js
generated
vendored
Normal file
@@ -0,0 +1,80 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*/
|
||||
if (typeof define !== 'function') {
|
||||
var define = require('amdefine')(module, require);
|
||||
}
|
||||
define(function (require, exports, module) {
|
||||
|
||||
/**
|
||||
* Recursive implementation of binary search.
|
||||
*
|
||||
* @param aLow Indices here and lower do not contain the needle.
|
||||
* @param aHigh Indices here and higher do not contain the needle.
|
||||
* @param aNeedle The element being searched for.
|
||||
* @param aHaystack The non-empty array being searched.
|
||||
* @param aCompare Function which takes two elements and returns -1, 0, or 1.
|
||||
*/
|
||||
function recursiveSearch(aLow, aHigh, aNeedle, aHaystack, aCompare) {
|
||||
// This function terminates when one of the following is true:
|
||||
//
|
||||
// 1. We find the exact element we are looking for.
|
||||
//
|
||||
// 2. We did not find the exact element, but we can return the index of
|
||||
// the next closest element that is less than that element.
|
||||
//
|
||||
// 3. We did not find the exact element, and there is no next-closest
|
||||
// element which is less than the one we are searching for, so we
|
||||
// return -1.
|
||||
var mid = Math.floor((aHigh - aLow) / 2) + aLow;
|
||||
var cmp = aCompare(aNeedle, aHaystack[mid], true);
|
||||
if (cmp === 0) {
|
||||
// Found the element we are looking for.
|
||||
return mid;
|
||||
}
|
||||
else if (cmp > 0) {
|
||||
// aHaystack[mid] is greater than our needle.
|
||||
if (aHigh - mid > 1) {
|
||||
// The element is in the upper half.
|
||||
return recursiveSearch(mid, aHigh, aNeedle, aHaystack, aCompare);
|
||||
}
|
||||
// We did not find an exact match, return the next closest one
|
||||
// (termination case 2).
|
||||
return mid;
|
||||
}
|
||||
else {
|
||||
// aHaystack[mid] is less than our needle.
|
||||
if (mid - aLow > 1) {
|
||||
// The element is in the lower half.
|
||||
return recursiveSearch(aLow, mid, aNeedle, aHaystack, aCompare);
|
||||
}
|
||||
// The exact needle element was not found in this haystack. Determine if
|
||||
// we are in termination case (2) or (3) and return the appropriate thing.
|
||||
return aLow < 0 ? -1 : aLow;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* This is an implementation of binary search which will always try and return
|
||||
* the index of next lowest value checked if there is no exact hit. This is
|
||||
* because mappings between original and generated line/col pairs are single
|
||||
* points, and there is an implicit region between each of them, so a miss
|
||||
* just means that you aren't on the very start of a region.
|
||||
*
|
||||
* @param aNeedle The element you are looking for.
|
||||
* @param aHaystack The array that is being searched.
|
||||
* @param aCompare A function which takes the needle and an element in the
|
||||
* array and returns -1, 0, or 1 depending on whether the needle is less
|
||||
* than, equal to, or greater than the element, respectively.
|
||||
*/
|
||||
exports.search = function search(aNeedle, aHaystack, aCompare) {
|
||||
if (aHaystack.length === 0) {
|
||||
return -1;
|
||||
}
|
||||
return recursiveSearch(-1, aHaystack.length, aNeedle, aHaystack, aCompare)
|
||||
};
|
||||
|
||||
});
|
||||
86
node_modules/transformers/node_modules/source-map/lib/source-map/mapping-list.js
generated
vendored
Normal file
86
node_modules/transformers/node_modules/source-map/lib/source-map/mapping-list.js
generated
vendored
Normal file
@@ -0,0 +1,86 @@
/* -*- Mode: js; js-indent-level: 2; -*- */
/*
 * Copyright 2014 Mozilla Foundation and contributors
 * Licensed under the New BSD license. See LICENSE or:
 * http://opensource.org/licenses/BSD-3-Clause
 */
if (typeof define !== 'function') {
  var define = require('amdefine')(module, require);
}
define(function (require, exports, module) {

var util = require('./util');

/**
 * Determine whether mappingB is after mappingA with respect to generated
 * position.
 */
function generatedPositionAfter(mappingA, mappingB) {
  // Optimized for most common case
  var lineA = mappingA.generatedLine;
  var lineB = mappingB.generatedLine;
  var columnA = mappingA.generatedColumn;
  var columnB = mappingB.generatedColumn;
  return lineB > lineA || lineB == lineA && columnB >= columnA ||
         util.compareByGeneratedPositions(mappingA, mappingB) <= 0;
}

/**
 * A data structure to provide a sorted view of accumulated mappings in a
 * performance conscious manner. It trades a negligible overhead in the
 * general case for a large speedup in case of mappings being added in order.
 */
function MappingList() {
  this._array = [];
  this._sorted = true;
  // Serves as infimum
  this._last = {generatedLine: -1, generatedColumn: 0};
}

/**
 * Iterate through internal items. This method takes the same arguments that
 * `Array.prototype.forEach` takes.
 *
 * NOTE: The order of the mappings is NOT guaranteed.
 */
MappingList.prototype.unsortedForEach =
  function MappingList_forEach(aCallback, aThisArg) {
    this._array.forEach(aCallback, aThisArg);
  };

/**
 * Add the given source mapping.
 *
 * @param Object aMapping
 */
MappingList.prototype.add = function MappingList_add(aMapping) {
  if (generatedPositionAfter(this._last, aMapping)) {
    this._last = aMapping;
    this._array.push(aMapping);
  } else {
    this._sorted = false;
    this._array.push(aMapping);
  }
};

/**
 * Returns the flat, sorted array of mappings. The mappings are sorted by
 * generated position.
 *
 * WARNING: This method returns internal data without copying, for
 * performance. The return value must NOT be mutated, and should be treated as
 * an immutable borrow. If you want to take ownership, you must make your own
 * copy.
 */
MappingList.prototype.toArray = function MappingList_toArray() {
  if (!this._sorted) {
    this._array.sort(util.compareByGeneratedPositions);
    this._sorted = true;
  }
  return this._array;
};

exports.MappingList = MappingList;

});
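A hedged sketch of how MappingList is meant to be used; the mapping objects below are made-up minimal examples:

```js
var MappingList = require('./mapping-list').MappingList;

var list = new MappingList();

// In-order additions keep the internal array sorted with no extra work...
list.add({ generatedLine: 1, generatedColumn: 0 });
list.add({ generatedLine: 1, generatedColumn: 5 });

// ...while an out-of-order addition only flags the list for a deferred sort.
list.add({ generatedLine: 1, generatedColumn: 2 });

// toArray() sorts lazily and returns the internal array, so per the WARNING
// above the result must be treated as read-only.
var sorted = list.toArray();
```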
575
node_modules/transformers/node_modules/source-map/lib/source-map/source-map-consumer.js
generated
vendored
Normal file
@@ -0,0 +1,575 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*/
|
||||
if (typeof define !== 'function') {
|
||||
var define = require('amdefine')(module, require);
|
||||
}
|
||||
define(function (require, exports, module) {
|
||||
|
||||
var util = require('./util');
|
||||
var binarySearch = require('./binary-search');
|
||||
var ArraySet = require('./array-set').ArraySet;
|
||||
var base64VLQ = require('./base64-vlq');
|
||||
|
||||
/**
|
||||
* A SourceMapConsumer instance represents a parsed source map which we can
|
||||
* query for information about the original file positions by giving it a file
|
||||
* position in the generated source.
|
||||
*
|
||||
* The only parameter is the raw source map (either as a JSON string, or
|
||||
* already parsed to an object). According to the spec, source maps have the
|
||||
* following attributes:
|
||||
*
|
||||
* - version: Which version of the source map spec this map is following.
|
||||
* - sources: An array of URLs to the original source files.
|
||||
* - names: An array of identifiers which can be referenced by individual mappings.
|
||||
* - sourceRoot: Optional. The URL root from which all sources are relative.
|
||||
* - sourcesContent: Optional. An array of contents of the original source files.
|
||||
* - mappings: A string of base64 VLQs which contain the actual mappings.
|
||||
* - file: Optional. The generated file this source map is associated with.
|
||||
*
|
||||
* Here is an example source map, taken from the source map spec[0]:
|
||||
*
|
||||
* {
|
||||
* version : 3,
|
||||
* file: "out.js",
|
||||
* sourceRoot : "",
|
||||
* sources: ["foo.js", "bar.js"],
|
||||
* names: ["src", "maps", "are", "fun"],
|
||||
* mappings: "AA,AB;;ABCDE;"
|
||||
* }
|
||||
*
|
||||
* [0]: https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit?pli=1#
|
||||
*/
|
||||
function SourceMapConsumer(aSourceMap) {
|
||||
var sourceMap = aSourceMap;
|
||||
if (typeof aSourceMap === 'string') {
|
||||
sourceMap = JSON.parse(aSourceMap.replace(/^\)\]\}'/, ''));
|
||||
}
|
||||
|
||||
var version = util.getArg(sourceMap, 'version');
|
||||
var sources = util.getArg(sourceMap, 'sources');
|
||||
// Sass 3.3 leaves out the 'names' array, so we deviate from the spec (which
|
||||
// requires the array) to play nice here.
|
||||
var names = util.getArg(sourceMap, 'names', []);
|
||||
var sourceRoot = util.getArg(sourceMap, 'sourceRoot', null);
|
||||
var sourcesContent = util.getArg(sourceMap, 'sourcesContent', null);
|
||||
var mappings = util.getArg(sourceMap, 'mappings');
|
||||
var file = util.getArg(sourceMap, 'file', null);
|
||||
|
||||
// Once again, Sass deviates from the spec and supplies the version as a
|
||||
// string rather than a number, so we use loose equality checking here.
|
||||
if (version != this._version) {
|
||||
throw new Error('Unsupported version: ' + version);
|
||||
}
|
||||
|
||||
// Some source maps produce relative source paths like "./foo.js" instead of
|
||||
// "foo.js". Normalize these first so that future comparisons will succeed.
|
||||
// See bugzil.la/1090768.
|
||||
sources = sources.map(util.normalize);
|
||||
|
||||
// Pass `true` below to allow duplicate names and sources. While source maps
|
||||
// are intended to be compressed and deduplicated, the TypeScript compiler
|
||||
// sometimes generates source maps with duplicates in them. See Github issue
|
||||
// #72 and bugzil.la/889492.
|
||||
this._names = ArraySet.fromArray(names, true);
|
||||
this._sources = ArraySet.fromArray(sources, true);
|
||||
|
||||
this.sourceRoot = sourceRoot;
|
||||
this.sourcesContent = sourcesContent;
|
||||
this._mappings = mappings;
|
||||
this.file = file;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a SourceMapConsumer from a SourceMapGenerator.
|
||||
*
|
||||
* @param SourceMapGenerator aSourceMap
|
||||
* The source map that will be consumed.
|
||||
* @returns SourceMapConsumer
|
||||
*/
|
||||
SourceMapConsumer.fromSourceMap =
|
||||
function SourceMapConsumer_fromSourceMap(aSourceMap) {
|
||||
var smc = Object.create(SourceMapConsumer.prototype);
|
||||
|
||||
smc._names = ArraySet.fromArray(aSourceMap._names.toArray(), true);
|
||||
smc._sources = ArraySet.fromArray(aSourceMap._sources.toArray(), true);
|
||||
smc.sourceRoot = aSourceMap._sourceRoot;
|
||||
smc.sourcesContent = aSourceMap._generateSourcesContent(smc._sources.toArray(),
|
||||
smc.sourceRoot);
|
||||
smc.file = aSourceMap._file;
|
||||
|
||||
smc.__generatedMappings = aSourceMap._mappings.toArray().slice();
|
||||
smc.__originalMappings = aSourceMap._mappings.toArray().slice()
|
||||
.sort(util.compareByOriginalPositions);
|
||||
|
||||
return smc;
|
||||
};
|
||||
|
||||
/**
|
||||
* The version of the source mapping spec that we are consuming.
|
||||
*/
|
||||
SourceMapConsumer.prototype._version = 3;
|
||||
|
||||
/**
|
||||
* The list of original sources.
|
||||
*/
|
||||
Object.defineProperty(SourceMapConsumer.prototype, 'sources', {
|
||||
get: function () {
|
||||
return this._sources.toArray().map(function (s) {
|
||||
return this.sourceRoot != null ? util.join(this.sourceRoot, s) : s;
|
||||
}, this);
|
||||
}
|
||||
});
|
||||
|
||||
// `__generatedMappings` and `__originalMappings` are arrays that hold the
|
||||
// parsed mapping coordinates from the source map's "mappings" attribute. They
|
||||
// are lazily instantiated, accessed via the `_generatedMappings` and
|
||||
// `_originalMappings` getters respectively, and we only parse the mappings
|
||||
// and create these arrays once queried for a source location. We jump through
|
||||
// these hoops because there can be many thousands of mappings, and parsing
|
||||
// them is expensive, so we only want to do it if we must.
|
||||
//
|
||||
// Each object in the arrays is of the form:
|
||||
//
|
||||
// {
|
||||
// generatedLine: The line number in the generated code,
|
||||
// generatedColumn: The column number in the generated code,
|
||||
// source: The path to the original source file that generated this
|
||||
// chunk of code,
|
||||
// originalLine: The line number in the original source that
|
||||
// corresponds to this chunk of generated code,
|
||||
// originalColumn: The column number in the original source that
|
||||
// corresponds to this chunk of generated code,
|
||||
// name: The name of the original symbol which generated this chunk of
|
||||
// code.
|
||||
// }
|
||||
//
|
||||
// All properties except for `generatedLine` and `generatedColumn` can be
|
||||
// `null`.
|
||||
//
|
||||
// `_generatedMappings` is ordered by the generated positions.
|
||||
//
|
||||
// `_originalMappings` is ordered by the original positions.
|
||||
|
||||
SourceMapConsumer.prototype.__generatedMappings = null;
|
||||
Object.defineProperty(SourceMapConsumer.prototype, '_generatedMappings', {
|
||||
get: function () {
|
||||
if (!this.__generatedMappings) {
|
||||
this.__generatedMappings = [];
|
||||
this.__originalMappings = [];
|
||||
this._parseMappings(this._mappings, this.sourceRoot);
|
||||
}
|
||||
|
||||
return this.__generatedMappings;
|
||||
}
|
||||
});
|
||||
|
||||
SourceMapConsumer.prototype.__originalMappings = null;
|
||||
Object.defineProperty(SourceMapConsumer.prototype, '_originalMappings', {
|
||||
get: function () {
|
||||
if (!this.__originalMappings) {
|
||||
this.__generatedMappings = [];
|
||||
this.__originalMappings = [];
|
||||
this._parseMappings(this._mappings, this.sourceRoot);
|
||||
}
|
||||
|
||||
return this.__originalMappings;
|
||||
}
|
||||
});
|
||||
|
||||
SourceMapConsumer.prototype._nextCharIsMappingSeparator =
|
||||
function SourceMapConsumer_nextCharIsMappingSeparator(aStr) {
|
||||
var c = aStr.charAt(0);
|
||||
return c === ";" || c === ",";
|
||||
};
|
||||
|
||||
/**
|
||||
* Parse the mappings in a string into a data structure which we can easily
|
||||
* query (the ordered arrays in the `this.__generatedMappings` and
|
||||
* `this.__originalMappings` properties).
|
||||
*/
|
||||
SourceMapConsumer.prototype._parseMappings =
|
||||
function SourceMapConsumer_parseMappings(aStr, aSourceRoot) {
|
||||
var generatedLine = 1;
|
||||
var previousGeneratedColumn = 0;
|
||||
var previousOriginalLine = 0;
|
||||
var previousOriginalColumn = 0;
|
||||
var previousSource = 0;
|
||||
var previousName = 0;
|
||||
var str = aStr;
|
||||
var temp = {};
|
||||
var mapping;
|
||||
|
||||
while (str.length > 0) {
|
||||
if (str.charAt(0) === ';') {
|
||||
generatedLine++;
|
||||
str = str.slice(1);
|
||||
previousGeneratedColumn = 0;
|
||||
}
|
||||
else if (str.charAt(0) === ',') {
|
||||
str = str.slice(1);
|
||||
}
|
||||
else {
|
||||
mapping = {};
|
||||
mapping.generatedLine = generatedLine;
|
||||
|
||||
// Generated column.
|
||||
base64VLQ.decode(str, temp);
|
||||
mapping.generatedColumn = previousGeneratedColumn + temp.value;
|
||||
previousGeneratedColumn = mapping.generatedColumn;
|
||||
str = temp.rest;
|
||||
|
||||
if (str.length > 0 && !this._nextCharIsMappingSeparator(str)) {
|
||||
// Original source.
|
||||
base64VLQ.decode(str, temp);
|
||||
mapping.source = this._sources.at(previousSource + temp.value);
|
||||
previousSource += temp.value;
|
||||
str = temp.rest;
|
||||
if (str.length === 0 || this._nextCharIsMappingSeparator(str)) {
|
||||
throw new Error('Found a source, but no line and column');
|
||||
}
|
||||
|
||||
// Original line.
|
||||
base64VLQ.decode(str, temp);
|
||||
mapping.originalLine = previousOriginalLine + temp.value;
|
||||
previousOriginalLine = mapping.originalLine;
|
||||
// Lines are stored 0-based
|
||||
mapping.originalLine += 1;
|
||||
str = temp.rest;
|
||||
if (str.length === 0 || this._nextCharIsMappingSeparator(str)) {
|
||||
throw new Error('Found a source and line, but no column');
|
||||
}
|
||||
|
||||
// Original column.
|
||||
base64VLQ.decode(str, temp);
|
||||
mapping.originalColumn = previousOriginalColumn + temp.value;
|
||||
previousOriginalColumn = mapping.originalColumn;
|
||||
str = temp.rest;
|
||||
|
||||
if (str.length > 0 && !this._nextCharIsMappingSeparator(str)) {
|
||||
// Original name.
|
||||
base64VLQ.decode(str, temp);
|
||||
mapping.name = this._names.at(previousName + temp.value);
|
||||
previousName += temp.value;
|
||||
str = temp.rest;
|
||||
}
|
||||
}
|
||||
|
||||
this.__generatedMappings.push(mapping);
|
||||
if (typeof mapping.originalLine === 'number') {
|
||||
this.__originalMappings.push(mapping);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
this.__generatedMappings.sort(util.compareByGeneratedPositions);
|
||||
this.__originalMappings.sort(util.compareByOriginalPositions);
|
||||
};
|
||||
|
||||
/**
|
||||
* Find the mapping that best matches the hypothetical "needle" mapping that
|
||||
* we are searching for in the given "haystack" of mappings.
|
||||
*/
|
||||
SourceMapConsumer.prototype._findMapping =
|
||||
function SourceMapConsumer_findMapping(aNeedle, aMappings, aLineName,
|
||||
aColumnName, aComparator) {
|
||||
// To return the position we are searching for, we must first find the
|
||||
// mapping for the given position and then return the opposite position it
|
||||
// points to. Because the mappings are sorted, we can use binary search to
|
||||
// find the best mapping.
|
||||
|
||||
if (aNeedle[aLineName] <= 0) {
|
||||
throw new TypeError('Line must be greater than or equal to 1, got '
|
||||
+ aNeedle[aLineName]);
|
||||
}
|
||||
if (aNeedle[aColumnName] < 0) {
|
||||
throw new TypeError('Column must be greater than or equal to 0, got '
|
||||
+ aNeedle[aColumnName]);
|
||||
}
|
||||
|
||||
return binarySearch.search(aNeedle, aMappings, aComparator);
|
||||
};
|
||||
|
||||
/**
|
||||
* Compute the last column for each generated mapping. The last column is
|
||||
* inclusive.
|
||||
*/
|
||||
SourceMapConsumer.prototype.computeColumnSpans =
|
||||
function SourceMapConsumer_computeColumnSpans() {
|
||||
for (var index = 0; index < this._generatedMappings.length; ++index) {
|
||||
var mapping = this._generatedMappings[index];
|
||||
|
||||
// Mappings do not contain a field for the last generated column. We
|
||||
// can come up with an optimistic estimate, however, by assuming that
|
||||
// mappings are contiguous (i.e. given two consecutive mappings, the
|
||||
// first mapping ends where the second one starts).
|
||||
if (index + 1 < this._generatedMappings.length) {
|
||||
var nextMapping = this._generatedMappings[index + 1];
|
||||
|
||||
if (mapping.generatedLine === nextMapping.generatedLine) {
|
||||
mapping.lastGeneratedColumn = nextMapping.generatedColumn - 1;
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
// The last mapping for each line spans the entire line.
|
||||
mapping.lastGeneratedColumn = Infinity;
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Returns the original source, line, and column information for the generated
|
||||
* source's line and column positions provided. The only argument is an object
|
||||
* with the following properties:
|
||||
*
|
||||
* - line: The line number in the generated source.
|
||||
* - column: The column number in the generated source.
|
||||
*
|
||||
* and an object is returned with the following properties:
|
||||
*
|
||||
* - source: The original source file, or null.
|
||||
* - line: The line number in the original source, or null.
|
||||
* - column: The column number in the original source, or null.
|
||||
* - name: The original identifier, or null.
|
||||
*/
|
||||
SourceMapConsumer.prototype.originalPositionFor =
|
||||
function SourceMapConsumer_originalPositionFor(aArgs) {
|
||||
var needle = {
|
||||
generatedLine: util.getArg(aArgs, 'line'),
|
||||
generatedColumn: util.getArg(aArgs, 'column')
|
||||
};
|
||||
|
||||
var index = this._findMapping(needle,
|
||||
this._generatedMappings,
|
||||
"generatedLine",
|
||||
"generatedColumn",
|
||||
util.compareByGeneratedPositions);
|
||||
|
||||
if (index >= 0) {
|
||||
var mapping = this._generatedMappings[index];
|
||||
|
||||
if (mapping.generatedLine === needle.generatedLine) {
|
||||
var source = util.getArg(mapping, 'source', null);
|
||||
if (source != null && this.sourceRoot != null) {
|
||||
source = util.join(this.sourceRoot, source);
|
||||
}
|
||||
return {
|
||||
source: source,
|
||||
line: util.getArg(mapping, 'originalLine', null),
|
||||
column: util.getArg(mapping, 'originalColumn', null),
|
||||
name: util.getArg(mapping, 'name', null)
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
source: null,
|
||||
line: null,
|
||||
column: null,
|
||||
name: null
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
* Returns the original source content. The only argument is the url of the
|
||||
* original source file. Returns null if no original source content is
|
||||
* available.
|
||||
*/
|
||||
SourceMapConsumer.prototype.sourceContentFor =
|
||||
function SourceMapConsumer_sourceContentFor(aSource) {
|
||||
if (!this.sourcesContent) {
|
||||
return null;
|
||||
}
|
||||
|
||||
if (this.sourceRoot != null) {
|
||||
aSource = util.relative(this.sourceRoot, aSource);
|
||||
}
|
||||
|
||||
if (this._sources.has(aSource)) {
|
||||
return this.sourcesContent[this._sources.indexOf(aSource)];
|
||||
}
|
||||
|
||||
var url;
|
||||
if (this.sourceRoot != null
|
||||
&& (url = util.urlParse(this.sourceRoot))) {
|
||||
// XXX: file:// URIs and absolute paths lead to unexpected behavior for
|
||||
// many users. We can help them out when they expect file:// URIs to
|
||||
// behave like it would if they were running a local HTTP server. See
|
||||
// https://bugzilla.mozilla.org/show_bug.cgi?id=885597.
|
||||
var fileUriAbsPath = aSource.replace(/^file:\/\//, "");
|
||||
if (url.scheme == "file"
|
||||
&& this._sources.has(fileUriAbsPath)) {
|
||||
return this.sourcesContent[this._sources.indexOf(fileUriAbsPath)];
|
||||
}
|
||||
|
||||
if ((!url.path || url.path == "/")
|
||||
&& this._sources.has("/" + aSource)) {
|
||||
return this.sourcesContent[this._sources.indexOf("/" + aSource)];
|
||||
}
|
||||
}
|
||||
|
||||
throw new Error('"' + aSource + '" is not in the SourceMap.');
|
||||
};
|
||||
|
||||
/**
|
||||
* Returns the generated line and column information for the original source,
|
||||
* line, and column positions provided. The only argument is an object with
|
||||
* the following properties:
|
||||
*
|
||||
* - source: The filename of the original source.
|
||||
* - line: The line number in the original source.
|
||||
* - column: The column number in the original source.
|
||||
*
|
||||
* and an object is returned with the following properties:
|
||||
*
|
||||
* - line: The line number in the generated source, or null.
|
||||
* - column: The column number in the generated source, or null.
|
||||
*/
|
||||
SourceMapConsumer.prototype.generatedPositionFor =
|
||||
function SourceMapConsumer_generatedPositionFor(aArgs) {
|
||||
var needle = {
|
||||
source: util.getArg(aArgs, 'source'),
|
||||
originalLine: util.getArg(aArgs, 'line'),
|
||||
originalColumn: util.getArg(aArgs, 'column')
|
||||
};
|
||||
|
||||
if (this.sourceRoot != null) {
|
||||
needle.source = util.relative(this.sourceRoot, needle.source);
|
||||
}
|
||||
|
||||
var index = this._findMapping(needle,
|
||||
this._originalMappings,
|
||||
"originalLine",
|
||||
"originalColumn",
|
||||
util.compareByOriginalPositions);
|
||||
|
||||
if (index >= 0) {
|
||||
var mapping = this._originalMappings[index];
|
||||
|
||||
return {
|
||||
line: util.getArg(mapping, 'generatedLine', null),
|
||||
column: util.getArg(mapping, 'generatedColumn', null),
|
||||
lastColumn: util.getArg(mapping, 'lastGeneratedColumn', null)
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
line: null,
|
||||
column: null,
|
||||
lastColumn: null
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
* Returns all generated line and column information for the original source
|
||||
* and line provided. The only argument is an object with the following
|
||||
* properties:
|
||||
*
|
||||
* - source: The filename of the original source.
|
||||
* - line: The line number in the original source.
|
||||
*
|
||||
* and an array of objects is returned, each with the following properties:
|
||||
*
|
||||
* - line: The line number in the generated source, or null.
|
||||
* - column: The column number in the generated source, or null.
|
||||
*/
|
||||
SourceMapConsumer.prototype.allGeneratedPositionsFor =
|
||||
function SourceMapConsumer_allGeneratedPositionsFor(aArgs) {
|
||||
// When there is no exact match, SourceMapConsumer.prototype._findMapping
|
||||
// returns the index of the closest mapping less than the needle. By
|
||||
// setting needle.originalColumn to Infinity, we thus find the last
|
||||
// mapping for the given line, provided such a mapping exists.
|
||||
var needle = {
|
||||
source: util.getArg(aArgs, 'source'),
|
||||
originalLine: util.getArg(aArgs, 'line'),
|
||||
originalColumn: Infinity
|
||||
};
|
||||
|
||||
if (this.sourceRoot != null) {
|
||||
needle.source = util.relative(this.sourceRoot, needle.source);
|
||||
}
|
||||
|
||||
var mappings = [];
|
||||
|
||||
var index = this._findMapping(needle,
|
||||
this._originalMappings,
|
||||
"originalLine",
|
||||
"originalColumn",
|
||||
util.compareByOriginalPositions);
|
||||
if (index >= 0) {
|
||||
var mapping = this._originalMappings[index];
|
||||
|
||||
while (mapping && mapping.originalLine === needle.originalLine) {
|
||||
mappings.push({
|
||||
line: util.getArg(mapping, 'generatedLine', null),
|
||||
column: util.getArg(mapping, 'generatedColumn', null),
|
||||
lastColumn: util.getArg(mapping, 'lastGeneratedColumn', null)
|
||||
});
|
||||
|
||||
mapping = this._originalMappings[--index];
|
||||
}
|
||||
}
|
||||
|
||||
return mappings.reverse();
|
||||
};
|
||||
|
||||
SourceMapConsumer.GENERATED_ORDER = 1;
|
||||
SourceMapConsumer.ORIGINAL_ORDER = 2;
|
||||
|
||||
/**
|
||||
* Iterate over each mapping between an original source/line/column and a
|
||||
* generated line/column in this source map.
|
||||
*
|
||||
* @param Function aCallback
|
||||
* The function that is called with each mapping.
|
||||
* @param Object aContext
|
||||
* Optional. If specified, this object will be the value of `this` every
|
||||
* time that `aCallback` is called.
|
||||
* @param aOrder
|
||||
* Either `SourceMapConsumer.GENERATED_ORDER` or
|
||||
* `SourceMapConsumer.ORIGINAL_ORDER`. Specifies whether you want to
|
||||
* iterate over the mappings sorted by the generated file's line/column
|
||||
* order or the original's source/line/column order, respectively. Defaults to
|
||||
* `SourceMapConsumer.GENERATED_ORDER`.
|
||||
*/
|
||||
SourceMapConsumer.prototype.eachMapping =
|
||||
function SourceMapConsumer_eachMapping(aCallback, aContext, aOrder) {
|
||||
var context = aContext || null;
|
||||
var order = aOrder || SourceMapConsumer.GENERATED_ORDER;
|
||||
|
||||
var mappings;
|
||||
switch (order) {
|
||||
case SourceMapConsumer.GENERATED_ORDER:
|
||||
mappings = this._generatedMappings;
|
||||
break;
|
||||
case SourceMapConsumer.ORIGINAL_ORDER:
|
||||
mappings = this._originalMappings;
|
||||
break;
|
||||
default:
|
||||
throw new Error("Unknown order of iteration.");
|
||||
}
|
||||
|
||||
var sourceRoot = this.sourceRoot;
|
||||
mappings.map(function (mapping) {
|
||||
var source = mapping.source;
|
||||
if (source != null && sourceRoot != null) {
|
||||
source = util.join(sourceRoot, source);
|
||||
}
|
||||
return {
|
||||
source: source,
|
||||
generatedLine: mapping.generatedLine,
|
||||
generatedColumn: mapping.generatedColumn,
|
||||
originalLine: mapping.originalLine,
|
||||
originalColumn: mapping.originalColumn,
|
||||
name: mapping.name
|
||||
};
|
||||
}).forEach(aCallback, context);
|
||||
};
|
||||
|
||||
exports.SourceMapConsumer = SourceMapConsumer;
|
||||
|
||||
});
|
||||
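A hedged usage sketch of the consumer API defined above; the map literal (with its single "AAAA" mapping, i.e. four zero VLQ offsets) is a made-up example, and the commented results are what the documented methods should return for it:

```js
var SourceMapConsumer = require('./source-map-consumer').SourceMapConsumer;

// Generated line 1, column 0 maps to foo.js line 1, column 0.
var consumer = new SourceMapConsumer({
  version: 3,
  file: 'out.js',
  sources: ['foo.js'],
  names: [],
  mappings: 'AAAA'
});

consumer.originalPositionFor({ line: 1, column: 0 });
// -> { source: 'foo.js', line: 1, column: 0, name: null }

consumer.generatedPositionFor({ source: 'foo.js', line: 1, column: 0 });
// -> { line: 1, column: 0, lastColumn: null }
//    (lastColumn is only filled in after computeColumnSpans() has been called)
```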
400
node_modules/transformers/node_modules/source-map/lib/source-map/source-map-generator.js
generated
vendored
Normal file
@@ -0,0 +1,400 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*/
|
||||
if (typeof define !== 'function') {
|
||||
var define = require('amdefine')(module, require);
|
||||
}
|
||||
define(function (require, exports, module) {
|
||||
|
||||
var base64VLQ = require('./base64-vlq');
|
||||
var util = require('./util');
|
||||
var ArraySet = require('./array-set').ArraySet;
|
||||
var MappingList = require('./mapping-list').MappingList;
|
||||
|
||||
/**
|
||||
* An instance of the SourceMapGenerator represents a source map which is
|
||||
* being built incrementally. You may pass an object with the following
|
||||
* properties:
|
||||
*
|
||||
* - file: The filename of the generated source.
|
||||
* - sourceRoot: A root for all relative URLs in this source map.
|
||||
*/
|
||||
function SourceMapGenerator(aArgs) {
|
||||
if (!aArgs) {
|
||||
aArgs = {};
|
||||
}
|
||||
this._file = util.getArg(aArgs, 'file', null);
|
||||
this._sourceRoot = util.getArg(aArgs, 'sourceRoot', null);
|
||||
this._skipValidation = util.getArg(aArgs, 'skipValidation', false);
|
||||
this._sources = new ArraySet();
|
||||
this._names = new ArraySet();
|
||||
this._mappings = new MappingList();
|
||||
this._sourcesContents = null;
|
||||
}
|
||||
|
||||
SourceMapGenerator.prototype._version = 3;
|
||||
|
||||
/**
|
||||
* Creates a new SourceMapGenerator based on a SourceMapConsumer
|
||||
*
|
||||
* @param aSourceMapConsumer The SourceMap.
|
||||
*/
|
||||
SourceMapGenerator.fromSourceMap =
|
||||
function SourceMapGenerator_fromSourceMap(aSourceMapConsumer) {
|
||||
var sourceRoot = aSourceMapConsumer.sourceRoot;
|
||||
var generator = new SourceMapGenerator({
|
||||
file: aSourceMapConsumer.file,
|
||||
sourceRoot: sourceRoot
|
||||
});
|
||||
aSourceMapConsumer.eachMapping(function (mapping) {
|
||||
var newMapping = {
|
||||
generated: {
|
||||
line: mapping.generatedLine,
|
||||
column: mapping.generatedColumn
|
||||
}
|
||||
};
|
||||
|
||||
if (mapping.source != null) {
|
||||
newMapping.source = mapping.source;
|
||||
if (sourceRoot != null) {
|
||||
newMapping.source = util.relative(sourceRoot, newMapping.source);
|
||||
}
|
||||
|
||||
newMapping.original = {
|
||||
line: mapping.originalLine,
|
||||
column: mapping.originalColumn
|
||||
};
|
||||
|
||||
if (mapping.name != null) {
|
||||
newMapping.name = mapping.name;
|
||||
}
|
||||
}
|
||||
|
||||
generator.addMapping(newMapping);
|
||||
});
|
||||
aSourceMapConsumer.sources.forEach(function (sourceFile) {
|
||||
var content = aSourceMapConsumer.sourceContentFor(sourceFile);
|
||||
if (content != null) {
|
||||
generator.setSourceContent(sourceFile, content);
|
||||
}
|
||||
});
|
||||
return generator;
|
||||
};
|
||||
|
||||
/**
|
||||
* Add a single mapping from original source line and column to the generated
|
||||
* source's line and column for this source map being created. The mapping
|
||||
* object should have the following properties:
|
||||
*
|
||||
* - generated: An object with the generated line and column positions.
|
||||
* - original: An object with the original line and column positions.
|
||||
* - source: The original source file (relative to the sourceRoot).
|
||||
* - name: An optional original token name for this mapping.
|
||||
*/
|
||||
SourceMapGenerator.prototype.addMapping =
|
||||
function SourceMapGenerator_addMapping(aArgs) {
|
||||
var generated = util.getArg(aArgs, 'generated');
|
||||
var original = util.getArg(aArgs, 'original', null);
|
||||
var source = util.getArg(aArgs, 'source', null);
|
||||
var name = util.getArg(aArgs, 'name', null);
|
||||
|
||||
if (!this._skipValidation) {
|
||||
this._validateMapping(generated, original, source, name);
|
||||
}
|
||||
|
||||
if (source != null && !this._sources.has(source)) {
|
||||
this._sources.add(source);
|
||||
}
|
||||
|
||||
if (name != null && !this._names.has(name)) {
|
||||
this._names.add(name);
|
||||
}
|
||||
|
||||
this._mappings.add({
|
||||
generatedLine: generated.line,
|
||||
generatedColumn: generated.column,
|
||||
originalLine: original != null && original.line,
|
||||
originalColumn: original != null && original.column,
|
||||
source: source,
|
||||
name: name
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Set the source content for a source file.
|
||||
*/
|
||||
SourceMapGenerator.prototype.setSourceContent =
|
||||
function SourceMapGenerator_setSourceContent(aSourceFile, aSourceContent) {
|
||||
var source = aSourceFile;
|
||||
if (this._sourceRoot != null) {
|
||||
source = util.relative(this._sourceRoot, source);
|
||||
}
|
||||
|
||||
if (aSourceContent != null) {
|
||||
// Add the source content to the _sourcesContents map.
|
||||
// Create a new _sourcesContents map if the property is null.
|
||||
if (!this._sourcesContents) {
|
||||
this._sourcesContents = {};
|
||||
}
|
||||
this._sourcesContents[util.toSetString(source)] = aSourceContent;
|
||||
} else if (this._sourcesContents) {
|
||||
// Remove the source file from the _sourcesContents map.
|
||||
// If the _sourcesContents map is empty, set the property to null.
|
||||
delete this._sourcesContents[util.toSetString(source)];
|
||||
if (Object.keys(this._sourcesContents).length === 0) {
|
||||
this._sourcesContents = null;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Applies the mappings of a sub-source-map for a specific source file to the
|
||||
* source map being generated. Each mapping to the supplied source file is
|
||||
* rewritten using the supplied source map. Note: The resolution for the
|
||||
* resulting mappings is the minimum of this map and the supplied map.
|
||||
*
|
||||
* @param aSourceMapConsumer The source map to be applied.
|
||||
* @param aSourceFile Optional. The filename of the source file.
|
||||
* If omitted, SourceMapConsumer's file property will be used.
|
||||
* @param aSourceMapPath Optional. The dirname of the path to the source map
|
||||
* to be applied. If relative, it is relative to the SourceMapConsumer.
|
||||
* This parameter is needed when the two source maps aren't in the same
|
||||
* directory, and the source map to be applied contains relative source
|
||||
* paths. If so, those relative source paths need to be rewritten
|
||||
* relative to the SourceMapGenerator.
|
||||
*/
|
||||
SourceMapGenerator.prototype.applySourceMap =
|
||||
function SourceMapGenerator_applySourceMap(aSourceMapConsumer, aSourceFile, aSourceMapPath) {
|
||||
var sourceFile = aSourceFile;
|
||||
// If aSourceFile is omitted, we will use the file property of the SourceMap
|
||||
if (aSourceFile == null) {
|
||||
if (aSourceMapConsumer.file == null) {
|
||||
throw new Error(
|
||||
'SourceMapGenerator.prototype.applySourceMap requires either an explicit source file, ' +
|
||||
'or the source map\'s "file" property. Both were omitted.'
|
||||
);
|
||||
}
|
||||
sourceFile = aSourceMapConsumer.file;
|
||||
}
|
||||
var sourceRoot = this._sourceRoot;
|
||||
// Make "sourceFile" relative if an absolute Url is passed.
|
||||
if (sourceRoot != null) {
|
||||
sourceFile = util.relative(sourceRoot, sourceFile);
|
||||
}
|
||||
// Applying the SourceMap can add and remove items from the sources and
|
||||
// the names array.
|
||||
var newSources = new ArraySet();
|
||||
var newNames = new ArraySet();
|
||||
|
||||
// Find mappings for the "sourceFile"
|
||||
this._mappings.unsortedForEach(function (mapping) {
|
||||
if (mapping.source === sourceFile && mapping.originalLine != null) {
|
||||
// Check if it can be mapped by the source map, then update the mapping.
|
||||
var original = aSourceMapConsumer.originalPositionFor({
|
||||
line: mapping.originalLine,
|
||||
column: mapping.originalColumn
|
||||
});
|
||||
if (original.source != null) {
|
||||
// Copy mapping
|
||||
mapping.source = original.source;
|
||||
if (aSourceMapPath != null) {
|
||||
mapping.source = util.join(aSourceMapPath, mapping.source);
|
||||
}
|
||||
if (sourceRoot != null) {
|
||||
mapping.source = util.relative(sourceRoot, mapping.source);
|
||||
}
|
||||
mapping.originalLine = original.line;
|
||||
mapping.originalColumn = original.column;
|
||||
if (original.name != null) {
|
||||
mapping.name = original.name;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var source = mapping.source;
|
||||
if (source != null && !newSources.has(source)) {
|
||||
newSources.add(source);
|
||||
}
|
||||
|
||||
var name = mapping.name;
|
||||
if (name != null && !newNames.has(name)) {
|
||||
newNames.add(name);
|
||||
}
|
||||
|
||||
}, this);
|
||||
this._sources = newSources;
|
||||
this._names = newNames;
|
||||
|
||||
// Copy sourcesContents of applied map.
|
||||
aSourceMapConsumer.sources.forEach(function (sourceFile) {
|
||||
var content = aSourceMapConsumer.sourceContentFor(sourceFile);
|
||||
if (content != null) {
|
||||
if (aSourceMapPath != null) {
|
||||
sourceFile = util.join(aSourceMapPath, sourceFile);
|
||||
}
|
||||
if (sourceRoot != null) {
|
||||
sourceFile = util.relative(sourceRoot, sourceFile);
|
||||
}
|
||||
this.setSourceContent(sourceFile, content);
|
||||
}
|
||||
}, this);
|
||||
};
|
||||
|
||||
/**
|
||||
* A mapping can have one of the three levels of data:
|
||||
*
|
||||
* 1. Just the generated position.
|
||||
* 2. The Generated position, original position, and original source.
|
||||
* 3. Generated and original position, original source, as well as a name
|
||||
* token.
|
||||
*
|
||||
* To maintain consistency, we validate that any new mapping being added falls
|
||||
* into one of these categories.
|
||||
*/
|
||||
SourceMapGenerator.prototype._validateMapping =
|
||||
function SourceMapGenerator_validateMapping(aGenerated, aOriginal, aSource,
|
||||
aName) {
|
||||
if (aGenerated && 'line' in aGenerated && 'column' in aGenerated
|
||||
&& aGenerated.line > 0 && aGenerated.column >= 0
|
||||
&& !aOriginal && !aSource && !aName) {
|
||||
// Case 1.
|
||||
return;
|
||||
}
|
||||
else if (aGenerated && 'line' in aGenerated && 'column' in aGenerated
|
||||
&& aOriginal && 'line' in aOriginal && 'column' in aOriginal
|
||||
&& aGenerated.line > 0 && aGenerated.column >= 0
|
||||
&& aOriginal.line > 0 && aOriginal.column >= 0
|
||||
&& aSource) {
|
||||
// Cases 2 and 3.
|
||||
return;
|
||||
}
|
||||
else {
|
||||
throw new Error('Invalid mapping: ' + JSON.stringify({
|
||||
generated: aGenerated,
|
||||
source: aSource,
|
||||
original: aOriginal,
|
||||
name: aName
|
||||
}));
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Serialize the accumulated mappings into the stream of base 64 VLQs
|
||||
* specified by the source map format.
|
||||
*/
|
||||
SourceMapGenerator.prototype._serializeMappings =
|
||||
function SourceMapGenerator_serializeMappings() {
|
||||
var previousGeneratedColumn = 0;
|
||||
var previousGeneratedLine = 1;
|
||||
var previousOriginalColumn = 0;
|
||||
var previousOriginalLine = 0;
|
||||
var previousName = 0;
|
||||
var previousSource = 0;
|
||||
var result = '';
|
||||
var mapping;
|
||||
|
||||
var mappings = this._mappings.toArray();
|
||||
|
||||
for (var i = 0, len = mappings.length; i < len; i++) {
|
||||
mapping = mappings[i];
|
||||
|
||||
if (mapping.generatedLine !== previousGeneratedLine) {
|
||||
previousGeneratedColumn = 0;
|
||||
while (mapping.generatedLine !== previousGeneratedLine) {
|
||||
result += ';';
|
||||
previousGeneratedLine++;
|
||||
}
|
||||
}
|
||||
else {
|
||||
if (i > 0) {
|
||||
if (!util.compareByGeneratedPositions(mapping, mappings[i - 1])) {
|
||||
continue;
|
||||
}
|
||||
result += ',';
|
||||
}
|
||||
}
|
||||
|
||||
result += base64VLQ.encode(mapping.generatedColumn
|
||||
- previousGeneratedColumn);
|
||||
previousGeneratedColumn = mapping.generatedColumn;
|
||||
|
||||
if (mapping.source != null) {
|
||||
result += base64VLQ.encode(this._sources.indexOf(mapping.source)
|
||||
- previousSource);
|
||||
previousSource = this._sources.indexOf(mapping.source);
|
||||
|
||||
// lines are stored 0-based in SourceMap spec version 3
|
||||
result += base64VLQ.encode(mapping.originalLine - 1
|
||||
- previousOriginalLine);
|
||||
previousOriginalLine = mapping.originalLine - 1;
|
||||
|
||||
result += base64VLQ.encode(mapping.originalColumn
|
||||
- previousOriginalColumn);
|
||||
previousOriginalColumn = mapping.originalColumn;
|
||||
|
||||
if (mapping.name != null) {
|
||||
result += base64VLQ.encode(this._names.indexOf(mapping.name)
|
||||
- previousName);
|
||||
previousName = this._names.indexOf(mapping.name);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
};
|
||||
|
||||
SourceMapGenerator.prototype._generateSourcesContent =
|
||||
function SourceMapGenerator_generateSourcesContent(aSources, aSourceRoot) {
|
||||
return aSources.map(function (source) {
|
||||
if (!this._sourcesContents) {
|
||||
return null;
|
||||
}
|
||||
if (aSourceRoot != null) {
|
||||
source = util.relative(aSourceRoot, source);
|
||||
}
|
||||
var key = util.toSetString(source);
|
||||
return Object.prototype.hasOwnProperty.call(this._sourcesContents,
|
||||
key)
|
||||
? this._sourcesContents[key]
|
||||
: null;
|
||||
}, this);
|
||||
};
|
||||
|
||||
/**
|
||||
* Externalize the source map.
|
||||
*/
|
||||
SourceMapGenerator.prototype.toJSON =
|
||||
function SourceMapGenerator_toJSON() {
|
||||
var map = {
|
||||
version: this._version,
|
||||
sources: this._sources.toArray(),
|
||||
names: this._names.toArray(),
|
||||
mappings: this._serializeMappings()
|
||||
};
|
||||
if (this._file != null) {
|
||||
map.file = this._file;
|
||||
}
|
||||
if (this._sourceRoot != null) {
|
||||
map.sourceRoot = this._sourceRoot;
|
||||
}
|
||||
if (this._sourcesContents) {
|
||||
map.sourcesContent = this._generateSourcesContent(map.sources, map.sourceRoot);
|
||||
}
|
||||
|
||||
return map;
|
||||
};
|
||||
|
||||
/**
|
||||
* Render the source map being generated to a string.
|
||||
*/
|
||||
SourceMapGenerator.prototype.toString =
|
||||
function SourceMapGenerator_toString() {
|
||||
return JSON.stringify(this);
|
||||
};
|
||||
|
||||
exports.SourceMapGenerator = SourceMapGenerator;
|
||||
|
||||
});
|
||||
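A hedged sketch of driving the generator documented above; the file names, positions, and source text are invented for the example:

```js
var SourceMapGenerator = require('./source-map-generator').SourceMapGenerator;

var generator = new SourceMapGenerator({
  file: 'out.js',
  sourceRoot: ''
});

// Case 2 of _validateMapping: generated position, original position and source.
generator.addMapping({
  generated: { line: 1, column: 0 },
  original: { line: 1, column: 0 },
  source: 'foo.js'
});

// Optionally embed the original text so consumers can serve it back.
generator.setSourceContent('foo.js', 'var answer = 42;\n');

// toString() JSON-stringifies the generator; toJSON() re-encodes the
// accumulated mappings into the base64 VLQ "mappings" string.
var json = generator.toString();
```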
414
node_modules/transformers/node_modules/source-map/lib/source-map/source-node.js
generated
vendored
Normal file
@@ -0,0 +1,414 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*/
|
||||
if (typeof define !== 'function') {
|
||||
var define = require('amdefine')(module, require);
|
||||
}
|
||||
define(function (require, exports, module) {
|
||||
|
||||
var SourceMapGenerator = require('./source-map-generator').SourceMapGenerator;
|
||||
var util = require('./util');
|
||||
|
||||
// Matches a Windows-style `\r\n` newline or a `\n` newline used by all other
|
||||
// operating systems these days (capturing the result).
|
||||
var REGEX_NEWLINE = /(\r?\n)/;
|
||||
|
||||
// Newline character code for charCodeAt() comparisons
|
||||
var NEWLINE_CODE = 10;
|
||||
|
||||
// Private symbol for identifying `SourceNode`s when multiple versions of
|
||||
// the source-map library are loaded. This MUST NOT CHANGE across
|
||||
// versions!
|
||||
var isSourceNode = "$$$isSourceNode$$$";
|
||||
|
||||
/**
|
||||
* SourceNodes provide a way to abstract over interpolating/concatenating
|
||||
* snippets of generated JavaScript source code while maintaining the line and
|
||||
* column information associated with the original source code.
|
||||
*
|
||||
* @param aLine The original line number.
|
||||
* @param aColumn The original column number.
|
||||
* @param aSource The original source's filename.
|
||||
* @param aChunks Optional. An array of strings which are snippets of
|
||||
* generated JS, or other SourceNodes.
|
||||
* @param aName The original identifier.
|
||||
*/
|
||||
function SourceNode(aLine, aColumn, aSource, aChunks, aName) {
|
||||
this.children = [];
|
||||
this.sourceContents = {};
|
||||
this.line = aLine == null ? null : aLine;
|
||||
this.column = aColumn == null ? null : aColumn;
|
||||
this.source = aSource == null ? null : aSource;
|
||||
this.name = aName == null ? null : aName;
|
||||
this[isSourceNode] = true;
|
||||
if (aChunks != null) this.add(aChunks);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a SourceNode from generated code and a SourceMapConsumer.
|
||||
*
|
||||
* @param aGeneratedCode The generated code
|
||||
* @param aSourceMapConsumer The SourceMap for the generated code
|
||||
* @param aRelativePath Optional. The path that relative sources in the
|
||||
* SourceMapConsumer should be relative to.
|
||||
*/
|
||||
SourceNode.fromStringWithSourceMap =
|
||||
function SourceNode_fromStringWithSourceMap(aGeneratedCode, aSourceMapConsumer, aRelativePath) {
|
||||
// The SourceNode we want to fill with the generated code
|
||||
// and the SourceMap
|
||||
var node = new SourceNode();
|
||||
|
||||
// All even indices of this array are one line of the generated code,
|
||||
// while all odd indices are the newlines between two adjacent lines
|
||||
// (since `REGEX_NEWLINE` captures its match).
|
||||
// Processed fragments are removed from this array, by calling `shiftNextLine`.
|
||||
var remainingLines = aGeneratedCode.split(REGEX_NEWLINE);
|
||||
var shiftNextLine = function() {
|
||||
var lineContents = remainingLines.shift();
|
||||
// The last line of a file might not have a newline.
|
||||
var newLine = remainingLines.shift() || "";
|
||||
return lineContents + newLine;
|
||||
};
|
||||
|
||||
// We need to remember the position of "remainingLines"
|
||||
var lastGeneratedLine = 1, lastGeneratedColumn = 0;
|
||||
|
||||
// To generate the SourceNodes we need a code range.
// To extract it, the current and the last mapping are used.
// Here we store the last mapping.
|
||||
var lastMapping = null;
|
||||
|
||||
aSourceMapConsumer.eachMapping(function (mapping) {
|
||||
if (lastMapping !== null) {
|
||||
// We add the code from "lastMapping" to "mapping":
|
||||
// First check if there is a new line in between.
|
||||
if (lastGeneratedLine < mapping.generatedLine) {
|
||||
var code = "";
|
||||
// Associate first line with "lastMapping"
|
||||
addMappingWithCode(lastMapping, shiftNextLine());
|
||||
lastGeneratedLine++;
|
||||
lastGeneratedColumn = 0;
|
||||
// The remaining code is added without mapping
|
||||
} else {
|
||||
// There is no new line in between.
|
||||
// Associate the code between "lastGeneratedColumn" and
|
||||
// "mapping.generatedColumn" with "lastMapping"
|
||||
var nextLine = remainingLines[0];
|
||||
var code = nextLine.substr(0, mapping.generatedColumn -
|
||||
lastGeneratedColumn);
|
||||
remainingLines[0] = nextLine.substr(mapping.generatedColumn -
|
||||
lastGeneratedColumn);
|
||||
lastGeneratedColumn = mapping.generatedColumn;
|
||||
addMappingWithCode(lastMapping, code);
|
||||
// No more remaining code, continue
|
||||
lastMapping = mapping;
|
||||
return;
|
||||
}
|
||||
}
|
||||
// We add the generated code until the first mapping
|
||||
// to the SourceNode without any mapping.
|
||||
// Each line is added as separate string.
|
||||
while (lastGeneratedLine < mapping.generatedLine) {
|
||||
node.add(shiftNextLine());
|
||||
lastGeneratedLine++;
|
||||
}
|
||||
if (lastGeneratedColumn < mapping.generatedColumn) {
|
||||
var nextLine = remainingLines[0];
|
||||
node.add(nextLine.substr(0, mapping.generatedColumn));
|
||||
remainingLines[0] = nextLine.substr(mapping.generatedColumn);
|
||||
lastGeneratedColumn = mapping.generatedColumn;
|
||||
}
|
||||
lastMapping = mapping;
|
||||
}, this);
|
||||
// We have processed all mappings.
|
||||
if (remainingLines.length > 0) {
|
||||
if (lastMapping) {
|
||||
// Associate the remaining code in the current line with "lastMapping"
|
||||
addMappingWithCode(lastMapping, shiftNextLine());
|
||||
}
|
||||
// and add the remaining lines without any mapping
|
||||
node.add(remainingLines.join(""));
|
||||
}
|
||||
|
||||
// Copy sourcesContent into SourceNode
|
||||
aSourceMapConsumer.sources.forEach(function (sourceFile) {
|
||||
var content = aSourceMapConsumer.sourceContentFor(sourceFile);
|
||||
if (content != null) {
|
||||
if (aRelativePath != null) {
|
||||
sourceFile = util.join(aRelativePath, sourceFile);
|
||||
}
|
||||
node.setSourceContent(sourceFile, content);
|
||||
}
|
||||
});
|
||||
|
||||
return node;
|
||||
|
||||
function addMappingWithCode(mapping, code) {
|
||||
if (mapping === null || mapping.source === undefined) {
|
||||
node.add(code);
|
||||
} else {
|
||||
var source = aRelativePath
|
||||
? util.join(aRelativePath, mapping.source)
|
||||
: mapping.source;
|
||||
node.add(new SourceNode(mapping.originalLine,
|
||||
mapping.originalColumn,
|
||||
source,
|
||||
code,
|
||||
mapping.name));
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Add a chunk of generated JS to this source node.
|
||||
*
|
||||
* @param aChunk A string snippet of generated JS code, another instance of
|
||||
* SourceNode, or an array where each member is one of those things.
|
||||
*/
|
||||
SourceNode.prototype.add = function SourceNode_add(aChunk) {
|
||||
if (Array.isArray(aChunk)) {
|
||||
aChunk.forEach(function (chunk) {
|
||||
this.add(chunk);
|
||||
}, this);
|
||||
}
|
||||
else if (aChunk[isSourceNode] || typeof aChunk === "string") {
|
||||
if (aChunk) {
|
||||
this.children.push(aChunk);
|
||||
}
|
||||
}
|
||||
else {
|
||||
throw new TypeError(
|
||||
"Expected a SourceNode, string, or an array of SourceNodes and strings. Got " + aChunk
|
||||
);
|
||||
}
|
||||
return this;
|
||||
};
|
||||
|
||||
/**
|
||||
* Add a chunk of generated JS to the beginning of this source node.
|
||||
*
|
||||
* @param aChunk A string snippet of generated JS code, another instance of
|
||||
* SourceNode, or an array where each member is one of those things.
|
||||
*/
|
||||
SourceNode.prototype.prepend = function SourceNode_prepend(aChunk) {
|
||||
if (Array.isArray(aChunk)) {
|
||||
for (var i = aChunk.length-1; i >= 0; i--) {
|
||||
this.prepend(aChunk[i]);
|
||||
}
|
||||
}
|
||||
else if (aChunk[isSourceNode] || typeof aChunk === "string") {
|
||||
this.children.unshift(aChunk);
|
||||
}
|
||||
else {
|
||||
throw new TypeError(
|
||||
"Expected a SourceNode, string, or an array of SourceNodes and strings. Got " + aChunk
|
||||
);
|
||||
}
|
||||
return this;
|
||||
};
|
||||
|
||||
/**
|
||||
* Walk over the tree of JS snippets in this node and its children. The
|
||||
* walking function is called once for each snippet of JS and is passed that
|
||||
* snippet and its original associated source's line/column location.
|
||||
*
|
||||
* @param aFn The traversal function.
|
||||
*/
|
||||
SourceNode.prototype.walk = function SourceNode_walk(aFn) {
|
||||
var chunk;
|
||||
for (var i = 0, len = this.children.length; i < len; i++) {
|
||||
chunk = this.children[i];
|
||||
if (chunk[isSourceNode]) {
|
||||
chunk.walk(aFn);
|
||||
}
|
||||
else {
|
||||
if (chunk !== '') {
|
||||
aFn(chunk, { source: this.source,
|
||||
line: this.line,
|
||||
column: this.column,
|
||||
name: this.name });
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Like `String.prototype.join` except for SourceNodes. Inserts `aStr` between
|
||||
* each of `this.children`.
|
||||
*
|
||||
* @param aSep The separator.
|
||||
*/
|
||||
SourceNode.prototype.join = function SourceNode_join(aSep) {
|
||||
var newChildren;
|
||||
var i;
|
||||
var len = this.children.length;
|
||||
if (len > 0) {
|
||||
newChildren = [];
|
||||
for (i = 0; i < len-1; i++) {
|
||||
newChildren.push(this.children[i]);
|
||||
newChildren.push(aSep);
|
||||
}
|
||||
newChildren.push(this.children[i]);
|
||||
this.children = newChildren;
|
||||
}
|
||||
return this;
|
||||
};
|
||||
|
||||
/**
|
||||
* Call String.prototype.replace on the very right-most source snippet. Useful
|
||||
* for trimming whitespace from the end of a source node, etc.
|
||||
*
|
||||
* @param aPattern The pattern to replace.
|
||||
* @param aReplacement The thing to replace the pattern with.
|
||||
*/
|
||||
SourceNode.prototype.replaceRight = function SourceNode_replaceRight(aPattern, aReplacement) {
|
||||
var lastChild = this.children[this.children.length - 1];
|
||||
if (lastChild[isSourceNode]) {
|
||||
lastChild.replaceRight(aPattern, aReplacement);
|
||||
}
|
||||
else if (typeof lastChild === 'string') {
|
||||
this.children[this.children.length - 1] = lastChild.replace(aPattern, aReplacement);
|
||||
}
|
||||
else {
|
||||
this.children.push(''.replace(aPattern, aReplacement));
|
||||
}
|
||||
return this;
|
||||
};
|
||||
|
||||
/**
|
||||
* Set the source content for a source file. This will be added to the SourceMapGenerator
|
||||
* in the sourcesContent field.
|
||||
*
|
||||
* @param aSourceFile The filename of the source file
|
||||
* @param aSourceContent The content of the source file
|
||||
*/
|
||||
SourceNode.prototype.setSourceContent =
|
||||
function SourceNode_setSourceContent(aSourceFile, aSourceContent) {
|
||||
this.sourceContents[util.toSetString(aSourceFile)] = aSourceContent;
|
||||
};
|
||||
|
||||
/**
|
||||
* Walk over the tree of SourceNodes. The walking function is called for each
|
||||
* source file content and is passed the filename and source content.
|
||||
*
|
||||
* @param aFn The traversal function.
|
||||
*/
|
||||
SourceNode.prototype.walkSourceContents =
|
||||
function SourceNode_walkSourceContents(aFn) {
|
||||
for (var i = 0, len = this.children.length; i < len; i++) {
|
||||
if (this.children[i][isSourceNode]) {
|
||||
this.children[i].walkSourceContents(aFn);
|
||||
}
|
||||
}
|
||||
|
||||
var sources = Object.keys(this.sourceContents);
|
||||
for (var i = 0, len = sources.length; i < len; i++) {
|
||||
aFn(util.fromSetString(sources[i]), this.sourceContents[sources[i]]);
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Return the string representation of this source node. Walks over the tree
|
||||
* and concatenates all the various snippets together to one string.
|
||||
*/
|
||||
SourceNode.prototype.toString = function SourceNode_toString() {
|
||||
var str = "";
|
||||
this.walk(function (chunk) {
|
||||
str += chunk;
|
||||
});
|
||||
return str;
|
||||
};
|
||||
|
||||
/**
|
||||
* Returns the string representation of this source node along with a source
|
||||
* map.
|
||||
*/
|
||||
SourceNode.prototype.toStringWithSourceMap = function SourceNode_toStringWithSourceMap(aArgs) {
|
||||
var generated = {
|
||||
code: "",
|
||||
line: 1,
|
||||
column: 0
|
||||
};
|
||||
var map = new SourceMapGenerator(aArgs);
|
||||
var sourceMappingActive = false;
|
||||
var lastOriginalSource = null;
|
||||
var lastOriginalLine = null;
|
||||
var lastOriginalColumn = null;
|
||||
var lastOriginalName = null;
|
||||
this.walk(function (chunk, original) {
|
||||
generated.code += chunk;
|
||||
if (original.source !== null
|
||||
&& original.line !== null
|
||||
&& original.column !== null) {
|
||||
if(lastOriginalSource !== original.source
|
||||
|| lastOriginalLine !== original.line
|
||||
|| lastOriginalColumn !== original.column
|
||||
|| lastOriginalName !== original.name) {
|
||||
map.addMapping({
|
||||
source: original.source,
|
||||
original: {
|
||||
line: original.line,
|
||||
column: original.column
|
||||
},
|
||||
generated: {
|
||||
line: generated.line,
|
||||
column: generated.column
|
||||
},
|
||||
name: original.name
|
||||
});
|
||||
}
|
||||
lastOriginalSource = original.source;
|
||||
lastOriginalLine = original.line;
|
||||
lastOriginalColumn = original.column;
|
||||
lastOriginalName = original.name;
|
||||
sourceMappingActive = true;
|
||||
} else if (sourceMappingActive) {
|
||||
map.addMapping({
|
||||
generated: {
|
||||
line: generated.line,
|
||||
column: generated.column
|
||||
}
|
||||
});
|
||||
lastOriginalSource = null;
|
||||
sourceMappingActive = false;
|
||||
}
|
||||
for (var idx = 0, length = chunk.length; idx < length; idx++) {
|
||||
if (chunk.charCodeAt(idx) === NEWLINE_CODE) {
|
||||
generated.line++;
|
||||
generated.column = 0;
|
||||
// Mappings end at eol
|
||||
if (idx + 1 === length) {
|
||||
lastOriginalSource = null;
|
||||
sourceMappingActive = false;
|
||||
} else if (sourceMappingActive) {
|
||||
map.addMapping({
|
||||
source: original.source,
|
||||
original: {
|
||||
line: original.line,
|
||||
column: original.column
|
||||
},
|
||||
generated: {
|
||||
line: generated.line,
|
||||
column: generated.column
|
||||
},
|
||||
name: original.name
|
||||
});
|
||||
}
|
||||
} else {
|
||||
generated.column++;
|
||||
}
|
||||
}
|
||||
});
|
||||
this.walkSourceContents(function (sourceFile, sourceContent) {
|
||||
map.setSourceContent(sourceFile, sourceContent);
|
||||
});
|
||||
|
||||
return { code: generated.code, map: map };
|
||||
};
|
||||
|
||||
exports.SourceNode = SourceNode;
|
||||
|
||||
});
|
||||
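And a hedged sketch of building output with SourceNode; the snippets and file names below are invented for illustration:

```js
var SourceNode = require('./source-node').SourceNode;

// Two snippets of generated JS, each annotated with the original
// line/column/file it came from.
var node = new SourceNode(1, 0, 'a.js', 'var a = 1;\n');
node.add(new SourceNode(1, 0, 'b.js', 'var b = 2;\n'));

node.toString();
// -> 'var a = 1;\nvar b = 2;\n'

// toStringWithSourceMap() returns both the concatenated code and a
// SourceMapGenerator describing it.
var output = node.toStringWithSourceMap({ file: 'bundle.js' });
output.code;           // the concatenated string from above
output.map.toString(); // a version 3 source map for it
```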
319
node_modules/transformers/node_modules/source-map/lib/source-map/util.js
generated
vendored
Normal file
@@ -0,0 +1,319 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*/
|
||||
if (typeof define !== 'function') {
|
||||
var define = require('amdefine')(module, require);
|
||||
}
|
||||
define(function (require, exports, module) {
|
||||
|
||||
/**
|
||||
* This is a helper function for getting values from parameter/options
|
||||
* objects.
|
||||
*
|
||||
* @param args The object we are extracting values from
|
||||
* @param name The name of the property we are getting.
|
||||
* @param defaultValue An optional value to return if the property is missing
|
||||
* from the object. If this is not specified and the property is missing, an
|
||||
* error will be thrown.
|
||||
*/
|
||||
function getArg(aArgs, aName, aDefaultValue) {
|
||||
if (aName in aArgs) {
|
||||
return aArgs[aName];
|
||||
} else if (arguments.length === 3) {
|
||||
return aDefaultValue;
|
||||
} else {
|
||||
throw new Error('"' + aName + '" is a required argument.');
|
||||
}
|
||||
}
|
||||
exports.getArg = getArg;
|
||||
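// Illustrative sketch of getArg's fallback behaviour (the option names
// here are made up for the example):
//
//   getArg({ line: 10 }, 'line');       // -> 10
//   getArg({ line: 10 }, 'column', 0);  // -> 0, the supplied default
//   getArg({ line: 10 }, 'column');     // throws '"column" is a required argument.'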
|
||||
var urlRegexp = /^(?:([\w+\-.]+):)?\/\/(?:(\w+:\w+)@)?([\w.]*)(?::(\d+))?(\S*)$/;
|
||||
var dataUrlRegexp = /^data:.+\,.+$/;
|
||||
|
||||
function urlParse(aUrl) {
|
||||
var match = aUrl.match(urlRegexp);
|
||||
if (!match) {
|
||||
return null;
|
||||
}
|
||||
return {
|
||||
scheme: match[1],
|
||||
auth: match[2],
|
||||
host: match[3],
|
||||
port: match[4],
|
||||
path: match[5]
|
||||
};
|
||||
}
|
||||
exports.urlParse = urlParse;
|
||||
|
||||
function urlGenerate(aParsedUrl) {
|
||||
var url = '';
|
||||
if (aParsedUrl.scheme) {
|
||||
url += aParsedUrl.scheme + ':';
|
||||
}
|
||||
url += '//';
|
||||
if (aParsedUrl.auth) {
|
||||
url += aParsedUrl.auth + '@';
|
||||
}
|
||||
if (aParsedUrl.host) {
|
||||
url += aParsedUrl.host;
|
||||
}
|
||||
if (aParsedUrl.port) {
|
||||
url += ":" + aParsedUrl.port;
|
||||
}
|
||||
if (aParsedUrl.path) {
|
||||
url += aParsedUrl.path;
|
||||
}
|
||||
return url;
|
||||
}
|
||||
exports.urlGenerate = urlGenerate;
|
||||
|
||||
/**
|
||||
* Normalizes a path, or the path portion of a URL:
|
||||
*
|
||||
* - Replaces consecutive slashes with one slash.
|
||||
* - Removes unnecessary '.' parts.
|
||||
* - Removes unnecessary '<dir>/..' parts.
|
||||
*
|
||||
* Based on code in the Node.js 'path' core module.
|
||||
*
|
||||
* @param aPath The path or url to normalize.
|
||||
*/
|
||||
function normalize(aPath) {
|
||||
var path = aPath;
|
||||
var url = urlParse(aPath);
|
||||
if (url) {
|
||||
if (!url.path) {
|
||||
return aPath;
|
||||
}
|
||||
path = url.path;
|
||||
}
|
||||
var isAbsolute = (path.charAt(0) === '/');
|
||||
|
||||
var parts = path.split(/\/+/);
|
||||
for (var part, up = 0, i = parts.length - 1; i >= 0; i--) {
|
||||
part = parts[i];
|
||||
if (part === '.') {
|
||||
parts.splice(i, 1);
|
||||
} else if (part === '..') {
|
||||
up++;
|
||||
} else if (up > 0) {
|
||||
if (part === '') {
|
||||
// The first part is blank if the path is absolute. Trying to go
|
||||
// above the root is a no-op. Therefore we can remove all '..' parts
|
||||
// directly after the root.
|
||||
parts.splice(i + 1, up);
|
||||
up = 0;
|
||||
} else {
|
||||
parts.splice(i, 2);
|
||||
up--;
|
||||
}
|
||||
}
|
||||
}
|
||||
path = parts.join('/');
|
||||
|
||||
if (path === '') {
|
||||
path = isAbsolute ? '/' : '.';
|
||||
}
|
||||
|
||||
if (url) {
|
||||
url.path = path;
|
||||
return urlGenerate(url);
|
||||
}
|
||||
return path;
|
||||
}
|
||||
exports.normalize = normalize;
|
||||
|
||||
/**
|
||||
* Joins two paths/URLs.
|
||||
*
|
||||
* @param aRoot The root path or URL.
|
||||
* @param aPath The path or URL to be joined with the root.
|
||||
*
|
||||
* - If aPath is a URL or a data URI, aPath is returned, unless aPath is a
|
||||
* scheme-relative URL: Then the scheme of aRoot, if any, is prepended
|
||||
* first.
|
||||
* - Otherwise aPath is a path. If aRoot is a URL, then its path portion
|
||||
* is updated with the result and aRoot is returned. Otherwise the result
|
||||
* is returned.
|
||||
* - If aPath is absolute, the result is aPath.
|
||||
* - Otherwise the two paths are joined with a slash.
|
||||
* - Joining for example 'http://' and 'www.example.com' is also supported.
|
||||
*/
|
||||
function join(aRoot, aPath) {
|
||||
if (aRoot === "") {
|
||||
aRoot = ".";
|
||||
}
|
||||
if (aPath === "") {
|
||||
aPath = ".";
|
||||
}
|
||||
var aPathUrl = urlParse(aPath);
|
||||
var aRootUrl = urlParse(aRoot);
|
||||
if (aRootUrl) {
|
||||
aRoot = aRootUrl.path || '/';
|
||||
}
|
||||
|
||||
// `join(foo, '//www.example.org')`
|
||||
if (aPathUrl && !aPathUrl.scheme) {
|
||||
if (aRootUrl) {
|
||||
aPathUrl.scheme = aRootUrl.scheme;
|
||||
}
|
||||
return urlGenerate(aPathUrl);
|
||||
}
|
||||
|
||||
if (aPathUrl || aPath.match(dataUrlRegexp)) {
|
||||
return aPath;
|
||||
}
|
||||
|
||||
// `join('http://', 'www.example.com')`
|
||||
if (aRootUrl && !aRootUrl.host && !aRootUrl.path) {
|
||||
aRootUrl.host = aPath;
|
||||
return urlGenerate(aRootUrl);
|
||||
}
|
||||
|
||||
var joined = aPath.charAt(0) === '/'
|
||||
? aPath
|
||||
: normalize(aRoot.replace(/\/+$/, '') + '/' + aPath);
|
||||
|
||||
if (aRootUrl) {
|
||||
aRootUrl.path = joined;
|
||||
return urlGenerate(aRootUrl);
|
||||
}
|
||||
return joined;
|
||||
}
|
||||
exports.join = join;
|
||||
|
||||
/**
|
||||
* Make a path relative to a URL or another path.
|
||||
*
|
||||
* @param aRoot The root path or URL.
|
||||
* @param aPath The path or URL to be made relative to aRoot.
|
||||
*/
|
||||
function relative(aRoot, aPath) {
|
||||
if (aRoot === "") {
|
||||
aRoot = ".";
|
||||
}
|
||||
|
||||
aRoot = aRoot.replace(/\/$/, '');
|
||||
|
||||
// XXX: It is possible to remove this block, and the tests still pass!
|
||||
var url = urlParse(aRoot);
|
||||
if (aPath.charAt(0) == "/" && url && url.path == "/") {
|
||||
return aPath.slice(1);
|
||||
}
|
||||
|
||||
return aPath.indexOf(aRoot + '/') === 0
|
||||
? aPath.substr(aRoot.length + 1)
|
||||
: aPath;
|
||||
}
|
||||
exports.relative = relative;
|
||||
|
||||
/**
|
||||
* Because behavior goes wacky when you set `__proto__` on objects, we
|
||||
* have to prefix all the strings in our set with an arbitrary character.
|
||||
*
|
||||
* See https://github.com/mozilla/source-map/pull/31 and
|
||||
* https://github.com/mozilla/source-map/issues/30
|
||||
*
|
||||
* @param String aStr
|
||||
*/
|
||||
function toSetString(aStr) {
|
||||
return '$' + aStr;
|
||||
}
|
||||
exports.toSetString = toSetString;
|
||||
|
||||
function fromSetString(aStr) {
|
||||
return aStr.substr(1);
|
||||
}
|
||||
exports.fromSetString = fromSetString;
|
||||
|
||||
function strcmp(aStr1, aStr2) {
|
||||
var s1 = aStr1 || "";
|
||||
var s2 = aStr2 || "";
|
||||
return (s1 > s2) - (s1 < s2);
|
||||
}
|
||||
|
||||
/**
|
||||
* Comparator between two mappings where the original positions are compared.
|
||||
*
|
||||
* Optionally pass in `true` as `onlyCompareOriginal` to consider two
|
||||
* mappings with the same original source/line/column, but different generated
|
||||
* line and column the same. Useful when searching for a mapping with a
|
||||
* stubbed out mapping.
|
||||
*/
|
||||
function compareByOriginalPositions(mappingA, mappingB, onlyCompareOriginal) {
|
||||
var cmp;
|
||||
|
||||
cmp = strcmp(mappingA.source, mappingB.source);
|
||||
if (cmp) {
|
||||
return cmp;
|
||||
}
|
||||
|
||||
cmp = mappingA.originalLine - mappingB.originalLine;
|
||||
if (cmp) {
|
||||
return cmp;
|
||||
}
|
||||
|
||||
cmp = mappingA.originalColumn - mappingB.originalColumn;
|
||||
if (cmp || onlyCompareOriginal) {
|
||||
return cmp;
|
||||
}
|
||||
|
||||
cmp = strcmp(mappingA.name, mappingB.name);
|
||||
if (cmp) {
|
||||
return cmp;
|
||||
}
|
||||
|
||||
cmp = mappingA.generatedLine - mappingB.generatedLine;
|
||||
if (cmp) {
|
||||
return cmp;
|
||||
}
|
||||
|
||||
return mappingA.generatedColumn - mappingB.generatedColumn;
|
||||
}
|
||||
exports.compareByOriginalPositions = compareByOriginalPositions;
|
||||
|
||||
/**
|
||||
* Comparator between two mappings where the generated positions are
|
||||
* compared.
|
||||
*
|
||||
* Optionally pass in `true` as `onlyCompareGenerated` to consider two
|
||||
* mappings with the same generated line and column, but different
|
||||
* source/name/original line and column the same. Useful when searching for a
|
||||
* mapping with a stubbed out mapping.
|
||||
*/
|
||||
function compareByGeneratedPositions(mappingA, mappingB, onlyCompareGenerated) {
|
||||
var cmp;
|
||||
|
||||
cmp = mappingA.generatedLine - mappingB.generatedLine;
|
||||
if (cmp) {
|
||||
return cmp;
|
||||
}
|
||||
|
||||
cmp = mappingA.generatedColumn - mappingB.generatedColumn;
|
||||
if (cmp || onlyCompareGenerated) {
|
||||
return cmp;
|
||||
}
|
||||
|
||||
cmp = strcmp(mappingA.source, mappingB.source);
|
||||
if (cmp) {
|
||||
return cmp;
|
||||
}
|
||||
|
||||
cmp = mappingA.originalLine - mappingB.originalLine;
|
||||
if (cmp) {
|
||||
return cmp;
|
||||
}
|
||||
|
||||
cmp = mappingA.originalColumn - mappingB.originalColumn;
|
||||
if (cmp) {
|
||||
return cmp;
|
||||
}
|
||||
|
||||
return strcmp(mappingA.name, mappingB.name);
|
||||
}
|
||||
exports.compareByGeneratedPositions = compareByGeneratedPositions;
|
||||
|
||||
});
|
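The util.js file above is the grab-bag of helpers the rest of the library leans on. As a quick orientation, the sketch below shows how a few of them behave when the vendored module is required directly; the require path and the sample values are assumptions for illustration, not part of the diff itself.

// Minimal usage sketch for the helpers defined in util.js above.
// Assumption: run from the repository root; the path below points at the vendored copy.
var util = require('./node_modules/transformers/node_modules/source-map/lib/source-map/util');

// getArg: read a named option, fall back to a default, or throw if it is required.
util.getArg({ line: 4 }, 'line');        // => 4
util.getArg({ line: 4 }, 'column', 0);   // => 0 (default used)

// normalize/join/relative: the path and URL arithmetic used when resolving sources.
util.normalize('/a/b/../c//d');                    // => '/a/c/d'
util.join('http://example.com/root', 'one.js');    // => 'http://example.com/root/one.js'
util.relative('/the/root', '/the/root/one.js');    // => 'one.js'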
||||
211
node_modules/transformers/node_modules/source-map/package.json
generated
vendored
Normal file
@@ -0,0 +1,211 @@
|
||||
{
|
||||
"_args": [
|
||||
[
|
||||
"source-map@~0.1.7",
|
||||
"/home/mitchell/Desktop/test-mywebsite/mywebsite/node_modules/transformers/node_modules/uglify-js"
|
||||
]
|
||||
],
|
||||
"_from": "source-map@>=0.1.7 <0.2.0",
|
||||
"_id": "source-map@0.1.43",
|
||||
"_inCache": true,
|
||||
"_installable": true,
|
||||
"_location": "/transformers/source-map",
|
||||
"_npmUser": {
|
||||
"email": "fitzgen@gmail.com",
|
||||
"name": "nickfitzgerald"
|
||||
},
|
||||
"_npmVersion": "1.4.9",
|
||||
"_phantomChildren": {},
|
||||
"_requested": {
|
||||
"name": "source-map",
|
||||
"raw": "source-map@~0.1.7",
|
||||
"rawSpec": "~0.1.7",
|
||||
"scope": null,
|
||||
"spec": ">=0.1.7 <0.2.0",
|
||||
"type": "range"
|
||||
},
|
||||
"_requiredBy": [
|
||||
"/transformers/uglify-js"
|
||||
],
|
||||
"_resolved": "https://registry.npmjs.org/source-map/-/source-map-0.1.43.tgz",
|
||||
"_shasum": "c24bc146ca517c1471f5dacbe2571b2b7f9e3346",
|
||||
"_shrinkwrap": null,
|
||||
"_spec": "source-map@~0.1.7",
|
||||
"_where": "/home/mitchell/Desktop/test-mywebsite/mywebsite/node_modules/transformers/node_modules/uglify-js",
|
||||
"author": {
|
||||
"email": "nfitzgerald@mozilla.com",
|
||||
"name": "Nick Fitzgerald"
|
||||
},
|
||||
"bugs": {
|
||||
"url": "https://github.com/mozilla/source-map/issues"
|
||||
},
|
||||
"contributors": [
|
||||
{
|
||||
"name": "Evan Wallace",
|
||||
"email": "evan.exe@gmail.com"
|
||||
},
|
||||
{
|
||||
"name": "Tobias Koppers",
|
||||
"email": "tobias.koppers@googlemail.com"
|
||||
},
|
||||
{
|
||||
"name": "Stephen Crane",
|
||||
"email": "scrane@mozilla.com"
|
||||
},
|
||||
{
|
||||
"name": "Ryan Seddon",
|
||||
"email": "seddon.ryan@gmail.com"
|
||||
},
|
||||
{
|
||||
"name": "Miles Elam",
|
||||
"email": "miles.elam@deem.com"
|
||||
},
|
||||
{
|
||||
"name": "Mihai Bazon",
|
||||
"email": "mihai.bazon@gmail.com"
|
||||
},
|
||||
{
|
||||
"name": "Michael Ficarra",
|
||||
"email": "github.public.email@michael.ficarra.me"
|
||||
},
|
||||
{
|
||||
"name": "Todd Wolfson",
|
||||
"email": "todd@twolfson.com"
|
||||
},
|
||||
{
|
||||
"name": "Alexander Solovyov",
|
||||
"email": "alexander@solovyov.net"
|
||||
},
|
||||
{
|
||||
"name": "Felix Gnass",
|
||||
"email": "fgnass@gmail.com"
|
||||
},
|
||||
{
|
||||
"name": "Conrad Irwin",
|
||||
"email": "conrad.irwin@gmail.com"
|
||||
},
|
||||
{
|
||||
"name": "usrbincc",
|
||||
"email": "usrbincc@yahoo.com"
|
||||
},
|
||||
{
|
||||
"name": "David Glasser",
|
||||
"email": "glasser@davidglasser.net"
|
||||
},
|
||||
{
|
||||
"name": "Chase Douglas",
|
||||
"email": "chase@newrelic.com"
|
||||
},
|
||||
{
|
||||
"name": "Duncan Beevers",
|
||||
"email": "duncan@dweebd.com"
|
||||
},
|
||||
{
|
||||
"name": "Heather Arthur",
|
||||
"email": "fayearthur@gmail.com"
|
||||
},
|
||||
{
|
||||
"name": "Hugh Kennedy",
|
||||
"email": "hughskennedy@gmail.com"
|
||||
},
|
||||
{
|
||||
"name": "David Glasser",
|
||||
"email": "glasser@davidglasser.net"
|
||||
},
|
||||
{
|
||||
"name": "Simon Lydell",
|
||||
"email": "simon.lydell@gmail.com"
|
||||
},
|
||||
{
|
||||
"name": "Jmeas Smith",
|
||||
"email": "jellyes2@gmail.com"
|
||||
},
|
||||
{
|
||||
"name": "Michael Z Goddard",
|
||||
"email": "mzgoddard@gmail.com"
|
||||
},
|
||||
{
|
||||
"name": "azu",
|
||||
"email": "azu@users.noreply.github.com"
|
||||
},
|
||||
{
|
||||
"name": "John Gozde",
|
||||
"email": "john@gozde.ca"
|
||||
},
|
||||
{
|
||||
"name": "Adam Kirkton",
|
||||
"email": "akirkton@truefitinnovation.com"
|
||||
},
|
||||
{
|
||||
"name": "Chris Montgomery",
|
||||
"email": "christopher.montgomery@dowjones.com"
|
||||
},
|
||||
{
|
||||
"name": "J. Ryan Stinnett",
|
||||
"email": "jryans@gmail.com"
|
||||
},
|
||||
{
|
||||
"name": "Jack Herrington",
|
||||
"email": "jherrington@walmartlabs.com"
|
||||
},
|
||||
{
|
||||
"name": "Chris Truter",
|
||||
"email": "jeffpalentine@gmail.com"
|
||||
},
|
||||
{
|
||||
"name": "Daniel Espeset",
|
||||
"email": "daniel@danielespeset.com"
|
||||
}
|
||||
],
|
||||
"dependencies": {
|
||||
"amdefine": ">=0.0.4"
|
||||
},
|
||||
"description": "Generates and consumes source maps",
|
||||
"devDependencies": {
|
||||
"dryice": ">=0.4.8"
|
||||
},
|
||||
"directories": {
|
||||
"lib": "./lib"
|
||||
},
|
||||
"dist": {
|
||||
"shasum": "c24bc146ca517c1471f5dacbe2571b2b7f9e3346",
|
||||
"tarball": "http://registry.npmjs.org/source-map/-/source-map-0.1.43.tgz"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=0.8.0"
|
||||
},
|
||||
"homepage": "https://github.com/mozilla/source-map",
|
||||
"licenses": [
|
||||
{
|
||||
"type": "BSD",
|
||||
"url": "http://opensource.org/licenses/BSD-3-Clause"
|
||||
}
|
||||
],
|
||||
"main": "./lib/source-map.js",
|
||||
"maintainers": [
|
||||
{
|
||||
"name": "mozilla-devtools",
|
||||
"email": "mozilla-developer-tools@googlegroups.com"
|
||||
},
|
||||
{
|
||||
"name": "mozilla",
|
||||
"email": "dherman@mozilla.com"
|
||||
},
|
||||
{
|
||||
"name": "nickfitzgerald",
|
||||
"email": "fitzgen@gmail.com"
|
||||
}
|
||||
],
|
||||
"name": "source-map",
|
||||
"optionalDependencies": {},
|
||||
"readme": "ERROR: No README data found!",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+ssh://git@github.com/mozilla/source-map.git"
|
||||
},
|
||||
"scripts": {
|
||||
"build": "node Makefile.dryice.js",
|
||||
"test": "node test/run-tests.js"
|
||||
},
|
||||
"version": "0.1.43"
|
||||
}
|
||||
62
node_modules/transformers/node_modules/source-map/test/run-tests.js
generated
vendored
Executable file
@@ -0,0 +1,62 @@
|
||||
#!/usr/bin/env node
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*/
|
||||
var assert = require('assert');
|
||||
var fs = require('fs');
|
||||
var path = require('path');
|
||||
var util = require('./source-map/util');
|
||||
|
||||
function run(tests) {
|
||||
var total = 0;
|
||||
var passed = 0;
|
||||
|
||||
for (var i = 0; i < tests.length; i++) {
|
||||
for (var k in tests[i].testCase) {
|
||||
if (/^test/.test(k)) {
|
||||
total++;
|
||||
try {
|
||||
tests[i].testCase[k](assert, util);
|
||||
passed++;
|
||||
}
|
||||
catch (e) {
|
||||
console.log('FAILED ' + tests[i].name + ': ' + k + '!');
|
||||
console.log(e.stack);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
console.log('');
|
||||
console.log(passed + ' / ' + total + ' tests passed.');
|
||||
console.log('');
|
||||
|
||||
return total - passed;
|
||||
}
|
||||
|
||||
function isTestFile(f) {
|
||||
var testToRun = process.argv[2];
|
||||
return testToRun
|
||||
? path.basename(testToRun) === f
|
||||
: /^test\-.*?\.js/.test(f);
|
||||
}
|
||||
|
||||
function toModule(f) {
|
||||
return './source-map/' + f.replace(/\.js$/, '');
|
||||
}
|
||||
|
||||
var requires = fs.readdirSync(path.join(__dirname, 'source-map'))
|
||||
.filter(isTestFile)
|
||||
.map(toModule);
|
||||
|
||||
var code = run(requires.map(require).map(function (mod, i) {
|
||||
return {
|
||||
name: requires[i],
|
||||
testCase: mod
|
||||
};
|
||||
}));
|
||||
|
||||
process.exit(code);
|
||||
26
node_modules/transformers/node_modules/source-map/test/source-map/test-api.js
generated
vendored
Normal file
@@ -0,0 +1,26 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2012 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*/
|
||||
if (typeof define !== 'function') {
|
||||
var define = require('amdefine')(module, require);
|
||||
}
|
||||
define(function (require, exports, module) {
|
||||
|
||||
var sourceMap;
|
||||
try {
|
||||
sourceMap = require('../../lib/source-map');
|
||||
} catch (e) {
|
||||
sourceMap = {};
|
||||
Components.utils.import('resource:///modules/devtools/SourceMap.jsm', sourceMap);
|
||||
}
|
||||
|
||||
exports['test that the api is properly exposed in the top level'] = function (assert, util) {
|
||||
assert.equal(typeof sourceMap.SourceMapGenerator, "function");
|
||||
assert.equal(typeof sourceMap.SourceMapConsumer, "function");
|
||||
assert.equal(typeof sourceMap.SourceNode, "function");
|
||||
};
|
||||
|
||||
});
|
||||
104
node_modules/transformers/node_modules/source-map/test/source-map/test-array-set.js
generated
vendored
Normal file
@@ -0,0 +1,104 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*/
|
||||
if (typeof define !== 'function') {
|
||||
var define = require('amdefine')(module, require);
|
||||
}
|
||||
define(function (require, exports, module) {
|
||||
|
||||
var ArraySet = require('../../lib/source-map/array-set').ArraySet;
|
||||
|
||||
function makeTestSet() {
|
||||
var set = new ArraySet();
|
||||
for (var i = 0; i < 100; i++) {
|
||||
set.add(String(i));
|
||||
}
|
||||
return set;
|
||||
}
|
||||
|
||||
exports['test .has() membership'] = function (assert, util) {
|
||||
var set = makeTestSet();
|
||||
for (var i = 0; i < 100; i++) {
|
||||
assert.ok(set.has(String(i)));
|
||||
}
|
||||
};
|
||||
|
||||
exports['test .indexOf() elements'] = function (assert, util) {
|
||||
var set = makeTestSet();
|
||||
for (var i = 0; i < 100; i++) {
|
||||
assert.strictEqual(set.indexOf(String(i)), i);
|
||||
}
|
||||
};
|
||||
|
||||
exports['test .at() indexing'] = function (assert, util) {
|
||||
var set = makeTestSet();
|
||||
for (var i = 0; i < 100; i++) {
|
||||
assert.strictEqual(set.at(i), String(i));
|
||||
}
|
||||
};
|
||||
|
||||
exports['test creating from an array'] = function (assert, util) {
|
||||
var set = ArraySet.fromArray(['foo', 'bar', 'baz', 'quux', 'hasOwnProperty']);
|
||||
|
||||
assert.ok(set.has('foo'));
|
||||
assert.ok(set.has('bar'));
|
||||
assert.ok(set.has('baz'));
|
||||
assert.ok(set.has('quux'));
|
||||
assert.ok(set.has('hasOwnProperty'));
|
||||
|
||||
assert.strictEqual(set.indexOf('foo'), 0);
|
||||
assert.strictEqual(set.indexOf('bar'), 1);
|
||||
assert.strictEqual(set.indexOf('baz'), 2);
|
||||
assert.strictEqual(set.indexOf('quux'), 3);
|
||||
|
||||
assert.strictEqual(set.at(0), 'foo');
|
||||
assert.strictEqual(set.at(1), 'bar');
|
||||
assert.strictEqual(set.at(2), 'baz');
|
||||
assert.strictEqual(set.at(3), 'quux');
|
||||
};
|
||||
|
||||
exports['test that you can add __proto__; see github issue #30'] = function (assert, util) {
|
||||
var set = new ArraySet();
|
||||
set.add('__proto__');
|
||||
assert.ok(set.has('__proto__'));
|
||||
assert.strictEqual(set.at(0), '__proto__');
|
||||
assert.strictEqual(set.indexOf('__proto__'), 0);
|
||||
};
|
||||
|
||||
exports['test .fromArray() with duplicates'] = function (assert, util) {
|
||||
var set = ArraySet.fromArray(['foo', 'foo']);
|
||||
assert.ok(set.has('foo'));
|
||||
assert.strictEqual(set.at(0), 'foo');
|
||||
assert.strictEqual(set.indexOf('foo'), 0);
|
||||
assert.strictEqual(set.toArray().length, 1);
|
||||
|
||||
set = ArraySet.fromArray(['foo', 'foo'], true);
|
||||
assert.ok(set.has('foo'));
|
||||
assert.strictEqual(set.at(0), 'foo');
|
||||
assert.strictEqual(set.at(1), 'foo');
|
||||
assert.strictEqual(set.indexOf('foo'), 0);
|
||||
assert.strictEqual(set.toArray().length, 2);
|
||||
};
|
||||
|
||||
exports['test .add() with duplicates'] = function (assert, util) {
|
||||
var set = new ArraySet();
|
||||
set.add('foo');
|
||||
|
||||
set.add('foo');
|
||||
assert.ok(set.has('foo'));
|
||||
assert.strictEqual(set.at(0), 'foo');
|
||||
assert.strictEqual(set.indexOf('foo'), 0);
|
||||
assert.strictEqual(set.toArray().length, 1);
|
||||
|
||||
set.add('foo', true);
|
||||
assert.ok(set.has('foo'));
|
||||
assert.strictEqual(set.at(0), 'foo');
|
||||
assert.strictEqual(set.at(1), 'foo');
|
||||
assert.strictEqual(set.indexOf('foo'), 0);
|
||||
assert.strictEqual(set.toArray().length, 2);
|
||||
};
|
||||
|
||||
});
|
||||
23
node_modules/transformers/node_modules/source-map/test/source-map/test-base64-vlq.js
generated
vendored
Normal file
@@ -0,0 +1,23 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*/
|
||||
if (typeof define !== 'function') {
|
||||
var define = require('amdefine')(module, require);
|
||||
}
|
||||
define(function (require, exports, module) {
|
||||
|
||||
var base64VLQ = require('../../lib/source-map/base64-vlq');
|
||||
|
||||
exports['test normal encoding and decoding'] = function (assert, util) {
|
||||
var result = {};
|
||||
for (var i = -255; i < 256; i++) {
|
||||
base64VLQ.decode(base64VLQ.encode(i), result);
|
||||
assert.equal(result.value, i);
|
||||
assert.equal(result.rest, "");
|
||||
}
|
||||
};
|
||||
|
||||
});
|
||||
35
node_modules/transformers/node_modules/source-map/test/source-map/test-base64.js
generated
vendored
Normal file
@@ -0,0 +1,35 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*/
|
||||
if (typeof define !== 'function') {
|
||||
var define = require('amdefine')(module, require);
|
||||
}
|
||||
define(function (require, exports, module) {
|
||||
|
||||
var base64 = require('../../lib/source-map/base64');
|
||||
|
||||
exports['test out of range encoding'] = function (assert, util) {
|
||||
assert.throws(function () {
|
||||
base64.encode(-1);
|
||||
});
|
||||
assert.throws(function () {
|
||||
base64.encode(64);
|
||||
});
|
||||
};
|
||||
|
||||
exports['test out of range decoding'] = function (assert, util) {
|
||||
assert.throws(function () {
|
||||
base64.decode('=');
|
||||
});
|
||||
};
|
||||
|
||||
exports['test normal encoding and decoding'] = function (assert, util) {
|
||||
for (var i = 0; i < 64; i++) {
|
||||
assert.equal(base64.decode(base64.encode(i)), i);
|
||||
}
|
||||
};
|
||||
|
||||
});
|
||||
54
node_modules/transformers/node_modules/source-map/test/source-map/test-binary-search.js
generated
vendored
Normal file
@@ -0,0 +1,54 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*/
|
||||
if (typeof define !== 'function') {
|
||||
var define = require('amdefine')(module, require);
|
||||
}
|
||||
define(function (require, exports, module) {
|
||||
|
||||
var binarySearch = require('../../lib/source-map/binary-search');
|
||||
|
||||
function numberCompare(a, b) {
|
||||
return a - b;
|
||||
}
|
||||
|
||||
exports['test too high'] = function (assert, util) {
|
||||
var needle = 30;
|
||||
var haystack = [2,4,6,8,10,12,14,16,18,20];
|
||||
|
||||
assert.doesNotThrow(function () {
|
||||
binarySearch.search(needle, haystack, numberCompare);
|
||||
});
|
||||
|
||||
assert.equal(haystack[binarySearch.search(needle, haystack, numberCompare)], 20);
|
||||
};
|
||||
|
||||
exports['test too low'] = function (assert, util) {
|
||||
var needle = 1;
|
||||
var haystack = [2,4,6,8,10,12,14,16,18,20];
|
||||
|
||||
assert.doesNotThrow(function () {
|
||||
binarySearch.search(needle, haystack, numberCompare);
|
||||
});
|
||||
|
||||
assert.equal(binarySearch.search(needle, haystack, numberCompare), -1);
|
||||
};
|
||||
|
||||
exports['test exact search'] = function (assert, util) {
|
||||
var needle = 4;
|
||||
var haystack = [2,4,6,8,10,12,14,16,18,20];
|
||||
|
||||
assert.equal(haystack[binarySearch.search(needle, haystack, numberCompare)], 4);
|
||||
};
|
||||
|
||||
exports['test fuzzy search'] = function (assert, util) {
|
||||
var needle = 19;
|
||||
var haystack = [2,4,6,8,10,12,14,16,18,20];
|
||||
|
||||
assert.equal(haystack[binarySearch.search(needle, haystack, numberCompare)], 18);
|
||||
};
|
||||
|
||||
});
|
||||
84
node_modules/transformers/node_modules/source-map/test/source-map/test-dog-fooding.js
generated
vendored
Normal file
@@ -0,0 +1,84 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*/
|
||||
if (typeof define !== 'function') {
|
||||
var define = require('amdefine')(module, require);
|
||||
}
|
||||
define(function (require, exports, module) {
|
||||
|
||||
var SourceMapConsumer = require('../../lib/source-map/source-map-consumer').SourceMapConsumer;
|
||||
var SourceMapGenerator = require('../../lib/source-map/source-map-generator').SourceMapGenerator;
|
||||
|
||||
exports['test eating our own dog food'] = function (assert, util) {
|
||||
var smg = new SourceMapGenerator({
|
||||
file: 'testing.js',
|
||||
sourceRoot: '/wu/tang'
|
||||
});
|
||||
|
||||
smg.addMapping({
|
||||
source: 'gza.coffee',
|
||||
original: { line: 1, column: 0 },
|
||||
generated: { line: 2, column: 2 }
|
||||
});
|
||||
|
||||
smg.addMapping({
|
||||
source: 'gza.coffee',
|
||||
original: { line: 2, column: 0 },
|
||||
generated: { line: 3, column: 2 }
|
||||
});
|
||||
|
||||
smg.addMapping({
|
||||
source: 'gza.coffee',
|
||||
original: { line: 3, column: 0 },
|
||||
generated: { line: 4, column: 2 }
|
||||
});
|
||||
|
||||
smg.addMapping({
|
||||
source: 'gza.coffee',
|
||||
original: { line: 4, column: 0 },
|
||||
generated: { line: 5, column: 2 }
|
||||
});
|
||||
|
||||
smg.addMapping({
|
||||
source: 'gza.coffee',
|
||||
original: { line: 5, column: 10 },
|
||||
generated: { line: 6, column: 12 }
|
||||
});
|
||||
|
||||
var smc = new SourceMapConsumer(smg.toString());
|
||||
|
||||
// Exact
|
||||
util.assertMapping(2, 2, '/wu/tang/gza.coffee', 1, 0, null, smc, assert);
|
||||
util.assertMapping(3, 2, '/wu/tang/gza.coffee', 2, 0, null, smc, assert);
|
||||
util.assertMapping(4, 2, '/wu/tang/gza.coffee', 3, 0, null, smc, assert);
|
||||
util.assertMapping(5, 2, '/wu/tang/gza.coffee', 4, 0, null, smc, assert);
|
||||
util.assertMapping(6, 12, '/wu/tang/gza.coffee', 5, 10, null, smc, assert);
|
||||
|
||||
// Fuzzy
|
||||
|
||||
// Generated to original
|
||||
util.assertMapping(2, 0, null, null, null, null, smc, assert, true);
|
||||
util.assertMapping(2, 9, '/wu/tang/gza.coffee', 1, 0, null, smc, assert, true);
|
||||
util.assertMapping(3, 0, null, null, null, null, smc, assert, true);
|
||||
util.assertMapping(3, 9, '/wu/tang/gza.coffee', 2, 0, null, smc, assert, true);
|
||||
util.assertMapping(4, 0, null, null, null, null, smc, assert, true);
|
||||
util.assertMapping(4, 9, '/wu/tang/gza.coffee', 3, 0, null, smc, assert, true);
|
||||
util.assertMapping(5, 0, null, null, null, null, smc, assert, true);
|
||||
util.assertMapping(5, 9, '/wu/tang/gza.coffee', 4, 0, null, smc, assert, true);
|
||||
util.assertMapping(6, 0, null, null, null, null, smc, assert, true);
|
||||
util.assertMapping(6, 9, null, null, null, null, smc, assert, true);
|
||||
util.assertMapping(6, 13, '/wu/tang/gza.coffee', 5, 10, null, smc, assert, true);
|
||||
|
||||
// Original to generated
|
||||
util.assertMapping(2, 2, '/wu/tang/gza.coffee', 1, 1, null, smc, assert, null, true);
|
||||
util.assertMapping(3, 2, '/wu/tang/gza.coffee', 2, 3, null, smc, assert, null, true);
|
||||
util.assertMapping(4, 2, '/wu/tang/gza.coffee', 3, 6, null, smc, assert, null, true);
|
||||
util.assertMapping(5, 2, '/wu/tang/gza.coffee', 4, 9, null, smc, assert, null, true);
|
||||
util.assertMapping(5, 2, '/wu/tang/gza.coffee', 5, 9, null, smc, assert, null, true);
|
||||
util.assertMapping(6, 12, '/wu/tang/gza.coffee', 6, 19, null, smc, assert, null, true);
|
||||
};
|
||||
|
||||
});
|
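The dog-fooding test above builds a map with SourceMapGenerator and reads it back with SourceMapConsumer. The sketch below is the same round trip in its smallest form; the file names and the require path are invented for illustration and are not part of the vendored files.

// Generate a source map, then consume it and look a position up.
// Assumption: run from the repository root against the vendored package.
var sourceMap = require('./node_modules/transformers/node_modules/source-map/lib/source-map');

var generator = new sourceMap.SourceMapGenerator({ file: 'bundle.js', sourceRoot: '/src' });
generator.addMapping({
  source: 'app.coffee',
  original: { line: 1, column: 0 },
  generated: { line: 2, column: 2 }
});

var consumer = new sourceMap.SourceMapConsumer(generator.toString());
var pos = consumer.originalPositionFor({ line: 2, column: 2 });
// pos.source === '/src/app.coffee', pos.line === 1, pos.column === 0
console.log(pos);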
||||
702
node_modules/transformers/node_modules/source-map/test/source-map/test-source-map-consumer.js
generated
vendored
Normal file
@@ -0,0 +1,702 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*/
|
||||
if (typeof define !== 'function') {
|
||||
var define = require('amdefine')(module, require);
|
||||
}
|
||||
define(function (require, exports, module) {
|
||||
|
||||
var SourceMapConsumer = require('../../lib/source-map/source-map-consumer').SourceMapConsumer;
|
||||
var SourceMapGenerator = require('../../lib/source-map/source-map-generator').SourceMapGenerator;
|
||||
|
||||
exports['test that we can instantiate with a string or an object'] = function (assert, util) {
|
||||
assert.doesNotThrow(function () {
|
||||
var map = new SourceMapConsumer(util.testMap);
|
||||
});
|
||||
assert.doesNotThrow(function () {
|
||||
var map = new SourceMapConsumer(JSON.stringify(util.testMap));
|
||||
});
|
||||
};
|
||||
|
||||
exports['test that the `sources` field has the original sources'] = function (assert, util) {
|
||||
var map;
|
||||
var sources;
|
||||
|
||||
map = new SourceMapConsumer(util.testMap);
|
||||
sources = map.sources;
|
||||
assert.equal(sources[0], '/the/root/one.js');
|
||||
assert.equal(sources[1], '/the/root/two.js');
|
||||
assert.equal(sources.length, 2);
|
||||
|
||||
map = new SourceMapConsumer(util.testMapNoSourceRoot);
|
||||
sources = map.sources;
|
||||
assert.equal(sources[0], 'one.js');
|
||||
assert.equal(sources[1], 'two.js');
|
||||
assert.equal(sources.length, 2);
|
||||
|
||||
map = new SourceMapConsumer(util.testMapEmptySourceRoot);
|
||||
sources = map.sources;
|
||||
assert.equal(sources[0], 'one.js');
|
||||
assert.equal(sources[1], 'two.js');
|
||||
assert.equal(sources.length, 2);
|
||||
};
|
||||
|
||||
exports['test that the source root is reflected in a mapping\'s source field'] = function (assert, util) {
|
||||
var map;
|
||||
var mapping;
|
||||
|
||||
map = new SourceMapConsumer(util.testMap);
|
||||
|
||||
mapping = map.originalPositionFor({
|
||||
line: 2,
|
||||
column: 1
|
||||
});
|
||||
assert.equal(mapping.source, '/the/root/two.js');
|
||||
|
||||
mapping = map.originalPositionFor({
|
||||
line: 1,
|
||||
column: 1
|
||||
});
|
||||
assert.equal(mapping.source, '/the/root/one.js');
|
||||
|
||||
|
||||
map = new SourceMapConsumer(util.testMapNoSourceRoot);
|
||||
|
||||
mapping = map.originalPositionFor({
|
||||
line: 2,
|
||||
column: 1
|
||||
});
|
||||
assert.equal(mapping.source, 'two.js');
|
||||
|
||||
mapping = map.originalPositionFor({
|
||||
line: 1,
|
||||
column: 1
|
||||
});
|
||||
assert.equal(mapping.source, 'one.js');
|
||||
|
||||
|
||||
map = new SourceMapConsumer(util.testMapEmptySourceRoot);
|
||||
|
||||
mapping = map.originalPositionFor({
|
||||
line: 2,
|
||||
column: 1
|
||||
});
|
||||
assert.equal(mapping.source, 'two.js');
|
||||
|
||||
mapping = map.originalPositionFor({
|
||||
line: 1,
|
||||
column: 1
|
||||
});
|
||||
assert.equal(mapping.source, 'one.js');
|
||||
};
|
||||
|
||||
exports['test mapping tokens back exactly'] = function (assert, util) {
|
||||
var map = new SourceMapConsumer(util.testMap);
|
||||
|
||||
util.assertMapping(1, 1, '/the/root/one.js', 1, 1, null, map, assert);
|
||||
util.assertMapping(1, 5, '/the/root/one.js', 1, 5, null, map, assert);
|
||||
util.assertMapping(1, 9, '/the/root/one.js', 1, 11, null, map, assert);
|
||||
util.assertMapping(1, 18, '/the/root/one.js', 1, 21, 'bar', map, assert);
|
||||
util.assertMapping(1, 21, '/the/root/one.js', 2, 3, null, map, assert);
|
||||
util.assertMapping(1, 28, '/the/root/one.js', 2, 10, 'baz', map, assert);
|
||||
util.assertMapping(1, 32, '/the/root/one.js', 2, 14, 'bar', map, assert);
|
||||
|
||||
util.assertMapping(2, 1, '/the/root/two.js', 1, 1, null, map, assert);
|
||||
util.assertMapping(2, 5, '/the/root/two.js', 1, 5, null, map, assert);
|
||||
util.assertMapping(2, 9, '/the/root/two.js', 1, 11, null, map, assert);
|
||||
util.assertMapping(2, 18, '/the/root/two.js', 1, 21, 'n', map, assert);
|
||||
util.assertMapping(2, 21, '/the/root/two.js', 2, 3, null, map, assert);
|
||||
util.assertMapping(2, 28, '/the/root/two.js', 2, 10, 'n', map, assert);
|
||||
};
|
||||
|
||||
exports['test mapping tokens fuzzy'] = function (assert, util) {
|
||||
var map = new SourceMapConsumer(util.testMap);
|
||||
|
||||
// Finding original positions
|
||||
util.assertMapping(1, 20, '/the/root/one.js', 1, 21, 'bar', map, assert, true);
|
||||
util.assertMapping(1, 30, '/the/root/one.js', 2, 10, 'baz', map, assert, true);
|
||||
util.assertMapping(2, 12, '/the/root/two.js', 1, 11, null, map, assert, true);
|
||||
|
||||
// Finding generated positions
|
||||
util.assertMapping(1, 18, '/the/root/one.js', 1, 22, 'bar', map, assert, null, true);
|
||||
util.assertMapping(1, 28, '/the/root/one.js', 2, 13, 'baz', map, assert, null, true);
|
||||
util.assertMapping(2, 9, '/the/root/two.js', 1, 16, null, map, assert, null, true);
|
||||
};
|
||||
|
||||
exports['test mappings and end of lines'] = function (assert, util) {
|
||||
var smg = new SourceMapGenerator({
|
||||
file: 'foo.js'
|
||||
});
|
||||
smg.addMapping({
|
||||
original: { line: 1, column: 1 },
|
||||
generated: { line: 1, column: 1 },
|
||||
source: 'bar.js'
|
||||
});
|
||||
smg.addMapping({
|
||||
original: { line: 2, column: 2 },
|
||||
generated: { line: 2, column: 2 },
|
||||
source: 'bar.js'
|
||||
});
|
||||
|
||||
var map = SourceMapConsumer.fromSourceMap(smg);
|
||||
|
||||
// When finding original positions, mappings end at the end of the line.
|
||||
util.assertMapping(2, 1, null, null, null, null, map, assert, true);
|
||||
|
||||
// When finding generated positions, mappings do not end at the end of the line.
|
||||
util.assertMapping(1, 1, 'bar.js', 2, 1, null, map, assert, null, true);
|
||||
};
|
||||
|
||||
exports['test creating source map consumers with )]}\' prefix'] = function (assert, util) {
|
||||
assert.doesNotThrow(function () {
|
||||
var map = new SourceMapConsumer(")]}'" + JSON.stringify(util.testMap));
|
||||
});
|
||||
};
|
||||
|
||||
exports['test eachMapping'] = function (assert, util) {
|
||||
var map;
|
||||
|
||||
map = new SourceMapConsumer(util.testMap);
|
||||
var previousLine = -Infinity;
|
||||
var previousColumn = -Infinity;
|
||||
map.eachMapping(function (mapping) {
|
||||
assert.ok(mapping.generatedLine >= previousLine);
|
||||
|
||||
assert.ok(mapping.source === '/the/root/one.js' || mapping.source === '/the/root/two.js');
|
||||
|
||||
if (mapping.generatedLine === previousLine) {
|
||||
assert.ok(mapping.generatedColumn >= previousColumn);
|
||||
previousColumn = mapping.generatedColumn;
|
||||
}
|
||||
else {
|
||||
previousLine = mapping.generatedLine;
|
||||
previousColumn = -Infinity;
|
||||
}
|
||||
});
|
||||
|
||||
map = new SourceMapConsumer(util.testMapNoSourceRoot);
|
||||
map.eachMapping(function (mapping) {
|
||||
assert.ok(mapping.source === 'one.js' || mapping.source === 'two.js');
|
||||
});
|
||||
|
||||
map = new SourceMapConsumer(util.testMapEmptySourceRoot);
|
||||
map.eachMapping(function (mapping) {
|
||||
assert.ok(mapping.source === 'one.js' || mapping.source === 'two.js');
|
||||
});
|
||||
};
|
||||
|
||||
exports['test iterating over mappings in a different order'] = function (assert, util) {
|
||||
var map = new SourceMapConsumer(util.testMap);
|
||||
var previousLine = -Infinity;
|
||||
var previousColumn = -Infinity;
|
||||
var previousSource = "";
|
||||
map.eachMapping(function (mapping) {
|
||||
assert.ok(mapping.source >= previousSource);
|
||||
|
||||
if (mapping.source === previousSource) {
|
||||
assert.ok(mapping.originalLine >= previousLine);
|
||||
|
||||
if (mapping.originalLine === previousLine) {
|
||||
assert.ok(mapping.originalColumn >= previousColumn);
|
||||
previousColumn = mapping.originalColumn;
|
||||
}
|
||||
else {
|
||||
previousLine = mapping.originalLine;
|
||||
previousColumn = -Infinity;
|
||||
}
|
||||
}
|
||||
else {
|
||||
previousSource = mapping.source;
|
||||
previousLine = -Infinity;
|
||||
previousColumn = -Infinity;
|
||||
}
|
||||
}, null, SourceMapConsumer.ORIGINAL_ORDER);
|
||||
};
|
||||
|
||||
exports['test that we can set the context for `this` in eachMapping'] = function (assert, util) {
|
||||
var map = new SourceMapConsumer(util.testMap);
|
||||
var context = {};
|
||||
map.eachMapping(function () {
|
||||
assert.equal(this, context);
|
||||
}, context);
|
||||
};
|
||||
|
||||
exports['test that the `sourcesContent` field has the original sources'] = function (assert, util) {
|
||||
var map = new SourceMapConsumer(util.testMapWithSourcesContent);
|
||||
var sourcesContent = map.sourcesContent;
|
||||
|
||||
assert.equal(sourcesContent[0], ' ONE.foo = function (bar) {\n return baz(bar);\n };');
|
||||
assert.equal(sourcesContent[1], ' TWO.inc = function (n) {\n return n + 1;\n };');
|
||||
assert.equal(sourcesContent.length, 2);
|
||||
};
|
||||
|
||||
exports['test that we can get the original sources for the sources'] = function (assert, util) {
|
||||
var map = new SourceMapConsumer(util.testMapWithSourcesContent);
|
||||
var sources = map.sources;
|
||||
|
||||
assert.equal(map.sourceContentFor(sources[0]), ' ONE.foo = function (bar) {\n return baz(bar);\n };');
|
||||
assert.equal(map.sourceContentFor(sources[1]), ' TWO.inc = function (n) {\n return n + 1;\n };');
|
||||
assert.equal(map.sourceContentFor("one.js"), ' ONE.foo = function (bar) {\n return baz(bar);\n };');
|
||||
assert.equal(map.sourceContentFor("two.js"), ' TWO.inc = function (n) {\n return n + 1;\n };');
|
||||
assert.throws(function () {
|
||||
map.sourceContentFor("");
|
||||
}, Error);
|
||||
assert.throws(function () {
|
||||
map.sourceContentFor("/the/root/three.js");
|
||||
}, Error);
|
||||
assert.throws(function () {
|
||||
map.sourceContentFor("three.js");
|
||||
}, Error);
|
||||
};
|
||||
|
||||
exports['test that we can get the original source content with relative source paths'] = function (assert, util) {
|
||||
var map = new SourceMapConsumer(util.testMapRelativeSources);
|
||||
var sources = map.sources;
|
||||
|
||||
assert.equal(map.sourceContentFor(sources[0]), ' ONE.foo = function (bar) {\n return baz(bar);\n };');
|
||||
assert.equal(map.sourceContentFor(sources[1]), ' TWO.inc = function (n) {\n return n + 1;\n };');
|
||||
assert.equal(map.sourceContentFor("one.js"), ' ONE.foo = function (bar) {\n return baz(bar);\n };');
|
||||
assert.equal(map.sourceContentFor("two.js"), ' TWO.inc = function (n) {\n return n + 1;\n };');
|
||||
assert.throws(function () {
|
||||
map.sourceContentFor("");
|
||||
}, Error);
|
||||
assert.throws(function () {
|
||||
map.sourceContentFor("/the/root/three.js");
|
||||
}, Error);
|
||||
assert.throws(function () {
|
||||
map.sourceContentFor("three.js");
|
||||
}, Error);
|
||||
};
|
||||
|
||||
exports['test sourceRoot + generatedPositionFor'] = function (assert, util) {
|
||||
var map = new SourceMapGenerator({
|
||||
sourceRoot: 'foo/bar',
|
||||
file: 'baz.js'
|
||||
});
|
||||
map.addMapping({
|
||||
original: { line: 1, column: 1 },
|
||||
generated: { line: 2, column: 2 },
|
||||
source: 'bang.coffee'
|
||||
});
|
||||
map.addMapping({
|
||||
original: { line: 5, column: 5 },
|
||||
generated: { line: 6, column: 6 },
|
||||
source: 'bang.coffee'
|
||||
});
|
||||
map = new SourceMapConsumer(map.toString());
|
||||
|
||||
// Should handle without sourceRoot.
|
||||
var pos = map.generatedPositionFor({
|
||||
line: 1,
|
||||
column: 1,
|
||||
source: 'bang.coffee'
|
||||
});
|
||||
|
||||
assert.equal(pos.line, 2);
|
||||
assert.equal(pos.column, 2);
|
||||
|
||||
// Should handle with sourceRoot.
|
||||
var pos = map.generatedPositionFor({
|
||||
line: 1,
|
||||
column: 1,
|
||||
source: 'foo/bar/bang.coffee'
|
||||
});
|
||||
|
||||
assert.equal(pos.line, 2);
|
||||
assert.equal(pos.column, 2);
|
||||
};
|
||||
|
||||
exports['test allGeneratedPositionsFor'] = function (assert, util) {
|
||||
var map = new SourceMapGenerator({
|
||||
file: 'generated.js'
|
||||
});
|
||||
map.addMapping({
|
||||
original: { line: 1, column: 1 },
|
||||
generated: { line: 2, column: 2 },
|
||||
source: 'foo.coffee'
|
||||
});
|
||||
map.addMapping({
|
||||
original: { line: 1, column: 1 },
|
||||
generated: { line: 2, column: 2 },
|
||||
source: 'bar.coffee'
|
||||
});
|
||||
map.addMapping({
|
||||
original: { line: 2, column: 1 },
|
||||
generated: { line: 3, column: 2 },
|
||||
source: 'bar.coffee'
|
||||
});
|
||||
map.addMapping({
|
||||
original: { line: 2, column: 2 },
|
||||
generated: { line: 3, column: 3 },
|
||||
source: 'bar.coffee'
|
||||
});
|
||||
map.addMapping({
|
||||
original: { line: 3, column: 1 },
|
||||
generated: { line: 4, column: 2 },
|
||||
source: 'bar.coffee'
|
||||
});
|
||||
map = new SourceMapConsumer(map.toString());
|
||||
|
||||
var mappings = map.allGeneratedPositionsFor({
|
||||
line: 2,
|
||||
source: 'bar.coffee'
|
||||
});
|
||||
|
||||
assert.equal(mappings.length, 2);
|
||||
assert.equal(mappings[0].line, 3);
|
||||
assert.equal(mappings[0].column, 2);
|
||||
assert.equal(mappings[1].line, 3);
|
||||
assert.equal(mappings[1].column, 3);
|
||||
};
|
||||
|
||||
exports['test allGeneratedPositionsFor for line with no mappings'] = function (assert, util) {
|
||||
var map = new SourceMapGenerator({
|
||||
file: 'generated.js'
|
||||
});
|
||||
map.addMapping({
|
||||
original: { line: 1, column: 1 },
|
||||
generated: { line: 2, column: 2 },
|
||||
source: 'foo.coffee'
|
||||
});
|
||||
map.addMapping({
|
||||
original: { line: 1, column: 1 },
|
||||
generated: { line: 2, column: 2 },
|
||||
source: 'bar.coffee'
|
||||
});
|
||||
map.addMapping({
|
||||
original: { line: 3, column: 1 },
|
||||
generated: { line: 4, column: 2 },
|
||||
source: 'bar.coffee'
|
||||
});
|
||||
map = new SourceMapConsumer(map.toString());
|
||||
|
||||
var mappings = map.allGeneratedPositionsFor({
|
||||
line: 2,
|
||||
source: 'bar.coffee'
|
||||
});
|
||||
|
||||
assert.equal(mappings.length, 0);
|
||||
};
|
||||
|
||||
exports['test allGeneratedPositionsFor source map with no mappings'] = function (assert, util) {
|
||||
var map = new SourceMapGenerator({
|
||||
file: 'generated.js'
|
||||
});
|
||||
map = new SourceMapConsumer(map.toString());
|
||||
|
||||
var mappings = map.allGeneratedPositionsFor({
|
||||
line: 2,
|
||||
source: 'bar.coffee'
|
||||
});
|
||||
|
||||
assert.equal(mappings.length, 0);
|
||||
};
|
||||
|
||||
exports['test computeColumnSpans'] = function (assert, util) {
|
||||
var map = new SourceMapGenerator({
|
||||
file: 'generated.js'
|
||||
});
|
||||
map.addMapping({
|
||||
original: { line: 1, column: 1 },
|
||||
generated: { line: 1, column: 1 },
|
||||
source: 'foo.coffee'
|
||||
});
|
||||
map.addMapping({
|
||||
original: { line: 2, column: 1 },
|
||||
generated: { line: 2, column: 1 },
|
||||
source: 'foo.coffee'
|
||||
});
|
||||
map.addMapping({
|
||||
original: { line: 2, column: 2 },
|
||||
generated: { line: 2, column: 10 },
|
||||
source: 'foo.coffee'
|
||||
});
|
||||
map.addMapping({
|
||||
original: { line: 2, column: 3 },
|
||||
generated: { line: 2, column: 20 },
|
||||
source: 'foo.coffee'
|
||||
});
|
||||
map.addMapping({
|
||||
original: { line: 3, column: 1 },
|
||||
generated: { line: 3, column: 1 },
|
||||
source: 'foo.coffee'
|
||||
});
|
||||
map.addMapping({
|
||||
original: { line: 3, column: 2 },
|
||||
generated: { line: 3, column: 2 },
|
||||
source: 'foo.coffee'
|
||||
});
|
||||
map = new SourceMapConsumer(map.toString());
|
||||
|
||||
map.computeColumnSpans();
|
||||
|
||||
var mappings = map.allGeneratedPositionsFor({
|
||||
line: 1,
|
||||
source: 'foo.coffee'
|
||||
});
|
||||
|
||||
assert.equal(mappings.length, 1);
|
||||
assert.equal(mappings[0].lastColumn, Infinity);
|
||||
|
||||
var mappings = map.allGeneratedPositionsFor({
|
||||
line: 2,
|
||||
source: 'foo.coffee'
|
||||
});
|
||||
|
||||
assert.equal(mappings.length, 3);
|
||||
assert.equal(mappings[0].lastColumn, 9);
|
||||
assert.equal(mappings[1].lastColumn, 19);
|
||||
assert.equal(mappings[2].lastColumn, Infinity);
|
||||
|
||||
var mappings = map.allGeneratedPositionsFor({
|
||||
line: 3,
|
||||
source: 'foo.coffee'
|
||||
});
|
||||
|
||||
assert.equal(mappings.length, 2);
|
||||
assert.equal(mappings[0].lastColumn, 1);
|
||||
assert.equal(mappings[1].lastColumn, Infinity);
|
||||
};
|
||||
|
||||
exports['test sourceRoot + originalPositionFor'] = function (assert, util) {
|
||||
var map = new SourceMapGenerator({
|
||||
sourceRoot: 'foo/bar',
|
||||
file: 'baz.js'
|
||||
});
|
||||
map.addMapping({
|
||||
original: { line: 1, column: 1 },
|
||||
generated: { line: 2, column: 2 },
|
||||
source: 'bang.coffee'
|
||||
});
|
||||
map = new SourceMapConsumer(map.toString());
|
||||
|
||||
var pos = map.originalPositionFor({
|
||||
line: 2,
|
||||
column: 2
|
||||
});
|
||||
|
||||
// Should always have the prepended source root
|
||||
assert.equal(pos.source, 'foo/bar/bang.coffee');
|
||||
assert.equal(pos.line, 1);
|
||||
assert.equal(pos.column, 1);
|
||||
};
|
||||
|
||||
exports['test github issue #56'] = function (assert, util) {
|
||||
var map = new SourceMapGenerator({
|
||||
sourceRoot: 'http://',
|
||||
file: 'www.example.com/foo.js'
|
||||
});
|
||||
map.addMapping({
|
||||
original: { line: 1, column: 1 },
|
||||
generated: { line: 2, column: 2 },
|
||||
source: 'www.example.com/original.js'
|
||||
});
|
||||
map = new SourceMapConsumer(map.toString());
|
||||
|
||||
var sources = map.sources;
|
||||
assert.equal(sources.length, 1);
|
||||
assert.equal(sources[0], 'http://www.example.com/original.js');
|
||||
};
|
||||
|
||||
exports['test github issue #43'] = function (assert, util) {
|
||||
var map = new SourceMapGenerator({
|
||||
sourceRoot: 'http://example.com',
|
||||
file: 'foo.js'
|
||||
});
|
||||
map.addMapping({
|
||||
original: { line: 1, column: 1 },
|
||||
generated: { line: 2, column: 2 },
|
||||
source: 'http://cdn.example.com/original.js'
|
||||
});
|
||||
map = new SourceMapConsumer(map.toString());
|
||||
|
||||
var sources = map.sources;
|
||||
assert.equal(sources.length, 1,
|
||||
'Should only be one source.');
|
||||
assert.equal(sources[0], 'http://cdn.example.com/original.js',
|
||||
'Should not be joined with the sourceRoot.');
|
||||
};
|
||||
|
||||
exports['test absolute path, but same host sources'] = function (assert, util) {
|
||||
var map = new SourceMapGenerator({
|
||||
sourceRoot: 'http://example.com/foo/bar',
|
||||
file: 'foo.js'
|
||||
});
|
||||
map.addMapping({
|
||||
original: { line: 1, column: 1 },
|
||||
generated: { line: 2, column: 2 },
|
||||
source: '/original.js'
|
||||
});
|
||||
map = new SourceMapConsumer(map.toString());
|
||||
|
||||
var sources = map.sources;
|
||||
assert.equal(sources.length, 1,
|
||||
'Should only be one source.');
|
||||
assert.equal(sources[0], 'http://example.com/original.js',
|
||||
'Source should be relative the host of the source root.');
|
||||
};
|
||||
|
||||
exports['test github issue #64'] = function (assert, util) {
|
||||
var map = new SourceMapConsumer({
|
||||
"version": 3,
|
||||
"file": "foo.js",
|
||||
"sourceRoot": "http://example.com/",
|
||||
"sources": ["/a"],
|
||||
"names": [],
|
||||
"mappings": "AACA",
|
||||
"sourcesContent": ["foo"]
|
||||
});
|
||||
|
||||
assert.equal(map.sourceContentFor("a"), "foo");
|
||||
assert.equal(map.sourceContentFor("/a"), "foo");
|
||||
};
|
||||
|
||||
exports['test bug 885597'] = function (assert, util) {
|
||||
var map = new SourceMapConsumer({
|
||||
"version": 3,
|
||||
"file": "foo.js",
|
||||
"sourceRoot": "file:///Users/AlGore/Invented/The/Internet/",
|
||||
"sources": ["/a"],
|
||||
"names": [],
|
||||
"mappings": "AACA",
|
||||
"sourcesContent": ["foo"]
|
||||
});
|
||||
|
||||
var s = map.sources[0];
|
||||
assert.equal(map.sourceContentFor(s), "foo");
|
||||
};
|
||||
|
||||
exports['test github issue #72, duplicate sources'] = function (assert, util) {
|
||||
var map = new SourceMapConsumer({
|
||||
"version": 3,
|
||||
"file": "foo.js",
|
||||
"sources": ["source1.js", "source1.js", "source3.js"],
|
||||
"names": [],
|
||||
"mappings": ";EAAC;;IAEE;;MEEE",
|
||||
"sourceRoot": "http://example.com"
|
||||
});
|
||||
|
||||
var pos = map.originalPositionFor({
|
||||
line: 2,
|
||||
column: 2
|
||||
});
|
||||
assert.equal(pos.source, 'http://example.com/source1.js');
|
||||
assert.equal(pos.line, 1);
|
||||
assert.equal(pos.column, 1);
|
||||
|
||||
var pos = map.originalPositionFor({
|
||||
line: 4,
|
||||
column: 4
|
||||
});
|
||||
assert.equal(pos.source, 'http://example.com/source1.js');
|
||||
assert.equal(pos.line, 3);
|
||||
assert.equal(pos.column, 3);
|
||||
|
||||
var pos = map.originalPositionFor({
|
||||
line: 6,
|
||||
column: 6
|
||||
});
|
||||
assert.equal(pos.source, 'http://example.com/source3.js');
|
||||
assert.equal(pos.line, 5);
|
||||
assert.equal(pos.column, 5);
|
||||
};
|
||||
|
||||
exports['test github issue #72, duplicate names'] = function (assert, util) {
|
||||
var map = new SourceMapConsumer({
|
||||
"version": 3,
|
||||
"file": "foo.js",
|
||||
"sources": ["source.js"],
|
||||
"names": ["name1", "name1", "name3"],
|
||||
"mappings": ";EAACA;;IAEEA;;MAEEE",
|
||||
"sourceRoot": "http://example.com"
|
||||
});
|
||||
|
||||
var pos = map.originalPositionFor({
|
||||
line: 2,
|
||||
column: 2
|
||||
});
|
||||
assert.equal(pos.name, 'name1');
|
||||
assert.equal(pos.line, 1);
|
||||
assert.equal(pos.column, 1);
|
||||
|
||||
var pos = map.originalPositionFor({
|
||||
line: 4,
|
||||
column: 4
|
||||
});
|
||||
assert.equal(pos.name, 'name1');
|
||||
assert.equal(pos.line, 3);
|
||||
assert.equal(pos.column, 3);
|
||||
|
||||
var pos = map.originalPositionFor({
|
||||
line: 6,
|
||||
column: 6
|
||||
});
|
||||
assert.equal(pos.name, 'name3');
|
||||
assert.equal(pos.line, 5);
|
||||
assert.equal(pos.column, 5);
|
||||
};
|
||||
|
||||
exports['test SourceMapConsumer.fromSourceMap'] = function (assert, util) {
|
||||
var smg = new SourceMapGenerator({
|
||||
sourceRoot: 'http://example.com/',
|
||||
file: 'foo.js'
|
||||
});
|
||||
smg.addMapping({
|
||||
original: { line: 1, column: 1 },
|
||||
generated: { line: 2, column: 2 },
|
||||
source: 'bar.js'
|
||||
});
|
||||
smg.addMapping({
|
||||
original: { line: 2, column: 2 },
|
||||
generated: { line: 4, column: 4 },
|
||||
source: 'baz.js',
|
||||
name: 'dirtMcGirt'
|
||||
});
|
||||
smg.setSourceContent('baz.js', 'baz.js content');
|
||||
|
||||
var smc = SourceMapConsumer.fromSourceMap(smg);
|
||||
assert.equal(smc.file, 'foo.js');
|
||||
assert.equal(smc.sourceRoot, 'http://example.com/');
|
||||
assert.equal(smc.sources.length, 2);
|
||||
assert.equal(smc.sources[0], 'http://example.com/bar.js');
|
||||
assert.equal(smc.sources[1], 'http://example.com/baz.js');
|
||||
assert.equal(smc.sourceContentFor('baz.js'), 'baz.js content');
|
||||
|
||||
var pos = smc.originalPositionFor({
|
||||
line: 2,
|
||||
column: 2
|
||||
});
|
||||
assert.equal(pos.line, 1);
|
||||
assert.equal(pos.column, 1);
|
||||
assert.equal(pos.source, 'http://example.com/bar.js');
|
||||
assert.equal(pos.name, null);
|
||||
|
||||
pos = smc.generatedPositionFor({
|
||||
line: 1,
|
||||
column: 1,
|
||||
source: 'http://example.com/bar.js'
|
||||
});
|
||||
assert.equal(pos.line, 2);
|
||||
assert.equal(pos.column, 2);
|
||||
|
||||
pos = smc.originalPositionFor({
|
||||
line: 4,
|
||||
column: 4
|
||||
});
|
||||
assert.equal(pos.line, 2);
|
||||
assert.equal(pos.column, 2);
|
||||
assert.equal(pos.source, 'http://example.com/baz.js');
|
||||
assert.equal(pos.name, 'dirtMcGirt');
|
||||
|
||||
pos = smc.generatedPositionFor({
|
||||
line: 2,
|
||||
column: 2,
|
||||
source: 'http://example.com/baz.js'
|
||||
});
|
||||
assert.equal(pos.line, 4);
|
||||
assert.equal(pos.column, 4);
|
||||
};
|
||||
});
|
||||
679
node_modules/transformers/node_modules/source-map/test/source-map/test-source-map-generator.js
generated
vendored
Normal file
@@ -0,0 +1,679 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*/
|
||||
if (typeof define !== 'function') {
|
||||
var define = require('amdefine')(module, require);
|
||||
}
|
||||
define(function (require, exports, module) {
|
||||
|
||||
var SourceMapGenerator = require('../../lib/source-map/source-map-generator').SourceMapGenerator;
|
||||
var SourceMapConsumer = require('../../lib/source-map/source-map-consumer').SourceMapConsumer;
|
||||
var SourceNode = require('../../lib/source-map/source-node').SourceNode;
|
||||
var util = require('./util');
|
||||
|
||||
exports['test some simple stuff'] = function (assert, util) {
|
||||
var map = new SourceMapGenerator({
|
||||
file: 'foo.js',
|
||||
sourceRoot: '.'
|
||||
});
|
||||
assert.ok(true);
|
||||
|
||||
var map = new SourceMapGenerator().toJSON();
|
||||
assert.ok(!('file' in map));
|
||||
assert.ok(!('sourceRoot' in map));
|
||||
};
|
||||
|
||||
exports['test JSON serialization'] = function (assert, util) {
|
||||
var map = new SourceMapGenerator({
|
||||
file: 'foo.js',
|
||||
sourceRoot: '.'
|
||||
});
|
||||
assert.equal(map.toString(), JSON.stringify(map));
|
||||
};
|
||||
|
||||
exports['test adding mappings (case 1)'] = function (assert, util) {
|
||||
var map = new SourceMapGenerator({
|
||||
file: 'generated-foo.js',
|
||||
sourceRoot: '.'
|
||||
});
|
||||
|
||||
assert.doesNotThrow(function () {
|
||||
map.addMapping({
|
||||
generated: { line: 1, column: 1 }
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
exports['test adding mappings (case 2)'] = function (assert, util) {
|
||||
var map = new SourceMapGenerator({
|
||||
file: 'generated-foo.js',
|
||||
sourceRoot: '.'
|
||||
});
|
||||
|
||||
assert.doesNotThrow(function () {
|
||||
map.addMapping({
|
||||
generated: { line: 1, column: 1 },
|
||||
source: 'bar.js',
|
||||
original: { line: 1, column: 1 }
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
exports['test adding mappings (case 3)'] = function (assert, util) {
|
||||
var map = new SourceMapGenerator({
|
||||
file: 'generated-foo.js',
|
||||
sourceRoot: '.'
|
||||
});
|
||||
|
||||
assert.doesNotThrow(function () {
|
||||
map.addMapping({
|
||||
generated: { line: 1, column: 1 },
|
||||
source: 'bar.js',
|
||||
original: { line: 1, column: 1 },
|
||||
name: 'someToken'
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
exports['test adding mappings (invalid)'] = function (assert, util) {
|
||||
var map = new SourceMapGenerator({
|
||||
file: 'generated-foo.js',
|
||||
sourceRoot: '.'
|
||||
});
|
||||
|
||||
// Not enough info.
|
||||
assert.throws(function () {
|
||||
map.addMapping({});
|
||||
});
|
||||
|
||||
// Original file position, but no source.
|
||||
assert.throws(function () {
|
||||
map.addMapping({
|
||||
generated: { line: 1, column: 1 },
|
||||
original: { line: 1, column: 1 }
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
exports['test adding mappings with skipValidation'] = function (assert, util) {
|
||||
var map = new SourceMapGenerator({
|
||||
file: 'generated-foo.js',
|
||||
sourceRoot: '.',
|
||||
skipValidation: true
|
||||
});
|
||||
|
||||
// Not enough info, caught by `util.getArgs`
|
||||
assert.throws(function () {
|
||||
map.addMapping({});
|
||||
});
|
||||
|
||||
// Original file position, but no source. Not checked.
|
||||
assert.doesNotThrow(function () {
|
||||
map.addMapping({
|
||||
generated: { line: 1, column: 1 },
|
||||
original: { line: 1, column: 1 }
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
exports['test that the correct mappings are being generated'] = function (assert, util) {
|
||||
var map = new SourceMapGenerator({
|
||||
file: 'min.js',
|
||||
sourceRoot: '/the/root'
|
||||
});
|
||||
|
||||
map.addMapping({
|
||||
generated: { line: 1, column: 1 },
|
||||
original: { line: 1, column: 1 },
|
||||
source: 'one.js'
|
||||
});
|
||||
map.addMapping({
|
||||
generated: { line: 1, column: 5 },
|
||||
original: { line: 1, column: 5 },
|
||||
source: 'one.js'
|
||||
});
|
||||
map.addMapping({
|
||||
generated: { line: 1, column: 9 },
|
||||
original: { line: 1, column: 11 },
|
||||
source: 'one.js'
|
||||
});
|
||||
map.addMapping({
|
||||
generated: { line: 1, column: 18 },
|
||||
original: { line: 1, column: 21 },
|
||||
source: 'one.js',
|
||||
name: 'bar'
|
||||
});
|
||||
map.addMapping({
|
||||
generated: { line: 1, column: 21 },
|
||||
original: { line: 2, column: 3 },
|
||||
source: 'one.js'
|
||||
});
|
||||
map.addMapping({
|
||||
generated: { line: 1, column: 28 },
|
||||
original: { line: 2, column: 10 },
|
||||
source: 'one.js',
|
||||
name: 'baz'
|
||||
});
|
||||
map.addMapping({
|
||||
generated: { line: 1, column: 32 },
|
||||
original: { line: 2, column: 14 },
|
||||
source: 'one.js',
|
||||
name: 'bar'
|
||||
});
|
||||
|
||||
map.addMapping({
|
||||
generated: { line: 2, column: 1 },
|
||||
original: { line: 1, column: 1 },
|
||||
source: 'two.js'
|
||||
});
|
||||
map.addMapping({
|
||||
generated: { line: 2, column: 5 },
|
||||
original: { line: 1, column: 5 },
|
||||
source: 'two.js'
|
||||
});
|
||||
map.addMapping({
|
||||
generated: { line: 2, column: 9 },
|
||||
original: { line: 1, column: 11 },
|
||||
source: 'two.js'
|
||||
});
|
||||
map.addMapping({
|
||||
generated: { line: 2, column: 18 },
|
||||
original: { line: 1, column: 21 },
|
||||
source: 'two.js',
|
||||
name: 'n'
|
||||
});
|
||||
map.addMapping({
|
||||
generated: { line: 2, column: 21 },
|
||||
original: { line: 2, column: 3 },
|
||||
source: 'two.js'
|
||||
});
|
||||
map.addMapping({
|
||||
generated: { line: 2, column: 28 },
|
||||
original: { line: 2, column: 10 },
|
||||
source: 'two.js',
|
||||
name: 'n'
|
||||
});
|
||||
|
||||
map = JSON.parse(map.toString());
|
||||
|
||||
util.assertEqualMaps(assert, map, util.testMap);
|
||||
};
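
// The expected serialization for the test above is util.testMap (defined in
// util.js later in this diff): sources ['one.js', 'two.js'], names
// ['bar', 'baz', 'n'], sourceRoot '/the/root' and the VLQ mappings string
// 'CAAC,IAAI,IAAM,SAAUA,GAClB,OAAOC,IAAID;CCDb,IAAI,IAAM,SAAUE,GAClB,OAAOA'.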
|
||||
|
||||
exports['test that adding a mapping with an empty string name does not break generation'] = function (assert, util) {
|
||||
var map = new SourceMapGenerator({
|
||||
file: 'generated-foo.js',
|
||||
sourceRoot: '.'
|
||||
});
|
||||
|
||||
map.addMapping({
|
||||
generated: { line: 1, column: 1 },
|
||||
source: 'bar.js',
|
||||
original: { line: 1, column: 1 },
|
||||
name: ''
|
||||
});
|
||||
|
||||
assert.doesNotThrow(function () {
|
||||
JSON.parse(map.toString());
|
||||
});
|
||||
};
|
||||
|
||||
exports['test that source content can be set'] = function (assert, util) {
|
||||
var map = new SourceMapGenerator({
|
||||
file: 'min.js',
|
||||
sourceRoot: '/the/root'
|
||||
});
|
||||
map.addMapping({
|
||||
generated: { line: 1, column: 1 },
|
||||
original: { line: 1, column: 1 },
|
||||
source: 'one.js'
|
||||
});
|
||||
map.addMapping({
|
||||
generated: { line: 2, column: 1 },
|
||||
original: { line: 1, column: 1 },
|
||||
source: 'two.js'
|
||||
});
|
||||
map.setSourceContent('one.js', 'one file content');
|
||||
|
||||
map = JSON.parse(map.toString());
|
||||
assert.equal(map.sources[0], 'one.js');
|
||||
assert.equal(map.sources[1], 'two.js');
|
||||
assert.equal(map.sourcesContent[0], 'one file content');
|
||||
assert.equal(map.sourcesContent[1], null);
|
||||
};
|
||||
|
||||
exports['test .fromSourceMap'] = function (assert, util) {
|
||||
var map = SourceMapGenerator.fromSourceMap(new SourceMapConsumer(util.testMap));
|
||||
util.assertEqualMaps(assert, map.toJSON(), util.testMap);
|
||||
};
|
||||
|
||||
exports['test .fromSourceMap with sourcesContent'] = function (assert, util) {
|
||||
var map = SourceMapGenerator.fromSourceMap(
|
||||
new SourceMapConsumer(util.testMapWithSourcesContent));
|
||||
util.assertEqualMaps(assert, map.toJSON(), util.testMapWithSourcesContent);
|
||||
};
|
||||
|
||||
exports['test applySourceMap'] = function (assert, util) {
|
||||
var node = new SourceNode(null, null, null, [
|
||||
new SourceNode(2, 0, 'fileX', 'lineX2\n'),
|
||||
'genA1\n',
|
||||
new SourceNode(2, 0, 'fileY', 'lineY2\n'),
|
||||
'genA2\n',
|
||||
new SourceNode(1, 0, 'fileX', 'lineX1\n'),
|
||||
'genA3\n',
|
||||
new SourceNode(1, 0, 'fileY', 'lineY1\n')
|
||||
]);
|
||||
var mapStep1 = node.toStringWithSourceMap({
|
||||
file: 'fileA'
|
||||
}).map;
|
||||
mapStep1.setSourceContent('fileX', 'lineX1\nlineX2\n');
|
||||
mapStep1 = mapStep1.toJSON();
|
||||
|
||||
node = new SourceNode(null, null, null, [
|
||||
'gen1\n',
|
||||
new SourceNode(1, 0, 'fileA', 'lineA1\n'),
|
||||
new SourceNode(2, 0, 'fileA', 'lineA2\n'),
|
||||
new SourceNode(3, 0, 'fileA', 'lineA3\n'),
|
||||
new SourceNode(4, 0, 'fileA', 'lineA4\n'),
|
||||
new SourceNode(1, 0, 'fileB', 'lineB1\n'),
|
||||
new SourceNode(2, 0, 'fileB', 'lineB2\n'),
|
||||
'gen2\n'
|
||||
]);
|
||||
var mapStep2 = node.toStringWithSourceMap({
|
||||
file: 'fileGen'
|
||||
}).map;
|
||||
mapStep2.setSourceContent('fileB', 'lineB1\nlineB2\n');
|
||||
mapStep2 = mapStep2.toJSON();
|
||||
|
||||
node = new SourceNode(null, null, null, [
|
||||
'gen1\n',
|
||||
new SourceNode(2, 0, 'fileX', 'lineA1\n'),
|
||||
new SourceNode(2, 0, 'fileA', 'lineA2\n'),
|
||||
new SourceNode(2, 0, 'fileY', 'lineA3\n'),
|
||||
new SourceNode(4, 0, 'fileA', 'lineA4\n'),
|
||||
new SourceNode(1, 0, 'fileB', 'lineB1\n'),
|
||||
new SourceNode(2, 0, 'fileB', 'lineB2\n'),
|
||||
'gen2\n'
|
||||
]);
|
||||
var expectedMap = node.toStringWithSourceMap({
|
||||
file: 'fileGen'
|
||||
}).map;
|
||||
expectedMap.setSourceContent('fileX', 'lineX1\nlineX2\n');
|
||||
expectedMap.setSourceContent('fileB', 'lineB1\nlineB2\n');
|
||||
expectedMap = expectedMap.toJSON();
|
||||
|
||||
// apply source map "mapStep1" to "mapStep2"
|
||||
var generator = SourceMapGenerator.fromSourceMap(new SourceMapConsumer(mapStep2));
|
||||
generator.applySourceMap(new SourceMapConsumer(mapStep1));
|
||||
var actualMap = generator.toJSON();
|
||||
|
||||
util.assertEqualMaps(assert, actualMap, expectedMap);
|
||||
};
|
||||
|
||||
exports['test applySourceMap throws when file is missing'] = function (assert, util) {
|
||||
var map = new SourceMapGenerator({
|
||||
file: 'test.js'
|
||||
});
|
||||
var map2 = new SourceMapGenerator();
|
||||
assert.throws(function() {
|
||||
map.applySourceMap(new SourceMapConsumer(map2.toJSON()));
|
||||
});
|
||||
};
|
||||
|
||||
exports['test the two additional parameters of applySourceMap'] = function (assert, util) {
|
||||
// Assume the following directory structure:
|
||||
//
|
||||
// http://foo.org/
|
||||
// bar.coffee
|
||||
// app/
|
||||
// coffee/
|
||||
// foo.coffee
|
||||
// temp/
|
||||
// bundle.js
|
||||
// temp_maps/
|
||||
// bundle.js.map
|
||||
// public/
|
||||
// bundle.min.js
|
||||
// bundle.min.js.map
|
||||
//
|
||||
// http://www.example.com/
|
||||
// baz.coffee
|
||||
|
||||
var bundleMap = new SourceMapGenerator({
|
||||
file: 'bundle.js'
|
||||
});
|
||||
bundleMap.addMapping({
|
||||
generated: { line: 3, column: 3 },
|
||||
original: { line: 2, column: 2 },
|
||||
source: '../../coffee/foo.coffee'
|
||||
});
|
||||
bundleMap.setSourceContent('../../coffee/foo.coffee', 'foo coffee');
|
||||
bundleMap.addMapping({
|
||||
generated: { line: 13, column: 13 },
|
||||
original: { line: 12, column: 12 },
|
||||
source: '/bar.coffee'
|
||||
});
|
||||
bundleMap.setSourceContent('/bar.coffee', 'bar coffee');
|
||||
bundleMap.addMapping({
|
||||
generated: { line: 23, column: 23 },
|
||||
original: { line: 22, column: 22 },
|
||||
source: 'http://www.example.com/baz.coffee'
|
||||
});
|
||||
bundleMap.setSourceContent(
|
||||
'http://www.example.com/baz.coffee',
|
||||
'baz coffee'
|
||||
);
|
||||
bundleMap = new SourceMapConsumer(bundleMap.toJSON());
|
||||
|
||||
var minifiedMap = new SourceMapGenerator({
|
||||
file: 'bundle.min.js',
|
||||
sourceRoot: '..'
|
||||
});
|
||||
minifiedMap.addMapping({
|
||||
generated: { line: 1, column: 1 },
|
||||
original: { line: 3, column: 3 },
|
||||
source: 'temp/bundle.js'
|
||||
});
|
||||
minifiedMap.addMapping({
|
||||
generated: { line: 11, column: 11 },
|
||||
original: { line: 13, column: 13 },
|
||||
source: 'temp/bundle.js'
|
||||
});
|
||||
minifiedMap.addMapping({
|
||||
generated: { line: 21, column: 21 },
|
||||
original: { line: 23, column: 23 },
|
||||
source: 'temp/bundle.js'
|
||||
});
|
||||
minifiedMap = new SourceMapConsumer(minifiedMap.toJSON());
|
||||
|
||||
var expectedMap = function (sources) {
|
||||
var map = new SourceMapGenerator({
|
||||
file: 'bundle.min.js',
|
||||
sourceRoot: '..'
|
||||
});
|
||||
map.addMapping({
|
||||
generated: { line: 1, column: 1 },
|
||||
original: { line: 2, column: 2 },
|
||||
source: sources[0]
|
||||
});
|
||||
map.setSourceContent(sources[0], 'foo coffee');
|
||||
map.addMapping({
|
||||
generated: { line: 11, column: 11 },
|
||||
original: { line: 12, column: 12 },
|
||||
source: sources[1]
|
||||
});
|
||||
map.setSourceContent(sources[1], 'bar coffee');
|
||||
map.addMapping({
|
||||
generated: { line: 21, column: 21 },
|
||||
original: { line: 22, column: 22 },
|
||||
source: sources[2]
|
||||
});
|
||||
map.setSourceContent(sources[2], 'baz coffee');
|
||||
return map.toJSON();
|
||||
}
|
||||
|
||||
var actualMap = function (aSourceMapPath) {
|
||||
var map = SourceMapGenerator.fromSourceMap(minifiedMap);
|
||||
// Note that relying on `bundleMap.file` (which is simply 'bundle.js')
|
||||
// instead of supplying the second parameter wouldn't work here.
|
||||
map.applySourceMap(bundleMap, '../temp/bundle.js', aSourceMapPath);
|
||||
return map.toJSON();
|
||||
}
|
||||
|
||||
util.assertEqualMaps(assert, actualMap('../temp/temp_maps'), expectedMap([
|
||||
'coffee/foo.coffee',
|
||||
'/bar.coffee',
|
||||
'http://www.example.com/baz.coffee'
|
||||
]));
|
||||
|
||||
util.assertEqualMaps(assert, actualMap('/app/temp/temp_maps'), expectedMap([
|
||||
'/app/coffee/foo.coffee',
|
||||
'/bar.coffee',
|
||||
'http://www.example.com/baz.coffee'
|
||||
]));
|
||||
|
||||
util.assertEqualMaps(assert, actualMap('http://foo.org/app/temp/temp_maps'), expectedMap([
|
||||
'http://foo.org/app/coffee/foo.coffee',
|
||||
'http://foo.org/bar.coffee',
|
||||
'http://www.example.com/baz.coffee'
|
||||
]));
|
||||
|
||||
// If the third parameter is omitted or set to the current working
|
||||
// directory we get incorrect source paths:
|
||||
|
||||
util.assertEqualMaps(assert, actualMap(), expectedMap([
|
||||
'../coffee/foo.coffee',
|
||||
'/bar.coffee',
|
||||
'http://www.example.com/baz.coffee'
|
||||
]));
|
||||
|
||||
util.assertEqualMaps(assert, actualMap(''), expectedMap([
|
||||
'../coffee/foo.coffee',
|
||||
'/bar.coffee',
|
||||
'http://www.example.com/baz.coffee'
|
||||
]));
|
||||
|
||||
util.assertEqualMaps(assert, actualMap('.'), expectedMap([
|
||||
'../coffee/foo.coffee',
|
||||
'/bar.coffee',
|
||||
'http://www.example.com/baz.coffee'
|
||||
]));
|
||||
|
||||
util.assertEqualMaps(assert, actualMap('./'), expectedMap([
|
||||
'../coffee/foo.coffee',
|
||||
'/bar.coffee',
|
||||
'http://www.example.com/baz.coffee'
|
||||
]));
|
||||
};
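
// For reference, the "two additional parameters" exercised above are the
// optional second and third arguments of applySourceMap(consumer, sourceFile,
// sourceMapPath): `sourceFile` names the source within this map that the
// applied map describes (needed here because bundleMap.file alone would not
// match 'temp/bundle.js'), and `sourceMapPath` is the directory the applied
// source map would live in, relative to which its source paths are re-resolved.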
|
||||
|
||||
exports['test applySourceMap name handling'] = function (assert, util) {
|
||||
// Imagine some CoffeeScript code being compiled into JavaScript and then
|
||||
// minified.
|
||||
|
||||
var assertName = function(coffeeName, jsName, expectedName) {
|
||||
var minifiedMap = new SourceMapGenerator({
|
||||
file: 'test.js.min'
|
||||
});
|
||||
minifiedMap.addMapping({
|
||||
generated: { line: 1, column: 4 },
|
||||
original: { line: 1, column: 4 },
|
||||
source: 'test.js',
|
||||
name: jsName
|
||||
});
|
||||
|
||||
var coffeeMap = new SourceMapGenerator({
|
||||
file: 'test.js'
|
||||
});
|
||||
coffeeMap.addMapping({
|
||||
generated: { line: 1, column: 4 },
|
||||
original: { line: 1, column: 0 },
|
||||
source: 'test.coffee',
|
||||
name: coffeeName
|
||||
});
|
||||
|
||||
minifiedMap.applySourceMap(new SourceMapConsumer(coffeeMap.toJSON()));
|
||||
|
||||
new SourceMapConsumer(minifiedMap.toJSON()).eachMapping(function(mapping) {
|
||||
assert.equal(mapping.name, expectedName);
|
||||
});
|
||||
};
|
||||
|
||||
// `foo = 1` -> `var foo = 1;` -> `var a=1`
|
||||
// CoffeeScript doesn’t rename variables, so there’s no need for it to
|
||||
// provide names in its source maps. Minifiers do rename variables and
|
||||
// therefore do provide names in their source maps. So that name should be
|
||||
// retained if the original map lacks names.
|
||||
assertName(null, 'foo', 'foo');
|
||||
|
||||
// `foo = 1` -> `var coffee$foo = 1;` -> `var a=1`
|
||||
// Imagine that CoffeeScript prefixed all variables with `coffee$`. Even
|
||||
// though the minifier then also provides a name, the original name is
|
||||
// what corresponds to the source.
|
||||
assertName('foo', 'coffee$foo', 'foo');
|
||||
|
||||
// `foo = 1` -> `var coffee$foo = 1;` -> `var coffee$foo=1`
|
||||
// Minifiers can turn off variable mangling. Then there’s no need to
|
||||
// provide names in the source map, but the names from the original map are
|
||||
// still needed.
|
||||
assertName('foo', null, 'foo');
|
||||
|
||||
// `foo = 1` -> `var foo = 1;` -> `var foo=1`
|
||||
// No renaming at all.
|
||||
assertName(null, null, null);
|
||||
};
|
||||
|
||||
exports['test sorting with duplicate generated mappings'] = function (assert, util) {
|
||||
var map = new SourceMapGenerator({
|
||||
file: 'test.js'
|
||||
});
|
||||
map.addMapping({
|
||||
generated: { line: 3, column: 0 },
|
||||
original: { line: 2, column: 0 },
|
||||
source: 'a.js'
|
||||
});
|
||||
map.addMapping({
|
||||
generated: { line: 2, column: 0 }
|
||||
});
|
||||
map.addMapping({
|
||||
generated: { line: 2, column: 0 }
|
||||
});
|
||||
map.addMapping({
|
||||
generated: { line: 1, column: 0 },
|
||||
original: { line: 1, column: 0 },
|
||||
source: 'a.js'
|
||||
});
|
||||
|
||||
util.assertEqualMaps(assert, map.toJSON(), {
|
||||
version: 3,
|
||||
file: 'test.js',
|
||||
sources: ['a.js'],
|
||||
names: [],
|
||||
mappings: 'AAAA;A;AACA'
|
||||
});
|
||||
};
|
||||
|
||||
exports['test ignore duplicate mappings.'] = function (assert, util) {
|
||||
var init = { file: 'min.js', sourceRoot: '/the/root' };
|
||||
var map1, map2;
|
||||
|
||||
// null original source location
|
||||
var nullMapping1 = {
|
||||
generated: { line: 1, column: 0 }
|
||||
};
|
||||
var nullMapping2 = {
|
||||
generated: { line: 2, column: 2 }
|
||||
};
|
||||
|
||||
map1 = new SourceMapGenerator(init);
|
||||
map2 = new SourceMapGenerator(init);
|
||||
|
||||
map1.addMapping(nullMapping1);
|
||||
map1.addMapping(nullMapping1);
|
||||
|
||||
map2.addMapping(nullMapping1);
|
||||
|
||||
util.assertEqualMaps(assert, map1.toJSON(), map2.toJSON());
|
||||
|
||||
map1.addMapping(nullMapping2);
|
||||
map1.addMapping(nullMapping1);
|
||||
|
||||
map2.addMapping(nullMapping2);
|
||||
|
||||
util.assertEqualMaps(assert, map1.toJSON(), map2.toJSON());
|
||||
|
||||
// original source location
|
||||
var srcMapping1 = {
|
||||
generated: { line: 1, column: 0 },
|
||||
original: { line: 11, column: 0 },
|
||||
source: 'srcMapping1.js'
|
||||
};
|
||||
var srcMapping2 = {
|
||||
generated: { line: 2, column: 2 },
|
||||
original: { line: 11, column: 0 },
|
||||
source: 'srcMapping2.js'
|
||||
};
|
||||
|
||||
map1 = new SourceMapGenerator(init);
|
||||
map2 = new SourceMapGenerator(init);
|
||||
|
||||
map1.addMapping(srcMapping1);
|
||||
map1.addMapping(srcMapping1);
|
||||
|
||||
map2.addMapping(srcMapping1);
|
||||
|
||||
util.assertEqualMaps(assert, map1.toJSON(), map2.toJSON());
|
||||
|
||||
map1.addMapping(srcMapping2);
|
||||
map1.addMapping(srcMapping1);
|
||||
|
||||
map2.addMapping(srcMapping2);
|
||||
|
||||
util.assertEqualMaps(assert, map1.toJSON(), map2.toJSON());
|
||||
|
||||
// full original source and name information
|
||||
var fullMapping1 = {
|
||||
generated: { line: 1, column: 0 },
|
||||
original: { line: 11, column: 0 },
|
||||
source: 'fullMapping1.js',
|
||||
name: 'fullMapping1'
|
||||
};
|
||||
var fullMapping2 = {
|
||||
generated: { line: 2, column: 2 },
|
||||
original: { line: 11, column: 0 },
|
||||
source: 'fullMapping2.js',
|
||||
name: 'fullMapping2'
|
||||
};
|
||||
|
||||
map1 = new SourceMapGenerator(init);
|
||||
map2 = new SourceMapGenerator(init);
|
||||
|
||||
map1.addMapping(fullMapping1);
|
||||
map1.addMapping(fullMapping1);
|
||||
|
||||
map2.addMapping(fullMapping1);
|
||||
|
||||
util.assertEqualMaps(assert, map1.toJSON(), map2.toJSON());
|
||||
|
||||
map1.addMapping(fullMapping2);
|
||||
map1.addMapping(fullMapping1);
|
||||
|
||||
map2.addMapping(fullMapping2);
|
||||
|
||||
util.assertEqualMaps(assert, map1.toJSON(), map2.toJSON());
|
||||
};
|
||||
|
||||
exports['test github issue #72, check for duplicate names or sources'] = function (assert, util) {
|
||||
var map = new SourceMapGenerator({
|
||||
file: 'test.js'
|
||||
});
|
||||
map.addMapping({
|
||||
generated: { line: 1, column: 1 },
|
||||
original: { line: 2, column: 2 },
|
||||
source: 'a.js',
|
||||
name: 'foo'
|
||||
});
|
||||
map.addMapping({
|
||||
generated: { line: 3, column: 3 },
|
||||
original: { line: 4, column: 4 },
|
||||
source: 'a.js',
|
||||
name: 'foo'
|
||||
});
|
||||
util.assertEqualMaps(assert, map.toJSON(), {
|
||||
version: 3,
|
||||
file: 'test.js',
|
||||
sources: ['a.js'],
|
||||
names: ['foo'],
|
||||
mappings: 'CACEA;;GAEEA'
|
||||
});
|
||||
};
|
||||
|
||||
exports['test setting sourcesContent to null when already null'] = function (assert, util) {
|
||||
var smg = new SourceMapGenerator({ file: "foo.js" });
|
||||
assert.doesNotThrow(function() {
|
||||
smg.setSourceContent("bar.js", null);
|
||||
});
|
||||
};
|
||||
|
||||
});
612
node_modules/transformers/node_modules/source-map/test/source-map/test-source-node.js
generated
vendored
Normal file
@@ -0,0 +1,612 @@
/* -*- Mode: js; js-indent-level: 2; -*- */
/*
 * Copyright 2011 Mozilla Foundation and contributors
 * Licensed under the New BSD license. See LICENSE or:
 * http://opensource.org/licenses/BSD-3-Clause
 */
if (typeof define !== 'function') {
  var define = require('amdefine')(module, require);
}
define(function (require, exports, module) {

  var SourceMapGenerator = require('../../lib/source-map/source-map-generator').SourceMapGenerator;
  var SourceMapConsumer = require('../../lib/source-map/source-map-consumer').SourceMapConsumer;
  var SourceNode = require('../../lib/source-map/source-node').SourceNode;

  function forEachNewline(fn) {
    return function (assert, util) {
      ['\n', '\r\n'].forEach(fn.bind(null, assert, util));
    }
  }

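  // forEachNewline (defined above) runs a test body once per line-ending
  // style: fn.bind(null, assert, util) is invoked by forEach with '\n' and
  // then '\r\n', so the wrapped tests receive the newline string as their
  // third argument (nl) and use it when building their expected output.
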
exports['test .add()'] = function (assert, util) {
|
||||
var node = new SourceNode(null, null, null);
|
||||
|
||||
// Adding a string works.
|
||||
node.add('function noop() {}');
|
||||
|
||||
// Adding another source node works.
|
||||
node.add(new SourceNode(null, null, null));
|
||||
|
||||
// Adding an array works.
|
||||
node.add(['function foo() {',
|
||||
new SourceNode(null, null, null,
|
||||
'return 10;'),
|
||||
'}']);
|
||||
|
||||
// Adding other stuff doesn't.
|
||||
assert.throws(function () {
|
||||
node.add({});
|
||||
});
|
||||
assert.throws(function () {
|
||||
node.add(function () {});
|
||||
});
|
||||
};
|
||||
|
||||
exports['test .prepend()'] = function (assert, util) {
|
||||
var node = new SourceNode(null, null, null);
|
||||
|
||||
// Prepending a string works.
|
||||
node.prepend('function noop() {}');
|
||||
assert.equal(node.children[0], 'function noop() {}');
|
||||
assert.equal(node.children.length, 1);
|
||||
|
||||
// Prepending another source node works.
|
||||
node.prepend(new SourceNode(null, null, null));
|
||||
assert.equal(node.children[0], '');
|
||||
assert.equal(node.children[1], 'function noop() {}');
|
||||
assert.equal(node.children.length, 2);
|
||||
|
||||
// Prepending an array works.
|
||||
node.prepend(['function foo() {',
|
||||
new SourceNode(null, null, null,
|
||||
'return 10;'),
|
||||
'}']);
|
||||
assert.equal(node.children[0], 'function foo() {');
|
||||
assert.equal(node.children[1], 'return 10;');
|
||||
assert.equal(node.children[2], '}');
|
||||
assert.equal(node.children[3], '');
|
||||
assert.equal(node.children[4], 'function noop() {}');
|
||||
assert.equal(node.children.length, 5);
|
||||
|
||||
// Prepending other stuff doesn't.
|
||||
assert.throws(function () {
|
||||
node.prepend({});
|
||||
});
|
||||
assert.throws(function () {
|
||||
node.prepend(function () {});
|
||||
});
|
||||
};
|
||||
|
||||
exports['test .toString()'] = function (assert, util) {
|
||||
assert.equal((new SourceNode(null, null, null,
|
||||
['function foo() {',
|
||||
new SourceNode(null, null, null, 'return 10;'),
|
||||
'}'])).toString(),
|
||||
'function foo() {return 10;}');
|
||||
};
|
||||
|
||||
exports['test .join()'] = function (assert, util) {
|
||||
assert.equal((new SourceNode(null, null, null,
|
||||
['a', 'b', 'c', 'd'])).join(', ').toString(),
|
||||
'a, b, c, d');
|
||||
};
|
||||
|
||||
exports['test .walk()'] = function (assert, util) {
|
||||
var node = new SourceNode(null, null, null,
|
||||
['(function () {\n',
|
||||
' ', new SourceNode(1, 0, 'a.js', ['someCall()']), ';\n',
|
||||
' ', new SourceNode(2, 0, 'b.js', ['if (foo) bar()']), ';\n',
|
||||
'}());']);
|
||||
var expected = [
|
||||
{ str: '(function () {\n', source: null, line: null, column: null },
|
||||
{ str: ' ', source: null, line: null, column: null },
|
||||
{ str: 'someCall()', source: 'a.js', line: 1, column: 0 },
|
||||
{ str: ';\n', source: null, line: null, column: null },
|
||||
{ str: ' ', source: null, line: null, column: null },
|
||||
{ str: 'if (foo) bar()', source: 'b.js', line: 2, column: 0 },
|
||||
{ str: ';\n', source: null, line: null, column: null },
|
||||
{ str: '}());', source: null, line: null, column: null },
|
||||
];
|
||||
var i = 0;
|
||||
node.walk(function (chunk, loc) {
|
||||
assert.equal(expected[i].str, chunk);
|
||||
assert.equal(expected[i].source, loc.source);
|
||||
assert.equal(expected[i].line, loc.line);
|
||||
assert.equal(expected[i].column, loc.column);
|
||||
i++;
|
||||
});
|
||||
};
|
||||
|
||||
exports['test .replaceRight'] = function (assert, util) {
|
||||
var node;
|
||||
|
||||
// Not nested
|
||||
node = new SourceNode(null, null, null, 'hello world');
|
||||
node.replaceRight(/world/, 'universe');
|
||||
assert.equal(node.toString(), 'hello universe');
|
||||
|
||||
// Nested
|
||||
node = new SourceNode(null, null, null,
|
||||
[new SourceNode(null, null, null, 'hey sexy mama, '),
|
||||
new SourceNode(null, null, null, 'want to kill all humans?')]);
|
||||
node.replaceRight(/kill all humans/, 'watch Futurama');
|
||||
assert.equal(node.toString(), 'hey sexy mama, want to watch Futurama?');
|
||||
};
|
||||
|
||||
exports['test .toStringWithSourceMap()'] = forEachNewline(function (assert, util, nl) {
|
||||
var node = new SourceNode(null, null, null,
|
||||
['(function () {' + nl,
|
||||
' ',
|
||||
new SourceNode(1, 0, 'a.js', 'someCall', 'originalCall'),
|
||||
new SourceNode(1, 8, 'a.js', '()'),
|
||||
';' + nl,
|
||||
' ', new SourceNode(2, 0, 'b.js', ['if (foo) bar()']), ';' + nl,
|
||||
'}());']);
|
||||
var result = node.toStringWithSourceMap({
|
||||
file: 'foo.js'
|
||||
});
|
||||
|
||||
assert.equal(result.code, [
|
||||
'(function () {',
|
||||
' someCall();',
|
||||
' if (foo) bar();',
|
||||
'}());'
|
||||
].join(nl));
|
||||
|
||||
var map = result.map;
|
||||
var mapWithoutOptions = node.toStringWithSourceMap().map;
|
||||
|
||||
assert.ok(map instanceof SourceMapGenerator, 'map instanceof SourceMapGenerator');
|
||||
assert.ok(mapWithoutOptions instanceof SourceMapGenerator, 'mapWithoutOptions instanceof SourceMapGenerator');
|
||||
assert.ok(!('file' in mapWithoutOptions));
|
||||
mapWithoutOptions._file = 'foo.js';
|
||||
util.assertEqualMaps(assert, map.toJSON(), mapWithoutOptions.toJSON());
|
||||
|
||||
map = new SourceMapConsumer(map.toString());
|
||||
|
||||
var actual;
|
||||
|
||||
actual = map.originalPositionFor({
|
||||
line: 1,
|
||||
column: 4
|
||||
});
|
||||
assert.equal(actual.source, null);
|
||||
assert.equal(actual.line, null);
|
||||
assert.equal(actual.column, null);
|
||||
|
||||
actual = map.originalPositionFor({
|
||||
line: 2,
|
||||
column: 2
|
||||
});
|
||||
assert.equal(actual.source, 'a.js');
|
||||
assert.equal(actual.line, 1);
|
||||
assert.equal(actual.column, 0);
|
||||
assert.equal(actual.name, 'originalCall');
|
||||
|
||||
actual = map.originalPositionFor({
|
||||
line: 3,
|
||||
column: 2
|
||||
});
|
||||
assert.equal(actual.source, 'b.js');
|
||||
assert.equal(actual.line, 2);
|
||||
assert.equal(actual.column, 0);
|
||||
|
||||
actual = map.originalPositionFor({
|
||||
line: 3,
|
||||
column: 16
|
||||
});
|
||||
assert.equal(actual.source, null);
|
||||
assert.equal(actual.line, null);
|
||||
assert.equal(actual.column, null);
|
||||
|
||||
actual = map.originalPositionFor({
|
||||
line: 4,
|
||||
column: 2
|
||||
});
|
||||
assert.equal(actual.source, null);
|
||||
assert.equal(actual.line, null);
|
||||
assert.equal(actual.column, null);
|
||||
});
|
||||
|
||||
exports['test .fromStringWithSourceMap()'] = forEachNewline(function (assert, util, nl) {
|
||||
var testCode = util.testGeneratedCode.replace(/\n/g, nl);
|
||||
var node = SourceNode.fromStringWithSourceMap(
|
||||
testCode,
|
||||
new SourceMapConsumer(util.testMap));
|
||||
|
||||
var result = node.toStringWithSourceMap({
|
||||
file: 'min.js'
|
||||
});
|
||||
var map = result.map;
|
||||
var code = result.code;
|
||||
|
||||
assert.equal(code, testCode);
|
||||
assert.ok(map instanceof SourceMapGenerator, 'map instanceof SourceMapGenerator');
|
||||
map = map.toJSON();
|
||||
assert.equal(map.version, util.testMap.version);
|
||||
assert.equal(map.file, util.testMap.file);
|
||||
assert.equal(map.mappings, util.testMap.mappings);
|
||||
});
|
||||
|
||||
exports['test .fromStringWithSourceMap() empty map'] = forEachNewline(function (assert, util, nl) {
|
||||
var node = SourceNode.fromStringWithSourceMap(
|
||||
util.testGeneratedCode.replace(/\n/g, nl),
|
||||
new SourceMapConsumer(util.emptyMap));
|
||||
var result = node.toStringWithSourceMap({
|
||||
file: 'min.js'
|
||||
});
|
||||
var map = result.map;
|
||||
var code = result.code;
|
||||
|
||||
assert.equal(code, util.testGeneratedCode.replace(/\n/g, nl));
|
||||
assert.ok(map instanceof SourceMapGenerator, 'map instanceof SourceMapGenerator');
|
||||
map = map.toJSON();
|
||||
assert.equal(map.version, util.emptyMap.version);
|
||||
assert.equal(map.file, util.emptyMap.file);
|
||||
assert.equal(map.mappings.length, util.emptyMap.mappings.length);
|
||||
assert.equal(map.mappings, util.emptyMap.mappings);
|
||||
});
|
||||
|
||||
exports['test .fromStringWithSourceMap() complex version'] = forEachNewline(function (assert, util, nl) {
|
||||
var input = new SourceNode(null, null, null, [
|
||||
"(function() {" + nl,
|
||||
" var Test = {};" + nl,
|
||||
" ", new SourceNode(1, 0, "a.js", "Test.A = { value: 1234 };" + nl),
|
||||
" ", new SourceNode(2, 0, "a.js", "Test.A.x = 'xyz';"), nl,
|
||||
"}());" + nl,
|
||||
"/* Generated Source */"]);
|
||||
input = input.toStringWithSourceMap({
|
||||
file: 'foo.js'
|
||||
});
|
||||
|
||||
var node = SourceNode.fromStringWithSourceMap(
|
||||
input.code,
|
||||
new SourceMapConsumer(input.map.toString()));
|
||||
|
||||
var result = node.toStringWithSourceMap({
|
||||
file: 'foo.js'
|
||||
});
|
||||
var map = result.map;
|
||||
var code = result.code;
|
||||
|
||||
assert.equal(code, input.code);
|
||||
assert.ok(map instanceof SourceMapGenerator, 'map instanceof SourceMapGenerator');
|
||||
map = map.toJSON();
|
||||
var inputMap = input.map.toJSON();
|
||||
util.assertEqualMaps(assert, map, inputMap);
|
||||
});
|
||||
|
||||
exports['test .fromStringWithSourceMap() third argument'] = function (assert, util) {
|
||||
// Assume the following directory structure:
|
||||
//
|
||||
// http://foo.org/
|
||||
// bar.coffee
|
||||
// app/
|
||||
// coffee/
|
||||
// foo.coffee
|
||||
// coffeeBundle.js # Made from {foo,bar,baz}.coffee
|
||||
// maps/
|
||||
// coffeeBundle.js.map
|
||||
// js/
|
||||
// foo.js
|
||||
// public/
|
||||
// app.js # Made from {foo,coffeeBundle}.js
|
||||
// app.js.map
|
||||
//
|
||||
// http://www.example.com/
|
||||
// baz.coffee
|
||||
|
||||
var coffeeBundle = new SourceNode(1, 0, 'foo.coffee', 'foo(coffee);\n');
|
||||
coffeeBundle.setSourceContent('foo.coffee', 'foo coffee');
|
||||
coffeeBundle.add(new SourceNode(2, 0, '/bar.coffee', 'bar(coffee);\n'));
|
||||
coffeeBundle.add(new SourceNode(3, 0, 'http://www.example.com/baz.coffee', 'baz(coffee);'));
|
||||
coffeeBundle = coffeeBundle.toStringWithSourceMap({
|
||||
file: 'foo.js',
|
||||
sourceRoot: '..'
|
||||
});
|
||||
|
||||
var foo = new SourceNode(1, 0, 'foo.js', 'foo(js);');
|
||||
|
||||
var test = function(relativePath, expectedSources) {
|
||||
var app = new SourceNode();
|
||||
app.add(SourceNode.fromStringWithSourceMap(
|
||||
coffeeBundle.code,
|
||||
new SourceMapConsumer(coffeeBundle.map.toString()),
|
||||
relativePath));
|
||||
app.add(foo);
|
||||
var i = 0;
|
||||
app.walk(function (chunk, loc) {
|
||||
assert.equal(loc.source, expectedSources[i]);
|
||||
i++;
|
||||
});
|
||||
app.walkSourceContents(function (sourceFile, sourceContent) {
|
||||
assert.equal(sourceFile, expectedSources[0]);
|
||||
assert.equal(sourceContent, 'foo coffee');
|
||||
})
|
||||
};
|
||||
|
||||
test('../coffee/maps', [
|
||||
'../coffee/foo.coffee',
|
||||
'/bar.coffee',
|
||||
'http://www.example.com/baz.coffee',
|
||||
'foo.js'
|
||||
]);
|
||||
|
||||
// If the third parameter is omitted or set to the current working
|
||||
// directory we get incorrect source paths:
|
||||
|
||||
test(undefined, [
|
||||
'../foo.coffee',
|
||||
'/bar.coffee',
|
||||
'http://www.example.com/baz.coffee',
|
||||
'foo.js'
|
||||
]);
|
||||
|
||||
test('', [
|
||||
'../foo.coffee',
|
||||
'/bar.coffee',
|
||||
'http://www.example.com/baz.coffee',
|
||||
'foo.js'
|
||||
]);
|
||||
|
||||
test('.', [
|
||||
'../foo.coffee',
|
||||
'/bar.coffee',
|
||||
'http://www.example.com/baz.coffee',
|
||||
'foo.js'
|
||||
]);
|
||||
|
||||
test('./', [
|
||||
'../foo.coffee',
|
||||
'/bar.coffee',
|
||||
'http://www.example.com/baz.coffee',
|
||||
'foo.js'
|
||||
]);
|
||||
};
|
||||
|
||||
exports['test .toStringWithSourceMap() merging duplicate mappings'] = forEachNewline(function (assert, util, nl) {
|
||||
var input = new SourceNode(null, null, null, [
|
||||
new SourceNode(1, 0, "a.js", "(function"),
|
||||
new SourceNode(1, 0, "a.js", "() {" + nl),
|
||||
" ",
|
||||
new SourceNode(1, 0, "a.js", "var Test = "),
|
||||
new SourceNode(1, 0, "b.js", "{};" + nl),
|
||||
new SourceNode(2, 0, "b.js", "Test"),
|
||||
new SourceNode(2, 0, "b.js", ".A", "A"),
|
||||
new SourceNode(2, 20, "b.js", " = { value: ", "A"),
|
||||
"1234",
|
||||
new SourceNode(2, 40, "b.js", " };" + nl, "A"),
|
||||
"}());" + nl,
|
||||
"/* Generated Source */"
|
||||
]);
|
||||
input = input.toStringWithSourceMap({
|
||||
file: 'foo.js'
|
||||
});
|
||||
|
||||
assert.equal(input.code, [
|
||||
"(function() {",
|
||||
" var Test = {};",
|
||||
"Test.A = { value: 1234 };",
|
||||
"}());",
|
||||
"/* Generated Source */"
|
||||
].join(nl))
|
||||
|
||||
var correctMap = new SourceMapGenerator({
|
||||
file: 'foo.js'
|
||||
});
|
||||
correctMap.addMapping({
|
||||
generated: { line: 1, column: 0 },
|
||||
source: 'a.js',
|
||||
original: { line: 1, column: 0 }
|
||||
});
|
||||
// Here is no need for a empty mapping,
|
||||
// because mappings ends at eol
|
||||
correctMap.addMapping({
|
||||
generated: { line: 2, column: 2 },
|
||||
source: 'a.js',
|
||||
original: { line: 1, column: 0 }
|
||||
});
|
||||
correctMap.addMapping({
|
||||
generated: { line: 2, column: 13 },
|
||||
source: 'b.js',
|
||||
original: { line: 1, column: 0 }
|
||||
});
|
||||
correctMap.addMapping({
|
||||
generated: { line: 3, column: 0 },
|
||||
source: 'b.js',
|
||||
original: { line: 2, column: 0 }
|
||||
});
|
||||
correctMap.addMapping({
|
||||
generated: { line: 3, column: 4 },
|
||||
source: 'b.js',
|
||||
name: 'A',
|
||||
original: { line: 2, column: 0 }
|
||||
});
|
||||
correctMap.addMapping({
|
||||
generated: { line: 3, column: 6 },
|
||||
source: 'b.js',
|
||||
name: 'A',
|
||||
original: { line: 2, column: 20 }
|
||||
});
|
||||
// This empty mapping is required,
|
||||
// because there is a hole in the middle of the line
|
||||
correctMap.addMapping({
|
||||
generated: { line: 3, column: 18 }
|
||||
});
|
||||
correctMap.addMapping({
|
||||
generated: { line: 3, column: 22 },
|
||||
source: 'b.js',
|
||||
name: 'A',
|
||||
original: { line: 2, column: 40 }
|
||||
});
|
||||
// Here is no need for a empty mapping,
|
||||
// because mappings ends at eol
|
||||
|
||||
var inputMap = input.map.toJSON();
|
||||
correctMap = correctMap.toJSON();
|
||||
util.assertEqualMaps(assert, inputMap, correctMap);
|
||||
});
|
||||
|
||||
exports['test .toStringWithSourceMap() multi-line SourceNodes'] = forEachNewline(function (assert, util, nl) {
|
||||
var input = new SourceNode(null, null, null, [
|
||||
new SourceNode(1, 0, "a.js", "(function() {" + nl + "var nextLine = 1;" + nl + "anotherLine();" + nl),
|
||||
new SourceNode(2, 2, "b.js", "Test.call(this, 123);" + nl),
|
||||
new SourceNode(2, 2, "b.js", "this['stuff'] = 'v';" + nl),
|
||||
new SourceNode(2, 2, "b.js", "anotherLine();" + nl),
|
||||
"/*" + nl + "Generated" + nl + "Source" + nl + "*/" + nl,
|
||||
new SourceNode(3, 4, "c.js", "anotherLine();" + nl),
|
||||
"/*" + nl + "Generated" + nl + "Source" + nl + "*/"
|
||||
]);
|
||||
input = input.toStringWithSourceMap({
|
||||
file: 'foo.js'
|
||||
});
|
||||
|
||||
assert.equal(input.code, [
|
||||
"(function() {",
|
||||
"var nextLine = 1;",
|
||||
"anotherLine();",
|
||||
"Test.call(this, 123);",
|
||||
"this['stuff'] = 'v';",
|
||||
"anotherLine();",
|
||||
"/*",
|
||||
"Generated",
|
||||
"Source",
|
||||
"*/",
|
||||
"anotherLine();",
|
||||
"/*",
|
||||
"Generated",
|
||||
"Source",
|
||||
"*/"
|
||||
].join(nl));
|
||||
|
||||
var correctMap = new SourceMapGenerator({
|
||||
file: 'foo.js'
|
||||
});
|
||||
correctMap.addMapping({
|
||||
generated: { line: 1, column: 0 },
|
||||
source: 'a.js',
|
||||
original: { line: 1, column: 0 }
|
||||
});
|
||||
correctMap.addMapping({
|
||||
generated: { line: 2, column: 0 },
|
||||
source: 'a.js',
|
||||
original: { line: 1, column: 0 }
|
||||
});
|
||||
correctMap.addMapping({
|
||||
generated: { line: 3, column: 0 },
|
||||
source: 'a.js',
|
||||
original: { line: 1, column: 0 }
|
||||
});
|
||||
correctMap.addMapping({
|
||||
generated: { line: 4, column: 0 },
|
||||
source: 'b.js',
|
||||
original: { line: 2, column: 2 }
|
||||
});
|
||||
correctMap.addMapping({
|
||||
generated: { line: 5, column: 0 },
|
||||
source: 'b.js',
|
||||
original: { line: 2, column: 2 }
|
||||
});
|
||||
correctMap.addMapping({
|
||||
generated: { line: 6, column: 0 },
|
||||
source: 'b.js',
|
||||
original: { line: 2, column: 2 }
|
||||
});
|
||||
correctMap.addMapping({
|
||||
generated: { line: 11, column: 0 },
|
||||
source: 'c.js',
|
||||
original: { line: 3, column: 4 }
|
||||
});
|
||||
|
||||
var inputMap = input.map.toJSON();
|
||||
correctMap = correctMap.toJSON();
|
||||
util.assertEqualMaps(assert, inputMap, correctMap);
|
||||
});
|
||||
|
||||
exports['test .toStringWithSourceMap() with empty string'] = function (assert, util) {
|
||||
var node = new SourceNode(1, 0, 'empty.js', '');
|
||||
var result = node.toStringWithSourceMap();
|
||||
assert.equal(result.code, '');
|
||||
};
|
||||
|
||||
exports['test .toStringWithSourceMap() with consecutive newlines'] = forEachNewline(function (assert, util, nl) {
|
||||
var input = new SourceNode(null, null, null, [
|
||||
"/***/" + nl + nl,
|
||||
new SourceNode(1, 0, "a.js", "'use strict';" + nl),
|
||||
new SourceNode(2, 0, "a.js", "a();"),
|
||||
]);
|
||||
input = input.toStringWithSourceMap({
|
||||
file: 'foo.js'
|
||||
});
|
||||
|
||||
assert.equal(input.code, [
|
||||
"/***/",
|
||||
"",
|
||||
"'use strict';",
|
||||
"a();",
|
||||
].join(nl));
|
||||
|
||||
var correctMap = new SourceMapGenerator({
|
||||
file: 'foo.js'
|
||||
});
|
||||
correctMap.addMapping({
|
||||
generated: { line: 3, column: 0 },
|
||||
source: 'a.js',
|
||||
original: { line: 1, column: 0 }
|
||||
});
|
||||
correctMap.addMapping({
|
||||
generated: { line: 4, column: 0 },
|
||||
source: 'a.js',
|
||||
original: { line: 2, column: 0 }
|
||||
});
|
||||
|
||||
var inputMap = input.map.toJSON();
|
||||
correctMap = correctMap.toJSON();
|
||||
util.assertEqualMaps(assert, inputMap, correctMap);
|
||||
});
|
||||
|
||||
exports['test setSourceContent with toStringWithSourceMap'] = function (assert, util) {
|
||||
var aNode = new SourceNode(1, 1, 'a.js', 'a');
|
||||
aNode.setSourceContent('a.js', 'someContent');
|
||||
var node = new SourceNode(null, null, null,
|
||||
['(function () {\n',
|
||||
' ', aNode,
|
||||
' ', new SourceNode(1, 1, 'b.js', 'b'),
|
||||
'}());']);
|
||||
node.setSourceContent('b.js', 'otherContent');
|
||||
var map = node.toStringWithSourceMap({
|
||||
file: 'foo.js'
|
||||
}).map;
|
||||
|
||||
assert.ok(map instanceof SourceMapGenerator, 'map instanceof SourceMapGenerator');
|
||||
map = new SourceMapConsumer(map.toString());
|
||||
|
||||
assert.equal(map.sources.length, 2);
|
||||
assert.equal(map.sources[0], 'a.js');
|
||||
assert.equal(map.sources[1], 'b.js');
|
||||
assert.equal(map.sourcesContent.length, 2);
|
||||
assert.equal(map.sourcesContent[0], 'someContent');
|
||||
assert.equal(map.sourcesContent[1], 'otherContent');
|
||||
};
|
||||
|
||||
exports['test walkSourceContents'] = function (assert, util) {
|
||||
var aNode = new SourceNode(1, 1, 'a.js', 'a');
|
||||
aNode.setSourceContent('a.js', 'someContent');
|
||||
var node = new SourceNode(null, null, null,
|
||||
['(function () {\n',
|
||||
' ', aNode,
|
||||
' ', new SourceNode(1, 1, 'b.js', 'b'),
|
||||
'}());']);
|
||||
node.setSourceContent('b.js', 'otherContent');
|
||||
var results = [];
|
||||
node.walkSourceContents(function (sourceFile, sourceContent) {
|
||||
results.push([sourceFile, sourceContent]);
|
||||
});
|
||||
assert.equal(results.length, 2);
|
||||
assert.equal(results[0][0], 'a.js');
|
||||
assert.equal(results[0][1], 'someContent');
|
||||
assert.equal(results[1][0], 'b.js');
|
||||
assert.equal(results[1][1], 'otherContent');
|
||||
};
|
||||
});
216
node_modules/transformers/node_modules/source-map/test/source-map/test-util.js
generated
vendored
Normal file
@@ -0,0 +1,216 @@
/* -*- Mode: js; js-indent-level: 2; -*- */
/*
 * Copyright 2014 Mozilla Foundation and contributors
 * Licensed under the New BSD license. See LICENSE or:
 * http://opensource.org/licenses/BSD-3-Clause
 */
if (typeof define !== 'function') {
  var define = require('amdefine')(module, require);
}
define(function (require, exports, module) {

  var libUtil = require('../../lib/source-map/util');

exports['test urls'] = function (assert, util) {
|
||||
var assertUrl = function (url) {
|
||||
assert.equal(url, libUtil.urlGenerate(libUtil.urlParse(url)));
|
||||
};
|
||||
assertUrl('http://');
|
||||
assertUrl('http://www.example.com');
|
||||
assertUrl('http://user:pass@www.example.com');
|
||||
assertUrl('http://www.example.com:80');
|
||||
assertUrl('http://www.example.com/');
|
||||
assertUrl('http://www.example.com/foo/bar');
|
||||
assertUrl('http://www.example.com/foo/bar/');
|
||||
assertUrl('http://user:pass@www.example.com:80/foo/bar/');
|
||||
|
||||
assertUrl('//');
|
||||
assertUrl('//www.example.com');
|
||||
assertUrl('file:///www.example.com');
|
||||
|
||||
assert.equal(libUtil.urlParse(''), null);
|
||||
assert.equal(libUtil.urlParse('.'), null);
|
||||
assert.equal(libUtil.urlParse('..'), null);
|
||||
assert.equal(libUtil.urlParse('a'), null);
|
||||
assert.equal(libUtil.urlParse('a/b'), null);
|
||||
assert.equal(libUtil.urlParse('a//b'), null);
|
||||
assert.equal(libUtil.urlParse('/a'), null);
|
||||
assert.equal(libUtil.urlParse('data:foo,bar'), null);
|
||||
};
|
||||
|
||||
exports['test normalize()'] = function (assert, util) {
|
||||
assert.equal(libUtil.normalize('/..'), '/');
|
||||
assert.equal(libUtil.normalize('/../'), '/');
|
||||
assert.equal(libUtil.normalize('/../../../..'), '/');
|
||||
assert.equal(libUtil.normalize('/../../../../a/b/c'), '/a/b/c');
|
||||
assert.equal(libUtil.normalize('/a/b/c/../../../d/../../e'), '/e');
|
||||
|
||||
assert.equal(libUtil.normalize('..'), '..');
|
||||
assert.equal(libUtil.normalize('../'), '../');
|
||||
assert.equal(libUtil.normalize('../../a/'), '../../a/');
|
||||
assert.equal(libUtil.normalize('a/..'), '.');
|
||||
assert.equal(libUtil.normalize('a/../../..'), '../..');
|
||||
|
||||
assert.equal(libUtil.normalize('/.'), '/');
|
||||
assert.equal(libUtil.normalize('/./'), '/');
|
||||
assert.equal(libUtil.normalize('/./././.'), '/');
|
||||
assert.equal(libUtil.normalize('/././././a/b/c'), '/a/b/c');
|
||||
assert.equal(libUtil.normalize('/a/b/c/./././d/././e'), '/a/b/c/d/e');
|
||||
|
||||
assert.equal(libUtil.normalize(''), '.');
|
||||
assert.equal(libUtil.normalize('.'), '.');
|
||||
assert.equal(libUtil.normalize('./'), '.');
|
||||
assert.equal(libUtil.normalize('././a'), 'a');
|
||||
assert.equal(libUtil.normalize('a/./'), 'a/');
|
||||
assert.equal(libUtil.normalize('a/././.'), 'a');
|
||||
|
||||
assert.equal(libUtil.normalize('/a/b//c////d/////'), '/a/b/c/d/');
|
||||
assert.equal(libUtil.normalize('///a/b//c////d/////'), '///a/b/c/d/');
|
||||
assert.equal(libUtil.normalize('a/b//c////d'), 'a/b/c/d');
|
||||
|
||||
assert.equal(libUtil.normalize('.///.././../a/b//./..'), '../../a')
|
||||
|
||||
assert.equal(libUtil.normalize('http://www.example.com'), 'http://www.example.com');
|
||||
assert.equal(libUtil.normalize('http://www.example.com/'), 'http://www.example.com/');
|
||||
assert.equal(libUtil.normalize('http://www.example.com/./..//a/b/c/.././d//'), 'http://www.example.com/a/b/d/');
|
||||
};
|
||||
|
||||
exports['test join()'] = function (assert, util) {
|
||||
assert.equal(libUtil.join('a', 'b'), 'a/b');
|
||||
assert.equal(libUtil.join('a/', 'b'), 'a/b');
|
||||
assert.equal(libUtil.join('a//', 'b'), 'a/b');
|
||||
assert.equal(libUtil.join('a', 'b/'), 'a/b/');
|
||||
assert.equal(libUtil.join('a', 'b//'), 'a/b/');
|
||||
assert.equal(libUtil.join('a/', '/b'), '/b');
|
||||
assert.equal(libUtil.join('a//', '//b'), '//b');
|
||||
|
||||
assert.equal(libUtil.join('a', '..'), '.');
|
||||
assert.equal(libUtil.join('a', '../b'), 'b');
|
||||
assert.equal(libUtil.join('a/b', '../c'), 'a/c');
|
||||
|
||||
assert.equal(libUtil.join('a', '.'), 'a');
|
||||
assert.equal(libUtil.join('a', './b'), 'a/b');
|
||||
assert.equal(libUtil.join('a/b', './c'), 'a/b/c');
|
||||
|
||||
assert.equal(libUtil.join('a', 'http://www.example.com'), 'http://www.example.com');
|
||||
assert.equal(libUtil.join('a', 'data:foo,bar'), 'data:foo,bar');
|
||||
|
||||
|
||||
assert.equal(libUtil.join('', 'b'), 'b');
|
||||
assert.equal(libUtil.join('.', 'b'), 'b');
|
||||
assert.equal(libUtil.join('', 'b/'), 'b/');
|
||||
assert.equal(libUtil.join('.', 'b/'), 'b/');
|
||||
assert.equal(libUtil.join('', 'b//'), 'b/');
|
||||
assert.equal(libUtil.join('.', 'b//'), 'b/');
|
||||
|
||||
assert.equal(libUtil.join('', '..'), '..');
|
||||
assert.equal(libUtil.join('.', '..'), '..');
|
||||
assert.equal(libUtil.join('', '../b'), '../b');
|
||||
assert.equal(libUtil.join('.', '../b'), '../b');
|
||||
|
||||
assert.equal(libUtil.join('', '.'), '.');
|
||||
assert.equal(libUtil.join('.', '.'), '.');
|
||||
assert.equal(libUtil.join('', './b'), 'b');
|
||||
assert.equal(libUtil.join('.', './b'), 'b');
|
||||
|
||||
assert.equal(libUtil.join('', 'http://www.example.com'), 'http://www.example.com');
|
||||
assert.equal(libUtil.join('.', 'http://www.example.com'), 'http://www.example.com');
|
||||
assert.equal(libUtil.join('', 'data:foo,bar'), 'data:foo,bar');
|
||||
assert.equal(libUtil.join('.', 'data:foo,bar'), 'data:foo,bar');
|
||||
|
||||
|
||||
assert.equal(libUtil.join('..', 'b'), '../b');
|
||||
assert.equal(libUtil.join('..', 'b/'), '../b/');
|
||||
assert.equal(libUtil.join('..', 'b//'), '../b/');
|
||||
|
||||
assert.equal(libUtil.join('..', '..'), '../..');
|
||||
assert.equal(libUtil.join('..', '../b'), '../../b');
|
||||
|
||||
assert.equal(libUtil.join('..', '.'), '..');
|
||||
assert.equal(libUtil.join('..', './b'), '../b');
|
||||
|
||||
assert.equal(libUtil.join('..', 'http://www.example.com'), 'http://www.example.com');
|
||||
assert.equal(libUtil.join('..', 'data:foo,bar'), 'data:foo,bar');
|
||||
|
||||
|
||||
assert.equal(libUtil.join('a', ''), 'a');
|
||||
assert.equal(libUtil.join('a', '.'), 'a');
|
||||
assert.equal(libUtil.join('a/', ''), 'a');
|
||||
assert.equal(libUtil.join('a/', '.'), 'a');
|
||||
assert.equal(libUtil.join('a//', ''), 'a');
|
||||
assert.equal(libUtil.join('a//', '.'), 'a');
|
||||
assert.equal(libUtil.join('/a', ''), '/a');
|
||||
assert.equal(libUtil.join('/a', '.'), '/a');
|
||||
assert.equal(libUtil.join('', ''), '.');
|
||||
assert.equal(libUtil.join('.', ''), '.');
|
||||
assert.equal(libUtil.join('.', ''), '.');
|
||||
assert.equal(libUtil.join('.', '.'), '.');
|
||||
assert.equal(libUtil.join('..', ''), '..');
|
||||
assert.equal(libUtil.join('..', '.'), '..');
|
||||
assert.equal(libUtil.join('http://foo.org/a', ''), 'http://foo.org/a');
|
||||
assert.equal(libUtil.join('http://foo.org/a', '.'), 'http://foo.org/a');
|
||||
assert.equal(libUtil.join('http://foo.org/a/', ''), 'http://foo.org/a');
|
||||
assert.equal(libUtil.join('http://foo.org/a/', '.'), 'http://foo.org/a');
|
||||
assert.equal(libUtil.join('http://foo.org/a//', ''), 'http://foo.org/a');
|
||||
assert.equal(libUtil.join('http://foo.org/a//', '.'), 'http://foo.org/a');
|
||||
assert.equal(libUtil.join('http://foo.org', ''), 'http://foo.org/');
|
||||
assert.equal(libUtil.join('http://foo.org', '.'), 'http://foo.org/');
|
||||
assert.equal(libUtil.join('http://foo.org/', ''), 'http://foo.org/');
|
||||
assert.equal(libUtil.join('http://foo.org/', '.'), 'http://foo.org/');
|
||||
assert.equal(libUtil.join('http://foo.org//', ''), 'http://foo.org/');
|
||||
assert.equal(libUtil.join('http://foo.org//', '.'), 'http://foo.org/');
|
||||
assert.equal(libUtil.join('//www.example.com', ''), '//www.example.com/');
|
||||
assert.equal(libUtil.join('//www.example.com', '.'), '//www.example.com/');
|
||||
|
||||
|
||||
assert.equal(libUtil.join('http://foo.org/a', 'b'), 'http://foo.org/a/b');
|
||||
assert.equal(libUtil.join('http://foo.org/a/', 'b'), 'http://foo.org/a/b');
|
||||
assert.equal(libUtil.join('http://foo.org/a//', 'b'), 'http://foo.org/a/b');
|
||||
assert.equal(libUtil.join('http://foo.org/a', 'b/'), 'http://foo.org/a/b/');
|
||||
assert.equal(libUtil.join('http://foo.org/a', 'b//'), 'http://foo.org/a/b/');
|
||||
assert.equal(libUtil.join('http://foo.org/a/', '/b'), 'http://foo.org/b');
|
||||
assert.equal(libUtil.join('http://foo.org/a//', '//b'), 'http://b');
|
||||
|
||||
assert.equal(libUtil.join('http://foo.org/a', '..'), 'http://foo.org/');
|
||||
assert.equal(libUtil.join('http://foo.org/a', '../b'), 'http://foo.org/b');
|
||||
assert.equal(libUtil.join('http://foo.org/a/b', '../c'), 'http://foo.org/a/c');
|
||||
|
||||
assert.equal(libUtil.join('http://foo.org/a', '.'), 'http://foo.org/a');
|
||||
assert.equal(libUtil.join('http://foo.org/a', './b'), 'http://foo.org/a/b');
|
||||
assert.equal(libUtil.join('http://foo.org/a/b', './c'), 'http://foo.org/a/b/c');
|
||||
|
||||
assert.equal(libUtil.join('http://foo.org/a', 'http://www.example.com'), 'http://www.example.com');
|
||||
assert.equal(libUtil.join('http://foo.org/a', 'data:foo,bar'), 'data:foo,bar');
|
||||
|
||||
|
||||
assert.equal(libUtil.join('http://foo.org', 'a'), 'http://foo.org/a');
|
||||
assert.equal(libUtil.join('http://foo.org/', 'a'), 'http://foo.org/a');
|
||||
assert.equal(libUtil.join('http://foo.org//', 'a'), 'http://foo.org/a');
|
||||
assert.equal(libUtil.join('http://foo.org', '/a'), 'http://foo.org/a');
|
||||
assert.equal(libUtil.join('http://foo.org/', '/a'), 'http://foo.org/a');
|
||||
assert.equal(libUtil.join('http://foo.org//', '/a'), 'http://foo.org/a');
|
||||
|
||||
|
||||
assert.equal(libUtil.join('http://', 'www.example.com'), 'http://www.example.com');
|
||||
assert.equal(libUtil.join('file:///', 'www.example.com'), 'file:///www.example.com');
|
||||
assert.equal(libUtil.join('http://', 'ftp://example.com'), 'ftp://example.com');
|
||||
|
||||
assert.equal(libUtil.join('http://www.example.com', '//foo.org/bar'), 'http://foo.org/bar');
|
||||
assert.equal(libUtil.join('//www.example.com', '//foo.org/bar'), '//foo.org/bar');
|
||||
};
|
||||
|
||||
// TODO Issue #128: Define and test this function properly.
|
||||
exports['test relative()'] = function (assert, util) {
|
||||
assert.equal(libUtil.relative('/the/root', '/the/root/one.js'), 'one.js');
|
||||
assert.equal(libUtil.relative('/the/root', '/the/rootone.js'), '/the/rootone.js');
|
||||
|
||||
assert.equal(libUtil.relative('', '/the/root/one.js'), '/the/root/one.js');
|
||||
assert.equal(libUtil.relative('.', '/the/root/one.js'), '/the/root/one.js');
|
||||
assert.equal(libUtil.relative('', 'the/root/one.js'), 'the/root/one.js');
|
||||
assert.equal(libUtil.relative('.', 'the/root/one.js'), 'the/root/one.js');
|
||||
|
||||
assert.equal(libUtil.relative('/', '/the/root/one.js'), 'the/root/one.js');
|
||||
assert.equal(libUtil.relative('/', 'the/root/one.js'), 'the/root/one.js');
|
||||
};
|
||||
|
||||
});
192
node_modules/transformers/node_modules/source-map/test/source-map/util.js
generated
vendored
Normal file
@@ -0,0 +1,192 @@
/* -*- Mode: js; js-indent-level: 2; -*- */
/*
 * Copyright 2011 Mozilla Foundation and contributors
 * Licensed under the New BSD license. See LICENSE or:
 * http://opensource.org/licenses/BSD-3-Clause
 */
if (typeof define !== 'function') {
  var define = require('amdefine')(module, require);
}
define(function (require, exports, module) {

  var util = require('../../lib/source-map/util');

  // This is a test mapping which maps functions from two different files
  // (one.js and two.js) to a minified generated source.
  //
  // Here is one.js:
  //
  //   ONE.foo = function (bar) {
  //     return baz(bar);
  //   };
  //
  // Here is two.js:
  //
  //   TWO.inc = function (n) {
  //     return n + 1;
  //   };
  //
  // And here is the generated code (min.js):
  //
  //   ONE.foo=function(a){return baz(a);};
  //   TWO.inc=function(a){return a+1;};
  exports.testGeneratedCode = " ONE.foo=function(a){return baz(a);};\n" +
                              " TWO.inc=function(a){return a+1;};";
  exports.testMap = {
    version: 3,
    file: 'min.js',
    names: ['bar', 'baz', 'n'],
    sources: ['one.js', 'two.js'],
    sourceRoot: '/the/root',
    mappings: 'CAAC,IAAI,IAAM,SAAUA,GAClB,OAAOC,IAAID;CCDb,IAAI,IAAM,SAAUE,GAClB,OAAOA'
  };
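  // Illustrative sketch (kept as a comment so it is not executed with the
  // module): testMap above can be read back with the SourceMapConsumer used
  // throughout these tests, e.g.
  //
  //   var SourceMapConsumer =
  //       require('../../lib/source-map/source-map-consumer').SourceMapConsumer;
  //   var consumer = new SourceMapConsumer(exports.testMap);
  //   consumer.originalPositionFor({ line: 1, column: 5 });
  //   // expected, given the mappings added in the generator tests:
  //   // { source: '/the/root/one.js', line: 1, column: 5, name: null }
  //
  // The exact result shown is an expectation based on those tests, not a
  // value asserted in this file.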
|
||||
exports.testMapNoSourceRoot = {
|
||||
version: 3,
|
||||
file: 'min.js',
|
||||
names: ['bar', 'baz', 'n'],
|
||||
sources: ['one.js', 'two.js'],
|
||||
mappings: 'CAAC,IAAI,IAAM,SAAUA,GAClB,OAAOC,IAAID;CCDb,IAAI,IAAM,SAAUE,GAClB,OAAOA'
|
||||
};
|
||||
exports.testMapEmptySourceRoot = {
|
||||
version: 3,
|
||||
file: 'min.js',
|
||||
names: ['bar', 'baz', 'n'],
|
||||
sources: ['one.js', 'two.js'],
|
||||
sourceRoot: '',
|
||||
mappings: 'CAAC,IAAI,IAAM,SAAUA,GAClB,OAAOC,IAAID;CCDb,IAAI,IAAM,SAAUE,GAClB,OAAOA'
|
||||
};
|
||||
exports.testMapWithSourcesContent = {
|
||||
version: 3,
|
||||
file: 'min.js',
|
||||
names: ['bar', 'baz', 'n'],
|
||||
sources: ['one.js', 'two.js'],
|
||||
sourcesContent: [
|
||||
' ONE.foo = function (bar) {\n' +
|
||||
' return baz(bar);\n' +
|
||||
' };',
|
||||
' TWO.inc = function (n) {\n' +
|
||||
' return n + 1;\n' +
|
||||
' };'
|
||||
],
|
||||
sourceRoot: '/the/root',
|
||||
mappings: 'CAAC,IAAI,IAAM,SAAUA,GAClB,OAAOC,IAAID;CCDb,IAAI,IAAM,SAAUE,GAClB,OAAOA'
|
||||
};
|
||||
exports.testMapRelativeSources = {
|
||||
version: 3,
|
||||
file: 'min.js',
|
||||
names: ['bar', 'baz', 'n'],
|
||||
sources: ['./one.js', './two.js'],
|
||||
sourcesContent: [
|
||||
' ONE.foo = function (bar) {\n' +
|
||||
' return baz(bar);\n' +
|
||||
' };',
|
||||
' TWO.inc = function (n) {\n' +
|
||||
' return n + 1;\n' +
|
||||
' };'
|
||||
],
|
||||
sourceRoot: '/the/root',
|
||||
mappings: 'CAAC,IAAI,IAAM,SAAUA,GAClB,OAAOC,IAAID;CCDb,IAAI,IAAM,SAAUE,GAClB,OAAOA'
|
||||
};
|
||||
exports.emptyMap = {
|
||||
version: 3,
|
||||
file: 'min.js',
|
||||
names: [],
|
||||
sources: [],
|
||||
mappings: ''
|
||||
};
|
||||
|
||||
|
||||
function assertMapping(generatedLine, generatedColumn, originalSource,
|
||||
originalLine, originalColumn, name, map, assert,
|
||||
dontTestGenerated, dontTestOriginal) {
|
||||
if (!dontTestOriginal) {
|
||||
var origMapping = map.originalPositionFor({
|
||||
line: generatedLine,
|
||||
column: generatedColumn
|
||||
});
|
||||
assert.equal(origMapping.name, name,
|
||||
'Incorrect name, expected ' + JSON.stringify(name)
|
||||
+ ', got ' + JSON.stringify(origMapping.name));
|
||||
assert.equal(origMapping.line, originalLine,
|
||||
'Incorrect line, expected ' + JSON.stringify(originalLine)
|
||||
+ ', got ' + JSON.stringify(origMapping.line));
|
||||
assert.equal(origMapping.column, originalColumn,
|
||||
'Incorrect column, expected ' + JSON.stringify(originalColumn)
|
||||
+ ', got ' + JSON.stringify(origMapping.column));
|
||||
|
||||
var expectedSource;
|
||||
|
||||
if (originalSource && map.sourceRoot && originalSource.indexOf(map.sourceRoot) === 0) {
|
||||
expectedSource = originalSource;
|
||||
} else if (originalSource) {
|
||||
expectedSource = map.sourceRoot
|
||||
? util.join(map.sourceRoot, originalSource)
|
||||
: originalSource;
|
||||
} else {
|
||||
expectedSource = null;
|
||||
}
|
||||
|
||||
assert.equal(origMapping.source, expectedSource,
|
||||
'Incorrect source, expected ' + JSON.stringify(expectedSource)
|
||||
+ ', got ' + JSON.stringify(origMapping.source));
|
||||
}
|
||||
|
||||
if (!dontTestGenerated) {
|
||||
var genMapping = map.generatedPositionFor({
|
||||
source: originalSource,
|
||||
line: originalLine,
|
||||
column: originalColumn
|
||||
});
|
||||
assert.equal(genMapping.line, generatedLine,
|
||||
'Incorrect line, expected ' + JSON.stringify(generatedLine)
|
||||
+ ', got ' + JSON.stringify(genMapping.line));
|
||||
assert.equal(genMapping.column, generatedColumn,
|
||||
'Incorrect column, expected ' + JSON.stringify(generatedColumn)
|
||||
+ ', got ' + JSON.stringify(genMapping.column));
|
||||
}
|
||||
}
|
||||
exports.assertMapping = assertMapping;
|
||||
|
||||
function assertEqualMaps(assert, actualMap, expectedMap) {
|
||||
assert.equal(actualMap.version, expectedMap.version, "version mismatch");
|
||||
assert.equal(actualMap.file, expectedMap.file, "file mismatch");
|
||||
assert.equal(actualMap.names.length,
|
||||
expectedMap.names.length,
|
||||
"names length mismatch: " +
|
||||
actualMap.names.join(", ") + " != " + expectedMap.names.join(", "));
|
||||
for (var i = 0; i < actualMap.names.length; i++) {
|
||||
assert.equal(actualMap.names[i],
|
||||
expectedMap.names[i],
|
||||
"names[" + i + "] mismatch: " +
|
||||
actualMap.names.join(", ") + " != " + expectedMap.names.join(", "));
|
||||
}
|
||||
assert.equal(actualMap.sources.length,
|
||||
expectedMap.sources.length,
|
||||
"sources length mismatch: " +
|
||||
actualMap.sources.join(", ") + " != " + expectedMap.sources.join(", "));
|
||||
for (var i = 0; i < actualMap.sources.length; i++) {
|
||||
assert.equal(actualMap.sources[i],
|
||||
expectedMap.sources[i],
|
||||
"sources[" + i + "] length mismatch: " +
|
||||
actualMap.sources.join(", ") + " != " + expectedMap.sources.join(", "));
|
||||
}
|
||||
assert.equal(actualMap.sourceRoot,
|
||||
expectedMap.sourceRoot,
|
||||
"sourceRoot mismatch: " +
|
||||
actualMap.sourceRoot + " != " + expectedMap.sourceRoot);
|
||||
assert.equal(actualMap.mappings, expectedMap.mappings,
|
||||
"mappings mismatch:\nActual: " + actualMap.mappings + "\nExpected: " + expectedMap.mappings);
|
||||
if (actualMap.sourcesContent) {
|
||||
assert.equal(actualMap.sourcesContent.length,
|
||||
expectedMap.sourcesContent.length,
|
||||
"sourcesContent length mismatch");
|
||||
for (var i = 0; i < actualMap.sourcesContent.length; i++) {
|
||||
assert.equal(actualMap.sourcesContent[i],
|
||||
expectedMap.sourcesContent[i],
|
||||
"sourcesContent[" + i + "] mismatch");
|
||||
}
|
||||
}
|
||||
}
|
||||
exports.assertEqualMaps = assertEqualMaps;
|
||||
|
||||
});
|
||||
2
node_modules/transformers/node_modules/uglify-js/.npmignore
generated
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
tmp/
|
||||
node_modules/
|
||||
544
node_modules/transformers/node_modules/uglify-js/README.md
generated
vendored
Normal file
@@ -0,0 +1,544 @@
|
||||
UglifyJS 2
|
||||
==========
|
||||
|
||||
UglifyJS is a JavaScript parser, minifier, compressor or beautifier toolkit.
|
||||
|
||||
This page documents the command line utility. For
|
||||
[API and internals documentation see my website](http://lisperator.net/uglifyjs/).
|
||||
There's also an
|
||||
[in-browser online demo](http://lisperator.net/uglifyjs/#demo) (for Firefox,
|
||||
Chrome and probably Safari).
|
||||
|
||||
Install
|
||||
-------
|
||||
|
||||
First make sure you have installed the latest version of [node.js](http://nodejs.org/)
|
||||
(You may need to restart your computer after this step).
|
||||
|
||||
From NPM for use as a command line app:
|
||||
|
||||
npm install uglify-js -g
|
||||
|
||||
From NPM for programmatic use:
|
||||
|
||||
npm install uglify-js
|
||||
|
||||
From Git:
|
||||
|
||||
git clone git://github.com/mishoo/UglifyJS2.git
|
||||
cd UglifyJS2
|
||||
npm link .
|
||||
|
||||
Usage
|
||||
-----
|
||||
|
||||
uglifyjs [input files] [options]
|
||||
|
||||
UglifyJS2 can take multiple input files. It's recommended that you pass the
|
||||
input files first, then pass the options. UglifyJS will parse input files
|
||||
in sequence and apply any compression options. The files are parsed in the
|
||||
same global scope, that is, a reference from a file to some
|
||||
variable/function declared in another file will be matched properly.
|
||||
|
||||
If you want to read from STDIN instead, pass a single dash instead of input
|
||||
files.
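
For example, to minify code piped in on STDIN (a sketch; the file names are hypothetical):

    cat src/*.js | uglifyjs - -c -m -o bundle.min.js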
|
||||
|
||||
The available options are:
|
||||
|
||||
--source-map Specify an output file where to generate source map.
|
||||
[string]
|
||||
--source-map-root The path to the original source to be included in the
|
||||
source map. [string]
|
||||
--source-map-url The path to the source map to be added in //@
|
||||
sourceMappingURL. Defaults to the value passed with
|
||||
--source-map. [string]
|
||||
--in-source-map Input source map, useful if you're compressing JS that was
|
||||
generated from some other original code.
|
||||
-p, --prefix Skip prefix for original filenames that appear in source
|
||||
maps. For example -p 3 will drop 3 directories from file
|
||||
names and ensure they are relative paths.
|
||||
-o, --output Output file (default STDOUT).
|
||||
-b, --beautify Beautify output/specify output options. [string]
|
||||
-m, --mangle Mangle names/pass mangler options. [string]
|
||||
-r, --reserved Reserved names to exclude from mangling.
|
||||
-c, --compress Enable compressor/pass compressor options. Pass options
|
||||
like -c hoist_vars=false,if_return=false. Use -c with no
|
||||
argument to use the default compression options. [string]
|
||||
-d, --define Global definitions [string]
|
||||
--comments Preserve copyright comments in the output. By default this
|
||||
works like Google Closure, keeping JSDoc-style comments
|
||||
that contain "@license" or "@preserve". You can optionally
|
||||
pass one of the following arguments to this flag:
|
||||
- "all" to keep all comments
|
||||
- a valid JS regexp (needs to start with a slash) to keep
|
||||
only comments that match.
|
||||
Note that currently not *all* comments can be kept when
|
||||
compression is on, because of dead code removal or
|
||||
cascading statements into sequences. [string]
|
||||
--stats Display operations run time on STDERR. [boolean]
|
||||
--acorn Use Acorn for parsing. [boolean]
|
||||
    --spidermonkey     Assume input files are SpiderMonkey AST format (as JSON).
|
||||
[boolean]
|
||||
--self Build itself (UglifyJS2) as a library (implies
|
||||
--wrap=UglifyJS --export-all) [boolean]
|
||||
--wrap Embed everything in a big function, making the “exports”
|
||||
and “global” variables available. You need to pass an
|
||||
argument to this option to specify the name that your
|
||||
module will take when included in, say, a browser.
|
||||
[string]
|
||||
--export-all Only used when --wrap, this tells UglifyJS to add code to
|
||||
automatically export all globals. [boolean]
|
||||
--lint Display some scope warnings [boolean]
|
||||
-v, --verbose Verbose [boolean]
|
||||
-V, --version Print version number and exit. [boolean]
|
||||
|
||||
Specify `--output` (`-o`) to declare the output file. Otherwise the output
|
||||
goes to STDOUT.
|
||||
|
||||
## Source map options
|
||||
|
||||
UglifyJS2 can generate a source map file, which is highly useful for
|
||||
debugging your compressed JavaScript. To get a source map, pass
|
||||
`--source-map output.js.map` (full path to the file where you want the
|
||||
source map dumped).
|
||||
|
||||
Additionally you might need `--source-map-root` to pass the URL where the
|
||||
original files can be found. In case you are passing full paths to input
|
||||
files to UglifyJS, you can use `--prefix` (`-p`) to specify the number of
|
||||
directories to drop from the path prefix when declaring files in the source
|
||||
map.
|
||||
|
||||
For example:
|
||||
|
||||
uglifyjs /home/doe/work/foo/src/js/file1.js \
|
||||
/home/doe/work/foo/src/js/file2.js \
|
||||
-o foo.min.js \
|
||||
--source-map foo.min.js.map \
|
||||
--source-map-root http://foo.com/src \
|
||||
-p 5 -c -m
|
||||
|
||||
The above will compress and mangle `file1.js` and `file2.js`, will drop the
|
||||
output in `foo.min.js` and the source map in `foo.min.js.map`. The source
|
||||
mapping will refer to `http://foo.com/src/js/file1.js` and
|
||||
`http://foo.com/src/js/file2.js` (in fact it will list `http://foo.com/src`
|
||||
as the source map root, and the original files as `js/file1.js` and
|
||||
`js/file2.js`).
|
||||
|
||||
### Composed source map
|
||||
|
||||
When you're compressing JS code that was output by a compiler such as
|
||||
CoffeeScript, mapping to the JS code won't be too helpful. Instead, you'd
|
||||
like to map back to the original code (i.e. CoffeeScript). UglifyJS has an
|
||||
option to take an input source map. Assuming you have a mapping from
|
||||
CoffeeScript → compiled JS, UglifyJS can generate a map from CoffeeScript →
|
||||
compressed JS by mapping every token in the compiled JS to its original
|
||||
location.
|
||||
|
||||
To use this feature you need to pass `--in-source-map
|
||||
/path/to/input/source.map`. Normally the input source map should also point
|
||||
to the file containing the generated JS, so if that's correct you can omit
|
||||
input files from the command line.
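
For example (a sketch; the file names are hypothetical):

    uglifyjs compiled.js --in-source-map compiled.js.map \
                         --source-map compiled.min.js.map \
                         -o compiled.min.js -c -m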
|
||||
|
||||
## Mangler options
|
||||
|
||||
To enable the mangler you need to pass `--mangle` (`-m`). Optionally you
|
||||
can pass `-m sort=true` (we'll possibly have other flags in the future) in order
|
||||
to assign shorter names to the most frequently used variables. This saves a few
hundred bytes on jQuery before gzip, but the output is _bigger_ after gzip
(and the same seems to happen for other libraries I tried it on), therefore it's not
enabled by default.
|
||||
|
||||
When mangling is enabled but you want to prevent certain names from being
|
||||
mangled, you can declare those names with `--reserved` (`-r`) — pass a
|
||||
comma-separated list of names. For example:
|
||||
|
||||
uglifyjs ... -m -r '$,require,exports'
|
||||
|
||||
to prevent the `require`, `exports` and `$` names from being changed.
|
||||
|
||||
## Compressor options
|
||||
|
||||
You need to pass `--compress` (`-c`) to enable the compressor. Optionally
|
||||
you can pass a comma-separated list of options. Options are in the form
|
||||
`foo=bar`, or just `foo` (the latter implies a boolean option that you want
|
||||
to set `true`; it's effectively a shortcut for `foo=true`).
|
||||
|
||||
The defaults should be tuned for maximum compression on most code. Here are
|
||||
the available options (all are `true` by default, except `hoist_vars`):
|
||||
|
||||
- `sequences` -- join consecutive simple statements using the comma operator
|
||||
- `properties` -- rewrite property access using the dot notation, for
|
||||
example `foo["bar"] → foo.bar`
|
||||
- `dead_code` -- remove unreachable code
|
||||
- `drop_debugger` -- remove `debugger;` statements
|
||||
- `unsafe` -- apply "unsafe" transformations (discussion below)
|
||||
- `conditionals` -- apply optimizations for `if`-s and conditional
|
||||
expressions
|
||||
- `comparisons` -- apply certain optimizations to binary nodes, for example:
|
||||
`!(a <= b) → a > b` (only when `unsafe`), attempts to negate binary nodes,
|
||||
e.g. `a = !b && !c && !d && !e → a=!(b||c||d||e)` etc.
|
||||
- `evaluate` -- attempt to evaluate constant expressions
|
||||
- `booleans` -- various optimizations for boolean context, for example `!!a
|
||||
? b : c → a ? b : c`
|
||||
- `loops` -- optimizations for `do`, `while` and `for` loops when we can
|
||||
statically determine the condition
|
||||
- `unused` -- drop unreferenced functions and variables
|
||||
- `hoist_funs` -- hoist function declarations
|
||||
- `hoist_vars` -- hoist `var` declarations (this is `false` by default
|
||||
because it seems to increase the size of the output in general)
|
||||
- `if_return` -- optimizations for if/return and if/continue
|
||||
- `join_vars` -- join consecutive `var` statements
|
||||
- `cascade` -- small optimization for sequences, transform `x, x` into `x`
|
||||
and `x = something(), x` into `x = something()`
|
||||
- `warnings` -- display warnings when dropping unreachable code or unused
|
||||
declarations etc.
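
As a quick sketch of the `foo=bar` syntax described above (the file names are hypothetical and the option names are taken from the list):

    uglifyjs file.js -c sequences=false,drop_debugger=false -m -o file.min.js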
|
||||
|
||||
### Conditional compilation
|
||||
|
||||
You can use the `--define` (`-d`) switch in order to declare global
|
||||
variables that UglifyJS will assume to be constants (unless defined in
|
||||
scope). For example if you pass `--define DEBUG=false` then, coupled with
|
||||
dead code removal, UglifyJS will discard the following from the output:
|
||||
|
||||
if (DEBUG) {
|
||||
console.log("debug stuff");
|
||||
}
|
||||
|
||||
UglifyJS will warn about the condition being always false and about dropping
|
||||
unreachable code; for now there is no option to turn off only this specific
|
||||
warning, but you can pass `warnings=false` to turn off *all* warnings.
|
||||
|
||||
Another way of doing that is to declare your globals as constants in a
|
||||
separate file and include it into the build. For example you can have a
|
||||
`build/defines.js` file with the following:
|
||||
|
||||
const DEBUG = false;
|
||||
const PRODUCTION = true;
|
||||
// etc.
|
||||
|
||||
and build your code like this:
|
||||
|
||||
uglifyjs build/defines.js js/foo.js js/bar.js... -c
|
||||
|
||||
UglifyJS will notice the constants and, since they cannot be altered, it
|
||||
will evaluate references to them to the value itself and drop unreachable
|
||||
code as usual. The possible downside of this approach is that the build
|
||||
will contain the `const` declarations.
|
||||
|
||||
<a name="codegen-options"></a>
|
||||
## Beautifier options
|
||||
|
||||
The code generator tries to output shortest code possible by default. In
|
||||
case you want beautified output, pass `--beautify` (`-b`). Optionally you
|
||||
can pass additional arguments that control the code output:
|
||||
|
||||
- `beautify` (default `true`) -- whether to actually beautify the output.
|
||||
Passing `-b` will set this to true, but you might need to pass `-b` even
|
||||
when you want to generate minified code, in order to specify additional
|
||||
arguments, so you can use `-b beautify=false` to override it.
|
||||
- `indent-level` (default 4)
|
||||
- `indent-start` (default 0) -- prefix all lines by that many spaces
|
||||
- `quote-keys` (default `false`) -- pass `true` to quote all keys in literal
|
||||
objects
|
||||
- `space-colon` (default `true`) -- insert a space after the colon signs
|
||||
- `ascii-only` (default `false`) -- escape Unicode characters in strings and
|
||||
regexps
|
||||
- `inline-script` (default `false`) -- escape the slash in occurrences of
|
||||
`</script` in strings
|
||||
- `width` (default 80) -- only takes effect when beautification is on, this
|
||||
specifies an (orientative) line width that the beautifier will try to
|
||||
obey. It refers to the width of the line text (excluding indentation).
|
||||
It doesn't work very well currently, but it does make the code generated
|
||||
by UglifyJS more readable.
|
||||
- `max-line-len` (default 32000) -- maximum line length (for uglified code)
|
||||
- `ie-proof` (default `true`) -- generate “IE-proof” code (for now this
  means adding brackets around the do/while in code like this: `if (foo) do
  something(); while (bar); else ...`).
|
||||
- `bracketize` (default `false`) -- always insert brackets in `if`, `for`,
|
||||
`do`, `while` or `with` statements, even if their body is a single
|
||||
statement.
|
||||
- `semicolons` (default `true`) -- separate statements with semicolons. If
|
||||
you pass `false` then whenever possible we will use a newline instead of a
|
||||
semicolon, leading to more readable output of uglified code (size before
|
||||
gzip could be smaller; size after gzip insignificantly larger).
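
Most of these options can also be passed programmatically to the output stream described in the API section below. A minimal sketch, assuming the underscore forms of the names above (`indent_level`, `quote_keys`, ...):

    var UglifyJS = require("uglify-js");
    var stream = UglifyJS.OutputStream({
        beautify     : true,  // same effect as passing -b
        indent_level : 2,     // assumed underscore form of `indent-level`
        quote_keys   : true   // assumed underscore form of `quote-keys`
    });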
|
||||
|
||||
### Keeping copyright notices or other comments
|
||||
|
||||
You can pass `--comments` to retain certain comments in the output. By
|
||||
default it will keep JSDoc-style comments that contain "@preserve",
|
||||
"@license" or "@cc_on" (conditional compilation for IE). You can pass
|
||||
`--comments all` to keep all the comments, or a valid JavaScript regexp to
|
||||
keep only comments that match this regexp. For example `--comments
|
||||
'/foo|bar/'` will keep only comments that contain "foo" or "bar".
|
||||
|
||||
Note, however, that there might be situations where comments are lost. For
|
||||
example:
|
||||
|
||||
function f() {
|
||||
/** @preserve Foo Bar */
|
||||
function g() {
|
||||
// this function is never called
|
||||
}
|
||||
return something();
|
||||
}
|
||||
|
||||
Even though it has "@preserve", the comment will be lost because the inner
|
||||
function `g` (which is the AST node to which the comment is attached to) is
|
||||
discarded by the compressor as not referenced.
|
||||
|
||||
The safest comments where to place copyright information (or other info that
|
||||
needs to be kept in the output) are comments attached to toplevel nodes.
|
||||
|
||||
## Support for the SpiderMonkey AST
|
||||
|
||||
UglifyJS2 has its own abstract syntax tree format; for
|
||||
[practical reasons](http://lisperator.net/blog/uglifyjs-why-not-switching-to-spidermonkey-ast/)
|
||||
we can't easily change to using the SpiderMonkey AST internally. However,
|
||||
UglifyJS now has a converter which can import a SpiderMonkey AST.
|
||||
|
||||
For example [Acorn][acorn] is a super-fast parser that produces a
|
||||
SpiderMonkey AST. It has a small CLI utility that parses one file and dumps
|
||||
the AST in JSON on the standard output. To use UglifyJS to mangle and
|
||||
compress that:
|
||||
|
||||
acorn file.js | uglifyjs --spidermonkey -m -c
|
||||
|
||||
The `--spidermonkey` option tells UglifyJS that all input files are not
|
||||
JavaScript, but JS code described in SpiderMonkey AST in JSON. Therefore we
|
||||
don't use our own parser in this case, but just transform that AST into our
|
||||
internal AST.
|
||||
|
||||
### Use Acorn for parsing
|
||||
|
||||
More for fun, I added the `--acorn` option which will use Acorn to do all
|
||||
the parsing. If you pass this option, UglifyJS will `require("acorn")`.
|
||||
|
||||
Acorn is really fast (e.g. 250ms instead of 380ms on some 650K code), but
|
||||
converting the SpiderMonkey tree that Acorn produces takes another 150ms so
|
||||
in total it's a bit more than just using UglifyJS's own parser.
|
||||
|
||||
API Reference
|
||||
-------------
|
||||
|
||||
Assuming installation via NPM, you can load UglifyJS in your application
|
||||
like this:
|
||||
|
||||
var UglifyJS = require("uglify-js");
|
||||
|
||||
It exports a lot of names, but I'll discuss here the basics that are needed
|
||||
for parsing, mangling and compressing a piece of code. The sequence is (1)
|
||||
parse, (2) compress, (3) mangle, (4) generate output code.
|
||||
|
||||
### The simple way
|
||||
|
||||
There's a single toplevel function which combines all the steps. If you
|
||||
don't need additional customization, you might want to go with `minify`.
|
||||
Example:
|
||||
|
||||
var result = UglifyJS.minify("/path/to/file.js");
|
||||
console.log(result.code); // minified output
|
||||
// if you need to pass code instead of file name
|
||||
var result = UglifyJS.minify("var b = function () {};", {fromString: true});
|
||||
|
||||
You can also compress multiple files:
|
||||
|
||||
var result = UglifyJS.minify([ "file1.js", "file2.js", "file3.js" ]);
|
||||
console.log(result.code);
|
||||
|
||||
To generate a source map:
|
||||
|
||||
var result = UglifyJS.minify([ "file1.js", "file2.js", "file3.js" ], {
|
||||
outSourceMap: "out.js.map"
|
||||
});
|
||||
console.log(result.code); // minified output
|
||||
console.log(result.map);
|
||||
|
||||
Note that the source map is not saved in a file, it's just returned in
|
||||
`result.map`. The value passed for `outSourceMap` is only used to set the
|
||||
`file` attribute in the source map (see [the spec][sm-spec]).
|
||||
|
||||
You can also specify the `sourceRoot` property to be included in the source map:
|
||||
|
||||
var result = UglifyJS.minify([ "file1.js", "file2.js", "file3.js" ], {
|
||||
outSourceMap: "out.js.map",
|
||||
sourceRoot: "http://example.com/src"
|
||||
});
|
||||
|
||||
|
||||
If you're compressing compiled JavaScript and have a source map for it, you
|
||||
can use the `inSourceMap` argument:
|
||||
|
||||
var result = UglifyJS.minify("compiled.js", {
|
||||
inSourceMap: "compiled.js.map",
|
||||
outSourceMap: "minified.js.map"
|
||||
});
|
||||
// same as before, it returns `code` and `map`
|
||||
|
||||
The `inSourceMap` is only used if you also request `outSourceMap` (it makes
|
||||
no sense otherwise).
|
||||
|
||||
Other options:
|
||||
|
||||
- `warnings` (default `false`) — pass `true` to display compressor warnings.
|
||||
|
||||
- `fromString` (default `false`) — if you pass `true` then you can pass
|
||||
JavaScript source code, rather than file names.
|
||||
|
||||
- `mangle` — pass `false` to skip mangling names.
|
||||
|
||||
- `output` (default `null`) — pass an object if you wish to specify
|
||||
additional [output options][codegen]. The defaults are optimized
|
||||
for best compression.
|
||||
|
||||
- `compress` (default `{}`) — pass `false` to skip compressing entirely.
|
||||
Pass an object to specify custom [compressor options][compressor].
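
A minimal sketch combining several of the options above (the code string and the specific option values are only for illustration):

    var UglifyJS = require("uglify-js");
    var result = UglifyJS.minify("var answer = 40 + 2;", {
        fromString : true,             // treat the argument as source code, not a file name
        warnings   : true,             // display compressor warnings
        mangle     : false,            // skip mangling names
        compress   : { unused: false } // custom compressor options
    });
    console.log(result.code);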
|
||||
|
||||
We could add more options to `UglifyJS.minify` — if you need additional
|
||||
functionality please suggest!
|
||||
|
||||
### The hard way
|
||||
|
||||
What follows is more detailed API info, in case the `minify` function is
too simple for your needs.
|
||||
|
||||
#### The parser
|
||||
|
||||
var toplevel_ast = UglifyJS.parse(code, options);
|
||||
|
||||
`options` is optional and if present it must be an object. The following
|
||||
properties are available:
|
||||
|
||||
- `strict` — disable automatic semicolon insertion and support for trailing
|
||||
comma in arrays and objects
|
||||
- `filename` — the name of the file where this code is coming from
|
||||
- `toplevel` — a `toplevel` node (as returned by a previous invocation of
|
||||
`parse`)
|
||||
|
||||
The last two options are useful when you'd like to minify multiple files and
|
||||
get a single file as the output and a proper source map. Our CLI tool does
|
||||
something like this:
|
||||
|
||||
var toplevel = null;
|
||||
files.forEach(function(file){
|
||||
var code = fs.readFileSync(file);
|
||||
toplevel = UglifyJS.parse(code, {
|
||||
filename: file,
|
||||
toplevel: toplevel
|
||||
});
|
||||
});
|
||||
|
||||
After this, we have in `toplevel` a big AST containing all our files, with
|
||||
each token having proper information about where it came from.
|
||||
|
||||
#### Scope information
|
||||
|
||||
UglifyJS contains a scope analyzer that you need to call manually before
|
||||
compressing or mangling. Basically it augments various nodes in the AST
|
||||
with information about where a name is defined, how many times a name is
referenced, whether it is a global, whether a function uses `eval` or the
`with` statement, etc. I will discuss this some place else; for now, what's
|
||||
important to know is that you need to call the following before doing
|
||||
anything with the tree:
|
||||
|
||||
toplevel.figure_out_scope()
|
||||
|
||||
#### Compression
|
||||
|
||||
Like this:
|
||||
|
||||
var compressor = UglifyJS.Compressor(options);
|
||||
var compressed_ast = toplevel.transform(compressor);
|
||||
|
||||
The `options` can be missing. Available options are discussed above in
|
||||
“Compressor options”. Defaults should lead to best compression in most
|
||||
scripts.
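
For instance, a sketch that overrides a couple of the compressor options listed earlier:

    var compressor = UglifyJS.Compressor({
        sequences : false,  // don't join consecutive statements with the comma operator
        warnings  : false   // silence compressor warnings
    });
    var compressed_ast = toplevel.transform(compressor);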
|
||||
|
||||
The compressor is destructive, so don't rely on `toplevel` remaining the
original tree.
|
||||
|
||||
#### Mangling
|
||||
|
||||
After compression it is a good idea to call `figure_out_scope` again (since
the compressor might drop unused variables / unreachable code and this might
change the number of identifiers or their positions). Optionally, you can
use a trick that helps gzip compression (counting character frequency in the
non-mangleable words). Example:
|
||||
|
||||
compressed_ast.figure_out_scope();
|
||||
compressed_ast.compute_char_frequency();
|
||||
compressed_ast.mangle_names();
|
||||
|
||||
#### Generating output
|
||||
|
||||
AST nodes have a `print` method that takes an output stream. Essentially,
|
||||
to generate code you do this:
|
||||
|
||||
var stream = UglifyJS.OutputStream(options);
|
||||
compressed_ast.print(stream);
|
||||
var code = stream.toString(); // this is your minified code
|
||||
|
||||
or, for a shortcut you can do:
|
||||
|
||||
var code = compressed_ast.print_to_string(options);
|
||||
|
||||
As usual, `options` is optional. The output stream accepts a lot of options,
most of them documented above in section “Beautifier options”. The two
which we care about here are `source_map` and `comments`.
|
||||
|
||||
#### Keeping comments in the output
|
||||
|
||||
In order to keep certain comments in the output you need to pass the
|
||||
`comments` option. Pass a RegExp or a function. If you pass a RegExp, only
|
||||
those comments whose body matches the regexp will be kept. Note that the body
means the comment text without the initial `//` or `/*`. If you pass a function, it will be
|
||||
called for every comment in the tree and will receive two arguments: the
|
||||
node that the comment is attached to, and the comment token itself.
|
||||
|
||||
The comment token has these properties:
|
||||
|
||||
- `type`: "comment1" for single-line comments or "comment2" for multi-line
|
||||
comments
|
||||
- `value`: the comment body
|
||||
- `pos` and `endpos`: the start/end positions (zero-based indexes) in the
|
||||
original code where this comment appears
|
||||
- `line` and `col`: the line and column where this comment appears in the
|
||||
original code
|
||||
- `file` — the file name of the original file
|
||||
- `nlb` — true if there was a newline before this comment in the original
|
||||
code, or if this comment contains a newline.
|
||||
|
||||
Your function should return `true` to keep the comment, or a falsy value
|
||||
otherwise.
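
A minimal sketch of the function form, roughly reproducing what the command line tool keeps by default for multi-line comments:

    var stream = UglifyJS.OutputStream({
        comments: function(node, comment) {
            // keep multi-line comments that look like license headers
            return comment.type == "comment2"
                && /@preserve|@license|@cc_on/i.test(comment.value);
        }
    });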
|
||||
|
||||
#### Generating a source mapping
|
||||
|
||||
You need to pass the `source_map` argument when calling `print`. It needs
|
||||
to be a `SourceMap` object (which is a thin wrapper on top of the
|
||||
[source-map][source-map] library).
|
||||
|
||||
Example:
|
||||
|
||||
var source_map = UglifyJS.SourceMap(source_map_options);
|
||||
var stream = UglifyJS.OutputStream({
|
||||
...
|
||||
source_map: source_map
|
||||
});
|
||||
compressed_ast.print(stream);
|
||||
|
||||
var code = stream.toString();
|
||||
var map = source_map.toString(); // json output for your source map
|
||||
|
||||
The `source_map_options` (optional) can contain the following properties:
|
||||
|
||||
- `file`: the name of the JavaScript output file that this mapping refers to
|
||||
- `root`: the `sourceRoot` property (see the [spec][sm-spec])
|
||||
- `orig`: the "original source map", handy when you compress generated JS
|
||||
and want to map the minified output back to the original code where it
|
||||
came from. It can be simply a string in JSON, or a JSON object containing
|
||||
the original source map.
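
A short sketch that composes with an existing map (the file names are hypothetical):

    var fs = require("fs");
    var source_map = UglifyJS.SourceMap({
        file : "minified.js",                             // output file this map describes
        root : "http://example.com/src",                  // becomes the sourceRoot property
        orig : fs.readFileSync("compiled.js.map", "utf8") // original source map as a JSON string
    });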
|
||||
|
||||
[acorn]: https://github.com/marijnh/acorn
|
||||
[source-map]: https://github.com/mozilla/source-map
|
||||
[sm-spec]: https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit
|
||||
[codegen]: http://lisperator.net/uglifyjs/codegen
|
||||
[compressor]: http://lisperator.net/uglifyjs/compress
|
||||
370
node_modules/transformers/node_modules/uglify-js/bin/uglifyjs
generated
vendored
Executable file
@@ -0,0 +1,370 @@
|
||||
#! /usr/bin/env node
|
||||
// -*- js -*-
|
||||
|
||||
"use strict";
|
||||
|
||||
var UglifyJS = require("../tools/node");
|
||||
var sys = require("util");
|
||||
var optimist = require("optimist");
|
||||
var fs = require("fs");
|
||||
var acorn;
|
||||
var ARGS = optimist
|
||||
.usage("$0 input1.js [input2.js ...] [options]\n\
|
||||
Use a single dash to read input from the standard input.\
|
||||
\n\n\
|
||||
NOTE: by default there is no mangling/compression.\n\
|
||||
Without [options] it will simply parse input files and dump the AST\n\
|
||||
with whitespace and comments discarded. To achieve compression and\n\
|
||||
mangling you need to use `-c` and `-m`.\
|
||||
")
|
||||
.describe("source-map", "Specify an output file where to generate source map.")
|
||||
.describe("source-map-root", "The path to the original source to be included in the source map.")
|
||||
.describe("source-map-url", "The path to the source map to be added in //@ sourceMappingURL. Defaults to the value passed with --source-map.")
|
||||
.describe("in-source-map", "Input source map, useful if you're compressing JS that was generated from some other original code.")
|
||||
.describe("p", "Skip prefix for original filenames that appear in source maps. \
|
||||
For example -p 3 will drop 3 directories from file names and ensure they are relative paths.")
|
||||
.describe("o", "Output file (default STDOUT).")
|
||||
.describe("b", "Beautify output/specify output options.")
|
||||
.describe("m", "Mangle names/pass mangler options.")
|
||||
.describe("r", "Reserved names to exclude from mangling.")
|
||||
.describe("c", "Enable compressor/pass compressor options. \
|
||||
Pass options like -c hoist_vars=false,if_return=false. \
|
||||
Use -c with no argument to use the default compression options.")
|
||||
.describe("d", "Global definitions")
|
||||
|
||||
.describe("comments", "Preserve copyright comments in the output. \
|
||||
By default this works like Google Closure, keeping JSDoc-style comments that contain \"@license\" or \"@preserve\". \
|
||||
You can optionally pass one of the following arguments to this flag:\n\
|
||||
- \"all\" to keep all comments\n\
|
||||
- a valid JS regexp (needs to start with a slash) to keep only comments that match.\n\
|
||||
\
|
||||
Note that currently not *all* comments can be kept when compression is on, \
|
||||
because of dead code removal or cascading statements into sequences.")
|
||||
|
||||
.describe("stats", "Display operations run time on STDERR.")
|
||||
.describe("acorn", "Use Acorn for parsing.")
|
||||
    .describe("spidermonkey", "Assume input files are SpiderMonkey AST format (as JSON).")
|
||||
.describe("self", "Build itself (UglifyJS2) as a library (implies --wrap=UglifyJS --export-all)")
|
||||
.describe("wrap", "Embed everything in a big function, making the “exports” and “global” variables available. \
|
||||
You need to pass an argument to this option to specify the name that your module will take when included in, say, a browser.")
|
||||
.describe("export-all", "Only used when --wrap, this tells UglifyJS to add code to automatically export all globals.")
|
||||
.describe("lint", "Display some scope warnings")
|
||||
.describe("v", "Verbose")
|
||||
.describe("V", "Print version number and exit.")
|
||||
|
||||
.alias("p", "prefix")
|
||||
.alias("o", "output")
|
||||
.alias("v", "verbose")
|
||||
.alias("b", "beautify")
|
||||
.alias("m", "mangle")
|
||||
.alias("c", "compress")
|
||||
.alias("d", "define")
|
||||
.alias("r", "reserved")
|
||||
.alias("V", "version")
|
||||
|
||||
.string("source-map")
|
||||
.string("source-map-root")
|
||||
.string("source-map-url")
|
||||
.string("b")
|
||||
.string("m")
|
||||
.string("c")
|
||||
.string("d")
|
||||
.string("comments")
|
||||
.string("wrap")
|
||||
.boolean("export-all")
|
||||
.boolean("self")
|
||||
.boolean("v")
|
||||
.boolean("stats")
|
||||
.boolean("acorn")
|
||||
.boolean("spidermonkey")
|
||||
.boolean("lint")
|
||||
.boolean("V")
|
||||
|
||||
.wrap(80)
|
||||
|
||||
.argv
|
||||
;
|
||||
|
||||
normalize(ARGS);
|
||||
|
||||
if (ARGS.version || ARGS.V) {
|
||||
var json = require("../package.json");
|
||||
sys.puts(json.name + ' ' + json.version);
|
||||
process.exit(0);
|
||||
}
|
||||
|
||||
if (ARGS.ast_help) {
|
||||
var desc = UglifyJS.describe_ast();
|
||||
sys.puts(typeof desc == "string" ? desc : JSON.stringify(desc, null, 2));
|
||||
process.exit(0);
|
||||
}
|
||||
|
||||
if (ARGS.h || ARGS.help) {
|
||||
sys.puts(optimist.help());
|
||||
process.exit(0);
|
||||
}
|
||||
|
||||
if (ARGS.acorn) {
|
||||
acorn = require("acorn");
|
||||
}
|
||||
|
||||
var COMPRESS = getOptions("c", true);
|
||||
var MANGLE = getOptions("m", true);
|
||||
var BEAUTIFY = getOptions("b", true);
|
||||
|
||||
if (COMPRESS && ARGS.d) {
|
||||
COMPRESS.global_defs = getOptions("d");
|
||||
}
|
||||
|
||||
if (MANGLE && ARGS.r) {
|
||||
    MANGLE.except = ARGS.r.replace(/^\s+|\s+$/g, "").split(/\s*,+\s*/);
|
||||
}
|
||||
|
||||
var OUTPUT_OPTIONS = {
|
||||
beautify: BEAUTIFY ? true : false
|
||||
};
|
||||
|
||||
if (BEAUTIFY)
|
||||
UglifyJS.merge(OUTPUT_OPTIONS, BEAUTIFY);
|
||||
|
||||
if (ARGS.comments) {
|
||||
if (/^\//.test(ARGS.comments)) {
|
||||
OUTPUT_OPTIONS.comments = new Function("return(" + ARGS.comments + ")")();
|
||||
} else if (ARGS.comments == "all") {
|
||||
OUTPUT_OPTIONS.comments = true;
|
||||
} else {
|
||||
OUTPUT_OPTIONS.comments = function(node, comment) {
|
||||
var text = comment.value;
|
||||
var type = comment.type;
|
||||
if (type == "comment2") {
|
||||
// multiline comment
|
||||
return /@preserve|@license|@cc_on/i.test(text);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var files = ARGS._.slice();
|
||||
|
||||
if (ARGS.self) {
|
||||
if (files.length > 0) {
|
||||
sys.error("WARN: Ignoring input files since --self was passed");
|
||||
}
|
||||
files = UglifyJS.FILES;
|
||||
if (!ARGS.wrap) ARGS.wrap = "UglifyJS";
|
||||
ARGS.export_all = true;
|
||||
}
|
||||
|
||||
var ORIG_MAP = ARGS.in_source_map;
|
||||
|
||||
if (ORIG_MAP) {
|
||||
ORIG_MAP = JSON.parse(fs.readFileSync(ORIG_MAP));
|
||||
if (files.length == 0) {
|
||||
sys.error("INFO: Using file from the input source map: " + ORIG_MAP.file);
|
||||
files = [ ORIG_MAP.file ];
|
||||
}
|
||||
if (ARGS.source_map_root == null) {
|
||||
ARGS.source_map_root = ORIG_MAP.sourceRoot;
|
||||
}
|
||||
}
|
||||
|
||||
if (files.length == 0) {
|
||||
files = [ "-" ];
|
||||
}
|
||||
|
||||
if (files.indexOf("-") >= 0 && ARGS.source_map) {
|
||||
sys.error("ERROR: Source map doesn't work with input from STDIN");
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
if (files.filter(function(el){ return el == "-" }).length > 1) {
|
||||
sys.error("ERROR: Can read a single file from STDIN (two or more dashes specified)");
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
var STATS = {};
|
||||
var OUTPUT_FILE = ARGS.o;
|
||||
var TOPLEVEL = null;
|
||||
|
||||
var SOURCE_MAP = ARGS.source_map ? UglifyJS.SourceMap({
|
||||
file: OUTPUT_FILE,
|
||||
root: ARGS.source_map_root,
|
||||
orig: ORIG_MAP,
|
||||
}) : null;
|
||||
|
||||
OUTPUT_OPTIONS.source_map = SOURCE_MAP;
|
||||
|
||||
try {
|
||||
var output = UglifyJS.OutputStream(OUTPUT_OPTIONS);
|
||||
var compressor = COMPRESS && UglifyJS.Compressor(COMPRESS);
|
||||
} catch(ex) {
|
||||
if (ex instanceof UglifyJS.DefaultsError) {
|
||||
sys.error(ex.msg);
|
||||
sys.error("Supported options:");
|
||||
sys.error(sys.inspect(ex.defs));
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
files.forEach(function(file) {
|
||||
var code = read_whole_file(file);
|
||||
if (ARGS.p != null) {
|
||||
file = file.replace(/^\/+/, "").split(/\/+/).slice(ARGS.p).join("/");
|
||||
}
|
||||
time_it("parse", function(){
|
||||
if (ARGS.spidermonkey) {
|
||||
var program = JSON.parse(code);
|
||||
if (!TOPLEVEL) TOPLEVEL = program;
|
||||
else TOPLEVEL.body = TOPLEVEL.body.concat(program.body);
|
||||
}
|
||||
else if (ARGS.acorn) {
|
||||
TOPLEVEL = acorn.parse(code, {
|
||||
locations : true,
|
||||
trackComments : true,
|
||||
sourceFile : file,
|
||||
program : TOPLEVEL
|
||||
});
|
||||
}
|
||||
else {
|
||||
TOPLEVEL = UglifyJS.parse(code, {
|
||||
filename: file,
|
||||
toplevel: TOPLEVEL
|
||||
});
|
||||
};
|
||||
});
|
||||
});
|
||||
|
||||
if (ARGS.acorn || ARGS.spidermonkey) time_it("convert_ast", function(){
|
||||
TOPLEVEL = UglifyJS.AST_Node.from_mozilla_ast(TOPLEVEL);
|
||||
});
|
||||
|
||||
if (ARGS.wrap) {
|
||||
TOPLEVEL = TOPLEVEL.wrap_commonjs(ARGS.wrap, ARGS.export_all);
|
||||
}
|
||||
|
||||
var SCOPE_IS_NEEDED = COMPRESS || MANGLE || ARGS.lint;
|
||||
|
||||
if (SCOPE_IS_NEEDED) {
|
||||
time_it("scope", function(){
|
||||
TOPLEVEL.figure_out_scope();
|
||||
if (ARGS.lint) {
|
||||
TOPLEVEL.scope_warnings();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
if (COMPRESS) {
|
||||
time_it("squeeze", function(){
|
||||
TOPLEVEL = TOPLEVEL.transform(compressor);
|
||||
});
|
||||
}
|
||||
|
||||
if (SCOPE_IS_NEEDED) {
|
||||
time_it("scope", function(){
|
||||
TOPLEVEL.figure_out_scope();
|
||||
if (MANGLE) {
|
||||
TOPLEVEL.compute_char_frequency(MANGLE);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
if (MANGLE) time_it("mangle", function(){
|
||||
TOPLEVEL.mangle_names(MANGLE);
|
||||
});
|
||||
time_it("generate", function(){
|
||||
TOPLEVEL.print(output);
|
||||
});
|
||||
|
||||
output = output.get();
|
||||
|
||||
if (SOURCE_MAP) {
|
||||
fs.writeFileSync(ARGS.source_map, SOURCE_MAP, "utf8");
|
||||
output += "\n/*\n//@ sourceMappingURL=" + (ARGS.source_map_url || ARGS.source_map) + "\n*/";
|
||||
}
|
||||
|
||||
if (OUTPUT_FILE) {
|
||||
fs.writeFileSync(OUTPUT_FILE, output, "utf8");
|
||||
} else {
|
||||
sys.print(output);
|
||||
sys.error("\n");
|
||||
}
|
||||
|
||||
if (ARGS.stats) {
|
||||
sys.error(UglifyJS.string_template("Timing information (compressed {count} files):", {
|
||||
count: files.length
|
||||
}));
|
||||
for (var i in STATS) if (STATS.hasOwnProperty(i)) {
|
||||
sys.error(UglifyJS.string_template("- {name}: {time}s", {
|
||||
name: i,
|
||||
time: (STATS[i] / 1000).toFixed(3)
|
||||
}));
|
||||
}
|
||||
}
|
||||
|
||||
/* -----[ functions ]----- */
|
||||
|
||||
function normalize(o) {
|
||||
for (var i in o) if (o.hasOwnProperty(i) && /-/.test(i)) {
|
||||
o[i.replace(/-/g, "_")] = o[i];
|
||||
delete o[i];
|
||||
}
|
||||
}
|
||||
|
||||
function getOptions(x, constants) {
|
||||
x = ARGS[x];
|
||||
if (!x) return null;
|
||||
var ret = {};
|
||||
if (x !== true) {
|
||||
var ast;
|
||||
try {
|
||||
ast = UglifyJS.parse(x);
|
||||
} catch(ex) {
|
||||
if (ex instanceof UglifyJS.JS_Parse_Error) {
|
||||
sys.error("Error parsing arguments in: " + x);
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
ast.walk(new UglifyJS.TreeWalker(function(node){
|
||||
if (node instanceof UglifyJS.AST_Toplevel) return; // descend
|
||||
if (node instanceof UglifyJS.AST_SimpleStatement) return; // descend
|
||||
if (node instanceof UglifyJS.AST_Seq) return; // descend
|
||||
if (node instanceof UglifyJS.AST_Assign) {
|
||||
var name = node.left.print_to_string({ beautify: false }).replace(/-/g, "_");
|
||||
var value = node.right;
|
||||
if (constants)
|
||||
value = new Function("return (" + value.print_to_string() + ")")();
|
||||
ret[name] = value;
|
||||
return true; // no descend
|
||||
}
|
||||
sys.error(node.TYPE)
|
||||
sys.error("Error parsing arguments in: " + x);
|
||||
process.exit(1);
|
||||
}));
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
|
||||
function read_whole_file(filename) {
|
||||
if (filename == "-") {
|
||||
// XXX: this sucks. How does one read the whole STDIN
|
||||
// synchronously?
|
||||
filename = "/dev/stdin";
|
||||
}
|
||||
try {
|
||||
return fs.readFileSync(filename, "utf8");
|
||||
} catch(ex) {
|
||||
sys.error("ERROR: can't read file: " + filename);
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
function time_it(name, cont) {
|
||||
var t1 = new Date().getTime();
|
||||
var ret = cont();
|
||||
if (ARGS.stats) {
|
||||
var spent = new Date().getTime() - t1;
|
||||
if (STATS[name]) STATS[name] += spent;
|
||||
else STATS[name] = spent;
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
964
node_modules/transformers/node_modules/uglify-js/lib/ast.js
generated
vendored
Normal file
@@ -0,0 +1,964 @@
|
||||
/***********************************************************************
|
||||
|
||||
A JavaScript tokenizer / parser / beautifier / compressor.
|
||||
https://github.com/mishoo/UglifyJS2
|
||||
|
||||
-------------------------------- (C) ---------------------------------
|
||||
|
||||
Author: Mihai Bazon
|
||||
<mihai.bazon@gmail.com>
|
||||
http://mihai.bazon.net/blog
|
||||
|
||||
Distributed under the BSD license:
|
||||
|
||||
Copyright 2012 (c) Mihai Bazon <mihai.bazon@gmail.com>
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions
|
||||
are met:
|
||||
|
||||
* Redistributions of source code must retain the above
|
||||
copyright notice, this list of conditions and the following
|
||||
disclaimer.
|
||||
|
||||
* Redistributions in binary form must reproduce the above
|
||||
copyright notice, this list of conditions and the following
|
||||
disclaimer in the documentation and/or other materials
|
||||
provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER “AS IS” AND ANY
|
||||
EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
||||
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER BE
|
||||
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
|
||||
OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
||||
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
|
||||
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
|
||||
TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
|
||||
THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
|
||||
SUCH DAMAGE.
|
||||
|
||||
***********************************************************************/
|
||||
|
||||
"use strict";
|
||||
|
||||
function DEFNODE(type, props, methods, base) {
|
||||
if (arguments.length < 4) base = AST_Node;
|
||||
if (!props) props = [];
|
||||
else props = props.split(/\s+/);
|
||||
var self_props = props;
|
||||
if (base && base.PROPS)
|
||||
props = props.concat(base.PROPS);
|
||||
var code = "return function AST_" + type + "(props){ if (props) { ";
|
||||
for (var i = props.length; --i >= 0;) {
|
||||
code += "this." + props[i] + " = props." + props[i] + ";";
|
||||
}
|
||||
var proto = base && new base;
|
||||
if (proto && proto.initialize || (methods && methods.initialize))
|
||||
code += "this.initialize();";
|
||||
code += "}}";
|
||||
var ctor = new Function(code)();
|
||||
if (proto) {
|
||||
ctor.prototype = proto;
|
||||
ctor.BASE = base;
|
||||
}
|
||||
if (base) base.SUBCLASSES.push(ctor);
|
||||
ctor.prototype.CTOR = ctor;
|
||||
ctor.PROPS = props || null;
|
||||
ctor.SELF_PROPS = self_props;
|
||||
ctor.SUBCLASSES = [];
|
||||
if (type) {
|
||||
ctor.prototype.TYPE = ctor.TYPE = type;
|
||||
}
|
||||
if (methods) for (i in methods) if (methods.hasOwnProperty(i)) {
|
||||
if (/^\$/.test(i)) {
|
||||
ctor[i.substr(1)] = methods[i];
|
||||
} else {
|
||||
ctor.prototype[i] = methods[i];
|
||||
}
|
||||
}
|
||||
ctor.DEFMETHOD = function(name, method) {
|
||||
this.prototype[name] = method;
|
||||
};
|
||||
return ctor;
|
||||
};
|
||||
|
||||
var AST_Token = DEFNODE("Token", "type value line col pos endpos nlb comments_before file", {
|
||||
}, null);
|
||||
|
||||
var AST_Node = DEFNODE("Node", "start end", {
|
||||
clone: function() {
|
||||
return new this.CTOR(this);
|
||||
},
|
||||
$documentation: "Base class of all AST nodes",
|
||||
$propdoc: {
|
||||
start: "[AST_Token] The first token of this node",
|
||||
end: "[AST_Token] The last token of this node"
|
||||
},
|
||||
_walk: function(visitor) {
|
||||
return visitor._visit(this);
|
||||
},
|
||||
walk: function(visitor) {
|
||||
return this._walk(visitor); // not sure the indirection will be any help
|
||||
}
|
||||
}, null);
|
||||
|
||||
AST_Node.warn_function = null;
|
||||
AST_Node.warn = function(txt, props) {
|
||||
if (AST_Node.warn_function)
|
||||
AST_Node.warn_function(string_template(txt, props));
|
||||
};
|
||||
|
||||
/* -----[ statements ]----- */
|
||||
|
||||
var AST_Statement = DEFNODE("Statement", null, {
|
||||
$documentation: "Base class of all statements",
|
||||
});
|
||||
|
||||
var AST_Debugger = DEFNODE("Debugger", null, {
|
||||
$documentation: "Represents a debugger statement",
|
||||
}, AST_Statement);
|
||||
|
||||
var AST_Directive = DEFNODE("Directive", "value scope", {
|
||||
$documentation: "Represents a directive, like \"use strict\";",
|
||||
$propdoc: {
|
||||
value: "[string] The value of this directive as a plain string (it's not an AST_String!)",
|
||||
scope: "[AST_Scope/S] The scope that this directive affects"
|
||||
},
|
||||
}, AST_Statement);
|
||||
|
||||
var AST_SimpleStatement = DEFNODE("SimpleStatement", "body", {
|
||||
$documentation: "A statement consisting of an expression, i.e. a = 1 + 2",
|
||||
$propdoc: {
|
||||
body: "[AST_Node] an expression node (should not be instanceof AST_Statement)"
|
||||
},
|
||||
_walk: function(visitor) {
|
||||
return visitor._visit(this, function(){
|
||||
this.body._walk(visitor);
|
||||
});
|
||||
}
|
||||
}, AST_Statement);
|
||||
|
||||
function walk_body(node, visitor) {
|
||||
if (node.body instanceof AST_Statement) {
|
||||
node.body._walk(visitor);
|
||||
}
|
||||
else node.body.forEach(function(stat){
|
||||
stat._walk(visitor);
|
||||
});
|
||||
};
|
||||
|
||||
var AST_Block = DEFNODE("Block", "body", {
|
||||
$documentation: "A body of statements (usually bracketed)",
|
||||
$propdoc: {
|
||||
body: "[AST_Statement*] an array of statements"
|
||||
},
|
||||
_walk: function(visitor) {
|
||||
return visitor._visit(this, function(){
|
||||
walk_body(this, visitor);
|
||||
});
|
||||
}
|
||||
}, AST_Statement);
|
||||
|
||||
var AST_BlockStatement = DEFNODE("BlockStatement", null, {
|
||||
$documentation: "A block statement",
|
||||
}, AST_Block);
|
||||
|
||||
var AST_EmptyStatement = DEFNODE("EmptyStatement", null, {
|
||||
$documentation: "The empty statement (empty block or simply a semicolon)",
|
||||
_walk: function(visitor) {
|
||||
return visitor._visit(this);
|
||||
}
|
||||
}, AST_Statement);
|
||||
|
||||
var AST_StatementWithBody = DEFNODE("StatementWithBody", "body", {
|
||||
$documentation: "Base class for all statements that contain one nested body: `For`, `ForIn`, `Do`, `While`, `With`",
|
||||
$propdoc: {
|
||||
body: "[AST_Statement] the body; this should always be present, even if it's an AST_EmptyStatement"
|
||||
},
|
||||
_walk: function(visitor) {
|
||||
return visitor._visit(this, function(){
|
||||
this.body._walk(visitor);
|
||||
});
|
||||
}
|
||||
}, AST_Statement);
|
||||
|
||||
var AST_LabeledStatement = DEFNODE("LabeledStatement", "label", {
|
||||
$documentation: "Statement with a label",
|
||||
$propdoc: {
|
||||
label: "[AST_Label] a label definition"
|
||||
},
|
||||
_walk: function(visitor) {
|
||||
return visitor._visit(this, function(){
|
||||
this.label._walk(visitor);
|
||||
this.body._walk(visitor);
|
||||
});
|
||||
}
|
||||
}, AST_StatementWithBody);
|
||||
|
||||
var AST_DWLoop = DEFNODE("DWLoop", "condition", {
|
||||
$documentation: "Base class for do/while statements",
|
||||
$propdoc: {
|
||||
condition: "[AST_Node] the loop condition. Should not be instanceof AST_Statement"
|
||||
},
|
||||
_walk: function(visitor) {
|
||||
return visitor._visit(this, function(){
|
||||
this.condition._walk(visitor);
|
||||
this.body._walk(visitor);
|
||||
});
|
||||
}
|
||||
}, AST_StatementWithBody);
|
||||
|
||||
var AST_Do = DEFNODE("Do", null, {
|
||||
$documentation: "A `do` statement",
|
||||
}, AST_DWLoop);
|
||||
|
||||
var AST_While = DEFNODE("While", null, {
|
||||
$documentation: "A `while` statement",
|
||||
}, AST_DWLoop);
|
||||
|
||||
var AST_For = DEFNODE("For", "init condition step", {
|
||||
$documentation: "A `for` statement",
|
||||
$propdoc: {
|
||||
init: "[AST_Node?] the `for` initialization code, or null if empty",
|
||||
condition: "[AST_Node?] the `for` termination clause, or null if empty",
|
||||
step: "[AST_Node?] the `for` update clause, or null if empty"
|
||||
},
|
||||
_walk: function(visitor) {
|
||||
return visitor._visit(this, function(){
|
||||
if (this.init) this.init._walk(visitor);
|
||||
if (this.condition) this.condition._walk(visitor);
|
||||
if (this.step) this.step._walk(visitor);
|
||||
this.body._walk(visitor);
|
||||
});
|
||||
}
|
||||
}, AST_StatementWithBody);
|
||||
|
||||
var AST_ForIn = DEFNODE("ForIn", "init name object", {
|
||||
$documentation: "A `for ... in` statement",
|
||||
$propdoc: {
|
||||
init: "[AST_Node] the `for/in` initialization code",
|
||||
name: "[AST_SymbolRef?] the loop variable, only if `init` is AST_Var",
|
||||
object: "[AST_Node] the object that we're looping through"
|
||||
},
|
||||
_walk: function(visitor) {
|
||||
return visitor._visit(this, function(){
|
||||
this.init._walk(visitor);
|
||||
this.object._walk(visitor);
|
||||
this.body._walk(visitor);
|
||||
});
|
||||
}
|
||||
}, AST_StatementWithBody);
|
||||
|
||||
var AST_With = DEFNODE("With", "expression", {
|
||||
$documentation: "A `with` statement",
|
||||
$propdoc: {
|
||||
expression: "[AST_Node] the `with` expression"
|
||||
},
|
||||
_walk: function(visitor) {
|
||||
return visitor._visit(this, function(){
|
||||
this.expression._walk(visitor);
|
||||
this.body._walk(visitor);
|
||||
});
|
||||
}
|
||||
}, AST_StatementWithBody);
|
||||
|
||||
/* -----[ scope and functions ]----- */
|
||||
|
||||
var AST_Scope = DEFNODE("Scope", "directives variables functions uses_with uses_eval parent_scope enclosed cname", {
|
||||
$documentation: "Base class for all statements introducing a lexical scope",
|
||||
$propdoc: {
|
||||
directives: "[string*/S] an array of directives declared in this scope",
|
||||
variables: "[Object/S] a map of name -> SymbolDef for all variables/functions defined in this scope",
|
||||
functions: "[Object/S] like `variables`, but only lists function declarations",
|
||||
uses_with: "[boolean/S] tells whether this scope uses the `with` statement",
|
||||
uses_eval: "[boolean/S] tells whether this scope contains a direct call to the global `eval`",
|
||||
parent_scope: "[AST_Scope?/S] link to the parent scope",
|
||||
enclosed: "[SymbolDef*/S] a list of all symbol definitions that are accessed from this scope or any subscopes",
|
||||
cname: "[integer/S] current index for mangling variables (used internally by the mangler)",
|
||||
},
|
||||
}, AST_Block);
|
||||
|
||||
var AST_Toplevel = DEFNODE("Toplevel", "globals", {
|
||||
$documentation: "The toplevel scope",
|
||||
$propdoc: {
|
||||
globals: "[Object/S] a map of name -> SymbolDef for all undeclared names",
|
||||
},
|
||||
wrap_commonjs: function(name, export_all) {
|
||||
var self = this;
|
||||
var to_export = [];
|
||||
if (export_all) {
|
||||
self.figure_out_scope();
|
||||
self.walk(new TreeWalker(function(node){
|
||||
if (node instanceof AST_SymbolDeclaration && node.definition().global) {
|
||||
if (!find_if(function(n){ return n.name == node.name }, to_export))
|
||||
to_export.push(node);
|
||||
}
|
||||
}));
|
||||
}
|
||||
var wrapped_tl = "(function(exports, global){ global['" + name + "'] = exports; '$ORIG'; '$EXPORTS'; }({}, (function(){return this}())))";
|
||||
wrapped_tl = parse(wrapped_tl);
|
||||
wrapped_tl = wrapped_tl.transform(new TreeTransformer(function before(node){
|
||||
if (node instanceof AST_SimpleStatement) {
|
||||
node = node.body;
|
||||
if (node instanceof AST_String) switch (node.getValue()) {
|
||||
case "$ORIG":
|
||||
return MAP.splice(self.body);
|
||||
case "$EXPORTS":
|
||||
var body = [];
|
||||
to_export.forEach(function(sym){
|
||||
body.push(new AST_SimpleStatement({
|
||||
body: new AST_Assign({
|
||||
left: new AST_Sub({
|
||||
expression: new AST_SymbolRef({ name: "exports" }),
|
||||
property: new AST_String({ value: sym.name }),
|
||||
}),
|
||||
operator: "=",
|
||||
right: new AST_SymbolRef(sym),
|
||||
}),
|
||||
}));
|
||||
});
|
||||
return MAP.splice(body);
|
||||
}
|
||||
}
|
||||
}));
|
||||
return wrapped_tl;
|
||||
}
|
||||
}, AST_Scope);
|
||||
|
||||
var AST_Lambda = DEFNODE("Lambda", "name argnames uses_arguments", {
|
||||
$documentation: "Base class for functions",
|
||||
$propdoc: {
|
||||
name: "[AST_SymbolDeclaration?] the name of this function",
|
||||
argnames: "[AST_SymbolFunarg*] array of function arguments",
|
||||
uses_arguments: "[boolean/S] tells whether this function accesses the arguments array"
|
||||
},
|
||||
_walk: function(visitor) {
|
||||
return visitor._visit(this, function(){
|
||||
if (this.name) this.name._walk(visitor);
|
||||
this.argnames.forEach(function(arg){
|
||||
arg._walk(visitor);
|
||||
});
|
||||
walk_body(this, visitor);
|
||||
});
|
||||
}
|
||||
}, AST_Scope);
|
||||
|
||||
var AST_Accessor = DEFNODE("Accessor", null, {
|
||||
$documentation: "A setter/getter function"
|
||||
}, AST_Lambda);
|
||||
|
||||
var AST_Function = DEFNODE("Function", null, {
|
||||
$documentation: "A function expression"
|
||||
}, AST_Lambda);
|
||||
|
||||
var AST_Defun = DEFNODE("Defun", null, {
|
||||
$documentation: "A function definition"
|
||||
}, AST_Lambda);
|
||||
|
||||
/* -----[ JUMPS ]----- */
|
||||
|
||||
var AST_Jump = DEFNODE("Jump", null, {
|
||||
$documentation: "Base class for “jumps” (for now that's `return`, `throw`, `break` and `continue`)"
|
||||
}, AST_Statement);
|
||||
|
||||
var AST_Exit = DEFNODE("Exit", "value", {
|
||||
$documentation: "Base class for “exits” (`return` and `throw`)",
|
||||
$propdoc: {
|
||||
value: "[AST_Node?] the value returned or thrown by this statement; could be null for AST_Return"
|
||||
},
|
||||
_walk: function(visitor) {
|
||||
return visitor._visit(this, this.value && function(){
|
||||
this.value._walk(visitor);
|
||||
});
|
||||
}
|
||||
}, AST_Jump);
|
||||
|
||||
var AST_Return = DEFNODE("Return", null, {
|
||||
$documentation: "A `return` statement"
|
||||
}, AST_Exit);
|
||||
|
||||
var AST_Throw = DEFNODE("Throw", null, {
|
||||
$documentation: "A `throw` statement"
|
||||
}, AST_Exit);
|
||||
|
||||
var AST_LoopControl = DEFNODE("LoopControl", "label", {
|
||||
$documentation: "Base class for loop control statements (`break` and `continue`)",
|
||||
$propdoc: {
|
||||
label: "[AST_LabelRef?] the label, or null if none",
|
||||
},
|
||||
_walk: function(visitor) {
|
||||
return visitor._visit(this, this.label && function(){
|
||||
this.label._walk(visitor);
|
||||
});
|
||||
}
|
||||
}, AST_Jump);
|
||||
|
||||
var AST_Break = DEFNODE("Break", null, {
|
||||
$documentation: "A `break` statement"
|
||||
}, AST_LoopControl);
|
||||
|
||||
var AST_Continue = DEFNODE("Continue", null, {
|
||||
$documentation: "A `continue` statement"
|
||||
}, AST_LoopControl);
|
||||
|
||||
/* -----[ IF ]----- */
|
||||
|
||||
var AST_If = DEFNODE("If", "condition alternative", {
|
||||
    $documentation: "An `if` statement",
|
||||
$propdoc: {
|
||||
condition: "[AST_Node] the `if` condition",
|
||||
alternative: "[AST_Statement?] the `else` part, or null if not present"
|
||||
},
|
||||
_walk: function(visitor) {
|
||||
return visitor._visit(this, function(){
|
||||
this.condition._walk(visitor);
|
||||
this.body._walk(visitor);
|
||||
if (this.alternative) this.alternative._walk(visitor);
|
||||
});
|
||||
}
|
||||
}, AST_StatementWithBody);
|
||||
|
||||
/* -----[ SWITCH ]----- */
|
||||
|
||||
var AST_Switch = DEFNODE("Switch", "expression", {
|
||||
$documentation: "A `switch` statement",
|
||||
$propdoc: {
|
||||
expression: "[AST_Node] the `switch` “discriminant”"
|
||||
},
|
||||
_walk: function(visitor) {
|
||||
return visitor._visit(this, function(){
|
||||
this.expression._walk(visitor);
|
||||
walk_body(this, visitor);
|
||||
});
|
||||
}
|
||||
}, AST_Block);
|
||||
|
||||
var AST_SwitchBranch = DEFNODE("SwitchBranch", null, {
|
||||
$documentation: "Base class for `switch` branches",
|
||||
}, AST_Block);
|
||||
|
||||
var AST_Default = DEFNODE("Default", null, {
|
||||
$documentation: "A `default` switch branch",
|
||||
}, AST_SwitchBranch);
|
||||
|
||||
var AST_Case = DEFNODE("Case", "expression", {
|
||||
$documentation: "A `case` switch branch",
|
||||
$propdoc: {
|
||||
expression: "[AST_Node] the `case` expression"
|
||||
},
|
||||
_walk: function(visitor) {
|
||||
return visitor._visit(this, function(){
|
||||
this.expression._walk(visitor);
|
||||
walk_body(this, visitor);
|
||||
});
|
||||
}
|
||||
}, AST_SwitchBranch);
|
||||
|
||||
/* -----[ EXCEPTIONS ]----- */
|
||||
|
||||
var AST_Try = DEFNODE("Try", "bcatch bfinally", {
|
||||
$documentation: "A `try` statement",
|
||||
$propdoc: {
|
||||
bcatch: "[AST_Catch?] the catch block, or null if not present",
|
||||
bfinally: "[AST_Finally?] the finally block, or null if not present"
|
||||
},
|
||||
_walk: function(visitor) {
|
||||
return visitor._visit(this, function(){
|
||||
walk_body(this, visitor);
|
||||
if (this.bcatch) this.bcatch._walk(visitor);
|
||||
if (this.bfinally) this.bfinally._walk(visitor);
|
||||
});
|
||||
}
|
||||
}, AST_Block);
|
||||
|
||||
// XXX: this is wrong according to ECMA-262 (12.4). the catch block
|
||||
// should introduce another scope, as the argname should be visible
|
||||
// only inside the catch block. However, doing it this way because of
|
||||
// IE which simply introduces the name in the surrounding scope. If
|
||||
// we ever want to fix this then AST_Catch should inherit from
|
||||
// AST_Scope.
|
||||
var AST_Catch = DEFNODE("Catch", "argname", {
|
||||
$documentation: "A `catch` node; only makes sense as part of a `try` statement",
|
||||
$propdoc: {
|
||||
argname: "[AST_SymbolCatch] symbol for the exception"
|
||||
},
|
||||
_walk: function(visitor) {
|
||||
return visitor._visit(this, function(){
|
||||
this.argname._walk(visitor);
|
||||
walk_body(this, visitor);
|
||||
});
|
||||
}
|
||||
}, AST_Block);
|
||||
|
||||
var AST_Finally = DEFNODE("Finally", null, {
|
||||
$documentation: "A `finally` node; only makes sense as part of a `try` statement"
|
||||
}, AST_Block);
|
||||
|
||||
/* -----[ VAR/CONST ]----- */
|
||||
|
||||
var AST_Definitions = DEFNODE("Definitions", "definitions", {
|
||||
$documentation: "Base class for `var` or `const` nodes (variable declarations/initializations)",
|
||||
$propdoc: {
|
||||
definitions: "[AST_VarDef*] array of variable definitions"
|
||||
},
|
||||
_walk: function(visitor) {
|
||||
return visitor._visit(this, function(){
|
||||
this.definitions.forEach(function(def){
|
||||
def._walk(visitor);
|
||||
});
|
||||
});
|
||||
}
|
||||
}, AST_Statement);
|
||||
|
||||
var AST_Var = DEFNODE("Var", null, {
|
||||
$documentation: "A `var` statement"
|
||||
}, AST_Definitions);
|
||||
|
||||
var AST_Const = DEFNODE("Const", null, {
|
||||
$documentation: "A `const` statement"
|
||||
}, AST_Definitions);
|
||||
|
||||
var AST_VarDef = DEFNODE("VarDef", "name value", {
|
||||
$documentation: "A variable declaration; only appears in a AST_Definitions node",
|
||||
$propdoc: {
|
||||
        name: "[AST_SymbolVar|AST_SymbolConst] name of the variable",
        value: "[AST_Node?] initializer, or null if there's no initializer"
    },
|
||||
_walk: function(visitor) {
|
||||
return visitor._visit(this, function(){
|
||||
this.name._walk(visitor);
|
||||
if (this.value) this.value._walk(visitor);
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
/* -----[ OTHER ]----- */
|
||||
|
||||
var AST_Call = DEFNODE("Call", "expression args", {
|
||||
$documentation: "A function call expression",
|
||||
$propdoc: {
|
||||
expression: "[AST_Node] expression to invoke as function",
|
||||
args: "[AST_Node*] array of arguments"
|
||||
},
|
||||
_walk: function(visitor) {
|
||||
return visitor._visit(this, function(){
|
||||
this.expression._walk(visitor);
|
||||
this.args.forEach(function(arg){
|
||||
arg._walk(visitor);
|
||||
});
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
var AST_New = DEFNODE("New", null, {
|
||||
$documentation: "An object instantiation. Derives from a function call since it has exactly the same properties"
|
||||
}, AST_Call);
|
||||
|
||||
var AST_Seq = DEFNODE("Seq", "car cdr", {
    $documentation: "A sequence expression (two comma-separated expressions)",
    $propdoc: {
        car: "[AST_Node] first element in sequence",
        cdr: "[AST_Node] second element in sequence"
    },
    $cons: function(x, y) {
        var seq = new AST_Seq(x);
        seq.car = x;
        seq.cdr = y;
        return seq;
    },
    $from_array: function(array) {
        if (array.length == 0) return null;
        if (array.length == 1) return array[0].clone();
        var list = null;
        for (var i = array.length; --i >= 0;) {
            list = AST_Seq.cons(array[i], list);
        }
        var p = list;
        while (p) {
            if (p.cdr && !p.cdr.cdr) {
                p.cdr = p.cdr.car;
                break;
            }
            p = p.cdr;
        }
        return list;
    },
    to_array: function() {
        var p = this, a = [];
        while (p) {
            a.push(p.car);
            if (p.cdr && !(p.cdr instanceof AST_Seq)) {
                a.push(p.cdr);
                break;
            }
            p = p.cdr;
        }
        return a;
    },
    add: function(node) {
        var p = this;
        while (p) {
            if (!(p.cdr instanceof AST_Seq)) {
                var cell = AST_Seq.cons(p.cdr, node);
                return p.cdr = cell;
            }
            p = p.cdr;
        }
    },
    _walk: function(visitor) {
        return visitor._visit(this, function(){
            this.car._walk(visitor);
            if (this.cdr) this.cdr._walk(visitor);
        });
    }
});
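// Illustrative sketch, not part of the library source: how the cons-cell
// representation above behaves. AST_Seq.from_array collapses the last cell so
// the final `cdr` holds a plain node rather than another AST_Seq, which is why
// to_array() checks `instanceof AST_Seq` before following the chain.
// Assuming `a`, `b`, `c`, `d` are already-built AST_Node instances:
//
//     var seq = AST_Seq.from_array([ a, b, c ]);
//     // seq.car === a, seq.cdr instanceof AST_Seq
//     // seq.cdr.car === b, seq.cdr.cdr === c  (the tail is not wrapped)
//     seq.to_array();  // => [ a, b, c ]
//     seq.add(d);      // replaces the tail with a new cell holding (c, d)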
|
||||
|
||||
var AST_PropAccess = DEFNODE("PropAccess", "expression property", {
|
||||
$documentation: "Base class for property access expressions, i.e. `a.foo` or `a[\"foo\"]`",
|
||||
$propdoc: {
|
||||
expression: "[AST_Node] the “container” expression",
|
||||
property: "[AST_Node|string] the property to access. For AST_Dot this is always a plain string, while for AST_Sub it's an arbitrary AST_Node"
|
||||
}
|
||||
});
|
||||
|
||||
var AST_Dot = DEFNODE("Dot", null, {
|
||||
$documentation: "A dotted property access expression",
|
||||
_walk: function(visitor) {
|
||||
return visitor._visit(this, function(){
|
||||
this.expression._walk(visitor);
|
||||
});
|
||||
}
|
||||
}, AST_PropAccess);
|
||||
|
||||
var AST_Sub = DEFNODE("Sub", null, {
|
||||
$documentation: "Index-style property access, i.e. `a[\"foo\"]`",
|
||||
_walk: function(visitor) {
|
||||
return visitor._visit(this, function(){
|
||||
this.expression._walk(visitor);
|
||||
this.property._walk(visitor);
|
||||
});
|
||||
}
|
||||
}, AST_PropAccess);
|
||||
|
||||
var AST_Unary = DEFNODE("Unary", "operator expression", {
|
||||
$documentation: "Base class for unary expressions",
|
||||
$propdoc: {
|
||||
operator: "[string] the operator",
|
||||
expression: "[AST_Node] expression that this unary operator applies to"
|
||||
},
|
||||
_walk: function(visitor) {
|
||||
return visitor._visit(this, function(){
|
||||
this.expression._walk(visitor);
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
var AST_UnaryPrefix = DEFNODE("UnaryPrefix", null, {
|
||||
$documentation: "Unary prefix expression, i.e. `typeof i` or `++i`"
|
||||
}, AST_Unary);
|
||||
|
||||
var AST_UnaryPostfix = DEFNODE("UnaryPostfix", null, {
|
||||
$documentation: "Unary postfix expression, i.e. `i++`"
|
||||
}, AST_Unary);
|
||||
|
||||
var AST_Binary = DEFNODE("Binary", "left operator right", {
|
||||
$documentation: "Binary expression, i.e. `a + b`",
|
||||
$propdoc: {
|
||||
left: "[AST_Node] left-hand side expression",
|
||||
operator: "[string] the operator",
|
||||
right: "[AST_Node] right-hand side expression"
|
||||
},
|
||||
_walk: function(visitor) {
|
||||
return visitor._visit(this, function(){
|
||||
this.left._walk(visitor);
|
||||
this.right._walk(visitor);
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
var AST_Conditional = DEFNODE("Conditional", "condition consequent alternative", {
|
||||
$documentation: "Conditional expression using the ternary operator, i.e. `a ? b : c`",
|
||||
$propdoc: {
|
||||
condition: "[AST_Node]",
|
||||
consequent: "[AST_Node]",
|
||||
alternative: "[AST_Node]"
|
||||
},
|
||||
_walk: function(visitor) {
|
||||
return visitor._visit(this, function(){
|
||||
this.condition._walk(visitor);
|
||||
this.consequent._walk(visitor);
|
||||
this.alternative._walk(visitor);
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
var AST_Assign = DEFNODE("Assign", null, {
|
||||
$documentation: "An assignment expression — `a = b + 5`",
|
||||
}, AST_Binary);
|
||||
|
||||
/* -----[ LITERALS ]----- */
|
||||
|
||||
var AST_Array = DEFNODE("Array", "elements", {
|
||||
$documentation: "An array literal",
|
||||
$propdoc: {
|
||||
elements: "[AST_Node*] array of elements"
|
||||
},
|
||||
_walk: function(visitor) {
|
||||
return visitor._visit(this, function(){
|
||||
this.elements.forEach(function(el){
|
||||
el._walk(visitor);
|
||||
});
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
var AST_Object = DEFNODE("Object", "properties", {
|
||||
$documentation: "An object literal",
|
||||
$propdoc: {
|
||||
properties: "[AST_ObjectProperty*] array of properties"
|
||||
},
|
||||
_walk: function(visitor) {
|
||||
return visitor._visit(this, function(){
|
||||
this.properties.forEach(function(prop){
|
||||
prop._walk(visitor);
|
||||
});
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
var AST_ObjectProperty = DEFNODE("ObjectProperty", "key value", {
|
||||
$documentation: "Base class for literal object properties",
|
||||
$propdoc: {
|
||||
key: "[string] the property name; it's always a plain string in our AST, no matter if it was a string, number or identifier in original code",
|
||||
value: "[AST_Node] property value. For setters and getters this is an AST_Function."
|
||||
},
|
||||
_walk: function(visitor) {
|
||||
return visitor._visit(this, function(){
|
||||
this.value._walk(visitor);
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
var AST_ObjectKeyVal = DEFNODE("ObjectKeyVal", null, {
|
||||
$documentation: "A key: value object property",
|
||||
}, AST_ObjectProperty);
|
||||
|
||||
var AST_ObjectSetter = DEFNODE("ObjectSetter", null, {
|
||||
$documentation: "An object setter property",
|
||||
}, AST_ObjectProperty);
|
||||
|
||||
var AST_ObjectGetter = DEFNODE("ObjectGetter", null, {
|
||||
$documentation: "An object getter property",
|
||||
}, AST_ObjectProperty);
|
||||
|
||||
var AST_Symbol = DEFNODE("Symbol", "scope name thedef", {
|
||||
$propdoc: {
|
||||
name: "[string] name of this symbol",
|
||||
scope: "[AST_Scope/S] the current scope (not necessarily the definition scope)",
|
||||
thedef: "[SymbolDef/S] the definition of this symbol"
|
||||
},
|
||||
$documentation: "Base class for all symbols",
|
||||
});
|
||||
|
||||
var AST_SymbolAccessor = DEFNODE("SymbolAccessor", null, {
|
||||
$documentation: "The name of a property accessor (setter/getter function)"
|
||||
}, AST_Symbol);
|
||||
|
||||
var AST_SymbolDeclaration = DEFNODE("SymbolDeclaration", "init", {
|
||||
$documentation: "A declaration symbol (symbol in var/const, function name or argument, symbol in catch)",
|
||||
$propdoc: {
|
||||
init: "[AST_Node*/S] array of initializers for this declaration."
|
||||
}
|
||||
}, AST_Symbol);
|
||||
|
||||
var AST_SymbolVar = DEFNODE("SymbolVar", null, {
|
||||
$documentation: "Symbol defining a variable",
|
||||
}, AST_SymbolDeclaration);
|
||||
|
||||
var AST_SymbolConst = DEFNODE("SymbolConst", null, {
|
||||
$documentation: "A constant declaration"
|
||||
}, AST_SymbolDeclaration);
|
||||
|
||||
var AST_SymbolFunarg = DEFNODE("SymbolFunarg", null, {
|
||||
$documentation: "Symbol naming a function argument",
|
||||
}, AST_SymbolVar);
|
||||
|
||||
var AST_SymbolDefun = DEFNODE("SymbolDefun", null, {
|
||||
$documentation: "Symbol defining a function",
|
||||
}, AST_SymbolDeclaration);
|
||||
|
||||
var AST_SymbolLambda = DEFNODE("SymbolLambda", null, {
|
||||
$documentation: "Symbol naming a function expression",
|
||||
}, AST_SymbolDeclaration);
|
||||
|
||||
var AST_SymbolCatch = DEFNODE("SymbolCatch", null, {
|
||||
$documentation: "Symbol naming the exception in catch",
|
||||
}, AST_SymbolDeclaration);
|
||||
|
||||
var AST_Label = DEFNODE("Label", "references", {
|
||||
$documentation: "Symbol naming a label (declaration)",
|
||||
$propdoc: {
|
||||
references: "[AST_LabelRef*] a list of nodes referring to this label"
|
||||
}
|
||||
}, AST_Symbol);
|
||||
|
||||
var AST_SymbolRef = DEFNODE("SymbolRef", null, {
|
||||
$documentation: "Reference to some symbol (not definition/declaration)",
|
||||
}, AST_Symbol);
|
||||
|
||||
var AST_LabelRef = DEFNODE("LabelRef", null, {
|
||||
$documentation: "Reference to a label symbol",
|
||||
}, AST_Symbol);
|
||||
|
||||
var AST_This = DEFNODE("This", null, {
|
||||
$documentation: "The `this` symbol",
|
||||
}, AST_Symbol);
|
||||
|
||||
var AST_Constant = DEFNODE("Constant", null, {
|
||||
$documentation: "Base class for all constants",
|
||||
getValue: function() {
|
||||
return this.value;
|
||||
}
|
||||
});
|
||||
|
||||
var AST_String = DEFNODE("String", "value", {
|
||||
$documentation: "A string literal",
|
||||
$propdoc: {
|
||||
value: "[string] the contents of this string"
|
||||
}
|
||||
}, AST_Constant);
|
||||
|
||||
var AST_Number = DEFNODE("Number", "value", {
|
||||
$documentation: "A number literal",
|
||||
$propdoc: {
|
||||
value: "[number] the numeric value"
|
||||
}
|
||||
}, AST_Constant);
|
||||
|
||||
var AST_RegExp = DEFNODE("RegExp", "value", {
|
||||
$documentation: "A regexp literal",
|
||||
$propdoc: {
|
||||
value: "[RegExp] the actual regexp"
|
||||
}
|
||||
}, AST_Constant);
|
||||
|
||||
var AST_Atom = DEFNODE("Atom", null, {
|
||||
$documentation: "Base class for atoms",
|
||||
}, AST_Constant);
|
||||
|
||||
var AST_Null = DEFNODE("Null", null, {
|
||||
$documentation: "The `null` atom",
|
||||
value: null
|
||||
}, AST_Atom);
|
||||
|
||||
var AST_NaN = DEFNODE("NaN", null, {
|
||||
$documentation: "The impossible value",
|
||||
value: 0/0
|
||||
}, AST_Atom);
|
||||
|
||||
var AST_Undefined = DEFNODE("Undefined", null, {
|
||||
$documentation: "The `undefined` value",
|
||||
value: (function(){}())
|
||||
}, AST_Atom);
|
||||
|
||||
var AST_Hole = DEFNODE("Hole", null, {
|
||||
$documentation: "A hole in an array",
|
||||
value: (function(){}())
|
||||
}, AST_Atom);
|
||||
|
||||
var AST_Infinity = DEFNODE("Infinity", null, {
|
||||
$documentation: "The `Infinity` value",
|
||||
value: 1/0
|
||||
}, AST_Atom);
|
||||
|
||||
var AST_Boolean = DEFNODE("Boolean", null, {
|
||||
$documentation: "Base class for booleans",
|
||||
}, AST_Atom);
|
||||
|
||||
var AST_False = DEFNODE("False", null, {
|
||||
$documentation: "The `false` atom",
|
||||
value: false
|
||||
}, AST_Boolean);
|
||||
|
||||
var AST_True = DEFNODE("True", null, {
|
||||
$documentation: "The `true` atom",
|
||||
value: true
|
||||
}, AST_Boolean);
|
||||
|
||||
/* -----[ TreeWalker ]----- */

function TreeWalker(callback) {
    this.visit = callback;
    this.stack = [];
};
TreeWalker.prototype = {
    _visit: function(node, descend) {
        this.stack.push(node);
        var ret = this.visit(node, descend ? function(){
            descend.call(node);
        } : noop);
        if (!ret && descend) {
            descend.call(node);
        }
        this.stack.pop();
        return ret;
    },
    parent: function(n) {
        return this.stack[this.stack.length - 2 - (n || 0)];
    },
    push: function (node) {
        this.stack.push(node);
    },
    pop: function() {
        return this.stack.pop();
    },
    self: function() {
        return this.stack[this.stack.length - 1];
    },
    find_parent: function(type) {
        var stack = this.stack;
        for (var i = stack.length; --i >= 0;) {
            var x = stack[i];
            if (x instanceof type) return x;
        }
    },
    in_boolean_context: function() {
        var stack = this.stack;
        var i = stack.length, self = stack[--i];
        while (i > 0) {
            var p = stack[--i];
            if ((p instanceof AST_If && p.condition === self) ||
                (p instanceof AST_Conditional && p.condition === self) ||
                (p instanceof AST_DWLoop && p.condition === self) ||
                (p instanceof AST_For && p.condition === self) ||
                (p instanceof AST_UnaryPrefix && p.operator == "!" && p.expression === self))
            {
                return true;
            }
            if (!(p instanceof AST_Binary && (p.operator == "&&" || p.operator == "||")))
                return false;
            self = p;
        }
    },
    loopcontrol_target: function(label) {
        var stack = this.stack;
        if (label) {
            for (var i = stack.length; --i >= 0;) {
                var x = stack[i];
                if (x instanceof AST_LabeledStatement && x.label.name == label.name) {
                    return x.body;
                }
            }
        } else {
            for (var i = stack.length; --i >= 0;) {
                var x = stack[i];
                if (x instanceof AST_Switch
                    || x instanceof AST_For
                    || x instanceof AST_ForIn
                    || x instanceof AST_DWLoop) return x;
            }
        }
    }
};
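// Illustrative sketch, not part of the library source: driving the visitor
// machinery defined above. `parse` comes from lib/parse.js in this same
// package; returning a truthy value from the callback suppresses the
// automatic descend into child nodes.
//
//     var toplevel = parse("function f(x) { return x + 1; }");
//     var names = [];
//     toplevel.walk(new TreeWalker(function(node){
//         if (node instanceof AST_Symbol) names.push(node.name);
//     }));
//     // names -> [ "f", "x", "x" ]  (definition, argument, reference)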
1968  node_modules/transformers/node_modules/uglify-js/lib/compress.js  generated  vendored  Normal file  (diff suppressed because it is too large)
265   node_modules/transformers/node_modules/uglify-js/lib/mozilla-ast.js  generated  vendored  Normal file
@@ -0,0 +1,265 @@
|
||||
/***********************************************************************
|
||||
|
||||
A JavaScript tokenizer / parser / beautifier / compressor.
|
||||
https://github.com/mishoo/UglifyJS2
|
||||
|
||||
-------------------------------- (C) ---------------------------------
|
||||
|
||||
Author: Mihai Bazon
|
||||
<mihai.bazon@gmail.com>
|
||||
http://mihai.bazon.net/blog
|
||||
|
||||
Distributed under the BSD license:
|
||||
|
||||
Copyright 2012 (c) Mihai Bazon <mihai.bazon@gmail.com>
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions
|
||||
are met:
|
||||
|
||||
* Redistributions of source code must retain the above
|
||||
copyright notice, this list of conditions and the following
|
||||
disclaimer.
|
||||
|
||||
* Redistributions in binary form must reproduce the above
|
||||
copyright notice, this list of conditions and the following
|
||||
disclaimer in the documentation and/or other materials
|
||||
provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER “AS IS” AND ANY
|
||||
EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
||||
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER BE
|
||||
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
|
||||
OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
||||
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
|
||||
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
|
||||
TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
|
||||
THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
|
||||
SUCH DAMAGE.
|
||||
|
||||
***********************************************************************/
|
||||
|
||||
"use strict";
|
||||
|
||||
(function(){
|
||||
|
||||
var MOZ_TO_ME = {
|
||||
TryStatement : function(M) {
|
||||
return new AST_Try({
|
||||
start : my_start_token(M),
|
||||
end : my_end_token(M),
|
||||
body : from_moz(M.block).body,
|
||||
bcatch : from_moz(M.handlers[0]),
|
||||
bfinally : M.finalizer ? new AST_Finally(from_moz(M.finalizer)) : null
|
||||
});
|
||||
},
|
||||
CatchClause : function(M) {
|
||||
return new AST_Catch({
|
||||
start : my_start_token(M),
|
||||
end : my_end_token(M),
|
||||
argname : from_moz(M.param),
|
||||
body : from_moz(M.body).body
|
||||
});
|
||||
},
|
||||
ObjectExpression : function(M) {
|
||||
return new AST_Object({
|
||||
start : my_start_token(M),
|
||||
end : my_end_token(M),
|
||||
properties : M.properties.map(function(prop){
|
||||
var key = prop.key;
|
||||
var name = key.type == "Identifier" ? key.name : key.value;
|
||||
var args = {
|
||||
start : my_start_token(key),
|
||||
end : my_end_token(prop.value),
|
||||
key : name,
|
||||
value : from_moz(prop.value)
|
||||
};
|
||||
switch (prop.kind) {
|
||||
case "init":
|
||||
return new AST_ObjectKeyVal(args);
|
||||
case "set":
|
||||
args.value.name = from_moz(key);
|
||||
return new AST_ObjectSetter(args);
|
||||
case "get":
|
||||
args.value.name = from_moz(key);
|
||||
return new AST_ObjectGetter(args);
|
||||
}
|
||||
})
|
||||
});
|
||||
},
|
||||
SequenceExpression : function(M) {
|
||||
return AST_Seq.from_array(M.expressions.map(from_moz));
|
||||
},
|
||||
MemberExpression : function(M) {
|
||||
return new (M.computed ? AST_Sub : AST_Dot)({
|
||||
start : my_start_token(M),
|
||||
end : my_end_token(M),
|
||||
property : M.computed ? from_moz(M.property) : M.property.name,
|
||||
expression : from_moz(M.object)
|
||||
});
|
||||
},
|
||||
SwitchCase : function(M) {
|
||||
return new (M.test ? AST_Case : AST_Default)({
|
||||
start : my_start_token(M),
|
||||
end : my_end_token(M),
|
||||
expression : from_moz(M.test),
|
||||
body : M.consequent.map(from_moz)
|
||||
});
|
||||
},
|
||||
Literal : function(M) {
|
||||
var val = M.value, args = {
|
||||
start : my_start_token(M),
|
||||
end : my_end_token(M)
|
||||
};
|
||||
if (val === null) return new AST_Null(args);
|
||||
switch (typeof val) {
|
||||
case "string":
|
||||
args.value = val;
|
||||
return new AST_String(args);
|
||||
case "number":
|
||||
args.value = val;
|
||||
return new AST_Number(args);
|
||||
case "boolean":
|
||||
return new (val ? AST_True : AST_False)(args);
|
||||
default:
|
||||
args.value = val;
|
||||
return new AST_RegExp(args);
|
||||
}
|
||||
},
|
||||
UnaryExpression: From_Moz_Unary,
|
||||
UpdateExpression: From_Moz_Unary,
|
||||
Identifier: function(M) {
|
||||
var p = FROM_MOZ_STACK[FROM_MOZ_STACK.length - 2];
|
||||
return new (M.name == "this" ? AST_This
|
||||
: p.type == "LabeledStatement" ? AST_Label
|
||||
: p.type == "VariableDeclarator" && p.id === M ? (p.kind == "const" ? AST_SymbolConst : AST_SymbolVar)
|
||||
: p.type == "FunctionExpression" ? (p.id === M ? AST_SymbolLambda : AST_SymbolFunarg)
|
||||
: p.type == "FunctionDeclaration" ? (p.id === M ? AST_SymbolDefun : AST_SymbolFunarg)
|
||||
: p.type == "CatchClause" ? AST_SymbolCatch
|
||||
: p.type == "BreakStatement" || p.type == "ContinueStatement" ? AST_LabelRef
|
||||
: AST_SymbolRef)({
|
||||
start : my_start_token(M),
|
||||
end : my_end_token(M),
|
||||
name : M.name
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
function From_Moz_Unary(M) {
|
||||
return new (M.prefix ? AST_UnaryPrefix : AST_UnaryPostfix)({
|
||||
start : my_start_token(M),
|
||||
end : my_end_token(M),
|
||||
operator : M.operator,
|
||||
expression : from_moz(M.argument)
|
||||
})
|
||||
};
|
||||
|
||||
var ME_TO_MOZ = {};
|
||||
|
||||
map("Node", AST_Node);
|
||||
map("Program", AST_Toplevel, "body@body");
|
||||
map("Function", AST_Function, "id>name, params@argnames, body%body");
|
||||
map("EmptyStatement", AST_EmptyStatement);
|
||||
map("BlockStatement", AST_BlockStatement, "body@body");
|
||||
map("ExpressionStatement", AST_SimpleStatement, "expression>body");
|
||||
map("IfStatement", AST_If, "test>condition, consequent>body, alternate>alternative");
|
||||
map("LabeledStatement", AST_LabeledStatement, "label>label, body>body");
|
||||
map("BreakStatement", AST_Break, "label>label");
|
||||
map("ContinueStatement", AST_Continue, "label>label");
|
||||
map("WithStatement", AST_With, "object>expression, body>body");
|
||||
map("SwitchStatement", AST_Switch, "discriminant>expression, cases@body");
|
||||
map("ReturnStatement", AST_Return, "argument>value");
|
||||
map("ThrowStatement", AST_Throw, "argument>value");
|
||||
map("WhileStatement", AST_While, "test>condition, body>body");
|
||||
map("DoWhileStatement", AST_Do, "test>condition, body>body");
|
||||
map("ForStatement", AST_For, "init>init, test>condition, update>step, body>body");
|
||||
map("ForInStatement", AST_ForIn, "left>init, right>object, body>body");
|
||||
map("DebuggerStatement", AST_Debugger);
|
||||
map("FunctionDeclaration", AST_Defun, "id>name, params@argnames, body%body");
|
||||
map("VariableDeclaration", AST_Var, "declarations@definitions");
|
||||
map("VariableDeclarator", AST_VarDef, "id>name, init>value");
|
||||
|
||||
map("ThisExpression", AST_This);
|
||||
map("ArrayExpression", AST_Array, "elements@elements");
|
||||
map("FunctionExpression", AST_Function, "id>name, params@argnames, body%body");
|
||||
map("BinaryExpression", AST_Binary, "operator=operator, left>left, right>right");
|
||||
map("AssignmentExpression", AST_Assign, "operator=operator, left>left, right>right");
|
||||
map("LogicalExpression", AST_Binary, "operator=operator, left>left, right>right");
|
||||
map("ConditionalExpression", AST_Conditional, "test>condition, consequent>consequent, alternate>alternative");
|
||||
map("NewExpression", AST_New, "callee>expression, arguments@args");
|
||||
map("CallExpression", AST_Call, "callee>expression, arguments@args");
|
||||
|
||||
/* -----[ tools ]----- */

function my_start_token(moznode) {
    return new AST_Token({
        file   : moznode.loc && moznode.loc.source,
        line   : moznode.loc && moznode.loc.start.line,
        col    : moznode.loc && moznode.loc.start.column,
        pos    : moznode.start,
        endpos : moznode.start
    });
};

function my_end_token(moznode) {
    return new AST_Token({
        file   : moznode.loc && moznode.loc.source,
        line   : moznode.loc && moznode.loc.end.line,
        col    : moznode.loc && moznode.loc.end.column,
        pos    : moznode.end,
        endpos : moznode.end
    });
};

function map(moztype, mytype, propmap) {
    var moz_to_me = "function From_Moz_" + moztype + "(M){\n";
    moz_to_me += "return new mytype({\n" +
        "start: my_start_token(M),\n" +
        "end: my_end_token(M)";

    if (propmap) propmap.split(/\s*,\s*/).forEach(function(prop){
        var m = /([a-z0-9$_]+)(=|@|>|%)([a-z0-9$_]+)/i.exec(prop);
        if (!m) throw new Error("Can't understand property map: " + prop);
        var moz = "M." + m[1], how = m[2], my = m[3];
        moz_to_me += ",\n" + my + ": ";
        if (how == "@") {
            moz_to_me += moz + ".map(from_moz)";
        } else if (how == ">") {
            moz_to_me += "from_moz(" + moz + ")";
        } else if (how == "=") {
            moz_to_me += moz;
        } else if (how == "%") {
            moz_to_me += "from_moz(" + moz + ").body";
        } else throw new Error("Can't understand operator in propmap: " + prop);
    });
    moz_to_me += "\n})}";

    // moz_to_me = parse(moz_to_me).print_to_string({ beautify: true });
    // console.log(moz_to_me);

    moz_to_me = new Function("mytype", "my_start_token", "my_end_token", "from_moz", "return(" + moz_to_me + ")")(
        mytype, my_start_token, my_end_token, from_moz
    );
    return MOZ_TO_ME[moztype] = moz_to_me;
};

var FROM_MOZ_STACK = null;

function from_moz(node) {
    FROM_MOZ_STACK.push(node);
    var ret = node != null ? MOZ_TO_ME[node.type](node) : null;
    FROM_MOZ_STACK.pop();
    return ret;
};

AST_Node.from_mozilla_ast = function(node){
    var save_stack = FROM_MOZ_STACK;
    FROM_MOZ_STACK = [];
    var ast = from_moz(node);
    FROM_MOZ_STACK = save_stack;
    return ast;
};

})();
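// Illustrative sketch, not part of the library source: what the propmap
// mini-language handled by map() expands to. ">" converts one child through
// from_moz, "@" maps an array of children, "%" takes the .body of the
// converted child and "=" copies the value verbatim. The mapping declared
// above as map("IfStatement", AST_If, "test>condition, consequent>body,
// alternate>alternative") therefore builds roughly this converter:
//
//     function From_Moz_IfStatement(M) {
//         return new AST_If({
//             start       : my_start_token(M),
//             end         : my_end_token(M),
//             condition   : from_moz(M.test),
//             body        : from_moz(M.consequent),
//             alternative : from_moz(M.alternate)
//         });
//     }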
1220  node_modules/transformers/node_modules/uglify-js/lib/output.js  generated  vendored  Normal file  (diff suppressed because it is too large)
1407  node_modules/transformers/node_modules/uglify-js/lib/parse.js  generated  vendored  Normal file  (diff suppressed because it is too large)
580   node_modules/transformers/node_modules/uglify-js/lib/scope.js  generated  vendored  Normal file
@@ -0,0 +1,580 @@
|
||||
/* Standard UglifyJS2 BSD license header (Mihai Bazon), identical to the one at the top of mozilla-ast.js above. */
|
||||
|
||||
"use strict";
|
||||
|
||||
function SymbolDef(scope, index, orig) {
|
||||
this.name = orig.name;
|
||||
this.orig = [ orig ];
|
||||
this.scope = scope;
|
||||
this.references = [];
|
||||
this.global = false;
|
||||
this.mangled_name = null;
|
||||
this.undeclared = false;
|
||||
this.constant = false;
|
||||
this.index = index;
|
||||
};
|
||||
|
||||
SymbolDef.prototype = {
|
||||
unmangleable: function(options) {
|
||||
return this.global
|
||||
|| this.undeclared
|
||||
|| (!(options && options.eval) && (this.scope.uses_eval || this.scope.uses_with));
|
||||
},
|
||||
mangle: function(options) {
|
||||
if (!this.mangled_name && !this.unmangleable(options))
|
||||
this.mangled_name = this.scope.next_mangled(options);
|
||||
}
|
||||
};
|
||||
|
||||
AST_Toplevel.DEFMETHOD("figure_out_scope", function(){
|
||||
// This does what ast_add_scope did in UglifyJS v1.
|
||||
//
|
||||
// Part of it could be done at parse time, but it would complicate
|
||||
// the parser (and it's already kinda complex). It's also worth
|
||||
// having it separated because we might need to call it multiple
|
||||
// times on the same tree.
|
||||
|
||||
// pass 1: setup scope chaining and handle definitions
|
||||
var self = this;
|
||||
var scope = self.parent_scope = null;
|
||||
var labels = new Dictionary();
|
||||
var nesting = 0;
|
||||
var tw = new TreeWalker(function(node, descend){
|
||||
if (node instanceof AST_Scope) {
|
||||
node.init_scope_vars(nesting);
|
||||
var save_scope = node.parent_scope = scope;
|
||||
var save_labels = labels;
|
||||
++nesting;
|
||||
scope = node;
|
||||
labels = new Dictionary();
|
||||
descend();
|
||||
labels = save_labels;
|
||||
scope = save_scope;
|
||||
--nesting;
|
||||
return true; // don't descend again in TreeWalker
|
||||
}
|
||||
if (node instanceof AST_Directive) {
|
||||
node.scope = scope;
|
||||
push_uniq(scope.directives, node.value);
|
||||
return true;
|
||||
}
|
||||
if (node instanceof AST_With) {
|
||||
for (var s = scope; s; s = s.parent_scope)
|
||||
s.uses_with = true;
|
||||
return;
|
||||
}
|
||||
if (node instanceof AST_LabeledStatement) {
|
||||
var l = node.label;
|
||||
if (labels.has(l.name))
|
||||
throw new Error(string_template("Label {name} defined twice", l));
|
||||
labels.set(l.name, l);
|
||||
descend();
|
||||
labels.del(l.name);
|
||||
return true; // no descend again
|
||||
}
|
||||
if (node instanceof AST_Symbol) {
|
||||
node.scope = scope;
|
||||
}
|
||||
if (node instanceof AST_Label) {
|
||||
node.thedef = node;
|
||||
node.init_scope_vars();
|
||||
}
|
||||
if (node instanceof AST_SymbolLambda) {
|
||||
//scope.def_function(node);
|
||||
//
|
||||
// https://github.com/mishoo/UglifyJS2/issues/24 — MSIE
|
||||
// leaks function expression names into the containing
|
||||
// scope. Don't like this fix but seems we can't do any
|
||||
// better. IE: please die. Please!
|
||||
(node.scope = scope.parent_scope).def_function(node);
|
||||
}
|
||||
else if (node instanceof AST_SymbolDefun) {
|
||||
// Careful here, the scope where this should be defined is
|
||||
// the parent scope. The reason is that we enter a new
|
||||
// scope when we encounter the AST_Defun node (which is
|
||||
// instanceof AST_Scope) but we get to the symbol a bit
|
||||
// later.
|
||||
(node.scope = scope.parent_scope).def_function(node);
|
||||
}
|
||||
else if (node instanceof AST_SymbolVar
|
||||
|| node instanceof AST_SymbolConst) {
|
||||
var def = scope.def_variable(node);
|
||||
def.constant = node instanceof AST_SymbolConst;
|
||||
def.init = tw.parent().value;
|
||||
}
|
||||
else if (node instanceof AST_SymbolCatch) {
|
||||
// XXX: this is wrong according to ECMA-262 (12.4). the
|
||||
// `catch` argument name should be visible only inside the
|
||||
// catch block. For a quick fix AST_Catch should inherit
|
||||
// from AST_Scope. Keeping it this way because of IE,
|
||||
// which doesn't obey the standard. (it introduces the
|
||||
// identifier in the enclosing scope)
|
||||
scope.def_variable(node);
|
||||
}
|
||||
if (node instanceof AST_LabelRef) {
|
||||
var sym = labels.get(node.name);
|
||||
if (!sym) throw new Error(string_template("Undefined label {name} [{line},{col}]", {
|
||||
name: node.name,
|
||||
line: node.start.line,
|
||||
col: node.start.col
|
||||
}));
|
||||
node.thedef = sym;
|
||||
}
|
||||
});
|
||||
self.walk(tw);
|
||||
|
||||
// pass 2: find back references and eval
|
||||
var func = null;
|
||||
var globals = self.globals = new Dictionary();
|
||||
var tw = new TreeWalker(function(node, descend){
|
||||
if (node instanceof AST_Lambda) {
|
||||
var prev_func = func;
|
||||
func = node;
|
||||
descend();
|
||||
func = prev_func;
|
||||
return true;
|
||||
}
|
||||
if (node instanceof AST_LabelRef) {
|
||||
node.reference();
|
||||
return true;
|
||||
}
|
||||
if (node instanceof AST_SymbolRef) {
|
||||
var name = node.name;
|
||||
var sym = node.scope.find_variable(name);
|
||||
if (!sym) {
|
||||
var g;
|
||||
if (globals.has(name)) {
|
||||
g = globals.get(name);
|
||||
} else {
|
||||
g = new SymbolDef(self, globals.size(), node);
|
||||
g.undeclared = true;
|
||||
globals.set(name, g);
|
||||
}
|
||||
node.thedef = g;
|
||||
if (name == "eval" && tw.parent() instanceof AST_Call) {
|
||||
for (var s = node.scope; s && !s.uses_eval; s = s.parent_scope)
|
||||
s.uses_eval = true;
|
||||
}
|
||||
if (name == "arguments") {
|
||||
func.uses_arguments = true;
|
||||
}
|
||||
} else {
|
||||
node.thedef = sym;
|
||||
}
|
||||
node.reference();
|
||||
return true;
|
||||
}
|
||||
});
|
||||
self.walk(tw);
|
||||
});
|
||||
|
||||
AST_Scope.DEFMETHOD("init_scope_vars", function(nesting){
|
||||
this.directives = []; // contains the directives defined in this scope, i.e. "use strict"
|
||||
this.variables = new Dictionary(); // map name to AST_SymbolVar (variables defined in this scope; includes functions)
|
||||
this.functions = new Dictionary(); // map name to AST_SymbolDefun (functions defined in this scope)
|
||||
this.uses_with = false; // will be set to true if this or some nested scope uses the `with` statement
|
||||
this.uses_eval = false; // will be set to true if this or nested scope uses the global `eval`
|
||||
this.parent_scope = null; // the parent scope
|
||||
this.enclosed = []; // a list of variables from this or outer scope(s) that are referenced from this or inner scopes
|
||||
this.cname = -1; // the current index for mangling functions/variables
|
||||
this.nesting = nesting; // the nesting level of this scope (0 means toplevel)
|
||||
});
|
||||
|
||||
AST_Scope.DEFMETHOD("strict", function(){
|
||||
return this.has_directive("use strict");
|
||||
});
|
||||
|
||||
AST_Lambda.DEFMETHOD("init_scope_vars", function(){
|
||||
AST_Scope.prototype.init_scope_vars.apply(this, arguments);
|
||||
this.uses_arguments = false;
|
||||
});
|
||||
|
||||
AST_SymbolRef.DEFMETHOD("reference", function() {
|
||||
var def = this.definition();
|
||||
def.references.push(this);
|
||||
var s = this.scope;
|
||||
while (s) {
|
||||
push_uniq(s.enclosed, def);
|
||||
if (s === def.scope) break;
|
||||
s = s.parent_scope;
|
||||
}
|
||||
this.frame = this.scope.nesting - def.scope.nesting;
|
||||
});
|
||||
|
||||
AST_Label.DEFMETHOD("init_scope_vars", function(){
|
||||
this.references = [];
|
||||
});
|
||||
|
||||
AST_LabelRef.DEFMETHOD("reference", function(){
|
||||
this.thedef.references.push(this);
|
||||
});
|
||||
|
||||
AST_Scope.DEFMETHOD("find_variable", function(name){
|
||||
if (name instanceof AST_Symbol) name = name.name;
|
||||
return this.variables.get(name)
|
||||
|| (this.parent_scope && this.parent_scope.find_variable(name));
|
||||
});
|
||||
|
||||
AST_Scope.DEFMETHOD("has_directive", function(value){
|
||||
return this.parent_scope && this.parent_scope.has_directive(value)
|
||||
|| (this.directives.indexOf(value) >= 0 ? this : null);
|
||||
});
|
||||
|
||||
AST_Scope.DEFMETHOD("def_function", function(symbol){
|
||||
this.functions.set(symbol.name, this.def_variable(symbol));
|
||||
});
|
||||
|
||||
AST_Scope.DEFMETHOD("def_variable", function(symbol){
|
||||
var def;
|
||||
if (!this.variables.has(symbol.name)) {
|
||||
def = new SymbolDef(this, this.variables.size(), symbol);
|
||||
this.variables.set(symbol.name, def);
|
||||
def.global = !this.parent_scope;
|
||||
} else {
|
||||
def = this.variables.get(symbol.name);
|
||||
def.orig.push(symbol);
|
||||
}
|
||||
return symbol.thedef = def;
|
||||
});
|
||||
|
||||
AST_Scope.DEFMETHOD("next_mangled", function(options){
|
||||
var ext = this.enclosed, n = ext.length;
|
||||
out: while (true) {
|
||||
var m = base54(++this.cname);
|
||||
if (!is_identifier(m)) continue; // skip over "do"
|
||||
// we must ensure that the mangled name does not shadow a name
|
||||
// from some parent scope that is referenced in this or in
|
||||
// inner scopes.
|
||||
for (var i = n; --i >= 0;) {
|
||||
var sym = ext[i];
|
||||
var name = sym.mangled_name || (sym.unmangleable(options) && sym.name);
|
||||
if (m == name) continue out;
|
||||
}
|
||||
return m;
|
||||
}
|
||||
});
|
||||
|
||||
AST_Scope.DEFMETHOD("references", function(sym){
|
||||
if (sym instanceof AST_Symbol) sym = sym.definition();
|
||||
return this.enclosed.indexOf(sym) < 0 ? null : sym;
|
||||
});
|
||||
|
||||
AST_Symbol.DEFMETHOD("unmangleable", function(options){
|
||||
return this.definition().unmangleable(options);
|
||||
});
|
||||
|
||||
// property accessors are not mangleable
|
||||
AST_SymbolAccessor.DEFMETHOD("unmangleable", function(){
|
||||
return true;
|
||||
});
|
||||
|
||||
// labels are always mangleable
|
||||
AST_Label.DEFMETHOD("unmangleable", function(){
|
||||
return false;
|
||||
});
|
||||
|
||||
AST_Symbol.DEFMETHOD("unreferenced", function(){
|
||||
return this.definition().references.length == 0
|
||||
&& !(this.scope.uses_eval || this.scope.uses_with);
|
||||
});
|
||||
|
||||
AST_Symbol.DEFMETHOD("undeclared", function(){
|
||||
return this.definition().undeclared;
|
||||
});
|
||||
|
||||
AST_LabelRef.DEFMETHOD("undeclared", function(){
|
||||
return false;
|
||||
});
|
||||
|
||||
AST_Label.DEFMETHOD("undeclared", function(){
|
||||
return false;
|
||||
});
|
||||
|
||||
AST_Symbol.DEFMETHOD("definition", function(){
|
||||
return this.thedef;
|
||||
});
|
||||
|
||||
AST_Symbol.DEFMETHOD("global", function(){
|
||||
return this.definition().global;
|
||||
});
|
||||
|
||||
AST_Toplevel.DEFMETHOD("_default_mangler_options", function(options){
|
||||
return defaults(options, {
|
||||
except : [],
|
||||
eval : false,
|
||||
sort : false
|
||||
});
|
||||
});
|
||||
|
||||
AST_Toplevel.DEFMETHOD("mangle_names", function(options){
|
||||
options = this._default_mangler_options(options);
|
||||
// We only need to mangle declaration nodes. Special logic wired
|
||||
// into the code generator will display the mangled name if it's
|
||||
// present (and for AST_SymbolRef-s it'll use the mangled name of
|
||||
// the AST_SymbolDeclaration that it points to).
|
||||
var lname = -1;
|
||||
var to_mangle = [];
|
||||
var tw = new TreeWalker(function(node, descend){
|
||||
if (node instanceof AST_LabeledStatement) {
|
||||
// lname is incremented when we get to the AST_Label
|
||||
var save_nesting = lname;
|
||||
descend();
|
||||
lname = save_nesting;
|
||||
return true; // don't descend again in TreeWalker
|
||||
}
|
||||
if (node instanceof AST_Scope) {
|
||||
var p = tw.parent(), a = [];
|
||||
node.variables.each(function(symbol){
|
||||
if (options.except.indexOf(symbol.name) < 0) {
|
||||
a.push(symbol);
|
||||
}
|
||||
});
|
||||
if (options.sort) a.sort(function(a, b){
|
||||
return b.references.length - a.references.length;
|
||||
});
|
||||
to_mangle.push.apply(to_mangle, a);
|
||||
return;
|
||||
}
|
||||
if (node instanceof AST_Label) {
|
||||
var name;
|
||||
do name = base54(++lname); while (!is_identifier(name));
|
||||
node.mangled_name = name;
|
||||
return true;
|
||||
}
|
||||
});
|
||||
this.walk(tw);
|
||||
to_mangle.forEach(function(def){ def.mangle(options) });
|
||||
});
|
||||
|
||||
AST_Toplevel.DEFMETHOD("compute_char_frequency", function(options){
|
||||
options = this._default_mangler_options(options);
|
||||
var tw = new TreeWalker(function(node){
|
||||
if (node instanceof AST_Constant)
|
||||
base54.consider(node.print_to_string());
|
||||
else if (node instanceof AST_Return)
|
||||
base54.consider("return");
|
||||
else if (node instanceof AST_Throw)
|
||||
base54.consider("throw");
|
||||
else if (node instanceof AST_Continue)
|
||||
base54.consider("continue");
|
||||
else if (node instanceof AST_Break)
|
||||
base54.consider("break");
|
||||
else if (node instanceof AST_Debugger)
|
||||
base54.consider("debugger");
|
||||
else if (node instanceof AST_Directive)
|
||||
base54.consider(node.value);
|
||||
else if (node instanceof AST_While)
|
||||
base54.consider("while");
|
||||
else if (node instanceof AST_Do)
|
||||
base54.consider("do while");
|
||||
else if (node instanceof AST_If) {
|
||||
base54.consider("if");
|
||||
if (node.alternative) base54.consider("else");
|
||||
}
|
||||
else if (node instanceof AST_Var)
|
||||
base54.consider("var");
|
||||
else if (node instanceof AST_Const)
|
||||
base54.consider("const");
|
||||
else if (node instanceof AST_Lambda)
|
||||
base54.consider("function");
|
||||
else if (node instanceof AST_For)
|
||||
base54.consider("for");
|
||||
else if (node instanceof AST_ForIn)
|
||||
base54.consider("for in");
|
||||
else if (node instanceof AST_Switch)
|
||||
base54.consider("switch");
|
||||
else if (node instanceof AST_Case)
|
||||
base54.consider("case");
|
||||
else if (node instanceof AST_Default)
|
||||
base54.consider("default");
|
||||
else if (node instanceof AST_With)
|
||||
base54.consider("with");
|
||||
else if (node instanceof AST_ObjectSetter)
|
||||
base54.consider("set" + node.key);
|
||||
else if (node instanceof AST_ObjectGetter)
|
||||
base54.consider("get" + node.key);
|
||||
else if (node instanceof AST_ObjectKeyVal)
|
||||
base54.consider(node.key);
|
||||
else if (node instanceof AST_New)
|
||||
base54.consider("new");
|
||||
else if (node instanceof AST_This)
|
||||
base54.consider("this");
|
||||
else if (node instanceof AST_Try)
|
||||
base54.consider("try");
|
||||
else if (node instanceof AST_Catch)
|
||||
base54.consider("catch");
|
||||
else if (node instanceof AST_Finally)
|
||||
base54.consider("finally");
|
||||
else if (node instanceof AST_Symbol && node.unmangleable(options))
|
||||
base54.consider(node.name);
|
||||
else if (node instanceof AST_Unary || node instanceof AST_Binary)
|
||||
base54.consider(node.operator);
|
||||
else if (node instanceof AST_Dot)
|
||||
base54.consider(node.property);
|
||||
});
|
||||
this.walk(tw);
|
||||
base54.sort();
|
||||
});
|
||||
|
||||
var base54 = (function() {
    var string = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ$_0123456789";
    var chars, frequency;
    function reset() {
        frequency = Object.create(null);
        chars = string.split("").map(function(ch){ return ch.charCodeAt(0) });
        chars.forEach(function(ch){ frequency[ch] = 0 });
    }
    base54.consider = function(str){
        for (var i = str.length; --i >= 0;) {
            var code = str.charCodeAt(i);
            if (code in frequency) ++frequency[code];
        }
    };
    base54.sort = function() {
        chars = mergeSort(chars, function(a, b){
            if (is_digit(a) && !is_digit(b)) return 1;
            if (is_digit(b) && !is_digit(a)) return -1;
            return frequency[b] - frequency[a];
        });
    };
    base54.reset = reset;
    reset();
    base54.get = function(){ return chars };
    base54.freq = function(){ return frequency };
    function base54(num) {
        var ret = "", base = 54;
        do {
            ret += String.fromCharCode(chars[num % base]);
            num = Math.floor(num / base);
            base = 64;
        } while (num > 0);
        return ret;
    };
    return base54;
})();
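// Illustrative sketch, not part of the library source: the mangling flow that
// ties the methods in this file together. base54() turns an index into a short
// identifier; the first character is picked from the 54 non-digit characters so
// a name can never start with a digit, and compute_char_frequency() biases the
// ordering toward characters that are already frequent in the output.
//
//     var toplevel = parse(code);          // `code`: source text (assumed)
//     toplevel.figure_out_scope();
//     toplevel.compute_char_frequency();   // optional frequency pass
//     toplevel.mangle_names({ except: ["$"] });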
|
||||
|
||||
AST_Toplevel.DEFMETHOD("scope_warnings", function(options){
|
||||
options = defaults(options, {
|
||||
undeclared : false, // this makes a lot of noise
|
||||
unreferenced : true,
|
||||
assign_to_global : true,
|
||||
func_arguments : true,
|
||||
nested_defuns : true,
|
||||
eval : true
|
||||
});
|
||||
var tw = new TreeWalker(function(node){
|
||||
if (options.undeclared
|
||||
&& node instanceof AST_SymbolRef
|
||||
&& node.undeclared())
|
||||
{
|
||||
// XXX: this also warns about JS standard names,
|
||||
// i.e. Object, Array, parseInt etc. Should add a list of
|
||||
// exceptions.
|
||||
AST_Node.warn("Undeclared symbol: {name} [{file}:{line},{col}]", {
|
||||
name: node.name,
|
||||
file: node.start.file,
|
||||
line: node.start.line,
|
||||
col: node.start.col
|
||||
});
|
||||
}
|
||||
if (options.assign_to_global)
|
||||
{
|
||||
var sym = null;
|
||||
if (node instanceof AST_Assign && node.left instanceof AST_SymbolRef)
|
||||
sym = node.left;
|
||||
else if (node instanceof AST_ForIn && node.init instanceof AST_SymbolRef)
|
||||
sym = node.init;
|
||||
if (sym
|
||||
&& (sym.undeclared()
|
||||
|| (sym.global() && sym.scope !== sym.definition().scope))) {
|
||||
AST_Node.warn("{msg}: {name} [{file}:{line},{col}]", {
|
||||
msg: sym.undeclared() ? "Accidental global?" : "Assignment to global",
|
||||
name: sym.name,
|
||||
file: sym.start.file,
|
||||
line: sym.start.line,
|
||||
col: sym.start.col
|
||||
});
|
||||
}
|
||||
}
|
||||
if (options.eval
|
||||
&& node instanceof AST_SymbolRef
|
||||
&& node.undeclared()
|
||||
&& node.name == "eval") {
|
||||
AST_Node.warn("Eval is used [{file}:{line},{col}]", node.start);
|
||||
}
|
||||
if (options.unreferenced
|
||||
&& (node instanceof AST_SymbolDeclaration || node instanceof AST_Label)
|
||||
&& node.unreferenced()) {
|
||||
AST_Node.warn("{type} {name} is declared but not referenced [{file}:{line},{col}]", {
|
||||
type: node instanceof AST_Label ? "Label" : "Symbol",
|
||||
name: node.name,
|
||||
file: node.start.file,
|
||||
line: node.start.line,
|
||||
col: node.start.col
|
||||
});
|
||||
}
|
||||
if (options.func_arguments
|
||||
&& node instanceof AST_Lambda
|
||||
&& node.uses_arguments) {
|
||||
AST_Node.warn("arguments used in function {name} [{file}:{line},{col}]", {
|
||||
name: node.name ? node.name.name : "anonymous",
|
||||
file: node.start.file,
|
||||
line: node.start.line,
|
||||
col: node.start.col
|
||||
});
|
||||
}
|
||||
if (options.nested_defuns
|
||||
&& node instanceof AST_Defun
|
||||
&& !(tw.parent() instanceof AST_Scope)) {
|
||||
AST_Node.warn("Function {name} declared in nested statement \"{type}\" [{file}:{line},{col}]", {
|
||||
name: node.name.name,
|
||||
type: tw.parent().TYPE,
|
||||
file: node.start.file,
|
||||
line: node.start.line,
|
||||
col: node.start.col
|
||||
});
|
||||
}
|
||||
});
|
||||
this.walk(tw);
|
||||
});
|
||||
81  node_modules/transformers/node_modules/uglify-js/lib/sourcemap.js  generated  vendored  Normal file
@@ -0,0 +1,81 @@
|
||||
/* Standard UglifyJS2 BSD license header (Mihai Bazon), identical to the one at the top of mozilla-ast.js above. */
|
||||
|
||||
"use strict";
|
||||
|
||||
// a small wrapper around fitzgen's source-map library
|
||||
function SourceMap(options) {
|
||||
options = defaults(options, {
|
||||
file : null,
|
||||
root : null,
|
||||
orig : null,
|
||||
});
|
||||
var generator = new MOZ_SourceMap.SourceMapGenerator({
|
||||
file : options.file,
|
||||
sourceRoot : options.root
|
||||
});
|
||||
var orig_map = options.orig && new MOZ_SourceMap.SourceMapConsumer(options.orig);
|
||||
function add(source, gen_line, gen_col, orig_line, orig_col, name) {
|
||||
if (orig_map) {
|
||||
var info = orig_map.originalPositionFor({
|
||||
line: orig_line,
|
||||
column: orig_col
|
||||
});
|
||||
source = info.source;
|
||||
orig_line = info.line;
|
||||
orig_col = info.column;
|
||||
name = info.name;
|
||||
}
|
||||
generator.addMapping({
|
||||
generated : { line: gen_line, column: gen_col },
|
||||
original : { line: orig_line, column: orig_col },
|
||||
source : source,
|
||||
name : name
|
||||
});
|
||||
};
|
||||
return {
|
||||
add : add,
|
||||
get : function() { return generator },
|
||||
toString : function() { return generator.toString() }
|
||||
};
|
||||
};
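// Illustrative sketch, not part of the library source: how the code generator
// is expected to feed this wrapper. MOZ_SourceMap is expected to be the
// `source-map` module; gen_line/gen_col/tok below are stand-ins for the
// printer's current output position and the token being emitted.
//
//     var map = SourceMap({ file: "bundle.min.js" });   // output name assumed
//     map.add("input.js", gen_line, gen_col, tok.line, tok.col, tok.value);
//     var json = map.toString();   // JSON text of the generated source map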
218  node_modules/transformers/node_modules/uglify-js/lib/transform.js  generated  vendored  Normal file
@@ -0,0 +1,218 @@
|
||||
/* Standard UglifyJS2 BSD license header (Mihai Bazon), identical to the one at the top of mozilla-ast.js above. */
|
||||
|
||||
"use strict";
|
||||
|
||||
// Tree transformer helpers.
|
||||
// XXX: eventually I should refactor the compressor to use this infrastructure.
|
||||
|
||||
function TreeTransformer(before, after) {
|
||||
TreeWalker.call(this);
|
||||
this.before = before;
|
||||
this.after = after;
|
||||
}
|
||||
TreeTransformer.prototype = new TreeWalker;
|
||||
|
||||
(function(undefined){
|
||||
|
||||
function _(node, descend) {
|
||||
node.DEFMETHOD("transform", function(tw, in_list){
|
||||
var x, y;
|
||||
tw.push(this);
|
||||
if (tw.before) x = tw.before(this, descend, in_list);
|
||||
if (x === undefined) {
|
||||
if (!tw.after) {
|
||||
x = this;
|
||||
descend(x, tw);
|
||||
} else {
|
||||
tw.stack[tw.stack.length - 1] = x = this.clone();
|
||||
descend(x, tw);
|
||||
y = tw.after(x, in_list);
|
||||
if (y !== undefined) x = y;
|
||||
}
|
||||
}
|
||||
tw.pop();
|
||||
return x;
|
||||
});
|
||||
};
|
||||
|
||||
function do_list(list, tw) {
|
||||
return MAP(list, function(node){
|
||||
return node.transform(tw, true);
|
||||
});
|
||||
};
|
||||
|
||||
_(AST_Node, noop);
|
||||
|
||||
_(AST_LabeledStatement, function(self, tw){
|
||||
self.label = self.label.transform(tw);
|
||||
self.body = self.body.transform(tw);
|
||||
});
|
||||
|
||||
_(AST_SimpleStatement, function(self, tw){
|
||||
self.body = self.body.transform(tw);
|
||||
});
|
||||
|
||||
_(AST_Block, function(self, tw){
|
||||
self.body = do_list(self.body, tw);
|
||||
});
|
||||
|
||||
_(AST_DWLoop, function(self, tw){
|
||||
self.condition = self.condition.transform(tw);
|
||||
self.body = self.body.transform(tw);
|
||||
});
|
||||
|
||||
_(AST_For, function(self, tw){
|
||||
if (self.init) self.init = self.init.transform(tw);
|
||||
if (self.condition) self.condition = self.condition.transform(tw);
|
||||
if (self.step) self.step = self.step.transform(tw);
|
||||
self.body = self.body.transform(tw);
|
||||
});
|
||||
|
||||
_(AST_ForIn, function(self, tw){
|
||||
self.init = self.init.transform(tw);
|
||||
self.object = self.object.transform(tw);
|
||||
self.body = self.body.transform(tw);
|
||||
});
|
||||
|
||||
_(AST_With, function(self, tw){
|
||||
self.expression = self.expression.transform(tw);
|
||||
self.body = self.body.transform(tw);
|
||||
});
|
||||
|
||||
_(AST_Exit, function(self, tw){
|
||||
if (self.value) self.value = self.value.transform(tw);
|
||||
});
|
||||
|
||||
_(AST_LoopControl, function(self, tw){
|
||||
if (self.label) self.label = self.label.transform(tw);
|
||||
});
|
||||
|
||||
_(AST_If, function(self, tw){
|
||||
self.condition = self.condition.transform(tw);
|
||||
self.body = self.body.transform(tw);
|
||||
if (self.alternative) self.alternative = self.alternative.transform(tw);
|
||||
});
|
||||
|
||||
_(AST_Switch, function(self, tw){
|
||||
self.expression = self.expression.transform(tw);
|
||||
self.body = do_list(self.body, tw);
|
||||
});
|
||||
|
||||
_(AST_Case, function(self, tw){
|
||||
self.expression = self.expression.transform(tw);
|
||||
self.body = do_list(self.body, tw);
|
||||
});
|
||||
|
||||
_(AST_Try, function(self, tw){
|
||||
self.body = do_list(self.body, tw);
|
||||
if (self.bcatch) self.bcatch = self.bcatch.transform(tw);
|
||||
if (self.bfinally) self.bfinally = self.bfinally.transform(tw);
|
||||
});
|
||||
|
||||
_(AST_Catch, function(self, tw){
|
||||
self.argname = self.argname.transform(tw);
|
||||
self.body = do_list(self.body, tw);
|
||||
});
|
||||
|
||||
_(AST_Definitions, function(self, tw){
|
||||
self.definitions = do_list(self.definitions, tw);
|
||||
});
|
||||
|
||||
_(AST_VarDef, function(self, tw){
|
||||
if (self.value) self.value = self.value.transform(tw);
|
||||
});
|
||||
|
||||
_(AST_Lambda, function(self, tw){
|
||||
if (self.name) self.name = self.name.transform(tw);
|
||||
self.argnames = do_list(self.argnames, tw);
|
||||
self.body = do_list(self.body, tw);
|
||||
});
|
||||
|
||||
_(AST_Call, function(self, tw){
|
||||
self.expression = self.expression.transform(tw);
|
||||
self.args = do_list(self.args, tw);
|
||||
});
|
||||
|
||||
_(AST_Seq, function(self, tw){
|
||||
self.car = self.car.transform(tw);
|
||||
self.cdr = self.cdr.transform(tw);
|
||||
});
|
||||
|
||||
_(AST_Dot, function(self, tw){
|
||||
self.expression = self.expression.transform(tw);
|
||||
});
|
||||
|
||||
_(AST_Sub, function(self, tw){
|
||||
self.expression = self.expression.transform(tw);
|
||||
self.property = self.property.transform(tw);
|
||||
});
|
||||
|
||||
_(AST_Unary, function(self, tw){
|
||||
self.expression = self.expression.transform(tw);
|
||||
});
|
||||
|
||||
_(AST_Binary, function(self, tw){
|
||||
self.left = self.left.transform(tw);
|
||||
self.right = self.right.transform(tw);
|
||||
});
|
||||
|
||||
_(AST_Conditional, function(self, tw){
|
||||
self.condition = self.condition.transform(tw);
|
||||
self.consequent = self.consequent.transform(tw);
|
||||
self.alternative = self.alternative.transform(tw);
|
||||
});
|
||||
|
||||
_(AST_Array, function(self, tw){
|
||||
self.elements = do_list(self.elements, tw);
|
||||
});
|
||||
|
||||
_(AST_Object, function(self, tw){
|
||||
self.properties = do_list(self.properties, tw);
|
||||
});
|
||||
|
||||
_(AST_ObjectProperty, function(self, tw){
|
||||
self.value = self.value.transform(tw);
|
||||
});
|
||||
|
||||
})();
|
||||
288
node_modules/transformers/node_modules/uglify-js/lib/utils.js
generated
vendored
Normal file
@@ -0,0 +1,288 @@
|
||||
/***********************************************************************
|
||||
|
||||
A JavaScript tokenizer / parser / beautifier / compressor.
|
||||
https://github.com/mishoo/UglifyJS2
|
||||
|
||||
-------------------------------- (C) ---------------------------------
|
||||
|
||||
Author: Mihai Bazon
|
||||
<mihai.bazon@gmail.com>
|
||||
http://mihai.bazon.net/blog
|
||||
|
||||
Distributed under the BSD license:
|
||||
|
||||
Copyright 2012 (c) Mihai Bazon <mihai.bazon@gmail.com>
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions
|
||||
are met:
|
||||
|
||||
* Redistributions of source code must retain the above
|
||||
copyright notice, this list of conditions and the following
|
||||
disclaimer.
|
||||
|
||||
* Redistributions in binary form must reproduce the above
|
||||
copyright notice, this list of conditions and the following
|
||||
disclaimer in the documentation and/or other materials
|
||||
provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER “AS IS” AND ANY
|
||||
EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
||||
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER BE
|
||||
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
|
||||
OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
||||
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
|
||||
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
|
||||
TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
|
||||
THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
|
||||
SUCH DAMAGE.
|
||||
|
||||
***********************************************************************/
|
||||
|
||||
"use strict";
|
||||
|
||||
function array_to_hash(a) {
|
||||
var ret = Object.create(null);
|
||||
for (var i = 0; i < a.length; ++i)
|
||||
ret[a[i]] = true;
|
||||
return ret;
|
||||
};
|
||||
|
||||
function slice(a, start) {
|
||||
return Array.prototype.slice.call(a, start || 0);
|
||||
};
|
||||
|
||||
function characters(str) {
|
||||
return str.split("");
|
||||
};
|
||||
|
||||
function member(name, array) {
|
||||
for (var i = array.length; --i >= 0;)
|
||||
if (array[i] == name)
|
||||
return true;
|
||||
return false;
|
||||
};
|
||||
|
||||
function find_if(func, array) {
|
||||
for (var i = 0, n = array.length; i < n; ++i) {
|
||||
if (func(array[i]))
|
||||
return array[i];
|
||||
}
|
||||
};
|
||||
|
||||
function repeat_string(str, i) {
|
||||
if (i <= 0) return "";
|
||||
if (i == 1) return str;
|
||||
var d = repeat_string(str, i >> 1);
|
||||
d += d;
|
||||
if (i & 1) d += str;
|
||||
return d;
|
||||
};
|
||||
|
||||
function DefaultsError(msg, defs) {
|
||||
this.msg = msg;
|
||||
this.defs = defs;
|
||||
};
|
||||
|
||||
function defaults(args, defs, croak) {
|
||||
if (args === true)
|
||||
args = {};
|
||||
var ret = args || {};
|
||||
if (croak) for (var i in ret) if (ret.hasOwnProperty(i) && !defs.hasOwnProperty(i))
|
||||
throw new DefaultsError("`" + i + "` is not a supported option", defs);
|
||||
for (var i in defs) if (defs.hasOwnProperty(i)) {
|
||||
ret[i] = (args && args.hasOwnProperty(i)) ? args[i] : defs[i];
|
||||
}
|
||||
return ret;
|
||||
};
|
||||
|
||||
function merge(obj, ext) {
|
||||
for (var i in ext) if (ext.hasOwnProperty(i)) {
|
||||
obj[i] = ext[i];
|
||||
}
|
||||
return obj;
|
||||
};
|
||||
|
||||
function noop() {};
|
||||
|
||||
var MAP = (function(){
|
||||
function MAP(a, f, backwards) {
|
||||
var ret = [], top = [], i;
|
||||
function doit() {
|
||||
var val = f(a[i], i);
|
||||
var is_last = val instanceof Last;
|
||||
if (is_last) val = val.v;
|
||||
if (val instanceof AtTop) {
|
||||
val = val.v;
|
||||
if (val instanceof Splice) {
|
||||
top.push.apply(top, backwards ? val.v.slice().reverse() : val.v);
|
||||
} else {
|
||||
top.push(val);
|
||||
}
|
||||
}
|
||||
else if (val !== skip) {
|
||||
if (val instanceof Splice) {
|
||||
ret.push.apply(ret, backwards ? val.v.slice().reverse() : val.v);
|
||||
} else {
|
||||
ret.push(val);
|
||||
}
|
||||
}
|
||||
return is_last;
|
||||
};
|
||||
if (a instanceof Array) {
|
||||
if (backwards) {
|
||||
for (i = a.length; --i >= 0;) if (doit()) break;
|
||||
ret.reverse();
|
||||
top.reverse();
|
||||
} else {
|
||||
for (i = 0; i < a.length; ++i) if (doit()) break;
|
||||
}
|
||||
}
|
||||
else {
|
||||
for (i in a) if (a.hasOwnProperty(i)) if (doit()) break;
|
||||
}
|
||||
return top.concat(ret);
|
||||
};
|
||||
MAP.at_top = function(val) { return new AtTop(val) };
|
||||
MAP.splice = function(val) { return new Splice(val) };
|
||||
MAP.last = function(val) { return new Last(val) };
|
||||
var skip = MAP.skip = {};
|
||||
function AtTop(val) { this.v = val };
|
||||
function Splice(val) { this.v = val };
|
||||
function Last(val) { this.v = val };
|
||||
return MAP;
|
||||
})();
|
||||
|
||||
function push_uniq(array, el) {
|
||||
if (array.indexOf(el) < 0)
|
||||
array.push(el);
|
||||
};
|
||||
|
||||
function string_template(text, props) {
|
||||
return text.replace(/\{(.+?)\}/g, function(str, p){
|
||||
return props[p];
|
||||
});
|
||||
};
|
||||
|
||||
function remove(array, el) {
|
||||
for (var i = array.length; --i >= 0;) {
|
||||
if (array[i] === el) array.splice(i, 1);
|
||||
}
|
||||
};
|
||||
|
||||
function mergeSort(array, cmp) {
|
||||
if (array.length < 2) return array.slice();
|
||||
function merge(a, b) {
|
||||
var r = [], ai = 0, bi = 0, i = 0;
|
||||
while (ai < a.length && bi < b.length) {
|
||||
cmp(a[ai], b[bi]) <= 0
|
||||
? r[i++] = a[ai++]
|
||||
: r[i++] = b[bi++];
|
||||
}
|
||||
if (ai < a.length) r.push.apply(r, a.slice(ai));
|
||||
if (bi < b.length) r.push.apply(r, b.slice(bi));
|
||||
return r;
|
||||
};
|
||||
function _ms(a) {
|
||||
if (a.length <= 1)
|
||||
return a;
|
||||
var m = Math.floor(a.length / 2), left = a.slice(0, m), right = a.slice(m);
|
||||
left = _ms(left);
|
||||
right = _ms(right);
|
||||
return merge(left, right);
|
||||
};
|
||||
return _ms(array);
|
||||
};
|
||||
|
||||
function set_difference(a, b) {
|
||||
return a.filter(function(el){
|
||||
return b.indexOf(el) < 0;
|
||||
});
|
||||
};
|
||||
|
||||
function set_intersection(a, b) {
|
||||
return a.filter(function(el){
|
||||
return b.indexOf(el) >= 0;
|
||||
});
|
||||
};
|
||||
|
||||
// this function is taken from Acorn [1], written by Marijn Haverbeke
|
||||
// [1] https://github.com/marijnh/acorn
|
||||
function makePredicate(words) {
|
||||
if (!(words instanceof Array)) words = words.split(" ");
|
||||
var f = "", cats = [];
|
||||
out: for (var i = 0; i < words.length; ++i) {
|
||||
for (var j = 0; j < cats.length; ++j)
|
||||
if (cats[j][0].length == words[i].length) {
|
||||
cats[j].push(words[i]);
|
||||
continue out;
|
||||
}
|
||||
cats.push([words[i]]);
|
||||
}
|
||||
function compareTo(arr) {
|
||||
if (arr.length == 1) return f += "return str === " + JSON.stringify(arr[0]) + ";";
|
||||
f += "switch(str){";
|
||||
for (var i = 0; i < arr.length; ++i) f += "case " + JSON.stringify(arr[i]) + ":";
|
||||
f += "return true}return false;";
|
||||
}
|
||||
// When there are more than three length categories, an outer
|
||||
// switch first dispatches on the lengths, to save on comparisons.
|
||||
if (cats.length > 3) {
|
||||
cats.sort(function(a, b) {return b.length - a.length;});
|
||||
f += "switch(str.length){";
|
||||
for (var i = 0; i < cats.length; ++i) {
|
||||
var cat = cats[i];
|
||||
f += "case " + cat[0].length + ":";
|
||||
compareTo(cat);
|
||||
}
|
||||
f += "}";
|
||||
// Otherwise, simply generate a flat `switch` statement.
|
||||
} else {
|
||||
compareTo(words);
|
||||
}
|
||||
return new Function("str", f);
|
||||
};
|
||||
|
||||
function Dictionary() {
|
||||
this._values = Object.create(null);
|
||||
this._size = 0;
|
||||
};
|
||||
Dictionary.prototype = {
|
||||
set: function(key, val) {
|
||||
if (!this.has(key)) ++this._size;
|
||||
this._values["$" + key] = val;
|
||||
return this;
|
||||
},
|
||||
add: function(key, val) {
|
||||
if (this.has(key)) {
|
||||
this.get(key).push(val);
|
||||
} else {
|
||||
this.set(key, [ val ]);
|
||||
}
|
||||
return this;
|
||||
},
|
||||
get: function(key) { return this._values["$" + key] },
|
||||
del: function(key) {
|
||||
if (this.has(key)) {
|
||||
--this._size;
|
||||
delete this._values["$" + key];
|
||||
}
|
||||
return this;
|
||||
},
|
||||
has: function(key) { return ("$" + key) in this._values },
|
||||
each: function(f) {
|
||||
for (var i in this._values)
|
||||
f(this._values[i], i.substr(1));
|
||||
},
|
||||
size: function() {
|
||||
return this._size;
|
||||
},
|
||||
map: function(f) {
|
||||
var ret = [];
|
||||
for (var i in this._values)
|
||||
ret.push(f(this._values[i], i.substr(1)));
|
||||
return ret;
|
||||
}
|
||||
};
|
||||
88
node_modules/transformers/node_modules/uglify-js/package.json
generated
vendored
Normal file
@@ -0,0 +1,88 @@
|
||||
{
|
||||
"_args": [
|
||||
[
|
||||
"uglify-js@~2.2.5",
|
||||
"/home/mitchell/Desktop/test-mywebsite/mywebsite/node_modules/transformers"
|
||||
]
|
||||
],
|
||||
"_from": "uglify-js@>=2.2.5 <2.3.0",
|
||||
"_id": "uglify-js@2.2.5",
|
||||
"_inCache": true,
|
||||
"_installable": true,
|
||||
"_location": "/transformers/uglify-js",
|
||||
"_npmUser": {
|
||||
"email": "mihai.bazon@gmail.com",
|
||||
"name": "mishoo"
|
||||
},
|
||||
"_npmVersion": "1.1.66",
|
||||
"_phantomChildren": {},
|
||||
"_requested": {
|
||||
"name": "uglify-js",
|
||||
"raw": "uglify-js@~2.2.5",
|
||||
"rawSpec": "~2.2.5",
|
||||
"scope": null,
|
||||
"spec": ">=2.2.5 <2.3.0",
|
||||
"type": "range"
|
||||
},
|
||||
"_requiredBy": [
|
||||
"/transformers"
|
||||
],
|
||||
"_resolved": "https://registry.npmjs.org/uglify-js/-/uglify-js-2.2.5.tgz",
|
||||
"_shasum": "a6e02a70d839792b9780488b7b8b184c095c99c7",
|
||||
"_shrinkwrap": null,
|
||||
"_spec": "uglify-js@~2.2.5",
|
||||
"_where": "/home/mitchell/Desktop/test-mywebsite/mywebsite/node_modules/transformers",
|
||||
"bin": {
|
||||
"uglifyjs": "bin/uglifyjs"
|
||||
},
|
||||
"bugs": {
|
||||
"url": "https://github.com/mishoo/UglifyJS2/issues"
|
||||
},
|
||||
"dependencies": {
|
||||
"optimist": "~0.3.5",
|
||||
"source-map": "~0.1.7"
|
||||
},
|
||||
"description": "JavaScript parser, mangler/compressor and beautifier toolkit",
|
||||
"devDependencies": {},
|
||||
"directories": {},
|
||||
"dist": {
|
||||
"shasum": "a6e02a70d839792b9780488b7b8b184c095c99c7",
|
||||
"tarball": "http://registry.npmjs.org/uglify-js/-/uglify-js-2.2.5.tgz"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=0.4.0"
|
||||
},
|
||||
"homepage": "http://lisperator.net/uglifyjs",
|
||||
"main": "tools/node.js",
|
||||
"maintainers": [
|
||||
{
|
||||
"name": "caires",
|
||||
"email": "cairesvs@gmail.com"
|
||||
},
|
||||
{
|
||||
"name": "mape",
|
||||
"email": "mape@mape.me"
|
||||
},
|
||||
{
|
||||
"name": "mishoo",
|
||||
"email": "mihai.bazon@gmail.com"
|
||||
}
|
||||
],
|
||||
"name": "uglify-js",
|
||||
"optionalDependencies": {},
|
||||
"readme": "ERROR: No README data found!",
|
||||
"repositories": [
|
||||
{
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/mishoo/UglifyJS2.git"
|
||||
}
|
||||
],
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/mishoo/UglifyJS2.git"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "node test/run-tests.js"
|
||||
},
|
||||
"version": "2.2.5"
|
||||
}
|
||||
12
node_modules/transformers/node_modules/uglify-js/test/compress/arrays.js
generated
vendored
Normal file
@@ -0,0 +1,12 @@
|
||||
holes_and_undefined: {
|
||||
input: {
|
||||
x = [1, 2, undefined];
|
||||
y = [1, , 2, ];
|
||||
z = [1, undefined, 3];
|
||||
}
|
||||
expect: {
|
||||
x=[1,2,void 0];
|
||||
y=[1,,2];
|
||||
z=[1,void 0,3];
|
||||
}
|
||||
}
|
||||
49
node_modules/transformers/node_modules/uglify-js/test/compress/blocks.js
generated
vendored
Normal file
@@ -0,0 +1,49 @@
|
||||
remove_blocks: {
|
||||
input: {
|
||||
{;}
|
||||
foo();
|
||||
{};
|
||||
{
|
||||
{};
|
||||
};
|
||||
bar();
|
||||
{}
|
||||
}
|
||||
expect: {
|
||||
foo();
|
||||
bar();
|
||||
}
|
||||
}
|
||||
|
||||
keep_some_blocks: {
|
||||
input: {
|
||||
// 1.
|
||||
if (foo) {
|
||||
{{{}}}
|
||||
if (bar) { baz(); }
|
||||
{{}}
|
||||
} else {
|
||||
stuff();
|
||||
}
|
||||
|
||||
// 2.
|
||||
if (foo) {
|
||||
for (var i = 0; i < 5; ++i)
|
||||
if (bar) baz();
|
||||
} else {
|
||||
stuff();
|
||||
}
|
||||
}
|
||||
expect: {
|
||||
// 1.
|
||||
if (foo) {
|
||||
if (bar) baz();
|
||||
} else stuff();
|
||||
|
||||
// 2.
|
||||
if (foo) {
|
||||
for (var i = 0; i < 5; ++i)
|
||||
if (bar) baz();
|
||||
} else stuff();
|
||||
}
|
||||
}
|
||||
143
node_modules/transformers/node_modules/uglify-js/test/compress/conditionals.js
generated
vendored
Normal file
@@ -0,0 +1,143 @@
|
||||
ifs_1: {
|
||||
options = {
|
||||
conditionals: true
|
||||
};
|
||||
input: {
|
||||
if (foo) bar();
|
||||
if (!foo); else bar();
|
||||
if (foo); else bar();
|
||||
if (foo); else;
|
||||
}
|
||||
expect: {
|
||||
foo&&bar();
|
||||
foo&&bar();
|
||||
foo||bar();
|
||||
foo;
|
||||
}
|
||||
}
|
||||
|
||||
ifs_2: {
|
||||
options = {
|
||||
conditionals: true
|
||||
};
|
||||
input: {
|
||||
if (foo) {
|
||||
x();
|
||||
} else if (bar) {
|
||||
y();
|
||||
} else if (baz) {
|
||||
z();
|
||||
}
|
||||
|
||||
if (foo) {
|
||||
x();
|
||||
} else if (bar) {
|
||||
y();
|
||||
} else if (baz) {
|
||||
z();
|
||||
} else {
|
||||
t();
|
||||
}
|
||||
}
|
||||
expect: {
|
||||
foo ? x() : bar ? y() : baz && z();
|
||||
foo ? x() : bar ? y() : baz ? z() : t();
|
||||
}
|
||||
}
|
||||
|
||||
ifs_3_should_warn: {
|
||||
options = {
|
||||
conditionals : true,
|
||||
dead_code : true,
|
||||
evaluate : true,
|
||||
booleans : true
|
||||
};
|
||||
input: {
|
||||
if (x && !(x + "1") && y) { // 1
|
||||
var qq;
|
||||
foo();
|
||||
} else {
|
||||
bar();
|
||||
}
|
||||
|
||||
if (x || !!(x + "1") || y) { // 2
|
||||
foo();
|
||||
} else {
|
||||
var jj;
|
||||
bar();
|
||||
}
|
||||
}
|
||||
expect: {
|
||||
var qq; bar(); // 1
|
||||
var jj; foo(); // 2
|
||||
}
|
||||
}
|
||||
|
||||
ifs_4: {
|
||||
options = {
|
||||
conditionals: true
|
||||
};
|
||||
input: {
|
||||
if (foo && bar) {
|
||||
x(foo)[10].bar.baz = something();
|
||||
} else
|
||||
x(foo)[10].bar.baz = something_else();
|
||||
}
|
||||
expect: {
|
||||
x(foo)[10].bar.baz = (foo && bar) ? something() : something_else();
|
||||
}
|
||||
}
|
||||
|
||||
ifs_5: {
|
||||
options = {
|
||||
if_return: true,
|
||||
conditionals: true,
|
||||
comparisons: true,
|
||||
};
|
||||
input: {
|
||||
function f() {
|
||||
if (foo) return;
|
||||
bar();
|
||||
baz();
|
||||
}
|
||||
function g() {
|
||||
if (foo) return;
|
||||
if (bar) return;
|
||||
if (baz) return;
|
||||
if (baa) return;
|
||||
a();
|
||||
b();
|
||||
}
|
||||
}
|
||||
expect: {
|
||||
function f() {
|
||||
if (!foo) {
|
||||
bar();
|
||||
baz();
|
||||
}
|
||||
}
|
||||
function g() {
|
||||
if (!(foo || bar || baz || baa)) {
|
||||
a();
|
||||
b();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
ifs_6: {
|
||||
options = {
|
||||
conditionals: true,
|
||||
comparisons: true
|
||||
};
|
||||
input: {
|
||||
if (!foo && !bar && !baz && !boo) {
|
||||
x = 10;
|
||||
} else {
|
||||
x = 20;
|
||||
}
|
||||
}
|
||||
expect: {
|
||||
x = foo || bar || baz || boo ? 20 : 10;
|
||||
}
|
||||
}
|
||||
89
node_modules/transformers/node_modules/uglify-js/test/compress/dead-code.js
generated
vendored
Normal file
@@ -0,0 +1,89 @@
|
||||
dead_code_1: {
|
||||
options = {
|
||||
dead_code: true
|
||||
};
|
||||
input: {
|
||||
function f() {
|
||||
a();
|
||||
b();
|
||||
x = 10;
|
||||
return;
|
||||
if (x) {
|
||||
y();
|
||||
}
|
||||
}
|
||||
}
|
||||
expect: {
|
||||
function f() {
|
||||
a();
|
||||
b();
|
||||
x = 10;
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
dead_code_2_should_warn: {
|
||||
options = {
|
||||
dead_code: true
|
||||
};
|
||||
input: {
|
||||
function f() {
|
||||
g();
|
||||
x = 10;
|
||||
throw "foo";
|
||||
// completely discarding the `if` would introduce some
|
||||
// bugs. UglifyJS v1 doesn't deal with this issue; in v2
|
||||
// we copy any declarations to the upper scope.
|
||||
if (x) {
|
||||
y();
|
||||
var x;
|
||||
function g(){};
|
||||
// but nested declarations should not be kept.
|
||||
(function(){
|
||||
var q;
|
||||
function y(){};
|
||||
})();
|
||||
}
|
||||
}
|
||||
}
|
||||
expect: {
|
||||
function f() {
|
||||
g();
|
||||
x = 10;
|
||||
throw "foo";
|
||||
var x;
|
||||
function g(){};
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
dead_code_constant_boolean_should_warn_more: {
|
||||
options = {
|
||||
dead_code : true,
|
||||
loops : true,
|
||||
booleans : true,
|
||||
conditionals : true,
|
||||
evaluate : true
|
||||
};
|
||||
input: {
|
||||
while (!((foo && bar) || (x + "0"))) {
|
||||
console.log("unreachable");
|
||||
var foo;
|
||||
function bar() {}
|
||||
}
|
||||
for (var x = 10; x && (y || x) && (!typeof x); ++x) {
|
||||
asdf();
|
||||
foo();
|
||||
var moo;
|
||||
}
|
||||
}
|
||||
expect: {
|
||||
var foo;
|
||||
function bar() {}
|
||||
// nothing for the while
|
||||
// as for the for, it should keep:
|
||||
var x = 10;
|
||||
var moo;
|
||||
}
|
||||
}
|
||||
24
node_modules/transformers/node_modules/uglify-js/test/compress/debugger.js
generated
vendored
Normal file
@@ -0,0 +1,24 @@
|
||||
keep_debugger: {
|
||||
options = {
|
||||
drop_debugger: false
|
||||
};
|
||||
input: {
|
||||
debugger;
|
||||
}
|
||||
expect: {
|
||||
debugger;
|
||||
}
|
||||
}
|
||||
|
||||
drop_debugger: {
|
||||
options = {
|
||||
drop_debugger: true
|
||||
};
|
||||
input: {
|
||||
debugger;
|
||||
if (foo) debugger;
|
||||
}
|
||||
expect: {
|
||||
if (foo);
|
||||
}
|
||||
}
|
||||
97
node_modules/transformers/node_modules/uglify-js/test/compress/drop-unused.js
generated
vendored
Normal file
@@ -0,0 +1,97 @@
|
||||
unused_funarg_1: {
|
||||
options = { unused: true };
|
||||
input: {
|
||||
function f(a, b, c, d, e) {
|
||||
return a + b;
|
||||
}
|
||||
}
|
||||
expect: {
|
||||
function f(a, b) {
|
||||
return a + b;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
unused_funarg_2: {
|
||||
options = { unused: true };
|
||||
input: {
|
||||
function f(a, b, c, d, e) {
|
||||
return a + c;
|
||||
}
|
||||
}
|
||||
expect: {
|
||||
function f(a, b, c) {
|
||||
return a + c;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
unused_nested_function: {
|
||||
options = { unused: true };
|
||||
input: {
|
||||
function f(x, y) {
|
||||
function g() {
|
||||
something();
|
||||
}
|
||||
return x + y;
|
||||
}
|
||||
};
|
||||
expect: {
|
||||
function f(x, y) {
|
||||
return x + y;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
unused_circular_references_1: {
|
||||
options = { unused: true };
|
||||
input: {
|
||||
function f(x, y) {
|
||||
// circular reference
|
||||
function g() {
|
||||
return h();
|
||||
}
|
||||
function h() {
|
||||
return g();
|
||||
}
|
||||
return x + y;
|
||||
}
|
||||
};
|
||||
expect: {
|
||||
function f(x, y) {
|
||||
return x + y;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
unused_circular_references_2: {
|
||||
options = { unused: true };
|
||||
input: {
|
||||
function f(x, y) {
|
||||
var foo = 1, bar = baz, baz = foo + bar, qwe = moo();
|
||||
return x + y;
|
||||
}
|
||||
};
|
||||
expect: {
|
||||
function f(x, y) {
|
||||
moo(); // keeps side effect
|
||||
return x + y;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
unused_circular_references_3: {
|
||||
options = { unused: true };
|
||||
input: {
|
||||
function f(x, y) {
|
||||
var g = function() { return h() };
|
||||
var h = function() { return g() };
|
||||
return x + y;
|
||||
}
|
||||
};
|
||||
expect: {
|
||||
function f(x, y) {
|
||||
return x + y;
|
||||
}
|
||||
}
|
||||
}
|
||||
17
node_modules/transformers/node_modules/uglify-js/test/compress/issue-105.js
generated
vendored
Normal file
@@ -0,0 +1,17 @@
|
||||
typeof_eq_undefined: {
|
||||
options = {
|
||||
comparisons: true,
|
||||
unsafe: false
|
||||
};
|
||||
input: { a = typeof b.c != "undefined" }
|
||||
expect: { a = "undefined" != typeof b.c }
|
||||
}
|
||||
|
||||
typeof_eq_undefined_unsafe: {
|
||||
options = {
|
||||
comparisons: true,
|
||||
unsafe: true
|
||||
};
|
||||
input: { a = typeof b.c != "undefined" }
|
||||
expect: { a = b.c !== void 0 }
|
||||
}
|
||||
11
node_modules/transformers/node_modules/uglify-js/test/compress/issue-12.js
generated
vendored
Normal file
@@ -0,0 +1,11 @@
|
||||
keep_name_of_getter: {
|
||||
options = { unused: true };
|
||||
input: { a = { get foo () {} } }
|
||||
expect: { a = { get foo () {} } }
|
||||
}
|
||||
|
||||
keep_name_of_setter: {
|
||||
options = { unused: true };
|
||||
input: { a = { set foo () {} } }
|
||||
expect: { a = { set foo () {} } }
|
||||
}
|
||||
17
node_modules/transformers/node_modules/uglify-js/test/compress/issue-22.js
generated
vendored
Normal file
@@ -0,0 +1,17 @@
|
||||
return_with_no_value_in_if_body: {
|
||||
options = { conditionals: true };
|
||||
input: {
|
||||
function foo(bar) {
|
||||
if (bar) {
|
||||
return;
|
||||
} else {
|
||||
return 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
expect: {
|
||||
function foo (bar) {
|
||||
return bar ? void 0 : 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
31
node_modules/transformers/node_modules/uglify-js/test/compress/issue-44.js
generated
vendored
Normal file
@@ -0,0 +1,31 @@
|
||||
issue_44_valid_ast_1: {
|
||||
options = { unused: true };
|
||||
input: {
|
||||
function a(b) {
|
||||
for (var i = 0, e = b.qoo(); ; i++) {}
|
||||
}
|
||||
}
|
||||
expect: {
|
||||
function a(b) {
|
||||
var i = 0;
|
||||
for (b.qoo(); ; i++);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
issue_44_valid_ast_2: {
|
||||
options = { unused: true };
|
||||
input: {
|
||||
function a(b) {
|
||||
if (foo) for (var i = 0, e = b.qoo(); ; i++) {}
|
||||
}
|
||||
}
|
||||
expect: {
|
||||
function a(b) {
|
||||
if (foo) {
|
||||
var i = 0;
|
||||
for (b.qoo(); ; i++);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
30
node_modules/transformers/node_modules/uglify-js/test/compress/issue-59.js
generated
vendored
Normal file
@@ -0,0 +1,30 @@
|
||||
keep_continue: {
|
||||
options = {
|
||||
dead_code: true,
|
||||
evaluate: true
|
||||
};
|
||||
input: {
|
||||
while (a) {
|
||||
if (b) {
|
||||
switch (true) {
|
||||
case c():
|
||||
d();
|
||||
}
|
||||
continue;
|
||||
}
|
||||
f();
|
||||
}
|
||||
}
|
||||
expect: {
|
||||
while (a) {
|
||||
if (b) {
|
||||
switch (true) {
|
||||
case c():
|
||||
d();
|
||||
}
|
||||
continue;
|
||||
}
|
||||
f();
|
||||
}
|
||||
}
|
||||
}
|
||||
163
node_modules/transformers/node_modules/uglify-js/test/compress/labels.js
generated
vendored
Normal file
@@ -0,0 +1,163 @@
|
||||
labels_1: {
|
||||
options = { if_return: true, conditionals: true, dead_code: true };
|
||||
input: {
|
||||
out: {
|
||||
if (foo) break out;
|
||||
console.log("bar");
|
||||
}
|
||||
};
|
||||
expect: {
|
||||
foo || console.log("bar");
|
||||
}
|
||||
}
|
||||
|
||||
labels_2: {
|
||||
options = { if_return: true, conditionals: true, dead_code: true };
|
||||
input: {
|
||||
out: {
|
||||
if (foo) print("stuff");
|
||||
else break out;
|
||||
console.log("here");
|
||||
}
|
||||
};
|
||||
expect: {
|
||||
if (foo) {
|
||||
print("stuff");
|
||||
console.log("here");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
labels_3: {
|
||||
options = { if_return: true, conditionals: true, dead_code: true };
|
||||
input: {
|
||||
for (var i = 0; i < 5; ++i) {
|
||||
if (i < 3) continue;
|
||||
console.log(i);
|
||||
}
|
||||
};
|
||||
expect: {
|
||||
for (var i = 0; i < 5; ++i)
|
||||
i < 3 || console.log(i);
|
||||
}
|
||||
}
|
||||
|
||||
labels_4: {
|
||||
options = { if_return: true, conditionals: true, dead_code: true };
|
||||
input: {
|
||||
out: for (var i = 0; i < 5; ++i) {
|
||||
if (i < 3) continue out;
|
||||
console.log(i);
|
||||
}
|
||||
};
|
||||
expect: {
|
||||
for (var i = 0; i < 5; ++i)
|
||||
i < 3 || console.log(i);
|
||||
}
|
||||
}
|
||||
|
||||
labels_5: {
|
||||
options = { if_return: true, conditionals: true, dead_code: true };
|
||||
// should keep the break-s in the following
|
||||
input: {
|
||||
while (foo) {
|
||||
if (bar) break;
|
||||
console.log("foo");
|
||||
}
|
||||
out: while (foo) {
|
||||
if (bar) break out;
|
||||
console.log("foo");
|
||||
}
|
||||
};
|
||||
expect: {
|
||||
while (foo) {
|
||||
if (bar) break;
|
||||
console.log("foo");
|
||||
}
|
||||
out: while (foo) {
|
||||
if (bar) break out;
|
||||
console.log("foo");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
labels_6: {
|
||||
input: {
|
||||
out: break out;
|
||||
};
|
||||
expect: {}
|
||||
}
|
||||
|
||||
labels_7: {
|
||||
options = { if_return: true, conditionals: true, dead_code: true };
|
||||
input: {
|
||||
while (foo) {
|
||||
x();
|
||||
y();
|
||||
continue;
|
||||
}
|
||||
};
|
||||
expect: {
|
||||
while (foo) {
|
||||
x();
|
||||
y();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
labels_8: {
|
||||
options = { if_return: true, conditionals: true, dead_code: true };
|
||||
input: {
|
||||
while (foo) {
|
||||
x();
|
||||
y();
|
||||
break;
|
||||
}
|
||||
};
|
||||
expect: {
|
||||
while (foo) {
|
||||
x();
|
||||
y();
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
labels_9: {
|
||||
options = { if_return: true, conditionals: true, dead_code: true };
|
||||
input: {
|
||||
out: while (foo) {
|
||||
x();
|
||||
y();
|
||||
continue out;
|
||||
z();
|
||||
k();
|
||||
}
|
||||
};
|
||||
expect: {
|
||||
while (foo) {
|
||||
x();
|
||||
y();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
labels_10: {
|
||||
options = { if_return: true, conditionals: true, dead_code: true };
|
||||
input: {
|
||||
out: while (foo) {
|
||||
x();
|
||||
y();
|
||||
break out;
|
||||
z();
|
||||
k();
|
||||
}
|
||||
};
|
||||
expect: {
|
||||
out: while (foo) {
|
||||
x();
|
||||
y();
|
||||
break out;
|
||||
}
|
||||
}
|
||||
}
|
||||
123
node_modules/transformers/node_modules/uglify-js/test/compress/loops.js
generated
vendored
Normal file
@@ -0,0 +1,123 @@
|
||||
while_becomes_for: {
|
||||
options = { loops: true };
|
||||
input: {
|
||||
while (foo()) bar();
|
||||
}
|
||||
expect: {
|
||||
for (; foo(); ) bar();
|
||||
}
|
||||
}
|
||||
|
||||
drop_if_break_1: {
|
||||
options = { loops: true };
|
||||
input: {
|
||||
for (;;)
|
||||
if (foo()) break;
|
||||
}
|
||||
expect: {
|
||||
for (; !foo(););
|
||||
}
|
||||
}
|
||||
|
||||
drop_if_break_2: {
|
||||
options = { loops: true };
|
||||
input: {
|
||||
for (;bar();)
|
||||
if (foo()) break;
|
||||
}
|
||||
expect: {
|
||||
for (; bar() && !foo(););
|
||||
}
|
||||
}
|
||||
|
||||
drop_if_break_3: {
|
||||
options = { loops: true };
|
||||
input: {
|
||||
for (;bar();) {
|
||||
if (foo()) break;
|
||||
stuff1();
|
||||
stuff2();
|
||||
}
|
||||
}
|
||||
expect: {
|
||||
for (; bar() && !foo();) {
|
||||
stuff1();
|
||||
stuff2();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
drop_if_break_4: {
|
||||
options = { loops: true, sequences: true };
|
||||
input: {
|
||||
for (;bar();) {
|
||||
x();
|
||||
y();
|
||||
if (foo()) break;
|
||||
z();
|
||||
k();
|
||||
}
|
||||
}
|
||||
expect: {
|
||||
for (; bar() && (x(), y(), !foo());) z(), k();
|
||||
}
|
||||
}
|
||||
|
||||
drop_if_else_break_1: {
|
||||
options = { loops: true };
|
||||
input: {
|
||||
for (;;) if (foo()) bar(); else break;
|
||||
}
|
||||
expect: {
|
||||
for (; foo(); ) bar();
|
||||
}
|
||||
}
|
||||
|
||||
drop_if_else_break_2: {
|
||||
options = { loops: true };
|
||||
input: {
|
||||
for (;bar();) {
|
||||
if (foo()) baz();
|
||||
else break;
|
||||
}
|
||||
}
|
||||
expect: {
|
||||
for (; bar() && foo();) baz();
|
||||
}
|
||||
}
|
||||
|
||||
drop_if_else_break_3: {
|
||||
options = { loops: true };
|
||||
input: {
|
||||
for (;bar();) {
|
||||
if (foo()) baz();
|
||||
else break;
|
||||
stuff1();
|
||||
stuff2();
|
||||
}
|
||||
}
|
||||
expect: {
|
||||
for (; bar() && foo();) {
|
||||
baz();
|
||||
stuff1();
|
||||
stuff2();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
drop_if_else_break_4: {
|
||||
options = { loops: true, sequences: true };
|
||||
input: {
|
||||
for (;bar();) {
|
||||
x();
|
||||
y();
|
||||
if (foo()) baz();
|
||||
else break;
|
||||
z();
|
||||
k();
|
||||
}
|
||||
}
|
||||
expect: {
|
||||
for (; bar() && (x(), y(), foo());) baz(), z(), k();
|
||||
}
|
||||
}
|
||||
25
node_modules/transformers/node_modules/uglify-js/test/compress/properties.js
generated
vendored
Normal file
@@ -0,0 +1,25 @@
|
||||
keep_properties: {
|
||||
options = {
|
||||
properties: false
|
||||
};
|
||||
input: {
|
||||
a["foo"] = "bar";
|
||||
}
|
||||
expect: {
|
||||
a["foo"] = "bar";
|
||||
}
|
||||
}
|
||||
|
||||
dot_properties: {
|
||||
options = {
|
||||
properties: true
|
||||
};
|
||||
input: {
|
||||
a["foo"] = "bar";
|
||||
a["if"] = "if";
|
||||
}
|
||||
expect: {
|
||||
a.foo = "bar";
|
||||
a["if"] = "if";
|
||||
}
|
||||
}
|
||||
161
node_modules/transformers/node_modules/uglify-js/test/compress/sequences.js
generated
vendored
Normal file
@@ -0,0 +1,161 @@
|
||||
make_sequences_1: {
|
||||
options = {
|
||||
sequences: true
|
||||
};
|
||||
input: {
|
||||
foo();
|
||||
bar();
|
||||
baz();
|
||||
}
|
||||
expect: {
|
||||
foo(),bar(),baz();
|
||||
}
|
||||
}
|
||||
|
||||
make_sequences_2: {
|
||||
options = {
|
||||
sequences: true
|
||||
};
|
||||
input: {
|
||||
if (boo) {
|
||||
foo();
|
||||
bar();
|
||||
baz();
|
||||
} else {
|
||||
x();
|
||||
y();
|
||||
z();
|
||||
}
|
||||
}
|
||||
expect: {
|
||||
if (boo) foo(),bar(),baz();
|
||||
else x(),y(),z();
|
||||
}
|
||||
}
|
||||
|
||||
make_sequences_3: {
|
||||
options = {
|
||||
sequences: true
|
||||
};
|
||||
input: {
|
||||
function f() {
|
||||
foo();
|
||||
bar();
|
||||
return baz();
|
||||
}
|
||||
function g() {
|
||||
foo();
|
||||
bar();
|
||||
throw new Error();
|
||||
}
|
||||
}
|
||||
expect: {
|
||||
function f() {
|
||||
return foo(), bar(), baz();
|
||||
}
|
||||
function g() {
|
||||
throw foo(), bar(), new Error();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
make_sequences_4: {
|
||||
options = {
|
||||
sequences: true
|
||||
};
|
||||
input: {
|
||||
x = 5;
|
||||
if (y) z();
|
||||
|
||||
x = 5;
|
||||
for (i = 0; i < 5; i++) console.log(i);
|
||||
|
||||
x = 5;
|
||||
for (; i < 5; i++) console.log(i);
|
||||
|
||||
x = 5;
|
||||
switch (y) {}
|
||||
|
||||
x = 5;
|
||||
with (obj) {}
|
||||
}
|
||||
expect: {
|
||||
if (x = 5, y) z();
|
||||
for (x = 5, i = 0; i < 5; i++) console.log(i);
|
||||
for (x = 5; i < 5; i++) console.log(i);
|
||||
switch (x = 5, y) {}
|
||||
with (x = 5, obj);
|
||||
}
|
||||
}
|
||||
|
||||
lift_sequences_1: {
|
||||
options = { sequences: true };
|
||||
input: {
|
||||
foo = !(x(), y(), bar());
|
||||
}
|
||||
expect: {
|
||||
x(), y(), foo = !bar();
|
||||
}
|
||||
}
|
||||
|
||||
lift_sequences_2: {
|
||||
options = { sequences: true, evaluate: true };
|
||||
input: {
|
||||
q = 1 + (foo(), bar(), 5) + 7 * (5 / (3 - (a(), (QW=ER), c(), 2))) - (x(), y(), 5);
|
||||
}
|
||||
expect: {
|
||||
foo(), bar(), a(), QW = ER, c(), x(), y(), q = 36
|
||||
}
|
||||
}
|
||||
|
||||
lift_sequences_3: {
|
||||
options = { sequences: true, conditionals: true };
|
||||
input: {
|
||||
x = (foo(), bar(), baz()) ? 10 : 20;
|
||||
}
|
||||
expect: {
|
||||
foo(), bar(), x = baz() ? 10 : 20;
|
||||
}
|
||||
}
|
||||
|
||||
lift_sequences_4: {
|
||||
options = { side_effects: true };
|
||||
input: {
|
||||
x = (foo, bar, baz);
|
||||
}
|
||||
expect: {
|
||||
x = baz;
|
||||
}
|
||||
}
|
||||
|
||||
for_sequences: {
|
||||
options = { sequences: true };
|
||||
input: {
|
||||
// 1
|
||||
foo();
|
||||
bar();
|
||||
for (; false;);
|
||||
// 2
|
||||
foo();
|
||||
bar();
|
||||
for (x = 5; false;);
|
||||
// 3
|
||||
x = (foo in bar);
|
||||
for (; false;);
|
||||
// 4
|
||||
x = (foo in bar);
|
||||
for (y = 5; false;);
|
||||
}
|
||||
expect: {
|
||||
// 1
|
||||
for (foo(), bar(); false;);
|
||||
// 2
|
||||
for (foo(), bar(), x = 5; false;);
|
||||
// 3
|
||||
x = (foo in bar);
|
||||
for (; false;);
|
||||
// 4
|
||||
x = (foo in bar);
|
||||
for (y = 5; false;);
|
||||
}
|
||||
}
|
||||
210
node_modules/transformers/node_modules/uglify-js/test/compress/switch.js
generated
vendored
Normal file
@@ -0,0 +1,210 @@
|
||||
constant_switch_1: {
|
||||
options = { dead_code: true, evaluate: true };
|
||||
input: {
|
||||
switch (1+1) {
|
||||
case 1: foo(); break;
|
||||
case 1+1: bar(); break;
|
||||
case 1+1+1: baz(); break;
|
||||
}
|
||||
}
|
||||
expect: {
|
||||
bar();
|
||||
}
|
||||
}
|
||||
|
||||
constant_switch_2: {
|
||||
options = { dead_code: true, evaluate: true };
|
||||
input: {
|
||||
switch (1) {
|
||||
case 1: foo();
|
||||
case 1+1: bar(); break;
|
||||
case 1+1+1: baz();
|
||||
}
|
||||
}
|
||||
expect: {
|
||||
foo();
|
||||
bar();
|
||||
}
|
||||
}
|
||||
|
||||
constant_switch_3: {
|
||||
options = { dead_code: true, evaluate: true };
|
||||
input: {
|
||||
switch (10) {
|
||||
case 1: foo();
|
||||
case 1+1: bar(); break;
|
||||
case 1+1+1: baz();
|
||||
default:
|
||||
def();
|
||||
}
|
||||
}
|
||||
expect: {
|
||||
def();
|
||||
}
|
||||
}
|
||||
|
||||
constant_switch_4: {
|
||||
options = { dead_code: true, evaluate: true };
|
||||
input: {
|
||||
switch (2) {
|
||||
case 1:
|
||||
x();
|
||||
if (foo) break;
|
||||
y();
|
||||
break;
|
||||
case 1+1:
|
||||
bar();
|
||||
default:
|
||||
def();
|
||||
}
|
||||
}
|
||||
expect: {
|
||||
bar();
|
||||
def();
|
||||
}
|
||||
}
|
||||
|
||||
constant_switch_5: {
|
||||
options = { dead_code: true, evaluate: true };
|
||||
input: {
|
||||
switch (1) {
|
||||
case 1:
|
||||
x();
|
||||
if (foo) break;
|
||||
y();
|
||||
break;
|
||||
case 1+1:
|
||||
bar();
|
||||
default:
|
||||
def();
|
||||
}
|
||||
}
|
||||
expect: {
|
||||
// the break inside the if ruins our job
|
||||
// we can still get rid of irrelevant cases.
|
||||
switch (1) {
|
||||
case 1:
|
||||
x();
|
||||
if (foo) break;
|
||||
y();
|
||||
}
|
||||
// XXX: we could optimize this better by inventing an outer
|
||||
// labeled block, but that's kinda tricky.
|
||||
}
|
||||
}
|
||||
|
||||
constant_switch_6: {
|
||||
options = { dead_code: true, evaluate: true };
|
||||
input: {
|
||||
OUT: {
|
||||
foo();
|
||||
switch (1) {
|
||||
case 1:
|
||||
x();
|
||||
if (foo) break OUT;
|
||||
y();
|
||||
case 1+1:
|
||||
bar();
|
||||
break;
|
||||
default:
|
||||
def();
|
||||
}
|
||||
}
|
||||
}
|
||||
expect: {
|
||||
OUT: {
|
||||
foo();
|
||||
x();
|
||||
if (foo) break OUT;
|
||||
y();
|
||||
bar();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
constant_switch_7: {
|
||||
options = { dead_code: true, evaluate: true };
|
||||
input: {
|
||||
OUT: {
|
||||
foo();
|
||||
switch (1) {
|
||||
case 1:
|
||||
x();
|
||||
if (foo) break OUT;
|
||||
for (var x = 0; x < 10; x++) {
|
||||
if (x > 5) break; // this break refers to the for, not to the switch; thus it
|
||||
// shouldn't ruin our optimization
|
||||
console.log(x);
|
||||
}
|
||||
y();
|
||||
case 1+1:
|
||||
bar();
|
||||
break;
|
||||
default:
|
||||
def();
|
||||
}
|
||||
}
|
||||
}
|
||||
expect: {
|
||||
OUT: {
|
||||
foo();
|
||||
x();
|
||||
if (foo) break OUT;
|
||||
for (var x = 0; x < 10; x++) {
|
||||
if (x > 5) break;
|
||||
console.log(x);
|
||||
}
|
||||
y();
|
||||
bar();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
constant_switch_8: {
|
||||
options = { dead_code: true, evaluate: true };
|
||||
input: {
|
||||
OUT: switch (1) {
|
||||
case 1:
|
||||
x();
|
||||
for (;;) break OUT;
|
||||
y();
|
||||
break;
|
||||
case 1+1:
|
||||
bar();
|
||||
default:
|
||||
def();
|
||||
}
|
||||
}
|
||||
expect: {
|
||||
OUT: {
|
||||
x();
|
||||
for (;;) break OUT;
|
||||
y();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
constant_switch_9: {
|
||||
options = { dead_code: true, evaluate: true };
|
||||
input: {
|
||||
OUT: switch (1) {
|
||||
case 1:
|
||||
x();
|
||||
for (;;) if (foo) break OUT;
|
||||
y();
|
||||
case 1+1:
|
||||
bar();
|
||||
default:
|
||||
def();
|
||||
}
|
||||
}
|
||||
expect: {
|
||||
OUT: {
|
||||
x();
|
||||
for (;;) if (foo) break OUT;
|
||||
y();
|
||||
bar();
|
||||
def();
|
||||
}
|
||||
}
|
||||
}
|
||||
170
node_modules/transformers/node_modules/uglify-js/test/run-tests.js
generated
vendored
Executable file
@@ -0,0 +1,170 @@
|
||||
#! /usr/bin/env node
|
||||
|
||||
var U = require("../tools/node");
|
||||
var path = require("path");
|
||||
var fs = require("fs");
|
||||
var assert = require("assert");
|
||||
var sys = require("util");
|
||||
|
||||
var tests_dir = path.dirname(module.filename);
|
||||
|
||||
run_compress_tests();
|
||||
|
||||
/* -----[ utils ]----- */
|
||||
|
||||
function tmpl() {
|
||||
return U.string_template.apply(this, arguments);
|
||||
}
|
||||
|
||||
function log() {
|
||||
var txt = tmpl.apply(this, arguments);
|
||||
sys.puts(txt);
|
||||
}
|
||||
|
||||
function log_directory(dir) {
|
||||
log("*** Entering [{dir}]", { dir: dir });
|
||||
}
|
||||
|
||||
function log_start_file(file) {
|
||||
log("--- {file}", { file: file });
|
||||
}
|
||||
|
||||
function log_test(name) {
|
||||
log(" Running test [{name}]", { name: name });
|
||||
}
|
||||
|
||||
function find_test_files(dir) {
|
||||
var files = fs.readdirSync(dir).filter(function(name){
|
||||
return /\.js$/i.test(name);
|
||||
});
|
||||
if (process.argv.length > 2) {
|
||||
var x = process.argv.slice(2);
|
||||
files = files.filter(function(f){
|
||||
return x.indexOf(f) >= 0;
|
||||
});
|
||||
}
|
||||
return files;
|
||||
}
|
||||
|
||||
function test_directory(dir) {
|
||||
return path.resolve(tests_dir, dir);
|
||||
}
|
||||
|
||||
function as_toplevel(input) {
|
||||
if (input instanceof U.AST_BlockStatement) input = input.body;
|
||||
else if (input instanceof U.AST_Statement) input = [ input ];
|
||||
else throw new Error("Unsupported input syntax");
|
||||
var toplevel = new U.AST_Toplevel({ body: input });
|
||||
toplevel.figure_out_scope();
|
||||
return toplevel;
|
||||
}
|
||||
|
||||
function run_compress_tests() {
|
||||
var dir = test_directory("compress");
|
||||
log_directory("compress");
|
||||
var files = find_test_files(dir);
|
||||
function test_file(file) {
|
||||
log_start_file(file);
|
||||
function test_case(test) {
|
||||
log_test(test.name);
|
||||
var options = U.defaults(test.options, {
|
||||
warnings: false
|
||||
});
|
||||
var cmp = new U.Compressor(options, true);
|
||||
var expect = make_code(as_toplevel(test.expect), false);
|
||||
var input = as_toplevel(test.input);
|
||||
var input_code = make_code(test.input);
|
||||
var output = input.transform(cmp);
|
||||
output.figure_out_scope();
|
||||
output = make_code(output, false);
|
||||
if (expect != output) {
|
||||
log("!!! failed\n---INPUT---\n{input}\n---OUTPUT---\n{output}\n---EXPECTED---\n{expected}\n\n", {
|
||||
input: input_code,
|
||||
output: output,
|
||||
expected: expect
|
||||
});
|
||||
}
|
||||
}
|
||||
var tests = parse_test(path.resolve(dir, file));
|
||||
for (var i in tests) if (tests.hasOwnProperty(i)) {
|
||||
test_case(tests[i]);
|
||||
}
|
||||
}
|
||||
files.forEach(function(file){
|
||||
test_file(file);
|
||||
});
|
||||
}
|
||||
|
||||
function parse_test(file) {
|
||||
var script = fs.readFileSync(file, "utf8");
|
||||
var ast = U.parse(script, {
|
||||
filename: file
|
||||
});
|
||||
var tests = {};
|
||||
var tw = new U.TreeWalker(function(node, descend){
|
||||
if (node instanceof U.AST_LabeledStatement
|
||||
&& tw.parent() instanceof U.AST_Toplevel) {
|
||||
var name = node.label.name;
|
||||
tests[name] = get_one_test(name, node.body);
|
||||
return true;
|
||||
}
|
||||
if (!(node instanceof U.AST_Toplevel)) croak(node);
|
||||
});
|
||||
ast.walk(tw);
|
||||
return tests;
|
||||
|
||||
function croak(node) {
|
||||
throw new Error(tmpl("Can't understand test file {file} [{line},{col}]\n{code}", {
|
||||
file: file,
|
||||
line: node.start.line,
|
||||
col: node.start.col,
|
||||
code: make_code(node, false)
|
||||
}));
|
||||
}
|
||||
|
||||
function get_one_test(name, block) {
|
||||
var test = { name: name, options: {} };
|
||||
var tw = new U.TreeWalker(function(node, descend){
|
||||
if (node instanceof U.AST_Assign) {
|
||||
if (!(node.left instanceof U.AST_SymbolRef)) {
|
||||
croak(node);
|
||||
}
|
||||
var name = node.left.name;
|
||||
test[name] = evaluate(node.right);
|
||||
return true;
|
||||
}
|
||||
if (node instanceof U.AST_LabeledStatement) {
|
||||
assert.ok(
|
||||
node.label.name == "input" || node.label.name == "expect",
|
||||
tmpl("Unsupported label {name} [{line},{col}]", {
|
||||
name: node.label.name,
|
||||
line: node.label.start.line,
|
||||
col: node.label.start.col
|
||||
})
|
||||
);
|
||||
var stat = node.body;
|
||||
if (stat instanceof U.AST_BlockStatement) {
|
||||
if (stat.body.length == 1) stat = stat.body[0];
|
||||
else if (stat.body.length == 0) stat = new U.AST_EmptyStatement();
|
||||
}
|
||||
test[node.label.name] = stat;
|
||||
return true;
|
||||
}
|
||||
});
|
||||
block.walk(tw);
|
||||
return test;
|
||||
};
|
||||
}
|
||||
|
||||
function make_code(ast, beautify) {
|
||||
if (arguments.length == 1) beautify = true;
|
||||
var stream = U.OutputStream({ beautify: beautify });
|
||||
ast.print(stream);
|
||||
return stream.get();
|
||||
}
|
||||
|
||||
function evaluate(code) {
|
||||
if (code instanceof U.AST_Node)
|
||||
code = make_code(code);
|
||||
return new Function("return(" + code + ")")();
|
||||
}
|
||||
164
node_modules/transformers/node_modules/uglify-js/tools/node.js
generated
vendored
Normal file
@@ -0,0 +1,164 @@
|
||||
var path = require("path");
|
||||
var fs = require("fs");
|
||||
var vm = require("vm");
|
||||
var sys = require("util");
|
||||
|
||||
var UglifyJS = vm.createContext({
|
||||
sys : sys,
|
||||
console : console,
|
||||
MOZ_SourceMap : require("source-map")
|
||||
});
|
||||
|
||||
function load_global(file) {
|
||||
file = path.resolve(path.dirname(module.filename), file);
|
||||
try {
|
||||
var code = fs.readFileSync(file, "utf8");
|
||||
return vm.runInContext(code, UglifyJS, file);
|
||||
} catch(ex) {
|
||||
// XXX: in case of a syntax error, the message is kinda
|
||||
// useless. (no location information).
|
||||
sys.debug("ERROR in file: " + file + " / " + ex);
|
||||
process.exit(1);
|
||||
}
|
||||
};
|
||||
|
||||
var FILES = exports.FILES = [
|
||||
"../lib/utils.js",
|
||||
"../lib/ast.js",
|
||||
"../lib/parse.js",
|
||||
"../lib/transform.js",
|
||||
"../lib/scope.js",
|
||||
"../lib/output.js",
|
||||
"../lib/compress.js",
|
||||
"../lib/sourcemap.js",
|
||||
"../lib/mozilla-ast.js"
|
||||
].map(function(file){
|
||||
return path.join(path.dirname(fs.realpathSync(__filename)), file);
|
||||
});
|
||||
|
||||
FILES.forEach(load_global);
|
||||
|
||||
UglifyJS.AST_Node.warn_function = function(txt) {
|
||||
sys.error("WARN: " + txt);
|
||||
};
|
||||
|
||||
// XXX: perhaps we shouldn't export everything but heck, I'm lazy.
|
||||
for (var i in UglifyJS) {
|
||||
if (UglifyJS.hasOwnProperty(i)) {
|
||||
exports[i] = UglifyJS[i];
|
||||
}
|
||||
}
|
||||
|
||||
exports.minify = function(files, options) {
|
||||
options = UglifyJS.defaults(options, {
|
||||
outSourceMap : null,
|
||||
sourceRoot : null,
|
||||
inSourceMap : null,
|
||||
fromString : false,
|
||||
warnings : false,
|
||||
mangle : {},
|
||||
output : null,
|
||||
compress : {}
|
||||
});
|
||||
if (typeof files == "string")
|
||||
files = [ files ];
|
||||
|
||||
// 1. parse
|
||||
var toplevel = null;
|
||||
files.forEach(function(file){
|
||||
var code = options.fromString
|
||||
? file
|
||||
: fs.readFileSync(file, "utf8");
|
||||
toplevel = UglifyJS.parse(code, {
|
||||
filename: options.fromString ? "?" : file,
|
||||
toplevel: toplevel
|
||||
});
|
||||
});
|
||||
|
||||
// 2. compress
|
||||
if (options.compress) {
|
||||
var compress = { warnings: options.warnings };
|
||||
UglifyJS.merge(compress, options.compress);
|
||||
toplevel.figure_out_scope();
|
||||
var sq = UglifyJS.Compressor(compress);
|
||||
toplevel = toplevel.transform(sq);
|
||||
}
|
||||
|
||||
// 3. mangle
|
||||
if (options.mangle) {
|
||||
toplevel.figure_out_scope();
|
||||
toplevel.compute_char_frequency();
|
||||
toplevel.mangle_names(options.mangle);
|
||||
}
|
||||
|
||||
// 4. output
|
||||
var map = null;
|
||||
var inMap = null;
|
||||
if (options.inSourceMap) {
|
||||
inMap = fs.readFileSync(options.inSourceMap, "utf8");
|
||||
}
|
||||
if (options.outSourceMap) map = UglifyJS.SourceMap({
|
||||
file: options.outSourceMap,
|
||||
orig: inMap,
|
||||
root: options.sourceRoot
|
||||
});
|
||||
var output = { source_map: map };
|
||||
if (options.output) {
|
||||
UglifyJS.merge(output, options.output);
|
||||
}
|
||||
var stream = UglifyJS.OutputStream(output);
|
||||
toplevel.print(stream);
|
||||
return {
|
||||
code : stream + "",
|
||||
map : map + ""
|
||||
};
|
||||
};
|
||||
|
||||
// exports.describe_ast = function() {
|
||||
// function doitem(ctor) {
|
||||
// var sub = {};
|
||||
// ctor.SUBCLASSES.forEach(function(ctor){
|
||||
// sub[ctor.TYPE] = doitem(ctor);
|
||||
// });
|
||||
// var ret = {};
|
||||
// if (ctor.SELF_PROPS.length > 0) ret.props = ctor.SELF_PROPS;
|
||||
// if (ctor.SUBCLASSES.length > 0) ret.sub = sub;
|
||||
// return ret;
|
||||
// }
|
||||
// return doitem(UglifyJS.AST_Node).sub;
|
||||
// }
|
||||
|
||||
exports.describe_ast = function() {
|
||||
var out = UglifyJS.OutputStream({ beautify: true });
|
||||
function doitem(ctor) {
|
||||
out.print("AST_" + ctor.TYPE);
|
||||
var props = ctor.SELF_PROPS.filter(function(prop){
|
||||
return !/^\$/.test(prop);
|
||||
});
|
||||
if (props.length > 0) {
|
||||
out.space();
|
||||
out.with_parens(function(){
|
||||
props.forEach(function(prop, i){
|
||||
if (i) out.space();
|
||||
out.print(prop);
|
||||
});
|
||||
});
|
||||
}
|
||||
if (ctor.documentation) {
|
||||
out.space();
|
||||
out.print_string(ctor.documentation);
|
||||
}
|
||||
if (ctor.SUBCLASSES.length > 0) {
|
||||
out.space();
|
||||
out.with_block(function(){
|
||||
ctor.SUBCLASSES.forEach(function(ctor, i){
|
||||
out.indent();
|
||||
doitem(ctor);
|
||||
out.newline();
|
||||
});
|
||||
});
|
||||
}
|
||||
};
|
||||
doitem(UglifyJS.AST_Node);
|
||||
return out + "";
|
||||
};
|
||||
122
node_modules/transformers/package.json
generated
vendored
Normal file
@@ -0,0 +1,122 @@
|
||||
{
|
||||
"_args": [
|
||||
[
|
||||
"transformers@2.1.0",
|
||||
"/home/mitchell/Desktop/test-mywebsite/mywebsite/node_modules/jade"
|
||||
]
|
||||
],
|
||||
"_from": "transformers@2.1.0",
|
||||
"_id": "transformers@2.1.0",
|
||||
"_inCache": true,
|
||||
"_installable": true,
|
||||
"_location": "/transformers",
|
||||
"_npmUser": {
|
||||
"email": "forbes@lindesay.co.uk",
|
||||
"name": "forbeslindesay"
|
||||
},
|
||||
"_npmVersion": "1.2.10",
|
||||
"_phantomChildren": {
|
||||
"amdefine": "1.0.0",
|
||||
"optimist": "0.3.7"
|
||||
},
|
||||
"_requested": {
|
||||
"name": "transformers",
|
||||
"raw": "transformers@2.1.0",
|
||||
"rawSpec": "2.1.0",
|
||||
"scope": null,
|
||||
"spec": "2.1.0",
|
||||
"type": "version"
|
||||
},
|
||||
"_requiredBy": [
|
||||
"/jade"
|
||||
],
|
||||
"_resolved": "https://registry.npmjs.org/transformers/-/transformers-2.1.0.tgz",
|
||||
"_shasum": "5d23cb35561dd85dc67fb8482309b47d53cce9a7",
|
||||
"_shrinkwrap": null,
|
||||
"_spec": "transformers@2.1.0",
|
||||
"_where": "/home/mitchell/Desktop/test-mywebsite/mywebsite/node_modules/jade",
|
||||
"author": {
|
||||
"name": "ForbesLindesay"
|
||||
},
|
||||
"bugs": {
|
||||
"url": "https://github.com/ForbesLindesay/transformers/issues"
|
||||
},
|
||||
"dependencies": {
|
||||
"css": "~1.0.8",
|
||||
"promise": "~2.0",
|
||||
"uglify-js": "~2.2.5"
|
||||
},
|
||||
"description": "String/Data transformations for use in templating libraries, static site generators and web frameworks",
|
||||
"devDependencies": {
|
||||
"atpl": "*",
|
||||
"coffee-script": "*",
|
||||
"coffeecup": "*",
|
||||
"coffeekup": "*",
|
||||
"component-builder": "*",
|
||||
"cson": "*",
|
||||
"dot": "*",
|
||||
"dust": "*",
|
||||
"dustjs-linkedin": "*",
|
||||
"eco": "*",
|
||||
"ect": "*",
|
||||
"ejs": "*",
|
||||
"expect.js": "~0.2",
|
||||
"haml-coffee": "*",
|
||||
"hamljs": "*",
|
||||
"handlebars": "*",
|
||||
"highlight.js": "*",
|
||||
"hogan.js": "*",
|
||||
"html2jade": "*",
|
||||
"jade": "*",
|
||||
"jazz": "*",
|
||||
"jqtpl": "*",
|
||||
"just": "*",
|
||||
"less": "*",
|
||||
"liquor": "*",
|
||||
"markdown": "*",
|
||||
"markdown-js": "*",
|
||||
"marked": "*",
|
||||
"mocha": "~1.8",
|
||||
"mote": "*",
|
||||
"mustache": "*",
|
||||
"plates": "*",
|
||||
"qejs": "*",
|
||||
"sass": "*",
|
||||
"stylus": "*",
|
||||
"supermarked": "*",
|
||||
"swig": "*",
|
||||
"templayed": "*",
|
||||
"then-jade": "*",
|
||||
"toffee": "*",
|
||||
"underscore": "*",
|
||||
"walrus": "*",
|
||||
"whiskers": "*"
|
||||
},
|
||||
"directories": {},
|
||||
"dist": {
|
||||
"shasum": "5d23cb35561dd85dc67fb8482309b47d53cce9a7",
|
||||
"tarball": "http://registry.npmjs.org/transformers/-/transformers-2.1.0.tgz"
|
||||
},
|
||||
"gitHead": "4b46e72cba3ad3403fd5ed3802d5472dcfa77311",
|
||||
"homepage": "https://github.com/ForbesLindesay/transformers#readme",
|
||||
"license": "MIT",
|
||||
"main": "lib/transformers.js",
|
||||
"maintainers": [
|
||||
{
|
||||
"name": "forbeslindesay",
|
||||
"email": "forbes@lindesay.co.uk"
|
||||
}
|
||||
],
|
||||
"name": "transformers",
|
||||
"optionalDependencies": {},
|
||||
"readme": "ERROR: No README data found!",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/ForbesLindesay/transformers.git"
|
||||
},
|
||||
"scripts": {
|
||||
"pretest": "node test/update-package && npm install",
|
||||
"test": "mocha test/test.js -R spec"
|
||||
},
|
||||
"version": "2.1.0"
|
||||
}
|
||||