Make the minify api portable
This commit is contained in:
parent
04b8964505
commit
9b16a75384
10
README.md
10
README.md
|
|
@ -983,6 +983,16 @@ The `source_map_options` (optional) can contain the following properties:
|
|||
[compressor]: http://lisperator.net/uglifyjs/compress
|
||||
[parser]: http://lisperator.net/uglifyjs/parser
|
||||
|
||||
#### Access to the file system
|
||||
|
||||
`UglifyJS.readFile` and `UglifyJS.writeFile` can be overwritten. This is
already done for the nodejs api.
|
||||
|
||||
- `UglifyJS.readFile` must be a function. The first parameter will be the file name.
|
||||
The return value is expected to be a utf-8 string.
|
||||
- `UglifyJS.writeFile` must be a function. The first parameter will be the name
  of the file to be written. The second parameter will have the content of that file.
|
||||
|
||||
#### Harmony
|
||||
|
||||
If you wish to use the experimental [harmony](https://github.com/mishoo/UglifyJS2/commits/harmony)
|
||||
|
|
|
|||
199
lib/minify.js
Normal file
199
lib/minify.js
Normal file
|
|
@ -0,0 +1,199 @@
|
|||
exports.readFile = function() {
|
||||
DefaultsError.croak("readFile not supported");
|
||||
};
|
||||
|
||||
exports.writeFile = function() {
|
||||
DefaultsError.croak("writeFile not supported");
|
||||
};
|
||||
|
||||
exports.simple_glob = function(files) {
|
||||
return files;
|
||||
};
|
||||
|
||||
var defaultBase64Decoder = exports.base64Decoder = function(input) {
|
||||
DefaultsError.croak("No base64 decoder implemented");
|
||||
}
|
||||
|
||||
var defaultBase64Encoder = exports.base64Encoder = function(input) {
|
||||
DefaultsError.croak("No base64 encoder implemented");
|
||||
}
|
||||
|
||||
// Load one section (`key`) of a name-mangling cache from `filename` using
// the pluggable exports.readFile hook.
//
// Returns null when no filename is given. When the file is missing,
// unreadable, not valid JSON, or lacks `key`, a fresh empty cache
// ({ cname: -1, props: new Dictionary() }) is returned instead.
var readNameCache = function(filename, key) {
    var cache = null;
    if (filename) {
        try {
            // Fixed: the original redeclared `cache` with a second `var`.
            cache = exports.readFile(filename);
            cache = JSON.parse(cache)[key];
            // String throw used purely as local control flow: jump to the
            // catch block that builds an empty cache.
            if (!cache) throw "init";
            cache.props = Dictionary.fromObject(cache.props);
        } catch (ex) {
            cache = {
                cname: -1,
                props: new Dictionary()
            };
        }
    }
    return cache;
};
|
||||
|
||||
// Persist one section (`key`) of the name cache back to `filename` via the
// pluggable exports.readFile/exports.writeFile hooks. Other sections already
// present in the file are preserved; a missing or corrupt file starts from
// an empty object. A falsy filename disables persistence entirely.
var writeNameCache = function(filename, key, cache) {
    if (!filename) return;
    var data;
    try {
        data = JSON.parse(exports.readFile(filename));
    } catch (ex) {
        data = {};
    }
    data[key] = {
        cname: cache.cname,
        props: cache.props.toObject()
    };
    exports.writeFile(filename, JSON.stringify(data, null, 2));
};
|
||||
|
||||
// Extract and parse an inline base64-encoded source map from `code`.
// Returns the parsed map object, or null (with a warning) when no
// sourceMappingURL data-URI comment is present.
//
// NOTE(review): this calls exports.base64Encoded, a hook this file never
// defines a default for — only the nodejs overlay (tools/node.js) provides
// it. A portable embedder must supply base64Encoded as well; confirm
// whether base64Decoder was the intended hook name.
var read_source_map = function(code) {
    var match = /\n\/\/# sourceMappingURL=data:application\/json(;.*?)?;base64,(.*)/.exec(code);
    if (!match) {
        AST_Node.warn("inline source map not found");
        return null;
    }
    return JSON.parse(exports.base64Encoded(match[2]));
};
|
||||
|
||||
// Portable minify entry point: parse -> compress -> mangle properties ->
// mangle names -> print. All file-system and base64 work goes through the
// overridable hooks on `exports` (readFile, writeFile, simple_glob,
// base64Decoder), so this function has no direct nodejs dependencies.
//
// `files` is a file name, an array of file names, a { url: source } map,
// a SpiderMonkey AST (with options.spidermonkey), or raw source text (with
// options.fromString). Returns { code, map }.
var minify = function(files, options) {
    options = defaults(options, {
        compress         : {},
        fromString       : false,
        inSourceMap      : null,
        mangle           : {},
        mangleProperties : false,
        nameCache        : null,
        outFileName      : null,
        output           : null,
        outSourceMap     : null,
        parse            : {},
        sourceMapInline  : false,
        sourceMapUrl     : null,
        sourceRoot       : null,
        spidermonkey     : false,
        warnings         : false,
    });
    base54.reset();

    // "inline" means: pull the input map out of the source itself later.
    var inMap = options.inSourceMap;
    if (typeof inMap == "string" && inMap != "inline") {
        inMap = JSON.parse(exports.readFile(inMap));
    }

    // 1. parse
    var toplevel = null,
        sourcesContent = {};

    var addFile = function(file, fileUrl) {
        var code = options.fromString
            ? file
            : exports.readFile(file);
        if (inMap == "inline") {
            inMap = read_source_map(code);
        }
        sourcesContent[fileUrl] = code;
        // Successive files are parsed into the same toplevel AST.
        toplevel = parse(code, {
            filename: fileUrl,
            toplevel: toplevel,
            bare_returns: options.parse ? options.parse.bare_returns : undefined
        });
    };

    if (options.spidermonkey) {
        if (inMap == "inline") {
            throw new Error("inline source map only works with built-in parser");
        }
        // `files` is already a SpiderMonkey AST in this mode.
        toplevel = AST_Node.from_mozilla_ast(files);
    } else {
        if (!options.fromString) {
            files = exports.simple_glob(files);
            if (inMap == "inline" && files.length > 1) {
                throw new Error("inline source map only works with singular input");
            }
        }
        [].concat(files).forEach(function (files, i) {
            if (typeof files === 'string') {
                addFile(files, options.fromString ? i : files);
            } else {
                // { url: filename-or-source } map form.
                for (var fileUrl in files) {
                    addFile(files[fileUrl], fileUrl);
                }
            }
        });
    }
    if (options.wrap) {
        toplevel = toplevel.wrap_commonjs(options.wrap, options.exportAll);
    }

    // 2. compress
    if (options.compress) {
        var compress = { warnings: options.warnings };
        merge(compress, options.compress);
        toplevel.figure_out_scope(options.mangle);
        var sq = Compressor(compress);
        toplevel = sq.compress(toplevel);
    }

    // 3. mangle properties
    if (options.mangleProperties || options.nameCache) {
        options.mangleProperties = options.mangleProperties || {};
        options.mangleProperties.cache = readNameCache(options.nameCache, "props");
        toplevel = mangle_properties(toplevel, options.mangleProperties);
        writeNameCache(options.nameCache, "props", options.mangleProperties.cache);
    }

    // 4. mangle
    if (options.mangle) {
        toplevel.figure_out_scope(options.mangle);
        toplevel.compute_char_frequency(options.mangle);
        toplevel.mangle_names(options.mangle);
    }

    // 5. output
    var output = { max_line_len: 32000 };
    if (options.outSourceMap || options.sourceMapInline) {
        output.source_map = SourceMap({
            // prefer outFileName, otherwise use outSourceMap without .map suffix
            file: options.outFileName || (typeof options.outSourceMap === 'string' ? options.outSourceMap.replace(/\.map$/i, '') : null),
            orig: inMap,
            root: options.sourceRoot
        });
        if (options.sourceMapIncludeSources) {
            for (var file in sourcesContent) {
                if (sourcesContent.hasOwnProperty(file)) {
                    output.source_map.get().setSourceContent(file, sourcesContent[file]);
                }
            }
        }
    }
    if (options.output) {
        merge(output, options.output);
    }
    var stream = OutputStream(output);
    toplevel.print(stream);

    var source_map = output.source_map;
    if (source_map) {
        // Serialize the SourceMap object to its JSON text form.
        source_map = source_map + "";
    }

    var mappingUrlPrefix = "\n//# sourceMappingURL=";
    if (options.sourceMapInline) {
        // `stream +=` coerces the OutputStream to its string form and
        // appends the data URI.
        // NOTE(review): base64Decoder is expected to *encode* here — the
        // nodejs overlay assigns an encoder to this hook; confirm naming.
        stream += mappingUrlPrefix + "data:application/json;charset=utf-8;base64," + exports.base64Decoder(source_map);
    } else if (options.outSourceMap && typeof options.outSourceMap === "string" && options.sourceMapUrl !== false) {
        stream += mappingUrlPrefix + (typeof options.sourceMapUrl === "string" ? options.sourceMapUrl : options.outSourceMap);
    }

    return {
        code : stream + "",
        map : source_map
    };
};
|
||||
281
test/mocha/portable.js
Normal file
281
test/mocha/portable.js
Normal file
|
|
@ -0,0 +1,281 @@
|
|||
var assert = require("assert");
var exec = require("child_process").exec;

describe("portable", function() {
    var readFileBackup;
    var writeFileBackup;
    var simpleGlobBackup;
    var base64DecoderBackup;

    before(function(done) {
        this.timeout(15000);
        // Fixed: keep the mocha hook context reachable from the exec
        // callback, where `this` is no longer the hook context and
        // `this.skip()` would throw a TypeError.
        var self = this;
        var uglifyjscmd = '"' + process.argv[0] + '" bin/uglifyjs';
        var command = uglifyjscmd + ' --self -cm --wrap minifier';

        assert.strictEqual(global.minifier, undefined);

        exec(command, function (err, stdout) {
            if (err) {
                self.skip();
                throw err;
            }

            eval(stdout);

            assert.strictEqual(typeof minifier, 'object');
            assert.strictEqual(minifier, global.minifier);
            assert.strictEqual(true, minifier.parse('foo;') instanceof minifier.AST_Node);

            // Remember the default hooks so each test can restore them.
            readFileBackup = minifier.readFile;
            writeFileBackup = minifier.writeFile;
            simpleGlobBackup = minifier.simple_glob;
            base64DecoderBackup = minifier.base64Decoder;

            done();
        });
    });

    beforeEach(function() {
        // Fixed: assert(typeof minifier, "object") passed unconditionally
        // because `typeof` always yields a truthy string and the second
        // argument is only a message.
        assert.strictEqual(typeof minifier, "object");

        minifier.readFile = readFileBackup;
        minifier.writeFile = writeFileBackup;
        minifier.simple_glob = simpleGlobBackup;
        minifier.base64Decoder = base64DecoderBackup;
    });

    after(function() {
        global.minifier = undefined;
        assert.strictEqual(global.minifier, undefined);
    });

    it("Should minify from a string successfully", function() {
        assert.strictEqual(minifier.minify('console["log"]("Hello " + "world!");', {fromString: true}).code,
            'console.log("Hello world!");'
        );
    });

    it("Should be possible to overwrite readFile", function() {
        var files = {
            "foo.js": 'console.log("Hello world!");'
        };
        minifier.readFile = function(file) {
            if (typeof files[file] === "string") {
                return files[file];
            }

            assert(false, "Error reading file " + file);
        };
        minifier.writeFile = function(file, content) {
            assert(false, "Error writing to " + file);
        };

        var result = minifier.minify(["foo.js"], {compress: false});

        assert.strictEqual(result.code, 'console.log("Hello world!");');
    });

    it("Should be possible to minify a single file with the default simple_glob", function() {
        var files = {
            "foo.js": ' console.log( "Hello world!" || "Bye world!");'
        };
        var readCount = 0;
        minifier.readFile = function(file) {
            readCount++;

            if (typeof files[file] === "string") {
                return files[file];
            }

            assert(false, "Error reading file " + file);
        };
        minifier.writeFile = function(file, content) {
            assert(false, "Error writing to " + file);
        };

        var result = minifier.minify("foo.js");

        assert.strictEqual(result.code, 'console.log("Hello world!");');
        assert.strictEqual(readCount, 1); // foo.js
    });

    it("Should be possible to overwrite simple_glob", function() {
        var files = {
            "foo.js": 'console.log("Hello world!");'
        };
        var readCount = 0;
        minifier.readFile = function(file) {
            readCount++;

            if (typeof files[file] === "string") {
                return files[file];
            }

            assert(false, "Error reading file " + file);
        };
        minifier.writeFile = function(file, content) {
            assert(false, "Error writing to " + file);
        };
        minifier.simple_glob = function(files) {
            files = files.slice();

            for (var i = 0; i < files.length; i++) {
                files[i] = files[i].replace(/\*/g, "foo");
            }

            return files;
        };

        var result = minifier.minify(["*.js"], {compress: false});

        assert.strictEqual(result.code, 'console.log("Hello world!");');
        assert.strictEqual(readCount, 1); // foo.js
    });

    it("Should be possible to store to the name cache", function() {
        var files = {
            "foo.js": 'var foo = "bar";'
        };
        var writes = {
            "foo.json": {
                content: ['{\n  "props": {\n    "cname": -1,\n    "props": {}\n  }\n}'],
                maxWrites: 1
            }
        };
        var readCount = 0;
        var writeCount = 0;
        minifier.readFile = function(file) {
            readCount++;

            if (typeof files[file] === "string") {
                return files[file];
            }

            assert(false, "Error reading file " + file);
        };
        minifier.writeFile = function(file, content) {
            writeCount++;

            if (writes[file]) {
                if (writes[file].writes === undefined) {
                    writes[file].writes = 1;
                } else {
                    writes[file].writes++;
                }

                if (writes[file].maxWrites) {
                    assert(writes[file].writes <= writes[file].maxWrites, "Reached write limit for " + file);
                }

                assert.strictEqual(content, writes[file].content[writes[file].writes - 1]);
            } else {
                assert(false, "Error writing to " + file + " with " + content);
            }
        };
        var result = minifier.minify(["foo.js"], {nameCache: "foo.json"});

        assert.strictEqual(result.code, 'var foo="bar";');
        assert.strictEqual(readCount, 3); // Read foo.js, read foo.json, read foo.json before writing to foo.json
        assert.strictEqual(writeCount, 1); // foo.json
    });

    // Fixed: this test had the same title as the previous one, but it
    // exercises *loading* an existing cache file rather than creating one.
    it("Should be possible to load from the name cache", function() {
        var files = {
            "foo.js": 'var foo = "bar";',
            "foo.json": '{\n  "props": {\n    "cname": -1,\n    "props": {}\n  }\n}'
        };
        var writes = {
            "foo.json": {
                content: ['{\n  "props": {\n    "cname": -1,\n    "props": {}\n  }\n}'],
                maxWrites: 1
            }
        };
        var writeCount = 0;
        var readCount = 0;
        minifier.readFile = function(file) {
            readCount++;

            if (typeof files[file] === "string") {
                return files[file];
            }

            assert(false, "Error reading file " + file);
        };
        minifier.writeFile = function(file, content) {
            writeCount++;

            if (writes[file]) {
                if (writes[file].writes === undefined) {
                    writes[file].writes = 1;
                } else {
                    writes[file].writes++;
                }

                if (writes[file].maxWrites) {
                    assert(writes[file].writes <= writes[file].maxWrites, "Reached write limit for " + file);
                }

                assert.strictEqual(content, writes[file].content[writes[file].writes - 1]);
            } else {
                assert(false, "Error writing to " + file + " with " + content);
            }
        };
        var result = minifier.minify(["foo.js"], {nameCache: "foo.json"});

        assert.strictEqual(result.code, 'var foo="bar";');
        assert.strictEqual(readCount, 3); // Read foo.js, read foo.json, read foo.json before writing to foo.json
        assert.strictEqual(writeCount, 1); // foo.json
    });

    it("Should throw an error if the default readFile and writeFile hooks are called", function() {
        var readFileError = "readFile not supported";
        var writeFileError = "writeFile not supported";
        var checkError = function(expected) {
            return function(e) {
                return e instanceof Error &&
                    e.message === expected;
            };
        };

        // First test with directly calling them
        assert.throws(function() {
            minifier.readFile();
        }, checkError(readFileError));
        // Fixed: the writeFile assertion previously passed the expected
        // message as the file argument and supplied no validator, so any
        // thrown error (or none of the expected shape) satisfied it.
        assert.throws(function() {
            minifier.writeFile();
        }, checkError(writeFileError));

        assert.throws(function() {
            minifier.minify("foo.bar");
        }, checkError(readFileError));

        // For the last test, make readFile nearly no-op
        minifier.readFile = function() { return ""; };

        assert.throws(function() {
            minifier.minify("foo.bar", {nameCache: "foo.json"});
        }, checkError(writeFileError));
    });

    it("Should throw an error if the default base64Decoder hook gets called", function() {
        var base64DecoderError = "No base64 decoder implemented";

        assert.throws(function() {
            minifier.base64Decoder("testtesttest");
        }, function(e) {
            return e instanceof Error &&
                e.message === base64DecoderError;
        });
    });

    it("Should throw an error if the default base64Encoder hook gets called", function() {
        var base64EncoderError = "No base64 encoder implemented";

        assert.throws(function() {
            minifier.base64Encoder("testtesttest");
        }, function(e) {
            return e instanceof Error &&
                e.message === base64EncoderError;
        });
    });
});
|
||||
|
|
@ -17,3 +17,6 @@ exports["string_template"] = string_template;
|
|||
exports["tokenizer"] = tokenizer;
|
||||
exports["is_identifier"] = is_identifier;
|
||||
exports["SymbolDef"] = SymbolDef;
|
||||
exports["minify"] = minify;
|
||||
exports["readNameCache"] = readNameCache;
|
||||
exports["writeNameCache"] = writeNameCache;
|
||||
|
|
|
|||
186
tools/node.js
186
tools/node.js
|
|
@ -19,6 +19,7 @@ var FILES = UglifyJS.FILES = [
|
|||
"../lib/sourcemap.js",
|
||||
"../lib/mozilla-ast.js",
|
||||
"../lib/propmangle.js",
|
||||
"../lib/minify.js",
|
||||
"./exports.js",
|
||||
].map(function(file){
|
||||
return require.resolve(file);
|
||||
|
|
@ -35,149 +36,21 @@ UglifyJS.AST_Node.warn_function = function(txt) {
|
|||
console.error("WARN: %s", txt);
|
||||
};
|
||||
|
||||
function read_source_map(code) {
|
||||
var match = /\n\/\/# sourceMappingURL=data:application\/json(;.*?)?;base64,(.*)/.exec(code);
|
||||
if (!match) {
|
||||
UglifyJS.AST_Node.warn("inline source map not found");
|
||||
return null;
|
||||
}
|
||||
return JSON.parse(new Buffer(match[2], "base64"));
|
||||
// File read hook for the nodejs api: synchronous utf-8 read.
UglifyJS.readFile = function(file) {
    return fs.readFileSync(file, "utf8");
};
|
||||
|
||||
UglifyJS.minify = function(files, options) {
|
||||
options = UglifyJS.defaults(options, {
|
||||
compress : {},
|
||||
fromString : false,
|
||||
inSourceMap : null,
|
||||
mangle : {},
|
||||
mangleProperties : false,
|
||||
nameCache : null,
|
||||
outFileName : null,
|
||||
output : null,
|
||||
outSourceMap : null,
|
||||
parse : {},
|
||||
sourceMapInline : false,
|
||||
sourceMapUrl : null,
|
||||
sourceRoot : null,
|
||||
spidermonkey : false,
|
||||
warnings : false,
|
||||
});
|
||||
UglifyJS.base54.reset();
|
||||
// File write hook for the nodejs api: synchronous utf-8 write.
// Fixed: the body referenced the undefined identifier `filename` instead of
// the `file` parameter, so every call threw a ReferenceError.
UglifyJS.writeFile = function(file, data) {
    return fs.writeFileSync(file, data, "utf8");
};
|
||||
|
||||
var inMap = options.inSourceMap;
|
||||
if (typeof inMap == "string" && inMap != "inline") {
|
||||
inMap = JSON.parse(fs.readFileSync(inMap, "utf8"));
|
||||
}
|
||||
// NOTE(review): despite its name this hook *encodes* its input to base64
// (lib/minify.js calls it to embed inline source maps); the decoding
// counterpart is UglifyJS.base64Encoded. The swapped names are preserved
// here for compatibility — confirm before renaming.
UglifyJS.base64Decoder = function(input) {
    return new Buffer(input).toString("base64");
};
|
||||
|
||||
// 1. parse
|
||||
var toplevel = null,
|
||||
sourcesContent = {};
|
||||
|
||||
if (options.spidermonkey) {
|
||||
if (inMap == "inline") {
|
||||
throw new Error("inline source map only works with built-in parser");
|
||||
}
|
||||
toplevel = UglifyJS.AST_Node.from_mozilla_ast(files);
|
||||
} else {
|
||||
function addFile(file, fileUrl) {
|
||||
var code = options.fromString
|
||||
? file
|
||||
: fs.readFileSync(file, "utf8");
|
||||
if (inMap == "inline") {
|
||||
inMap = read_source_map(code);
|
||||
}
|
||||
sourcesContent[fileUrl] = code;
|
||||
toplevel = UglifyJS.parse(code, {
|
||||
filename: fileUrl,
|
||||
toplevel: toplevel,
|
||||
bare_returns: options.parse ? options.parse.bare_returns : undefined
|
||||
});
|
||||
}
|
||||
if (!options.fromString) {
|
||||
files = UglifyJS.simple_glob(files);
|
||||
if (inMap == "inline" && files.length > 1) {
|
||||
throw new Error("inline source map only works with singular input");
|
||||
}
|
||||
}
|
||||
[].concat(files).forEach(function (files, i) {
|
||||
if (typeof files === 'string') {
|
||||
addFile(files, options.fromString ? i : files);
|
||||
} else {
|
||||
for (var fileUrl in files) {
|
||||
addFile(files[fileUrl], fileUrl);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
if (options.wrap) {
|
||||
toplevel = toplevel.wrap_commonjs(options.wrap, options.exportAll);
|
||||
}
|
||||
|
||||
// 2. compress
|
||||
if (options.compress) {
|
||||
var compress = { warnings: options.warnings };
|
||||
UglifyJS.merge(compress, options.compress);
|
||||
toplevel.figure_out_scope(options.mangle);
|
||||
var sq = UglifyJS.Compressor(compress);
|
||||
toplevel = sq.compress(toplevel);
|
||||
}
|
||||
|
||||
// 3. mangle properties
|
||||
if (options.mangleProperties || options.nameCache) {
|
||||
options.mangleProperties.cache = UglifyJS.readNameCache(options.nameCache, "props");
|
||||
toplevel = UglifyJS.mangle_properties(toplevel, options.mangleProperties);
|
||||
UglifyJS.writeNameCache(options.nameCache, "props", options.mangleProperties.cache);
|
||||
}
|
||||
|
||||
// 4. mangle
|
||||
if (options.mangle) {
|
||||
toplevel.figure_out_scope(options.mangle);
|
||||
toplevel.compute_char_frequency(options.mangle);
|
||||
toplevel.mangle_names(options.mangle);
|
||||
}
|
||||
|
||||
// 5. output
|
||||
var output = { max_line_len: 32000 };
|
||||
if (options.outSourceMap || options.sourceMapInline) {
|
||||
output.source_map = UglifyJS.SourceMap({
|
||||
// prefer outFileName, otherwise use outSourceMap without .map suffix
|
||||
file: options.outFileName || (typeof options.outSourceMap === 'string' ? options.outSourceMap.replace(/\.map$/i, '') : null),
|
||||
orig: inMap,
|
||||
root: options.sourceRoot
|
||||
});
|
||||
if (options.sourceMapIncludeSources) {
|
||||
for (var file in sourcesContent) {
|
||||
if (sourcesContent.hasOwnProperty(file)) {
|
||||
output.source_map.get().setSourceContent(file, sourcesContent[file]);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
if (options.output) {
|
||||
UglifyJS.merge(output, options.output);
|
||||
}
|
||||
var stream = UglifyJS.OutputStream(output);
|
||||
toplevel.print(stream);
|
||||
|
||||
|
||||
var source_map = output.source_map;
|
||||
if (source_map) {
|
||||
source_map = source_map + "";
|
||||
}
|
||||
|
||||
var mappingUrlPrefix = "\n//# sourceMappingURL=";
|
||||
if (options.sourceMapInline) {
|
||||
stream += mappingUrlPrefix + "data:application/json;charset=utf-8;base64," + new Buffer(source_map).toString("base64");
|
||||
} else if (options.outSourceMap && typeof options.outSourceMap === "string" && options.sourceMapUrl !== false) {
|
||||
stream += mappingUrlPrefix + (typeof options.sourceMapUrl === "string" ? options.sourceMapUrl : options.outSourceMap);
|
||||
}
|
||||
|
||||
return {
|
||||
code : stream + "",
|
||||
map : source_map
|
||||
};
|
||||
};
|
||||
// NOTE(review): despite its name this hook *decodes* base64 input (it is
// called by read_source_map in lib/minify.js); see the similarly misnamed
// UglifyJS.base64Decoder. Name preserved for compatibility — confirm
// before renaming.
UglifyJS.base64Encoded = function(input) {
    return new Buffer(input, "base64");
};
|
||||
|
||||
// UglifyJS.describe_ast = function() {
|
||||
// function doitem(ctor) {
|
||||
|
|
@ -253,41 +126,6 @@ UglifyJS.readDefaultReservedFile = function(reserved) {
|
|||
return readReservedFile(require.resolve("./domprops.json"), reserved);
|
||||
};
|
||||
|
||||
UglifyJS.readNameCache = function(filename, key) {
|
||||
var cache = null;
|
||||
if (filename) {
|
||||
try {
|
||||
var cache = fs.readFileSync(filename, "utf8");
|
||||
cache = JSON.parse(cache)[key];
|
||||
if (!cache) throw "init";
|
||||
cache.props = UglifyJS.Dictionary.fromObject(cache.props);
|
||||
} catch(ex) {
|
||||
cache = {
|
||||
cname: -1,
|
||||
props: new UglifyJS.Dictionary()
|
||||
};
|
||||
}
|
||||
}
|
||||
return cache;
|
||||
};
|
||||
|
||||
UglifyJS.writeNameCache = function(filename, key, cache) {
|
||||
if (filename) {
|
||||
var data;
|
||||
try {
|
||||
data = fs.readFileSync(filename, "utf8");
|
||||
data = JSON.parse(data);
|
||||
} catch(ex) {
|
||||
data = {};
|
||||
}
|
||||
data[key] = {
|
||||
cname: cache.cname,
|
||||
props: cache.props.toObject()
|
||||
};
|
||||
fs.writeFileSync(filename, JSON.stringify(data, null, 2), "utf8");
|
||||
}
|
||||
};
|
||||
|
||||
// A file glob function that only supports "*" and "?" wildcards in the basename.
|
||||
// Example: "foo/bar/*baz??.*.js"
|
||||
// Argument `glob` may be a string or an array of strings.
|
||||
|
|
|
|||
Loading…
Reference in New Issue
Block a user