1701 lines
60 KiB
JavaScript
1701 lines
60 KiB
JavaScript
/***********************************************************************
|
|
|
|
A JavaScript tokenizer / parser / beautifier / compressor.
|
|
https://github.com/mishoo/UglifyJS2
|
|
|
|
-------------------------------- (C) ---------------------------------
|
|
|
|
Author: Mihai Bazon
|
|
<mihai.bazon@gmail.com>
|
|
http://mihai.bazon.net/blog
|
|
|
|
Distributed under the BSD license:
|
|
|
|
Copyright 2012 (c) Mihai Bazon <mihai.bazon@gmail.com>
|
|
Parser based on parse-js (http://marijn.haverbeke.nl/parse-js/).
|
|
|
|
Redistribution and use in source and binary forms, with or without
|
|
modification, are permitted provided that the following conditions
|
|
are met:
|
|
|
|
* Redistributions of source code must retain the above
|
|
copyright notice, this list of conditions and the following
|
|
disclaimer.
|
|
|
|
* Redistributions in binary form must reproduce the above
|
|
copyright notice, this list of conditions and the following
|
|
disclaimer in the documentation and/or other materials
|
|
provided with the distribution.
|
|
|
|
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER “AS IS” AND ANY
|
|
EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
|
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
|
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER BE
|
|
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
|
|
OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
|
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
|
|
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
|
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
|
|
TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
|
|
THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
|
|
SUCH DAMAGE.
|
|
|
|
***********************************************************************/
|
|
|
|
"use strict";
|
|
|
|
var utils = require("./utils");
|
|
var characters = utils.characters;
|
|
var find_if = utils.find_if;
|
|
var configure_error_stack = utils.configure_error_stack;
|
|
var defaults = utils.defaults;
|
|
var makePredicate = utils.makePredicate;
|
|
var all = utils.all;
|
|
var HOP = utils.HOP;
|
|
var merge = utils.merge;
|
|
|
|
var AST = require("./ast");
|
|
|
|
// Space-separated word lists, converted to predicate maps below.
var KEYWORDS = 'break case catch const continue debugger default delete do else finally for function if in instanceof new return switch throw try typeof var void while with';
var KEYWORDS_ATOM = 'false null true';
// Everything that may not be used as an identifier (future reserved words included).
var RESERVED_WORDS = 'abstract boolean byte char class double enum export extends final float goto implements import int interface let long native package private protected public short static super synchronized this throws transient volatile yield'
    + " " + KEYWORDS_ATOM + " " + KEYWORDS;
// Keywords after which a "/" begins a regexp literal rather than division.
var KEYWORDS_BEFORE_EXPRESSION = 'return new delete throw else case';

KEYWORDS = makePredicate(KEYWORDS);
RESERVED_WORDS = makePredicate(RESERVED_WORDS);
KEYWORDS_BEFORE_EXPRESSION = makePredicate(KEYWORDS_BEFORE_EXPRESSION);
KEYWORDS_ATOM = makePredicate(KEYWORDS_ATOM);

// Characters that can start an operator token (see read_operator).
var OPERATOR_CHARS = makePredicate(characters("+-*&%=<>!?|~^"));
|
|
|
|
// Numeric literal shapes recognized by parse_js_number().
var RE_HEX_NUMBER = /^0x[0-9a-f]+$/i;
var RE_OCT_NUMBER = /^0[0-7]+$/;   // legacy octal, e.g. 0755
|
|
|
|
// Every valid operator token (word operators included).  read_operator()
// uses this table to greedily match the longest possible operator.
var OPERATORS = makePredicate([
    "in", "instanceof", "typeof", "new", "void", "delete",
    "++", "--",
    "+", "-", "!", "~", "&", "|", "^", "*", "/", "%",
    ">>", "<<", ">>>",
    "<", ">", "<=", ">=",
    "==", "===", "!=", "!==",
    "?",
    "=", "+=", "-=", "/=", "*=", "%=", ">>=", "<<=", ">>>=", "|=", "^=", "&=",
    "&&", "||"
]);
|
|
|
|
// Whitespace: ASCII blanks plus the Unicode space separators, ZWSP,
// line/paragraph separators and the BOM.
var WHITESPACE_CHARS = makePredicate(characters(" \u00a0\n\r\t\f\u000b\u200b\u2000\u2001\u2002\u2003\u2004\u2005\u2006\u2007\u2008\u2009\u200a\u2028\u2029\u202f\u205f\u3000\uFEFF"));

// Line terminators: LF, CR, LS, PS.
var NEWLINE_CHARS = makePredicate(characters("\n\r\u2028\u2029"));

// Punctuation after which a "/" begins a regexp literal.
var PUNC_BEFORE_EXPRESSION = makePredicate(characters("[{(,;:"));

// All single-character punctuation tokens.
var PUNC_CHARS = makePredicate(characters("[]{}(),;:"));
|
|
|
|
/* -----[ Tokenizer ]----- */
|
|
|
|
// regexps adapted from http://xregexp.com/plugins/#unicode
// Character-class tables used by the is_unicode_* predicates below.
// Each regexp matches exactly one character of the corresponding
// Unicode category (BMP only — tested via String.fromCharCode).
var UNICODE = {
    // Unicode category L (letters), plus Nl letter-numbers.
    letter: new RegExp("[\\u0041-\\u005A\\u0061-\\u007A\\u00AA\\u00B5\\u00BA\\u00C0-\\u00D6\\u00D8-\\u00F6\\u00F8-\\u02C1\\u02C6-\\u02D1\\u02E0-\\u02E4\\u02EC\\u02EE\\u0370-\\u0374\\u0376\\u0377\\u037A-\\u037D\\u037F\\u0386\\u0388-\\u038A\\u038C\\u038E-\\u03A1\\u03A3-\\u03F5\\u03F7-\\u0481\\u048A-\\u052F\\u0531-\\u0556\\u0559\\u0561-\\u0587\\u05D0-\\u05EA\\u05F0-\\u05F2\\u0620-\\u064A\\u066E\\u066F\\u0671-\\u06D3\\u06D5\\u06E5\\u06E6\\u06EE\\u06EF\\u06FA-\\u06FC\\u06FF\\u0710\\u0712-\\u072F\\u074D-\\u07A5\\u07B1\\u07CA-\\u07EA\\u07F4\\u07F5\\u07FA\\u0800-\\u0815\\u081A\\u0824\\u0828\\u0840-\\u0858\\u08A0-\\u08B2\\u0904-\\u0939\\u093D\\u0950\\u0958-\\u0961\\u0971-\\u0980\\u0985-\\u098C\\u098F\\u0990\\u0993-\\u09A8\\u09AA-\\u09B0\\u09B2\\u09B6-\\u09B9\\u09BD\\u09CE\\u09DC\\u09DD\\u09DF-\\u09E1\\u09F0\\u09F1\\u0A05-\\u0A0A\\u0A0F\\u0A10\\u0A13-\\u0A28\\u0A2A-\\u0A30\\u0A32\\u0A33\\u0A35\\u0A36\\u0A38\\u0A39\\u0A59-\\u0A5C\\u0A5E\\u0A72-\\u0A74\\u0A85-\\u0A8D\\u0A8F-\\u0A91\\u0A93-\\u0AA8\\u0AAA-\\u0AB0\\u0AB2\\u0AB3\\u0AB5-\\u0AB9\\u0ABD\\u0AD0\\u0AE0\\u0AE1\\u0B05-\\u0B0C\\u0B0F\\u0B10\\u0B13-\\u0B28\\u0B2A-\\u0B30\\u0B32\\u0B33\\u0B35-\\u0B39\\u0B3D\\u0B5C\\u0B5D\\u0B5F-\\u0B61\\u0B71\\u0B83\\u0B85-\\u0B8A\\u0B8E-\\u0B90\\u0B92-\\u0B95\\u0B99\\u0B9A\\u0B9C\\u0B9E\\u0B9F\\u0BA3\\u0BA4\\u0BA8-\\u0BAA\\u0BAE-\\u0BB9\\u0BD0\\u0C05-\\u0C0C\\u0C0E-\\u0C10\\u0C12-\\u0C28\\u0C2A-\\u0C39\\u0C3D\\u0C58\\u0C59\\u0C60\\u0C61\\u0C85-\\u0C8C\\u0C8E-\\u0C90\\u0C92-\\u0CA8\\u0CAA-\\u0CB3\\u0CB5-\\u0CB9\\u0CBD\\u0CDE\\u0CE0\\u0CE1\\u0CF1\\u0CF2\\u0D05-\\u0D0C\\u0D0E-\\u0D10\\u0D12-\\u0D3A\\u0D3D\\u0D4E\\u0D60\\u0D61\\u0D7A-\\u0D7F\\u0D85-\\u0D96\\u0D9A-\\u0DB1\\u0DB3-\\u0DBB\\u0DBD\\u0DC0-\\u0DC6\\u0E01-\\u0E30\\u0E32\\u0E33\\u0E40-\\u0E46\\u0E81\\u0E82\\u0E84\\u0E87\\u0E88\\u0E8A\\u0E8D\\u0E94-\\u0E97\\u0E99-\\u0E9F\\u0EA1-\\u0EA3\\u0EA5\\u0EA7\\u0EAA\\u0EAB\\u0EAD-\\u0EB0\\u0EB2\\u0EB3\\u0EBD\\u0EC0-\\u0EC4\\u0EC6\\u0EDC-\\u0EDF\\u0F00\\u0F40-\\u0F47\\u0F49-\\u0F6C\\u0F88-\\u0F8C\\u1000-\\u102A\\u103F\\u1050-\\u1055\\u105A-\\u105D\\u1061\\u1065\\u1066\\u106E-\\u1070\\u1075-\\u1081\\u108E\\u10A0-\\u10C5\\u10C7\\u10CD\\u10D0-\\u10FA\\u10FC-\\u1248\\u124A-\\u124D\\u1250-\\u1256\\u1258\\u125A-\\u125D\\u1260-\\u1288\\u128A-\\u128D\\u1290-\\u12B0\\u12B2-\\u12B5\\u12B8-\\u12BE\\u12C0\\u12C2-\\u12C5\\u12C8-\\u12D6\\u12D8-\\u1310\\u1312-\\u1315\\u1318-\\u135A\\u1380-\\u138F\\u13A0-\\u13F4\\u1401-\\u166C\\u166F-\\u167F\\u1681-\\u169A\\u16A0-\\u16EA\\u16EE-\\u16F8\\u1700-\\u170C\\u170E-\\u1711\\u1720-\\u1731\\u1740-\\u1751\\u1760-\\u176C\\u176E-\\u1770\\u1780-\\u17B3\\u17D7\\u17DC\\u1820-\\u1877\\u1880-\\u18A8\\u18AA\\u18B0-\\u18F5\\u1900-\\u191E\\u1950-\\u196D\\u1970-\\u1974\\u1980-\\u19AB\\u19C1-\\u19C7\\u1A00-\\u1A16\\u1A20-\\u1A54\\u1AA7\\u1B05-\\u1B33\\u1B45-\\u1B4B\\u1B83-\\u1BA0\\u1BAE\\u1BAF\\u1BBA-\\u1BE5\\u1C00-\\u1C23\\u1C4D-\\u1C4F\\u1C5A-\\u1C7D\\u1CE9-\\u1CEC\\u1CEE-\\u1CF1\\u1CF5\\u1CF6\\u1D00-\\u1DBF\\u1E00-\\u1F15\\u1F18-\\u1F1D\\u1F20-\\u1F45\\u1F48-\\u1F4D\\u1F50-\\u1F57\\u1F59\\u1F5B\\u1F5D\\u1F5F-\\u1F7D\\u1F80-\\u1FB4\\u1FB6-\\u1FBC\\u1FBE\\u1FC2-\\u1FC4\\u1FC6-\\u1FCC\\u1FD0-\\u1FD3\\u1FD6-\\u1FDB\\u1FE0-\\u1FEC\\u1FF2-\\u1FF4\\u1FF6-\\u1FFC\\u2071\\u207F\\u2090-\\u209C\\u2102\\u2107\\u210A-\\u2113\\u2115\\u2119-\\u211D\\u2124\\u2126\\u2128\\u212A-\\u212D\\u212F-\\u2139\\u213C-\\u213F\\u2145-\\u2149\\u214E\\u2160-\\u2188\\u2C00-\\u2C2E\\u2C30-\\u2C5E\\u2C60-\\u2CE4\\u2CEB-\\u2CEE\\u2CF2\\u2CF3\\u2D00-\\u2D25\\u2D27\\u2D2D\\u2D30-\\u2D67\\u2D6F\\u2D80-\\u2D96\\u2DA0-\\u2DA6\\u2DA8-\\u2DAE\\u2DB0-\\u2DB6\\u2DB8-\\u2DBE\\u2DC0-\\u2DC6\\u2DC8-\\u2DCE\\u2DD0-\\u2DD6\\u2DD8-\\u2DDE\\u2E2F\\u3005-\\u3007\\u3021-\\u3029\\u3031-\\u3035\\u3038-\\u303C\\u3041-\\u3096\\u309D-\\u309F\\u30A1-\\u30FA\\u30FC-\\u30FF\\u3105-\\u312D\\u3131-\\u318E\\u31A0-\\u31BA\\u31F0-\\u31FF\\u3400-\\u4DB5\\u4E00-\\u9FCC\\uA000-\\uA48C\\uA4D0-\\uA4FD\\uA500-\\uA60C\\uA610-\\uA61F\\uA62A\\uA62B\\uA640-\\uA66E\\uA67F-\\uA69D\\uA6A0-\\uA6EF\\uA717-\\uA71F\\uA722-\\uA788\\uA78B-\\uA78E\\uA790-\\uA7AD\\uA7B0\\uA7B1\\uA7F7-\\uA801\\uA803-\\uA805\\uA807-\\uA80A\\uA80C-\\uA822\\uA840-\\uA873\\uA882-\\uA8B3\\uA8F2-\\uA8F7\\uA8FB\\uA90A-\\uA925\\uA930-\\uA946\\uA960-\\uA97C\\uA984-\\uA9B2\\uA9CF\\uA9E0-\\uA9E4\\uA9E6-\\uA9EF\\uA9FA-\\uA9FE\\uAA00-\\uAA28\\uAA40-\\uAA42\\uAA44-\\uAA4B\\uAA60-\\uAA76\\uAA7A\\uAA7E-\\uAAAF\\uAAB1\\uAAB5\\uAAB6\\uAAB9-\\uAABD\\uAAC0\\uAAC2\\uAADB-\\uAADD\\uAAE0-\\uAAEA\\uAAF2-\\uAAF4\\uAB01-\\uAB06\\uAB09-\\uAB0E\\uAB11-\\uAB16\\uAB20-\\uAB26\\uAB28-\\uAB2E\\uAB30-\\uAB5A\\uAB5C-\\uAB5F\\uAB64\\uAB65\\uABC0-\\uABE2\\uAC00-\\uD7A3\\uD7B0-\\uD7C6\\uD7CB-\\uD7FB\\uF900-\\uFA6D\\uFA70-\\uFAD9\\uFB00-\\uFB06\\uFB13-\\uFB17\\uFB1D\\uFB1F-\\uFB28\\uFB2A-\\uFB36\\uFB38-\\uFB3C\\uFB3E\\uFB40\\uFB41\\uFB43\\uFB44\\uFB46-\\uFBB1\\uFBD3-\\uFD3D\\uFD50-\\uFD8F\\uFD92-\\uFDC7\\uFDF0-\\uFDFB\\uFE70-\\uFE74\\uFE76-\\uFEFC\\uFF21-\\uFF3A\\uFF41-\\uFF5A\\uFF66-\\uFFBE\\uFFC2-\\uFFC7\\uFFCA-\\uFFCF\\uFFD2-\\uFFD7\\uFFDA-\\uFFDC]"),
    // Category Nd (decimal digits in any script).
    digit: new RegExp("[\\u0030-\\u0039\\u0660-\\u0669\\u06F0-\\u06F9\\u07C0-\\u07C9\\u0966-\\u096F\\u09E6-\\u09EF\\u0A66-\\u0A6F\\u0AE6-\\u0AEF\\u0B66-\\u0B6F\\u0BE6-\\u0BEF\\u0C66-\\u0C6F\\u0CE6-\\u0CEF\\u0D66-\\u0D6F\\u0DE6-\\u0DEF\\u0E50-\\u0E59\\u0ED0-\\u0ED9\\u0F20-\\u0F29\\u1040-\\u1049\\u1090-\\u1099\\u17E0-\\u17E9\\u1810-\\u1819\\u1946-\\u194F\\u19D0-\\u19D9\\u1A80-\\u1A89\\u1A90-\\u1A99\\u1B50-\\u1B59\\u1BB0-\\u1BB9\\u1C40-\\u1C49\\u1C50-\\u1C59\\uA620-\\uA629\\uA8D0-\\uA8D9\\uA900-\\uA909\\uA9D0-\\uA9D9\\uA9F0-\\uA9F9\\uAA50-\\uAA59\\uABF0-\\uABF9\\uFF10-\\uFF19]"),
    // Category Mn (non-spacing combining marks).
    non_spacing_mark: new RegExp("[\\u0300-\\u036F\\u0483-\\u0487\\u0591-\\u05BD\\u05BF\\u05C1\\u05C2\\u05C4\\u05C5\\u05C7\\u0610-\\u061A\\u064B-\\u065E\\u0670\\u06D6-\\u06DC\\u06DF-\\u06E4\\u06E7\\u06E8\\u06EA-\\u06ED\\u0711\\u0730-\\u074A\\u07A6-\\u07B0\\u07EB-\\u07F3\\u0816-\\u0819\\u081B-\\u0823\\u0825-\\u0827\\u0829-\\u082D\\u0900-\\u0902\\u093C\\u0941-\\u0948\\u094D\\u0951-\\u0955\\u0962\\u0963\\u0981\\u09BC\\u09C1-\\u09C4\\u09CD\\u09E2\\u09E3\\u0A01\\u0A02\\u0A3C\\u0A41\\u0A42\\u0A47\\u0A48\\u0A4B-\\u0A4D\\u0A51\\u0A70\\u0A71\\u0A75\\u0A81\\u0A82\\u0ABC\\u0AC1-\\u0AC5\\u0AC7\\u0AC8\\u0ACD\\u0AE2\\u0AE3\\u0B01\\u0B3C\\u0B3F\\u0B41-\\u0B44\\u0B4D\\u0B56\\u0B62\\u0B63\\u0B82\\u0BC0\\u0BCD\\u0C3E-\\u0C40\\u0C46-\\u0C48\\u0C4A-\\u0C4D\\u0C55\\u0C56\\u0C62\\u0C63\\u0CBC\\u0CBF\\u0CC6\\u0CCC\\u0CCD\\u0CE2\\u0CE3\\u0D41-\\u0D44\\u0D4D\\u0D62\\u0D63\\u0DCA\\u0DD2-\\u0DD4\\u0DD6\\u0E31\\u0E34-\\u0E3A\\u0E47-\\u0E4E\\u0EB1\\u0EB4-\\u0EB9\\u0EBB\\u0EBC\\u0EC8-\\u0ECD\\u0F18\\u0F19\\u0F35\\u0F37\\u0F39\\u0F71-\\u0F7E\\u0F80-\\u0F84\\u0F86\\u0F87\\u0F90-\\u0F97\\u0F99-\\u0FBC\\u0FC6\\u102D-\\u1030\\u1032-\\u1037\\u1039\\u103A\\u103D\\u103E\\u1058\\u1059\\u105E-\\u1060\\u1071-\\u1074\\u1082\\u1085\\u1086\\u108D\\u109D\\u135F\\u1712-\\u1714\\u1732-\\u1734\\u1752\\u1753\\u1772\\u1773\\u17B7-\\u17BD\\u17C6\\u17C9-\\u17D3\\u17DD\\u180B-\\u180D\\u18A9\\u1920-\\u1922\\u1927\\u1928\\u1932\\u1939-\\u193B\\u1A17\\u1A18\\u1A56\\u1A58-\\u1A5E\\u1A60\\u1A62\\u1A65-\\u1A6C\\u1A73-\\u1A7C\\u1A7F\\u1B00-\\u1B03\\u1B34\\u1B36-\\u1B3A\\u1B3C\\u1B42\\u1B6B-\\u1B73\\u1B80\\u1B81\\u1BA2-\\u1BA5\\u1BA8\\u1BA9\\u1C2C-\\u1C33\\u1C36\\u1C37\\u1CD0-\\u1CD2\\u1CD4-\\u1CE0\\u1CE2-\\u1CE8\\u1CED\\u1DC0-\\u1DE6\\u1DFD-\\u1DFF\\u20D0-\\u20DC\\u20E1\\u20E5-\\u20F0\\u2CEF-\\u2CF1\\u2DE0-\\u2DFF\\u302A-\\u302F\\u3099\\u309A\\uA66F\\uA67C\\uA67D\\uA6F0\\uA6F1\\uA802\\uA806\\uA80B\\uA825\\uA826\\uA8C4\\uA8E0-\\uA8F1\\uA926-\\uA92D\\uA947-\\uA951\\uA980-\\uA982\\uA9B3\\uA9B6-\\uA9B9\\uA9BC\\uAA29-\\uAA2E\\uAA31\\uAA32\\uAA35\\uAA36\\uAA43\\uAA4C\\uAAB0\\uAAB2-\\uAAB4\\uAAB7\\uAAB8\\uAABE\\uAABF\\uAAC1\\uABE5\\uABE8\\uABED\\uFB1E\\uFE00-\\uFE0F\\uFE20-\\uFE26]"),
    // Category Mc (spacing combining marks).
    space_combining_mark: new RegExp("[\\u0903\\u093E-\\u0940\\u0949-\\u094C\\u094E\\u0982\\u0983\\u09BE-\\u09C0\\u09C7\\u09C8\\u09CB\\u09CC\\u09D7\\u0A03\\u0A3E-\\u0A40\\u0A83\\u0ABE-\\u0AC0\\u0AC9\\u0ACB\\u0ACC\\u0B02\\u0B03\\u0B3E\\u0B40\\u0B47\\u0B48\\u0B4B\\u0B4C\\u0B57\\u0BBE\\u0BBF\\u0BC1\\u0BC2\\u0BC6-\\u0BC8\\u0BCA-\\u0BCC\\u0BD7\\u0C01-\\u0C03\\u0C41-\\u0C44\\u0C82\\u0C83\\u0CBE\\u0CC0-\\u0CC4\\u0CC7\\u0CC8\\u0CCA\\u0CCB\\u0CD5\\u0CD6\\u0D02\\u0D03\\u0D3E-\\u0D40\\u0D46-\\u0D48\\u0D4A-\\u0D4C\\u0D57\\u0D82\\u0D83\\u0DCF-\\u0DD1\\u0DD8-\\u0DDF\\u0DF2\\u0DF3\\u0F3E\\u0F3F\\u0F7F\\u102B\\u102C\\u1031\\u1038\\u103B\\u103C\\u1056\\u1057\\u1062-\\u1064\\u1067-\\u106D\\u1083\\u1084\\u1087-\\u108C\\u108F\\u109A-\\u109C\\u17B6\\u17BE-\\u17C5\\u17C7\\u17C8\\u1923-\\u1926\\u1929-\\u192B\\u1930\\u1931\\u1933-\\u1938\\u19B0-\\u19C0\\u19C8\\u19C9\\u1A19-\\u1A1B\\u1A55\\u1A57\\u1A61\\u1A63\\u1A64\\u1A6D-\\u1A72\\u1B04\\u1B35\\u1B3B\\u1B3D-\\u1B41\\u1B43\\u1B44\\u1B82\\u1BA1\\u1BA6\\u1BA7\\u1BAA\\u1C24-\\u1C2B\\u1C34\\u1C35\\u1CE1\\u1CF2\\uA823\\uA824\\uA827\\uA880\\uA881\\uA8B4-\\uA8C3\\uA952\\uA953\\uA983\\uA9B4\\uA9B5\\uA9BA\\uA9BB\\uA9BD-\\uA9C0\\uAA2F\\uAA30\\uAA33\\uAA34\\uAA4D\\uAA7B\\uABE3\\uABE4\\uABE6\\uABE7\\uABE9\\uABEA\\uABEC]"),
    // Category Pc (connector punctuation, e.g. "_").
    connector_punctuation: new RegExp("[\\u005F\\u203F\\u2040\\u2054\\uFE33\\uFE34\\uFE4D-\\uFE4F\\uFF3F]")
};
|
|
|
|
// True when `code` is a letter: ASCII fast paths first, then the
// full Unicode letter table for anything >= U+00AA.
function is_letter(code) {
    if (code >= 97 && code <= 122) return true; // a-z
    if (code >= 65 && code <= 90) return true;  // A-Z
    return code >= 0xaa && UNICODE.letter.test(String.fromCharCode(code));
}
|
|
|
|
// True for a high (leading) surrogate, U+D800..U+DBFF.
// Accepts either a char code or a string (its first char is used).
function is_surrogate_pair_head(code) {
    var c = typeof code == "string" ? code.charCodeAt(0) : code;
    return c >= 0xd800 && c <= 0xdbff;
}
|
|
|
|
// True for a low (trailing) surrogate, U+DC00..U+DFFF.
// Accepts either a char code or a string (its first char is used).
function is_surrogate_pair_tail(code) {
    var c = typeof code == "string" ? code.charCodeAt(0) : code;
    return c >= 0xdc00 && c <= 0xdfff;
}
|
|
|
|
// True for the ASCII digits "0"-"9" only.
function is_digit(code) {
    return !(code < 48 || code > 57);
}
|
|
|
|
// True when `code` is an ASCII digit or a (Unicode) letter.
function is_alphanumeric_char(code) {
    if (is_digit(code)) return true;
    return is_letter(code);
}
|
|
|
|
// True for any Unicode decimal digit (table-driven, BMP only).
function is_unicode_digit(code) {
    var ch = String.fromCharCode(code);
    return UNICODE.digit.test(ch);
}
|
|
|
|
// Combining marks: non-spacing (Mn) or spacing-combining (Mc).
function is_unicode_combining_mark(ch) {
    if (UNICODE.non_spacing_mark.test(ch)) return true;
    return UNICODE.space_combining_mark.test(ch);
}
|
|
|
|
// Connector punctuation (Pc): "_" and friends.
function is_unicode_connector_punctuation(ch) {
    var table = UNICODE.connector_punctuation;
    return table.test(ch);
}
|
|
|
|
// True when `name` is syntactically a valid (ASCII) identifier and is
// not a reserved word.  Delegates the syntax check to
// is_identifier_string() so the two regexes cannot drift apart (they
// were previously duplicated verbatim).
function is_identifier(name) {
    return !RESERVED_WORDS[name] && is_identifier_string(name);
}
|
|
|
|
// A char code that may begin an identifier: "$", "_" or any letter.
function is_identifier_start(code) {
    if (code == 36) return true; // $
    if (code == 95) return true; // _
    return is_letter(code);
}
|
|
|
|
// A character valid in a non-leading identifier position: identifier
// starters, digits, ZWNJ/ZWJ, and the Unicode combining-mark,
// connector-punctuation and digit classes.
function is_identifier_char(ch) {
    var code = ch.charCodeAt(0);
    if (is_identifier_start(code)) return true;
    if (is_digit(code)) return true;
    if (code == 8204) return true; // \u200c: zero-width non-joiner <ZWNJ>
    if (code == 8205) return true; // \u200d: zero-width joiner <ZWJ>
    if (is_unicode_combining_mark(ch)) return true;
    if (is_unicode_connector_punctuation(ch)) return true;
    return is_unicode_digit(code);
}
|
|
|
|
// ASCII identifier syntax: letter/underscore/dollar, then letters,
// digits, underscores or dollars.
function is_identifier_string(str) {
    var re = /^[a-z_$][a-z0-9_$]*$/i;
    return re.test(str);
}
|
|
|
|
// Convert numeric-literal text to its value.  Hex and legacy octal go
// through parseInt; everything else through parseFloat.  Returns
// undefined when the text does not round-trip (caller treats that as
// an invalid literal via isNaN).
function parse_js_number(num) {
    if (RE_HEX_NUMBER.test(num)) return parseInt(num.substr(2), 16);
    if (RE_OCT_NUMBER.test(num)) return parseInt(num.substr(1), 8);
    var val = parseFloat(num);
    return val == num ? val : undefined;
}
|
|
|
|
// Error object thrown for any tokenizer/parser failure.  It mimics a
// native SyntaxError (Error prototype, name "SyntaxError") while
// carrying the source location of the problem.
function JS_Parse_Error(message, filename, line, col, pos) {
    this.message = message;
    this.filename = filename;
    this.line = line;  // 1-based
    this.col = col;    // 0-based
    this.pos = pos;    // absolute character offset
}
// Inherit from Error without invoking the Error constructor.
JS_Parse_Error.prototype = Object.create(Error.prototype);
JS_Parse_Error.prototype.constructor = JS_Parse_Error;
JS_Parse_Error.prototype.name = "SyntaxError";
// Lazily attach a .stack property (see utils.configure_error_stack).
configure_error_stack(JS_Parse_Error);
|
|
|
|
// Single choke point for raising parse errors.
function js_error(message, filename, line, col, pos) {
    var err = new JS_Parse_Error(message, filename, line, col, pos);
    throw err;
}
|
|
|
|
// Does `token` have the given type, and (unless `val` is null or
// undefined) the given value?
function is_token(token, type, val) {
    if (token.type != type) return false;
    return val == null || token.value == val;
}
|
|
|
|
// Sentinel thrown by next() on unexpected end-of-input; recognized by
// identity in with_eof_error() and turned into a parse error there.
var EX_EOF = {};
|
|
|
|
function tokenizer($TEXT, filename, html5_comments, shebang) {
|
|
|
|
// Mutable tokenizer state.  Exposed (and swappable) via
// next_token.context() so the parser can save/restore it.
var S = {
    text            : $TEXT,    // full input source
    filename        : filename, // for error reporting and token .file
    pos             : 0,        // absolute offset of the next char
    tokpos          : 0,        // offset where the current token started
    line            : 1,        // current line (1-based)
    tokline         : 0,        // line where the current token started
    col             : 0,        // current column (0-based)
    tokcol          : 0,        // column where the current token started
    newline_before  : false,    // newline seen since the last token?
    regex_allowed   : false,    // may a "/" start a regexp here?
    comments_before : [],       // comments accumulated before the next token
    directives      : {},       // directive -> active count ("use strict", ...)
    directive_stack : []        // per-scope directive lists (push/pop below)
};
|
|
|
|
// Current character without consuming it ("" at EOF).
function peek() {
    var text = S.text;
    return text.charAt(S.pos);
}
|
|
|
|
// Consume and return one character, maintaining the line/column
// counters.  Throws EX_EOF when `signal_eof` is set and input is
// exhausted.  When reading inside a string (`in_string`), newlines do
// not set `newline_before` and \r\n is not collapsed.
function next(signal_eof, in_string) {
    var ch = S.text.charAt(S.pos++);
    if (signal_eof && !ch)
        throw EX_EOF;
    if (NEWLINE_CHARS[ch]) {
        S.newline_before = S.newline_before || !in_string;
        ++S.line;
        S.col = 0;
        if (!in_string && ch == "\r" && peek() == "\n") {
            // treat a \r\n sequence as a single \n
            ++S.pos;
            ch = "\n";
        }
    } else {
        ++S.col;
    }
    return ch;
}
|
|
|
|
// Consume `i` characters, keeping line/column bookkeeping via next().
function forward(i) {
    for (; i > 0; --i) next();
}
|
|
|
|
// Does the input at the current position start with `str`?
function looking_at(str) {
    return str == S.text.substr(S.pos, str.length);
}
|
|
|
|
// Offset of the next line terminator at or after S.pos, or -1.
function find_eol() {
    var text = S.text;
    var i = S.pos;
    var n = text.length;
    while (i < n) {
        if (NEWLINE_CHARS[text[i]]) return i;
        ++i;
    }
    return -1;
}
|
|
|
|
// indexOf(`what`) from the current position; when `signal_eof` is set
// and the needle is absent, signal EOF instead of returning -1.
function find(what, signal_eof) {
    var pos = S.text.indexOf(what, S.pos);
    if (pos == -1 && signal_eof) throw EX_EOF;
    return pos;
}
|
|
|
|
// Snapshot the current stream position as the start of a new token.
function start_token() {
    S.tokline = S.line;
    S.tokcol = S.col;
    S.tokpos = S.pos;
}
|
|
|
|
// True when the previous significant token was "." — in that case the
// next word is a plain property name even if it spells a keyword
// (see read_word).
var prev_was_dot = false;
// Build an AST.Token of the given type/value spanning from the last
// start_token() mark to the current position.  Also decides whether a
// following "/" may start a regexp, and attaches collected comments.
function token(type, value, is_comment) {
    // A "/" starts a regexp after non-postfix operators, after keywords
    // like `return`, and after opening punctuation — never after a value.
    S.regex_allowed = ((type == "operator" && !UNARY_POSTFIX[value]) ||
                       (type == "keyword" && KEYWORDS_BEFORE_EXPRESSION[value]) ||
                       (type == "punc" && PUNC_BEFORE_EXPRESSION[value]));
    if (type == "punc" && value == ".") {
        prev_was_dot = true;
    } else if (!is_comment) {
        // comments are transparent to the dot-tracking
        prev_was_dot = false;
    }
    var ret = {
        type    : type,
        value   : value,
        line    : S.tokline,
        col     : S.tokcol,
        pos     : S.tokpos,
        endline : S.line,
        endcol  : S.col,
        endpos  : S.pos,
        nlb     : S.newline_before,
        file    : filename
    };
    if (/^(?:num|string|regexp)$/i.test(type)) {
        // keep the literal's raw source text for the output stage
        ret.raw = $TEXT.substring(ret.pos, ret.endpos);
    }
    if (!is_comment) {
        // hand the accumulated comments to this token and reset the buffer
        ret.comments_before = S.comments_before;
        ret.comments_after = S.comments_before = [];
    }
    S.newline_before = false;
    return new AST.Token(ret);
}
|
|
|
|
// Consume any run of whitespace characters.
function skip_whitespace() {
    for (;;) {
        if (!WHITESPACE_CHARS[peek()]) break;
        next();
    }
}
|
|
|
|
// Accumulate characters while pred(ch, index) stays truthy.
function read_while(pred) {
    var ret = "";
    for (var i = 0;;) {
        var ch = peek();
        if (!ch || !pred(ch, i++)) break;
        ret += next();
    }
    return ret;
}
|
|
|
|
// Raise a JS_Parse_Error at the current token's start position.
function parse_error(err) {
    js_error(err, filename, S.tokline, S.tokcol, S.tokpos);
}
|
|
|
|
// Scan a numeric literal.  `prefix` is "." when called from
// handle_dot() (the dot has already been consumed).
function read_num(prefix) {
    var has_e = false, after_e = false, has_x = false, has_dot = prefix == ".";
    var num = read_while(function(ch, i) {
        var code = ch.charCodeAt(0);
        switch (code) {
          case 120: case 88: // xX — at most one
            return has_x ? false : (has_x = true);
          case 101: case 69: // eE — at most one, none in hex
            return has_x ? true : has_e ? false : (has_e = after_e = true);
          case 45: // "-" valid right after eE, or as the leading sign
            return after_e || (i == 0 && !prefix);
          case 43: // "+" valid right after eE only
            return after_e;
        }
        // Any character other than xXeE+- closes the "right after the
        // exponent marker" window.  (The original encoded this reset
        // with a comma operator hidden in a case label:
        // `case (after_e = false, 46):`.)
        after_e = false;
        if (code == 46) // "." — one only, and not in hex/exponent
            return (!has_dot && !has_x && !has_e) ? (has_dot = true) : false;
        return is_alphanumeric_char(code);
    });
    if (prefix) num = prefix + num;
    // Legacy octal literals (e.g. 0123) are forbidden in strict mode.
    if (RE_OCT_NUMBER.test(num) && next_token.has_directive("use strict")) {
        parse_error("Legacy octal literals are not allowed in strict mode");
    }
    var valid = parse_js_number(num);
    if (!isNaN(valid)) return token("num", valid);
    parse_error("Invalid syntax: " + num);
}
|
|
|
|
// Decode the character(s) following a backslash in a string literal or
// identifier escape.  Returns the replacement text ("" for escaped
// line terminators, which just continue the string).
function read_escaped_char(in_string) {
    var ch = next(true, in_string);
    switch (ch.charCodeAt(0)) {
      case 110 : return "\n";
      case 114 : return "\r";
      case 116 : return "\t";
      case 98 : return "\b";
      case 118 : return "\u000b"; // \v
      case 102 : return "\f";
      case 120 : return String.fromCharCode(hex_bytes(2)); // \x
      case 117 : return String.fromCharCode(hex_bytes(4)); // \u
      case 10 : return ""; // escaped newline: line continuation
      case 13 : // \r
        if (peek() == "\n") { // DOS newline
            next(true, in_string);
            return "";
        }
        // lone \r falls through to the checks below
    }
    // \0..\7 start a legacy octal escape sequence
    if (ch >= "0" && ch <= "7")
        return read_octal_escape_sequence(ch);
    // any other escaped character stands for itself
    return ch;
}
|
|
|
|
// Finish a legacy octal escape: `ch` holds the first octal digit; up
// to two more are read (three digits only when the value fits in a
// byte, i.e. first digit 0-3).  "\0" alone is always allowed; other
// octal escapes are rejected in strict mode.
function read_octal_escape_sequence(ch) {
    // Read
    var p = peek();
    if (p >= "0" && p <= "7") {
        ch += next(true);
        // a third digit only fits when the first is 0-3 (max \377)
        if (ch[0] <= "3" && (p = peek()) >= "0" && p <= "7")
            ch += next(true);
    }

    // Parse
    if (ch === "0") return "\0";
    if (ch.length > 0 && next_token.has_directive("use strict"))
        parse_error("Legacy octal escape sequences are not allowed in strict mode");
    return String.fromCharCode(parseInt(ch, 8));
}
|
|
|
|
// Read exactly `n` hex digits and return their numeric value.
// Raises a parse error on any non-hex character.
function hex_bytes(n) {
    var num = 0;
    while (n-- > 0) {
        var digit = parseInt(next(true), 16);
        if (isNaN(digit))
            parse_error("Invalid hex-character pattern in string");
        num = num * 16 + digit;
    }
    return num;
}
|
|
|
|
// Scan a string literal.  The opening quote has been seen by the
// caller (passed in as `quote_char`) and is consumed here via next().
// Raw newlines inside the string are an error; escapes are decoded.
var read_string = with_eof_error("Unterminated string constant", function(quote_char) {
    var quote = next(), ret = "";
    for (;;) {
        var ch = next(true, true);
        if (ch == "\\") ch = read_escaped_char(true);
        else if (NEWLINE_CHARS[ch]) parse_error("Unterminated string constant");
        else if (ch == quote) break;
        ret += ch;
    }
    var tok = token("string", ret);
    // remember which quote style was used, for the output stage
    tok.quote = quote_char;
    return tok;
});
|
|
|
|
// Consume the remainder of the current line as a comment token of the
// given `type` and stash it in S.comments_before.  Returns next_token
// itself, which callers use as a "was a comment" marker.
function skip_line_comment(type) {
    var regex_allowed = S.regex_allowed;
    var i = find_eol(), ret;
    if (i == -1) {
        // comment runs to end of input
        ret = S.text.substr(S.pos);
        S.pos = S.text.length;
    } else {
        ret = S.text.substring(S.pos, i);
        S.pos = i;
    }
    S.col = S.tokcol + (S.pos - S.tokpos);
    S.comments_before.push(token(type, ret, true));
    // comments must not disturb the regexp-position tracking
    S.regex_allowed = regex_allowed;
    return next_token;
}
|
|
|
|
// Consume a /* ... */ comment (the "/*" was consumed by the caller).
// Line endings inside the comment are normalized to \n in the stored
// text.  Returns next_token as a "was a comment" marker.
var skip_multiline_comment = with_eof_error("Unterminated multiline comment", function() {
    var regex_allowed = S.regex_allowed;
    var i = find("*/", true);
    var text = S.text.substring(S.pos, i).replace(/\r\n|\r|\u2028|\u2029/g, '\n');
    // update stream position
    forward(text.length /* doesn't count \r\n as 2 char while S.pos - i does */ + 2);
    S.comments_before.push(token("comment2", text, true));
    // comments must not disturb the regexp-position tracking
    S.regex_allowed = regex_allowed;
    return next_token;
});
|
|
|
|
// Scan an identifier, decoding \uXXXX escapes.  If the decoded name
// spells a keyword but contained an escape, the first character is
// re-escaped so downstream code treats it as a plain name rather than
// the keyword.
function read_name() {
    var backslash = false, name = "", ch, escaped = false, hex;
    while ((ch = peek()) != null) {
        if (!backslash) {
            if (ch == "\\") escaped = backslash = true, next();
            else if (is_identifier_char(ch)) name += next();
            else break;
        }
        else {
            // only \uXXXX escapes are valid inside identifiers
            if (ch != "u") parse_error("Expecting UnicodeEscapeSequence -- uXXXX");
            ch = read_escaped_char();
            if (!is_identifier_char(ch)) parse_error("Unicode char: " + ch.charCodeAt(0) + " is not valid in identifier");
            name += ch;
            backslash = false;
        }
    }
    if (KEYWORDS[name] && escaped) {
        // keep the first char escaped so this stays a name, not a keyword
        hex = name.charCodeAt(0).toString(16).toUpperCase();
        name = "\\u" + "0000".substr(hex.length) + hex + name.slice(1);
    }
    return name;
}
|
|
|
|
// Scan the body and flags of a regexp literal.  `source` carries any
// part already consumed (used when the parser re-scans a "/" or "/="
// operator as a regexp).  Tracks character classes so that "/" only
// terminates the literal outside of [...].
var read_regexp = with_eof_error("Unterminated regular expression", function(source) {
    var prev_backslash = false, ch, in_class = false;
    while ((ch = next(true))) if (NEWLINE_CHARS[ch]) {
        // regexp literals cannot span lines
        parse_error("Unexpected line terminator");
    } else if (prev_backslash) {
        // re-attach the backslash consumed on the previous iteration
        source += "\\" + ch;
        prev_backslash = false;
    } else if (ch == "[") {
        in_class = true;
        source += ch;
    } else if (ch == "]" && in_class) {
        in_class = false;
        source += ch;
    } else if (ch == "/" && !in_class) {
        break; // end of the regexp body
    } else if (ch == "\\") {
        prev_backslash = true;
    } else {
        source += ch;
    }
    // the flags are just an identifier-like word
    var mods = read_name();
    try {
        var regexp = new RegExp(source, mods);
        regexp.raw_source = source;
        return token("regexp", regexp);
    } catch(e) {
        // invalid pattern or flags — report at the token start
        parse_error(e.message);
    }
});
|
|
|
|
// Greedily extend `prefix` (or the next input char) while the result
// is still a known operator, e.g. ">" -> ">>" -> ">>>" -> ">>>=".
function read_operator(prefix) {
    var op = prefix || next();
    for (;;) {
        var ch = peek();
        if (!ch) break;
        var bigger = op + ch;
        if (!OPERATORS[bigger]) break;
        next();
        op = bigger;
    }
    return token("operator", op);
}
|
|
|
|
// A "/" may begin a line comment, a block comment, a regexp literal
// or a division operator — dispatch on what follows it.
function handle_slash() {
    next();
    var la = peek();
    if (la == "/") {
        next();
        return skip_line_comment("comment1");
    }
    if (la == "*") {
        next();
        return skip_multiline_comment();
    }
    if (S.regex_allowed) return read_regexp("");
    return read_operator("/");
}
|
|
|
|
// "." starts a number when followed by a digit (e.g. .5); otherwise
// it is punctuation.
function handle_dot() {
    next();
    if (is_digit(peek().charCodeAt(0))) return read_num(".");
    return token("punc", ".");
}
|
|
|
|
// Classify an identifier-like word.  After a "." everything is a
// plain property name, even a keyword.
function read_word() {
    var word = read_name();
    if (prev_was_dot) return token("name", word);
    if (KEYWORDS_ATOM[word]) return token("atom", word);
    if (!KEYWORDS[word]) return token("name", word);
    if (OPERATORS[word]) return token("operator", word);
    return token("keyword", word);
}
|
|
|
|
// Wrap a reader so that an EX_EOF escaping from it becomes a parse
// error with the given message; any other exception is re-thrown.
function with_eof_error(eof_error, cont) {
    return function(x) {
        try {
            return cont(x);
        } catch(ex) {
            if (ex !== EX_EOF) throw ex;
            parse_error(eof_error);
        }
    };
}
|
|
|
|
// The tokenizer's entry point: return the next token (an "eof" token
// at end of input).  When `force_regexp` is a string, re-scan from the
// current position as a regexp literal whose body starts with it.
function next_token(force_regexp) {
    if (force_regexp != null)
        return read_regexp(force_regexp);
    // a leading #! line is recorded as a comment5 token
    if (shebang && S.pos == 0 && looking_at("#!")) {
        start_token();
        forward(2);
        skip_line_comment("comment5");
    }
    for (;;) {
        skip_whitespace();
        start_token();
        if (html5_comments) {
            if (looking_at("<!--")) {
                forward(4);
                skip_line_comment("comment3");
                continue;
            }
            // "-->" closes an HTML comment only at the start of a line
            if (looking_at("-->") && S.newline_before) {
                forward(3);
                skip_line_comment("comment4");
                continue;
            }
        }
        var ch = peek();
        if (!ch) return token("eof");
        var code = ch.charCodeAt(0);
        switch (code) {
          case 34: case 39: return read_string(ch);
          case 46: return handle_dot();
          case 47: {
              // comment skippers return next_token itself as a marker;
              // in that case loop for the next real token
              var tok = handle_slash();
              if (tok === next_token) continue;
              return tok;
          }
        }
        if (is_digit(code)) return read_num();
        if (PUNC_CHARS[ch]) return token("punc", next());
        if (OPERATOR_CHARS[ch]) return read_operator();
        // 92 == "\\": identifier starting with a \uXXXX escape
        if (code == 92 || is_identifier_start(code)) return read_word();
        break;
    }
    parse_error("Unexpected character '" + ch + "'");
}
|
|
|
|
// Get — and optionally replace — the tokenizer state.  Used by the
// parser to save and restore position when backtracking.
next_token.context = function(nc) {
    if (nc) S = nc;
    return S;
};
|
|
|
|
// Record a directive in the innermost scope and bump its active count.
next_token.add_directive = function(directive) {
    S.directive_stack[S.directive_stack.length - 1].push(directive);
    var count = S.directives[directive];
    S.directives[directive] = count === undefined ? 1 : count + 1;
};
|
|
|
|
// Open a fresh directive scope (a function body or the toplevel).
next_token.push_directives_stack = function() {
    S.directive_stack.push([]);
};
|
|
|
|
// Close the current directive scope, decrementing the active count of
// every directive it contributed.
next_token.pop_directives_stack = function() {
    var directives = S.directive_stack.pop();
    for (var i = directives.length; --i >= 0;) {
        S.directives[directives[i]]--;
    }
};
|
|
|
|
// True while at least one enclosing scope declared the directive.
next_token.has_directive = function(directive) {
    return S.directives[directive] > 0;
};
|
|
|
|
return next_token;
|
|
}
|
|
|
|
/* -----[ Parser (constants) ]----- */
|
|
|
|
// Operators valid in prefix position (typeof x, -x, ++x, ...).
var UNARY_PREFIX = makePredicate([
    "typeof",
    "void",
    "delete",
    "--",
    "++",
    "!",
    "~",
    "-",
    "+"
]);

// Operators valid in postfix position (x++, x--).
var UNARY_POSTFIX = makePredicate([ "--", "++" ]);

// Assignment operators, plain and compound.
var ASSIGNMENT = makePredicate([ "=", "+=", "-=", "/=", "*=", "%=", ">>=", "<<=", ">>>=", "|=", "^=", "&=" ]);
|
|
|
|
// Map each binary operator to its precedence level: 1 ("||", lowest)
// up to 10 ("*", "/", "%", highest).
var PRECEDENCE = function(a, ret) {
    a.forEach(function(tier, i) {
        tier.forEach(function(op) {
            ret[op] = i + 1;
        });
    });
    return ret;
}([
    ["||"],
    ["&&"],
    ["|"],
    ["^"],
    ["&"],
    ["==", "===", "!=", "!=="],
    ["<", ">", "<=", ">=", "in", "instanceof"],
    [">>", "<<", ">>>"],
    ["+", "-"],
    ["*", "/", "%"]
], {});
|
|
|
|
// Token types that can begin an atomic (primary) expression.
var ATOMIC_START_TOKEN = makePredicate([ "atom", "num", "string", "regexp", "name" ]);
|
|
|
|
/* -----[ Parser ]----- */
|
|
|
|
function parse($TEXT, options) {
|
|
options = defaults(options, {
    bare_returns   : false, // permit `return` outside of functions
    expression     : false, // parse a single expression, not a program
    filename       : null,  // name used in errors and token .file
    html5_comments : true,  // recognize <!-- / --> comments
    shebang        : true,  // skip a leading #! line
    strict         : false, // disable automatic semicolon insertion
    toplevel       : null,  // existing AST.Toplevel to append to
}, true);

// Mutable parser state.
var S = {
    input         : (typeof $TEXT == "string"
                     ? tokenizer($TEXT, options.filename,
                                 options.html5_comments, options.shebang)
                     : $TEXT),   // a ready-made tokenizer may be passed in
    token         : null,        // current token
    prev          : null,        // previous token
    peeked        : null,        // one-token lookahead cache
    in_function   : 0,           // nesting depth of function bodies
    in_directives : true,        // still inside a directive prologue?
    in_loop       : 0,           // nesting depth of loop bodies
    labels        : []           // active statement labels
};

// prime the current token
S.token = next();
|
|
|
|
// Does the current token match the given type (and optional value)?
function is(type, value) {
    return is_token(S.token, type, value);
}
|
|
|
|
// One-token lookahead, fetched lazily and cached in S.peeked.
function peek() {
    if (!S.peeked) S.peeked = S.input();
    return S.peeked;
}
|
|
|
|
// Advance to the next token, consuming the lookahead if one is cached.
// The directive prologue continues only through string-literal
// statements and semicolons.
function next() {
    S.prev = S.token;
    if (S.peeked) {
        S.token = S.peeked;
        S.peeked = null;
    } else {
        S.token = S.input();
    }
    if (S.in_directives) {
        S.in_directives = S.token.type == "string" || is("punc", ";");
    }
    return S.token;
}
|
|
|
|
// The token preceding the current one.
function prev() {
    return S.prev;
}
|
|
|
|
// Throw a parse error; missing coordinates default to the tokenizer's
// current token start.
function croak(msg, line, col, pos) {
    var ctx = S.input.context();
    if (line == null) line = ctx.tokline;
    if (col == null) col = ctx.tokcol;
    if (pos == null) pos = ctx.tokpos;
    js_error(msg, ctx.filename, line, col, pos);
}
|
|
|
|
// Report an error at a specific token's location.
function token_error(token, msg) {
    croak(msg, token.line, token.col);
}
|
|
|
|
// Raise "Unexpected token" at the given token (default: current).
function unexpected(token) {
    if (token == null) token = S.token;
    token_error(token, "Unexpected token: " + token.type + " (" + token.value + ")");
}
|
|
|
|
// Consume and return the next token when the current one matches;
// otherwise raise a descriptive error (token_error always throws).
function expect_token(type, val) {
    if (!is(type, val)) {
        token_error(S.token, "Unexpected token " + S.token.type + " «" + S.token.value + "»" + ", expected " + type + " «" + val + "»");
    }
    return next();
}
|
|
|
|
// Consume an expected punctuation token.
function expect(punc) {
    return expect_token("punc", punc);
}
|
|
|
|
// A newline precedes the token either directly (nlb) or within any of
// the comments attached before it.
function has_newline_before(token) {
    if (token.nlb) return true;
    return !all(token.comments_before, function(comment) {
        return !comment.nlb;
    });
}
|
|
|
|
// Automatic semicolon insertion, per ES rules; disabled entirely by
// options.strict.
function can_insert_semicolon() {
    if (options.strict) return false;
    return is("eof") || is("punc", "}") || has_newline_before(S.token);
}
|
|
|
|
// Consume a ";", or rely on ASI; complain when neither applies and
// the semicolon is not optional.
function semicolon(optional) {
    if (is("punc", ";")) {
        next();
    } else if (!optional && !can_insert_semicolon()) {
        unexpected();
    }
}
|
|
|
|
// Parse "(" expression ")" and return the inner expression.
function parenthesised() {
    expect("(");
    var exp = expression(true);
    expect(")");
    return exp;
}
|
|
|
|
// Decorate a parser function so the node it produces records the
// start and end tokens it spanned.
function embed_tokens(parser) {
    return function() {
        var start = S.token;
        var expr = parser.apply(null, arguments);
        expr.start = start;
        expr.end = prev();
        return expr;
    };
}
|
|
|
|
// A "/" or "/=" token in expression position actually begins a regexp
// literal: discard the lookahead and re-scan it as one, seeding the
// body with whatever followed the initial "/".
function handle_regexp() {
    var forced = is("operator", "/") || is("operator", "/=");
    if (forced) {
        S.peeked = null;
        S.token = S.input(S.token.value.substr(1)); // force regexp
    }
}
|
|
|
|
// Parse a single statement.  `strict_defun` marks positions where a
// function declaration is legal even in strict mode (top level, or
// directly inside a function body).
var statement = embed_tokens(function(strict_defun) {
    // a "/" here would start a regexp literal, not a division
    handle_regexp();
    switch (S.token.type) {
      case "string":
        // while still in the directive prologue, a string statement may
        // be a directive such as "use strict"
        if (S.in_directives) {
            var token = peek();
            // a directive must contain no escape sequences and must be
            // terminated like a statement
            if (S.token.raw.indexOf("\\") == -1
                && (is_token(token, "punc", ";")
                    || is_token(token, "punc", "}")
                    || has_newline_before(token)
                    || is_token(token, "eof"))) {
                S.input.add_directive(S.token.value);
            } else {
                S.in_directives = false;
            }
        }
        // read S.in_directives before simple_statement() can change it
        var dir = S.in_directives, stat = simple_statement();
        return dir ? new AST.Directive(stat.body) : stat;
      case "num":
      case "regexp":
      case "operator":
      case "atom":
        return simple_statement();

      case "name":
        // a name followed by ":" introduces a labeled statement
        return is_token(peek(), "punc", ":")
            ? labeled_statement()
            : simple_statement();

      case "punc":
        switch (S.token.value) {
          case "{":
            return new AST.BlockStatement({
                start : S.token,
                body : block_(),
                end : prev()
            });
          case "[":
          case "(":
            return simple_statement();
          case ";":
            // an empty statement ends the directive prologue
            S.in_directives = false;
            next();
            return new AST.EmptyStatement();
          default:
            unexpected();
        }

      case "keyword":
        switch (S.token.value) {
          case "break":
            next();
            return break_cont(AST.Break);

          case "continue":
            next();
            return break_cont(AST.Continue);

          case "debugger":
            next();
            semicolon();
            return new AST.Debugger();

          case "do":
            next();
            var body = in_loop(statement);
            expect_token("keyword", "while");
            var condition = parenthesised();
            // the trailing ";" of do..while is optional
            semicolon(true);
            return new AST.Do({
                body : body,
                condition : condition
            });

          case "while":
            next();
            return new AST.While({
                condition : parenthesised(),
                body : in_loop(statement)
            });

          case "for":
            next();
            return for_();

          case "function":
            // strict mode only allows declarations at top level or
            // immediately within another function
            if (!strict_defun && S.input.has_directive("use strict")) {
                croak("In strict mode code, functions can only be declared at top level or immediately within another function.");
            }
            next();
            return function_(AST.Defun);

          case "if":
            next();
            return if_();

          case "return":
            if (S.in_function == 0 && !options.bare_returns)
                croak("'return' outside of function");
            next();
            var value = null;
            if (is("punc", ";")) {
                next();
            } else if (!can_insert_semicolon()) {
                value = expression(true);
                semicolon();
            }
            return new AST.Return({
                value: value
            });

          case "switch":
            next();
            return new AST.Switch({
                expression : parenthesised(),
                body : in_loop(switch_body_)
            });

          case "throw":
            next();
            // ASI must not apply between `throw` and its operand
            if (has_newline_before(S.token))
                croak("Illegal newline after 'throw'");
            var value = expression(true);
            semicolon();
            return new AST.Throw({
                value: value
            });

          case "try":
            next();
            return try_();

          case "var":
            next();
            var node = var_();
            semicolon();
            return node;

          case "with":
            if (S.input.has_directive("use strict")) {
                croak("Strict mode may not include a with statement");
            }
            next();
            return new AST.With({
                expression : parenthesised(),
                body : statement()
            });
        }
    }
    unexpected();
});
|
|
|
|
// Parse a labeled statement; the label name is the current token.
function labeled_statement() {
    var label = as_symbol(AST.Label);
    if (!all(S.labels, function(l) {
        return l.name != label.name;
    })) {
        // ECMA-262, 12.12: An ECMAScript program is considered
        // syntactically incorrect if it contains a
        // LabelledStatement that is enclosed by a
        // LabelledStatement with the same Identifier as label.
        croak("Label " + label.name + " defined twice");
    }
    expect(":");
    // make the label visible to break/continue inside the body
    S.labels.push(label);
    var stat = statement();
    S.labels.pop();
    if (!(stat instanceof AST.IterationStatement)) {
        // check for `continue` that refers to this label.
        // those should be reported as syntax errors.
        // https://github.com/mishoo/UglifyJS2/issues/287
        label.references.forEach(function(ref) {
            if (ref instanceof AST.Continue) {
                ref = ref.label.start;
                croak("Continue label `" + label.name + "` refers to non-IterationStatement.",
                    ref.line, ref.col, ref.pos);
            }
        });
    }
    return new AST.LabeledStatement({ body: stat, label: label });
}
|
|
|
|
// Parse an expression statement: an expression followed by a semicolon.
function simple_statement(tmp) {
    tmp = expression(true);
    semicolon();
    return new AST.SimpleStatement({ body: tmp });
}
|
|
|
|
// Parse the remainder of a break/continue statement; `type` is either
// AST.Break or AST.Continue.  Resolves an optional label reference.
function break_cont(type) {
    var label = null, ldef;
    // a label may only follow on the same line (no ASI in between)
    if (!can_insert_semicolon()) {
        label = as_symbol(AST.LabelRef, true);
    }
    if (label != null) {
        ldef = find_if(function(l) {
            return l.name == label.name;
        }, S.labels);
        if (!ldef) croak("Undefined label " + label.name);
        label.thedef = ldef;
    } else if (S.in_loop == 0) croak(type.TYPE + " not inside a loop or switch");
    semicolon();
    var stat = new type({ label: label });
    // record the reference so labeled_statement() can validate it later
    if (ldef) ldef.references.push(stat);
    return stat;
}
|
|
|
|
// Parse a `for` head (the `for` keyword is already consumed) and
// dispatch to either a classic for-loop or a for..in loop.
function for_() {
    expect("(");
    var init = null;
    if (!is("punc", ";")) {
        init = is("keyword", "var")
            ? (next(), var_(true))  // var_(true): no `in` in initializers
            : expression(true, true);
        if (is("operator", "in")) {
            // validate the left-hand side of for..in
            if (init instanceof AST.Var) {
                if (init.definitions.length > 1)
                    croak("Only one variable declaration allowed in for..in loop", init.start.line, init.start.col, init.start.pos);
            } else if (!is_assignable(init)) {
                croak("Invalid left-hand side in for..in loop", init.start.line, init.start.col, init.start.pos);
            }
            next();
            return for_in(init);
        }
    }
    return regular_for(init);
}
|
|
|
|
// Parse the rest of a classic `for (init; test; step)` statement; the
// opening paren and `init` have already been consumed.
function regular_for(init) {
    expect(";");
    var condition = null;
    if (!is("punc", ";")) condition = expression(true);
    expect(";");
    var step = null;
    if (!is("punc", ")")) step = expression(true);
    expect(")");
    return new AST.For({
        init      : init,
        condition : condition,
        step      : step,
        body      : in_loop(statement)
    });
}
|
|
|
|
// Parse the rest of a `for (init in object)` statement; the `in`
// operator has already been consumed.
function for_in(init) {
    var object = expression(true);
    expect(")");
    return new AST.ForIn({
        init   : init,
        object : object,
        body   : in_loop(statement)
    });
}
|
|
|
|
// Parse a function after the `function` keyword.  `ctor` selects the
// node type: AST.Defun (declaration), AST.Function (expression) or
// AST.Accessor (getter/setter body).
var function_ = function(ctor) {
    var in_statement = ctor === AST.Defun;
    var name = is("name") ? as_symbol(in_statement ? AST.SymbolDefun : AST.SymbolLambda) : null;
    // a function declaration must be named
    if (in_statement && !name)
        unexpected();
    if (name && ctor !== AST.Accessor && !(name instanceof AST.SymbolDeclaration))
        unexpected(prev());
    expect("(");
    var argnames = [];
    for (var first = true; !is("punc", ")");) {
        if (first) first = false; else expect(",");
        argnames.push(as_symbol(AST.SymbolFunarg));
    }
    next();
    // save loop/label context: it does not cross function boundaries
    var loop = S.in_loop;
    var labels = S.labels;
    ++S.in_function;
    S.in_directives = true;
    S.input.push_directives_stack();
    S.in_loop = 0;
    S.labels = [];
    var body = block_(true);
    // a "use strict" directive inside the body also restricts the
    // function's own name and its parameters
    if (S.input.has_directive("use strict")) {
        if (name) strict_verify_symbol(name);
        argnames.forEach(strict_verify_symbol);
    }
    S.input.pop_directives_stack();
    --S.in_function;
    // restore the saved context
    S.in_loop = loop;
    S.labels = labels;
    return new ctor({
        name: name,
        argnames: argnames,
        body: body
    });
};
|
|
|
|
// Parse an `if` statement (the `if` keyword has been consumed),
// including an optional `else` branch.
function if_() {
    var condition = parenthesised();
    var body = statement();
    var alternative = null;
    if (is("keyword", "else")) {
        next();
        alternative = statement();
    }
    return new AST.If({
        condition   : condition,
        body        : body,
        alternative : alternative
    });
}
|
|
|
|
// Parse a brace-delimited statement list and return it as an array.
// `strict_defun` is forwarded to statement() so function declarations
// are permitted directly inside a function body.
function block_(strict_defun) {
    expect("{");
    var statements = [];
    while (!is("punc", "}")) {
        if (is("eof")) unexpected();
        statements.push(statement(strict_defun));
    }
    next();
    return statements;
}
|
|
|
|
// Parse the body of a switch statement: a brace-delimited sequence of
// `case`/`default` branches, each collecting its trailing statements.
function switch_body_() {
    expect("{");
    var a = [], cur = null, branch = null, tmp;
    while (!is("punc", "}")) {
        if (is("eof")) unexpected();
        if (is("keyword", "case")) {
            // close the previous branch before starting a new one
            if (branch) branch.end = prev();
            cur = [];
            branch = new AST.Case({
                start : (tmp = S.token, next(), tmp),
                expression : expression(true),
                body : cur
            });
            a.push(branch);
            expect(":");
        }
        else if (is("keyword", "default")) {
            if (branch) branch.end = prev();
            cur = [];
            branch = new AST.Default({
                start : (tmp = S.token, next(), expect(":"), tmp),
                body : cur
            });
            a.push(branch);
        }
        else {
            // statements may only follow a case/default label
            if (!cur) unexpected();
            cur.push(statement());
        }
    }
    // close the final branch
    if (branch) branch.end = prev();
    next();
    return a;
}
|
|
|
|
// Parse the remainder of a `try` statement: the protected block plus
// optional `catch` and `finally` clauses (at least one is required).
function try_() {
    var body = block_();
    var bcatch = null;
    var bfinally = null;
    if (is("keyword", "catch")) {
        var catch_start = S.token;
        next();
        expect("(");
        var argname = as_symbol(AST.SymbolCatch);
        expect(")");
        bcatch = new AST.Catch({
            start   : catch_start,
            argname : argname,
            body    : block_(),
            end     : prev()
        });
    }
    if (is("keyword", "finally")) {
        var finally_start = S.token;
        next();
        bfinally = new AST.Finally({
            start : finally_start,
            body  : block_(),
            end   : prev()
        });
    }
    if (!bcatch && !bfinally) croak("Missing catch/finally blocks");
    return new AST.Try({
        body     : body,
        bcatch   : bcatch,
        bfinally : bfinally
    });
}
|
|
|
|
// Parse a comma-separated list of variable definitions (after `var`).
// `no_in` suppresses the `in` operator in initializers (for-loop heads).
function vardefs(no_in) {
    var defs = [];
    while (true) {
        var start = S.token;
        var name = as_symbol(AST.SymbolVar);
        var value = null;
        if (is("operator", "=")) {
            next();
            value = expression(false, no_in);
        }
        defs.push(new AST.VarDef({
            start : start,
            name  : name,
            value : value,
            end   : prev()
        }));
        if (!is("punc", ",")) break;
        next();
    }
    return defs;
}
|
|
|
|
// Build an AST.Var node; called with the `var` keyword already
// consumed, so prev() is the `var` token itself.
var var_ = function(no_in) {
    var start = prev();
    var definitions = vardefs(no_in);
    return new AST.Var({
        start       : start,
        definitions : definitions,
        end         : prev()
    });
};
|
|
|
|
// Parse a `new` expression with an optional argument list.
var new_ = function(allow_calls) {
    var start = S.token;
    expect_token("operator", "new");
    var exp = expr_atom(false);
    var args = [];
    if (is("punc", "(")) {
        next();
        args = expr_list(")");
    }
    var call = new AST.New({
        start      : start,
        expression : exp,
        args       : args,
        end        : prev()
    });
    // honor /*@__PURE__*/ annotations
    mark_pure(call);
    return subscripts(call, allow_calls);
};
|
|
|
|
// Turn the current atomic token (name, literal, or keyword atom) into
// the corresponding AST node and advance past it.
function as_atom_node() {
    var tok = S.token, ret;
    switch (tok.type) {
      case "name":
        ret = _make_symbol(AST.SymbolRef);
        break;
      case "num":
        ret = new AST.Number({ start: tok, end: tok, value: tok.value });
        break;
      case "string":
        ret = new AST.String({
            start : tok,
            end   : tok,
            value : tok.value,
            quote : tok.quote
        });
        break;
      case "regexp":
        ret = new AST.RegExp({ start: tok, end: tok, value: tok.value });
        break;
      case "atom":
        switch (tok.value) {
          case "false":
            ret = new AST.False({ start: tok, end: tok });
            break;
          case "true":
            ret = new AST.True({ start: tok, end: tok });
            break;
          case "null":
            ret = new AST.Null({ start: tok, end: tok });
            break;
        }
        break;
    }
    next();
    return ret;
}
|
|
|
|
// Parse a primary expression: `new`, a parenthesised expression, an
// array/object literal, a function expression, or an atomic token,
// then apply any trailing subscripts/calls.
var expr_atom = function(allow_calls) {
    if (is("operator", "new")) {
        return new_(allow_calls);
    }
    var start = S.token;
    if (is("punc")) {
        switch (start.value) {
          case "(":
            next();
            var ex = expression(true);
            // migrate the comments attached to the parens onto the inner
            // expression's start/end tokens so they survive if the parens
            // are dropped in output
            var len = start.comments_before.length;
            [].unshift.apply(ex.start.comments_before, start.comments_before);
            start.comments_before = ex.start.comments_before;
            // remember how many comments belong to the "(" itself; used by
            // mark_pure() to avoid picking up inner annotations
            start.comments_before_length = len;
            if (len == 0 && start.comments_before.length > 0) {
                var comment = start.comments_before[0];
                if (!comment.nlb) {
                    comment.nlb = start.nlb;
                    start.nlb = false;
                }
            }
            start.comments_after = ex.start.comments_after;
            ex.start = start;
            expect(")");
            var end = prev();
            end.comments_before = ex.end.comments_before;
            [].push.apply(ex.end.comments_after, end.comments_after);
            end.comments_after = ex.end.comments_after;
            ex.end = end;
            // a parenthesised call may carry a #__PURE__ annotation
            if (ex instanceof AST.Call) mark_pure(ex);
            return subscripts(ex, allow_calls);
          case "[":
            return subscripts(array_(), allow_calls);
          case "{":
            return subscripts(object_(), allow_calls);
        }
        unexpected();
    }
    if (is("keyword", "function")) {
        next();
        var func = function_(AST.Function);
        func.start = start;
        func.end = prev();
        return subscripts(func, allow_calls);
    }
    if (ATOMIC_START_TOKEN[S.token.type]) {
        return subscripts(as_atom_node(), allow_calls);
    }
    unexpected();
};
|
|
|
|
// Parse a comma-separated expression list up to (and consuming) the
// `closing` punctuation.  `allow_trailing_comma` permits a dangling
// comma; `allow_empty` turns elisions into AST.Hole nodes (arrays).
function expr_list(closing, allow_trailing_comma, allow_empty) {
    var list = [];
    var first = true;
    while (!is("punc", closing)) {
        if (!first) expect(",");
        first = false;
        if (allow_trailing_comma && is("punc", closing)) break;
        if (allow_empty && is("punc", ",")) {
            // elision: record a hole without consuming the comma
            list.push(new AST.Hole({ start: S.token, end: S.token }));
        } else {
            list.push(expression(false));
        }
    }
    next();
    return list;
}
|
|
|
|
// Parse an array literal; trailing commas are allowed unless
// options.strict is set.
var array_ = embed_tokens(function() {
    expect("[");
    var elements = expr_list("]", !options.strict, true);
    return new AST.Array({ elements: elements });
});
|
|
|
|
// Parse a getter/setter body as an AST.Accessor function node.
var create_accessor = embed_tokens(function() {
    return function_(AST.Accessor);
});
|
|
|
|
// Parse an object literal, including get/set accessor properties.
var object_ = embed_tokens(function() {
    expect("{");
    var first = true, a = [];
    while (!is("punc", "}")) {
        if (first) first = false; else expect(",");
        if (!options.strict && is("punc", "}"))
            // allow trailing comma
            break;
        var start = S.token;
        var type = start.type;
        var name = as_property_name();
        // a name that is not followed by ":" introduces an accessor
        // (e.g. `get foo() {...}`)
        if (type == "name" && !is("punc", ":")) {
            var key = new AST.SymbolAccessor({
                start: S.token,
                name: "" + as_property_name(),
                end: prev()
            });
            if (name == "get") {
                a.push(new AST.ObjectGetter({
                    start : start,
                    key : key,
                    value : create_accessor(),
                    end : prev()
                }));
                continue;
            }
            if (name == "set") {
                a.push(new AST.ObjectSetter({
                    start : start,
                    key : key,
                    value : create_accessor(),
                    end : prev()
                }));
                continue;
            }
        }
        // ordinary key: value property
        expect(":");
        a.push(new AST.ObjectKeyVal({
            start : start,
            quote : start.quote,
            key : "" + name,
            value : expression(false),
            end : prev()
        }));
    }
    next();
    return new AST.Object({ properties: a });
});
|
|
|
|
// Parse an object property name; unlike identifiers, keywords and
// literals are allowed here.
function as_property_name() {
    var tmp = S.token;
    switch (tmp.type) {
      case "operator":
        // operators that double as keywords (`in`, `typeof`, ...) are
        // acceptable property names; anything else is not
        if (!KEYWORDS[tmp.value]) unexpected();
        // falls through
      case "num":
      case "string":
      case "name":
      case "keyword":
      case "atom":
        next();
        return tmp.value;
      default:
        unexpected();
    }
}
|
|
|
|
// Expect a plain identifier token and return its name.
function as_name() {
    var token = S.token;
    if (token.type != "name") unexpected();
    next();
    return token.value;
}
|
|
|
|
// Create a symbol node of the given type for the current token,
// special-casing `this` into an AST.This node.
function _make_symbol(type) {
    var name = S.token.value;
    var ctor = name == "this" ? AST.This : type;
    return new ctor({
        name  : String(name),
        start : S.token,
        end   : S.token
    });
}
|
|
|
|
// In strict mode, `arguments` and `eval` may not be used as binding names.
function strict_verify_symbol(sym) {
    if (sym.name != "arguments" && sym.name != "eval") return;
    croak("Unexpected " + sym.name + " in strict mode", sym.start.line, sym.start.col, sym.start.pos);
}
|
|
|
|
// Parse the current token as a symbol of the given AST type.  When the
// token is not a name, either croak or (with `noerror`) return null.
function as_symbol(type, noerror) {
    if (!is("name")) {
        if (!noerror) croak("Name expected");
        return null;
    }
    var sym = _make_symbol(type);
    // declarations are subject to strict-mode name restrictions
    if (sym instanceof AST.SymbolDeclaration && S.input.has_directive("use strict")) {
        strict_verify_symbol(sym);
    }
    next();
    return sym;
}
|
|
|
|
// Scan the comments preceding a call for a #__PURE__ / @__PURE__
// annotation and, when found, record the comment on the call node.
function mark_pure(call) {
    var start = call.start;
    var comments = start.comments_before;
    // comments_before_length, when present, limits the scan to comments
    // that belong to this call rather than to an inner expression
    var n = HOP(start, "comments_before_length") ? start.comments_before_length : comments.length;
    for (var i = n - 1; i >= 0; i--) {
        var comment = comments[i];
        if (/[@#]__PURE__/.test(comment.value)) {
            call.pure = comment;
            break;
        }
    }
}
|
|
|
|
// Recursively apply member access (`.` / `[]`) and, when allowed,
// call expressions to `expr`.
var subscripts = function(expr, allow_calls) {
    var start = expr.start;
    if (is("punc", ".")) {
        next();
        return subscripts(new AST.Dot({
            start : start,
            expression : expr,
            property : as_name(),
            end : prev()
        }), allow_calls);
    }
    if (is("punc", "[")) {
        next();
        var prop = expression(true);
        expect("]");
        return subscripts(new AST.Sub({
            start : start,
            expression : expr,
            property : prop,
            end : prev()
        }), allow_calls);
    }
    if (allow_calls && is("punc", "(")) {
        next();
        var call = new AST.Call({
            start : start,
            expression : expr,
            args : expr_list(")"),
            end : prev()
        });
        // honor /*@__PURE__*/ annotations
        mark_pure(call);
        return subscripts(call, true);
    }
    return expr;
};
|
|
|
|
// Parse prefix unary operators, then an atom, then any postfix
// operators (which must not be separated by a newline, per ASI rules).
var maybe_unary = function(allow_calls) {
    var start = S.token;
    if (is("operator") && UNARY_PREFIX[start.value]) {
        next();
        // a "/" after a unary operator starts a regexp literal
        handle_regexp();
        var ex = make_unary(AST.UnaryPrefix, start, maybe_unary(allow_calls));
        ex.start = start;
        ex.end = prev();
        return ex;
    }
    var val = expr_atom(allow_calls);
    while (is("operator") && UNARY_POSTFIX[S.token.value] && !has_newline_before(S.token)) {
        val = make_unary(AST.UnaryPostfix, S.token, val);
        val.start = start;
        val.end = S.token;
        next();
    }
    return val;
};
|
|
|
|
// Build a unary node, validating operand restrictions for ++/-- and
// for strict-mode `delete`.
function make_unary(ctor, token, expr) {
    var op = token.value;
    if (op == "++" || op == "--") {
        if (!is_assignable(expr))
            croak("Invalid use of " + op + " operator", token.line, token.col, token.pos);
    } else if (op == "delete") {
        if (expr instanceof AST.SymbolRef && S.input.has_directive("use strict"))
            croak("Calling delete on expression not allowed in strict mode", expr.start.line, expr.start.col, expr.start.pos);
    }
    return new ctor({ operator: op, expression: expr });
}
|
|
|
|
// Precedence-climbing parser for binary operators: keep consuming
// operators that bind tighter than `min_prec`.
var expr_op = function(left, min_prec, no_in) {
    var op = is("operator") ? S.token.value : null;
    // `in` is not a binary operator inside a for-loop head
    if (op == "in" && no_in) op = null;
    var prec = op != null ? PRECEDENCE[op] : null;
    if (prec != null && prec > min_prec) {
        next();
        var right = expr_op(maybe_unary(true), prec, no_in);
        return expr_op(new AST.Binary({
            start : left.start,
            left : left,
            operator : op,
            right : right,
            end : right.end
        }), min_prec, no_in);
    }
    return left;
};
|
|
|
|
// Parse a (possibly unary) expression followed by any binary operators.
function expr_ops(no_in) {
    return expr_op(maybe_unary(true), 0, no_in);
}
|
|
|
|
// Parse a ternary conditional expression, if one is present.
var maybe_conditional = function(no_in) {
    var start = S.token;
    var condition = expr_ops(no_in);
    if (!is("operator", "?")) return condition;
    next();
    var consequent = expression(false);
    expect(":");
    return new AST.Conditional({
        start       : start,
        condition   : condition,
        consequent  : consequent,
        alternative : expression(false, no_in),
        end         : prev()
    });
};
|
|
|
|
// Only property accesses and symbol references are valid assignment
// targets (and ++/-- operands).
function is_assignable(expr) {
    return expr instanceof AST.PropAccess || expr instanceof AST.SymbolRef;
}
|
|
|
|
// Parse an assignment expression (right-associative), validating that
// the left-hand side is assignable.
var maybe_assign = function(no_in) {
    var start = S.token;
    var left = maybe_conditional(no_in);
    var op = S.token.value;
    if (!is("operator") || !ASSIGNMENT[op]) return left;
    if (!is_assignable(left)) croak("Invalid assignment");
    next();
    return new AST.Assign({
        start    : start,
        left     : left,
        operator : op,
        right    : maybe_assign(no_in),
        end      : prev()
    });
};
|
|
|
|
// Parse a full expression.  With `commas` set, parse a comma-separated
// sequence and wrap multiple expressions in an AST.Sequence; `no_in`
// suppresses the `in` operator (for-loop heads).
var expression = function(commas, no_in) {
    var start = S.token;
    var exprs = [];
    while (true) {
        exprs.push(maybe_assign(no_in));
        if (!commas || !is("punc", ",")) break;
        next();
        // (removed dead `commas = true;` — this point is reachable only
        // when `commas` is already truthy, and it is only tested for
        // truthiness, so the reassignment had no effect)
    }
    return exprs.length == 1 ? exprs[0] : new AST.Sequence({
        start       : start,
        expressions : exprs,
        end         : peek()
    });
};
|
|
|
|
// Run a parser callback with the loop-nesting counter bumped so that
// break/continue statements inside it are accepted.
function in_loop(cont) {
    ++S.in_loop;
    var result = cont();
    --S.in_loop;
    return result;
}
|
|
|
|
// In expression mode, parse and return a single expression instead of
// a whole program.
if (options.expression) {
    return expression(true);
}

// Parse the whole program: statements up to EOF, wrapped in an
// AST.Toplevel (or appended to a caller-supplied one).
return function() {
    var start = S.token;
    var body = [];
    S.input.push_directives_stack();
    while (!is("eof"))
        body.push(statement(true));
    S.input.pop_directives_stack();
    var end = prev();
    var toplevel = options.toplevel;
    if (toplevel) {
        // continuing a previous parse: append to the existing toplevel
        toplevel.body = toplevel.body.concat(body);
        toplevel.end = end;
    } else {
        toplevel = new AST.Toplevel({ start: start, body: body, end: end });
    }
    return toplevel;
}();
|
|
}
|
|
|
|
// Public API of the tokenizer/parser module: character-class helpers,
// token tables, the tokenizer and the parser entry point.
merge(exports, {
    KEYWORDS : KEYWORDS,
    KEYWORDS_ATOM : KEYWORDS_ATOM,
    RESERVED_WORDS : RESERVED_WORDS,
    KEYWORDS_BEFORE_EXPRESSION : KEYWORDS_BEFORE_EXPRESSION,
    OPERATOR_CHARS : OPERATOR_CHARS,
    RE_HEX_NUMBER : RE_HEX_NUMBER,
    RE_OCT_NUMBER : RE_OCT_NUMBER,
    OPERATORS : OPERATORS,
    WHITESPACE_CHARS : WHITESPACE_CHARS,
    NEWLINE_CHARS : NEWLINE_CHARS,
    PUNC_BEFORE_EXPRESSION : PUNC_BEFORE_EXPRESSION,
    PUNC_CHARS : PUNC_CHARS,
    UNICODE : UNICODE,
    is_letter : is_letter,
    is_surrogate_pair_head : is_surrogate_pair_head,
    is_surrogate_pair_tail : is_surrogate_pair_tail,
    is_digit : is_digit,
    is_alphanumeric_char : is_alphanumeric_char,
    is_unicode_digit : is_unicode_digit,
    is_unicode_combining_mark : is_unicode_combining_mark,
    is_unicode_connector_punctuation : is_unicode_connector_punctuation,
    is_identifier : is_identifier,
    is_identifier_start : is_identifier_start,
    is_identifier_char : is_identifier_char,
    is_identifier_string : is_identifier_string,
    parse_js_number : parse_js_number,
    JS_Parse_Error : JS_Parse_Error,
    js_error : js_error,
    is_token : is_token,
    EX_EOF : EX_EOF,
    tokenizer : tokenizer,
    UNARY_PREFIX : UNARY_PREFIX,
    UNARY_POSTFIX : UNARY_POSTFIX,
    ASSIGNMENT : ASSIGNMENT,
    PRECEDENCE : PRECEDENCE,
    ATOMIC_START_TOKEN : ATOMIC_START_TOKEN,
    parse : parse,
});
|