mirror of https://github.com/S2-/minifyfromhtml.git (synced 2025-08-03 20:30:04 +02:00)

commit: update packages to latest version
node_modules/terser/lib/parse.js (generated, vendored) | 169

--- a/node_modules/terser/lib/parse.js
+++ b/node_modules/terser/lib/parse.js
@@ -162,17 +162,19 @@ import {
 } from "./ast.js";
 
 var LATEST_RAW = ""; // Only used for numbers and template strings
-var LATEST_TEMPLATE_END = true;
+var TEMPLATE_RAWS = new Map(); // Raw template strings
 
 var KEYWORDS = "break case catch class const continue debugger default delete do else export extends finally for function if in instanceof let new return switch throw try typeof var void while with";
 var KEYWORDS_ATOM = "false null true";
-var RESERVED_WORDS = "enum implements import interface package private protected public static super this " + KEYWORDS_ATOM + " " + KEYWORDS;
+var RESERVED_WORDS = "enum import super this " + KEYWORDS_ATOM + " " + KEYWORDS;
+var ALL_RESERVED_WORDS = "implements interface package private protected public static " + RESERVED_WORDS;
 var KEYWORDS_BEFORE_EXPRESSION = "return new delete throw else case yield await";
 
 KEYWORDS = makePredicate(KEYWORDS);
 RESERVED_WORDS = makePredicate(RESERVED_WORDS);
 KEYWORDS_BEFORE_EXPRESSION = makePredicate(KEYWORDS_BEFORE_EXPRESSION);
 KEYWORDS_ATOM = makePredicate(KEYWORDS_ATOM);
+ALL_RESERVED_WORDS = makePredicate(ALL_RESERVED_WORDS);
 
 var OPERATOR_CHARS = makePredicate(characters("+-*&%=<>!?|~^"));
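
Note: this hunk splits the reserved-word list in two. RESERVED_WORDS keeps only the words reserved in every context, while ALL_RESERVED_WORDS adds the ones that are reserved only under strict-mode rules, presumably so sloppy-mode identifiers such as "implements" are no longer rejected outright. A minimal sketch of the resulting behavior, with the long keyword list elided and terser's makePredicate replaced by plain Sets for illustration:

    const always = new Set("enum import super this false null true".split(" ")); // plus KEYWORDS
    const strictOnly = new Set("implements interface package private protected public static".split(" "));
    const isReserved = (name) => always.has(name);
    const isStrictReserved = (name) => isReserved(name) || strictOnly.has(name);

    isReserved("implements");       // false: usable as an identifier in sloppy code
    isStrictReserved("implements"); // true: still rejected where strict rules apply
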
@@ -691,8 +693,8 @@ function tokenizer($TEXT, filename, html5_comments, shebang) {
             next(true, true);
             S.brace_counter++;
             tok = token(begin ? "template_head" : "template_substitution", content);
-            LATEST_RAW = raw;
-            LATEST_TEMPLATE_END = false;
+            TEMPLATE_RAWS.set(tok, raw);
+            tok.template_end = false;
             return tok;
         }
@@ -708,8 +710,8 @@ function tokenizer($TEXT, filename, html5_comments, shebang) {
         }
         S.template_braces.pop();
         tok = token(begin ? "template_head" : "template_substitution", content);
-        LATEST_RAW = raw;
-        LATEST_TEMPLATE_END = true;
+        TEMPLATE_RAWS.set(tok, raw);
+        tok.template_end = true;
         return tok;
     });
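
Note: the two tokenizer hunks above replace the module-level LATEST_RAW / LATEST_TEMPLATE_END state with per-token storage: each template chunk's raw text is keyed by its token in TEMPLATE_RAWS, and the end-of-template flag moves onto the token itself. A sketch of the shape this produces (simplified token objects, not terser's internals):

    // tokenizing `head${x}tail` yields two template tokens; each carries its
    // own raw text in the map instead of clobbering a shared global:
    //   tok1 = { type: "template_head", value: "head", template_end: false }
    //   tok2 = { type: "template_substitution", value: "tail", template_end: true }
    //   TEMPLATE_RAWS: tok1 -> "head", tok2 -> "tail"
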
@@ -1220,7 +1222,7 @@ function parse($TEXT, options) {
         }
         if (S.token.value == "import" && !is_token(peek(), "punc", "(") && !is_token(peek(), "punc", ".")) {
             next();
-            var node = import_();
+            var node = import_statement();
             semicolon();
             return node;
         }
@@ -1372,7 +1374,7 @@ function parse($TEXT, options) {
           case "export":
             if (!is_token(peek(), "punc", "(")) {
                 next();
-                var node = export_();
+                var node = export_statement();
                 if (is("punc", ";")) semicolon();
                 return node;
             }
@@ -1570,66 +1572,66 @@ function parse($TEXT, options) {
         });
     };
 
-    function track_used_binding_identifiers(is_parameter, strict) {
-        var parameters = new Set();
-        var duplicate = false;
-        var default_assignment = false;
-        var spread = false;
-        var strict_mode = !!strict;
-        var tracker = {
-            add_parameter: function(token) {
-                if (parameters.has(token.value)) {
-                    if (duplicate === false) {
-                        duplicate = token;
-                    }
-                    tracker.check_strict();
-                } else {
-                    parameters.add(token.value);
-                    if (is_parameter) {
-                        switch (token.value) {
-                          case "arguments":
-                          case "eval":
-                          case "yield":
-                            if (strict_mode) {
-                                token_error(token, "Unexpected " + token.value + " identifier as parameter inside strict mode");
-                            }
-                            break;
-                          default:
-                            if (RESERVED_WORDS.has(token.value)) {
-                                unexpected();
-                            }
-                        }
-                    }
-                }
-            },
-            mark_default_assignment: function(token) {
-                if (default_assignment === false) {
-                    default_assignment = token;
-                }
-            },
-            mark_spread: function(token) {
-                if (spread === false) {
-                    spread = token;
-                }
-            },
-            mark_strict_mode: function() {
-                strict_mode = true;
-            },
-            is_strict: function() {
-                return default_assignment !== false || spread !== false || strict_mode;
-            },
-            check_strict: function() {
-                if (tracker.is_strict() && duplicate !== false) {
-                    token_error(duplicate, "Parameter " + duplicate.value + " was used already");
-                }
-            }
-        };
-
-        return tracker;
-    }
+    class UsedParametersTracker {
+        constructor(is_parameter, strict, duplicates_ok = false) {
+            this.is_parameter = is_parameter;
+            this.duplicates_ok = duplicates_ok;
+            this.parameters = new Set();
+            this.duplicate = null;
+            this.default_assignment = false;
+            this.spread = false;
+            this.strict_mode = !!strict;
+        }
+        add_parameter(token) {
+            if (this.parameters.has(token.value)) {
+                if (this.duplicate === null) {
+                    this.duplicate = token;
+                }
+                this.check_strict();
+            } else {
+                this.parameters.add(token.value);
+                if (this.is_parameter) {
+                    switch (token.value) {
+                      case "arguments":
+                      case "eval":
+                      case "yield":
+                        if (this.strict_mode) {
+                            token_error(token, "Unexpected " + token.value + " identifier as parameter inside strict mode");
+                        }
+                        break;
+                      default:
+                        if (RESERVED_WORDS.has(token.value)) {
+                            unexpected();
+                        }
+                    }
+                }
+            }
+        }
+        mark_default_assignment(token) {
+            if (this.default_assignment === false) {
+                this.default_assignment = token;
+            }
+        }
+        mark_spread(token) {
+            if (this.spread === false) {
+                this.spread = token;
+            }
+        }
+        mark_strict_mode() {
+            this.strict_mode = true;
+        }
+        is_strict() {
+            return this.default_assignment !== false || this.spread !== false || this.strict_mode;
+        }
+        check_strict() {
+            if (this.is_strict() && this.duplicate !== null && !this.duplicates_ok) {
+                token_error(this.duplicate, "Parameter " + this.duplicate.value + " was used already");
+            }
+        }
+    }
 
     function parameters(params) {
-        var used_parameters = track_used_binding_identifiers(true, S.input.has_directive("use strict"));
+        var used_parameters = new UsedParametersTracker(true, S.input.has_directive("use strict"));
 
         expect("(");
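
Note: the closure-based tracker becomes a class with the same checks, plus a duplicates_ok escape hatch (see the next hunks). A usage sketch, assuming the class exactly as shown in this hunk and token objects simplified to bare { value } shapes:

    const tracker = new UsedParametersTracker(true, /* strict */ true);
    tracker.add_parameter({ value: "a" });
    tracker.add_parameter({ value: "b" });
    tracker.add_parameter({ value: "a" }); // duplicate while strict:
                                           // token_error "Parameter a was used already"
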
@@ -1653,7 +1655,7 @@ function parse($TEXT, options) {
         var param;
         var expand = false;
         if (used_parameters === undefined) {
-            used_parameters = track_used_binding_identifiers(true, S.input.has_directive("use strict"));
+            used_parameters = new UsedParametersTracker(true, S.input.has_directive("use strict"));
         }
         if (is("expand", "...")) {
            expand = S.token;
@@ -1696,7 +1698,9 @@ function parse($TEXT, options) {
         var expand_token;
         var first_token = S.token;
         if (used_parameters === undefined) {
-            used_parameters = track_used_binding_identifiers(false, S.input.has_directive("use strict"));
+            const strict = S.input.has_directive("use strict");
+            const duplicates_ok = symbol_type === AST_SymbolVar;
+            used_parameters = new UsedParametersTracker(false, strict, duplicates_ok);
         }
         symbol_type = symbol_type === undefined ? AST_SymbolFunarg : symbol_type;
         if (is("punc", "[")) {
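
Note: duplicates_ok is set when the bound symbols are plain var declarations (AST_SymbolVar), presumably because repeating a name in a var binding is legal JavaScript and must not be reported as a duplicate:

    var { a, b: a } = { a: 1, b: 2 }; // binds a twice in one pattern; valid, a === 2
    var { a } = { a: 3 };             // var redeclaration; also valid, even in strict mode
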
@@ -2089,7 +2093,7 @@ function parse($TEXT, options) {
         if (is("punc", "{") || is("punc", "[")) {
             def = new AST_VarDef({
                 start: S.token,
-                name: binding_element(undefined ,sym_type),
+                name: binding_element(undefined, sym_type),
                 value: is("operator", "=") ? (expect_token("operator", "="), expression(false, no_in)) : null,
                 end: prev()
             });
@@ -2363,19 +2367,19 @@ function parse($TEXT, options) {
 
         segments.push(new AST_TemplateSegment({
             start: S.token,
-            raw: LATEST_RAW,
+            raw: TEMPLATE_RAWS.get(S.token),
             value: S.token.value,
             end: S.token
         }));
 
-        while (!LATEST_TEMPLATE_END) {
+        while (!S.token.template_end) {
             next();
             handle_regexp();
             segments.push(expression(true));
 
             segments.push(new AST_TemplateSegment({
                 start: S.token,
-                raw: LATEST_RAW,
+                raw: TEMPLATE_RAWS.get(S.token),
                 value: S.token.value,
                 end: S.token
             }));
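
Note: with the globals gone, the template-literal parser reads each segment's raw text from TEMPLATE_RAWS via the current token and loops until it reaches the token flagged template_end. For example:

    // `x${a}y${b}z` produces segments:
    //   raw "x" (template_head), expression a,
    //   raw "y", expression b,
    //   raw "z" (the token with template_end === true, which stops the loop)
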
@@ -2549,7 +2553,7 @@ function parse($TEXT, options) {
     };
 
     const is_not_method_start = () =>
-        !is("punc", "(") && !is("punc", ",") && !is("punc", "}") && !is("operator", "=");
+        !is("punc", "(") && !is("punc", ",") && !is("punc", "}") && !is("punc", ";") && !is("operator", "=");
 
     var is_async = false;
     var is_static = false;
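
Note: adding ";" to the punctuators that count as a method start appears to let a modifier-like word that is immediately terminated parse as a class field rather than as a dangling modifier, for example:

    class A {
        static;    // a field named "static", not a static modifier
        async;     // likewise a field named "async"
    }
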
@@ -2664,7 +2668,15 @@ function parse($TEXT, options) {
         }
     }
 
-    function import_() {
+    function maybe_import_assertion() {
+        if (is("name", "assert") && !has_newline_before(S.token)) {
+            next();
+            return object_or_destructuring_();
+        }
+        return null;
+    }
+
+    function import_statement() {
         var start = prev();
 
         var imported_name;
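
Note: maybe_import_assertion parses the import-assertion clause (a stage-3 proposal at the time, later reworked into import attributes using the with keyword): an assert keyword on the same line as the module string, followed by an object literal. For example, the updated parser accepts:

    import data from "./data.json" assert { type: "json" };
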
@@ -2687,16 +2699,20 @@ function parse($TEXT, options) {
             unexpected();
         }
         next();
+
+        const assert_clause = maybe_import_assertion();
+
         return new AST_Import({
-            start: start,
-            imported_name: imported_name,
-            imported_names: imported_names,
+            start,
+            imported_name,
+            imported_names,
             module_name: new AST_String({
                 start: mod_str,
                 value: mod_str.value,
                 quote: mod_str.quote,
                 end: mod_str,
             }),
+            assert_clause,
             end: S.token,
         });
     }
@@ -2804,7 +2820,7 @@ function parse($TEXT, options) {
         return names;
     }
 
-    function export_() {
+    function export_statement() {
         var start = S.token;
         var is_default;
         var exported_names;
@@ -2822,6 +2838,8 @@ function parse($TEXT, options) {
             }
             next();
 
+            const assert_clause = maybe_import_assertion();
+
             return new AST_Export({
                 start: start,
                 is_default: is_default,
@@ -2833,6 +2851,7 @@ function parse($TEXT, options) {
                     end: mod_str,
                 }),
                 end: prev(),
+                assert_clause
             });
         } else {
             return new AST_Export({
@@ -2878,6 +2897,7 @@ function parse($TEXT, options) {
             exported_value: exported_value,
             exported_definition: exported_definition,
             end: prev(),
+            assert_clause: null
         });
     }
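
Note: export_statement threads the same assertion clause through re-exports, while export forms without a module source store assert_clause: null. For example:

    export { name } from "./mod.json" assert { type: "json" }; // assert_clause set
    export const x = 1;                                        // assert_clause: null
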
@@ -3322,6 +3342,7 @@ function parse($TEXT, options) {
     } else {
         toplevel = new AST_Toplevel({ start: start, body: body, end: end });
     }
+    TEMPLATE_RAWS = new Map();
     return toplevel;
 })();
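
Note: reassigning TEMPLATE_RAWS once the parse finishes drops every token-to-raw entry from the completed run, presumably so tokens from one parse are not kept alive by the module-level map across repeated parse() calls.
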
@@ -3339,6 +3360,6 @@ export {
     JS_Parse_Error,
     parse,
     PRECEDENCE,
-    RESERVED_WORDS,
+    ALL_RESERVED_WORDS,
     tokenizer,
 };