mirror of
https://github.com/S2-/minifyfromhtml.git
synced 2025-08-03 04:10:04 +02:00
update packages to latest version
This commit is contained in:
101
node_modules/terser/CHANGELOG.md
generated
vendored
101
node_modules/terser/CHANGELOG.md
generated
vendored
@@ -1,5 +1,99 @@
|
||||
# Changelog
|
||||
|
||||
## v5.14.2
|
||||
|
||||
- Security fix for RegExps that should not be evaluated (regexp DDOS)
|
||||
- Source maps improvements (#1211)
|
||||
- Performance improvements in long property access evaluation (#1213)
|
||||
|
||||
## v5.14.1
|
||||
- keep_numbers option added to TypeScript defs (#1208)
|
||||
- Fixed parsing of nested template strings (#1204)
|
||||
|
||||
## v5.14.0
|
||||
- Switched to @jridgewell/source-map for sourcemap generation (#1190, #1181)
|
||||
- Fixed source maps with non-terminated segments (#1106)
|
||||
- Enabled typescript types to be imported from the package (#1194)
|
||||
- Extra DOM props have been added (#1191)
|
||||
- Delete the AST while generating code, as a means to save RAM
|
||||
|
||||
## v5.13.1
|
||||
- Removed self-assignments (`varname=varname`) (closes #1081)
|
||||
- Separated inlining code (for inlining things into references, or removing IIFEs)
|
||||
- Allow multiple identifiers with the same name in `var` destructuring (eg `var { a, a } = x`) (#1176)
|
||||
|
||||
## v5.13.0
|
||||
|
||||
- All calls to eval() were removed (#1171, #1184)
|
||||
- `source-map` was updated to 0.8.0-beta.0 (#1164)
|
||||
- NavigatorUAData was added to domprops to avoid property mangling (#1166)
|
||||
|
||||
## v5.12.1
|
||||
|
||||
- Fixed an issue with function definitions inside blocks (#1155)
|
||||
- Fixed parens of `new` in some situations (closes #1159)
|
||||
|
||||
## v5.12.0
|
||||
|
||||
- `TERSER_DEBUG_DIR` environment variable
|
||||
- @copyright comments are now preserved with the comments="some" option (#1153)
|
||||
|
||||
## v5.11.0
|
||||
|
||||
- Unicode code point escapes (`\u{abcde}`) are not emitted inside RegExp literals anymore (#1147)
|
||||
- acorn is now a regular dependency
|
||||
|
||||
## v5.10.0
|
||||
|
||||
- Massive optimization to max_line_len (#1109)
|
||||
- Basic support for import assertions
|
||||
- Marked ES2022 Object.hasOwn as a pure function
|
||||
- Fix `delete optional?.property`
|
||||
- New CI/CD pipeline with github actions (#1057)
|
||||
- Fix reordering of switch branches (#1092), (#1084)
|
||||
- Fix error when creating a class property called `get`
|
||||
- Acorn dependency is now an optional peerDependency
|
||||
- Fix mangling collision with exported variables (#1072)
|
||||
- Fix an issue with `return someVariable = (async () => { ... })()` (#1073)
|
||||
|
||||
## v5.9.0
|
||||
|
||||
- Collapsing switch cases with the same bodies (even if they're not next to each other) (#1070).
|
||||
- Fix evaluation of optional chain expressions (#1062)
|
||||
- Fix mangling collision in ESM exports (#1063)
|
||||
- Fix issue with mutating function objects after a second pass (#1047)
|
||||
- Fix for inlining object spread `{ ...obj }` (#1071)
|
||||
- Typescript typings fix (#1069)
|
||||
|
||||
## v5.8.0
|
||||
|
||||
- Fixed shadowing variables while moving code in some cases (#1065)
|
||||
- Stop mangling computed & quoted properties when keep_quoted is enabled.
|
||||
- Fix for mangling private getter/setter and .#private access (#1060, #1068)
|
||||
- Array.from has a new optimization when the unsafe option is set (#737)
|
||||
- Mangle/propmangle let you generate your own identifiers through the nth_identifier option (#1061)
|
||||
- More optimizations to switch statements (#1044)
|
||||
|
||||
## v5.7.2
|
||||
|
||||
- Fixed issues with compressing functions defined in `global_defs` option (#1036)
|
||||
- New recipe for using Terser in gulp was added to RECIPES.md (#1035)
|
||||
- Fixed issues with `??` and `?.` (#1045)
|
||||
- Future reserved words such as `package` no longer require you to disable strict mode to be used as names.
|
||||
- Refactored huge compressor file into multiple more focused files.
|
||||
- Avoided unparenthesized `in` operator in some for loops (it breaks parsing because of for..in loops)
|
||||
- Improved documentation (#1021, #1025)
|
||||
- More type definitions (#1021)
|
||||
|
||||
## v5.7.1
|
||||
|
||||
- Avoided collapsing assignments together if it would place a chain assignment on the left hand side, which is invalid syntax (`a?.b = c`)
|
||||
- Removed undefined from object expansions (`{ ...void 0 }` -> `{}`)
|
||||
- Fix crash when checking if something is nullish or undefined (#1009)
|
||||
- Fixed comparison of private class properties (#1015)
|
||||
- Minor performance improvements (#993)
|
||||
- Fixed scope of function defs in strict mode (they are block scoped)
|
||||
|
||||
## v5.7.0
|
||||
|
||||
- Several compile-time evaluation and inlining fixes
|
||||
@@ -118,6 +212,13 @@ Hotfix release, fixes package.json "engines" syntax
|
||||
- Module is now distributed as a dual package - You can `import` and `require()` too.
|
||||
- Inline improvements were made
|
||||
|
||||
|
||||
-----
|
||||
|
||||
## v4.8.1 (backport)
|
||||
|
||||
- Security fix for RegExps that should not be evaluated (regexp DDOS)
|
||||
|
||||
## v4.8.0
|
||||
|
||||
- Support for numeric separators (`million = 1_000_000`) was added.
|
||||
|
65
node_modules/terser/README.md
generated
vendored
65
node_modules/terser/README.md
generated
vendored
@@ -5,7 +5,7 @@
|
||||
[![Travis Build][travis-image]][travis-url]
|
||||
[![Opencollective financial contributors][opencollective-contributors]][opencollective-url]
|
||||
|
||||
A JavaScript parser and mangler/compressor toolkit for ES6+.
|
||||
A JavaScript mangler/compressor toolkit for ES6+.
|
||||
|
||||
*note*: You can support this project on patreon: <a target="_blank" rel="nofollow" href="https://www.patreon.com/fabiosantoscode"><img src="https://c5.patreon.com/external/logo/become_a_patron_button@2x.png" alt="patron" width="100px" height="auto"></a>. Check out [PATRONS.md](https://github.com/terser/terser/blob/master/PATRONS.md) for our first-tier patrons.
|
||||
|
||||
@@ -21,8 +21,8 @@ Find the changelog in [CHANGELOG.md](https://github.com/terser/terser/blob/maste
|
||||
[npm-url]: https://npmjs.org/package/terser
|
||||
[downloads-image]: https://img.shields.io/npm/dm/terser.svg
|
||||
[downloads-url]: https://npmjs.org/package/terser
|
||||
[travis-image]: https://travis-ci.com/terser/terser.svg?branch=master
|
||||
[travis-url]: https://travis-ci.com/terser/terser
|
||||
[travis-image]: https://app.travis-ci.com/terser/terser.svg?branch=master
|
||||
[travis-url]: https://app.travis-ci.com/github/terser/terser
|
||||
[opencollective-contributors]: https://opencollective.com/terser/tiers/badge.svg
|
||||
[opencollective-url]: https://opencollective.com/terser
|
||||
|
||||
@@ -60,6 +60,12 @@ in sequence and apply any compression options. The files are parsed in the
|
||||
same global scope, that is, a reference from a file to some
|
||||
variable/function declared in another file will be matched properly.
|
||||
|
||||
Command line arguments that take options (like --parse, --compress, --mangle and
|
||||
--format) can take in a comma-separated list of default option overrides. For
|
||||
instance:
|
||||
|
||||
terser input.js --compress ecma=2015,computed_props=false
|
||||
|
||||
If no input file is specified, Terser will read from STDIN.
|
||||
|
||||
If you wish to pass your options before the input files, separate the two with
|
||||
@@ -121,8 +127,8 @@ a double dash to prevent input files being used as option arguments:
|
||||
as JSON to STDOUT respectively.
|
||||
--comments [filter] Preserve copyright comments in the output. By
|
||||
default this works like Google Closure, keeping
|
||||
JSDoc-style comments that contain "@license" or
|
||||
"@preserve". You can optionally pass one of the
|
||||
JSDoc-style comments that contain e.g. "@license",
|
||||
or start with "!". You can optionally pass one of the
|
||||
following arguments to this flag:
|
||||
- "all" to keep all comments
|
||||
- `false` to omit comments in the output
|
||||
@@ -530,6 +536,11 @@ try {
|
||||
- `ecma` (default `undefined`) - pass `5`, `2015`, `2016`, etc to override
|
||||
`compress` and `format`'s `ecma` options.
|
||||
|
||||
- `enclose` (default `false`) - pass `true`, or a string in the format
|
||||
of `"args[:values]"`, where `args` and `values` are comma-separated
|
||||
argument names and values, respectively, to embed the output in a big
|
||||
function with the configurable arguments and values.
|
||||
|
||||
- `parse` (default `{}`) — pass an object if you wish to specify some
|
||||
additional [parse options](#parse-options).
|
||||
|
||||
@@ -569,7 +580,7 @@ try {
|
||||
of class names. Pass a regular expression to only keep class names matching that regex.
|
||||
|
||||
- `keep_fnames` (default: `false`) - pass `true` to prevent discarding or mangling
|
||||
of function names. Pass a regular expression to only keep class names matching that regex.
|
||||
of function names. Pass a regular expression to only keep function names matching that regex.
|
||||
Useful for code relying on `Function.prototype.name`. If the top level minify option
|
||||
`keep_classnames` is `undefined` it will be overridden with the value of the top level
|
||||
minify option `keep_fnames`.
|
||||
@@ -602,6 +613,7 @@ try {
|
||||
// source map options
|
||||
},
|
||||
ecma: 5, // specify one of: 5, 2015, 2016, etc.
|
||||
enclose: false, // or specify true, or "args:values"
|
||||
keep_classnames: false,
|
||||
keep_fnames: false,
|
||||
ie8: false,
|
||||
@@ -885,13 +897,19 @@ If you happen to need the source map as a raw object, set `sourceMap.asObject` t
|
||||
See also: the `keep_classnames` [compress option](#compress-options).
|
||||
|
||||
- `keep_fnames` (default `false`) -- Pass `true` to not mangle function names.
|
||||
Pass a regular expression to only keep class names matching that regex.
|
||||
Pass a regular expression to only keep function names matching that regex.
|
||||
Useful for code relying on `Function.prototype.name`. See also: the `keep_fnames`
|
||||
[compress option](#compress-options).
|
||||
|
||||
- `module` (default `false`) -- Pass `true` an ES6 modules, where the toplevel
|
||||
scope is not the global scope. Implies `toplevel`.
|
||||
|
||||
- `nth_identifier` (default: an internal mangler that weights based on character
|
||||
frequency analysis) -- Pass an object with a `get(n)` function that converts an
|
||||
ordinal into the nth most favored (usually shortest) identifier.
|
||||
Optionally also provide `reset()`, `sort()`, and `consider(chars, delta)` to
|
||||
use character frequency analysis of the source code.
|
||||
|
||||
- `reserved` (default `[]`) -- Pass an array of identifiers that should be
|
||||
excluded from mangling. Example: `["foo", "bar"]`.
|
||||
|
||||
@@ -938,6 +956,12 @@ await minify(code, { mangle: { toplevel: true } }).code;
|
||||
- `false` -- `obj["prop"]` is mangled.
|
||||
- `true` -- `obj.prop` is mangled unless there is `obj["prop"]` elsewhere in the code.
|
||||
|
||||
- `nth_identifer` (default: an internal mangler that weights based on character
|
||||
frequency analysis) -- Pass an object with a `get(n)` function that converts an
|
||||
ordinal into the nth most favored (usually shortest) identifier.
|
||||
Optionally also provide `reset()`, `sort()`, and `consider(chars, delta)` to
|
||||
use character frequency analysis of the source code.
|
||||
|
||||
- `regex` (default: `null`) — Pass a [RegExp literal or pattern string](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/RegExp) to only mangle property matching the regular expression.
|
||||
|
||||
- `reserved` (default: `[]`) — Do not mangle property names listed in the
|
||||
@@ -965,8 +989,8 @@ as "output options".
|
||||
statement.
|
||||
|
||||
- `comments` (default `"some"`) -- by default it keeps JSDoc-style comments
|
||||
that contain "@license", "@preserve" or start with `!`, pass `true` or
|
||||
`"all"` to preserve all comments, `false` to omit comments in the output,
|
||||
that contain "@license", "@copyright", "@preserve" or start with `!`, pass `true`
|
||||
or `"all"` to preserve all comments, `false` to omit comments in the output,
|
||||
a regular expression string (e.g. `/^!/`) or a function.
|
||||
|
||||
- `ecma` (default `5`) -- set desired EcmaScript standard version for output.
|
||||
@@ -1038,9 +1062,9 @@ as "output options".
|
||||
### Keeping copyright notices or other comments
|
||||
|
||||
You can pass `--comments` to retain certain comments in the output. By
|
||||
default it will keep JSDoc-style comments that contain "@preserve",
|
||||
"@license" or "@cc_on" (conditional compilation for IE). You can pass
|
||||
`--comments all` to keep all the comments, or a valid JavaScript regexp to
|
||||
default it will keep comments starting with "!" and JSDoc-style comments that
|
||||
contain "@preserve", "@copyright", "@license" or "@cc_on" (conditional compilation for IE).
|
||||
You can pass `--comments all` to keep all the comments, or a valid JavaScript regexp to
|
||||
keep only comments that match this regexp. For example `--comments /^!/`
|
||||
will keep comments like `/*! Copyright Notice */`.
|
||||
|
||||
@@ -1072,6 +1096,7 @@ You might want to try it on your own code; it should reduce the minified size.
|
||||
Some examples of the optimizations made when this option is enabled:
|
||||
|
||||
- `new Array(1, 2, 3)` or `Array(1, 2, 3)` → `[ 1, 2, 3 ]`
|
||||
- `Array.from([1, 2, 3])` → `[1, 2, 3]`
|
||||
- `new Object()` → `{}`
|
||||
- `String(exp)` or `exp.toString()` → `"" + exp`
|
||||
- `new Object/RegExp/Function/Error/Array (...)` → we discard the `new`
|
||||
@@ -1293,6 +1318,22 @@ $ yarn
|
||||
|
||||
In the terser CLI we use [source-map-support](https://npmjs.com/source-map-support) to produce good error stacks. In your own app, you're expected to enable source-map-support (read their docs) to have nice stack traces that will help you write good issues.
|
||||
|
||||
## Obtaining the source code given to Terser
|
||||
|
||||
Because users often don't control the call to `await minify()` or its arguments, Terser provides a `TERSER_DEBUG_DIR` environment variable to make terser output some debug logs. If you're using a bundler or a project that includes a bundler and are not sure what went wrong with your code, pass that variable like so:
|
||||
|
||||
```
|
||||
$ TERSER_DEBUG_DIR=/path/to/logs command-that-uses-terser
|
||||
$ ls /path/to/logs
|
||||
terser-debug-123456.log
|
||||
```
|
||||
|
||||
If you're not sure how to set an environment variable on your shell (the above example works in bash), you can try using cross-env:
|
||||
|
||||
```
|
||||
> npx cross-env TERSER_DEBUG_DIR=/path/to/logs command-that-uses-terser
|
||||
```
|
||||
|
||||
# README.md Patrons:
|
||||
|
||||
*note*: You can support this project on patreon: <a target="_blank" rel="nofollow" href="https://www.patreon.com/fabiosantoscode"><img src="https://c5.patreon.com/external/logo/become_a_patron_button@2x.png" alt="patron" width="100px" height="auto"></a>. Check out [PATRONS.md](https://github.com/terser/terser/blob/master/PATRONS.md) for our first-tier patrons.
|
||||
|
21
node_modules/terser/bin/terser.mjs
generated
vendored
21
node_modules/terser/bin/terser.mjs
generated
vendored
@@ -1,21 +0,0 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
"use strict";
|
||||
|
||||
import "../tools/exit.cjs";
|
||||
|
||||
import fs from "fs"
|
||||
import path from "path"
|
||||
import program from "commander"
|
||||
|
||||
import { run_cli } from "../lib/cli.js"
|
||||
|
||||
const packageJson = {
|
||||
name: "terser",
|
||||
version: "experimental module CLI"
|
||||
}
|
||||
|
||||
run_cli({ program, packageJson, fs, path }).catch((error) => {
|
||||
console.error(error);
|
||||
process.exitCode = 1;
|
||||
});
|
12881
node_modules/terser/dist/bundle.min.js
generated
vendored
12881
node_modules/terser/dist/bundle.min.js
generated
vendored
File diff suppressed because it is too large
Load Diff
2077
node_modules/terser/lib/ast.js
generated
vendored
2077
node_modules/terser/lib/ast.js
generated
vendored
File diff suppressed because it is too large
Load Diff
24
node_modules/terser/lib/cli.js
generated
vendored
24
node_modules/terser/lib/cli.js
generated
vendored
@@ -224,7 +224,7 @@ export async function run_cli({ program, packageJson, fs, path }) {
|
||||
|
||||
let result;
|
||||
try {
|
||||
result = await minify(files, options);
|
||||
result = await minify(files, options, fs);
|
||||
} catch (ex) {
|
||||
if (ex.name == "SyntaxError") {
|
||||
print_error("Parse error at " + ex.filename + ":" + ex.line + "," + ex.col);
|
||||
@@ -264,7 +264,6 @@ export async function run_cli({ program, packageJson, fs, path }) {
|
||||
case "enclosed":
|
||||
return value.length ? value.map(symdef) : undefined;
|
||||
case "variables":
|
||||
case "functions":
|
||||
case "globals":
|
||||
return value.size ? collect_from_map(value, symdef) : undefined;
|
||||
}
|
||||
@@ -277,7 +276,6 @@ export async function run_cli({ program, packageJson, fs, path }) {
|
||||
};
|
||||
if (value.block_scope) {
|
||||
result.variables = value.block_scope.variables;
|
||||
result.functions = value.block_scope.functions;
|
||||
result.enclosed = value.block_scope.enclosed;
|
||||
}
|
||||
value.CTOR.PROPS.forEach(function(prop) {
|
||||
@@ -289,14 +287,18 @@ export async function run_cli({ program, packageJson, fs, path }) {
|
||||
}, 2));
|
||||
} else if (program.output == "spidermonkey") {
|
||||
try {
|
||||
const minified = await minify(result.code, {
|
||||
compress: false,
|
||||
mangle: false,
|
||||
format: {
|
||||
ast: true,
|
||||
code: false
|
||||
}
|
||||
});
|
||||
const minified = await minify(
|
||||
result.code,
|
||||
{
|
||||
compress: false,
|
||||
mangle: false,
|
||||
format: {
|
||||
ast: true,
|
||||
code: false
|
||||
}
|
||||
},
|
||||
fs
|
||||
);
|
||||
console.log(JSON.stringify(minified.ast.to_mozilla_ast(), null, 2));
|
||||
} catch (ex) {
|
||||
fatal(ex);
|
||||
|
344
node_modules/terser/lib/compress/common.js
generated
vendored
Normal file
344
node_modules/terser/lib/compress/common.js
generated
vendored
Normal file
@@ -0,0 +1,344 @@
|
||||
/***********************************************************************
|
||||
|
||||
A JavaScript tokenizer / parser / beautifier / compressor.
|
||||
https://github.com/mishoo/UglifyJS2
|
||||
|
||||
-------------------------------- (C) ---------------------------------
|
||||
|
||||
Author: Mihai Bazon
|
||||
<mihai.bazon@gmail.com>
|
||||
http://mihai.bazon.net/blog
|
||||
|
||||
Distributed under the BSD license:
|
||||
|
||||
Copyright 2012 (c) Mihai Bazon <mihai.bazon@gmail.com>
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions
|
||||
are met:
|
||||
|
||||
* Redistributions of source code must retain the above
|
||||
copyright notice, this list of conditions and the following
|
||||
disclaimer.
|
||||
|
||||
* Redistributions in binary form must reproduce the above
|
||||
copyright notice, this list of conditions and the following
|
||||
disclaimer in the documentation and/or other materials
|
||||
provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER “AS IS” AND ANY
|
||||
EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
||||
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER BE
|
||||
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
|
||||
OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
||||
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
|
||||
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
|
||||
TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
|
||||
THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
|
||||
SUCH DAMAGE.
|
||||
|
||||
***********************************************************************/
|
||||
|
||||
import {
|
||||
AST_Array,
|
||||
AST_Arrow,
|
||||
AST_BlockStatement,
|
||||
AST_Call,
|
||||
AST_Class,
|
||||
AST_Const,
|
||||
AST_Constant,
|
||||
AST_DefClass,
|
||||
AST_Defun,
|
||||
AST_EmptyStatement,
|
||||
AST_Export,
|
||||
AST_False,
|
||||
AST_Function,
|
||||
AST_Import,
|
||||
AST_Infinity,
|
||||
AST_LabeledStatement,
|
||||
AST_Lambda,
|
||||
AST_Let,
|
||||
AST_LoopControl,
|
||||
AST_NaN,
|
||||
AST_Node,
|
||||
AST_Null,
|
||||
AST_Number,
|
||||
AST_Object,
|
||||
AST_ObjectKeyVal,
|
||||
AST_PropAccess,
|
||||
AST_RegExp,
|
||||
AST_Scope,
|
||||
AST_Sequence,
|
||||
AST_SimpleStatement,
|
||||
AST_Statement,
|
||||
AST_String,
|
||||
AST_SymbolRef,
|
||||
AST_True,
|
||||
AST_UnaryPrefix,
|
||||
AST_Undefined,
|
||||
|
||||
TreeWalker,
|
||||
walk,
|
||||
walk_abort,
|
||||
walk_parent,
|
||||
} from "../ast.js";
|
||||
import { make_node, regexp_source_fix, string_template, makePredicate } from "../utils/index.js";
|
||||
import { first_in_statement } from "../utils/first_in_statement.js";
|
||||
import { has_flag, TOP } from "./compressor-flags.js";
|
||||
|
||||
export function merge_sequence(array, node) {
|
||||
if (node instanceof AST_Sequence) {
|
||||
array.push(...node.expressions);
|
||||
} else {
|
||||
array.push(node);
|
||||
}
|
||||
return array;
|
||||
}
|
||||
|
||||
export function make_sequence(orig, expressions) {
|
||||
if (expressions.length == 1) return expressions[0];
|
||||
if (expressions.length == 0) throw new Error("trying to create a sequence with length zero!");
|
||||
return make_node(AST_Sequence, orig, {
|
||||
expressions: expressions.reduce(merge_sequence, [])
|
||||
});
|
||||
}
|
||||
|
||||
export function make_node_from_constant(val, orig) {
|
||||
switch (typeof val) {
|
||||
case "string":
|
||||
return make_node(AST_String, orig, {
|
||||
value: val
|
||||
});
|
||||
case "number":
|
||||
if (isNaN(val)) return make_node(AST_NaN, orig);
|
||||
if (isFinite(val)) {
|
||||
return 1 / val < 0 ? make_node(AST_UnaryPrefix, orig, {
|
||||
operator: "-",
|
||||
expression: make_node(AST_Number, orig, { value: -val })
|
||||
}) : make_node(AST_Number, orig, { value: val });
|
||||
}
|
||||
return val < 0 ? make_node(AST_UnaryPrefix, orig, {
|
||||
operator: "-",
|
||||
expression: make_node(AST_Infinity, orig)
|
||||
}) : make_node(AST_Infinity, orig);
|
||||
case "boolean":
|
||||
return make_node(val ? AST_True : AST_False, orig);
|
||||
case "undefined":
|
||||
return make_node(AST_Undefined, orig);
|
||||
default:
|
||||
if (val === null) {
|
||||
return make_node(AST_Null, orig, { value: null });
|
||||
}
|
||||
if (val instanceof RegExp) {
|
||||
return make_node(AST_RegExp, orig, {
|
||||
value: {
|
||||
source: regexp_source_fix(val.source),
|
||||
flags: val.flags
|
||||
}
|
||||
});
|
||||
}
|
||||
throw new Error(string_template("Can't handle constant of type: {type}", {
|
||||
type: typeof val
|
||||
}));
|
||||
}
|
||||
}
|
||||
|
||||
export function best_of_expression(ast1, ast2) {
|
||||
return ast1.size() > ast2.size() ? ast2 : ast1;
|
||||
}
|
||||
|
||||
export function best_of_statement(ast1, ast2) {
|
||||
return best_of_expression(
|
||||
make_node(AST_SimpleStatement, ast1, {
|
||||
body: ast1
|
||||
}),
|
||||
make_node(AST_SimpleStatement, ast2, {
|
||||
body: ast2
|
||||
})
|
||||
).body;
|
||||
}
|
||||
|
||||
/** Find which node is smaller, and return that */
|
||||
export function best_of(compressor, ast1, ast2) {
|
||||
if (first_in_statement(compressor)) {
|
||||
return best_of_statement(ast1, ast2);
|
||||
} else {
|
||||
return best_of_expression(ast1, ast2);
|
||||
}
|
||||
}
|
||||
|
||||
/** Simplify an object property's key, if possible */
|
||||
export function get_simple_key(key) {
|
||||
if (key instanceof AST_Constant) {
|
||||
return key.getValue();
|
||||
}
|
||||
if (key instanceof AST_UnaryPrefix
|
||||
&& key.operator == "void"
|
||||
&& key.expression instanceof AST_Constant) {
|
||||
return;
|
||||
}
|
||||
return key;
|
||||
}
|
||||
|
||||
export function read_property(obj, key) {
|
||||
key = get_simple_key(key);
|
||||
if (key instanceof AST_Node) return;
|
||||
|
||||
var value;
|
||||
if (obj instanceof AST_Array) {
|
||||
var elements = obj.elements;
|
||||
if (key == "length") return make_node_from_constant(elements.length, obj);
|
||||
if (typeof key == "number" && key in elements) value = elements[key];
|
||||
} else if (obj instanceof AST_Object) {
|
||||
key = "" + key;
|
||||
var props = obj.properties;
|
||||
for (var i = props.length; --i >= 0;) {
|
||||
var prop = props[i];
|
||||
if (!(prop instanceof AST_ObjectKeyVal)) return;
|
||||
if (!value && props[i].key === key) value = props[i].value;
|
||||
}
|
||||
}
|
||||
|
||||
return value instanceof AST_SymbolRef && value.fixed_value() || value;
|
||||
}
|
||||
|
||||
export function has_break_or_continue(loop, parent) {
|
||||
var found = false;
|
||||
var tw = new TreeWalker(function(node) {
|
||||
if (found || node instanceof AST_Scope) return true;
|
||||
if (node instanceof AST_LoopControl && tw.loopcontrol_target(node) === loop) {
|
||||
return found = true;
|
||||
}
|
||||
});
|
||||
if (parent instanceof AST_LabeledStatement) tw.push(parent);
|
||||
tw.push(loop);
|
||||
loop.body.walk(tw);
|
||||
return found;
|
||||
}
|
||||
|
||||
// we shouldn't compress (1,func)(something) to
|
||||
// func(something) because that changes the meaning of
|
||||
// the func (becomes lexical instead of global).
|
||||
export function maintain_this_binding(parent, orig, val) {
|
||||
if (
|
||||
parent instanceof AST_UnaryPrefix && parent.operator == "delete"
|
||||
|| parent instanceof AST_Call && parent.expression === orig
|
||||
&& (
|
||||
val instanceof AST_PropAccess
|
||||
|| val instanceof AST_SymbolRef && val.name == "eval"
|
||||
)
|
||||
) {
|
||||
const zero = make_node(AST_Number, orig, { value: 0 });
|
||||
return make_sequence(orig, [ zero, val ]);
|
||||
} else {
|
||||
return val;
|
||||
}
|
||||
}
|
||||
|
||||
export function is_func_expr(node) {
|
||||
return node instanceof AST_Arrow || node instanceof AST_Function;
|
||||
}
|
||||
|
||||
export function is_iife_call(node) {
|
||||
// Used to determine whether the node can benefit from negation.
|
||||
// Not the case with arrow functions (you need an extra set of parens).
|
||||
if (node.TYPE != "Call") return false;
|
||||
return node.expression instanceof AST_Function || is_iife_call(node.expression);
|
||||
}
|
||||
|
||||
export function is_empty(thing) {
|
||||
if (thing === null) return true;
|
||||
if (thing instanceof AST_EmptyStatement) return true;
|
||||
if (thing instanceof AST_BlockStatement) return thing.body.length == 0;
|
||||
return false;
|
||||
}
|
||||
|
||||
export const identifier_atom = makePredicate("Infinity NaN undefined");
|
||||
export function is_identifier_atom(node) {
|
||||
return node instanceof AST_Infinity
|
||||
|| node instanceof AST_NaN
|
||||
|| node instanceof AST_Undefined;
|
||||
}
|
||||
|
||||
/** Check if this is a SymbolRef node which has one def of a certain AST type */
|
||||
export function is_ref_of(ref, type) {
|
||||
if (!(ref instanceof AST_SymbolRef)) return false;
|
||||
var orig = ref.definition().orig;
|
||||
for (var i = orig.length; --i >= 0;) {
|
||||
if (orig[i] instanceof type) return true;
|
||||
}
|
||||
}
|
||||
|
||||
// Can we turn { block contents... } into just the block contents ?
|
||||
// Not if one of these is inside.
|
||||
export function can_be_evicted_from_block(node) {
|
||||
return !(
|
||||
node instanceof AST_DefClass ||
|
||||
node instanceof AST_Defun ||
|
||||
node instanceof AST_Let ||
|
||||
node instanceof AST_Const ||
|
||||
node instanceof AST_Export ||
|
||||
node instanceof AST_Import
|
||||
);
|
||||
}
|
||||
|
||||
export function as_statement_array(thing) {
|
||||
if (thing === null) return [];
|
||||
if (thing instanceof AST_BlockStatement) return thing.body;
|
||||
if (thing instanceof AST_EmptyStatement) return [];
|
||||
if (thing instanceof AST_Statement) return [ thing ];
|
||||
throw new Error("Can't convert thing to statement array");
|
||||
}
|
||||
|
||||
export function is_reachable(scope_node, defs) {
|
||||
const find_ref = node => {
|
||||
if (node instanceof AST_SymbolRef && defs.includes(node.definition())) {
|
||||
return walk_abort;
|
||||
}
|
||||
};
|
||||
|
||||
return walk_parent(scope_node, (node, info) => {
|
||||
if (node instanceof AST_Scope && node !== scope_node) {
|
||||
var parent = info.parent();
|
||||
|
||||
if (
|
||||
parent instanceof AST_Call
|
||||
&& parent.expression === node
|
||||
// Async/Generators aren't guaranteed to sync evaluate all of
|
||||
// their body steps, so it's possible they close over the variable.
|
||||
&& !(node.async || node.is_generator)
|
||||
) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (walk(node, find_ref)) return walk_abort;
|
||||
|
||||
return true;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/** Check if a ref refers to the name of a function/class it's defined within */
|
||||
export function is_recursive_ref(compressor, def) {
|
||||
var node;
|
||||
for (var i = 0; node = compressor.parent(i); i++) {
|
||||
if (node instanceof AST_Lambda || node instanceof AST_Class) {
|
||||
var name = node.name;
|
||||
if (name && name.definition() === def) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
// TODO this only works with AST_Defun, shouldn't it work for other ways of defining functions?
|
||||
export function retain_top_func(fn, compressor) {
|
||||
return compressor.top_retain
|
||||
&& fn instanceof AST_Defun
|
||||
&& has_flag(fn, TOP)
|
||||
&& fn.name
|
||||
&& compressor.top_retain(fn.name);
|
||||
}
|
63
node_modules/terser/lib/compress/compressor-flags.js
generated
vendored
Normal file
63
node_modules/terser/lib/compress/compressor-flags.js
generated
vendored
Normal file
@@ -0,0 +1,63 @@
|
||||
/***********************************************************************
|
||||
|
||||
A JavaScript tokenizer / parser / beautifier / compressor.
|
||||
https://github.com/mishoo/UglifyJS2
|
||||
|
||||
-------------------------------- (C) ---------------------------------
|
||||
|
||||
Author: Mihai Bazon
|
||||
<mihai.bazon@gmail.com>
|
||||
http://mihai.bazon.net/blog
|
||||
|
||||
Distributed under the BSD license:
|
||||
|
||||
Copyright 2012 (c) Mihai Bazon <mihai.bazon@gmail.com>
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions
|
||||
are met:
|
||||
|
||||
* Redistributions of source code must retain the above
|
||||
copyright notice, this list of conditions and the following
|
||||
disclaimer.
|
||||
|
||||
* Redistributions in binary form must reproduce the above
|
||||
copyright notice, this list of conditions and the following
|
||||
disclaimer in the documentation and/or other materials
|
||||
provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER “AS IS” AND ANY
|
||||
EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
||||
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER BE
|
||||
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
|
||||
OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
||||
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
|
||||
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
|
||||
TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
|
||||
THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
|
||||
SUCH DAMAGE.
|
||||
|
||||
***********************************************************************/
|
||||
|
||||
// bitfield flags to be stored in node.flags.
|
||||
// These are set and unset during compression, and store information in the node without requiring multiple fields.
|
||||
export const UNUSED = 0b00000001;
|
||||
export const TRUTHY = 0b00000010;
|
||||
export const FALSY = 0b00000100;
|
||||
export const UNDEFINED = 0b00001000;
|
||||
export const INLINED = 0b00010000;
|
||||
|
||||
// Nodes to which values are ever written. Used when keep_assign is part of the unused option string.
|
||||
export const WRITE_ONLY = 0b00100000;
|
||||
|
||||
// information specific to a single compression pass
|
||||
export const SQUEEZED = 0b0000000100000000;
|
||||
export const OPTIMIZED = 0b0000001000000000;
|
||||
export const TOP = 0b0000010000000000;
|
||||
export const CLEAR_BETWEEN_PASSES = SQUEEZED | OPTIMIZED | TOP;
|
||||
|
||||
export const has_flag = (node, flag) => node.flags & flag;
|
||||
export const set_flag = (node, flag) => { node.flags |= flag; };
|
||||
export const clear_flag = (node, flag) => { node.flags &= ~flag; };
|
350
node_modules/terser/lib/compress/drop-side-effect-free.js
generated
vendored
Normal file
350
node_modules/terser/lib/compress/drop-side-effect-free.js
generated
vendored
Normal file
@@ -0,0 +1,350 @@
|
||||
/***********************************************************************
|
||||
|
||||
A JavaScript tokenizer / parser / beautifier / compressor.
|
||||
https://github.com/mishoo/UglifyJS2
|
||||
|
||||
-------------------------------- (C) ---------------------------------
|
||||
|
||||
Author: Mihai Bazon
|
||||
<mihai.bazon@gmail.com>
|
||||
http://mihai.bazon.net/blog
|
||||
|
||||
Distributed under the BSD license:
|
||||
|
||||
Copyright 2012 (c) Mihai Bazon <mihai.bazon@gmail.com>
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions
|
||||
are met:
|
||||
|
||||
* Redistributions of source code must retain the above
|
||||
copyright notice, this list of conditions and the following
|
||||
disclaimer.
|
||||
|
||||
* Redistributions in binary form must reproduce the above
|
||||
copyright notice, this list of conditions and the following
|
||||
disclaimer in the documentation and/or other materials
|
||||
provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER “AS IS” AND ANY
|
||||
EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
||||
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER BE
|
||||
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
|
||||
OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
||||
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
|
||||
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
|
||||
TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
|
||||
THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
|
||||
SUCH DAMAGE.
|
||||
|
||||
***********************************************************************/
|
||||
|
||||
import {
|
||||
AST_Accessor,
|
||||
AST_Array,
|
||||
AST_Arrow,
|
||||
AST_Assign,
|
||||
AST_Binary,
|
||||
AST_Call,
|
||||
AST_Chain,
|
||||
AST_Class,
|
||||
AST_ClassProperty,
|
||||
AST_ConciseMethod,
|
||||
AST_Conditional,
|
||||
AST_Constant,
|
||||
AST_Dot,
|
||||
AST_Expansion,
|
||||
AST_Function,
|
||||
AST_Node,
|
||||
AST_Number,
|
||||
AST_Object,
|
||||
AST_ObjectGetter,
|
||||
AST_ObjectKeyVal,
|
||||
AST_ObjectProperty,
|
||||
AST_ObjectSetter,
|
||||
AST_PropAccess,
|
||||
AST_Scope,
|
||||
AST_Sequence,
|
||||
AST_Sub,
|
||||
AST_SymbolRef,
|
||||
AST_TemplateSegment,
|
||||
AST_TemplateString,
|
||||
AST_This,
|
||||
AST_Unary,
|
||||
} from "../ast.js";
|
||||
import { make_node, return_null, return_this } from "../utils/index.js";
|
||||
import { first_in_statement } from "../utils/first_in_statement.js";
|
||||
|
||||
import { pure_prop_access_globals } from "./native-objects.js";
|
||||
import { lazy_op, unary_side_effects, is_nullish_shortcircuited } from "./inference.js";
|
||||
import { WRITE_ONLY, set_flag, clear_flag } from "./compressor-flags.js";
|
||||
import { make_sequence, is_func_expr, is_iife_call } from "./common.js";
|
||||
|
||||
// AST_Node#drop_side_effect_free() gets called when we don't care about the value,
|
||||
// only about side effects. We'll be defining this method for each node type in this module
|
||||
//
|
||||
// Examples:
|
||||
// foo++ -> foo++
|
||||
// 1 + func() -> func()
|
||||
// 10 -> (nothing)
|
||||
// knownPureFunc(foo++) -> foo++
|
||||
|
||||
function def_drop_side_effect_free(node, func) {
|
||||
node.DEFMETHOD("drop_side_effect_free", func);
|
||||
}
|
||||
|
||||
// Drop side-effect-free elements from an array of expressions.
|
||||
// Returns an array of expressions with side-effects or null
|
||||
// if all elements were dropped. Note: original array may be
|
||||
// returned if nothing changed.
|
||||
function trim(nodes, compressor, first_in_statement) {
|
||||
var len = nodes.length;
|
||||
if (!len) return null;
|
||||
|
||||
var ret = [], changed = false;
|
||||
for (var i = 0; i < len; i++) {
|
||||
var node = nodes[i].drop_side_effect_free(compressor, first_in_statement);
|
||||
changed |= node !== nodes[i];
|
||||
if (node) {
|
||||
ret.push(node);
|
||||
first_in_statement = false;
|
||||
}
|
||||
}
|
||||
return changed ? ret.length ? ret : null : nodes;
|
||||
}
|
||||
|
||||
def_drop_side_effect_free(AST_Node, return_this);
|
||||
def_drop_side_effect_free(AST_Constant, return_null);
|
||||
def_drop_side_effect_free(AST_This, return_null);
|
||||
|
||||
def_drop_side_effect_free(AST_Call, function (compressor, first_in_statement) {
|
||||
if (is_nullish_shortcircuited(this, compressor)) {
|
||||
return this.expression.drop_side_effect_free(compressor, first_in_statement);
|
||||
}
|
||||
|
||||
if (!this.is_callee_pure(compressor)) {
|
||||
if (this.expression.is_call_pure(compressor)) {
|
||||
var exprs = this.args.slice();
|
||||
exprs.unshift(this.expression.expression);
|
||||
exprs = trim(exprs, compressor, first_in_statement);
|
||||
return exprs && make_sequence(this, exprs);
|
||||
}
|
||||
if (is_func_expr(this.expression)
|
||||
&& (!this.expression.name || !this.expression.name.definition().references.length)) {
|
||||
var node = this.clone();
|
||||
node.expression.process_expression(false, compressor);
|
||||
return node;
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
var args = trim(this.args, compressor, first_in_statement);
|
||||
return args && make_sequence(this, args);
|
||||
});
|
||||
|
||||
def_drop_side_effect_free(AST_Accessor, return_null);
|
||||
|
||||
def_drop_side_effect_free(AST_Function, return_null);
|
||||
|
||||
def_drop_side_effect_free(AST_Arrow, return_null);
|
||||
|
||||
def_drop_side_effect_free(AST_Class, function (compressor) {
|
||||
const with_effects = [];
|
||||
const trimmed_extends = this.extends && this.extends.drop_side_effect_free(compressor);
|
||||
if (trimmed_extends)
|
||||
with_effects.push(trimmed_extends);
|
||||
for (const prop of this.properties) {
|
||||
const trimmed_prop = prop.drop_side_effect_free(compressor);
|
||||
if (trimmed_prop)
|
||||
with_effects.push(trimmed_prop);
|
||||
}
|
||||
if (!with_effects.length)
|
||||
return null;
|
||||
return make_sequence(this, with_effects);
|
||||
});
|
||||
|
||||
def_drop_side_effect_free(AST_Binary, function (compressor, first_in_statement) {
|
||||
var right = this.right.drop_side_effect_free(compressor);
|
||||
if (!right)
|
||||
return this.left.drop_side_effect_free(compressor, first_in_statement);
|
||||
if (lazy_op.has(this.operator)) {
|
||||
if (right === this.right)
|
||||
return this;
|
||||
var node = this.clone();
|
||||
node.right = right;
|
||||
return node;
|
||||
} else {
|
||||
var left = this.left.drop_side_effect_free(compressor, first_in_statement);
|
||||
if (!left)
|
||||
return this.right.drop_side_effect_free(compressor, first_in_statement);
|
||||
return make_sequence(this, [left, right]);
|
||||
}
|
||||
});
|
||||
|
||||
def_drop_side_effect_free(AST_Assign, function (compressor) {
|
||||
if (this.logical)
|
||||
return this;
|
||||
|
||||
var left = this.left;
|
||||
if (left.has_side_effects(compressor)
|
||||
|| compressor.has_directive("use strict")
|
||||
&& left instanceof AST_PropAccess
|
||||
&& left.expression.is_constant()) {
|
||||
return this;
|
||||
}
|
||||
set_flag(this, WRITE_ONLY);
|
||||
while (left instanceof AST_PropAccess) {
|
||||
left = left.expression;
|
||||
}
|
||||
if (left.is_constant_expression(compressor.find_parent(AST_Scope))) {
|
||||
return this.right.drop_side_effect_free(compressor);
|
||||
}
|
||||
return this;
|
||||
});
|
||||
|
||||
def_drop_side_effect_free(AST_Conditional, function (compressor) {
|
||||
var consequent = this.consequent.drop_side_effect_free(compressor);
|
||||
var alternative = this.alternative.drop_side_effect_free(compressor);
|
||||
if (consequent === this.consequent && alternative === this.alternative)
|
||||
return this;
|
||||
if (!consequent)
|
||||
return alternative ? make_node(AST_Binary, this, {
|
||||
operator: "||",
|
||||
left: this.condition,
|
||||
right: alternative
|
||||
}) : this.condition.drop_side_effect_free(compressor);
|
||||
if (!alternative)
|
||||
return make_node(AST_Binary, this, {
|
||||
operator: "&&",
|
||||
left: this.condition,
|
||||
right: consequent
|
||||
});
|
||||
var node = this.clone();
|
||||
node.consequent = consequent;
|
||||
node.alternative = alternative;
|
||||
return node;
|
||||
});
|
||||
|
||||
def_drop_side_effect_free(AST_Unary, function (compressor, first_in_statement) {
|
||||
if (unary_side_effects.has(this.operator)) {
|
||||
if (!this.expression.has_side_effects(compressor)) {
|
||||
set_flag(this, WRITE_ONLY);
|
||||
} else {
|
||||
clear_flag(this, WRITE_ONLY);
|
||||
}
|
||||
return this;
|
||||
}
|
||||
if (this.operator == "typeof" && this.expression instanceof AST_SymbolRef)
|
||||
return null;
|
||||
var expression = this.expression.drop_side_effect_free(compressor, first_in_statement);
|
||||
if (first_in_statement && expression && is_iife_call(expression)) {
|
||||
if (expression === this.expression && this.operator == "!")
|
||||
return this;
|
||||
return expression.negate(compressor, first_in_statement);
|
||||
}
|
||||
return expression;
|
||||
});
|
||||
|
||||
def_drop_side_effect_free(AST_SymbolRef, function (compressor) {
|
||||
const safe_access = this.is_declared(compressor)
|
||||
|| pure_prop_access_globals.has(this.name);
|
||||
return safe_access ? null : this;
|
||||
});
|
||||
|
||||
def_drop_side_effect_free(AST_Object, function (compressor, first_in_statement) {
|
||||
var values = trim(this.properties, compressor, first_in_statement);
|
||||
return values && make_sequence(this, values);
|
||||
});
|
||||
|
||||
def_drop_side_effect_free(AST_ObjectProperty, function (compressor, first_in_statement) {
|
||||
const computed_key = this instanceof AST_ObjectKeyVal && this.key instanceof AST_Node;
|
||||
const key = computed_key && this.key.drop_side_effect_free(compressor, first_in_statement);
|
||||
const value = this.value && this.value.drop_side_effect_free(compressor, first_in_statement);
|
||||
if (key && value) {
|
||||
return make_sequence(this, [key, value]);
|
||||
}
|
||||
return key || value;
|
||||
});
|
||||
|
||||
def_drop_side_effect_free(AST_ClassProperty, function (compressor) {
|
||||
const key = this.computed_key() && this.key.drop_side_effect_free(compressor);
|
||||
|
||||
const value = this.static && this.value
|
||||
&& this.value.drop_side_effect_free(compressor);
|
||||
|
||||
if (key && value)
|
||||
return make_sequence(this, [key, value]);
|
||||
return key || value || null;
|
||||
});
|
||||
|
||||
def_drop_side_effect_free(AST_ConciseMethod, function () {
|
||||
return this.computed_key() ? this.key : null;
|
||||
});
|
||||
|
||||
def_drop_side_effect_free(AST_ObjectGetter, function () {
|
||||
return this.computed_key() ? this.key : null;
|
||||
});
|
||||
|
||||
def_drop_side_effect_free(AST_ObjectSetter, function () {
|
||||
return this.computed_key() ? this.key : null;
|
||||
});
|
||||
|
||||
def_drop_side_effect_free(AST_Array, function (compressor, first_in_statement) {
|
||||
var values = trim(this.elements, compressor, first_in_statement);
|
||||
return values && make_sequence(this, values);
|
||||
});
|
||||
|
||||
def_drop_side_effect_free(AST_Dot, function (compressor, first_in_statement) {
|
||||
if (is_nullish_shortcircuited(this, compressor)) {
|
||||
return this.expression.drop_side_effect_free(compressor, first_in_statement);
|
||||
}
|
||||
if (this.expression.may_throw_on_access(compressor)) return this;
|
||||
|
||||
return this.expression.drop_side_effect_free(compressor, first_in_statement);
|
||||
});
|
||||
|
||||
def_drop_side_effect_free(AST_Sub, function (compressor, first_in_statement) {
|
||||
if (is_nullish_shortcircuited(this, compressor)) {
|
||||
return this.expression.drop_side_effect_free(compressor, first_in_statement);
|
||||
}
|
||||
if (this.expression.may_throw_on_access(compressor)) return this;
|
||||
|
||||
var expression = this.expression.drop_side_effect_free(compressor, first_in_statement);
|
||||
if (!expression)
|
||||
return this.property.drop_side_effect_free(compressor, first_in_statement);
|
||||
var property = this.property.drop_side_effect_free(compressor);
|
||||
if (!property)
|
||||
return expression;
|
||||
return make_sequence(this, [expression, property]);
|
||||
});
|
||||
|
||||
def_drop_side_effect_free(AST_Chain, function (compressor, first_in_statement) {
|
||||
return this.expression.drop_side_effect_free(compressor, first_in_statement);
|
||||
});
|
||||
|
||||
def_drop_side_effect_free(AST_Sequence, function (compressor) {
|
||||
var last = this.tail_node();
|
||||
var expr = last.drop_side_effect_free(compressor);
|
||||
if (expr === last)
|
||||
return this;
|
||||
var expressions = this.expressions.slice(0, -1);
|
||||
if (expr)
|
||||
expressions.push(expr);
|
||||
if (!expressions.length) {
|
||||
return make_node(AST_Number, this, { value: 0 });
|
||||
}
|
||||
return make_sequence(this, expressions);
|
||||
});
|
||||
|
||||
def_drop_side_effect_free(AST_Expansion, function (compressor, first_in_statement) {
|
||||
return this.expression.drop_side_effect_free(compressor, first_in_statement);
|
||||
});
|
||||
|
||||
def_drop_side_effect_free(AST_TemplateSegment, return_null);
|
||||
|
||||
def_drop_side_effect_free(AST_TemplateString, function (compressor) {
|
||||
var values = trim(this.segments, compressor, first_in_statement);
|
||||
return values && make_sequence(this, values);
|
||||
});
|
462
node_modules/terser/lib/compress/evaluate.js
generated
vendored
Normal file
462
node_modules/terser/lib/compress/evaluate.js
generated
vendored
Normal file
@@ -0,0 +1,462 @@
|
||||
/***********************************************************************
|
||||
|
||||
A JavaScript tokenizer / parser / beautifier / compressor.
|
||||
https://github.com/mishoo/UglifyJS2
|
||||
|
||||
-------------------------------- (C) ---------------------------------
|
||||
|
||||
Author: Mihai Bazon
|
||||
<mihai.bazon@gmail.com>
|
||||
http://mihai.bazon.net/blog
|
||||
|
||||
Distributed under the BSD license:
|
||||
|
||||
Copyright 2012 (c) Mihai Bazon <mihai.bazon@gmail.com>
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions
|
||||
are met:
|
||||
|
||||
* Redistributions of source code must retain the above
|
||||
copyright notice, this list of conditions and the following
|
||||
disclaimer.
|
||||
|
||||
* Redistributions in binary form must reproduce the above
|
||||
copyright notice, this list of conditions and the following
|
||||
disclaimer in the documentation and/or other materials
|
||||
provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER “AS IS” AND ANY
|
||||
EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
||||
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER BE
|
||||
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
|
||||
OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
||||
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
|
||||
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
|
||||
TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
|
||||
THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
|
||||
SUCH DAMAGE.
|
||||
|
||||
***********************************************************************/
|
||||
|
||||
import {
|
||||
HOP,
|
||||
makePredicate,
|
||||
return_this,
|
||||
string_template,
|
||||
regexp_source_fix,
|
||||
regexp_is_safe,
|
||||
} from "../utils/index.js";
|
||||
import {
|
||||
AST_Array,
|
||||
AST_BigInt,
|
||||
AST_Binary,
|
||||
AST_Call,
|
||||
AST_Chain,
|
||||
AST_Class,
|
||||
AST_Conditional,
|
||||
AST_Constant,
|
||||
AST_Dot,
|
||||
AST_Expansion,
|
||||
AST_Function,
|
||||
AST_Lambda,
|
||||
AST_New,
|
||||
AST_Node,
|
||||
AST_Object,
|
||||
AST_PropAccess,
|
||||
AST_RegExp,
|
||||
AST_Statement,
|
||||
AST_Symbol,
|
||||
AST_SymbolRef,
|
||||
AST_TemplateString,
|
||||
AST_UnaryPrefix,
|
||||
AST_With,
|
||||
} from "../ast.js";
|
||||
import { is_undeclared_ref} from "./inference.js";
|
||||
import { is_pure_native_value, is_pure_native_fn, is_pure_native_method } from "./native-objects.js";
|
||||
|
||||
// methods to evaluate a constant expression
|
||||
|
||||
function def_eval(node, func) {
|
||||
node.DEFMETHOD("_eval", func);
|
||||
}
|
||||
|
||||
// Used to propagate a nullish short-circuit signal upwards through the chain.
|
||||
export const nullish = Symbol("This AST_Chain is nullish");
|
||||
|
||||
// If the node has been successfully reduced to a constant,
|
||||
// then its value is returned; otherwise the element itself
|
||||
// is returned.
|
||||
// They can be distinguished as constant value is never a
|
||||
// descendant of AST_Node.
|
||||
AST_Node.DEFMETHOD("evaluate", function (compressor) {
|
||||
if (!compressor.option("evaluate"))
|
||||
return this;
|
||||
var val = this._eval(compressor, 1);
|
||||
if (!val || val instanceof RegExp)
|
||||
return val;
|
||||
if (typeof val == "function" || typeof val == "object" || val == nullish)
|
||||
return this;
|
||||
return val;
|
||||
});
|
||||
|
||||
var unaryPrefix = makePredicate("! ~ - + void");
|
||||
AST_Node.DEFMETHOD("is_constant", function () {
|
||||
// Accomodate when compress option evaluate=false
|
||||
// as well as the common constant expressions !0 and -1
|
||||
if (this instanceof AST_Constant) {
|
||||
return !(this instanceof AST_RegExp);
|
||||
} else {
|
||||
return this instanceof AST_UnaryPrefix
|
||||
&& this.expression instanceof AST_Constant
|
||||
&& unaryPrefix.has(this.operator);
|
||||
}
|
||||
});
|
||||
|
||||
def_eval(AST_Statement, function () {
|
||||
throw new Error(string_template("Cannot evaluate a statement [{file}:{line},{col}]", this.start));
|
||||
});
|
||||
|
||||
def_eval(AST_Lambda, return_this);
|
||||
def_eval(AST_Class, return_this);
|
||||
def_eval(AST_Node, return_this);
|
||||
def_eval(AST_Constant, function () {
|
||||
return this.getValue();
|
||||
});
|
||||
|
||||
def_eval(AST_BigInt, return_this);
|
||||
|
||||
def_eval(AST_RegExp, function (compressor) {
|
||||
let evaluated = compressor.evaluated_regexps.get(this.value);
|
||||
if (evaluated === undefined && regexp_is_safe(this.value.source)) {
|
||||
try {
|
||||
const { source, flags } = this.value;
|
||||
evaluated = new RegExp(source, flags);
|
||||
} catch (e) {
|
||||
evaluated = null;
|
||||
}
|
||||
compressor.evaluated_regexps.set(this.value, evaluated);
|
||||
}
|
||||
return evaluated || this;
|
||||
});
|
||||
|
||||
def_eval(AST_TemplateString, function () {
|
||||
if (this.segments.length !== 1) return this;
|
||||
return this.segments[0].value;
|
||||
});
|
||||
|
||||
def_eval(AST_Function, function (compressor) {
|
||||
if (compressor.option("unsafe")) {
|
||||
var fn = function () { };
|
||||
fn.node = this;
|
||||
fn.toString = () => this.print_to_string();
|
||||
return fn;
|
||||
}
|
||||
return this;
|
||||
});
|
||||
|
||||
def_eval(AST_Array, function (compressor, depth) {
|
||||
if (compressor.option("unsafe")) {
|
||||
var elements = [];
|
||||
for (var i = 0, len = this.elements.length; i < len; i++) {
|
||||
var element = this.elements[i];
|
||||
var value = element._eval(compressor, depth);
|
||||
if (element === value)
|
||||
return this;
|
||||
elements.push(value);
|
||||
}
|
||||
return elements;
|
||||
}
|
||||
return this;
|
||||
});
|
||||
|
||||
def_eval(AST_Object, function (compressor, depth) {
|
||||
if (compressor.option("unsafe")) {
|
||||
var val = {};
|
||||
for (var i = 0, len = this.properties.length; i < len; i++) {
|
||||
var prop = this.properties[i];
|
||||
if (prop instanceof AST_Expansion)
|
||||
return this;
|
||||
var key = prop.key;
|
||||
if (key instanceof AST_Symbol) {
|
||||
key = key.name;
|
||||
} else if (key instanceof AST_Node) {
|
||||
key = key._eval(compressor, depth);
|
||||
if (key === prop.key)
|
||||
return this;
|
||||
}
|
||||
if (typeof Object.prototype[key] === "function") {
|
||||
return this;
|
||||
}
|
||||
if (prop.value instanceof AST_Function)
|
||||
continue;
|
||||
val[key] = prop.value._eval(compressor, depth);
|
||||
if (val[key] === prop.value)
|
||||
return this;
|
||||
}
|
||||
return val;
|
||||
}
|
||||
return this;
|
||||
});
|
||||
|
||||
var non_converting_unary = makePredicate("! typeof void");
|
||||
def_eval(AST_UnaryPrefix, function (compressor, depth) {
|
||||
var e = this.expression;
|
||||
// Function would be evaluated to an array and so typeof would
|
||||
// incorrectly return 'object'. Hence making is a special case.
|
||||
if (compressor.option("typeofs")
|
||||
&& this.operator == "typeof"
|
||||
&& (e instanceof AST_Lambda
|
||||
|| e instanceof AST_SymbolRef
|
||||
&& e.fixed_value() instanceof AST_Lambda)) {
|
||||
return typeof function () { };
|
||||
}
|
||||
if (!non_converting_unary.has(this.operator))
|
||||
depth++;
|
||||
e = e._eval(compressor, depth);
|
||||
if (e === this.expression)
|
||||
return this;
|
||||
switch (this.operator) {
|
||||
case "!": return !e;
|
||||
case "typeof":
|
||||
// typeof <RegExp> returns "object" or "function" on different platforms
|
||||
// so cannot evaluate reliably
|
||||
if (e instanceof RegExp)
|
||||
return this;
|
||||
return typeof e;
|
||||
case "void": return void e;
|
||||
case "~": return ~e;
|
||||
case "-": return -e;
|
||||
case "+": return +e;
|
||||
}
|
||||
return this;
|
||||
});
|
||||
|
||||
var non_converting_binary = makePredicate("&& || ?? === !==");
|
||||
const identity_comparison = makePredicate("== != === !==");
|
||||
const has_identity = value => typeof value === "object"
|
||||
|| typeof value === "function"
|
||||
|| typeof value === "symbol";
|
||||
|
||||
def_eval(AST_Binary, function (compressor, depth) {
|
||||
if (!non_converting_binary.has(this.operator))
|
||||
depth++;
|
||||
|
||||
var left = this.left._eval(compressor, depth);
|
||||
if (left === this.left)
|
||||
return this;
|
||||
var right = this.right._eval(compressor, depth);
|
||||
if (right === this.right)
|
||||
return this;
|
||||
var result;
|
||||
|
||||
if (left != null
|
||||
&& right != null
|
||||
&& identity_comparison.has(this.operator)
|
||||
&& has_identity(left)
|
||||
&& has_identity(right)
|
||||
&& typeof left === typeof right) {
|
||||
// Do not compare by reference
|
||||
return this;
|
||||
}
|
||||
|
||||
switch (this.operator) {
|
||||
case "&&": result = left && right; break;
|
||||
case "||": result = left || right; break;
|
||||
case "??": result = left != null ? left : right; break;
|
||||
case "|": result = left | right; break;
|
||||
case "&": result = left & right; break;
|
||||
case "^": result = left ^ right; break;
|
||||
case "+": result = left + right; break;
|
||||
case "*": result = left * right; break;
|
||||
case "**": result = Math.pow(left, right); break;
|
||||
case "/": result = left / right; break;
|
||||
case "%": result = left % right; break;
|
||||
case "-": result = left - right; break;
|
||||
case "<<": result = left << right; break;
|
||||
case ">>": result = left >> right; break;
|
||||
case ">>>": result = left >>> right; break;
|
||||
case "==": result = left == right; break;
|
||||
case "===": result = left === right; break;
|
||||
case "!=": result = left != right; break;
|
||||
case "!==": result = left !== right; break;
|
||||
case "<": result = left < right; break;
|
||||
case "<=": result = left <= right; break;
|
||||
case ">": result = left > right; break;
|
||||
case ">=": result = left >= right; break;
|
||||
default:
|
||||
return this;
|
||||
}
|
||||
if (isNaN(result) && compressor.find_parent(AST_With)) {
|
||||
// leave original expression as is
|
||||
return this;
|
||||
}
|
||||
return result;
|
||||
});
|
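// A minimal sketch of the binary evaluator's guards above:
//
//   // 1 + 2 === 3  -> both sides fold to primitives, so the comparison can
//   //                 fold to `true`
//   // [] === []    -> left alone: both sides have identity (objects), and the
//   //                 evaluator refuses to compare by reference
//   // A folded result of NaN is kept as the original expression inside a
//   // `with` block, since the `with` scope could rebind the operands.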
||||
|
||||
def_eval(AST_Conditional, function (compressor, depth) {
|
||||
var condition = this.condition._eval(compressor, depth);
|
||||
if (condition === this.condition)
|
||||
return this;
|
||||
var node = condition ? this.consequent : this.alternative;
|
||||
var value = node._eval(compressor, depth);
|
||||
return value === node ? this : value;
|
||||
});
|
||||
|
||||
// Set of AST_SymbolRef which are currently being evaluated.
|
||||
// Avoids infinite recursion of ._eval()
|
||||
const reentrant_ref_eval = new Set();
|
||||
def_eval(AST_SymbolRef, function (compressor, depth) {
|
||||
if (reentrant_ref_eval.has(this))
|
||||
return this;
|
||||
|
||||
var fixed = this.fixed_value();
|
||||
if (!fixed)
|
||||
return this;
|
||||
|
||||
reentrant_ref_eval.add(this);
|
||||
const value = fixed._eval(compressor, depth);
|
||||
reentrant_ref_eval.delete(this);
|
||||
|
||||
if (value === fixed)
|
||||
return this;
|
||||
|
||||
if (value && typeof value == "object") {
|
||||
var escaped = this.definition().escaped;
|
||||
if (escaped && depth > escaped)
|
||||
return this;
|
||||
}
|
||||
return value;
|
||||
});
|
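// A minimal sketch of the re-entrancy guard above (hypothetical snippet):
//
//   // var a = b, b = a;
//   // Evaluating `a` asks for the fixed value `b`, which asks for `a` again;
//   // `reentrant_ref_eval` breaks the cycle by returning the inner reference
//   // unevaluated. The `escaped`/`depth` check additionally refuses to
//   // substitute object values that may already be visible to other code.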
||||
|
||||
const global_objs = { Array, Math, Number, Object, String };
|
||||
|
||||
const regexp_flags = new Set([
|
||||
"dotAll",
|
||||
"global",
|
||||
"ignoreCase",
|
||||
"multiline",
|
||||
"sticky",
|
||||
"unicode",
|
||||
]);
|
||||
|
||||
def_eval(AST_PropAccess, function (compressor, depth) {
|
||||
let obj = this.expression._eval(compressor, depth + 1);
|
||||
if (obj === nullish || (this.optional && obj == null)) return nullish;
|
||||
if (compressor.option("unsafe")) {
|
||||
var key = this.property;
|
||||
if (key instanceof AST_Node) {
|
||||
key = key._eval(compressor, depth);
|
||||
if (key === this.property)
|
||||
return this;
|
||||
}
|
||||
var exp = this.expression;
|
||||
if (is_undeclared_ref(exp)) {
|
||||
|
||||
var aa;
|
||||
var first_arg = exp.name === "hasOwnProperty"
|
||||
&& key === "call"
|
||||
&& (aa = compressor.parent() && compressor.parent().args)
|
||||
&& (aa && aa[0]
|
||||
&& aa[0].evaluate(compressor));
|
||||
|
||||
first_arg = first_arg instanceof AST_Dot ? first_arg.expression : first_arg;
|
||||
|
||||
if (first_arg == null || first_arg.thedef && first_arg.thedef.undeclared) {
|
||||
return this.clone();
|
||||
}
|
||||
if (!is_pure_native_value(exp.name, key))
|
||||
return this;
|
||||
obj = global_objs[exp.name];
|
||||
} else {
|
||||
if (obj instanceof RegExp) {
|
||||
if (key == "source") {
|
||||
return regexp_source_fix(obj.source);
|
||||
} else if (key == "flags" || regexp_flags.has(key)) {
|
||||
return obj[key];
|
||||
}
|
||||
}
|
||||
if (!obj || obj === exp || !HOP(obj, key))
|
||||
return this;
|
||||
|
||||
if (typeof obj == "function")
|
||||
switch (key) {
|
||||
case "name":
|
||||
return obj.node.name ? obj.node.name.name : "";
|
||||
case "length":
|
||||
return obj.node.length_property();
|
||||
default:
|
||||
return this;
|
||||
}
|
||||
}
|
||||
return obj[key];
|
||||
}
|
||||
return this;
|
||||
});
|
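// A minimal sketch of the property-access evaluation above, assuming the usual
// terser `minify` entry point:
//
//   // minify("x = Math.PI;", { compress: { unsafe: true } })
//   //   can fold the read to the numeric constant: `Math` is an undeclared
//   //   global in `global_objs` and "PI" passes `is_pure_native_value`
//   // minify("x = /a/g.flags;", { compress: { unsafe: true } })
//   //   folds through the RegExp branch, which also rebuilds `source` via
//   //   `regexp_source_fix`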
||||
|
||||
def_eval(AST_Chain, function (compressor, depth) {
|
||||
const evaluated = this.expression._eval(compressor, depth);
|
||||
return evaluated === nullish
|
||||
? undefined
|
||||
: evaluated === this.expression
|
||||
? this
|
||||
: evaluated;
|
||||
});
|
||||
|
||||
def_eval(AST_Call, function (compressor, depth) {
|
||||
var exp = this.expression;
|
||||
|
||||
const callee = exp._eval(compressor, depth);
|
||||
if (callee === nullish || (this.optional && callee == null)) return nullish;
|
||||
|
||||
if (compressor.option("unsafe") && exp instanceof AST_PropAccess) {
|
||||
var key = exp.property;
|
||||
if (key instanceof AST_Node) {
|
||||
key = key._eval(compressor, depth);
|
||||
if (key === exp.property)
|
||||
return this;
|
||||
}
|
||||
var val;
|
||||
var e = exp.expression;
|
||||
if (is_undeclared_ref(e)) {
|
||||
var first_arg = e.name === "hasOwnProperty" &&
|
||||
key === "call" &&
|
||||
(this.args[0] && this.args[0].evaluate(compressor));
|
||||
|
||||
first_arg = first_arg instanceof AST_Dot ? first_arg.expression : first_arg;
|
||||
|
||||
if ((first_arg == null || first_arg.thedef && first_arg.thedef.undeclared)) {
|
||||
return this.clone();
|
||||
}
|
||||
if (!is_pure_native_fn(e.name, key)) return this;
|
||||
val = global_objs[e.name];
|
||||
} else {
|
||||
val = e._eval(compressor, depth + 1);
|
||||
if (val === e || !val)
|
||||
return this;
|
||||
if (!is_pure_native_method(val.constructor.name, key))
|
||||
return this;
|
||||
}
|
||||
var args = [];
|
||||
for (var i = 0, len = this.args.length; i < len; i++) {
|
||||
var arg = this.args[i];
|
||||
var value = arg._eval(compressor, depth);
|
||||
if (arg === value)
|
||||
return this;
|
||||
if (arg instanceof AST_Lambda)
|
||||
return this;
|
||||
args.push(value);
|
||||
}
|
||||
try {
|
||||
return val[key].apply(val, args);
|
||||
} catch (ex) {
|
||||
// We don't really care
|
||||
}
|
||||
}
|
||||
return this;
|
||||
});
|
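// A minimal sketch of the unsafe call evaluation above:
//
//   // Math.max(1, 2)   -> `Math` is an undeclared global in `global_objs` and
//   //                     "max" passes `is_pure_native_fn`, so the call can
//   //                     fold to 3
//   // "abc".charAt(1)  -> the receiver evaluates to a string, its constructor
//   //                     name "String" plus "charAt" passes
//   //                     `is_pure_native_method`, folding to "b"
//   // Any argument that fails to evaluate, any lambda argument, or an error
//   // thrown inside `apply` leaves the original call untouched.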
||||
|
||||
// Also a subclass of AST_Call
|
||||
def_eval(AST_New, return_this);
|
4530
node_modules/terser/lib/compress/index.js
generated
vendored
File diff suppressed because it is too large
948
node_modules/terser/lib/compress/inference.js
generated
vendored
Normal file
@@ -0,0 +1,948 @@
|
||||
/***********************************************************************
|
||||
|
||||
A JavaScript tokenizer / parser / beautifier / compressor.
|
||||
https://github.com/mishoo/UglifyJS2
|
||||
|
||||
-------------------------------- (C) ---------------------------------
|
||||
|
||||
Author: Mihai Bazon
|
||||
<mihai.bazon@gmail.com>
|
||||
http://mihai.bazon.net/blog
|
||||
|
||||
Distributed under the BSD license:
|
||||
|
||||
Copyright 2012 (c) Mihai Bazon <mihai.bazon@gmail.com>
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions
|
||||
are met:
|
||||
|
||||
* Redistributions of source code must retain the above
|
||||
copyright notice, this list of conditions and the following
|
||||
disclaimer.
|
||||
|
||||
* Redistributions in binary form must reproduce the above
|
||||
copyright notice, this list of conditions and the following
|
||||
disclaimer in the documentation and/or other materials
|
||||
provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER “AS IS” AND ANY
|
||||
EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
||||
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER BE
|
||||
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
|
||||
OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
||||
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
|
||||
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
|
||||
TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
|
||||
THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
|
||||
SUCH DAMAGE.
|
||||
|
||||
***********************************************************************/
|
||||
|
||||
import {
|
||||
AST_Array,
|
||||
AST_Arrow,
|
||||
AST_Assign,
|
||||
AST_Binary,
|
||||
AST_Block,
|
||||
AST_BlockStatement,
|
||||
AST_Call,
|
||||
AST_Case,
|
||||
AST_Chain,
|
||||
AST_Class,
|
||||
AST_ClassProperty,
|
||||
AST_ConciseMethod,
|
||||
AST_Conditional,
|
||||
AST_Constant,
|
||||
AST_Definitions,
|
||||
AST_Dot,
|
||||
AST_EmptyStatement,
|
||||
AST_Expansion,
|
||||
AST_False,
|
||||
AST_Function,
|
||||
AST_If,
|
||||
AST_Import,
|
||||
AST_Jump,
|
||||
AST_LabeledStatement,
|
||||
AST_Lambda,
|
||||
AST_New,
|
||||
AST_Node,
|
||||
AST_Null,
|
||||
AST_Number,
|
||||
AST_Object,
|
||||
AST_ObjectGetter,
|
||||
AST_ObjectKeyVal,
|
||||
AST_ObjectProperty,
|
||||
AST_ObjectSetter,
|
||||
AST_PropAccess,
|
||||
AST_RegExp,
|
||||
AST_Return,
|
||||
AST_Sequence,
|
||||
AST_SimpleStatement,
|
||||
AST_Statement,
|
||||
AST_String,
|
||||
AST_Sub,
|
||||
AST_Switch,
|
||||
AST_SwitchBranch,
|
||||
AST_SymbolClassProperty,
|
||||
AST_SymbolDeclaration,
|
||||
AST_SymbolRef,
|
||||
AST_TemplateSegment,
|
||||
AST_TemplateString,
|
||||
AST_This,
|
||||
AST_Toplevel,
|
||||
AST_True,
|
||||
AST_Try,
|
||||
AST_Unary,
|
||||
AST_UnaryPostfix,
|
||||
AST_UnaryPrefix,
|
||||
AST_Undefined,
|
||||
AST_VarDef,
|
||||
|
||||
TreeTransformer,
|
||||
walk,
|
||||
walk_abort,
|
||||
|
||||
_PURE
|
||||
} from "../ast.js";
|
||||
import {
|
||||
makePredicate,
|
||||
return_true,
|
||||
return_false,
|
||||
return_null,
|
||||
return_this,
|
||||
make_node,
|
||||
member,
|
||||
noop,
|
||||
has_annotation,
|
||||
HOP
|
||||
} from "../utils/index.js";
|
||||
import { make_node_from_constant, make_sequence, best_of_expression, read_property } from "./common.js";
|
||||
|
||||
import { INLINED, UNDEFINED, has_flag } from "./compressor-flags.js";
|
||||
import { pure_prop_access_globals, is_pure_native_fn, is_pure_native_method } from "./native-objects.js";
|
||||
|
||||
// Functions and methods to infer certain facts about expressions
|
||||
// It's not always possible to be 100% sure about something just by static analysis,
|
||||
// so `true` means yes, and `false` means maybe
|
||||
|
||||
export const is_undeclared_ref = (node) =>
|
||||
node instanceof AST_SymbolRef && node.definition().undeclared;
|
||||
|
||||
export const lazy_op = makePredicate("&& || ??");
|
||||
export const unary_side_effects = makePredicate("delete ++ --");
|
||||
|
||||
// methods to determine whether an expression has a boolean result type
|
||||
(function(def_is_boolean) {
|
||||
const unary_bool = makePredicate("! delete");
|
||||
const binary_bool = makePredicate("in instanceof == != === !== < <= >= >");
|
||||
def_is_boolean(AST_Node, return_false);
|
||||
def_is_boolean(AST_UnaryPrefix, function() {
|
||||
return unary_bool.has(this.operator);
|
||||
});
|
||||
def_is_boolean(AST_Binary, function() {
|
||||
return binary_bool.has(this.operator)
|
||||
|| lazy_op.has(this.operator)
|
||||
&& this.left.is_boolean()
|
||||
&& this.right.is_boolean();
|
||||
});
|
||||
def_is_boolean(AST_Conditional, function() {
|
||||
return this.consequent.is_boolean() && this.alternative.is_boolean();
|
||||
});
|
||||
def_is_boolean(AST_Assign, function() {
|
||||
return this.operator == "=" && this.right.is_boolean();
|
||||
});
|
||||
def_is_boolean(AST_Sequence, function() {
|
||||
return this.tail_node().is_boolean();
|
||||
});
|
||||
def_is_boolean(AST_True, return_true);
|
||||
def_is_boolean(AST_False, return_true);
|
||||
})(function(node, func) {
|
||||
node.DEFMETHOD("is_boolean", func);
|
||||
});
|
||||
|
||||
// methods to determine if an expression has a numeric result type
|
||||
(function(def_is_number) {
|
||||
def_is_number(AST_Node, return_false);
|
||||
def_is_number(AST_Number, return_true);
|
||||
const unary = makePredicate("+ - ~ ++ --");
|
||||
def_is_number(AST_Unary, function() {
|
||||
return unary.has(this.operator);
|
||||
});
|
||||
const numeric_ops = makePredicate("- * / % & | ^ << >> >>>");
|
||||
def_is_number(AST_Binary, function(compressor) {
|
||||
return numeric_ops.has(this.operator) || this.operator == "+"
|
||||
&& this.left.is_number(compressor)
|
||||
&& this.right.is_number(compressor);
|
||||
});
|
||||
def_is_number(AST_Assign, function(compressor) {
|
||||
return numeric_ops.has(this.operator.slice(0, -1))
|
||||
|| this.operator == "=" && this.right.is_number(compressor);
|
||||
});
|
||||
def_is_number(AST_Sequence, function(compressor) {
|
||||
return this.tail_node().is_number(compressor);
|
||||
});
|
||||
def_is_number(AST_Conditional, function(compressor) {
|
||||
return this.consequent.is_number(compressor) && this.alternative.is_number(compressor);
|
||||
});
|
||||
})(function(node, func) {
|
||||
node.DEFMETHOD("is_number", func);
|
||||
});
|
||||
|
||||
// methods to determine if an expression has a string result type
|
||||
(function(def_is_string) {
|
||||
def_is_string(AST_Node, return_false);
|
||||
def_is_string(AST_String, return_true);
|
||||
def_is_string(AST_TemplateString, return_true);
|
||||
def_is_string(AST_UnaryPrefix, function() {
|
||||
return this.operator == "typeof";
|
||||
});
|
||||
def_is_string(AST_Binary, function(compressor) {
|
||||
return this.operator == "+" &&
|
||||
(this.left.is_string(compressor) || this.right.is_string(compressor));
|
||||
});
|
||||
def_is_string(AST_Assign, function(compressor) {
|
||||
return (this.operator == "=" || this.operator == "+=") && this.right.is_string(compressor);
|
||||
});
|
||||
def_is_string(AST_Sequence, function(compressor) {
|
||||
return this.tail_node().is_string(compressor);
|
||||
});
|
||||
def_is_string(AST_Conditional, function(compressor) {
|
||||
return this.consequent.is_string(compressor) && this.alternative.is_string(compressor);
|
||||
});
|
||||
})(function(node, func) {
|
||||
node.DEFMETHOD("is_string", func);
|
||||
});
|
||||
|
||||
export function is_undefined(node, compressor) {
|
||||
return (
|
||||
has_flag(node, UNDEFINED)
|
||||
|| node instanceof AST_Undefined
|
||||
|| node instanceof AST_UnaryPrefix
|
||||
&& node.operator == "void"
|
||||
&& !node.expression.has_side_effects(compressor)
|
||||
);
|
||||
}
|
||||
|
||||
// Is the node explicitly null or undefined.
|
||||
function is_null_or_undefined(node, compressor) {
|
||||
let fixed;
|
||||
return (
|
||||
node instanceof AST_Null
|
||||
|| is_undefined(node, compressor)
|
||||
|| (
|
||||
node instanceof AST_SymbolRef
|
||||
&& (fixed = node.definition().fixed) instanceof AST_Node
|
||||
&& is_nullish(fixed, compressor)
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
// Find out if this expression is optionally chained from a base-point that we
|
||||
// can statically analyze as null or undefined.
|
||||
export function is_nullish_shortcircuited(node, compressor) {
|
||||
if (node instanceof AST_PropAccess || node instanceof AST_Call) {
|
||||
return (
|
||||
(node.optional && is_null_or_undefined(node.expression, compressor))
|
||||
|| is_nullish_shortcircuited(node.expression, compressor)
|
||||
);
|
||||
}
|
||||
if (node instanceof AST_Chain) return is_nullish_shortcircuited(node.expression, compressor);
|
||||
return false;
|
||||
}
|
||||
|
||||
// Find out if something is == null, or can short circuit into nullish.
|
||||
// Used to optimize ?. and ??
|
||||
export function is_nullish(node, compressor) {
|
||||
if (is_null_or_undefined(node, compressor)) return true;
|
||||
return is_nullish_shortcircuited(node, compressor);
|
||||
}
|
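// A rough summary of the three helpers above:
//
//   // is_undefined          -> node is `undefined`, flagged UNDEFINED, or a
//   //                          side-effect-free `void ...`
//   // is_null_or_undefined  -> the above, `null`, or a symbol whose fixed
//   //                          value is itself nullish
//   // is_nullish            -> either of the above, or an optional chain such
//   //                          as `a?.b.c` whose base is statically nullish
//   //                          and therefore short-circuits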
||||
|
||||
// Determine if expression might cause side effects
|
||||
// If there's a possibility that a node may change something when it's executed, this returns true
|
||||
(function(def_has_side_effects) {
|
||||
def_has_side_effects(AST_Node, return_true);
|
||||
|
||||
def_has_side_effects(AST_EmptyStatement, return_false);
|
||||
def_has_side_effects(AST_Constant, return_false);
|
||||
def_has_side_effects(AST_This, return_false);
|
||||
|
||||
function any(list, compressor) {
|
||||
for (var i = list.length; --i >= 0;)
|
||||
if (list[i].has_side_effects(compressor))
|
||||
return true;
|
||||
return false;
|
||||
}
|
||||
|
||||
def_has_side_effects(AST_Block, function(compressor) {
|
||||
return any(this.body, compressor);
|
||||
});
|
||||
def_has_side_effects(AST_Call, function(compressor) {
|
||||
if (
|
||||
!this.is_callee_pure(compressor)
|
||||
&& (!this.expression.is_call_pure(compressor)
|
||||
|| this.expression.has_side_effects(compressor))
|
||||
) {
|
||||
return true;
|
||||
}
|
||||
return any(this.args, compressor);
|
||||
});
|
||||
def_has_side_effects(AST_Switch, function(compressor) {
|
||||
return this.expression.has_side_effects(compressor)
|
||||
|| any(this.body, compressor);
|
||||
});
|
||||
def_has_side_effects(AST_Case, function(compressor) {
|
||||
return this.expression.has_side_effects(compressor)
|
||||
|| any(this.body, compressor);
|
||||
});
|
||||
def_has_side_effects(AST_Try, function(compressor) {
|
||||
return any(this.body, compressor)
|
||||
|| this.bcatch && this.bcatch.has_side_effects(compressor)
|
||||
|| this.bfinally && this.bfinally.has_side_effects(compressor);
|
||||
});
|
||||
def_has_side_effects(AST_If, function(compressor) {
|
||||
return this.condition.has_side_effects(compressor)
|
||||
|| this.body && this.body.has_side_effects(compressor)
|
||||
|| this.alternative && this.alternative.has_side_effects(compressor);
|
||||
});
|
||||
def_has_side_effects(AST_LabeledStatement, function(compressor) {
|
||||
return this.body.has_side_effects(compressor);
|
||||
});
|
||||
def_has_side_effects(AST_SimpleStatement, function(compressor) {
|
||||
return this.body.has_side_effects(compressor);
|
||||
});
|
||||
def_has_side_effects(AST_Lambda, return_false);
|
||||
def_has_side_effects(AST_Class, function (compressor) {
|
||||
if (this.extends && this.extends.has_side_effects(compressor)) {
|
||||
return true;
|
||||
}
|
||||
return any(this.properties, compressor);
|
||||
});
|
||||
def_has_side_effects(AST_Binary, function(compressor) {
|
||||
return this.left.has_side_effects(compressor)
|
||||
|| this.right.has_side_effects(compressor);
|
||||
});
|
||||
def_has_side_effects(AST_Assign, return_true);
|
||||
def_has_side_effects(AST_Conditional, function(compressor) {
|
||||
return this.condition.has_side_effects(compressor)
|
||||
|| this.consequent.has_side_effects(compressor)
|
||||
|| this.alternative.has_side_effects(compressor);
|
||||
});
|
||||
def_has_side_effects(AST_Unary, function(compressor) {
|
||||
return unary_side_effects.has(this.operator)
|
||||
|| this.expression.has_side_effects(compressor);
|
||||
});
|
||||
def_has_side_effects(AST_SymbolRef, function(compressor) {
|
||||
return !this.is_declared(compressor) && !pure_prop_access_globals.has(this.name);
|
||||
});
|
||||
def_has_side_effects(AST_SymbolClassProperty, return_false);
|
||||
def_has_side_effects(AST_SymbolDeclaration, return_false);
|
||||
def_has_side_effects(AST_Object, function(compressor) {
|
||||
return any(this.properties, compressor);
|
||||
});
|
||||
def_has_side_effects(AST_ObjectProperty, function(compressor) {
|
||||
return (
|
||||
this.computed_key() && this.key.has_side_effects(compressor)
|
||||
|| this.value && this.value.has_side_effects(compressor)
|
||||
);
|
||||
});
|
||||
def_has_side_effects(AST_ClassProperty, function(compressor) {
|
||||
return (
|
||||
this.computed_key() && this.key.has_side_effects(compressor)
|
||||
|| this.static && this.value && this.value.has_side_effects(compressor)
|
||||
);
|
||||
});
|
||||
def_has_side_effects(AST_ConciseMethod, function(compressor) {
|
||||
return this.computed_key() && this.key.has_side_effects(compressor);
|
||||
});
|
||||
def_has_side_effects(AST_ObjectGetter, function(compressor) {
|
||||
return this.computed_key() && this.key.has_side_effects(compressor);
|
||||
});
|
||||
def_has_side_effects(AST_ObjectSetter, function(compressor) {
|
||||
return this.computed_key() && this.key.has_side_effects(compressor);
|
||||
});
|
||||
def_has_side_effects(AST_Array, function(compressor) {
|
||||
return any(this.elements, compressor);
|
||||
});
|
||||
def_has_side_effects(AST_Dot, function(compressor) {
|
||||
if (is_nullish(this, compressor)) return false;
|
||||
return !this.optional && this.expression.may_throw_on_access(compressor)
|
||||
|| this.expression.has_side_effects(compressor);
|
||||
});
|
||||
def_has_side_effects(AST_Sub, function(compressor) {
|
||||
if (is_nullish(this, compressor)) return false;
|
||||
|
||||
return !this.optional && this.expression.may_throw_on_access(compressor)
|
||||
|| this.expression.has_side_effects(compressor)
|
||||
|| this.property.has_side_effects(compressor);
|
||||
});
|
||||
def_has_side_effects(AST_Chain, function (compressor) {
|
||||
return this.expression.has_side_effects(compressor);
|
||||
});
|
||||
def_has_side_effects(AST_Sequence, function(compressor) {
|
||||
return any(this.expressions, compressor);
|
||||
});
|
||||
def_has_side_effects(AST_Definitions, function(compressor) {
|
||||
return any(this.definitions, compressor);
|
||||
});
|
||||
def_has_side_effects(AST_VarDef, function() {
|
||||
return this.value;
|
||||
});
|
||||
def_has_side_effects(AST_TemplateSegment, return_false);
|
||||
def_has_side_effects(AST_TemplateString, function(compressor) {
|
||||
return any(this.segments, compressor);
|
||||
});
|
||||
})(function(node, func) {
|
||||
node.DEFMETHOD("has_side_effects", func);
|
||||
});
|
||||
|
||||
// determine if expression may throw
|
||||
(function(def_may_throw) {
|
||||
def_may_throw(AST_Node, return_true);
|
||||
|
||||
def_may_throw(AST_Constant, return_false);
|
||||
def_may_throw(AST_EmptyStatement, return_false);
|
||||
def_may_throw(AST_Lambda, return_false);
|
||||
def_may_throw(AST_SymbolDeclaration, return_false);
|
||||
def_may_throw(AST_This, return_false);
|
||||
|
||||
function any(list, compressor) {
|
||||
for (var i = list.length; --i >= 0;)
|
||||
if (list[i].may_throw(compressor))
|
||||
return true;
|
||||
return false;
|
||||
}
|
||||
|
||||
def_may_throw(AST_Class, function(compressor) {
|
||||
if (this.extends && this.extends.may_throw(compressor)) return true;
|
||||
return any(this.properties, compressor);
|
||||
});
|
||||
|
||||
def_may_throw(AST_Array, function(compressor) {
|
||||
return any(this.elements, compressor);
|
||||
});
|
||||
def_may_throw(AST_Assign, function(compressor) {
|
||||
if (this.right.may_throw(compressor)) return true;
|
||||
if (!compressor.has_directive("use strict")
|
||||
&& this.operator == "="
|
||||
&& this.left instanceof AST_SymbolRef) {
|
||||
return false;
|
||||
}
|
||||
return this.left.may_throw(compressor);
|
||||
});
|
||||
def_may_throw(AST_Binary, function(compressor) {
|
||||
return this.left.may_throw(compressor)
|
||||
|| this.right.may_throw(compressor);
|
||||
});
|
||||
def_may_throw(AST_Block, function(compressor) {
|
||||
return any(this.body, compressor);
|
||||
});
|
||||
def_may_throw(AST_Call, function(compressor) {
|
||||
if (is_nullish(this, compressor)) return false;
|
||||
if (any(this.args, compressor)) return true;
|
||||
if (this.is_callee_pure(compressor)) return false;
|
||||
if (this.expression.may_throw(compressor)) return true;
|
||||
return !(this.expression instanceof AST_Lambda)
|
||||
|| any(this.expression.body, compressor);
|
||||
});
|
||||
def_may_throw(AST_Case, function(compressor) {
|
||||
return this.expression.may_throw(compressor)
|
||||
|| any(this.body, compressor);
|
||||
});
|
||||
def_may_throw(AST_Conditional, function(compressor) {
|
||||
return this.condition.may_throw(compressor)
|
||||
|| this.consequent.may_throw(compressor)
|
||||
|| this.alternative.may_throw(compressor);
|
||||
});
|
||||
def_may_throw(AST_Definitions, function(compressor) {
|
||||
return any(this.definitions, compressor);
|
||||
});
|
||||
def_may_throw(AST_If, function(compressor) {
|
||||
return this.condition.may_throw(compressor)
|
||||
|| this.body && this.body.may_throw(compressor)
|
||||
|| this.alternative && this.alternative.may_throw(compressor);
|
||||
});
|
||||
def_may_throw(AST_LabeledStatement, function(compressor) {
|
||||
return this.body.may_throw(compressor);
|
||||
});
|
||||
def_may_throw(AST_Object, function(compressor) {
|
||||
return any(this.properties, compressor);
|
||||
});
|
||||
def_may_throw(AST_ObjectProperty, function(compressor) {
|
||||
// TODO key may throw too
|
||||
return this.value ? this.value.may_throw(compressor) : false;
|
||||
});
|
||||
def_may_throw(AST_ClassProperty, function(compressor) {
|
||||
return (
|
||||
this.computed_key() && this.key.may_throw(compressor)
|
||||
|| this.static && this.value && this.value.may_throw(compressor)
|
||||
);
|
||||
});
|
||||
def_may_throw(AST_ConciseMethod, function(compressor) {
|
||||
return this.computed_key() && this.key.may_throw(compressor);
|
||||
});
|
||||
def_may_throw(AST_ObjectGetter, function(compressor) {
|
||||
return this.computed_key() && this.key.may_throw(compressor);
|
||||
});
|
||||
def_may_throw(AST_ObjectSetter, function(compressor) {
|
||||
return this.computed_key() && this.key.may_throw(compressor);
|
||||
});
|
||||
def_may_throw(AST_Return, function(compressor) {
|
||||
return this.value && this.value.may_throw(compressor);
|
||||
});
|
||||
def_may_throw(AST_Sequence, function(compressor) {
|
||||
return any(this.expressions, compressor);
|
||||
});
|
||||
def_may_throw(AST_SimpleStatement, function(compressor) {
|
||||
return this.body.may_throw(compressor);
|
||||
});
|
||||
def_may_throw(AST_Dot, function(compressor) {
|
||||
if (is_nullish(this, compressor)) return false;
|
||||
return !this.optional && this.expression.may_throw_on_access(compressor)
|
||||
|| this.expression.may_throw(compressor);
|
||||
});
|
||||
def_may_throw(AST_Sub, function(compressor) {
|
||||
if (is_nullish(this, compressor)) return false;
|
||||
return !this.optional && this.expression.may_throw_on_access(compressor)
|
||||
|| this.expression.may_throw(compressor)
|
||||
|| this.property.may_throw(compressor);
|
||||
});
|
||||
def_may_throw(AST_Chain, function(compressor) {
|
||||
return this.expression.may_throw(compressor);
|
||||
});
|
||||
def_may_throw(AST_Switch, function(compressor) {
|
||||
return this.expression.may_throw(compressor)
|
||||
|| any(this.body, compressor);
|
||||
});
|
||||
def_may_throw(AST_SymbolRef, function(compressor) {
|
||||
return !this.is_declared(compressor) && !pure_prop_access_globals.has(this.name);
|
||||
});
|
||||
def_may_throw(AST_SymbolClassProperty, return_false);
|
||||
def_may_throw(AST_Try, function(compressor) {
|
||||
return this.bcatch ? this.bcatch.may_throw(compressor) : any(this.body, compressor)
|
||||
|| this.bfinally && this.bfinally.may_throw(compressor);
|
||||
});
|
||||
def_may_throw(AST_Unary, function(compressor) {
|
||||
if (this.operator == "typeof" && this.expression instanceof AST_SymbolRef)
|
||||
return false;
|
||||
return this.expression.may_throw(compressor);
|
||||
});
|
||||
def_may_throw(AST_VarDef, function(compressor) {
|
||||
if (!this.value) return false;
|
||||
return this.value.may_throw(compressor);
|
||||
});
|
||||
})(function(node, func) {
|
||||
node.DEFMETHOD("may_throw", func);
|
||||
});
|
||||
|
||||
// determine if expression is constant
|
||||
(function(def_is_constant_expression) {
|
||||
function all_refs_local(scope) {
|
||||
let result = true;
|
||||
walk(this, node => {
|
||||
if (node instanceof AST_SymbolRef) {
|
||||
if (has_flag(this, INLINED)) {
|
||||
result = false;
|
||||
return walk_abort;
|
||||
}
|
||||
var def = node.definition();
|
||||
if (
|
||||
member(def, this.enclosed)
|
||||
&& !this.variables.has(def.name)
|
||||
) {
|
||||
if (scope) {
|
||||
var scope_def = scope.find_variable(node);
|
||||
if (def.undeclared ? !scope_def : scope_def === def) {
|
||||
result = "f";
|
||||
return true;
|
||||
}
|
||||
}
|
||||
result = false;
|
||||
return walk_abort;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
if (node instanceof AST_This && this instanceof AST_Arrow) {
|
||||
// TODO check arguments too!
|
||||
result = false;
|
||||
return walk_abort;
|
||||
}
|
||||
});
|
||||
return result;
|
||||
}
|
||||
|
||||
def_is_constant_expression(AST_Node, return_false);
|
||||
def_is_constant_expression(AST_Constant, return_true);
|
||||
def_is_constant_expression(AST_Class, function(scope) {
|
||||
if (this.extends && !this.extends.is_constant_expression(scope)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
for (const prop of this.properties) {
|
||||
if (prop.computed_key() && !prop.key.is_constant_expression(scope)) {
|
||||
return false;
|
||||
}
|
||||
if (prop.static && prop.value && !prop.value.is_constant_expression(scope)) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
return all_refs_local.call(this, scope);
|
||||
});
|
||||
def_is_constant_expression(AST_Lambda, all_refs_local);
|
||||
def_is_constant_expression(AST_Unary, function() {
|
||||
return this.expression.is_constant_expression();
|
||||
});
|
||||
def_is_constant_expression(AST_Binary, function() {
|
||||
return this.left.is_constant_expression()
|
||||
&& this.right.is_constant_expression();
|
||||
});
|
||||
def_is_constant_expression(AST_Array, function() {
|
||||
return this.elements.every((l) => l.is_constant_expression());
|
||||
});
|
||||
def_is_constant_expression(AST_Object, function() {
|
||||
return this.properties.every((l) => l.is_constant_expression());
|
||||
});
|
||||
def_is_constant_expression(AST_ObjectProperty, function() {
|
||||
return !!(!(this.key instanceof AST_Node) && this.value && this.value.is_constant_expression());
|
||||
});
|
||||
})(function(node, func) {
|
||||
node.DEFMETHOD("is_constant_expression", func);
|
||||
});
|
||||
|
||||
|
||||
// may_throw_on_access()
|
||||
// returns true if this node may be null, undefined or contain `AST_Accessor`
|
||||
(function(def_may_throw_on_access) {
|
||||
AST_Node.DEFMETHOD("may_throw_on_access", function(compressor) {
|
||||
return !compressor.option("pure_getters")
|
||||
|| this._dot_throw(compressor);
|
||||
});
|
||||
|
||||
function is_strict(compressor) {
|
||||
return /strict/.test(compressor.option("pure_getters"));
|
||||
}
|
||||
|
||||
def_may_throw_on_access(AST_Node, is_strict);
|
||||
def_may_throw_on_access(AST_Null, return_true);
|
||||
def_may_throw_on_access(AST_Undefined, return_true);
|
||||
def_may_throw_on_access(AST_Constant, return_false);
|
||||
def_may_throw_on_access(AST_Array, return_false);
|
||||
def_may_throw_on_access(AST_Object, function(compressor) {
|
||||
if (!is_strict(compressor)) return false;
|
||||
for (var i = this.properties.length; --i >=0;)
|
||||
if (this.properties[i]._dot_throw(compressor)) return true;
|
||||
return false;
|
||||
});
|
||||
// Do not be as strict with classes as we are with objects.
|
||||
// Hopefully the community is not going to abuse static getters and setters.
|
||||
// https://github.com/terser/terser/issues/724#issuecomment-643655656
|
||||
def_may_throw_on_access(AST_Class, return_false);
|
||||
def_may_throw_on_access(AST_ObjectProperty, return_false);
|
||||
def_may_throw_on_access(AST_ObjectGetter, return_true);
|
||||
def_may_throw_on_access(AST_Expansion, function(compressor) {
|
||||
return this.expression._dot_throw(compressor);
|
||||
});
|
||||
def_may_throw_on_access(AST_Function, return_false);
|
||||
def_may_throw_on_access(AST_Arrow, return_false);
|
||||
def_may_throw_on_access(AST_UnaryPostfix, return_false);
|
||||
def_may_throw_on_access(AST_UnaryPrefix, function() {
|
||||
return this.operator == "void";
|
||||
});
|
||||
def_may_throw_on_access(AST_Binary, function(compressor) {
|
||||
return (this.operator == "&&" || this.operator == "||" || this.operator == "??")
|
||||
&& (this.left._dot_throw(compressor) || this.right._dot_throw(compressor));
|
||||
});
|
||||
def_may_throw_on_access(AST_Assign, function(compressor) {
|
||||
if (this.logical) return true;
|
||||
|
||||
return this.operator == "="
|
||||
&& this.right._dot_throw(compressor);
|
||||
});
|
||||
def_may_throw_on_access(AST_Conditional, function(compressor) {
|
||||
return this.consequent._dot_throw(compressor)
|
||||
|| this.alternative._dot_throw(compressor);
|
||||
});
|
||||
def_may_throw_on_access(AST_Dot, function(compressor) {
|
||||
if (!is_strict(compressor)) return false;
|
||||
|
||||
if (this.property == "prototype") {
|
||||
return !(
|
||||
this.expression instanceof AST_Function
|
||||
|| this.expression instanceof AST_Class
|
||||
);
|
||||
}
|
||||
return true;
|
||||
});
|
||||
def_may_throw_on_access(AST_Chain, function(compressor) {
|
||||
return this.expression._dot_throw(compressor);
|
||||
});
|
||||
def_may_throw_on_access(AST_Sequence, function(compressor) {
|
||||
return this.tail_node()._dot_throw(compressor);
|
||||
});
|
||||
def_may_throw_on_access(AST_SymbolRef, function(compressor) {
|
||||
if (this.name === "arguments") return false;
|
||||
if (has_flag(this, UNDEFINED)) return true;
|
||||
if (!is_strict(compressor)) return false;
|
||||
if (is_undeclared_ref(this) && this.is_declared(compressor)) return false;
|
||||
if (this.is_immutable()) return false;
|
||||
var fixed = this.fixed_value();
|
||||
return !fixed || fixed._dot_throw(compressor);
|
||||
});
|
||||
})(function(node, func) {
|
||||
node.DEFMETHOD("_dot_throw", func);
|
||||
});
|
||||
|
||||
export function is_lhs(node, parent) {
|
||||
if (parent instanceof AST_Unary && unary_side_effects.has(parent.operator)) return parent.expression;
|
||||
if (parent instanceof AST_Assign && parent.left === node) return node;
|
||||
}
|
||||
|
||||
(function(def_find_defs) {
|
||||
function to_node(value, orig) {
|
||||
if (value instanceof AST_Node) {
|
||||
if (!(value instanceof AST_Constant)) {
|
||||
// Value may be a function, an array including functions and even a complex assign / block expression,
|
||||
// so it should never be shared in different places.
|
||||
// Otherwise wrong information may be used in the compression phase
|
||||
value = value.clone(true);
|
||||
}
|
||||
return make_node(value.CTOR, orig, value);
|
||||
}
|
||||
if (Array.isArray(value)) return make_node(AST_Array, orig, {
|
||||
elements: value.map(function(value) {
|
||||
return to_node(value, orig);
|
||||
})
|
||||
});
|
||||
if (value && typeof value == "object") {
|
||||
var props = [];
|
||||
for (var key in value) if (HOP(value, key)) {
|
||||
props.push(make_node(AST_ObjectKeyVal, orig, {
|
||||
key: key,
|
||||
value: to_node(value[key], orig)
|
||||
}));
|
||||
}
|
||||
return make_node(AST_Object, orig, {
|
||||
properties: props
|
||||
});
|
||||
}
|
||||
return make_node_from_constant(value, orig);
|
||||
}
|
||||
|
||||
AST_Toplevel.DEFMETHOD("resolve_defines", function(compressor) {
|
||||
if (!compressor.option("global_defs")) return this;
|
||||
this.figure_out_scope({ ie8: compressor.option("ie8") });
|
||||
return this.transform(new TreeTransformer(function(node) {
|
||||
var def = node._find_defs(compressor, "");
|
||||
if (!def) return;
|
||||
var level = 0, child = node, parent;
|
||||
while (parent = this.parent(level++)) {
|
||||
if (!(parent instanceof AST_PropAccess)) break;
|
||||
if (parent.expression !== child) break;
|
||||
child = parent;
|
||||
}
|
||||
if (is_lhs(child, parent)) {
|
||||
return;
|
||||
}
|
||||
return def;
|
||||
}));
|
||||
});
|
||||
def_find_defs(AST_Node, noop);
|
||||
def_find_defs(AST_Chain, function(compressor, suffix) {
|
||||
return this.expression._find_defs(compressor, suffix);
|
||||
});
|
||||
def_find_defs(AST_Dot, function(compressor, suffix) {
|
||||
return this.expression._find_defs(compressor, "." + this.property + suffix);
|
||||
});
|
||||
def_find_defs(AST_SymbolDeclaration, function() {
|
||||
if (!this.global()) return;
|
||||
});
|
||||
def_find_defs(AST_SymbolRef, function(compressor, suffix) {
|
||||
if (!this.global()) return;
|
||||
var defines = compressor.option("global_defs");
|
||||
var name = this.name + suffix;
|
||||
if (HOP(defines, name)) return to_node(defines[name], this);
|
||||
});
|
||||
})(function(node, func) {
|
||||
node.DEFMETHOD("_find_defs", func);
|
||||
});
|
||||
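// A minimal sketch of `global_defs` resolution above, assuming terser's
// documented `compress.global_defs` option (names are illustrative):
//
//   // minify("if (DEBUG) log();", { compress: { global_defs: { DEBUG: false } } })
//   //   `_find_defs` resolves the undeclared `DEBUG` reference to an AST_False
//   //   via `to_node`, and later passes drop the dead branch.
//   // Dotted keys such as "env.NODE_ENV" are matched through the AST_Dot case;
//   // left-hand sides are skipped thanks to the `is_lhs` check.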
|
||||
// method to negate an expression
|
||||
(function(def_negate) {
|
||||
function basic_negation(exp) {
|
||||
return make_node(AST_UnaryPrefix, exp, {
|
||||
operator: "!",
|
||||
expression: exp
|
||||
});
|
||||
}
|
||||
function best(orig, alt, first_in_statement) {
|
||||
var negated = basic_negation(orig);
|
||||
if (first_in_statement) {
|
||||
var stat = make_node(AST_SimpleStatement, alt, {
|
||||
body: alt
|
||||
});
|
||||
return best_of_expression(negated, stat) === stat ? alt : negated;
|
||||
}
|
||||
return best_of_expression(negated, alt);
|
||||
}
|
||||
def_negate(AST_Node, function() {
|
||||
return basic_negation(this);
|
||||
});
|
||||
def_negate(AST_Statement, function() {
|
||||
throw new Error("Cannot negate a statement");
|
||||
});
|
||||
def_negate(AST_Function, function() {
|
||||
return basic_negation(this);
|
||||
});
|
||||
def_negate(AST_Arrow, function() {
|
||||
return basic_negation(this);
|
||||
});
|
||||
def_negate(AST_UnaryPrefix, function() {
|
||||
if (this.operator == "!")
|
||||
return this.expression;
|
||||
return basic_negation(this);
|
||||
});
|
||||
def_negate(AST_Sequence, function(compressor) {
|
||||
var expressions = this.expressions.slice();
|
||||
expressions.push(expressions.pop().negate(compressor));
|
||||
return make_sequence(this, expressions);
|
||||
});
|
||||
def_negate(AST_Conditional, function(compressor, first_in_statement) {
|
||||
var self = this.clone();
|
||||
self.consequent = self.consequent.negate(compressor);
|
||||
self.alternative = self.alternative.negate(compressor);
|
||||
return best(this, self, first_in_statement);
|
||||
});
|
||||
def_negate(AST_Binary, function(compressor, first_in_statement) {
|
||||
var self = this.clone(), op = this.operator;
|
||||
if (compressor.option("unsafe_comps")) {
|
||||
switch (op) {
|
||||
case "<=" : self.operator = ">" ; return self;
|
||||
case "<" : self.operator = ">=" ; return self;
|
||||
case ">=" : self.operator = "<" ; return self;
|
||||
case ">" : self.operator = "<=" ; return self;
|
||||
}
|
||||
}
|
||||
switch (op) {
|
||||
case "==" : self.operator = "!="; return self;
|
||||
case "!=" : self.operator = "=="; return self;
|
||||
case "===": self.operator = "!=="; return self;
|
||||
case "!==": self.operator = "==="; return self;
|
||||
case "&&":
|
||||
self.operator = "||";
|
||||
self.left = self.left.negate(compressor, first_in_statement);
|
||||
self.right = self.right.negate(compressor);
|
||||
return best(this, self, first_in_statement);
|
||||
case "||":
|
||||
self.operator = "&&";
|
||||
self.left = self.left.negate(compressor, first_in_statement);
|
||||
self.right = self.right.negate(compressor);
|
||||
return best(this, self, first_in_statement);
|
||||
}
|
||||
return basic_negation(this);
|
||||
});
|
||||
})(function(node, func) {
|
||||
node.DEFMETHOD("negate", function(compressor, first_in_statement) {
|
||||
return func.call(this, compressor, first_in_statement);
|
||||
});
|
||||
});
|
||||
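// A minimal sketch of the negation rules above:
//
//   // !(a == b)  -> a != b      (always safe)
//   // !(a && b)  -> !a || !b    (De Morgan; kept only when not larger)
//   // !(a <= b)  -> a > b       (only with `unsafe_comps`, since NaN and
//   //                            exotic valueOf break the equivalence)
//   // Statements cannot be negated; AST_Statement#negate throws.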
|
||||
// Is the callee of this function pure?
|
||||
var global_pure_fns = makePredicate("Boolean decodeURI decodeURIComponent Date encodeURI encodeURIComponent Error escape EvalError isFinite isNaN Number Object parseFloat parseInt RangeError ReferenceError String SyntaxError TypeError unescape URIError");
|
||||
AST_Call.DEFMETHOD("is_callee_pure", function(compressor) {
|
||||
if (compressor.option("unsafe")) {
|
||||
var expr = this.expression;
|
||||
var first_arg = (this.args && this.args[0] && this.args[0].evaluate(compressor));
|
||||
if (
|
||||
expr.expression && expr.expression.name === "hasOwnProperty" &&
|
||||
(first_arg == null || first_arg.thedef && first_arg.thedef.undeclared)
|
||||
) {
|
||||
return false;
|
||||
}
|
||||
if (is_undeclared_ref(expr) && global_pure_fns.has(expr.name)) return true;
|
||||
if (
|
||||
expr instanceof AST_Dot
|
||||
&& is_undeclared_ref(expr.expression)
|
||||
&& is_pure_native_fn(expr.expression.name, expr.property)
|
||||
) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return !!has_annotation(this, _PURE) || !compressor.pure_funcs(this);
|
||||
});
|
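// A minimal sketch of `is_callee_pure` above (the listed globals require
// `unsafe`; the _PURE annotation and the `pure_funcs` option do not):
//
//   // Boolean(x), Number(x), String(x)  -> callee is listed in `global_pure_fns`
//   // Object.keys(x)                    -> pure via `is_pure_native_fn("Object", "keys")`
//   // /*@__PURE__*/ fn()                -> treated as pure through the _PURE annotation
//
// "Pure callee" only means the call target itself has no side effects; the
// arguments are still checked separately by `has_side_effects`.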
||||
|
||||
// If I call this, is it a pure function?
|
||||
AST_Node.DEFMETHOD("is_call_pure", return_false);
|
||||
AST_Dot.DEFMETHOD("is_call_pure", function(compressor) {
|
||||
if (!compressor.option("unsafe")) return;
|
||||
const expr = this.expression;
|
||||
|
||||
let native_obj;
|
||||
if (expr instanceof AST_Array) {
|
||||
native_obj = "Array";
|
||||
} else if (expr.is_boolean()) {
|
||||
native_obj = "Boolean";
|
||||
} else if (expr.is_number(compressor)) {
|
||||
native_obj = "Number";
|
||||
} else if (expr instanceof AST_RegExp) {
|
||||
native_obj = "RegExp";
|
||||
} else if (expr.is_string(compressor)) {
|
||||
native_obj = "String";
|
||||
} else if (!this.may_throw_on_access(compressor)) {
|
||||
native_obj = "Object";
|
||||
}
|
||||
return native_obj != null && is_pure_native_method(native_obj, this.property);
|
||||
});
|
||||
|
||||
// tell me if a statement aborts
|
||||
export const aborts = (thing) => thing && thing.aborts();
|
||||
|
||||
(function(def_aborts) {
|
||||
def_aborts(AST_Statement, return_null);
|
||||
def_aborts(AST_Jump, return_this);
|
||||
function block_aborts() {
|
||||
for (var i = 0; i < this.body.length; i++) {
|
||||
if (aborts(this.body[i])) {
|
||||
return this.body[i];
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
def_aborts(AST_Import, function() { return null; });
|
||||
def_aborts(AST_BlockStatement, block_aborts);
|
||||
def_aborts(AST_SwitchBranch, block_aborts);
|
||||
def_aborts(AST_If, function() {
|
||||
return this.alternative && aborts(this.body) && aborts(this.alternative) && this;
|
||||
});
|
||||
})(function(node, func) {
|
||||
node.DEFMETHOD("aborts", func);
|
||||
});
|
||||
|
||||
export function is_modified(compressor, tw, node, value, level, immutable) {
|
||||
var parent = tw.parent(level);
|
||||
var lhs = is_lhs(node, parent);
|
||||
if (lhs) return lhs;
|
||||
if (!immutable
|
||||
&& parent instanceof AST_Call
|
||||
&& parent.expression === node
|
||||
&& !(value instanceof AST_Arrow)
|
||||
&& !(value instanceof AST_Class)
|
||||
&& !parent.is_callee_pure(compressor)
|
||||
&& (!(value instanceof AST_Function)
|
||||
|| !(parent instanceof AST_New) && value.contains_this())) {
|
||||
return true;
|
||||
}
|
||||
if (parent instanceof AST_Array) {
|
||||
return is_modified(compressor, tw, parent, parent, level + 1);
|
||||
}
|
||||
if (parent instanceof AST_ObjectKeyVal && node === parent.value) {
|
||||
var obj = tw.parent(level + 1);
|
||||
return is_modified(compressor, tw, obj, obj, level + 2);
|
||||
}
|
||||
if (parent instanceof AST_PropAccess && parent.expression === node) {
|
||||
var prop = read_property(value, parent.property);
|
||||
return !immutable && is_modified(compressor, tw, parent, prop, level + 1);
|
||||
}
|
||||
}
|
641
node_modules/terser/lib/compress/inline.js
generated
vendored
Normal file
@@ -0,0 +1,641 @@
|
||||
/***********************************************************************
|
||||
|
||||
A JavaScript tokenizer / parser / beautifier / compressor.
|
||||
https://github.com/mishoo/UglifyJS2
|
||||
|
||||
-------------------------------- (C) ---------------------------------
|
||||
|
||||
Author: Mihai Bazon
|
||||
<mihai.bazon@gmail.com>
|
||||
http://mihai.bazon.net/blog
|
||||
|
||||
Distributed under the BSD license:
|
||||
|
||||
Copyright 2012 (c) Mihai Bazon <mihai.bazon@gmail.com>
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions
|
||||
are met:
|
||||
|
||||
* Redistributions of source code must retain the above
|
||||
copyright notice, this list of conditions and the following
|
||||
disclaimer.
|
||||
|
||||
* Redistributions in binary form must reproduce the above
|
||||
copyright notice, this list of conditions and the following
|
||||
disclaimer in the documentation and/or other materials
|
||||
provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER “AS IS” AND ANY
|
||||
EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
||||
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER BE
|
||||
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
|
||||
OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
||||
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
|
||||
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
|
||||
TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
|
||||
THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
|
||||
SUCH DAMAGE.
|
||||
|
||||
***********************************************************************/
|
||||
|
||||
import {
|
||||
AST_Array,
|
||||
AST_Assign,
|
||||
AST_Block,
|
||||
AST_Call,
|
||||
AST_Catch,
|
||||
AST_Class,
|
||||
AST_ClassExpression,
|
||||
AST_DefaultAssign,
|
||||
AST_DefClass,
|
||||
AST_Defun,
|
||||
AST_Destructuring,
|
||||
AST_EmptyStatement,
|
||||
AST_Expansion,
|
||||
AST_Export,
|
||||
AST_Function,
|
||||
AST_Infinity,
|
||||
AST_IterationStatement,
|
||||
AST_Lambda,
|
||||
AST_NaN,
|
||||
AST_Node,
|
||||
AST_Number,
|
||||
AST_Object,
|
||||
AST_ObjectKeyVal,
|
||||
AST_PropAccess,
|
||||
AST_Return,
|
||||
AST_Scope,
|
||||
AST_SimpleStatement,
|
||||
AST_Statement,
|
||||
AST_SymbolDefun,
|
||||
AST_SymbolFunarg,
|
||||
AST_SymbolLambda,
|
||||
AST_SymbolRef,
|
||||
AST_SymbolVar,
|
||||
AST_This,
|
||||
AST_Toplevel,
|
||||
AST_UnaryPrefix,
|
||||
AST_Undefined,
|
||||
AST_Var,
|
||||
AST_VarDef,
|
||||
AST_With,
|
||||
|
||||
walk,
|
||||
|
||||
_INLINE,
|
||||
_NOINLINE,
|
||||
_PURE
|
||||
} from "../ast.js";
|
||||
import { make_node, has_annotation } from "../utils/index.js";
|
||||
import "../size.js";
|
||||
|
||||
import "./evaluate.js";
|
||||
import "./drop-side-effect-free.js";
|
||||
import "./reduce-vars.js";
|
||||
import { is_undeclared_ref, is_lhs } from "./inference.js";
|
||||
import {
|
||||
SQUEEZED,
|
||||
INLINED,
|
||||
UNUSED,
|
||||
|
||||
has_flag,
|
||||
set_flag,
|
||||
} from "./compressor-flags.js";
|
||||
import {
|
||||
make_sequence,
|
||||
best_of,
|
||||
make_node_from_constant,
|
||||
identifier_atom,
|
||||
is_empty,
|
||||
is_func_expr,
|
||||
is_iife_call,
|
||||
is_reachable,
|
||||
is_recursive_ref,
|
||||
retain_top_func,
|
||||
} from "./common.js";
|
||||
|
||||
|
||||
function within_array_or_object_literal(compressor) {
|
||||
var node, level = 0;
|
||||
while (node = compressor.parent(level++)) {
|
||||
if (node instanceof AST_Statement) return false;
|
||||
if (node instanceof AST_Array
|
||||
|| node instanceof AST_ObjectKeyVal
|
||||
|| node instanceof AST_Object) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
function scope_encloses_variables_in_this_scope(scope, pulled_scope) {
|
||||
for (const enclosed of pulled_scope.enclosed) {
|
||||
if (pulled_scope.variables.has(enclosed.name)) {
|
||||
continue;
|
||||
}
|
||||
const looked_up = scope.find_variable(enclosed.name);
|
||||
if (looked_up) {
|
||||
if (looked_up === enclosed) continue;
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
export function inline_into_symbolref(self, compressor) {
|
||||
if (
|
||||
!compressor.option("ie8")
|
||||
&& is_undeclared_ref(self)
|
||||
&& !compressor.find_parent(AST_With)
|
||||
) {
|
||||
switch (self.name) {
|
||||
case "undefined":
|
||||
return make_node(AST_Undefined, self).optimize(compressor);
|
||||
case "NaN":
|
||||
return make_node(AST_NaN, self).optimize(compressor);
|
||||
case "Infinity":
|
||||
return make_node(AST_Infinity, self).optimize(compressor);
|
||||
}
|
||||
}
|
||||
|
||||
const parent = compressor.parent();
|
||||
if (compressor.option("reduce_vars") && is_lhs(self, parent) !== self) {
|
||||
const def = self.definition();
|
||||
const nearest_scope = compressor.find_scope();
|
||||
if (compressor.top_retain && def.global && compressor.top_retain(def)) {
|
||||
def.fixed = false;
|
||||
def.single_use = false;
|
||||
return self;
|
||||
}
|
||||
|
||||
let fixed = self.fixed_value();
|
||||
let single_use = def.single_use
|
||||
&& !(parent instanceof AST_Call
|
||||
&& (parent.is_callee_pure(compressor))
|
||||
|| has_annotation(parent, _NOINLINE))
|
||||
&& !(parent instanceof AST_Export
|
||||
&& fixed instanceof AST_Lambda
|
||||
&& fixed.name);
|
||||
|
||||
if (single_use && fixed instanceof AST_Node) {
|
||||
single_use =
|
||||
!fixed.has_side_effects(compressor)
|
||||
&& !fixed.may_throw(compressor);
|
||||
}
|
||||
|
||||
if (single_use && (fixed instanceof AST_Lambda || fixed instanceof AST_Class)) {
|
||||
if (retain_top_func(fixed, compressor)) {
|
||||
single_use = false;
|
||||
} else if (def.scope !== self.scope
|
||||
&& (def.escaped == 1
|
||||
|| has_flag(fixed, INLINED)
|
||||
|| within_array_or_object_literal(compressor)
|
||||
|| !compressor.option("reduce_funcs"))) {
|
||||
single_use = false;
|
||||
} else if (is_recursive_ref(compressor, def)) {
|
||||
single_use = false;
|
||||
} else if (def.scope !== self.scope || def.orig[0] instanceof AST_SymbolFunarg) {
|
||||
single_use = fixed.is_constant_expression(self.scope);
|
||||
if (single_use == "f") {
|
||||
var scope = self.scope;
|
||||
do {
|
||||
if (scope instanceof AST_Defun || is_func_expr(scope)) {
|
||||
set_flag(scope, INLINED);
|
||||
}
|
||||
} while (scope = scope.parent_scope);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (single_use && fixed instanceof AST_Lambda) {
|
||||
single_use =
|
||||
def.scope === self.scope
|
||||
&& !scope_encloses_variables_in_this_scope(nearest_scope, fixed)
|
||||
|| parent instanceof AST_Call
|
||||
&& parent.expression === self
|
||||
&& !scope_encloses_variables_in_this_scope(nearest_scope, fixed)
|
||||
&& !(fixed.name && fixed.name.definition().recursive_refs > 0);
|
||||
}
|
||||
|
||||
if (single_use && fixed) {
|
||||
if (fixed instanceof AST_DefClass) {
|
||||
set_flag(fixed, SQUEEZED);
|
||||
fixed = make_node(AST_ClassExpression, fixed, fixed);
|
||||
}
|
||||
if (fixed instanceof AST_Defun) {
|
||||
set_flag(fixed, SQUEEZED);
|
||||
fixed = make_node(AST_Function, fixed, fixed);
|
||||
}
|
||||
if (def.recursive_refs > 0 && fixed.name instanceof AST_SymbolDefun) {
|
||||
const defun_def = fixed.name.definition();
|
||||
let lambda_def = fixed.variables.get(fixed.name.name);
|
||||
let name = lambda_def && lambda_def.orig[0];
|
||||
if (!(name instanceof AST_SymbolLambda)) {
|
||||
name = make_node(AST_SymbolLambda, fixed.name, fixed.name);
|
||||
name.scope = fixed;
|
||||
fixed.name = name;
|
||||
lambda_def = fixed.def_function(name);
|
||||
}
|
||||
walk(fixed, node => {
|
||||
if (node instanceof AST_SymbolRef && node.definition() === defun_def) {
|
||||
node.thedef = lambda_def;
|
||||
lambda_def.references.push(node);
|
||||
}
|
||||
});
|
||||
}
|
||||
if (
|
||||
(fixed instanceof AST_Lambda || fixed instanceof AST_Class)
|
||||
&& fixed.parent_scope !== nearest_scope
|
||||
) {
|
||||
fixed = fixed.clone(true, compressor.get_toplevel());
|
||||
|
||||
nearest_scope.add_child_scope(fixed);
|
||||
}
|
||||
return fixed.optimize(compressor);
|
||||
}
|
||||
|
||||
// multiple uses
|
||||
if (fixed) {
|
||||
let replace;
|
||||
|
||||
if (fixed instanceof AST_This) {
|
||||
if (!(def.orig[0] instanceof AST_SymbolFunarg)
|
||||
&& def.references.every((ref) =>
|
||||
def.scope === ref.scope
|
||||
)) {
|
||||
replace = fixed;
|
||||
}
|
||||
} else {
|
||||
var ev = fixed.evaluate(compressor);
|
||||
if (
|
||||
ev !== fixed
|
||||
&& (compressor.option("unsafe_regexp") || !(ev instanceof RegExp))
|
||||
) {
|
||||
replace = make_node_from_constant(ev, fixed);
|
||||
}
|
||||
}
|
||||
|
||||
if (replace) {
|
||||
const name_length = self.size(compressor);
|
||||
const replace_size = replace.size(compressor);
|
||||
|
||||
let overhead = 0;
|
||||
if (compressor.option("unused") && !compressor.exposed(def)) {
|
||||
overhead =
|
||||
(name_length + 2 + replace_size) /
|
||||
(def.references.length - def.assignments);
|
||||
}
|
||||
|
||||
if (replace_size <= name_length + overhead) {
|
||||
return replace;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return self;
|
||||
}
|
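// A minimal sketch of the size heuristic at the end of inline_into_symbolref:
//
//   // name_length  = printed size of the reference itself
//   // replace_size = printed size of the folded constant
//   // overhead     = (name_length + 2 + replace_size) / (references - assignments),
//   //                i.e. the per-use share of the declaration that `unused`
//   //                could remove if every reference were replaced
//   // The constant is substituted only when replace_size <= name_length + overhead,
//   // so tiny values like `1` or `!0` inline freely while long strings rarely do.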
||||
|
||||
export function inline_into_call(self, fn, compressor) {
|
||||
var exp = self.expression;
|
||||
var simple_args = self.args.every((arg) => !(arg instanceof AST_Expansion));
|
||||
|
||||
if (compressor.option("reduce_vars")
|
||||
&& fn instanceof AST_SymbolRef
|
||||
&& !has_annotation(self, _NOINLINE)
|
||||
) {
|
||||
const fixed = fn.fixed_value();
|
||||
if (!retain_top_func(fixed, compressor)) {
|
||||
fn = fixed;
|
||||
}
|
||||
}
|
||||
|
||||
var is_func = fn instanceof AST_Lambda;
|
||||
|
||||
var stat = is_func && fn.body[0];
|
||||
var is_regular_func = is_func && !fn.is_generator && !fn.async;
|
||||
var can_inline = is_regular_func && compressor.option("inline") && !self.is_callee_pure(compressor);
|
||||
if (can_inline && stat instanceof AST_Return) {
|
||||
let returned = stat.value;
|
||||
if (!returned || returned.is_constant_expression()) {
|
||||
if (returned) {
|
||||
returned = returned.clone(true);
|
||||
} else {
|
||||
returned = make_node(AST_Undefined, self);
|
||||
}
|
||||
const args = self.args.concat(returned);
|
||||
return make_sequence(self, args).optimize(compressor);
|
||||
}
|
||||
|
||||
// optimize identity function
|
||||
if (
|
||||
fn.argnames.length === 1
|
||||
&& (fn.argnames[0] instanceof AST_SymbolFunarg)
|
||||
&& self.args.length < 2
|
||||
&& returned instanceof AST_SymbolRef
|
||||
&& returned.name === fn.argnames[0].name
|
||||
) {
|
||||
const replacement =
|
||||
(self.args[0] || make_node(AST_Undefined)).optimize(compressor);
|
||||
|
||||
let parent;
|
||||
if (
|
||||
replacement instanceof AST_PropAccess
|
||||
&& (parent = compressor.parent()) instanceof AST_Call
|
||||
&& parent.expression === self
|
||||
) {
|
||||
// identity function was being used to remove `this`, like in
|
||||
//
|
||||
// id(bag.no_this)(...)
|
||||
//
|
||||
// Replace with a larger but more efficient (0, bag.no_this) wrapper.
|
||||
|
||||
return make_sequence(self, [
|
||||
make_node(AST_Number, self, { value: 0 }),
|
||||
replacement
|
||||
]);
|
||||
}
|
||||
// replace call with first argument or undefined if none passed
|
||||
return replacement;
|
||||
}
|
||||
}
|
||||
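// A minimal sketch of the identity-function special case above (names are
// illustrative):
//
//   // function id(x) { return x; }
//   // id(expr)              -> expr
//   // id()                  -> undefined
//   // id(bag.method)(args)  -> (0, bag.method)(args), preserving the fact that
//   //                          the identity wrapper already detached `this`
//   //                          from `bag`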
|
||||
if (can_inline) {
|
||||
var scope, in_loop, level = -1;
|
||||
let def;
|
||||
let returned_value;
|
||||
let nearest_scope;
|
||||
if (simple_args
|
||||
&& !fn.uses_arguments
|
||||
&& !(compressor.parent() instanceof AST_Class)
|
||||
&& !(fn.name && fn instanceof AST_Function)
|
||||
&& (returned_value = can_flatten_body(stat))
|
||||
&& (exp === fn
|
||||
|| has_annotation(self, _INLINE)
|
||||
|| compressor.option("unused")
|
||||
&& (def = exp.definition()).references.length == 1
|
||||
&& !is_recursive_ref(compressor, def)
|
||||
&& fn.is_constant_expression(exp.scope))
|
||||
&& !has_annotation(self, _PURE | _NOINLINE)
|
||||
&& !fn.contains_this()
|
||||
&& can_inject_symbols()
|
||||
&& (nearest_scope = compressor.find_scope())
|
||||
&& !scope_encloses_variables_in_this_scope(nearest_scope, fn)
|
||||
&& !(function in_default_assign() {
|
||||
// Function parameters get their own scope, and a `var` declared in the
|
||||
// function body cannot be merged into it, so we simply don't inline
|
||||
// into a DefaultAssign.
|
||||
let i = 0;
|
||||
let p;
|
||||
while ((p = compressor.parent(i++))) {
|
||||
if (p instanceof AST_DefaultAssign) return true;
|
||||
if (p instanceof AST_Block) break;
|
||||
}
|
||||
return false;
|
||||
})()
|
||||
&& !(scope instanceof AST_Class)
|
||||
) {
|
||||
set_flag(fn, SQUEEZED);
|
||||
nearest_scope.add_child_scope(fn);
|
||||
return make_sequence(self, flatten_fn(returned_value)).optimize(compressor);
|
||||
}
|
||||
}
|
||||
|
||||
if (can_inline && has_annotation(self, _INLINE)) {
|
||||
set_flag(fn, SQUEEZED);
|
||||
fn = make_node(fn.CTOR === AST_Defun ? AST_Function : fn.CTOR, fn, fn);
|
||||
fn = fn.clone(true);
|
||||
fn.figure_out_scope({}, {
|
||||
parent_scope: compressor.find_scope(),
|
||||
toplevel: compressor.get_toplevel()
|
||||
});
|
||||
|
||||
return make_node(AST_Call, self, {
|
||||
expression: fn,
|
||||
args: self.args,
|
||||
}).optimize(compressor);
|
||||
}
|
||||
|
||||
const can_drop_this_call = is_regular_func && compressor.option("side_effects") && fn.body.every(is_empty);
|
||||
if (can_drop_this_call) {
|
||||
var args = self.args.concat(make_node(AST_Undefined, self));
|
||||
return make_sequence(self, args).optimize(compressor);
|
||||
}
|
||||
|
||||
if (compressor.option("negate_iife")
|
||||
&& compressor.parent() instanceof AST_SimpleStatement
|
||||
&& is_iife_call(self)) {
|
||||
return self.negate(compressor, true);
|
||||
}
|
||||
|
||||
var ev = self.evaluate(compressor);
|
||||
if (ev !== self) {
|
||||
ev = make_node_from_constant(ev, self).optimize(compressor);
|
||||
return best_of(compressor, ev, self);
|
||||
}
|
||||
|
||||
return self;
|
||||
|
||||
function return_value(stat) {
|
||||
if (!stat) return make_node(AST_Undefined, self);
|
||||
if (stat instanceof AST_Return) {
|
||||
if (!stat.value) return make_node(AST_Undefined, self);
|
||||
return stat.value.clone(true);
|
||||
}
|
||||
if (stat instanceof AST_SimpleStatement) {
|
||||
return make_node(AST_UnaryPrefix, stat, {
|
||||
operator: "void",
|
||||
expression: stat.body.clone(true)
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
function can_flatten_body(stat) {
|
||||
var body = fn.body;
|
||||
var len = body.length;
|
||||
if (compressor.option("inline") < 3) {
|
||||
return len == 1 && return_value(stat);
|
||||
}
|
||||
stat = null;
|
||||
for (var i = 0; i < len; i++) {
|
||||
var line = body[i];
|
||||
if (line instanceof AST_Var) {
|
||||
if (stat && !line.definitions.every((var_def) =>
|
||||
!var_def.value
|
||||
)) {
|
||||
return false;
|
||||
}
|
||||
} else if (stat) {
|
||||
return false;
|
||||
} else if (!(line instanceof AST_EmptyStatement)) {
|
||||
stat = line;
|
||||
}
|
||||
}
|
||||
return return_value(stat);
|
||||
}
|
||||
|
||||
function can_inject_args(block_scoped, safe_to_inject) {
|
||||
for (var i = 0, len = fn.argnames.length; i < len; i++) {
|
||||
var arg = fn.argnames[i];
|
||||
if (arg instanceof AST_DefaultAssign) {
|
||||
if (has_flag(arg.left, UNUSED)) continue;
|
||||
return false;
|
||||
}
|
||||
if (arg instanceof AST_Destructuring) return false;
|
||||
if (arg instanceof AST_Expansion) {
|
||||
if (has_flag(arg.expression, UNUSED)) continue;
|
||||
return false;
|
||||
}
|
||||
if (has_flag(arg, UNUSED)) continue;
|
||||
if (!safe_to_inject
|
||||
|| block_scoped.has(arg.name)
|
||||
|| identifier_atom.has(arg.name)
|
||||
|| scope.conflicting_def(arg.name)) {
|
||||
return false;
|
||||
}
|
||||
if (in_loop) in_loop.push(arg.definition());
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
function can_inject_vars(block_scoped, safe_to_inject) {
|
||||
var len = fn.body.length;
|
||||
for (var i = 0; i < len; i++) {
|
||||
var stat = fn.body[i];
|
||||
if (!(stat instanceof AST_Var)) continue;
|
||||
if (!safe_to_inject) return false;
|
||||
for (var j = stat.definitions.length; --j >= 0;) {
|
||||
var name = stat.definitions[j].name;
|
||||
if (name instanceof AST_Destructuring
|
||||
|| block_scoped.has(name.name)
|
||||
|| identifier_atom.has(name.name)
|
||||
|| scope.conflicting_def(name.name)) {
|
||||
return false;
|
||||
}
|
||||
if (in_loop) in_loop.push(name.definition());
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
function can_inject_symbols() {
|
||||
var block_scoped = new Set();
|
||||
do {
|
||||
scope = compressor.parent(++level);
|
||||
if (scope.is_block_scope() && scope.block_scope) {
|
||||
// TODO this is sometimes undefined during compression.
|
||||
// But it should always have a value!
|
||||
scope.block_scope.variables.forEach(function (variable) {
|
||||
block_scoped.add(variable.name);
|
||||
});
|
||||
}
|
||||
if (scope instanceof AST_Catch) {
|
||||
// TODO can we delete? AST_Catch is a block scope.
|
||||
if (scope.argname) {
|
||||
block_scoped.add(scope.argname.name);
|
||||
}
|
||||
} else if (scope instanceof AST_IterationStatement) {
|
||||
in_loop = [];
|
||||
} else if (scope instanceof AST_SymbolRef) {
|
||||
if (scope.fixed_value() instanceof AST_Scope) return false;
|
||||
}
|
||||
} while (!(scope instanceof AST_Scope));
|
||||
|
||||
var safe_to_inject = !(scope instanceof AST_Toplevel) || compressor.toplevel.vars;
|
||||
var inline = compressor.option("inline");
|
||||
if (!can_inject_vars(block_scoped, inline >= 3 && safe_to_inject)) return false;
|
||||
if (!can_inject_args(block_scoped, inline >= 2 && safe_to_inject)) return false;
|
||||
return !in_loop || in_loop.length == 0 || !is_reachable(fn, in_loop);
|
||||
}
|
||||
|
||||
function append_var(decls, expressions, name, value) {
|
||||
var def = name.definition();
|
||||
|
||||
// Name already exists, only when a function argument had the same name
|
||||
const already_appended = scope.variables.has(name.name);
|
||||
if (!already_appended) {
|
||||
scope.variables.set(name.name, def);
|
||||
scope.enclosed.push(def);
|
||||
decls.push(make_node(AST_VarDef, name, {
|
||||
name: name,
|
||||
value: null
|
||||
}));
|
||||
}
|
||||
|
||||
var sym = make_node(AST_SymbolRef, name, name);
|
||||
def.references.push(sym);
|
||||
if (value) expressions.push(make_node(AST_Assign, self, {
|
||||
operator: "=",
|
||||
logical: false,
|
||||
left: sym,
|
||||
right: value.clone()
|
||||
}));
|
||||
}
|
||||
|
||||
function flatten_args(decls, expressions) {
|
||||
var len = fn.argnames.length;
|
||||
for (var i = self.args.length; --i >= len;) {
|
||||
expressions.push(self.args[i]);
|
||||
}
|
||||
for (i = len; --i >= 0;) {
|
||||
var name = fn.argnames[i];
|
||||
var value = self.args[i];
|
||||
if (has_flag(name, UNUSED) || !name.name || scope.conflicting_def(name.name)) {
|
||||
if (value) expressions.push(value);
|
||||
} else {
|
||||
var symbol = make_node(AST_SymbolVar, name, name);
|
||||
name.definition().orig.push(symbol);
|
||||
if (!value && in_loop) value = make_node(AST_Undefined, self);
|
||||
append_var(decls, expressions, symbol, value);
|
||||
}
|
||||
}
|
||||
decls.reverse();
|
||||
expressions.reverse();
|
||||
}
|
||||
|
||||
function flatten_vars(decls, expressions) {
|
||||
var pos = expressions.length;
|
||||
for (var i = 0, lines = fn.body.length; i < lines; i++) {
|
||||
var stat = fn.body[i];
|
||||
if (!(stat instanceof AST_Var)) continue;
|
||||
for (var j = 0, defs = stat.definitions.length; j < defs; j++) {
|
||||
var var_def = stat.definitions[j];
|
||||
var name = var_def.name;
|
||||
append_var(decls, expressions, name, var_def.value);
|
||||
if (in_loop && fn.argnames.every((argname) =>
|
||||
argname.name != name.name
|
||||
)) {
|
||||
var def = fn.variables.get(name.name);
|
||||
var sym = make_node(AST_SymbolRef, name, name);
|
||||
def.references.push(sym);
|
||||
expressions.splice(pos++, 0, make_node(AST_Assign, var_def, {
|
||||
operator: "=",
|
||||
logical: false,
|
||||
left: sym,
|
||||
right: make_node(AST_Undefined, name)
|
||||
}));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function flatten_fn(returned_value) {
|
||||
var decls = [];
|
||||
var expressions = [];
|
||||
flatten_args(decls, expressions);
|
||||
flatten_vars(decls, expressions);
|
||||
expressions.push(returned_value);
|
||||
|
||||
if (decls.length) {
|
||||
const i = scope.body.indexOf(compressor.parent(level - 1)) + 1;
|
||||
scope.body.splice(i, 0, make_node(AST_Var, fn, {
|
||||
definitions: decls
|
||||
}));
|
||||
}
|
||||
|
||||
return expressions.map(exp => exp.clone(true));
|
||||
}
|
||||
}
|
184 node_modules/terser/lib/compress/native-objects.js generated vendored Normal file
@@ -0,0 +1,184 @@
/***********************************************************************

  A JavaScript tokenizer / parser / beautifier / compressor.
  https://github.com/mishoo/UglifyJS2

  -------------------------------- (C) ---------------------------------

                           Author: Mihai Bazon
                         <mihai.bazon@gmail.com>
                       http://mihai.bazon.net/blog

  Distributed under the BSD license:

    Copyright 2012 (c) Mihai Bazon <mihai.bazon@gmail.com>

    Redistribution and use in source and binary forms, with or without
    modification, are permitted provided that the following conditions
    are met:

        * Redistributions of source code must retain the above
          copyright notice, this list of conditions and the following
          disclaimer.

        * Redistributions in binary form must reproduce the above
          copyright notice, this list of conditions and the following
          disclaimer in the documentation and/or other materials
          provided with the distribution.

    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER “AS IS” AND ANY
    EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
    IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
    PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER BE
    LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
    OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
    PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
    PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
    THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
    TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
    THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
    SUCH DAMAGE.

 ***********************************************************************/

import { makePredicate } from "../utils/index.js";

// Lists of native methods, useful for `unsafe` option which assumes they exist.
// Note: Lots of methods and functions are missing here, in case they aren't pure
// or not available in all JS environments.

function make_nested_lookup(obj) {
    const out = new Map();
    for (var key of Object.keys(obj)) {
        out.set(key, makePredicate(obj[key]));
    }

    const does_have = (global_name, fname) => {
        const inner_map = out.get(global_name);
        return inner_map != null && inner_map.has(fname);
    };
    return does_have;
}

// Objects which are safe to access without throwing or causing a side effect.
// Usually we'd check the `unsafe` option first but these are way too common for that
export const pure_prop_access_globals = new Set([
    "Number",
    "String",
    "Array",
    "Object",
    "Function",
    "Promise",
]);

const object_methods = [
    "constructor",
    "toString",
    "valueOf",
];

export const is_pure_native_method = make_nested_lookup({
    Array: [
        "indexOf",
        "join",
        "lastIndexOf",
        "slice",
        ...object_methods,
    ],
    Boolean: object_methods,
    Function: object_methods,
    Number: [
        "toExponential",
        "toFixed",
        "toPrecision",
        ...object_methods,
    ],
    Object: object_methods,
    RegExp: [
        "test",
        ...object_methods,
    ],
    String: [
        "charAt",
        "charCodeAt",
        "concat",
        "indexOf",
        "italics",
        "lastIndexOf",
        "match",
        "replace",
        "search",
        "slice",
        "split",
        "substr",
        "substring",
        "toLowerCase",
        "toUpperCase",
        "trim",
        ...object_methods,
    ],
});

export const is_pure_native_fn = make_nested_lookup({
    Array: [
        "isArray",
    ],
    Math: [
        "abs",
        "acos",
        "asin",
        "atan",
        "ceil",
        "cos",
        "exp",
        "floor",
        "log",
        "round",
        "sin",
        "sqrt",
        "tan",
        "atan2",
        "pow",
        "max",
        "min",
    ],
    Number: [
        "isFinite",
        "isNaN",
    ],
    Object: [
        "create",
        "getOwnPropertyDescriptor",
        "getOwnPropertyNames",
        "getPrototypeOf",
        "isExtensible",
        "isFrozen",
        "isSealed",
        "hasOwn",
        "keys",
    ],
    String: [
        "fromCharCode",
    ],
});

// Known numeric values which come with JS environments
export const is_pure_native_value = make_nested_lookup({
    Math: [
        "E",
        "LN10",
        "LN2",
        "LOG2E",
        "LOG10E",
        "PI",
        "SQRT1_2",
        "SQRT2",
    ],
    Number: [
        "MAX_VALUE",
        "MIN_VALUE",
        "NaN",
        "NEGATIVE_INFINITY",
        "POSITIVE_INFINITY",
    ],
});
675 node_modules/terser/lib/compress/reduce-vars.js generated vendored Normal file
@@ -0,0 +1,675 @@
|
||||
/***********************************************************************
|
||||
|
||||
A JavaScript tokenizer / parser / beautifier / compressor.
|
||||
https://github.com/mishoo/UglifyJS2
|
||||
|
||||
-------------------------------- (C) ---------------------------------
|
||||
|
||||
Author: Mihai Bazon
|
||||
<mihai.bazon@gmail.com>
|
||||
http://mihai.bazon.net/blog
|
||||
|
||||
Distributed under the BSD license:
|
||||
|
||||
Copyright 2012 (c) Mihai Bazon <mihai.bazon@gmail.com>
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions
|
||||
are met:
|
||||
|
||||
* Redistributions of source code must retain the above
|
||||
copyright notice, this list of conditions and the following
|
||||
disclaimer.
|
||||
|
||||
* Redistributions in binary form must reproduce the above
|
||||
copyright notice, this list of conditions and the following
|
||||
disclaimer in the documentation and/or other materials
|
||||
provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER “AS IS” AND ANY
|
||||
EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
||||
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER BE
|
||||
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
|
||||
OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
||||
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
|
||||
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
|
||||
TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
|
||||
THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
|
||||
SUCH DAMAGE.
|
||||
|
||||
***********************************************************************/
|
||||
|
||||
import {
|
||||
AST_Accessor,
|
||||
AST_Array,
|
||||
AST_Assign,
|
||||
AST_Await,
|
||||
AST_Binary,
|
||||
AST_Block,
|
||||
AST_Call,
|
||||
AST_Case,
|
||||
AST_Chain,
|
||||
AST_Class,
|
||||
AST_ClassExpression,
|
||||
AST_Conditional,
|
||||
AST_Default,
|
||||
AST_Defun,
|
||||
AST_Destructuring,
|
||||
AST_Do,
|
||||
AST_Exit,
|
||||
AST_Expansion,
|
||||
AST_For,
|
||||
AST_ForIn,
|
||||
AST_If,
|
||||
AST_LabeledStatement,
|
||||
AST_Lambda,
|
||||
AST_New,
|
||||
AST_Node,
|
||||
AST_Number,
|
||||
AST_ObjectKeyVal,
|
||||
AST_PropAccess,
|
||||
AST_Sequence,
|
||||
AST_SimpleStatement,
|
||||
AST_Symbol,
|
||||
AST_SymbolCatch,
|
||||
AST_SymbolConst,
|
||||
AST_SymbolDefun,
|
||||
AST_SymbolFunarg,
|
||||
AST_SymbolLambda,
|
||||
AST_SymbolRef,
|
||||
AST_This,
|
||||
AST_Toplevel,
|
||||
AST_Try,
|
||||
AST_Unary,
|
||||
AST_UnaryPrefix,
|
||||
AST_Undefined,
|
||||
AST_VarDef,
|
||||
AST_While,
|
||||
AST_Yield,
|
||||
|
||||
walk,
|
||||
walk_body,
|
||||
|
||||
_INLINE,
|
||||
_NOINLINE,
|
||||
_PURE
|
||||
} from "../ast.js";
|
||||
import { HOP, make_node, noop } from "../utils/index.js";
|
||||
|
||||
import { lazy_op, is_modified } from "./inference.js";
|
||||
import { INLINED, clear_flag } from "./compressor-flags.js";
|
||||
import { read_property, has_break_or_continue, is_recursive_ref } from "./common.js";
|
||||
|
||||
// Define the method AST_Node#reduce_vars, which goes through the AST in
|
||||
// execution order to perform basic flow analysis
|
||||
|
||||
function def_reduce_vars(node, func) {
|
||||
node.DEFMETHOD("reduce_vars", func);
|
||||
}
|
||||
|
||||
def_reduce_vars(AST_Node, noop);
|
||||
|
||||
function reset_def(compressor, def) {
|
||||
def.assignments = 0;
|
||||
def.chained = false;
|
||||
def.direct_access = false;
|
||||
def.escaped = 0;
|
||||
def.recursive_refs = 0;
|
||||
def.references = [];
|
||||
def.single_use = undefined;
|
||||
if (def.scope.pinned()) {
|
||||
def.fixed = false;
|
||||
} else if (def.orig[0] instanceof AST_SymbolConst || !compressor.exposed(def)) {
|
||||
def.fixed = def.init;
|
||||
} else {
|
||||
def.fixed = false;
|
||||
}
|
||||
}
|
||||
|
||||
function reset_variables(tw, compressor, node) {
|
||||
node.variables.forEach(function(def) {
|
||||
reset_def(compressor, def);
|
||||
if (def.fixed === null) {
|
||||
tw.defs_to_safe_ids.set(def.id, tw.safe_ids);
|
||||
mark(tw, def, true);
|
||||
} else if (def.fixed) {
|
||||
tw.loop_ids.set(def.id, tw.in_loop);
|
||||
mark(tw, def, true);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
function reset_block_variables(compressor, node) {
|
||||
if (node.block_scope) node.block_scope.variables.forEach((def) => {
|
||||
reset_def(compressor, def);
|
||||
});
|
||||
}
|
||||
|
||||
function push(tw) {
|
||||
tw.safe_ids = Object.create(tw.safe_ids);
|
||||
}
|
||||
|
||||
function pop(tw) {
|
||||
tw.safe_ids = Object.getPrototypeOf(tw.safe_ids);
|
||||
}
|
||||
|
||||
function mark(tw, def, safe) {
|
||||
tw.safe_ids[def.id] = safe;
|
||||
}
|
||||
|
||||
function safe_to_read(tw, def) {
|
||||
if (def.single_use == "m") return false;
|
||||
if (tw.safe_ids[def.id]) {
|
||||
if (def.fixed == null) {
|
||||
var orig = def.orig[0];
|
||||
if (orig instanceof AST_SymbolFunarg || orig.name == "arguments") return false;
|
||||
def.fixed = make_node(AST_Undefined, orig);
|
||||
}
|
||||
return true;
|
||||
}
|
||||
return def.fixed instanceof AST_Defun;
|
||||
}
|
||||
|
||||
function safe_to_assign(tw, def, scope, value) {
|
||||
if (def.fixed === undefined) return true;
|
||||
let def_safe_ids;
|
||||
if (def.fixed === null
|
||||
&& (def_safe_ids = tw.defs_to_safe_ids.get(def.id))
|
||||
) {
|
||||
def_safe_ids[def.id] = false;
|
||||
tw.defs_to_safe_ids.delete(def.id);
|
||||
return true;
|
||||
}
|
||||
if (!HOP(tw.safe_ids, def.id)) return false;
|
||||
if (!safe_to_read(tw, def)) return false;
|
||||
if (def.fixed === false) return false;
|
||||
if (def.fixed != null && (!value || def.references.length > def.assignments)) return false;
|
||||
if (def.fixed instanceof AST_Defun) {
|
||||
return value instanceof AST_Node && def.fixed.parent_scope === scope;
|
||||
}
|
||||
return def.orig.every((sym) => {
|
||||
return !(sym instanceof AST_SymbolConst
|
||||
|| sym instanceof AST_SymbolDefun
|
||||
|| sym instanceof AST_SymbolLambda);
|
||||
});
|
||||
}
|
||||
|
||||
function ref_once(tw, compressor, def) {
|
||||
return compressor.option("unused")
|
||||
&& !def.scope.pinned()
|
||||
&& def.references.length - def.recursive_refs == 1
|
||||
&& tw.loop_ids.get(def.id) === tw.in_loop;
|
||||
}
|
||||
|
||||
function is_immutable(value) {
|
||||
if (!value) return false;
|
||||
return value.is_constant()
|
||||
|| value instanceof AST_Lambda
|
||||
|| value instanceof AST_This;
|
||||
}
|
||||
|
||||
// A definition "escapes" when its value can leave the point of use.
|
||||
// Example: `a = b || c`
|
||||
// In this example, "b" and "c" are escaping, because they're going into "a"
|
||||
//
|
||||
// def.escaped is != 0 when it escapes.
|
||||
//
|
||||
// When greater than 1, it means that N chained properties will be read off
|
||||
// of that def before an escape occurs. This is useful for evaluating
|
||||
// property accesses, where you need to know when to stop.
|
||||
function mark_escaped(tw, d, scope, node, value, level = 0, depth = 1) {
|
||||
var parent = tw.parent(level);
|
||||
if (value) {
|
||||
if (value.is_constant()) return;
|
||||
if (value instanceof AST_ClassExpression) return;
|
||||
}
|
||||
|
||||
if (
|
||||
parent instanceof AST_Assign && (parent.operator === "=" || parent.logical) && node === parent.right
|
||||
|| parent instanceof AST_Call && (node !== parent.expression || parent instanceof AST_New)
|
||||
|| parent instanceof AST_Exit && node === parent.value && node.scope !== d.scope
|
||||
|| parent instanceof AST_VarDef && node === parent.value
|
||||
|| parent instanceof AST_Yield && node === parent.value && node.scope !== d.scope
|
||||
) {
|
||||
if (depth > 1 && !(value && value.is_constant_expression(scope))) depth = 1;
|
||||
if (!d.escaped || d.escaped > depth) d.escaped = depth;
|
||||
return;
|
||||
} else if (
|
||||
parent instanceof AST_Array
|
||||
|| parent instanceof AST_Await
|
||||
|| parent instanceof AST_Binary && lazy_op.has(parent.operator)
|
||||
|| parent instanceof AST_Conditional && node !== parent.condition
|
||||
|| parent instanceof AST_Expansion
|
||||
|| parent instanceof AST_Sequence && node === parent.tail_node()
|
||||
) {
|
||||
mark_escaped(tw, d, scope, parent, parent, level + 1, depth);
|
||||
} else if (parent instanceof AST_ObjectKeyVal && node === parent.value) {
|
||||
var obj = tw.parent(level + 1);
|
||||
|
||||
mark_escaped(tw, d, scope, obj, obj, level + 2, depth);
|
||||
} else if (parent instanceof AST_PropAccess && node === parent.expression) {
|
||||
value = read_property(value, parent.property);
|
||||
|
||||
mark_escaped(tw, d, scope, parent, value, level + 1, depth + 1);
|
||||
if (value) return;
|
||||
}
|
||||
|
||||
if (level > 0) return;
|
||||
if (parent instanceof AST_Sequence && node !== parent.tail_node()) return;
|
||||
if (parent instanceof AST_SimpleStatement) return;
|
||||
|
||||
d.direct_access = true;
|
||||
}
|
||||
|
||||
const suppress = node => walk(node, node => {
|
||||
if (!(node instanceof AST_Symbol)) return;
|
||||
var d = node.definition();
|
||||
if (!d) return;
|
||||
if (node instanceof AST_SymbolRef) d.references.push(node);
|
||||
d.fixed = false;
|
||||
});
|
||||
|
||||
def_reduce_vars(AST_Accessor, function(tw, descend, compressor) {
|
||||
push(tw);
|
||||
reset_variables(tw, compressor, this);
|
||||
descend();
|
||||
pop(tw);
|
||||
return true;
|
||||
});
|
||||
|
||||
def_reduce_vars(AST_Assign, function(tw, descend, compressor) {
|
||||
var node = this;
|
||||
if (node.left instanceof AST_Destructuring) {
|
||||
suppress(node.left);
|
||||
return;
|
||||
}
|
||||
|
||||
const finish_walk = () => {
|
||||
if (node.logical) {
|
||||
node.left.walk(tw);
|
||||
|
||||
push(tw);
|
||||
node.right.walk(tw);
|
||||
pop(tw);
|
||||
|
||||
return true;
|
||||
}
|
||||
};
|
||||
|
||||
var sym = node.left;
|
||||
if (!(sym instanceof AST_SymbolRef)) return finish_walk();
|
||||
|
||||
var def = sym.definition();
|
||||
var safe = safe_to_assign(tw, def, sym.scope, node.right);
|
||||
def.assignments++;
|
||||
if (!safe) return finish_walk();
|
||||
|
||||
var fixed = def.fixed;
|
||||
if (!fixed && node.operator != "=" && !node.logical) return finish_walk();
|
||||
|
||||
var eq = node.operator == "=";
|
||||
var value = eq ? node.right : node;
|
||||
if (is_modified(compressor, tw, node, value, 0)) return finish_walk();
|
||||
|
||||
def.references.push(sym);
|
||||
|
||||
if (!node.logical) {
|
||||
if (!eq) def.chained = true;
|
||||
|
||||
def.fixed = eq ? function() {
|
||||
return node.right;
|
||||
} : function() {
|
||||
return make_node(AST_Binary, node, {
|
||||
operator: node.operator.slice(0, -1),
|
||||
left: fixed instanceof AST_Node ? fixed : fixed(),
|
||||
right: node.right
|
||||
});
|
||||
};
|
||||
}
|
||||
|
||||
if (node.logical) {
|
||||
mark(tw, def, false);
|
||||
push(tw);
|
||||
node.right.walk(tw);
|
||||
pop(tw);
|
||||
return true;
|
||||
}
|
||||
|
||||
mark(tw, def, false);
|
||||
node.right.walk(tw);
|
||||
mark(tw, def, true);
|
||||
|
||||
mark_escaped(tw, def, sym.scope, node, value, 0, 1);
|
||||
|
||||
return true;
|
||||
});
|
||||
|
||||
def_reduce_vars(AST_Binary, function(tw) {
|
||||
if (!lazy_op.has(this.operator)) return;
|
||||
this.left.walk(tw);
|
||||
push(tw);
|
||||
this.right.walk(tw);
|
||||
pop(tw);
|
||||
return true;
|
||||
});
|
||||
|
||||
def_reduce_vars(AST_Block, function(tw, descend, compressor) {
|
||||
reset_block_variables(compressor, this);
|
||||
});
|
||||
|
||||
def_reduce_vars(AST_Case, function(tw) {
|
||||
push(tw);
|
||||
this.expression.walk(tw);
|
||||
pop(tw);
|
||||
push(tw);
|
||||
walk_body(this, tw);
|
||||
pop(tw);
|
||||
return true;
|
||||
});
|
||||
|
||||
def_reduce_vars(AST_Class, function(tw, descend) {
|
||||
clear_flag(this, INLINED);
|
||||
push(tw);
|
||||
descend();
|
||||
pop(tw);
|
||||
return true;
|
||||
});
|
||||
|
||||
def_reduce_vars(AST_Conditional, function(tw) {
|
||||
this.condition.walk(tw);
|
||||
push(tw);
|
||||
this.consequent.walk(tw);
|
||||
pop(tw);
|
||||
push(tw);
|
||||
this.alternative.walk(tw);
|
||||
pop(tw);
|
||||
return true;
|
||||
});
|
||||
|
||||
def_reduce_vars(AST_Chain, function(tw, descend) {
|
||||
// Chains' conditions apply left-to-right, cumulatively.
|
||||
// If we walk normally we don't go in that order because we would pop before pushing again
|
||||
// Solution: AST_PropAccess and AST_Call push when they are optional, and never pop.
|
||||
// Then we pop everything when they are done being walked.
|
||||
const safe_ids = tw.safe_ids;
|
||||
|
||||
descend();
|
||||
|
||||
// Unroll back to start
|
||||
tw.safe_ids = safe_ids;
|
||||
return true;
|
||||
});
|
||||
|
||||
def_reduce_vars(AST_Call, function (tw) {
|
||||
this.expression.walk(tw);
|
||||
|
||||
if (this.optional) {
|
||||
// Never pop -- it's popped at AST_Chain above
|
||||
push(tw);
|
||||
}
|
||||
|
||||
for (const arg of this.args) arg.walk(tw);
|
||||
|
||||
return true;
|
||||
});
|
||||
|
||||
def_reduce_vars(AST_PropAccess, function (tw) {
|
||||
if (!this.optional) return;
|
||||
|
||||
this.expression.walk(tw);
|
||||
|
||||
// Never pop -- it's popped at AST_Chain above
|
||||
push(tw);
|
||||
|
||||
if (this.property instanceof AST_Node) this.property.walk(tw);
|
||||
|
||||
return true;
|
||||
});
|
||||
|
||||
def_reduce_vars(AST_Default, function(tw, descend) {
|
||||
push(tw);
|
||||
descend();
|
||||
pop(tw);
|
||||
return true;
|
||||
});
|
||||
|
||||
function mark_lambda(tw, descend, compressor) {
|
||||
clear_flag(this, INLINED);
|
||||
push(tw);
|
||||
reset_variables(tw, compressor, this);
|
||||
if (this.uses_arguments) {
|
||||
descend();
|
||||
pop(tw);
|
||||
return;
|
||||
}
|
||||
var iife;
|
||||
if (!this.name
|
||||
&& (iife = tw.parent()) instanceof AST_Call
|
||||
&& iife.expression === this
|
||||
&& !iife.args.some(arg => arg instanceof AST_Expansion)
|
||||
&& this.argnames.every(arg_name => arg_name instanceof AST_Symbol)
|
||||
) {
|
||||
// Virtually turn IIFE parameters into variable definitions:
|
||||
// (function(a,b) {...})(c,d) => (function() {var a=c,b=d; ...})()
|
||||
// So existing transformation rules can work on them.
|
||||
this.argnames.forEach((arg, i) => {
|
||||
if (!arg.definition) return;
|
||||
var d = arg.definition();
|
||||
// Avoid setting fixed when there's more than one origin for a variable value
|
||||
if (d.orig.length > 1) return;
|
||||
if (d.fixed === undefined && (!this.uses_arguments || tw.has_directive("use strict"))) {
|
||||
d.fixed = function() {
|
||||
return iife.args[i] || make_node(AST_Undefined, iife);
|
||||
};
|
||||
tw.loop_ids.set(d.id, tw.in_loop);
|
||||
mark(tw, d, true);
|
||||
} else {
|
||||
d.fixed = false;
|
||||
}
|
||||
});
|
||||
}
|
||||
descend();
|
||||
pop(tw);
|
||||
return true;
|
||||
}
|
||||
|
||||
def_reduce_vars(AST_Lambda, mark_lambda);
|
||||
|
||||
def_reduce_vars(AST_Do, function(tw, descend, compressor) {
|
||||
reset_block_variables(compressor, this);
|
||||
const saved_loop = tw.in_loop;
|
||||
tw.in_loop = this;
|
||||
push(tw);
|
||||
this.body.walk(tw);
|
||||
if (has_break_or_continue(this)) {
|
||||
pop(tw);
|
||||
push(tw);
|
||||
}
|
||||
this.condition.walk(tw);
|
||||
pop(tw);
|
||||
tw.in_loop = saved_loop;
|
||||
return true;
|
||||
});
|
||||
|
||||
def_reduce_vars(AST_For, function(tw, descend, compressor) {
|
||||
reset_block_variables(compressor, this);
|
||||
if (this.init) this.init.walk(tw);
|
||||
const saved_loop = tw.in_loop;
|
||||
tw.in_loop = this;
|
||||
push(tw);
|
||||
if (this.condition) this.condition.walk(tw);
|
||||
this.body.walk(tw);
|
||||
if (this.step) {
|
||||
if (has_break_or_continue(this)) {
|
||||
pop(tw);
|
||||
push(tw);
|
||||
}
|
||||
this.step.walk(tw);
|
||||
}
|
||||
pop(tw);
|
||||
tw.in_loop = saved_loop;
|
||||
return true;
|
||||
});
|
||||
|
||||
def_reduce_vars(AST_ForIn, function(tw, descend, compressor) {
|
||||
reset_block_variables(compressor, this);
|
||||
suppress(this.init);
|
||||
this.object.walk(tw);
|
||||
const saved_loop = tw.in_loop;
|
||||
tw.in_loop = this;
|
||||
push(tw);
|
||||
this.body.walk(tw);
|
||||
pop(tw);
|
||||
tw.in_loop = saved_loop;
|
||||
return true;
|
||||
});
|
||||
|
||||
def_reduce_vars(AST_If, function(tw) {
|
||||
this.condition.walk(tw);
|
||||
push(tw);
|
||||
this.body.walk(tw);
|
||||
pop(tw);
|
||||
if (this.alternative) {
|
||||
push(tw);
|
||||
this.alternative.walk(tw);
|
||||
pop(tw);
|
||||
}
|
||||
return true;
|
||||
});
|
||||
|
||||
def_reduce_vars(AST_LabeledStatement, function(tw) {
|
||||
push(tw);
|
||||
this.body.walk(tw);
|
||||
pop(tw);
|
||||
return true;
|
||||
});
|
||||
|
||||
def_reduce_vars(AST_SymbolCatch, function() {
|
||||
this.definition().fixed = false;
|
||||
});
|
||||
|
||||
def_reduce_vars(AST_SymbolRef, function(tw, descend, compressor) {
|
||||
var d = this.definition();
|
||||
d.references.push(this);
|
||||
if (d.references.length == 1
|
||||
&& !d.fixed
|
||||
&& d.orig[0] instanceof AST_SymbolDefun) {
|
||||
tw.loop_ids.set(d.id, tw.in_loop);
|
||||
}
|
||||
var fixed_value;
|
||||
if (d.fixed === undefined || !safe_to_read(tw, d)) {
|
||||
d.fixed = false;
|
||||
} else if (d.fixed) {
|
||||
fixed_value = this.fixed_value();
|
||||
if (
|
||||
fixed_value instanceof AST_Lambda
|
||||
&& is_recursive_ref(tw, d)
|
||||
) {
|
||||
d.recursive_refs++;
|
||||
} else if (fixed_value
|
||||
&& !compressor.exposed(d)
|
||||
&& ref_once(tw, compressor, d)
|
||||
) {
|
||||
d.single_use =
|
||||
fixed_value instanceof AST_Lambda && !fixed_value.pinned()
|
||||
|| fixed_value instanceof AST_Class
|
||||
|| d.scope === this.scope && fixed_value.is_constant_expression();
|
||||
} else {
|
||||
d.single_use = false;
|
||||
}
|
||||
if (is_modified(compressor, tw, this, fixed_value, 0, is_immutable(fixed_value))) {
|
||||
if (d.single_use) {
|
||||
d.single_use = "m";
|
||||
} else {
|
||||
d.fixed = false;
|
||||
}
|
||||
}
|
||||
}
|
||||
mark_escaped(tw, d, this.scope, this, fixed_value, 0, 1);
|
||||
});
|
||||
|
||||
def_reduce_vars(AST_Toplevel, function(tw, descend, compressor) {
|
||||
this.globals.forEach(function(def) {
|
||||
reset_def(compressor, def);
|
||||
});
|
||||
reset_variables(tw, compressor, this);
|
||||
});
|
||||
|
||||
def_reduce_vars(AST_Try, function(tw, descend, compressor) {
|
||||
reset_block_variables(compressor, this);
|
||||
push(tw);
|
||||
walk_body(this, tw);
|
||||
pop(tw);
|
||||
if (this.bcatch) {
|
||||
push(tw);
|
||||
this.bcatch.walk(tw);
|
||||
pop(tw);
|
||||
}
|
||||
if (this.bfinally) this.bfinally.walk(tw);
|
||||
return true;
|
||||
});
|
||||
|
||||
def_reduce_vars(AST_Unary, function(tw) {
|
||||
var node = this;
|
||||
if (node.operator !== "++" && node.operator !== "--") return;
|
||||
var exp = node.expression;
|
||||
if (!(exp instanceof AST_SymbolRef)) return;
|
||||
var def = exp.definition();
|
||||
var safe = safe_to_assign(tw, def, exp.scope, true);
|
||||
def.assignments++;
|
||||
if (!safe) return;
|
||||
var fixed = def.fixed;
|
||||
if (!fixed) return;
|
||||
def.references.push(exp);
|
||||
def.chained = true;
|
||||
def.fixed = function() {
|
||||
return make_node(AST_Binary, node, {
|
||||
operator: node.operator.slice(0, -1),
|
||||
left: make_node(AST_UnaryPrefix, node, {
|
||||
operator: "+",
|
||||
expression: fixed instanceof AST_Node ? fixed : fixed()
|
||||
}),
|
||||
right: make_node(AST_Number, node, {
|
||||
value: 1
|
||||
})
|
||||
});
|
||||
};
|
||||
mark(tw, def, true);
|
||||
return true;
|
||||
});
|
||||
|
||||
def_reduce_vars(AST_VarDef, function(tw, descend) {
|
||||
var node = this;
|
||||
if (node.name instanceof AST_Destructuring) {
|
||||
suppress(node.name);
|
||||
return;
|
||||
}
|
||||
var d = node.name.definition();
|
||||
if (node.value) {
|
||||
if (safe_to_assign(tw, d, node.name.scope, node.value)) {
|
||||
d.fixed = function() {
|
||||
return node.value;
|
||||
};
|
||||
tw.loop_ids.set(d.id, tw.in_loop);
|
||||
mark(tw, d, false);
|
||||
descend();
|
||||
mark(tw, d, true);
|
||||
return true;
|
||||
} else {
|
||||
d.fixed = false;
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
def_reduce_vars(AST_While, function(tw, descend, compressor) {
|
||||
reset_block_variables(compressor, this);
|
||||
const saved_loop = tw.in_loop;
|
||||
tw.in_loop = this;
|
||||
push(tw);
|
||||
descend();
|
||||
pop(tw);
|
||||
tw.in_loop = saved_loop;
|
||||
return true;
|
||||
});
|
1461 node_modules/terser/lib/compress/tighten-body.js generated vendored Normal file
File diff suppressed because it is too large
187 node_modules/terser/lib/equivalent-to.js generated vendored
@@ -18,6 +18,7 @@ import {
|
||||
AST_Directive,
|
||||
AST_Do,
|
||||
AST_Dot,
|
||||
AST_DotHash,
|
||||
AST_EmptyStatement,
|
||||
AST_Expansion,
|
||||
AST_Export,
|
||||
@@ -96,24 +97,6 @@ export const equivalent_to = (tree1, tree2) => {
|
||||
return walk_1_state.length == 0 && walk_2_state.length == 0;
|
||||
};
|
||||
|
||||
// Creates a shallow compare function
|
||||
const mkshallow = (props) => {
|
||||
const comparisons = Object
|
||||
.keys(props)
|
||||
.map(key => {
|
||||
if (props[key] === "eq") {
|
||||
return `this.${key} === other.${key}`;
|
||||
} else if (props[key] === "exist") {
|
||||
return `(this.${key} == null ? other.${key} == null : this.${key} === other.${key})`;
|
||||
} else {
|
||||
throw new Error(`mkshallow: Unexpected instruction: ${props[key]}`);
|
||||
}
|
||||
})
|
||||
.join(" && ");
|
||||
|
||||
return new Function("other", "return " + comparisons);
|
||||
};
|
||||
|
||||
const pass_through = () => true;
|
||||
|
||||
AST_Node.prototype.shallow_cmp = function () {
|
||||
@@ -122,7 +105,9 @@ AST_Node.prototype.shallow_cmp = function () {
|
||||
|
||||
AST_Debugger.prototype.shallow_cmp = pass_through;
|
||||
|
||||
AST_Directive.prototype.shallow_cmp = mkshallow({ value: "eq" });
|
||||
AST_Directive.prototype.shallow_cmp = function(other) {
|
||||
return this.value === other.value;
|
||||
};
|
||||
|
||||
AST_SimpleStatement.prototype.shallow_cmp = pass_through;
|
||||
|
||||
@@ -130,17 +115,17 @@ AST_Block.prototype.shallow_cmp = pass_through;
|
||||
|
||||
AST_EmptyStatement.prototype.shallow_cmp = pass_through;
|
||||
|
||||
AST_LabeledStatement.prototype.shallow_cmp = mkshallow({ "label.name": "eq" });
|
||||
AST_LabeledStatement.prototype.shallow_cmp = function(other) {
|
||||
return this.label.name === other.label.name;
|
||||
};
|
||||
|
||||
AST_Do.prototype.shallow_cmp = pass_through;
|
||||
|
||||
AST_While.prototype.shallow_cmp = pass_through;
|
||||
|
||||
AST_For.prototype.shallow_cmp = mkshallow({
|
||||
init: "exist",
|
||||
condition: "exist",
|
||||
step: "exist"
|
||||
});
|
||||
AST_For.prototype.shallow_cmp = function(other) {
|
||||
return (this.init == null ? other.init == null : this.init === other.init) && (this.condition == null ? other.condition == null : this.condition === other.condition) && (this.step == null ? other.step == null : this.step === other.step);
|
||||
};
|
||||
|
||||
AST_ForIn.prototype.shallow_cmp = pass_through;
|
||||
|
||||
@@ -152,22 +137,21 @@ AST_Toplevel.prototype.shallow_cmp = pass_through;
|
||||
|
||||
AST_Expansion.prototype.shallow_cmp = pass_through;
|
||||
|
||||
AST_Lambda.prototype.shallow_cmp = mkshallow({
|
||||
is_generator: "eq",
|
||||
async: "eq"
|
||||
});
|
||||
AST_Lambda.prototype.shallow_cmp = function(other) {
|
||||
return this.is_generator === other.is_generator && this.async === other.async;
|
||||
};
|
||||
|
||||
AST_Destructuring.prototype.shallow_cmp = mkshallow({
|
||||
is_array: "eq"
|
||||
});
|
||||
AST_Destructuring.prototype.shallow_cmp = function(other) {
|
||||
return this.is_array === other.is_array;
|
||||
};
|
||||
|
||||
AST_PrefixedTemplateString.prototype.shallow_cmp = pass_through;
|
||||
|
||||
AST_TemplateString.prototype.shallow_cmp = pass_through;
|
||||
|
||||
AST_TemplateSegment.prototype.shallow_cmp = mkshallow({
|
||||
"value": "eq"
|
||||
});
|
||||
AST_TemplateSegment.prototype.shallow_cmp = function(other) {
|
||||
return this.value === other.value;
|
||||
};
|
||||
|
||||
AST_Jump.prototype.shallow_cmp = pass_through;
|
||||
|
||||
@@ -175,51 +159,45 @@ AST_LoopControl.prototype.shallow_cmp = pass_through;
|
||||
|
||||
AST_Await.prototype.shallow_cmp = pass_through;
|
||||
|
||||
AST_Yield.prototype.shallow_cmp = mkshallow({
|
||||
is_star: "eq"
|
||||
});
|
||||
AST_Yield.prototype.shallow_cmp = function(other) {
|
||||
return this.is_star === other.is_star;
|
||||
};
|
||||
|
||||
AST_If.prototype.shallow_cmp = mkshallow({
|
||||
alternative: "exist"
|
||||
});
|
||||
AST_If.prototype.shallow_cmp = function(other) {
|
||||
return this.alternative == null ? other.alternative == null : this.alternative === other.alternative;
|
||||
};
|
||||
|
||||
AST_Switch.prototype.shallow_cmp = pass_through;
|
||||
|
||||
AST_SwitchBranch.prototype.shallow_cmp = pass_through;
|
||||
|
||||
AST_Try.prototype.shallow_cmp = mkshallow({
|
||||
bcatch: "exist",
|
||||
bfinally: "exist"
|
||||
});
|
||||
AST_Try.prototype.shallow_cmp = function(other) {
|
||||
return (this.bcatch == null ? other.bcatch == null : this.bcatch === other.bcatch) && (this.bfinally == null ? other.bfinally == null : this.bfinally === other.bfinally);
|
||||
};
|
||||
|
||||
AST_Catch.prototype.shallow_cmp = mkshallow({
|
||||
argname: "exist"
|
||||
});
|
||||
AST_Catch.prototype.shallow_cmp = function(other) {
|
||||
return this.argname == null ? other.argname == null : this.argname === other.argname;
|
||||
};
|
||||
|
||||
AST_Finally.prototype.shallow_cmp = pass_through;
|
||||
|
||||
AST_Definitions.prototype.shallow_cmp = pass_through;
|
||||
|
||||
AST_VarDef.prototype.shallow_cmp = mkshallow({
|
||||
value: "exist"
|
||||
});
|
||||
AST_VarDef.prototype.shallow_cmp = function(other) {
|
||||
return this.value == null ? other.value == null : this.value === other.value;
|
||||
};
|
||||
|
||||
AST_NameMapping.prototype.shallow_cmp = pass_through;
|
||||
|
||||
AST_Import.prototype.shallow_cmp = mkshallow({
|
||||
imported_name: "exist",
|
||||
imported_names: "exist"
|
||||
});
|
||||
AST_Import.prototype.shallow_cmp = function(other) {
|
||||
return (this.imported_name == null ? other.imported_name == null : this.imported_name === other.imported_name) && (this.imported_names == null ? other.imported_names == null : this.imported_names === other.imported_names);
|
||||
};
|
||||
|
||||
AST_ImportMeta.prototype.shallow_cmp = pass_through;
|
||||
|
||||
AST_Export.prototype.shallow_cmp = mkshallow({
|
||||
exported_definition: "exist",
|
||||
exported_value: "exist",
|
||||
exported_names: "exist",
|
||||
module_name: "eq",
|
||||
is_default: "eq",
|
||||
});
|
||||
AST_Export.prototype.shallow_cmp = function(other) {
|
||||
return (this.exported_definition == null ? other.exported_definition == null : this.exported_definition === other.exported_definition) && (this.exported_value == null ? other.exported_value == null : this.exported_value === other.exported_value) && (this.exported_names == null ? other.exported_names == null : this.exported_names === other.exported_names) && this.module_name === other.module_name && this.is_default === other.is_default;
|
||||
};
|
||||
|
||||
AST_Call.prototype.shallow_cmp = pass_through;
|
||||
|
||||
@@ -229,17 +207,21 @@ AST_PropAccess.prototype.shallow_cmp = pass_through;
|
||||
|
||||
AST_Chain.prototype.shallow_cmp = pass_through;
|
||||
|
||||
AST_Dot.prototype.shallow_cmp = mkshallow({
|
||||
property: "eq"
|
||||
});
|
||||
AST_Dot.prototype.shallow_cmp = function(other) {
|
||||
return this.property === other.property;
|
||||
};
|
||||
|
||||
AST_Unary.prototype.shallow_cmp = mkshallow({
|
||||
operator: "eq"
|
||||
});
|
||||
AST_DotHash.prototype.shallow_cmp = function(other) {
|
||||
return this.property === other.property;
|
||||
};
|
||||
|
||||
AST_Binary.prototype.shallow_cmp = mkshallow({
|
||||
operator: "eq"
|
||||
});
|
||||
AST_Unary.prototype.shallow_cmp = function(other) {
|
||||
return this.operator === other.operator;
|
||||
};
|
||||
|
||||
AST_Binary.prototype.shallow_cmp = function(other) {
|
||||
return this.operator === other.operator;
|
||||
};
|
||||
|
||||
AST_Conditional.prototype.shallow_cmp = pass_through;
|
||||
|
||||
@@ -249,36 +231,33 @@ AST_Object.prototype.shallow_cmp = pass_through;
|
||||
|
||||
AST_ObjectProperty.prototype.shallow_cmp = pass_through;
|
||||
|
||||
AST_ObjectKeyVal.prototype.shallow_cmp = mkshallow({
|
||||
key: "eq"
|
||||
});
|
||||
AST_ObjectKeyVal.prototype.shallow_cmp = function(other) {
|
||||
return this.key === other.key;
|
||||
};
|
||||
|
||||
AST_ObjectSetter.prototype.shallow_cmp = mkshallow({
|
||||
static: "eq"
|
||||
});
|
||||
AST_ObjectSetter.prototype.shallow_cmp = function(other) {
|
||||
return this.static === other.static;
|
||||
};
|
||||
|
||||
AST_ObjectGetter.prototype.shallow_cmp = mkshallow({
|
||||
static: "eq"
|
||||
});
|
||||
AST_ObjectGetter.prototype.shallow_cmp = function(other) {
|
||||
return this.static === other.static;
|
||||
};
|
||||
|
||||
AST_ConciseMethod.prototype.shallow_cmp = mkshallow({
|
||||
static: "eq",
|
||||
is_generator: "eq",
|
||||
async: "eq",
|
||||
});
|
||||
AST_ConciseMethod.prototype.shallow_cmp = function(other) {
|
||||
return this.static === other.static && this.is_generator === other.is_generator && this.async === other.async;
|
||||
};
|
||||
|
||||
AST_Class.prototype.shallow_cmp = mkshallow({
|
||||
name: "exist",
|
||||
extends: "exist",
|
||||
});
|
||||
AST_Class.prototype.shallow_cmp = function(other) {
|
||||
return (this.name == null ? other.name == null : this.name === other.name) && (this.extends == null ? other.extends == null : this.extends === other.extends);
|
||||
};
|
||||
|
||||
AST_ClassProperty.prototype.shallow_cmp = mkshallow({
|
||||
static: "eq"
|
||||
});
|
||||
AST_ClassProperty.prototype.shallow_cmp = function(other) {
|
||||
return this.static === other.static;
|
||||
};
|
||||
|
||||
AST_Symbol.prototype.shallow_cmp = mkshallow({
|
||||
name: "eq"
|
||||
});
|
||||
AST_Symbol.prototype.shallow_cmp = function(other) {
|
||||
return this.name === other.name;
|
||||
};
|
||||
|
||||
AST_NewTarget.prototype.shallow_cmp = pass_through;
|
||||
|
||||
@@ -286,17 +265,17 @@ AST_This.prototype.shallow_cmp = pass_through;
|
||||
|
||||
AST_Super.prototype.shallow_cmp = pass_through;
|
||||
|
||||
AST_String.prototype.shallow_cmp = mkshallow({
|
||||
value: "eq"
|
||||
});
|
||||
AST_String.prototype.shallow_cmp = function(other) {
|
||||
return this.value === other.value;
|
||||
};
|
||||
|
||||
AST_Number.prototype.shallow_cmp = mkshallow({
|
||||
value: "eq"
|
||||
});
|
||||
AST_Number.prototype.shallow_cmp = function(other) {
|
||||
return this.value === other.value;
|
||||
};
|
||||
|
||||
AST_BigInt.prototype.shallow_cmp = mkshallow({
|
||||
value: "eq"
|
||||
});
|
||||
AST_BigInt.prototype.shallow_cmp = function(other) {
|
||||
return this.value === other.value;
|
||||
};
|
||||
|
||||
AST_RegExp.prototype.shallow_cmp = function (other) {
|
||||
return (
|
||||
|
115 node_modules/terser/lib/minify.js generated vendored
@@ -7,7 +7,7 @@ import {
|
||||
map_to_object,
|
||||
HOP,
|
||||
} from "./utils/index.js";
|
||||
import { AST_Toplevel, AST_Node } from "./ast.js";
|
||||
import { AST_Toplevel, AST_Node, walk, AST_Scope } from "./ast.js";
|
||||
import { parse } from "./parse.js";
|
||||
import { OutputStream } from "./output.js";
|
||||
import { Compressor } from "./compress/index.js";
|
||||
@@ -15,6 +15,7 @@ import { base54 } from "./scope.js";
|
||||
import { SourceMap } from "./sourcemap.js";
|
||||
import {
|
||||
mangle_properties,
|
||||
mangle_private_properties,
|
||||
reserve_quoted_keys,
|
||||
} from "./propmangle.js";
|
||||
|
||||
@@ -60,7 +61,54 @@ function cache_to_json(cache) {
|
||||
};
|
||||
}
|
||||
|
||||
async function minify(files, options) {
|
||||
function log_input(files, options, fs, debug_folder) {
|
||||
if (!(fs && fs.writeFileSync && fs.mkdirSync)) {
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
fs.mkdirSync(debug_folder);
|
||||
} catch (e) {
|
||||
if (e.code !== "EEXIST") throw e;
|
||||
}
|
||||
|
||||
const log_path = `${debug_folder}/terser-debug-${(Math.random() * 9999999) | 0}.log`;
|
||||
|
||||
options = options || {};
|
||||
|
||||
const options_str = JSON.stringify(options, (_key, thing) => {
|
||||
if (typeof thing === "function") return "[Function " + thing.toString() + "]";
|
||||
if (thing instanceof RegExp) return "[RegExp " + thing.toString() + "]";
|
||||
return thing;
|
||||
}, 4);
|
||||
|
||||
const files_str = (file) => {
|
||||
if (typeof file === "object" && options.parse && options.parse.spidermonkey) {
|
||||
return JSON.stringify(file, null, 2);
|
||||
} else if (typeof file === "object") {
|
||||
return Object.keys(file)
|
||||
.map((key) => key + ": " + files_str(file[key]))
|
||||
.join("\n\n");
|
||||
} else if (typeof file === "string") {
|
||||
return "```\n" + file + "\n```";
|
||||
} else {
|
||||
return file; // What do?
|
||||
}
|
||||
};
|
||||
|
||||
fs.writeFileSync(log_path, "Options: \n" + options_str + "\n\nInput files:\n\n" + files_str(files) + "\n");
|
||||
}
|
||||
|
||||
async function minify(files, options, _fs_module) {
|
||||
if (
|
||||
_fs_module
|
||||
&& typeof process === "object"
|
||||
&& process.env
|
||||
&& typeof process.env.TERSER_DEBUG_DIR === "string"
|
||||
) {
|
||||
log_input(files, options, _fs_module, process.env.TERSER_DEBUG_DIR);
|
||||
}
|
||||
|
||||
options = defaults(options, {
|
||||
compress: {},
|
||||
ecma: undefined,
|
||||
@@ -83,6 +131,7 @@ async function minify(files, options) {
|
||||
warnings: false,
|
||||
wrap: false,
|
||||
}, true);
|
||||
|
||||
var timings = options.timings && {
|
||||
start: Date.now()
|
||||
};
|
||||
@@ -113,6 +162,7 @@ async function minify(files, options) {
|
||||
keep_classnames: false,
|
||||
keep_fnames: false,
|
||||
module: false,
|
||||
nth_identifier: base54,
|
||||
properties: false,
|
||||
reserved: [],
|
||||
safari10: false,
|
||||
@@ -144,6 +194,8 @@ async function minify(files, options) {
|
||||
url: null,
|
||||
}, true);
|
||||
}
|
||||
|
||||
// -- Parse phase --
|
||||
if (timings) timings.parse = Date.now();
|
||||
var toplevel;
|
||||
if (files instanceof AST_Toplevel) {
|
||||
@@ -193,24 +245,30 @@ async function minify(files, options) {
|
||||
toplevel.figure_out_scope(options.mangle);
|
||||
toplevel.expand_names(options.mangle);
|
||||
}
|
||||
|
||||
// -- Compress phase --
|
||||
if (timings) timings.compress = Date.now();
|
||||
if (options.compress) {
|
||||
toplevel = new Compressor(options.compress, {
|
||||
mangle_options: options.mangle
|
||||
}).compress(toplevel);
|
||||
}
|
||||
|
||||
// -- Mangle phase --
|
||||
if (timings) timings.scope = Date.now();
|
||||
if (options.mangle) toplevel.figure_out_scope(options.mangle);
|
||||
if (timings) timings.mangle = Date.now();
|
||||
if (options.mangle) {
|
||||
base54.reset();
|
||||
toplevel.compute_char_frequency(options.mangle);
|
||||
toplevel.mangle_names(options.mangle);
|
||||
toplevel = mangle_private_properties(toplevel, options.mangle);
|
||||
}
|
||||
if (timings) timings.properties = Date.now();
|
||||
if (options.mangle && options.mangle.properties) {
|
||||
toplevel = mangle_properties(toplevel, options.mangle.properties);
|
||||
}
|
||||
|
||||
// Format phase
|
||||
if (timings) timings.format = Date.now();
|
||||
var result = {};
|
||||
if (options.format.ast) {
|
||||
@@ -220,19 +278,34 @@ async function minify(files, options) {
|
||||
result.ast = toplevel.to_mozilla_ast();
|
||||
}
|
||||
if (!HOP(options.format, "code") || options.format.code) {
|
||||
if (!options.format.ast) {
|
||||
// Destroy stuff to save RAM. (unless the deprecated `ast` option is on)
|
||||
options.format._destroy_ast = true;
|
||||
|
||||
walk(toplevel, node => {
|
||||
if (node instanceof AST_Scope) {
|
||||
node.variables = undefined;
|
||||
node.enclosed = undefined;
|
||||
node.parent_scope = undefined;
|
||||
}
|
||||
if (node.block_scope) {
|
||||
node.block_scope.variables = undefined;
|
||||
node.block_scope.enclosed = undefined;
|
||||
node.parent_scope = undefined;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
if (options.sourceMap) {
|
||||
if (options.sourceMap.includeSources && files instanceof AST_Toplevel) {
|
||||
throw new Error("original source content unavailable");
|
||||
}
|
||||
options.format.source_map = await SourceMap({
|
||||
file: options.sourceMap.filename,
|
||||
orig: options.sourceMap.content,
|
||||
root: options.sourceMap.root
|
||||
root: options.sourceMap.root,
|
||||
files: options.sourceMap.includeSources ? files : null,
|
||||
});
|
||||
if (options.sourceMap.includeSources) {
|
||||
if (files instanceof AST_Toplevel) {
|
||||
throw new Error("original source content unavailable");
|
||||
} else for (var name in files) if (HOP(files, name)) {
|
||||
options.format.source_map.get().setSourceContent(name, files[name]);
|
||||
}
|
||||
}
|
||||
}
|
||||
delete options.format.ast;
|
||||
delete options.format.code;
|
||||
@@ -241,11 +314,21 @@ async function minify(files, options) {
|
||||
toplevel.print(stream);
|
||||
result.code = stream.get();
|
||||
if (options.sourceMap) {
|
||||
if(options.sourceMap.asObject) {
|
||||
result.map = options.format.source_map.get().toJSON();
|
||||
} else {
|
||||
result.map = options.format.source_map.toString();
|
||||
}
|
||||
Object.defineProperty(result, "map", {
|
||||
configurable: true,
|
||||
enumerable: true,
|
||||
get() {
|
||||
const map = options.format.source_map.getEncoded();
|
||||
return (result.map = options.sourceMap.asObject ? map : JSON.stringify(map));
|
||||
},
|
||||
set(value) {
|
||||
Object.defineProperty(result, "map", {
|
||||
value,
|
||||
writable: true,
|
||||
});
|
||||
}
|
||||
});
|
||||
result.decoded_map = options.format.source_map.getDecoded();
|
||||
if (options.sourceMap.url == "inline") {
|
||||
var sourceMap = typeof result.map === "object" ? JSON.stringify(result.map) : result.map;
|
||||
result.code += "\n//# sourceMappingURL=data:application/json;charset=utf-8;base64," + to_base64(sourceMap);
|
||||
|
607 node_modules/terser/lib/mozilla-ast.js generated vendored
@@ -41,7 +41,6 @@
|
||||
|
||||
***********************************************************************/
|
||||
|
||||
import * as ast from "./ast.js";
|
||||
import { make_node } from "./utils/index.js";
|
||||
import {
|
||||
AST_Accessor,
|
||||
@@ -158,6 +157,7 @@ import {
|
||||
AST_With,
|
||||
AST_Yield,
|
||||
} from "./ast.js";
|
||||
import { is_basic_identifier_string } from "./parse.js";
|
||||
|
||||
(function() {
|
||||
|
||||
@@ -179,6 +179,24 @@ import {
|
||||
return body;
|
||||
};
|
||||
|
||||
const assert_clause_from_moz = (assertions) => {
|
||||
if (assertions && assertions.length > 0) {
|
||||
return new AST_Object({
|
||||
start: my_start_token(assertions),
|
||||
end: my_end_token(assertions),
|
||||
properties: assertions.map((assertion_kv) =>
|
||||
new AST_ObjectKeyVal({
|
||||
start: my_start_token(assertion_kv),
|
||||
end: my_end_token(assertion_kv),
|
||||
key: assertion_kv.key.name || assertion_kv.key.value,
|
||||
value: from_moz(assertion_kv.value)
|
||||
})
|
||||
)
|
||||
});
|
||||
}
|
||||
return null;
|
||||
};
|
||||
|
||||
var MOZ_TO_ME = {
|
||||
Program: function(M) {
|
||||
return new AST_Toplevel({
|
||||
@@ -187,6 +205,7 @@ import {
|
||||
body: normalize_directives(M.body.map(from_moz))
|
||||
});
|
||||
},
|
||||
|
||||
ArrayPattern: function(M) {
|
||||
return new AST_Destructuring({
|
||||
start: my_start_token(M),
|
||||
@@ -200,6 +219,7 @@ import {
|
||||
is_array: true
|
||||
});
|
||||
},
|
||||
|
||||
ObjectPattern: function(M) {
|
||||
return new AST_Destructuring({
|
||||
start: my_start_token(M),
|
||||
@@ -208,6 +228,7 @@ import {
|
||||
is_array: false
|
||||
});
|
||||
},
|
||||
|
||||
AssignmentPattern: function(M) {
|
||||
return new AST_DefaultAssign({
|
||||
start: my_start_token(M),
|
||||
@@ -217,6 +238,7 @@ import {
|
||||
right: from_moz(M.right)
|
||||
});
|
||||
},
|
||||
|
||||
SpreadElement: function(M) {
|
||||
return new AST_Expansion({
|
||||
start: my_start_token(M),
|
||||
@@ -224,6 +246,7 @@ import {
|
||||
expression: from_moz(M.argument)
|
||||
});
|
||||
},
|
||||
|
||||
RestElement: function(M) {
|
||||
return new AST_Expansion({
|
||||
start: my_start_token(M),
|
||||
@@ -231,6 +254,7 @@ import {
|
||||
expression: from_moz(M.argument)
|
||||
});
|
||||
},
|
||||
|
||||
TemplateElement: function(M) {
|
||||
return new AST_TemplateSegment({
|
||||
start: my_start_token(M),
|
||||
@@ -239,6 +263,7 @@ import {
|
||||
raw: M.value.raw
|
||||
});
|
||||
},
|
||||
|
||||
TemplateLiteral: function(M) {
|
||||
var segments = [];
|
||||
for (var i = 0; i < M.quasis.length; i++) {
|
||||
@@ -253,6 +278,7 @@ import {
|
||||
segments: segments
|
||||
});
|
||||
},
|
||||
|
||||
TaggedTemplateExpression: function(M) {
|
||||
return new AST_PrefixedTemplateString({
|
||||
start: my_start_token(M),
|
||||
@@ -261,6 +287,7 @@ import {
|
||||
prefix: from_moz(M.tag)
|
||||
});
|
||||
},
|
||||
|
||||
FunctionDeclaration: function(M) {
|
||||
return new AST_Defun({
|
||||
start: my_start_token(M),
|
||||
@@ -272,6 +299,7 @@ import {
|
||||
body: normalize_directives(from_moz(M.body).body)
|
||||
});
|
||||
},
|
||||
|
||||
FunctionExpression: function(M) {
|
||||
return new AST_Function({
|
||||
start: my_start_token(M),
|
||||
@@ -283,6 +311,7 @@ import {
|
||||
body: normalize_directives(from_moz(M.body).body)
|
||||
});
|
||||
},
|
||||
|
||||
ArrowFunctionExpression: function(M) {
|
||||
const body = M.body.type === "BlockStatement"
|
||||
? from_moz(M.body).body
|
||||
@@ -295,6 +324,7 @@ import {
|
||||
async: M.async,
|
||||
});
|
||||
},
|
||||
|
||||
ExpressionStatement: function(M) {
|
||||
return new AST_SimpleStatement({
|
||||
start: my_start_token(M),
|
||||
@@ -302,6 +332,7 @@ import {
|
||||
body: from_moz(M.expression)
|
||||
});
|
||||
},
|
||||
|
||||
TryStatement: function(M) {
|
||||
var handlers = M.handlers || [M.handler];
|
||||
if (handlers.length > 1 || M.guardedHandlers && M.guardedHandlers.length) {
|
||||
@@ -315,6 +346,7 @@ import {
|
||||
bfinally : M.finalizer ? new AST_Finally(from_moz(M.finalizer)) : null
|
||||
});
|
||||
},
|
||||
|
||||
Property: function(M) {
|
||||
var key = M.key;
|
||||
var args = {
|
||||
@@ -357,6 +389,7 @@ import {
|
||||
return new AST_ConciseMethod(args);
|
||||
}
|
||||
},
|
||||
|
||||
MethodDefinition: function(M) {
|
||||
var args = {
|
||||
start : my_start_token(M),
|
||||
@@ -375,6 +408,7 @@ import {
|
||||
args.async = M.value.async;
|
||||
return new AST_ConciseMethod(args);
|
||||
},
|
||||
|
||||
FieldDefinition: function(M) {
|
||||
let key;
|
||||
if (M.computed) {
|
||||
@@ -391,6 +425,7 @@ import {
|
||||
static : M.static,
|
||||
});
|
||||
},
|
||||
|
||||
PropertyDefinition: function(M) {
|
||||
let key;
|
||||
if (M.computed) {
|
||||
@@ -408,6 +443,7 @@ import {
|
||||
static : M.static,
|
||||
});
|
||||
},
|
||||
|
||||
ArrayExpression: function(M) {
|
||||
return new AST_Array({
|
||||
start : my_start_token(M),
|
||||
@@ -417,6 +453,7 @@ import {
|
||||
})
|
||||
});
|
||||
},
|
||||
|
||||
ObjectExpression: function(M) {
|
||||
return new AST_Object({
|
||||
start : my_start_token(M),
|
||||
@@ -430,6 +467,7 @@ import {
|
||||
})
|
||||
});
|
||||
},
|
||||
|
||||
SequenceExpression: function(M) {
|
||||
return new AST_Sequence({
|
||||
start : my_start_token(M),
|
||||
@@ -437,6 +475,7 @@ import {
|
||||
expressions: M.expressions.map(from_moz)
|
||||
});
|
||||
},
|
||||
|
||||
MemberExpression: function(M) {
|
||||
return new (M.computed ? AST_Sub : AST_Dot)({
|
||||
start : my_start_token(M),
|
||||
@@ -446,6 +485,7 @@ import {
|
||||
optional : M.optional || false
|
||||
});
|
||||
},
|
||||
|
||||
ChainExpression: function(M) {
|
||||
return new AST_Chain({
|
||||
start : my_start_token(M),
|
||||
@@ -453,6 +493,7 @@ import {
|
||||
expression : from_moz(M.expression)
|
||||
});
|
||||
},
|
||||
|
||||
SwitchCase: function(M) {
|
||||
return new (M.test ? AST_Case : AST_Default)({
|
||||
start : my_start_token(M),
|
||||
@@ -461,6 +502,7 @@ import {
|
||||
body : M.consequent.map(from_moz)
|
||||
});
|
||||
},
|
||||
|
||||
VariableDeclaration: function(M) {
|
||||
return new (M.kind === "const" ? AST_Const :
|
||||
M.kind === "let" ? AST_Let : AST_Var)({
|
||||
@@ -499,9 +541,11 @@ import {
|
||||
end : my_end_token(M),
|
||||
imported_name: imported_name,
|
||||
imported_names : imported_names,
|
||||
module_name : from_moz(M.source)
|
||||
module_name : from_moz(M.source),
|
||||
assert_clause: assert_clause_from_moz(M.assertions)
|
||||
});
|
||||
},
|
||||
|
||||
ExportAllDeclaration: function(M) {
|
||||
return new AST_Export({
|
||||
start: my_start_token(M),
|
||||
@@ -512,9 +556,11 @@ import {
|
||||
foreign_name: new AST_SymbolExportForeign({ name: "*" })
|
||||
})
|
||||
],
|
||||
module_name: from_moz(M.source)
|
||||
module_name: from_moz(M.source),
|
||||
assert_clause: assert_clause_from_moz(M.assertions)
|
||||
});
|
||||
},
|
||||
|
||||
ExportNamedDeclaration: function(M) {
|
||||
return new AST_Export({
|
||||
start: my_start_token(M),
|
||||
@@ -526,9 +572,11 @@ import {
|
||||
name: from_moz(specifier.local)
|
||||
});
|
||||
}) : null,
|
||||
module_name: from_moz(M.source)
|
||||
module_name: from_moz(M.source),
|
||||
assert_clause: assert_clause_from_moz(M.assertions)
|
||||
});
|
||||
},
|
||||
|
||||
ExportDefaultDeclaration: function(M) {
|
||||
return new AST_Export({
|
||||
start: my_start_token(M),
|
||||
@@ -537,6 +585,7 @@ import {
|
||||
is_default: true
|
||||
});
|
||||
},
|
||||
|
||||
Literal: function(M) {
|
||||
var val = M.value, args = {
|
||||
start : my_start_token(M),
|
||||
@@ -572,6 +621,7 @@ import {
|
||||
return new (val ? AST_True : AST_False)(args);
|
||||
}
|
||||
},
|
||||
|
||||
MetaProperty: function(M) {
|
||||
if (M.meta.name === "new" && M.property.name === "target") {
|
||||
return new AST_NewTarget({
|
||||
@@ -585,6 +635,7 @@ import {
|
||||
});
|
||||
}
|
||||
},
|
||||
|
||||
Identifier: function(M) {
|
||||
var p = FROM_MOZ_STACK[FROM_MOZ_STACK.length - 2];
|
||||
return new ( p.type == "LabeledStatement" ? AST_Label
|
||||
@@ -607,12 +658,262 @@ import {
|
||||
name : M.name
|
||||
});
|
||||
},
|
||||
|
||||
BigIntLiteral(M) {
|
||||
return new AST_BigInt({
|
||||
start : my_start_token(M),
|
||||
end : my_end_token(M),
|
||||
value : M.value
|
||||
});
|
||||
},
|
||||
|
||||
EmptyStatement: function(M) {
|
||||
return new AST_EmptyStatement({
|
||||
start: my_start_token(M),
|
||||
end: my_end_token(M)
|
||||
});
|
||||
},
|
||||
|
||||
BlockStatement: function(M) {
|
||||
return new AST_BlockStatement({
|
||||
start: my_start_token(M),
|
||||
end: my_end_token(M),
|
||||
body: M.body.map(from_moz)
|
||||
});
|
||||
},
|
||||
|
||||
IfStatement: function(M) {
|
||||
return new AST_If({
|
||||
start: my_start_token(M),
|
||||
end: my_end_token(M),
|
||||
condition: from_moz(M.test),
|
||||
body: from_moz(M.consequent),
|
||||
alternative: from_moz(M.alternate)
|
||||
});
|
||||
},
|
||||
|
||||
LabeledStatement: function(M) {
|
||||
return new AST_LabeledStatement({
|
||||
start: my_start_token(M),
|
||||
end: my_end_token(M),
|
||||
label: from_moz(M.label),
|
||||
body: from_moz(M.body)
|
||||
});
|
||||
},
|
||||
|
||||
BreakStatement: function(M) {
|
||||
return new AST_Break({
|
||||
start: my_start_token(M),
|
||||
end: my_end_token(M),
|
||||
label: from_moz(M.label)
|
||||
});
|
||||
},
|
||||
|
||||
ContinueStatement: function(M) {
|
||||
return new AST_Continue({
|
||||
start: my_start_token(M),
|
||||
end: my_end_token(M),
|
||||
label: from_moz(M.label)
|
||||
});
|
||||
},
|
||||
|
||||
WithStatement: function(M) {
|
||||
return new AST_With({
|
||||
start: my_start_token(M),
|
||||
end: my_end_token(M),
|
||||
expression: from_moz(M.object),
|
||||
body: from_moz(M.body)
|
||||
});
|
||||
},
|
||||
|
||||
SwitchStatement: function(M) {
|
||||
return new AST_Switch({
|
||||
start: my_start_token(M),
|
||||
end: my_end_token(M),
|
||||
expression: from_moz(M.discriminant),
|
||||
body: M.cases.map(from_moz)
|
||||
});
|
||||
},
|
||||
|
||||
ReturnStatement: function(M) {
|
||||
return new AST_Return({
|
||||
start: my_start_token(M),
|
||||
end: my_end_token(M),
|
||||
value: from_moz(M.argument)
|
||||
});
|
||||
},
|
||||
|
||||
ThrowStatement: function(M) {
|
||||
return new AST_Throw({
|
||||
start: my_start_token(M),
|
||||
end: my_end_token(M),
|
||||
value: from_moz(M.argument)
|
||||
});
|
||||
},
|
||||
|
||||
WhileStatement: function(M) {
|
||||
return new AST_While({
|
||||
start: my_start_token(M),
|
||||
end: my_end_token(M),
|
||||
condition: from_moz(M.test),
|
||||
body: from_moz(M.body)
|
||||
});
|
||||
},
|
||||
|
||||
DoWhileStatement: function(M) {
|
||||
return new AST_Do({
|
||||
start: my_start_token(M),
|
||||
end: my_end_token(M),
|
||||
condition: from_moz(M.test),
|
||||
body: from_moz(M.body)
|
||||
});
|
||||
},
|
||||
|
||||
ForStatement: function(M) {
|
||||
return new AST_For({
|
||||
start: my_start_token(M),
|
||||
end: my_end_token(M),
|
||||
init: from_moz(M.init),
|
||||
condition: from_moz(M.test),
|
||||
step: from_moz(M.update),
|
||||
body: from_moz(M.body)
|
||||
});
|
||||
},
|
||||
|
||||
ForInStatement: function(M) {
|
||||
return new AST_ForIn({
|
||||
start: my_start_token(M),
|
||||
end: my_end_token(M),
|
||||
init: from_moz(M.left),
|
||||
object: from_moz(M.right),
|
||||
body: from_moz(M.body)
|
||||
});
|
||||
},
|
||||
|
||||
ForOfStatement: function(M) {
|
||||
return new AST_ForOf({
|
||||
start: my_start_token(M),
|
||||
end: my_end_token(M),
|
||||
init: from_moz(M.left),
|
||||
object: from_moz(M.right),
|
||||
body: from_moz(M.body),
|
||||
await: M.await
|
||||
});
|
||||
},
|
||||
|
||||
AwaitExpression: function(M) {
|
||||
return new AST_Await({
|
||||
start: my_start_token(M),
|
||||
end: my_end_token(M),
|
||||
expression: from_moz(M.argument)
|
||||
});
|
||||
},
|
||||
|
||||
YieldExpression: function(M) {
|
||||
return new AST_Yield({
|
||||
start: my_start_token(M),
|
||||
end: my_end_token(M),
|
||||
expression: from_moz(M.argument),
|
||||
is_star: M.delegate
|
||||
});
|
||||
},
|
||||
|
||||
DebuggerStatement: function(M) {
|
||||
return new AST_Debugger({
|
||||
start: my_start_token(M),
|
||||
end: my_end_token(M)
|
||||
});
|
||||
},
|
||||
|
||||
VariableDeclarator: function(M) {
|
||||
return new AST_VarDef({
|
||||
start: my_start_token(M),
|
||||
end: my_end_token(M),
|
||||
name: from_moz(M.id),
|
||||
value: from_moz(M.init)
|
||||
});
|
||||
},
|
||||
|
||||
CatchClause: function(M) {
|
||||
return new AST_Catch({
|
||||
start: my_start_token(M),
|
||||
end: my_end_token(M),
|
||||
argname: from_moz(M.param),
|
||||
body: from_moz(M.body).body
|
||||
});
|
||||
},
|
||||
|
||||
ThisExpression: function(M) {
|
||||
return new AST_This({
|
||||
start: my_start_token(M),
|
||||
end: my_end_token(M)
|
||||
});
|
||||
},
|
||||
|
||||
Super: function(M) {
|
||||
return new AST_Super({
|
||||
start: my_start_token(M),
|
||||
end: my_end_token(M)
|
||||
});
|
||||
},
|
||||
|
||||
BinaryExpression: function(M) {
|
||||
return new AST_Binary({
|
||||
start: my_start_token(M),
|
||||
end: my_end_token(M),
|
||||
operator: M.operator,
|
||||
left: from_moz(M.left),
|
||||
right: from_moz(M.right)
|
||||
});
|
||||
},
|
||||
|
||||
LogicalExpression: function(M) {
|
||||
return new AST_Binary({
|
||||
start: my_start_token(M),
|
||||
end: my_end_token(M),
|
||||
operator: M.operator,
|
||||
left: from_moz(M.left),
|
||||
right: from_moz(M.right)
|
||||
});
|
||||
},
|
||||
|
||||
AssignmentExpression: function(M) {
|
||||
return new AST_Assign({
|
||||
start: my_start_token(M),
|
||||
end: my_end_token(M),
|
||||
operator: M.operator,
|
||||
left: from_moz(M.left),
|
||||
right: from_moz(M.right)
|
||||
});
|
||||
},
|
||||
|
||||
ConditionalExpression: function(M) {
|
||||
return new AST_Conditional({
|
||||
start: my_start_token(M),
|
||||
end: my_end_token(M),
|
||||
condition: from_moz(M.test),
|
||||
consequent: from_moz(M.consequent),
|
||||
alternative: from_moz(M.alternate)
|
||||
});
|
||||
},
|
||||
|
||||
NewExpression: function(M) {
|
||||
return new AST_New({
|
||||
start: my_start_token(M),
|
||||
end: my_end_token(M),
|
||||
expression: from_moz(M.callee),
|
||||
args: M.arguments.map(from_moz)
|
||||
});
|
||||
},
|
||||
|
||||
CallExpression: function(M) {
|
||||
return new AST_Call({
|
||||
start: my_start_token(M),
|
||||
end: my_end_token(M),
|
||||
expression: from_moz(M.callee),
|
||||
optional: M.optional,
|
||||
args: M.arguments.map(from_moz)
|
||||
});
|
||||
}
|
||||
};
|
||||
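The handlers above form a lookup table keyed by the ESTree node's type. A minimal sketch of how such a table is consulted (hypothetical helper, not part of the diff; the real from_moz further below does the same lookup while also maintaining FROM_MOZ_STACK so handlers like Identifier can inspect the parent node):

    function convert_estree(node) {                // illustration only
        if (node == null) return null;
        const handler = MOZ_TO_ME[node.type];      // e.g. "IfStatement" -> the AST_If builder
        return handler(node);
    }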
|
||||
@@ -639,35 +940,200 @@ import {
|
||||
});
|
||||
};
|
||||
|
||||
map("EmptyStatement", AST_EmptyStatement);
|
||||
map("BlockStatement", AST_BlockStatement, "body@body");
|
||||
map("IfStatement", AST_If, "test>condition, consequent>body, alternate>alternative");
|
||||
map("LabeledStatement", AST_LabeledStatement, "label>label, body>body");
|
||||
map("BreakStatement", AST_Break, "label>label");
|
||||
map("ContinueStatement", AST_Continue, "label>label");
|
||||
map("WithStatement", AST_With, "object>expression, body>body");
|
||||
map("SwitchStatement", AST_Switch, "discriminant>expression, cases@body");
|
||||
map("ReturnStatement", AST_Return, "argument>value");
|
||||
map("ThrowStatement", AST_Throw, "argument>value");
|
||||
map("WhileStatement", AST_While, "test>condition, body>body");
|
||||
map("DoWhileStatement", AST_Do, "test>condition, body>body");
|
||||
map("ForStatement", AST_For, "init>init, test>condition, update>step, body>body");
|
||||
map("ForInStatement", AST_ForIn, "left>init, right>object, body>body");
|
||||
map("ForOfStatement", AST_ForOf, "left>init, right>object, body>body, await=await");
|
||||
map("AwaitExpression", AST_Await, "argument>expression");
|
||||
map("YieldExpression", AST_Yield, "argument>expression, delegate=is_star");
|
||||
map("DebuggerStatement", AST_Debugger);
|
||||
map("VariableDeclarator", AST_VarDef, "id>name, init>value");
|
||||
map("CatchClause", AST_Catch, "param>argname, body%body");
|
||||
def_to_moz(AST_EmptyStatement, function To_Moz_EmptyStatement() {
|
||||
return {
|
||||
type: "EmptyStatement"
|
||||
};
|
||||
});
|
||||
def_to_moz(AST_BlockStatement, function To_Moz_BlockStatement(M) {
|
||||
return {
|
||||
type: "BlockStatement",
|
||||
body: M.body.map(to_moz)
|
||||
};
|
||||
});
|
||||
def_to_moz(AST_If, function To_Moz_IfStatement(M) {
|
||||
return {
|
||||
type: "IfStatement",
|
||||
test: to_moz(M.condition),
|
||||
consequent: to_moz(M.body),
|
||||
alternate: to_moz(M.alternative)
|
||||
};
|
||||
});
|
||||
def_to_moz(AST_LabeledStatement, function To_Moz_LabeledStatement(M) {
|
||||
return {
|
||||
type: "LabeledStatement",
|
||||
label: to_moz(M.label),
|
||||
body: to_moz(M.body)
|
||||
};
|
||||
});
|
||||
def_to_moz(AST_Break, function To_Moz_BreakStatement(M) {
|
||||
return {
|
||||
type: "BreakStatement",
|
||||
label: to_moz(M.label)
|
||||
};
|
||||
});
|
||||
def_to_moz(AST_Continue, function To_Moz_ContinueStatement(M) {
|
||||
return {
|
||||
type: "ContinueStatement",
|
||||
label: to_moz(M.label)
|
||||
};
|
||||
});
|
||||
def_to_moz(AST_With, function To_Moz_WithStatement(M) {
|
||||
return {
|
||||
type: "WithStatement",
|
||||
object: to_moz(M.expression),
|
||||
body: to_moz(M.body)
|
||||
};
|
||||
});
|
||||
def_to_moz(AST_Switch, function To_Moz_SwitchStatement(M) {
|
||||
return {
|
||||
type: "SwitchStatement",
|
||||
discriminant: to_moz(M.expression),
|
||||
cases: M.body.map(to_moz)
|
||||
};
|
||||
});
|
||||
def_to_moz(AST_Return, function To_Moz_ReturnStatement(M) {
|
||||
return {
|
||||
type: "ReturnStatement",
|
||||
argument: to_moz(M.value)
|
||||
};
|
||||
});
|
||||
def_to_moz(AST_Throw, function To_Moz_ThrowStatement(M) {
|
||||
return {
|
||||
type: "ThrowStatement",
|
||||
argument: to_moz(M.value)
|
||||
};
|
||||
});
|
||||
def_to_moz(AST_While, function To_Moz_WhileStatement(M) {
|
||||
return {
|
||||
type: "WhileStatement",
|
||||
test: to_moz(M.condition),
|
||||
body: to_moz(M.body)
|
||||
};
|
||||
});
|
||||
def_to_moz(AST_Do, function To_Moz_DoWhileStatement(M) {
|
||||
return {
|
||||
type: "DoWhileStatement",
|
||||
test: to_moz(M.condition),
|
||||
body: to_moz(M.body)
|
||||
};
|
||||
});
|
||||
def_to_moz(AST_For, function To_Moz_ForStatement(M) {
|
||||
return {
|
||||
type: "ForStatement",
|
||||
init: to_moz(M.init),
|
||||
test: to_moz(M.condition),
|
||||
update: to_moz(M.step),
|
||||
body: to_moz(M.body)
|
||||
};
|
||||
});
|
||||
def_to_moz(AST_ForIn, function To_Moz_ForInStatement(M) {
|
||||
return {
|
||||
type: "ForInStatement",
|
||||
left: to_moz(M.init),
|
||||
right: to_moz(M.object),
|
||||
body: to_moz(M.body)
|
||||
};
|
||||
});
|
||||
def_to_moz(AST_ForOf, function To_Moz_ForOfStatement(M) {
|
||||
return {
|
||||
type: "ForOfStatement",
|
||||
left: to_moz(M.init),
|
||||
right: to_moz(M.object),
|
||||
body: to_moz(M.body),
|
||||
await: M.await
|
||||
};
|
||||
});
|
||||
def_to_moz(AST_Await, function To_Moz_AwaitExpression(M) {
|
||||
return {
|
||||
type: "AwaitExpression",
|
||||
argument: to_moz(M.expression)
|
||||
};
|
||||
});
|
||||
def_to_moz(AST_Yield, function To_Moz_YieldExpression(M) {
|
||||
return {
|
||||
type: "YieldExpression",
|
||||
argument: to_moz(M.expression),
|
||||
delegate: M.is_star
|
||||
};
|
||||
});
|
||||
def_to_moz(AST_Debugger, function To_Moz_DebuggerStatement() {
|
||||
return {
|
||||
type: "DebuggerStatement"
|
||||
};
|
||||
});
|
||||
def_to_moz(AST_VarDef, function To_Moz_VariableDeclarator(M) {
|
||||
return {
|
||||
type: "VariableDeclarator",
|
||||
id: to_moz(M.name),
|
||||
init: to_moz(M.value)
|
||||
};
|
||||
});
|
||||
def_to_moz(AST_Catch, function To_Moz_CatchClause(M) {
|
||||
return {
|
||||
type: "CatchClause",
|
||||
param: to_moz(M.argname),
|
||||
body: to_moz_block(M)
|
||||
};
|
||||
});
|
||||
|
||||
map("ThisExpression", AST_This);
|
||||
map("Super", AST_Super);
|
||||
map("BinaryExpression", AST_Binary, "operator=operator, left>left, right>right");
|
||||
map("LogicalExpression", AST_Binary, "operator=operator, left>left, right>right");
|
||||
map("AssignmentExpression", AST_Assign, "operator=operator, left>left, right>right");
|
||||
map("ConditionalExpression", AST_Conditional, "test>condition, consequent>consequent, alternate>alternative");
|
||||
map("NewExpression", AST_New, "callee>expression, arguments@args");
|
||||
map("CallExpression", AST_Call, "callee>expression, optional=optional, arguments@args");
|
||||
def_to_moz(AST_This, function To_Moz_ThisExpression() {
|
||||
return {
|
||||
type: "ThisExpression"
|
||||
};
|
||||
});
|
||||
def_to_moz(AST_Super, function To_Moz_Super() {
|
||||
return {
|
||||
type: "Super"
|
||||
};
|
||||
});
|
||||
def_to_moz(AST_Binary, function To_Moz_BinaryExpression(M) {
|
||||
return {
|
||||
type: "BinaryExpression",
|
||||
operator: M.operator,
|
||||
left: to_moz(M.left),
|
||||
right: to_moz(M.right)
|
||||
};
|
||||
});
|
||||
def_to_moz(AST_Binary, function To_Moz_LogicalExpression(M) {
|
||||
return {
|
||||
type: "LogicalExpression",
|
||||
operator: M.operator,
|
||||
left: to_moz(M.left),
|
||||
right: to_moz(M.right)
|
||||
};
|
||||
});
|
||||
def_to_moz(AST_Assign, function To_Moz_AssignmentExpression(M) {
|
||||
return {
|
||||
type: "AssignmentExpression",
|
||||
operator: M.operator,
|
||||
left: to_moz(M.left),
|
||||
right: to_moz(M.right)
|
||||
};
|
||||
});
|
||||
def_to_moz(AST_Conditional, function To_Moz_ConditionalExpression(M) {
|
||||
return {
|
||||
type: "ConditionalExpression",
|
||||
test: to_moz(M.condition),
|
||||
consequent: to_moz(M.consequent),
|
||||
alternate: to_moz(M.alternative)
|
||||
};
|
||||
});
|
||||
def_to_moz(AST_New, function To_Moz_NewExpression(M) {
|
||||
return {
|
||||
type: "NewExpression",
|
||||
callee: to_moz(M.expression),
|
||||
arguments: M.args.map(to_moz)
|
||||
};
|
||||
});
|
||||
def_to_moz(AST_Call, function To_Moz_CallExpression(M) {
|
||||
return {
|
||||
type: "CallExpression",
|
||||
callee: to_moz(M.expression),
|
||||
optional: M.optional,
|
||||
arguments: M.args.map(to_moz)
|
||||
};
|
||||
});
|
||||
|
||||
def_to_moz(AST_Toplevel, function To_Moz_Program(M) {
|
||||
return to_moz_scope("Program", M);
|
||||
@@ -818,12 +1284,30 @@ import {
|
||||
};
|
||||
});
|
||||
|
||||
const assert_clause_to_moz = assert_clause => {
    const assertions = [];
    if (assert_clause) {
        for (const { key, value } of assert_clause.properties) {
            const key_moz = is_basic_identifier_string(key)
                ? { type: "Identifier", name: key }
                : { type: "Literal", value: key, raw: JSON.stringify(key) };
            assertions.push({
                type: "ImportAttribute",
                key: key_moz,
                value: to_moz(value)
            });
        }
    }
    return assertions;
};
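For orientation, a sketch of the ESTree this helper emits (shape inferred from the code above, not copied from the diff):

    // source being converted (illustrative):
    //   import data from "./data.json" assert { type: "json" };
    // assert_clause_to_moz(assert_clause) then returns roughly:
    [
        {
            type: "ImportAttribute",
            key: { type: "Identifier", name: "type" },                  // basic identifier key
            value: { type: "Literal", value: "json", raw: "\"json\"" }  // to_moz of the string node
        }
    ];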
|
||||
def_to_moz(AST_Export, function To_Moz_ExportDeclaration(M) {
|
||||
if (M.exported_names) {
|
||||
if (M.exported_names[0].name.name === "*") {
|
||||
return {
|
||||
type: "ExportAllDeclaration",
|
||||
source: to_moz(M.module_name)
|
||||
source: to_moz(M.module_name),
|
||||
assertions: assert_clause_to_moz(M.assert_clause)
|
||||
};
|
||||
}
|
||||
return {
|
||||
@@ -836,7 +1320,8 @@ import {
|
||||
};
|
||||
}),
|
||||
declaration: to_moz(M.exported_definition),
|
||||
source: to_moz(M.module_name)
|
||||
source: to_moz(M.module_name),
|
||||
assertions: assert_clause_to_moz(M.assert_clause)
|
||||
};
|
||||
}
|
||||
return {
|
||||
@@ -870,7 +1355,8 @@ import {
|
||||
return {
|
||||
type: "ImportDeclaration",
|
||||
specifiers: specifiers,
|
||||
source: to_moz(M.module_name)
|
||||
source: to_moz(M.module_name),
|
||||
assertions: assert_clause_to_moz(M.assert_clause)
|
||||
};
|
||||
});
|
||||
|
||||
@@ -1198,57 +1684,6 @@ import {
|
||||
);
|
||||
}
|
||||
|
||||
function map(moztype, mytype, propmap) {
    var moz_to_me = "function From_Moz_" + moztype + "(M){\n";
    moz_to_me += "return new U2." + mytype.name + "({\n" +
        "start: my_start_token(M),\n" +
        "end: my_end_token(M)";

    var me_to_moz = "function To_Moz_" + moztype + "(M){\n";
    me_to_moz += "return {\n" +
        "type: " + JSON.stringify(moztype);

    if (propmap) propmap.split(/\s*,\s*/).forEach(function(prop) {
        var m = /([a-z0-9$_]+)([=@>%])([a-z0-9$_]+)/i.exec(prop);
        if (!m) throw new Error("Can't understand property map: " + prop);
        var moz = m[1], how = m[2], my = m[3];
        moz_to_me += ",\n" + my + ": ";
        me_to_moz += ",\n" + moz + ": ";
        switch (how) {
            case "@":
                moz_to_me += "M." + moz + ".map(from_moz)";
                me_to_moz += "M." + my + ".map(to_moz)";
                break;
            case ">":
                moz_to_me += "from_moz(M." + moz + ")";
                me_to_moz += "to_moz(M." + my + ")";
                break;
            case "=":
                moz_to_me += "M." + moz;
                me_to_moz += "M." + my;
                break;
            case "%":
                moz_to_me += "from_moz(M." + moz + ").body";
                me_to_moz += "to_moz_block(M)";
                break;
            default:
                throw new Error("Can't understand operator in propmap: " + prop);
        }
    });

    moz_to_me += "\n})\n}";
    me_to_moz += "\n}\n}";

    moz_to_me = new Function("U2", "my_start_token", "my_end_token", "from_moz", "return(" + moz_to_me + ")")(
        ast, my_start_token, my_end_token, from_moz
    );
    me_to_moz = new Function("to_moz", "to_moz_block", "to_moz_scope", "return(" + me_to_moz + ")")(
        to_moz, to_moz_block, to_moz_scope
    );
    MOZ_TO_ME[moztype] = moz_to_me;
    def_to_moz(mytype, me_to_moz);
}
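For context, the helper above (removed in this diff) generated both converters from a one-line property map. A reconstruction for illustration, matching the hand-written WhileStatement handlers that replace it earlier in this diff: a call such as map("WhileStatement", AST_While, "test>condition, body>body") produced roughly

    function From_Moz_WhileStatement(M) {
        return new U2.AST_While({
            start: my_start_token(M),
            end: my_end_token(M),
            condition: from_moz(M.test),   // ">" converts a single child node
            body: from_moz(M.body)
        });
    }
    // "@" maps an array of children, "=" copies a plain value,
    // "%" unwraps a converted block's body.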
|
||||
var FROM_MOZ_STACK = null;
|
||||
|
||||
function from_moz(node) {
|
||||
|
145
node_modules/terser/lib/output.js
generated
vendored
@@ -159,7 +159,7 @@ import {
|
||||
is_basic_identifier_string,
|
||||
is_identifier_string,
|
||||
PRECEDENCE,
|
||||
RESERVED_WORDS,
|
||||
ALL_RESERVED_WORDS,
|
||||
} from "./parse.js";
|
||||
|
||||
const EXPECT_DIRECTIVE = /^$|[;{][\s\n]*$/;
|
||||
@@ -172,10 +172,52 @@ function is_some_comments(comment) {
    // multiline comment
    return (
        (comment.type === "comment2" || comment.type === "comment1")
        && /@preserve|@lic|@cc_on|^\**!/i.test(comment.value)
        && /@preserve|@copyright|@lic|@cc_on|^\**!/i.test(comment.value)
    );
}

class Rope {
    constructor() {
        this.committed = "";
        this.current = "";
    }

    append(str) {
        this.current += str;
    }

    insertAt(char, index) {
        const { committed, current } = this;
        if (index < committed.length) {
            this.committed = committed.slice(0, index) + char + committed.slice(index);
        } else if (index === committed.length) {
            this.committed += char;
        } else {
            index -= committed.length;
            this.committed += current.slice(0, index) + char;
            this.current = current.slice(index);
        }
    }

    charAt(index) {
        const { committed } = this;
        if (index < committed.length) return committed[index];
        return this.current[index - committed.length];
    }

    curLength() {
        return this.current.length;
    }

    length() {
        return this.committed.length + this.current.length;
    }

    toString() {
        return this.committed + this.current;
    }
}

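A short standalone usage sketch of the Rope buffer above: text accumulates in the uncommitted tail, and inserting a line break only re-slices that tail instead of the whole output string, which is what makes the max_line_len handling below cheaper.

    const r = new Rope();
    r.append("let a=1;");
    r.append("let b=2;");
    r.insertAt("\n", 8);          // break between the statements; commits "let a=1;\n"
    console.log(r.charAt(8));     // "\n"
    console.log(r.toString());    // "let a=1;\nlet b=2;"
    console.log(r.length());      // 17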
function OutputStream(options) {
|
||||
|
||||
var readonly = !options;
|
||||
@@ -205,6 +247,8 @@ function OutputStream(options) {
|
||||
width : 80,
|
||||
wrap_iife : false,
|
||||
wrap_func_args : true,
|
||||
|
||||
_destroy_ast : false
|
||||
}, true);
|
||||
|
||||
if (options.shorthand === undefined)
|
||||
@@ -240,11 +284,11 @@ function OutputStream(options) {
|
||||
var current_col = 0;
|
||||
var current_line = 1;
|
||||
var current_pos = 0;
|
||||
var OUTPUT = "";
|
||||
var OUTPUT = new Rope();
|
||||
let printed_comments = new Set();
|
||||
|
||||
var to_utf8 = options.ascii_only ? function(str, identifier) {
|
||||
if (options.ecma >= 2015 && !options.safari10) {
|
||||
var to_utf8 = options.ascii_only ? function(str, identifier = false, regexp = false) {
|
||||
if (options.ecma >= 2015 && !options.safari10 && !regexp) {
|
||||
str = str.replace(/[\ud800-\udbff][\udc00-\udfff]/g, function(ch) {
|
||||
var code = get_full_char_code(ch, 0).toString(16);
|
||||
return "\\u{" + code + "}";
|
||||
@@ -349,11 +393,17 @@ function OutputStream(options) {
|
||||
var do_add_mapping = mappings ? function() {
|
||||
mappings.forEach(function(mapping) {
|
||||
try {
|
||||
let { name, token } = mapping;
|
||||
if (token.type == "name" || token.type === "privatename") {
|
||||
name = token.value;
|
||||
} else if (name instanceof AST_Symbol) {
|
||||
name = token.type === "string" ? token.value : name.name;
|
||||
}
|
||||
options.source_map.add(
|
||||
mapping.token.file,
|
||||
mapping.line, mapping.col,
|
||||
mapping.token.line, mapping.token.col,
|
||||
!mapping.name && mapping.token.type == "name" ? mapping.token.value : mapping.name
|
||||
is_basic_identifier_string(name) ? name : undefined
|
||||
);
|
||||
} catch(ex) {
|
||||
// Ignore bad mapping
|
||||
@@ -365,19 +415,18 @@ function OutputStream(options) {
|
||||
var ensure_line_len = options.max_line_len ? function() {
|
||||
if (current_col > options.max_line_len) {
|
||||
if (might_add_newline) {
|
||||
var left = OUTPUT.slice(0, might_add_newline);
|
||||
var right = OUTPUT.slice(might_add_newline);
|
||||
OUTPUT.insertAt("\n", might_add_newline);
|
||||
const curLength = OUTPUT.curLength();
|
||||
if (mappings) {
|
||||
var delta = right.length - current_col;
|
||||
var delta = curLength - current_col;
|
||||
mappings.forEach(function(mapping) {
|
||||
mapping.line++;
|
||||
mapping.col += delta;
|
||||
});
|
||||
}
|
||||
OUTPUT = left + "\n" + right;
|
||||
current_line++;
|
||||
current_pos++;
|
||||
current_col = right.length;
|
||||
current_col = curLength;
|
||||
}
|
||||
}
|
||||
if (might_add_newline) {
|
||||
@@ -411,13 +460,13 @@ function OutputStream(options) {
|
||||
|
||||
if (prev === ":" && ch === "}" || (!ch || !";}".includes(ch)) && prev !== ";") {
|
||||
if (options.semicolons || requireSemicolonChars.has(ch)) {
|
||||
OUTPUT += ";";
|
||||
OUTPUT.append(";");
|
||||
current_col++;
|
||||
current_pos++;
|
||||
} else {
|
||||
ensure_line_len();
|
||||
if (current_col > 0) {
|
||||
OUTPUT += "\n";
|
||||
OUTPUT.append("\n");
|
||||
current_pos++;
|
||||
current_line++;
|
||||
current_col = 0;
|
||||
@@ -441,7 +490,7 @@ function OutputStream(options) {
|
||||
|| (ch == "/" && ch == prev)
|
||||
|| ((ch == "+" || ch == "-") && ch == last)
|
||||
) {
|
||||
OUTPUT += " ";
|
||||
OUTPUT.append(" ");
|
||||
current_col++;
|
||||
current_pos++;
|
||||
}
|
||||
@@ -459,7 +508,7 @@ function OutputStream(options) {
|
||||
if (!might_add_newline) do_add_mapping();
|
||||
}
|
||||
|
||||
OUTPUT += str;
|
||||
OUTPUT.append(str);
|
||||
has_parens = str[str.length - 1] == "(";
|
||||
current_pos += str.length;
|
||||
var a = str.split(/\r?\n/), n = a.length - 1;
|
||||
@@ -499,15 +548,15 @@ function OutputStream(options) {
|
||||
|
||||
var newline = options.beautify ? function() {
|
||||
if (newline_insert < 0) return print("\n");
|
||||
if (OUTPUT[newline_insert] != "\n") {
|
||||
OUTPUT = OUTPUT.slice(0, newline_insert) + "\n" + OUTPUT.slice(newline_insert);
|
||||
if (OUTPUT.charAt(newline_insert) != "\n") {
|
||||
OUTPUT.insertAt("\n", newline_insert);
|
||||
current_pos++;
|
||||
current_line++;
|
||||
}
|
||||
newline_insert++;
|
||||
} : options.max_line_len ? function() {
|
||||
ensure_line_len();
|
||||
might_add_newline = OUTPUT.length;
|
||||
might_add_newline = OUTPUT.length();
|
||||
} : noop;
|
||||
|
||||
var semicolon = options.beautify ? function() {
|
||||
@@ -573,13 +622,14 @@ function OutputStream(options) {
|
||||
if (might_add_newline) {
|
||||
ensure_line_len();
|
||||
}
|
||||
return OUTPUT;
|
||||
return OUTPUT.toString();
|
||||
}
|
||||
|
||||
function has_nlb() {
|
||||
let n = OUTPUT.length - 1;
|
||||
const output = OUTPUT.toString();
|
||||
let n = output.length - 1;
|
||||
while (n >= 0) {
|
||||
const code = OUTPUT.charCodeAt(n);
|
||||
const code = output.charCodeAt(n);
|
||||
if (code === CODE_LINE_BREAK) {
|
||||
return true;
|
||||
}
|
||||
@@ -716,7 +766,7 @@ function OutputStream(options) {
|
||||
!/comment[134]/.test(c.type)
|
||||
))) return;
|
||||
printed_comments.add(comments);
|
||||
var insert = OUTPUT.length;
|
||||
var insert = OUTPUT.length();
|
||||
comments.filter(comment_filter, node).forEach(function(c, i) {
|
||||
if (printed_comments.has(c)) return;
|
||||
printed_comments.add(c);
|
||||
@@ -745,9 +795,21 @@ function OutputStream(options) {
|
||||
need_space = true;
|
||||
}
|
||||
});
|
||||
if (OUTPUT.length > insert) newline_insert = insert;
|
||||
if (OUTPUT.length() > insert) newline_insert = insert;
|
||||
}
|
||||
|
||||
    /**
     * When output.option("_destroy_ast") is enabled, destroy the function.
     * Call this after printing it.
     */
    const gc_scope =
        options["_destroy_ast"]
            ? function gc_scope(scope) {
                scope.body.length = 0;
                scope.argnames.length = 0;
            }
            : noop;
|
||||
var stack = [];
|
||||
return {
|
||||
get : get,
|
||||
@@ -775,7 +837,7 @@ function OutputStream(options) {
|
||||
var encoded = encode_string(str, quote);
|
||||
if (escape_directive === true && !encoded.includes("\\")) {
|
||||
// Insert semicolons to break directive prologue
|
||||
if (!EXPECT_DIRECTIVE.test(OUTPUT)) {
|
||||
if (!EXPECT_DIRECTIVE.test(OUTPUT.toString())) {
|
||||
force_semicolon();
|
||||
}
|
||||
force_semicolon();
|
||||
@@ -794,6 +856,7 @@ function OutputStream(options) {
|
||||
with_square : with_square,
|
||||
add_mapping : add_mapping,
|
||||
option : function(opt) { return options[opt]; },
|
||||
gc_scope,
|
||||
printed_comments: printed_comments,
|
||||
prepend_comments: readonly ? noop : prepend_comments,
|
||||
append_comments : readonly || comment_filter === return_false ? noop : append_comments,
|
||||
@@ -1050,7 +1113,8 @@ function OutputStream(options) {
|
||||
var p = output.parent();
|
||||
if (this.args.length === 0
|
||||
&& (p instanceof AST_PropAccess // (new Date).getTime(), (new Date)["getTime"]()
|
||||
|| p instanceof AST_Call && p.expression === this)) // (new foo)(bar)
|
||||
|| p instanceof AST_Call && p.expression === this
|
||||
|| p instanceof AST_PrefixedTemplateString && p.prefix === this)) // (new foo)(bar)
|
||||
return true;
|
||||
});
|
||||
|
||||
@@ -1183,12 +1247,14 @@ function OutputStream(options) {
|
||||
output.with_indent(output.next_indent(), function() {
|
||||
output.append_comments(self, true);
|
||||
});
|
||||
output.add_mapping(self.end);
|
||||
output.print("}");
|
||||
}
|
||||
function print_braced(self, output, allow_directives) {
|
||||
if (self.body.length > 0) {
|
||||
output.with_block(function() {
|
||||
display_body(self.body, false, output, allow_directives);
|
||||
output.add_mapping(self.end);
|
||||
});
|
||||
} else print_braced_empty(self, output);
|
||||
}
|
||||
@@ -1309,6 +1375,7 @@ function OutputStream(options) {
|
||||
});
|
||||
DEFPRINT(AST_Lambda, function(self, output) {
|
||||
self._do_print(output);
|
||||
output.gc_scope(self);
|
||||
});
|
||||
|
||||
DEFPRINT(AST_PrefixedTemplateString, function(self, output) {
|
||||
@@ -1388,6 +1455,7 @@ function OutputStream(options) {
|
||||
print_braced(self, output);
|
||||
}
|
||||
if (needs_parens) { output.print(")"); }
|
||||
output.gc_scope(self);
|
||||
});
|
||||
|
||||
/* -----[ exits ]----- */
|
||||
@@ -1631,6 +1699,10 @@ function OutputStream(options) {
|
||||
output.space();
|
||||
}
|
||||
self.module_name.print(output);
|
||||
if (self.assert_clause) {
|
||||
output.print("assert");
|
||||
self.assert_clause.print(output);
|
||||
}
|
||||
output.semicolon();
|
||||
});
|
||||
DEFPRINT(AST_ImportMeta, function(self, output) {
|
||||
@@ -1696,6 +1768,10 @@ function OutputStream(options) {
|
||||
output.space();
|
||||
self.module_name.print(output);
|
||||
}
|
||||
if (self.assert_clause) {
|
||||
output.print("assert");
|
||||
self.assert_clause.print(output);
|
||||
}
|
||||
if (self.exported_value
|
||||
&& !(self.exported_value instanceof AST_Defun ||
|
||||
self.exported_value instanceof AST_Function ||
|
||||
@@ -1713,7 +1789,11 @@ function OutputStream(options) {
|
||||
// https://github.com/mishoo/UglifyJS2/issues/60
|
||||
if (noin) {
|
||||
parens = walk(node, node => {
|
||||
if (node instanceof AST_Scope) return true;
|
||||
// Don't go into scopes -- except arrow functions:
|
||||
// https://github.com/terser/terser/issues/1019#issuecomment-877642607
|
||||
if (node instanceof AST_Scope && !(node instanceof AST_Arrow)) {
|
||||
return true;
|
||||
}
|
||||
if (node instanceof AST_Binary && node.operator == "in") {
|
||||
return walk_abort; // makes walk() return true
|
||||
}
|
||||
@@ -1783,7 +1863,7 @@ function OutputStream(options) {
|
||||
var expr = self.expression;
|
||||
expr.print(output);
|
||||
var prop = self.property;
|
||||
var print_computed = RESERVED_WORDS.has(prop)
|
||||
var print_computed = ALL_RESERVED_WORDS.has(prop)
|
||||
? output.option("ie8")
|
||||
: !is_identifier_string(
|
||||
prop,
|
||||
@@ -1816,6 +1896,7 @@ function OutputStream(options) {
|
||||
|
||||
if (self.optional) output.print("?");
|
||||
output.print(".#");
|
||||
output.add_mapping(self.end);
|
||||
output.print_name(prop);
|
||||
});
|
||||
DEFPRINT(AST_Sub, function(self, output) {
|
||||
@@ -1964,7 +2045,7 @@ function OutputStream(options) {
|
||||
}
|
||||
return output.print(make_num(key));
|
||||
}
|
||||
var print_string = RESERVED_WORDS.has(key)
|
||||
var print_string = ALL_RESERVED_WORDS.has(key)
|
||||
? output.option("ie8")
|
||||
: (
|
||||
output.option("ecma") < 2015 || output.option("safari10")
|
||||
@@ -1991,7 +2072,7 @@ function OutputStream(options) {
|
||||
output.option("ecma") >= 2015 || output.option("safari10")
|
||||
) &&
|
||||
get_name(self.value) === self.key &&
|
||||
!RESERVED_WORDS.has(self.key)
|
||||
!ALL_RESERVED_WORDS.has(self.key)
|
||||
) {
|
||||
print_property_name(self.key, self.quote, output);
|
||||
|
||||
@@ -2152,7 +2233,7 @@ function OutputStream(options) {
|
||||
flags = flags ? sort_regexp_flags(flags) : "";
|
||||
source = source.replace(r_slash_script, slash_script_replace);
|
||||
|
||||
output.print(output.to_utf8(`/${source}/${flags}`));
|
||||
output.print(output.to_utf8(`/${source}/${flags}`, false, true));
|
||||
|
||||
const parent = output.parent();
|
||||
if (
|
||||
@@ -2269,8 +2350,10 @@ function OutputStream(options) {
|
||||
DEFMAP([
|
||||
AST_ObjectGetter,
|
||||
AST_ObjectSetter,
|
||||
AST_PrivateGetter,
|
||||
AST_PrivateSetter,
|
||||
], function(output) {
|
||||
output.add_mapping(this.start, this.key.name);
|
||||
output.add_mapping(this.key.end, this.key.name);
|
||||
});
|
||||
|
||||
DEFMAP([ AST_ObjectProperty ], function(output) {
|
||||
|
169
node_modules/terser/lib/parse.js
generated
vendored
@@ -162,17 +162,19 @@ import {
|
||||
} from "./ast.js";
|
||||
|
||||
var LATEST_RAW = ""; // Only used for numbers and template strings
|
||||
var LATEST_TEMPLATE_END = true;
|
||||
var TEMPLATE_RAWS = new Map(); // Raw template strings
|
||||
|
||||
var KEYWORDS = "break case catch class const continue debugger default delete do else export extends finally for function if in instanceof let new return switch throw try typeof var void while with";
|
||||
var KEYWORDS_ATOM = "false null true";
|
||||
var RESERVED_WORDS = "enum implements import interface package private protected public static super this " + KEYWORDS_ATOM + " " + KEYWORDS;
|
||||
var RESERVED_WORDS = "enum import super this " + KEYWORDS_ATOM + " " + KEYWORDS;
|
||||
var ALL_RESERVED_WORDS = "implements interface package private protected public static " + RESERVED_WORDS;
|
||||
var KEYWORDS_BEFORE_EXPRESSION = "return new delete throw else case yield await";
|
||||
|
||||
KEYWORDS = makePredicate(KEYWORDS);
|
||||
RESERVED_WORDS = makePredicate(RESERVED_WORDS);
|
||||
KEYWORDS_BEFORE_EXPRESSION = makePredicate(KEYWORDS_BEFORE_EXPRESSION);
|
||||
KEYWORDS_ATOM = makePredicate(KEYWORDS_ATOM);
|
||||
ALL_RESERVED_WORDS = makePredicate(ALL_RESERVED_WORDS);
|
||||
|
||||
var OPERATOR_CHARS = makePredicate(characters("+-*&%=<>!?|~^"));
|
||||
|
||||
@@ -691,8 +693,8 @@ function tokenizer($TEXT, filename, html5_comments, shebang) {
|
||||
next(true, true);
|
||||
S.brace_counter++;
|
||||
tok = token(begin ? "template_head" : "template_substitution", content);
|
||||
LATEST_RAW = raw;
|
||||
LATEST_TEMPLATE_END = false;
|
||||
TEMPLATE_RAWS.set(tok, raw);
|
||||
tok.template_end = false;
|
||||
return tok;
|
||||
}
|
||||
|
||||
@@ -708,8 +710,8 @@ function tokenizer($TEXT, filename, html5_comments, shebang) {
|
||||
}
|
||||
S.template_braces.pop();
|
||||
tok = token(begin ? "template_head" : "template_substitution", content);
|
||||
LATEST_RAW = raw;
|
||||
LATEST_TEMPLATE_END = true;
|
||||
TEMPLATE_RAWS.set(tok, raw);
|
||||
tok.template_end = true;
|
||||
return tok;
|
||||
});
|
||||
|
||||
@@ -1220,7 +1222,7 @@ function parse($TEXT, options) {
|
||||
}
|
||||
if (S.token.value == "import" && !is_token(peek(), "punc", "(") && !is_token(peek(), "punc", ".")) {
|
||||
next();
|
||||
var node = import_();
|
||||
var node = import_statement();
|
||||
semicolon();
|
||||
return node;
|
||||
}
|
||||
@@ -1372,7 +1374,7 @@ function parse($TEXT, options) {
|
||||
case "export":
|
||||
if (!is_token(peek(), "punc", "(")) {
|
||||
next();
|
||||
var node = export_();
|
||||
var node = export_statement();
|
||||
if (is("punc", ";")) semicolon();
|
||||
return node;
|
||||
}
|
||||
@@ -1570,66 +1572,66 @@ function parse($TEXT, options) {
|
||||
});
|
||||
};
|
||||
|
||||
function track_used_binding_identifiers(is_parameter, strict) {
|
||||
var parameters = new Set();
|
||||
var duplicate = false;
|
||||
var default_assignment = false;
|
||||
var spread = false;
|
||||
var strict_mode = !!strict;
|
||||
var tracker = {
|
||||
add_parameter: function(token) {
|
||||
if (parameters.has(token.value)) {
|
||||
if (duplicate === false) {
|
||||
duplicate = token;
|
||||
}
|
||||
tracker.check_strict();
|
||||
} else {
|
||||
parameters.add(token.value);
|
||||
if (is_parameter) {
|
||||
switch (token.value) {
|
||||
case "arguments":
|
||||
case "eval":
|
||||
case "yield":
|
||||
if (strict_mode) {
|
||||
token_error(token, "Unexpected " + token.value + " identifier as parameter inside strict mode");
|
||||
}
|
||||
break;
|
||||
default:
|
||||
if (RESERVED_WORDS.has(token.value)) {
|
||||
unexpected();
|
||||
}
|
||||
class UsedParametersTracker {
|
||||
constructor(is_parameter, strict, duplicates_ok = false) {
|
||||
this.is_parameter = is_parameter;
|
||||
this.duplicates_ok = duplicates_ok;
|
||||
this.parameters = new Set();
|
||||
this.duplicate = null;
|
||||
this.default_assignment = false;
|
||||
this.spread = false;
|
||||
this.strict_mode = !!strict;
|
||||
}
|
||||
add_parameter(token) {
|
||||
if (this.parameters.has(token.value)) {
|
||||
if (this.duplicate === null) {
|
||||
this.duplicate = token;
|
||||
}
|
||||
this.check_strict();
|
||||
} else {
|
||||
this.parameters.add(token.value);
|
||||
if (this.is_parameter) {
|
||||
switch (token.value) {
|
||||
case "arguments":
|
||||
case "eval":
|
||||
case "yield":
|
||||
if (this.strict_mode) {
|
||||
token_error(token, "Unexpected " + token.value + " identifier as parameter inside strict mode");
|
||||
}
|
||||
break;
|
||||
default:
|
||||
if (RESERVED_WORDS.has(token.value)) {
|
||||
unexpected();
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
mark_default_assignment: function(token) {
|
||||
if (default_assignment === false) {
|
||||
default_assignment = token;
|
||||
}
|
||||
},
|
||||
mark_spread: function(token) {
|
||||
if (spread === false) {
|
||||
spread = token;
|
||||
}
|
||||
},
|
||||
mark_strict_mode: function() {
|
||||
strict_mode = true;
|
||||
},
|
||||
is_strict: function() {
|
||||
return default_assignment !== false || spread !== false || strict_mode;
|
||||
},
|
||||
check_strict: function() {
|
||||
if (tracker.is_strict() && duplicate !== false) {
|
||||
token_error(duplicate, "Parameter " + duplicate.value + " was used already");
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
return tracker;
|
||||
}
|
||||
mark_default_assignment(token) {
|
||||
if (this.default_assignment === false) {
|
||||
this.default_assignment = token;
|
||||
}
|
||||
}
|
||||
mark_spread(token) {
|
||||
if (this.spread === false) {
|
||||
this.spread = token;
|
||||
}
|
||||
}
|
||||
mark_strict_mode() {
|
||||
this.strict_mode = true;
|
||||
}
|
||||
is_strict() {
|
||||
return this.default_assignment !== false || this.spread !== false || this.strict_mode;
|
||||
}
|
||||
check_strict() {
|
||||
if (this.is_strict() && this.duplicate !== null && !this.duplicates_ok) {
|
||||
token_error(this.duplicate, "Parameter " + this.duplicate.value + " was used already");
|
||||
}
|
||||
}
|
||||
}
|
||||
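A behavioural sketch of the tracker above (illustrative only; it runs inside parse(), where token_error and unexpected are in scope, and tokens are objects with a value field):

    // const used = new UsedParametersTracker(true, false); // parameters, non-strict
    // used.add_parameter({ value: "a" });
    // used.add_parameter({ value: "a" });           // duplicate remembered, no error yet
    // used.mark_default_assignment({ value: "a" }); // a default value forces the strict rules
    // used.check_strict();                          // "Parameter a was used already"
    // With duplicates_ok = true (used for var destructuring below), the same
    // sequence stays silent.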
|
||||
function parameters(params) {
|
||||
var used_parameters = track_used_binding_identifiers(true, S.input.has_directive("use strict"));
|
||||
var used_parameters = new UsedParametersTracker(true, S.input.has_directive("use strict"));
|
||||
|
||||
expect("(");
|
||||
|
||||
@@ -1653,7 +1655,7 @@ function parse($TEXT, options) {
|
||||
var param;
|
||||
var expand = false;
|
||||
if (used_parameters === undefined) {
|
||||
used_parameters = track_used_binding_identifiers(true, S.input.has_directive("use strict"));
|
||||
used_parameters = new UsedParametersTracker(true, S.input.has_directive("use strict"));
|
||||
}
|
||||
if (is("expand", "...")) {
|
||||
expand = S.token;
|
||||
@@ -1696,7 +1698,9 @@ function parse($TEXT, options) {
|
||||
var expand_token;
|
||||
var first_token = S.token;
|
||||
if (used_parameters === undefined) {
|
||||
used_parameters = track_used_binding_identifiers(false, S.input.has_directive("use strict"));
|
||||
const strict = S.input.has_directive("use strict");
|
||||
const duplicates_ok = symbol_type === AST_SymbolVar;
|
||||
used_parameters = new UsedParametersTracker(false, strict, duplicates_ok);
|
||||
}
|
||||
symbol_type = symbol_type === undefined ? AST_SymbolFunarg : symbol_type;
|
||||
if (is("punc", "[")) {
|
||||
@@ -2089,7 +2093,7 @@ function parse($TEXT, options) {
|
||||
if (is("punc", "{") || is("punc", "[")) {
|
||||
def = new AST_VarDef({
|
||||
start: S.token,
|
||||
name: binding_element(undefined ,sym_type),
|
||||
name: binding_element(undefined, sym_type),
|
||||
value: is("operator", "=") ? (expect_token("operator", "="), expression(false, no_in)) : null,
|
||||
end: prev()
|
||||
});
|
||||
@@ -2363,19 +2367,19 @@ function parse($TEXT, options) {
|
||||
|
||||
segments.push(new AST_TemplateSegment({
|
||||
start: S.token,
|
||||
raw: LATEST_RAW,
|
||||
raw: TEMPLATE_RAWS.get(S.token),
|
||||
value: S.token.value,
|
||||
end: S.token
|
||||
}));
|
||||
|
||||
while (!LATEST_TEMPLATE_END) {
|
||||
while (!S.token.template_end) {
|
||||
next();
|
||||
handle_regexp();
|
||||
segments.push(expression(true));
|
||||
|
||||
segments.push(new AST_TemplateSegment({
|
||||
start: S.token,
|
||||
raw: LATEST_RAW,
|
||||
raw: TEMPLATE_RAWS.get(S.token),
|
||||
value: S.token.value,
|
||||
end: S.token
|
||||
}));
|
||||
@@ -2549,7 +2553,7 @@ function parse($TEXT, options) {
|
||||
};
|
||||
|
||||
const is_not_method_start = () =>
|
||||
!is("punc", "(") && !is("punc", ",") && !is("punc", "}") && !is("operator", "=");
|
||||
!is("punc", "(") && !is("punc", ",") && !is("punc", "}") && !is("punc", ";") && !is("operator", "=");
|
||||
|
||||
var is_async = false;
|
||||
var is_static = false;
|
||||
@@ -2664,7 +2668,15 @@ function parse($TEXT, options) {
|
||||
}
|
||||
}
|
||||
|
||||
function import_() {
|
||||
function maybe_import_assertion() {
|
||||
if (is("name", "assert") && !has_newline_before(S.token)) {
|
||||
next();
|
||||
return object_or_destructuring_();
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
function import_statement() {
|
||||
var start = prev();
|
||||
|
||||
var imported_name;
|
||||
@@ -2687,16 +2699,20 @@ function parse($TEXT, options) {
|
||||
unexpected();
|
||||
}
|
||||
next();
|
||||
|
||||
const assert_clause = maybe_import_assertion();
|
||||
|
||||
return new AST_Import({
|
||||
start: start,
|
||||
imported_name: imported_name,
|
||||
imported_names: imported_names,
|
||||
start,
|
||||
imported_name,
|
||||
imported_names,
|
||||
module_name: new AST_String({
|
||||
start: mod_str,
|
||||
value: mod_str.value,
|
||||
quote: mod_str.quote,
|
||||
end: mod_str,
|
||||
}),
|
||||
assert_clause,
|
||||
end: S.token,
|
||||
});
|
||||
}
|
||||
@@ -2804,7 +2820,7 @@ function parse($TEXT, options) {
|
||||
return names;
|
||||
}
|
||||
|
||||
function export_() {
|
||||
function export_statement() {
|
||||
var start = S.token;
|
||||
var is_default;
|
||||
var exported_names;
|
||||
@@ -2822,6 +2838,8 @@ function parse($TEXT, options) {
|
||||
}
|
||||
next();
|
||||
|
||||
const assert_clause = maybe_import_assertion();
|
||||
|
||||
return new AST_Export({
|
||||
start: start,
|
||||
is_default: is_default,
|
||||
@@ -2833,6 +2851,7 @@ function parse($TEXT, options) {
|
||||
end: mod_str,
|
||||
}),
|
||||
end: prev(),
|
||||
assert_clause
|
||||
});
|
||||
} else {
|
||||
return new AST_Export({
|
||||
@@ -2878,6 +2897,7 @@ function parse($TEXT, options) {
|
||||
exported_value: exported_value,
|
||||
exported_definition: exported_definition,
|
||||
end: prev(),
|
||||
assert_clause: null
|
||||
});
|
||||
}
|
||||
|
||||
@@ -3322,6 +3342,7 @@ function parse($TEXT, options) {
|
||||
} else {
|
||||
toplevel = new AST_Toplevel({ start: start, body: body, end: end });
|
||||
}
|
||||
TEMPLATE_RAWS = new Map();
|
||||
return toplevel;
|
||||
})();
|
||||
|
||||
@@ -3339,6 +3360,6 @@ export {
|
||||
JS_Parse_Error,
|
||||
parse,
|
||||
PRECEDENCE,
|
||||
RESERVED_WORDS,
|
||||
ALL_RESERVED_WORDS,
|
||||
tokenizer,
|
||||
};
|
||||
|
93
node_modules/terser/lib/propmangle.js
generated
vendored
@@ -59,6 +59,8 @@ import {
|
||||
AST_ObjectKeyVal,
|
||||
AST_ObjectProperty,
|
||||
AST_PrivateMethod,
|
||||
AST_PrivateGetter,
|
||||
AST_PrivateSetter,
|
||||
AST_Sequence,
|
||||
AST_String,
|
||||
AST_Sub,
|
||||
@@ -76,7 +78,7 @@ function find_builtins(reserved) {
|
||||
var global_ref = typeof global === "object" ? global : self;
|
||||
|
||||
new_globals.forEach(function (new_global) {
|
||||
objects[new_global] = global_ref[new_global] || new Function();
|
||||
objects[new_global] = global_ref[new_global] || function() {};
|
||||
});
|
||||
|
||||
[
|
||||
@@ -140,33 +142,61 @@ function addStrings(node, add) {
|
||||
}));
|
||||
}
|
||||
|
||||
function mangle_private_properties(ast, options) {
    var cprivate = -1;
    var private_cache = new Map();
    var nth_identifier = options.nth_identifier || base54;

    ast = ast.transform(new TreeTransformer(function(node) {
        if (
            node instanceof AST_ClassPrivateProperty
            || node instanceof AST_PrivateMethod
            || node instanceof AST_PrivateGetter
            || node instanceof AST_PrivateSetter
        ) {
            node.key.name = mangle_private(node.key.name);
        } else if (node instanceof AST_DotHash) {
            node.property = mangle_private(node.property);
        }
    }));
    return ast;

    function mangle_private(name) {
        let mangled = private_cache.get(name);
        if (!mangled) {
            mangled = nth_identifier.get(++cprivate);
            private_cache.set(name, mangled);
        }

        return mangled;
    }
}
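An nth_identifier hook of the same shape is also exposed through the public mangler options (per the defaults shown in this file and in scope.js further down); a hedged sketch, with an invented naming scheme, assuming an ES module so top-level await is available:

    import { minify } from "terser";

    const nth_identifier = {
        get: (n) => "$" + n.toString(36)   // must yield valid, non-reserved identifiers
    };

    const result = await minify("const secretValue = 1; console.log(secretValue);", {
        mangle: { nth_identifier }
    });
    console.log(result.code);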
|
||||
function mangle_properties(ast, options) {
|
||||
options = defaults(options, {
|
||||
builtins: false,
|
||||
cache: null,
|
||||
debug: false,
|
||||
keep_quoted: false,
|
||||
nth_identifier: base54,
|
||||
only_cache: false,
|
||||
regex: null,
|
||||
reserved: null,
|
||||
undeclared: false,
|
||||
}, true);
|
||||
|
||||
var nth_identifier = options.nth_identifier;
|
||||
|
||||
var reserved_option = options.reserved;
|
||||
if (!Array.isArray(reserved_option)) reserved_option = [reserved_option];
|
||||
var reserved = new Set(reserved_option);
|
||||
if (!options.builtins) find_builtins(reserved);
|
||||
|
||||
var cname = -1;
|
||||
var cprivate = -1;
|
||||
|
||||
var cache;
|
||||
var private_cache = new Map();
|
||||
if (options.cache) {
|
||||
cache = options.cache.props;
|
||||
cache.forEach(function(mangled_name) {
|
||||
reserved.add(mangled_name);
|
||||
});
|
||||
} else {
|
||||
cache = new Map();
|
||||
}
|
||||
@@ -184,27 +214,29 @@ function mangle_properties(ast, options) {
|
||||
|
||||
var names_to_mangle = new Set();
|
||||
var unmangleable = new Set();
|
||||
var private_properties = new Set();
|
||||
// Track each already-mangled name to prevent nth_identifier from generating
|
||||
// the same name.
|
||||
cache.forEach((mangled_name) => unmangleable.add(mangled_name));
|
||||
|
||||
var keep_quoted_strict = options.keep_quoted === "strict";
|
||||
var keep_quoted = !!options.keep_quoted;
|
||||
|
||||
// step 1: find candidates to mangle
|
||||
ast.walk(new TreeWalker(function(node) {
|
||||
if (
|
||||
node instanceof AST_ClassPrivateProperty
|
||||
|| node instanceof AST_PrivateMethod
|
||||
|| node instanceof AST_PrivateGetter
|
||||
|| node instanceof AST_PrivateSetter
|
||||
|| node instanceof AST_DotHash
|
||||
) {
|
||||
private_properties.add(node.key.name);
|
||||
} else if (node instanceof AST_DotHash) {
|
||||
private_properties.add(node.property);
|
||||
// handled by mangle_private_properties
|
||||
} else if (node instanceof AST_ObjectKeyVal) {
|
||||
if (typeof node.key == "string" &&
|
||||
(!keep_quoted_strict || !node.quote)) {
|
||||
if (typeof node.key == "string" && (!keep_quoted || !node.quote)) {
|
||||
add(node.key);
|
||||
}
|
||||
} else if (node instanceof AST_ObjectProperty) {
|
||||
// setter or getter, since KeyVal is handled above
|
||||
if (!keep_quoted_strict || !node.key.end.quote) {
|
||||
if (!keep_quoted || !node.quote) {
|
||||
add(node.key.name);
|
||||
}
|
||||
} else if (node instanceof AST_Dot) {
|
||||
@@ -217,11 +249,11 @@ function mangle_properties(ast, options) {
|
||||
declared = !(root.thedef && root.thedef.undeclared);
|
||||
}
|
||||
if (declared &&
|
||||
(!keep_quoted_strict || !node.quote)) {
|
||||
(!keep_quoted || !node.quote)) {
|
||||
add(node.property);
|
||||
}
|
||||
} else if (node instanceof AST_Sub) {
|
||||
if (!keep_quoted_strict) {
|
||||
if (!keep_quoted) {
|
||||
addStrings(node.property, add);
|
||||
}
|
||||
} else if (node instanceof AST_Call
|
||||
@@ -237,25 +269,25 @@ function mangle_properties(ast, options) {
|
||||
if (
|
||||
node instanceof AST_ClassPrivateProperty
|
||||
|| node instanceof AST_PrivateMethod
|
||||
|| node instanceof AST_PrivateGetter
|
||||
|| node instanceof AST_PrivateSetter
|
||||
|| node instanceof AST_DotHash
|
||||
) {
|
||||
node.key.name = mangle_private(node.key.name);
|
||||
} else if (node instanceof AST_DotHash) {
|
||||
node.property = mangle_private(node.property);
|
||||
// handled by mangle_private_properties
|
||||
} else if (node instanceof AST_ObjectKeyVal) {
|
||||
if (typeof node.key == "string" &&
|
||||
(!keep_quoted_strict || !node.quote)) {
|
||||
if (typeof node.key == "string" && (!keep_quoted || !node.quote)) {
|
||||
node.key = mangle(node.key);
|
||||
}
|
||||
} else if (node instanceof AST_ObjectProperty) {
|
||||
// setter, getter, method or class field
|
||||
if (!keep_quoted_strict || !node.key.end.quote) {
|
||||
if (!keep_quoted || !node.quote) {
|
||||
node.key.name = mangle(node.key.name);
|
||||
}
|
||||
} else if (node instanceof AST_Dot) {
|
||||
if (!keep_quoted_strict || !node.quote) {
|
||||
if (!keep_quoted || !node.quote) {
|
||||
node.property = mangle(node.property);
|
||||
}
|
||||
} else if (!options.keep_quoted && node instanceof AST_Sub) {
|
||||
} else if (!keep_quoted && node instanceof AST_Sub) {
|
||||
node.property = mangleStrings(node.property);
|
||||
} else if (node instanceof AST_Call
|
||||
&& node.expression.print_to_string() == "Object.defineProperty") {
|
||||
@@ -312,7 +344,7 @@ function mangle_properties(ast, options) {
|
||||
// either debug mode is off, or it is on and we could not use the mangled name
|
||||
if (!mangled) {
|
||||
do {
|
||||
mangled = base54(++cname);
|
||||
mangled = nth_identifier.get(++cname);
|
||||
} while (!can_mangle(mangled));
|
||||
}
|
||||
|
||||
@@ -321,16 +353,6 @@ function mangle_properties(ast, options) {
|
||||
return mangled;
|
||||
}
|
||||
|
||||
function mangle_private(name) {
|
||||
let mangled = private_cache.get(name);
|
||||
if (!mangled) {
|
||||
mangled = base54(++cprivate);
|
||||
private_cache.set(name, mangled);
|
||||
}
|
||||
|
||||
return mangled;
|
||||
}
|
||||
|
||||
function mangleStrings(node) {
|
||||
return node.transform(new TreeTransformer(function(node) {
|
||||
if (node instanceof AST_Sequence) {
|
||||
@@ -350,4 +372,5 @@ function mangle_properties(ast, options) {
|
||||
export {
|
||||
reserve_quoted_keys,
|
||||
mangle_properties,
|
||||
mangle_private_properties,
|
||||
};
|
||||
|
104
node_modules/terser/lib/scope.js
generated
vendored
@@ -107,7 +107,7 @@ import {
|
||||
walk
|
||||
} from "./ast.js";
|
||||
import {
|
||||
RESERVED_WORDS,
|
||||
ALL_RESERVED_WORDS,
|
||||
js_error,
|
||||
} from "./parse.js";
|
||||
|
||||
@@ -116,6 +116,11 @@ const MASK_EXPORT_WANT_MANGLE = 1 << 1;
|
||||
|
||||
let function_defs = null;
|
||||
let unmangleable_names = null;
|
||||
/**
|
||||
* When defined, there is a function declaration somewhere that's inside of a block.
|
||||
* See https://tc39.es/ecma262/multipage/additional-ecmascript-features-for-web-browsers.html#sec-block-level-function-declarations-web-legacy-compatibility-semantics
|
||||
*/
|
||||
let scopes_with_block_defuns = null;
|
||||
|
||||
class SymbolDef {
|
||||
constructor(scope, orig, init) {
|
||||
@@ -299,7 +304,14 @@ AST_Scope.DEFMETHOD("figure_out_scope", function(options, { parent_scope = null,
|
||||
// scope when we encounter the AST_Defun node (which is
|
||||
// instanceof AST_Scope) but we get to the symbol a bit
|
||||
// later.
|
||||
mark_export((node.scope = defun.parent_scope.get_defun_scope()).def_function(node, defun), 1);
|
||||
const closest_scope = defun.parent_scope;
|
||||
|
||||
// In strict mode, function definitions are block-scoped
|
||||
node.scope = tw.directives["use strict"]
|
||||
? closest_scope
|
||||
: closest_scope.get_defun_scope();
|
||||
|
||||
mark_export(node.scope.def_function(node, defun), 1);
|
||||
} else if (node instanceof AST_SymbolClass) {
|
||||
mark_export(defun.def_variable(node, defun), 1);
|
||||
} else if (node instanceof AST_SymbolImport) {
|
||||
@@ -478,7 +490,6 @@ AST_Toplevel.DEFMETHOD("def_global", function(node) {
|
||||
|
||||
AST_Scope.DEFMETHOD("init_scope_vars", function(parent_scope) {
|
||||
this.variables = new Map(); // map name to AST_SymbolVar (variables defined in this scope; includes functions)
|
||||
this.functions = new Map(); // map name to AST_SymbolDefun (functions defined in this scope)
|
||||
this.uses_with = false; // will be set to true if this or some nested scope uses the `with` statement
|
||||
this.uses_eval = false; // will be set to true if this or nested scope uses the global `eval`
|
||||
this.parent_scope = parent_scope; // the parent scope
|
||||
@@ -641,7 +652,6 @@ AST_Scope.DEFMETHOD("find_variable", function(name) {
|
||||
AST_Scope.DEFMETHOD("def_function", function(symbol, init) {
|
||||
var def = this.def_variable(symbol, init);
|
||||
if (!def.init || def.init instanceof AST_Defun) def.init = init;
|
||||
this.functions.set(symbol.name, def);
|
||||
return def;
|
||||
});
|
||||
|
||||
@@ -661,10 +671,20 @@ AST_Scope.DEFMETHOD("def_variable", function(symbol, init) {
|
||||
});
|
||||
|
||||
function next_mangled(scope, options) {
|
||||
let defun_scope;
|
||||
if (
|
||||
scopes_with_block_defuns
|
||||
&& (defun_scope = scope.get_defun_scope())
|
||||
&& scopes_with_block_defuns.has(defun_scope)
|
||||
) {
|
||||
scope = defun_scope;
|
||||
}
|
||||
|
||||
var ext = scope.enclosed;
|
||||
var nth_identifier = options.nth_identifier;
|
||||
out: while (true) {
|
||||
var m = base54(++scope.cname);
|
||||
if (RESERVED_WORDS.has(m)) continue; // skip over "do"
|
||||
var m = nth_identifier.get(++scope.cname);
|
||||
if (ALL_RESERVED_WORDS.has(m)) continue; // skip over "do"
|
||||
|
||||
// https://github.com/mishoo/UglifyJS2/issues/242 -- do not
|
||||
// shadow a name reserved from mangling.
|
||||
@@ -739,6 +759,7 @@ AST_Symbol.DEFMETHOD("global", function() {
|
||||
AST_Toplevel.DEFMETHOD("_default_mangler_options", function(options) {
|
||||
options = defaults(options, {
|
||||
eval : false,
|
||||
nth_identifier : base54,
|
||||
ie8 : false,
|
||||
keep_classnames: false,
|
||||
keep_fnames : false,
|
||||
@@ -760,6 +781,7 @@ AST_Toplevel.DEFMETHOD("_default_mangler_options", function(options) {
|
||||
|
||||
AST_Toplevel.DEFMETHOD("mangle_names", function(options) {
|
||||
options = this._default_mangler_options(options);
|
||||
var nth_identifier = options.nth_identifier;
|
||||
|
||||
// We only need to mangle declaration nodes. Special logic wired
|
||||
// into the code generator will display the mangled name if it's
|
||||
@@ -773,6 +795,8 @@ AST_Toplevel.DEFMETHOD("mangle_names", function(options) {
|
||||
}
|
||||
|
||||
const mangled_names = this.mangled_names = new Set();
|
||||
unmangleable_names = new Set();
|
||||
|
||||
if (options.cache) {
|
||||
this.globals.forEach(collect);
|
||||
if (options.cache.props) {
|
||||
@@ -790,6 +814,13 @@ AST_Toplevel.DEFMETHOD("mangle_names", function(options) {
|
||||
lname = save_nesting;
|
||||
return true; // don't descend again in TreeWalker
|
||||
}
|
||||
if (
|
||||
node instanceof AST_Defun
|
||||
&& !(tw.parent() instanceof AST_Scope)
|
||||
) {
|
||||
scopes_with_block_defuns = scopes_with_block_defuns || new Set();
|
||||
scopes_with_block_defuns.add(node.parent_scope.get_defun_scope());
|
||||
}
|
||||
if (node instanceof AST_Scope) {
|
||||
node.variables.forEach(collect);
|
||||
return;
|
||||
@@ -811,8 +842,8 @@ AST_Toplevel.DEFMETHOD("mangle_names", function(options) {
|
||||
if (node instanceof AST_Label) {
|
||||
let name;
|
||||
do {
|
||||
name = base54(++lname);
|
||||
} while (RESERVED_WORDS.has(name));
|
||||
name = nth_identifier.get(++lname);
|
||||
} while (ALL_RESERVED_WORDS.has(name));
|
||||
node.mangled_name = name;
|
||||
return true;
|
||||
}
|
||||
@@ -825,7 +856,6 @@ AST_Toplevel.DEFMETHOD("mangle_names", function(options) {
|
||||
this.walk(tw);
|
||||
|
||||
if (options.keep_fnames || options.keep_classnames) {
|
||||
unmangleable_names = new Set();
|
||||
// Collect a set of short names which are unmangleable,
|
||||
// for use in avoiding collisions in next_mangled.
|
||||
to_mangle.forEach(def => {
|
||||
@@ -839,11 +869,12 @@ AST_Toplevel.DEFMETHOD("mangle_names", function(options) {
|
||||
|
||||
function_defs = null;
|
||||
unmangleable_names = null;
|
||||
scopes_with_block_defuns = null;
|
||||
|
||||
function collect(symbol) {
|
||||
const should_mangle = !options.reserved.has(symbol.name)
|
||||
&& !(symbol.export & MASK_EXPORT_DONT_MANGLE);
|
||||
if (should_mangle) {
|
||||
if (symbol.export & MASK_EXPORT_DONT_MANGLE) {
|
||||
unmangleable_names.add(symbol.name);
|
||||
} else if (!options.reserved.has(symbol.name)) {
|
||||
to_mangle.push(symbol);
|
||||
}
|
||||
}
|
||||
@@ -873,9 +904,12 @@ AST_Toplevel.DEFMETHOD("find_colliding_names", function(options) {
|
||||
});
|
||||
|
||||
AST_Toplevel.DEFMETHOD("expand_names", function(options) {
|
||||
base54.reset();
|
||||
base54.sort();
|
||||
options = this._default_mangler_options(options);
|
||||
var nth_identifier = options.nth_identifier;
|
||||
if (nth_identifier.reset && nth_identifier.sort) {
|
||||
nth_identifier.reset();
|
||||
nth_identifier.sort();
|
||||
}
|
||||
var avoid = this.find_colliding_names(options);
|
||||
var cname = 0;
|
||||
this.globals.forEach(rename);
|
||||
@@ -887,8 +921,8 @@ AST_Toplevel.DEFMETHOD("expand_names", function(options) {
|
||||
function next_name() {
|
||||
var name;
|
||||
do {
|
||||
name = base54(cname++);
|
||||
} while (avoid.has(name) || RESERVED_WORDS.has(name));
|
||||
name = nth_identifier.get(cname++);
|
||||
} while (avoid.has(name) || ALL_RESERVED_WORDS.has(name));
|
||||
return name;
|
||||
}
|
||||
|
||||
@@ -914,30 +948,37 @@ AST_Sequence.DEFMETHOD("tail_node", function() {
|
||||
|
||||
AST_Toplevel.DEFMETHOD("compute_char_frequency", function(options) {
|
||||
options = this._default_mangler_options(options);
|
||||
var nth_identifier = options.nth_identifier;
|
||||
if (!nth_identifier.reset || !nth_identifier.consider || !nth_identifier.sort) {
|
||||
// If the identifier mangler is invariant, skip computing character frequency.
|
||||
return;
|
||||
}
|
||||
nth_identifier.reset();
|
||||
|
||||
try {
|
||||
AST_Node.prototype.print = function(stream, force_parens) {
|
||||
this._print(stream, force_parens);
|
||||
if (this instanceof AST_Symbol && !this.unmangleable(options)) {
|
||||
base54.consider(this.name, -1);
|
||||
nth_identifier.consider(this.name, -1);
|
||||
} else if (options.properties) {
|
||||
if (this instanceof AST_DotHash) {
|
||||
base54.consider("#" + this.property, -1);
|
||||
nth_identifier.consider("#" + this.property, -1);
|
||||
} else if (this instanceof AST_Dot) {
|
||||
base54.consider(this.property, -1);
|
||||
nth_identifier.consider(this.property, -1);
|
||||
} else if (this instanceof AST_Sub) {
|
||||
skip_string(this.property);
|
||||
}
|
||||
}
|
||||
};
|
||||
base54.consider(this.print_to_string(), 1);
|
||||
nth_identifier.consider(this.print_to_string(), 1);
|
||||
} finally {
|
||||
AST_Node.prototype.print = AST_Node.prototype._print;
|
||||
}
|
||||
base54.sort();
|
||||
nth_identifier.sort();
|
||||
|
||||
function skip_string(node) {
|
||||
if (node instanceof AST_String) {
|
||||
base54.consider(node.value, -1);
|
||||
nth_identifier.consider(node.value, -1);
|
||||
} else if (node instanceof AST_Conditional) {
|
||||
skip_string(node.consequent);
|
||||
skip_string(node.alternative);
|
||||
@@ -961,19 +1002,20 @@ const base54 = (() => {
|
||||
frequency.set(ch, 0);
|
||||
});
|
||||
}
|
||||
base54.consider = function(str, delta) {
|
||||
function consider(str, delta) {
|
||||
for (var i = str.length; --i >= 0;) {
|
||||
frequency.set(str[i], frequency.get(str[i]) + delta);
|
||||
}
|
||||
};
|
||||
}
|
||||
function compare(a, b) {
|
||||
return frequency.get(b) - frequency.get(a);
|
||||
}
|
||||
base54.sort = function() {
|
||||
function sort() {
|
||||
chars = mergeSort(leading, compare).concat(mergeSort(digits, compare));
|
||||
};
|
||||
base54.reset = reset;
|
||||
}
|
||||
// Ensure this is in a usable initial state.
|
||||
reset();
|
||||
sort();
|
||||
function base54(num) {
|
||||
var ret = "", base = 54;
|
||||
num++;
|
||||
@@ -985,7 +1027,13 @@ const base54 = (() => {
|
||||
} while (num > 0);
|
||||
return ret;
|
||||
}
|
||||
return base54;
|
||||
|
||||
return {
|
||||
get: base54,
|
||||
consider,
|
||||
reset,
|
||||
sort
|
||||
};
|
||||
})();
|
||||
|
||||
export {
|
||||
|
14
node_modules/terser/lib/size.js
generated
vendored
14
node_modules/terser/lib/size.js
generated
vendored
@@ -115,6 +115,7 @@ AST_Directive.prototype._size = function () {
|
||||
return 2 + this.value.length;
|
||||
};
|
||||
|
||||
/** Count commas/semicolons necessary to show a list of expressions/statements */
|
||||
const list_overhead = (array) => array.length && array.length - 1;
|
||||
|
||||
AST_Block.prototype._size = function () {
|
||||
@@ -167,7 +168,7 @@ AST_Arrow.prototype._size = function () {
|
||||
&& this.argnames[0] instanceof AST_Symbol
|
||||
)
|
||||
) {
|
||||
args_and_arrow += 2;
|
||||
args_and_arrow += 2; // parens around the args
|
||||
}
|
||||
|
||||
const body_overhead = this.is_braceless() ? 0 : list_overhead(this.body) + 2;
|
||||
@@ -229,19 +230,16 @@ AST_Finally.prototype._size = function () {
|
||||
return 7 + list_overhead(this.body);
|
||||
};
|
||||
|
||||
/*#__INLINE__*/
|
||||
const def_size = (size, def) => size + list_overhead(def.definitions);
|
||||
|
||||
AST_Var.prototype._size = function () {
|
||||
return def_size(4, this);
|
||||
return 4 + list_overhead(this.definitions);
|
||||
};
|
||||
|
||||
AST_Let.prototype._size = function () {
|
||||
return def_size(4, this);
|
||||
return 4 + list_overhead(this.definitions);
|
||||
};
|
||||
|
||||
AST_Const.prototype._size = function () {
|
||||
return def_size(6, this);
|
||||
return 6 + list_overhead(this.definitions);
|
||||
};
|
||||
|
||||
AST_VarDef.prototype._size = function () {
|
||||
@@ -477,7 +475,7 @@ AST_NaN.prototype._size = () => 3;
|
||||
|
||||
AST_Undefined.prototype._size = () => 6; // "void 0"
|
||||
|
||||
AST_Hole.prototype._size = () => 0; // comma is taken into account
|
||||
AST_Hole.prototype._size = () => 0; // comma is taken into account by list_overhead()
|
||||
|
||||
AST_Infinity.prototype._size = () => 8;
|
||||
|
||||
|
86
node_modules/terser/lib/sourcemap.js
generated
vendored
86
node_modules/terser/lib/sourcemap.js
generated
vendored
@@ -43,45 +43,59 @@
|
||||
|
||||
"use strict";
|
||||
|
||||
import MOZ_SourceMap from "source-map";
|
||||
import {
|
||||
defaults,
|
||||
} from "./utils/index.js";
|
||||
import {SourceMapConsumer, SourceMapGenerator} from "@jridgewell/source-map";
|
||||
import {defaults, HOP} from "./utils/index.js";
|
||||
|
||||
// a small wrapper around fitzgen's source-map library
|
||||
// a small wrapper around source-map and @jridgewell/source-map
|
||||
async function SourceMap(options) {
|
||||
options = defaults(options, {
|
||||
file : null,
|
||||
root : null,
|
||||
orig : null,
|
||||
|
||||
orig_line_diff : 0,
|
||||
dest_line_diff : 0,
|
||||
files: {},
|
||||
});
|
||||
|
||||
var orig_map;
|
||||
var generator = new MOZ_SourceMap.SourceMapGenerator({
|
||||
var generator = new SourceMapGenerator({
|
||||
file : options.file,
|
||||
sourceRoot : options.root
|
||||
});
|
||||
|
||||
let sourcesContent = {__proto__: null};
|
||||
let files = options.files;
|
||||
for (var name in files) if (HOP(files, name)) {
|
||||
sourcesContent[name] = files[name];
|
||||
}
|
||||
if (options.orig) {
|
||||
orig_map = await new MOZ_SourceMap.SourceMapConsumer(options.orig);
|
||||
orig_map.sources.forEach(function(source) {
|
||||
var sourceContent = orig_map.sourceContentFor(source, true);
|
||||
if (sourceContent) {
|
||||
generator.setSourceContent(source, sourceContent);
|
||||
}
|
||||
});
|
||||
// We support both @jridgewell/source-map (which has a sync
|
||||
// SourceMapConsumer) and source-map (which has an async
|
||||
// SourceMapConsumer).
|
||||
orig_map = await new SourceMapConsumer(options.orig);
|
||||
if (orig_map.sourcesContent) {
|
||||
orig_map.sources.forEach(function(source, i) {
|
||||
var content = orig_map.sourcesContent[i];
|
||||
if (content) {
|
||||
sourcesContent[source] = content;
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
function add(source, gen_line, gen_col, orig_line, orig_col, name) {
|
||||
let generatedPos = { line: gen_line, column: gen_col };
|
||||
|
||||
if (orig_map) {
|
||||
var info = orig_map.originalPositionFor({
|
||||
line: orig_line,
|
||||
column: orig_col
|
||||
});
|
||||
if (info.source === null) {
|
||||
generator.addMapping({
|
||||
generated: generatedPos,
|
||||
original: null,
|
||||
source: null,
|
||||
name: null
|
||||
});
|
||||
return;
|
||||
}
|
||||
source = info.source;
|
||||
@@ -90,22 +104,42 @@ async function SourceMap(options) {
|
||||
name = info.name || name;
|
||||
}
|
||||
generator.addMapping({
|
||||
generated : { line: gen_line + options.dest_line_diff, column: gen_col },
|
||||
original : { line: orig_line + options.orig_line_diff, column: orig_col },
|
||||
generated : generatedPos,
|
||||
original : { line: orig_line, column: orig_col },
|
||||
source : source,
|
||||
name : name
|
||||
});
|
||||
generator.setSourceContent(source, sourcesContent[source]);
|
||||
}
|
||||
|
||||
function clean(map) {
|
||||
const allNull = map.sourcesContent && map.sourcesContent.every(c => c == null);
|
||||
if (allNull) delete map.sourcesContent;
|
||||
if (map.file === undefined) delete map.file;
|
||||
if (map.sourceRoot === undefined) delete map.sourceRoot;
|
||||
return map;
|
||||
}
|
||||
|
||||
function getDecoded() {
|
||||
if (!generator.toDecodedMap) return null;
|
||||
return clean(generator.toDecodedMap());
|
||||
}
|
||||
|
||||
function getEncoded() {
|
||||
return clean(generator.toJSON());
|
||||
}
|
||||
|
||||
function destroy() {
|
||||
// @jridgewell/source-map's SourceMapConsumer does not need to be
|
||||
// manually freed.
|
||||
if (orig_map && orig_map.destroy) orig_map.destroy();
|
||||
}
|
||||
|
||||
return {
|
||||
add : add,
|
||||
get : function() { return generator; },
|
||||
toString : function() { return generator.toString(); },
|
||||
destroy : function () {
|
||||
if (orig_map && orig_map.destroy) {
|
||||
orig_map.destroy();
|
||||
}
|
||||
}
|
||||
add,
|
||||
getDecoded,
|
||||
getEncoded,
|
||||
destroy,
|
||||
};
|
||||
}
|
||||
|
||||
|
4
node_modules/terser/lib/transform.js
generated
vendored
4
node_modules/terser/lib/transform.js
generated
vendored
@@ -57,7 +57,6 @@ import {
|
||||
AST_Definitions,
|
||||
AST_Destructuring,
|
||||
AST_Do,
|
||||
AST_Dot,
|
||||
AST_Exit,
|
||||
AST_Expansion,
|
||||
AST_Export,
|
||||
@@ -74,6 +73,7 @@ import {
|
||||
AST_Object,
|
||||
AST_ObjectProperty,
|
||||
AST_PrefixedTemplateString,
|
||||
AST_PropAccess,
|
||||
AST_Sequence,
|
||||
AST_SimpleStatement,
|
||||
AST_Sub,
|
||||
@@ -228,7 +228,7 @@ def_transform(AST_Sequence, function(self, tw) {
|
||||
: [new AST_Number({ value: 0 })];
|
||||
});
|
||||
|
||||
def_transform(AST_Dot, function(self, tw) {
|
||||
def_transform(AST_PropAccess, function(self, tw) {
|
||||
self.expression = self.expression.transform(tw);
|
||||
});
|
||||
|
||||
|
16
node_modules/terser/lib/utils/index.js
generated
vendored
16
node_modules/terser/lib/utils/index.js
generated
vendored
@@ -64,10 +64,8 @@ class DefaultsError extends Error {
|
||||
function defaults(args, defs, croak) {
|
||||
if (args === true) {
|
||||
args = {};
|
||||
}
|
||||
|
||||
if (args != null && typeof args === "object") {
|
||||
args = Object.assign({}, args);
|
||||
} else if (args != null && typeof args === "object") {
|
||||
args = {...args};
|
||||
}
|
||||
|
||||
const ret = args || {};
|
||||
@@ -251,7 +249,15 @@ function regexp_source_fix(source) {
|
||||
return (escaped ? "" : "\\") + lineTerminatorEscape[match];
|
||||
});
|
||||
}
|
||||
const all_flags = "gimuy";
|
||||
|
||||
// Subset of regexps that is not going to cause regexp based DDOS
|
||||
// https://owasp.org/www-community/attacks/Regular_expression_Denial_of_Service_-_ReDoS
|
||||
const re_safe_regexp = /^[\\/|\0\s\w^$.[\]()]*$/;
|
||||
|
||||
/** Check if the regexp is safe for Terser to create without risking a RegExp DOS */
|
||||
export const regexp_is_safe = (source) => re_safe_regexp.test(source);
|
||||
|
||||
const all_flags = "dgimsuy";
|
||||
function sort_regexp_flags(flags) {
|
||||
const existing_flags = new Set(flags.split(""));
|
||||
let out = "";
|
||||
|
344
node_modules/terser/node_modules/source-map/CHANGELOG.md
generated
vendored
344
node_modules/terser/node_modules/source-map/CHANGELOG.md
generated
vendored
@@ -1,344 +0,0 @@
|
||||
# Change Log
|
||||
|
||||
## 0.7.3
|
||||
|
||||
* Fix a bug where nested uses of `SourceMapConsumer` could result in a
|
||||
`TypeError`. [#338](https://github.com/mozilla/source-map/issues/338)
|
||||
[#330](https://github.com/mozilla/source-map/issues/330)
|
||||
[#319](https://github.com/mozilla/source-map/issues/319)
|
||||
|
||||
## 0.7.2
|
||||
|
||||
* Another 3x speed up in `SourceMapConsumer`. Read about it here:
|
||||
http://fitzgeraldnick.com/2018/02/26/speed-without-wizardry.html
|
||||
|
||||
## 0.7.1
|
||||
|
||||
* Updated TypeScript typings. [#321][]
|
||||
|
||||
[#321]: https://github.com/mozilla/source-map/pull/321
|
||||
|
||||
## 0.7.0
|
||||
|
||||
* `SourceMapConsumer` now uses WebAssembly, and is **much** faster! Read about
|
||||
it here:
|
||||
https://hacks.mozilla.org/2018/01/oxidizing-source-maps-with-rust-and-webassembly/
|
||||
|
||||
* **Breaking change:** `new SourceMapConsumer` now returns a `Promise` object
|
||||
that resolves to the newly constructed `SourceMapConsumer` instance, rather
|
||||
than returning the new instance immediately.
|
||||
|
||||
* **Breaking change:** when you're done using a `SourceMapConsumer` instance,
|
||||
you must call `SourceMapConsumer.prototype.destroy` on it. After calling
|
||||
`destroy`, you must not use the instance again.
|
||||
|
||||
* **Breaking change:** `SourceMapConsumer` used to be able to handle lines,
|
||||
columns numbers and source and name indices up to `2^53 - 1` (aka
|
||||
`Number.MAX_SAFE_INTEGER`). It can now only handle them up to `2^32 - 1`.
|
||||
|
||||
* **Breaking change:** The `source-map` library now uses modern ECMAScript-isms:
|
||||
`let`, arrow functions, `async`, etc. Use Babel to compile it down to
|
||||
ECMAScript 5 if you need to support older JavaScript environments.
|
||||
|
||||
* **Breaking change:** Drop support for Node < 8. If you want to support older
|
||||
versions of node, please use v0.6 or below.
|
||||
|
||||
## 0.5.6
|
||||
|
||||
* Fix for regression when people were using numbers as names in source maps. See
|
||||
#236.
|
||||
|
||||
## 0.5.5
|
||||
|
||||
* Fix "regression" of unsupported, implementation behavior that half the world
|
||||
happens to have come to depend on. See #235.
|
||||
|
||||
* Fix regression involving function hoisting in SpiderMonkey. See #233.
|
||||
|
||||
## 0.5.4
|
||||
|
||||
* Large performance improvements to source-map serialization. See #228 and #229.
|
||||
|
||||
## 0.5.3
|
||||
|
||||
* Do not include unnecessary distribution files. See
|
||||
commit ef7006f8d1647e0a83fdc60f04f5a7ca54886f86.
|
||||
|
||||
## 0.5.2
|
||||
|
||||
* Include browser distributions of the library in package.json's `files`. See
|
||||
issue #212.
|
||||
|
||||
## 0.5.1
|
||||
|
||||
* Fix latent bugs in IndexedSourceMapConsumer.prototype._parseMappings. See
|
||||
ff05274becc9e6e1295ed60f3ea090d31d843379.
|
||||
|
||||
## 0.5.0
|
||||
|
||||
* Node 0.8 is no longer supported.
|
||||
|
||||
* Use webpack instead of dryice for bundling.
|
||||
|
||||
* Big speedups serializing source maps. See pull request #203.
|
||||
|
||||
* Fix a bug with `SourceMapConsumer.prototype.sourceContentFor` and sources that
|
||||
explicitly start with the source root. See issue #199.
|
||||
|
||||
## 0.4.4
|
||||
|
||||
* Fix an issue where using a `SourceMapGenerator` after having created a
|
||||
`SourceMapConsumer` from it via `SourceMapConsumer.fromSourceMap` failed. See
|
||||
issue #191.
|
||||
|
||||
* Fix an issue with where `SourceMapGenerator` would mistakenly consider
|
||||
different mappings as duplicates of each other and avoid generating them. See
|
||||
issue #192.
|
||||
|
||||
## 0.4.3
|
||||
|
||||
* A very large number of performance improvements, particularly when parsing
|
||||
source maps. Collectively about 75% of time shaved off of the source map
|
||||
parsing benchmark!
|
||||
|
||||
* Fix a bug in `SourceMapConsumer.prototype.allGeneratedPositionsFor` and fuzzy
|
||||
searching in the presence of a column option. See issue #177.
|
||||
|
||||
* Fix a bug with joining a source and its source root when the source is above
|
||||
the root. See issue #182.
|
||||
|
||||
* Add the `SourceMapConsumer.prototype.hasContentsOfAllSources` method to
|
||||
determine when all sources' contents are inlined into the source map. See
|
||||
issue #190.
|
||||
|
||||
## 0.4.2
|
||||
|
||||
* Add an `.npmignore` file so that the benchmarks aren't pulled down by
|
||||
dependent projects. Issue #169.
|
||||
|
||||
* Add an optional `column` argument to
|
||||
`SourceMapConsumer.prototype.allGeneratedPositionsFor` and better handle lines
|
||||
with no mappings. Issues #172 and #173.
|
||||
|
||||
## 0.4.1
|
||||
|
||||
* Fix accidentally defining a global variable. #170.
|
||||
|
||||
## 0.4.0
|
||||
|
||||
* The default direction for fuzzy searching was changed back to its original
|
||||
direction. See #164.
|
||||
|
||||
* There is now a `bias` option you can supply to `SourceMapConsumer` to control
|
||||
the fuzzy searching direction. See #167.
|
||||
|
||||
* About an 8% speed up in parsing source maps. See #159.
|
||||
|
||||
* Added a benchmark for parsing and generating source maps.
|
||||
|
||||
## 0.3.0
|
||||
|
||||
* Change the default direction that searching for positions fuzzes when there is
|
||||
not an exact match. See #154.
|
||||
|
||||
* Support for environments using json2.js for JSON serialization. See #156.
|
||||
|
||||
## 0.2.0
|
||||
|
||||
* Support for consuming "indexed" source maps which do not have any remote
|
||||
sections. See pull request #127. This introduces a minor backwards
|
||||
incompatibility if you are monkey patching `SourceMapConsumer.prototype`
|
||||
methods.
|
||||
|
||||
## 0.1.43
|
||||
|
||||
* Performance improvements for `SourceMapGenerator` and `SourceNode`. See issue
|
||||
#148 for some discussion and issues #150, #151, and #152 for implementations.
|
||||
|
||||
## 0.1.42
|
||||
|
||||
* Fix an issue where `SourceNode`s from different versions of the source-map
|
||||
library couldn't be used in conjunction with each other. See issue #142.
|
||||
|
||||
## 0.1.41
|
||||
|
||||
* Fix a bug with getting the source content of relative sources with a "./"
|
||||
prefix. See issue #145 and [Bug 1090768](bugzil.la/1090768).
|
||||
|
||||
* Add the `SourceMapConsumer.prototype.computeColumnSpans` method to compute the
|
||||
column span of each mapping.
|
||||
|
||||
* Add the `SourceMapConsumer.prototype.allGeneratedPositionsFor` method to find
|
||||
all generated positions associated with a given original source and line.
|
||||
|
||||
## 0.1.40
|
||||
|
||||
* Performance improvements for parsing source maps in SourceMapConsumer.
|
||||
|
||||
## 0.1.39
|
||||
|
||||
* Fix a bug where setting a source's contents to null before any source content
|
||||
had been set before threw a TypeError. See issue #131.
|
||||
|
||||
## 0.1.38
|
||||
|
||||
* Fix a bug where finding relative paths from an empty path were creating
|
||||
absolute paths. See issue #129.
|
||||
|
||||
## 0.1.37
|
||||
|
||||
* Fix a bug where if the source root was an empty string, relative source paths
|
||||
would turn into absolute source paths. Issue #124.
|
||||
|
||||
## 0.1.36
|
||||
|
||||
* Allow the `names` mapping property to be an empty string. Issue #121.
|
||||
|
||||
## 0.1.35
|
||||
|
||||
* A third optional parameter was added to `SourceNode.fromStringWithSourceMap`
|
||||
to specify a path that relative sources in the second parameter should be
|
||||
relative to. Issue #105.
|
||||
|
||||
* If no file property is given to a `SourceMapGenerator`, then the resulting
|
||||
source map will no longer have a `null` file property. The property will
|
||||
simply not exist. Issue #104.
|
||||
|
||||
* Fixed a bug where consecutive newlines were ignored in `SourceNode`s.
|
||||
Issue #116.
|
||||
|
||||
## 0.1.34
|
||||
|
||||
* Make `SourceNode` work with windows style ("\r\n") newlines. Issue #103.
|
||||
|
||||
* Fix bug involving source contents and the
|
||||
`SourceMapGenerator.prototype.applySourceMap`. Issue #100.
|
||||
|
||||
## 0.1.33
|
||||
|
||||
* Fix some edge cases surrounding path joining and URL resolution.
|
||||
|
||||
* Add a third parameter for relative path to
|
||||
`SourceMapGenerator.prototype.applySourceMap`.
|
||||
|
||||
* Fix issues with mappings and EOLs.
|
||||
|
||||
## 0.1.32
|
||||
|
||||
* Fixed a bug where SourceMapConsumer couldn't handle negative relative columns
|
||||
(issue 92).
|
||||
|
||||
* Fixed test runner to actually report number of failed tests as its process
|
||||
exit code.
|
||||
|
||||
* Fixed a typo when reporting bad mappings (issue 87).
|
||||
|
||||
## 0.1.31
|
||||
|
||||
* Delay parsing the mappings in SourceMapConsumer until queried for a source
|
||||
location.
|
||||
|
||||
* Support Sass source maps (which at the time of writing deviate from the spec
|
||||
in small ways) in SourceMapConsumer.
|
||||
|
||||
## 0.1.30
|
||||
|
||||
* Do not join source root with a source, when the source is a data URI.
|
||||
|
||||
* Extend the test runner to allow running single specific test files at a time.
|
||||
|
||||
* Performance improvements in `SourceNode.prototype.walk` and
|
||||
`SourceMapConsumer.prototype.eachMapping`.
|
||||
|
||||
* Source map browser builds will now work inside Workers.
|
||||
|
||||
* Better error messages when attempting to add an invalid mapping to a
|
||||
`SourceMapGenerator`.
|
||||
|
||||
## 0.1.29
|
||||
|
||||
* Allow duplicate entries in the `names` and `sources` arrays of source maps
|
||||
(usually from TypeScript) we are parsing. Fixes github issue 72.
|
||||
|
||||
## 0.1.28
|
||||
|
||||
* Skip duplicate mappings when creating source maps from SourceNode; github
|
||||
issue 75.
|
||||
|
||||
## 0.1.27
|
||||
|
||||
* Don't throw an error when the `file` property is missing in SourceMapConsumer,
|
||||
we don't use it anyway.
|
||||
|
||||
## 0.1.26
|
||||
|
||||
* Fix SourceNode.fromStringWithSourceMap for empty maps. Fixes github issue 70.
|
||||
|
||||
## 0.1.25
|
||||
|
||||
* Make compatible with browserify
|
||||
|
||||
## 0.1.24
|
||||
|
||||
* Fix issue with absolute paths and `file://` URIs. See
|
||||
https://bugzilla.mozilla.org/show_bug.cgi?id=885597
|
||||
|
||||
## 0.1.23
|
||||
|
||||
* Fix issue with absolute paths and sourcesContent, github issue 64.
|
||||
|
||||
## 0.1.22
|
||||
|
||||
* Ignore duplicate mappings in SourceMapGenerator. Fixes github issue 21.
|
||||
|
||||
## 0.1.21
|
||||
|
||||
* Fixed handling of sources that start with a slash so that they are relative to
|
||||
the source root's host.
|
||||
|
||||
## 0.1.20
|
||||
|
||||
* Fixed github issue #43: absolute URLs aren't joined with the source root
|
||||
anymore.
|
||||
|
||||
## 0.1.19
|
||||
|
||||
* Using Travis CI to run tests.
|
||||
|
||||
## 0.1.18
|
||||
|
||||
* Fixed a bug in the handling of sourceRoot.
|
||||
|
||||
## 0.1.17
|
||||
|
||||
* Added SourceNode.fromStringWithSourceMap.
|
||||
|
||||
## 0.1.16
|
||||
|
||||
* Added missing documentation.
|
||||
|
||||
* Fixed the generating of empty mappings in SourceNode.
|
||||
|
||||
## 0.1.15
|
||||
|
||||
* Added SourceMapGenerator.applySourceMap.
|
||||
|
||||
## 0.1.14
|
||||
|
||||
* The sourceRoot is now handled consistently.
|
||||
|
||||
## 0.1.13
|
||||
|
||||
* Added SourceMapGenerator.fromSourceMap.
|
||||
|
||||
## 0.1.12
|
||||
|
||||
* SourceNode now generates empty mappings too.
|
||||
|
||||
## 0.1.11
|
||||
|
||||
* Added name support to SourceNode.
|
||||
|
||||
## 0.1.10
|
||||
|
||||
* Added sourcesContent support to the customer and generator.
|
28
node_modules/terser/node_modules/source-map/LICENSE
generated
vendored
28
node_modules/terser/node_modules/source-map/LICENSE
generated
vendored
@@ -1,28 +0,0 @@
|
||||
|
||||
Copyright (c) 2009-2011, Mozilla Foundation and contributors
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are met:
|
||||
|
||||
* Redistributions of source code must retain the above copyright notice, this
|
||||
list of conditions and the following disclaimer.
|
||||
|
||||
* Redistributions in binary form must reproduce the above copyright notice,
|
||||
this list of conditions and the following disclaimer in the documentation
|
||||
and/or other materials provided with the distribution.
|
||||
|
||||
* Neither the names of the Mozilla Foundation nor the names of project
|
||||
contributors may be used to endorse or promote products derived from this
|
||||
software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
|
||||
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
|
||||
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
||||
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
||||
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
|
||||
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
822
node_modules/terser/node_modules/source-map/README.md
generated
vendored
822
node_modules/terser/node_modules/source-map/README.md
generated
vendored
@@ -1,822 +0,0 @@
|
||||
# Source Map
|
||||
|
||||
[](https://travis-ci.org/mozilla/source-map)
|
||||
|
||||
[](https://coveralls.io/github/mozilla/source-map)
|
||||
|
||||
[](https://www.npmjs.com/package/source-map)
|
||||
|
||||
This is a library to generate and consume the source map format
|
||||
[described here][format].
|
||||
|
||||
[format]: https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit
|
||||
|
||||
## Use with Node
|
||||
|
||||
$ npm install source-map
|
||||
|
||||
## Use on the Web
|
||||
|
||||
<script src="https://unpkg.com/source-map@0.7.3/dist/source-map.js"></script>
|
||||
<script>
|
||||
sourceMap.SourceMapConsumer.initialize({
|
||||
"lib/mappings.wasm": "https://unpkg.com/source-map@0.7.3/lib/mappings.wasm"
|
||||
});
|
||||
</script>
|
||||
|
||||
--------------------------------------------------------------------------------
|
||||
|
||||
<!-- `npm run toc` to regenerate the Table of Contents -->
|
||||
|
||||
<!-- START doctoc generated TOC please keep comment here to allow auto update -->
|
||||
<!-- DON'T EDIT THIS SECTION, INSTEAD RE-RUN doctoc TO UPDATE -->
|
||||
## Table of Contents
|
||||
|
||||
- [Examples](#examples)
|
||||
- [Consuming a source map](#consuming-a-source-map)
|
||||
- [Generating a source map](#generating-a-source-map)
|
||||
- [With SourceNode (high level API)](#with-sourcenode-high-level-api)
|
||||
- [With SourceMapGenerator (low level API)](#with-sourcemapgenerator-low-level-api)
|
||||
- [API](#api)
|
||||
- [SourceMapConsumer](#sourcemapconsumer)
|
||||
- [SourceMapConsumer.initialize(options)](#sourcemapconsumerinitializeoptions)
|
||||
- [new SourceMapConsumer(rawSourceMap)](#new-sourcemapconsumerrawsourcemap)
|
||||
- [SourceMapConsumer.with](#sourcemapconsumerwith)
|
||||
- [SourceMapConsumer.prototype.destroy()](#sourcemapconsumerprototypedestroy)
|
||||
- [SourceMapConsumer.prototype.computeColumnSpans()](#sourcemapconsumerprototypecomputecolumnspans)
|
||||
- [SourceMapConsumer.prototype.originalPositionFor(generatedPosition)](#sourcemapconsumerprototypeoriginalpositionforgeneratedposition)
|
||||
- [SourceMapConsumer.prototype.generatedPositionFor(originalPosition)](#sourcemapconsumerprototypegeneratedpositionfororiginalposition)
|
||||
- [SourceMapConsumer.prototype.allGeneratedPositionsFor(originalPosition)](#sourcemapconsumerprototypeallgeneratedpositionsfororiginalposition)
|
||||
- [SourceMapConsumer.prototype.hasContentsOfAllSources()](#sourcemapconsumerprototypehascontentsofallsources)
|
||||
- [SourceMapConsumer.prototype.sourceContentFor(source[, returnNullOnMissing])](#sourcemapconsumerprototypesourcecontentforsource-returnnullonmissing)
|
||||
- [SourceMapConsumer.prototype.eachMapping(callback, context, order)](#sourcemapconsumerprototypeeachmappingcallback-context-order)
|
||||
- [SourceMapGenerator](#sourcemapgenerator)
|
||||
- [new SourceMapGenerator([startOfSourceMap])](#new-sourcemapgeneratorstartofsourcemap)
|
||||
- [SourceMapGenerator.fromSourceMap(sourceMapConsumer)](#sourcemapgeneratorfromsourcemapsourcemapconsumer)
|
||||
- [SourceMapGenerator.prototype.addMapping(mapping)](#sourcemapgeneratorprototypeaddmappingmapping)
|
||||
- [SourceMapGenerator.prototype.setSourceContent(sourceFile, sourceContent)](#sourcemapgeneratorprototypesetsourcecontentsourcefile-sourcecontent)
|
||||
- [SourceMapGenerator.prototype.applySourceMap(sourceMapConsumer[, sourceFile[, sourceMapPath]])](#sourcemapgeneratorprototypeapplysourcemapsourcemapconsumer-sourcefile-sourcemappath)
|
||||
- [SourceMapGenerator.prototype.toString()](#sourcemapgeneratorprototypetostring)
|
||||
- [SourceNode](#sourcenode)
|
||||
- [new SourceNode([line, column, source[, chunk[, name]]])](#new-sourcenodeline-column-source-chunk-name)
|
||||
- [SourceNode.fromStringWithSourceMap(code, sourceMapConsumer[, relativePath])](#sourcenodefromstringwithsourcemapcode-sourcemapconsumer-relativepath)
|
||||
- [SourceNode.prototype.add(chunk)](#sourcenodeprototypeaddchunk)
|
||||
- [SourceNode.prototype.prepend(chunk)](#sourcenodeprototypeprependchunk)
|
||||
- [SourceNode.prototype.setSourceContent(sourceFile, sourceContent)](#sourcenodeprototypesetsourcecontentsourcefile-sourcecontent)
|
||||
- [SourceNode.prototype.walk(fn)](#sourcenodeprototypewalkfn)
|
||||
- [SourceNode.prototype.walkSourceContents(fn)](#sourcenodeprototypewalksourcecontentsfn)
|
||||
- [SourceNode.prototype.join(sep)](#sourcenodeprototypejoinsep)
|
||||
- [SourceNode.prototype.replaceRight(pattern, replacement)](#sourcenodeprototypereplacerightpattern-replacement)
|
||||
- [SourceNode.prototype.toString()](#sourcenodeprototypetostring)
|
||||
- [SourceNode.prototype.toStringWithSourceMap([startOfSourceMap])](#sourcenodeprototypetostringwithsourcemapstartofsourcemap)
|
||||
|
||||
<!-- END doctoc generated TOC please keep comment here to allow auto update -->
|
||||
|
||||
## Examples
|
||||
|
||||
### Consuming a source map
|
||||
|
||||
```js
|
||||
const rawSourceMap = {
|
||||
version: 3,
|
||||
file: 'min.js',
|
||||
names: ['bar', 'baz', 'n'],
|
||||
sources: ['one.js', 'two.js'],
|
||||
sourceRoot: 'http://example.com/www/js/',
|
||||
mappings: 'CAAC,IAAI,IAAM,SAAUA,GAClB,OAAOC,IAAID;CCDb,IAAI,IAAM,SAAUE,GAClB,OAAOA'
|
||||
};
|
||||
|
||||
const whatever = await SourceMapConsumer.with(rawSourceMap, null, consumer => {
|
||||
|
||||
console.log(consumer.sources);
|
||||
// [ 'http://example.com/www/js/one.js',
|
||||
// 'http://example.com/www/js/two.js' ]
|
||||
|
||||
console.log(consumer.originalPositionFor({
|
||||
line: 2,
|
||||
column: 28
|
||||
}));
|
||||
// { source: 'http://example.com/www/js/two.js',
|
||||
// line: 2,
|
||||
// column: 10,
|
||||
// name: 'n' }
|
||||
|
||||
console.log(consumer.generatedPositionFor({
|
||||
source: 'http://example.com/www/js/two.js',
|
||||
line: 2,
|
||||
column: 10
|
||||
}));
|
||||
// { line: 2, column: 28 }
|
||||
|
||||
consumer.eachMapping(function (m) {
|
||||
// ...
|
||||
});
|
||||
|
||||
return computeWhatever();
|
||||
});
|
||||
```
|
||||
|
||||
### Generating a source map
|
||||
|
||||
In depth guide:
|
||||
[**Compiling to JavaScript, and Debugging with Source Maps**](https://hacks.mozilla.org/2013/05/compiling-to-javascript-and-debugging-with-source-maps/)
|
||||
|
||||
#### With SourceNode (high level API)
|
||||
|
||||
```js
|
||||
function compile(ast) {
|
||||
switch (ast.type) {
|
||||
case 'BinaryExpression':
|
||||
return new SourceNode(
|
||||
ast.location.line,
|
||||
ast.location.column,
|
||||
ast.location.source,
|
||||
[compile(ast.left), " + ", compile(ast.right)]
|
||||
);
|
||||
case 'Literal':
|
||||
return new SourceNode(
|
||||
ast.location.line,
|
||||
ast.location.column,
|
||||
ast.location.source,
|
||||
String(ast.value)
|
||||
);
|
||||
// ...
|
||||
default:
|
||||
throw new Error("Bad AST");
|
||||
}
|
||||
}
|
||||
|
||||
var ast = parse("40 + 2", "add.js");
|
||||
console.log(compile(ast).toStringWithSourceMap({
|
||||
file: 'add.js'
|
||||
}));
|
||||
// { code: '40 + 2',
|
||||
// map: [object SourceMapGenerator] }
|
||||
```
|
||||
|
||||
#### With SourceMapGenerator (low level API)
|
||||
|
||||
```js
|
||||
var map = new SourceMapGenerator({
|
||||
file: "source-mapped.js"
|
||||
});
|
||||
|
||||
map.addMapping({
|
||||
generated: {
|
||||
line: 10,
|
||||
column: 35
|
||||
},
|
||||
source: "foo.js",
|
||||
original: {
|
||||
line: 33,
|
||||
column: 2
|
||||
},
|
||||
name: "christopher"
|
||||
});
|
||||
|
||||
console.log(map.toString());
|
||||
// '{"version":3,"file":"source-mapped.js","sources":["foo.js"],"names":["christopher"],"mappings":";;;;;;;;;mCAgCEA"}'
|
||||
```
|
||||
|
||||
## API
|
||||
|
||||
Get a reference to the module:
|
||||
|
||||
```js
|
||||
// Node.js
|
||||
var sourceMap = require('source-map');
|
||||
|
||||
// Browser builds
|
||||
var sourceMap = window.sourceMap;
|
||||
|
||||
// Inside Firefox
|
||||
const sourceMap = require("devtools/toolkit/sourcemap/source-map.js");
|
||||
```
|
||||
|
||||
### SourceMapConsumer
|
||||
|
||||
A `SourceMapConsumer` instance represents a parsed source map which we can query
|
||||
for information about the original file positions by giving it a file position
|
||||
in the generated source.
|
||||
|
||||
#### SourceMapConsumer.initialize(options)
|
||||
|
||||
When using `SourceMapConsumer` outside of node.js, for example on the Web, it
|
||||
needs to know from what URL to load `lib/mappings.wasm`. You must inform it by
|
||||
calling `initialize` before constructing any `SourceMapConsumer`s.
|
||||
|
||||
The options object has the following properties:
|
||||
|
||||
* `"lib/mappings.wasm"`: A `String` containing the URL of the
|
||||
`lib/mappings.wasm` file.
|
||||
|
||||
```js
|
||||
sourceMap.SourceMapConsumer.initialize({
|
||||
"lib/mappings.wasm": "https://example.com/source-map/lib/mappings.wasm"
|
||||
});
|
||||
```
|
||||
|
||||
#### new SourceMapConsumer(rawSourceMap)
|
||||
|
||||
The only parameter is the raw source map (either as a string which can be
|
||||
`JSON.parse`'d, or an object). According to the spec, source maps have the
|
||||
following attributes:
|
||||
|
||||
* `version`: Which version of the source map spec this map is following.
|
||||
|
||||
* `sources`: An array of URLs to the original source files.
|
||||
|
||||
* `names`: An array of identifiers which can be referenced by individual
|
||||
mappings.
|
||||
|
||||
* `sourceRoot`: Optional. The URL root from which all sources are relative.
|
||||
|
||||
* `sourcesContent`: Optional. An array of contents of the original source files.
|
||||
|
||||
* `mappings`: A string of base64 VLQs which contain the actual mappings.
|
||||
|
||||
* `file`: Optional. The generated filename this source map is associated with.
|
||||
|
||||
The promise of the constructed souce map consumer is returned.
|
||||
|
||||
When the `SourceMapConsumer` will no longer be used anymore, you must call its
|
||||
`destroy` method.
|
||||
|
||||
```js
|
||||
const consumer = await new sourceMap.SourceMapConsumer(rawSourceMapJsonData);
|
||||
doStuffWith(consumer);
|
||||
consumer.destroy();
|
||||
```
|
||||
|
||||
Alternatively, you can use `SourceMapConsumer.with` to avoid needing to remember
|
||||
to call `destroy`.
|
||||
|
||||
#### SourceMapConsumer.with
|
||||
|
||||
Construct a new `SourceMapConsumer` from `rawSourceMap` and `sourceMapUrl`
|
||||
(see the `SourceMapConsumer` constructor for details. Then, invoke the `async
|
||||
function f(SourceMapConsumer) -> T` with the newly constructed consumer, wait
|
||||
for `f` to complete, call `destroy` on the consumer, and return `f`'s return
|
||||
value.
|
||||
|
||||
You must not use the consumer after `f` completes!
|
||||
|
||||
By using `with`, you do not have to remember to manually call `destroy` on
|
||||
the consumer, since it will be called automatically once `f` completes.
|
||||
|
||||
```js
|
||||
const xSquared = await SourceMapConsumer.with(
|
||||
myRawSourceMap,
|
||||
null,
|
||||
async function (consumer) {
|
||||
// Use `consumer` inside here and don't worry about remembering
|
||||
// to call `destroy`.
|
||||
|
||||
const x = await whatever(consumer);
|
||||
return x * x;
|
||||
}
|
||||
);
|
||||
|
||||
// You may not use that `consumer` anymore out here; it has
|
||||
// been destroyed. But you can use `xSquared`.
|
||||
console.log(xSquared);
|
||||
```
|
||||
|
||||
#### SourceMapConsumer.prototype.destroy()
|
||||
|
||||
Free this source map consumer's associated wasm data that is manually-managed.
|
||||
|
||||
```js
|
||||
consumer.destroy();
|
||||
```
|
||||
|
||||
Alternatively, you can use `SourceMapConsumer.with` to avoid needing to remember
|
||||
to call `destroy`.
|
||||
|
||||
#### SourceMapConsumer.prototype.computeColumnSpans()
|
||||
|
||||
Compute the last column for each generated mapping. The last column is
|
||||
inclusive.
|
||||
|
||||
```js
|
||||
// Before:
|
||||
consumer.allGeneratedPositionsFor({ line: 2, source: "foo.coffee" })
|
||||
// [ { line: 2,
|
||||
// column: 1 },
|
||||
// { line: 2,
|
||||
// column: 10 },
|
||||
// { line: 2,
|
||||
// column: 20 } ]
|
||||
|
||||
consumer.computeColumnSpans();
|
||||
|
||||
// After:
|
||||
consumer.allGeneratedPositionsFor({ line: 2, source: "foo.coffee" })
|
||||
// [ { line: 2,
|
||||
// column: 1,
|
||||
// lastColumn: 9 },
|
||||
// { line: 2,
|
||||
// column: 10,
|
||||
// lastColumn: 19 },
|
||||
// { line: 2,
|
||||
// column: 20,
|
||||
// lastColumn: Infinity } ]
|
||||
```
|
||||
|
||||
#### SourceMapConsumer.prototype.originalPositionFor(generatedPosition)
|
||||
|
||||
Returns the original source, line, and column information for the generated
|
||||
source's line and column positions provided. The only argument is an object with
|
||||
the following properties:
|
||||
|
||||
* `line`: The line number in the generated source. Line numbers in
|
||||
this library are 1-based (note that the underlying source map
|
||||
specification uses 0-based line numbers -- this library handles the
|
||||
translation).
|
||||
|
||||
* `column`: The column number in the generated source. Column numbers
|
||||
in this library are 0-based.
|
||||
|
||||
* `bias`: Either `SourceMapConsumer.GREATEST_LOWER_BOUND` or
|
||||
`SourceMapConsumer.LEAST_UPPER_BOUND`. Specifies whether to return the closest
|
||||
element that is smaller than or greater than the one we are searching for,
|
||||
respectively, if the exact element cannot be found. Defaults to
|
||||
`SourceMapConsumer.GREATEST_LOWER_BOUND`.
|
||||
|
||||
and an object is returned with the following properties:
|
||||
|
||||
* `source`: The original source file, or null if this information is not
|
||||
available.
|
||||
|
||||
* `line`: The line number in the original source, or null if this information is
|
||||
not available. The line number is 1-based.
|
||||
|
||||
* `column`: The column number in the original source, or null if this
|
||||
information is not available. The column number is 0-based.
|
||||
|
||||
* `name`: The original identifier, or null if this information is not available.
|
||||
|
||||
```js
|
||||
consumer.originalPositionFor({ line: 2, column: 10 })
|
||||
// { source: 'foo.coffee',
|
||||
// line: 2,
|
||||
// column: 2,
|
||||
// name: null }
|
||||
|
||||
consumer.originalPositionFor({ line: 99999999999999999, column: 999999999999999 })
|
||||
// { source: null,
|
||||
// line: null,
|
||||
// column: null,
|
||||
// name: null }
|
||||
```
|
||||
|
||||
#### SourceMapConsumer.prototype.generatedPositionFor(originalPosition)
|
||||
|
||||
Returns the generated line and column information for the original source,
|
||||
line, and column positions provided. The only argument is an object with
|
||||
the following properties:
|
||||
|
||||
* `source`: The filename of the original source.
|
||||
|
||||
* `line`: The line number in the original source. The line number is
|
||||
1-based.
|
||||
|
||||
* `column`: The column number in the original source. The column
|
||||
number is 0-based.
|
||||
|
||||
and an object is returned with the following properties:
|
||||
|
||||
* `line`: The line number in the generated source, or null. The line
|
||||
number is 1-based.
|
||||
|
||||
* `column`: The column number in the generated source, or null. The
|
||||
column number is 0-based.
|
||||
|
||||
```js
|
||||
consumer.generatedPositionFor({ source: "example.js", line: 2, column: 10 })
|
||||
// { line: 1,
|
||||
// column: 56 }
|
||||
```
|
||||
|
||||
#### SourceMapConsumer.prototype.allGeneratedPositionsFor(originalPosition)
|
||||
|
||||
Returns all generated line and column information for the original source, line,
|
||||
and column provided. If no column is provided, returns all mappings
|
||||
corresponding to a either the line we are searching for or the next closest line
|
||||
that has any mappings. Otherwise, returns all mappings corresponding to the
|
||||
given line and either the column we are searching for or the next closest column
|
||||
that has any offsets.
|
||||
|
||||
The only argument is an object with the following properties:
|
||||
|
||||
* `source`: The filename of the original source.
|
||||
|
||||
* `line`: The line number in the original source. The line number is
|
||||
1-based.
|
||||
|
||||
* `column`: Optional. The column number in the original source. The
|
||||
column number is 0-based.
|
||||
|
||||
and an array of objects is returned, each with the following properties:
|
||||
|
||||
* `line`: The line number in the generated source, or null. The line
|
||||
number is 1-based.
|
||||
|
||||
* `column`: The column number in the generated source, or null. The
|
||||
column number is 0-based.
|
||||
|
||||
```js
|
||||
consumer.allGeneratedpositionsfor({ line: 2, source: "foo.coffee" })
|
||||
// [ { line: 2,
|
||||
// column: 1 },
|
||||
// { line: 2,
|
||||
// column: 10 },
|
||||
// { line: 2,
|
||||
// column: 20 } ]
|
||||
```
|
||||
|
||||
#### SourceMapConsumer.prototype.hasContentsOfAllSources()
|
||||
|
||||
Return true if we have the embedded source content for every source listed in
|
||||
the source map, false otherwise.
|
||||
|
||||
In other words, if this method returns `true`, then
|
||||
`consumer.sourceContentFor(s)` will succeed for every source `s` in
|
||||
`consumer.sources`.
|
||||
|
||||
```js
|
||||
// ...
|
||||
if (consumer.hasContentsOfAllSources()) {
|
||||
consumerReadyCallback(consumer);
|
||||
} else {
|
||||
fetchSources(consumer, consumerReadyCallback);
|
||||
}
|
||||
// ...
|
||||
```
|
||||
|
||||
#### SourceMapConsumer.prototype.sourceContentFor(source[, returnNullOnMissing])
|
||||
|
||||
Returns the original source content for the source provided. The only
|
||||
argument is the URL of the original source file.
|
||||
|
||||
If the source content for the given source is not found, then an error is
|
||||
thrown. Optionally, pass `true` as the second param to have `null` returned
|
||||
instead.
|
||||
|
||||
```js
|
||||
consumer.sources
|
||||
// [ "my-cool-lib.clj" ]
|
||||
|
||||
consumer.sourceContentFor("my-cool-lib.clj")
|
||||
// "..."
|
||||
|
||||
consumer.sourceContentFor("this is not in the source map");
|
||||
// Error: "this is not in the source map" is not in the source map
|
||||
|
||||
consumer.sourceContentFor("this is not in the source map", true);
|
||||
// null
|
||||
```
|
||||
|
||||
#### SourceMapConsumer.prototype.eachMapping(callback, context, order)
|
||||
|
||||
Iterate over each mapping between an original source/line/column and a
|
||||
generated line/column in this source map.
|
||||
|
||||
* `callback`: The function that is called with each mapping. Mappings have the
|
||||
form `{ source, generatedLine, generatedColumn, originalLine, originalColumn,
|
||||
name }`
|
||||
|
||||
* `context`: Optional. If specified, this object will be the value of `this`
|
||||
every time that `callback` is called.
|
||||
|
||||
* `order`: Either `SourceMapConsumer.GENERATED_ORDER` or
|
||||
`SourceMapConsumer.ORIGINAL_ORDER`. Specifies whether you want to iterate over
|
||||
the mappings sorted by the generated file's line/column order or the
|
||||
original's source/line/column order, respectively. Defaults to
|
||||
`SourceMapConsumer.GENERATED_ORDER`.
|
||||
|
||||
```js
|
||||
consumer.eachMapping(function (m) { console.log(m); })
|
||||
// ...
|
||||
// { source: 'illmatic.js',
|
||||
// generatedLine: 1,
|
||||
// generatedColumn: 0,
|
||||
// originalLine: 1,
|
||||
// originalColumn: 0,
|
||||
// name: null }
|
||||
// { source: 'illmatic.js',
|
||||
// generatedLine: 2,
|
||||
// generatedColumn: 0,
|
||||
// originalLine: 2,
|
||||
// originalColumn: 0,
|
||||
// name: null }
|
||||
// ...
|
||||
```
|
||||
### SourceMapGenerator
|
||||
|
||||
An instance of the SourceMapGenerator represents a source map which is being
|
||||
built incrementally.
|
||||
|
||||
#### new SourceMapGenerator([startOfSourceMap])
|
||||
|
||||
You may pass an object with the following properties:
|
||||
|
||||
* `file`: The filename of the generated source that this source map is
|
||||
associated with.
|
||||
|
||||
* `sourceRoot`: A root for all relative URLs in this source map.
|
||||
|
||||
* `skipValidation`: Optional. When `true`, disables validation of mappings as
|
||||
they are added. This can improve performance but should be used with
|
||||
discretion, as a last resort. Even then, one should avoid using this flag when
|
||||
running tests, if possible.
|
||||
|
||||
```js
|
||||
var generator = new sourceMap.SourceMapGenerator({
|
||||
file: "my-generated-javascript-file.js",
|
||||
sourceRoot: "http://example.com/app/js/"
|
||||
});
|
||||
```
|
||||
|
||||
#### SourceMapGenerator.fromSourceMap(sourceMapConsumer)
|
||||
|
||||
Creates a new `SourceMapGenerator` from an existing `SourceMapConsumer` instance.
|
||||
|
||||
* `sourceMapConsumer` The SourceMap.
|
||||
|
||||
```js
|
||||
var generator = sourceMap.SourceMapGenerator.fromSourceMap(consumer);
|
||||
```
|
||||
|
||||
#### SourceMapGenerator.prototype.addMapping(mapping)
|
||||
|
||||
Add a single mapping from original source line and column to the generated
|
||||
source's line and column for this source map being created. The mapping object
|
||||
should have the following properties:
|
||||
|
||||
* `generated`: An object with the generated line and column positions.
|
||||
|
||||
* `original`: An object with the original line and column positions.
|
||||
|
||||
* `source`: The original source file (relative to the sourceRoot).
|
||||
|
||||
* `name`: An optional original token name for this mapping.
|
||||
|
||||
```js
|
||||
generator.addMapping({
|
||||
source: "module-one.scm",
|
||||
original: { line: 128, column: 0 },
|
||||
generated: { line: 3, column: 456 }
|
||||
})
|
||||
```
|
||||
|
||||
#### SourceMapGenerator.prototype.setSourceContent(sourceFile, sourceContent)
|
||||
|
||||
Set the source content for an original source file.
|
||||
|
||||
* `sourceFile` the URL of the original source file.
|
||||
|
||||
* `sourceContent` the content of the source file.
|
||||
|
||||
```js
|
||||
generator.setSourceContent("module-one.scm",
|
||||
fs.readFileSync("path/to/module-one.scm"))
|
||||
```
|
||||
|
||||
#### SourceMapGenerator.prototype.applySourceMap(sourceMapConsumer[, sourceFile[, sourceMapPath]])
|
||||
|
||||
Applies a SourceMap for a source file to the SourceMap.
|
||||
Each mapping to the supplied source file is rewritten using the
|
||||
supplied SourceMap. Note: The resolution for the resulting mappings
|
||||
is the minimum of this map and the supplied map.
|
||||
|
||||
* `sourceMapConsumer`: The SourceMap to be applied.
|
||||
|
||||
* `sourceFile`: Optional. The filename of the source file.
|
||||
If omitted, sourceMapConsumer.file will be used, if it exists.
|
||||
Otherwise an error will be thrown.
|
||||
|
||||
* `sourceMapPath`: Optional. The dirname of the path to the SourceMap
|
||||
to be applied. If relative, it is relative to the SourceMap.
|
||||
|
||||
This parameter is needed when the two SourceMaps aren't in the same
|
||||
directory, and the SourceMap to be applied contains relative source
|
||||
paths. If so, those relative source paths need to be rewritten
|
||||
relative to the SourceMap.
|
||||
|
||||
If omitted, it is assumed that both SourceMaps are in the same directory,
|
||||
thus not needing any rewriting. (Supplying `'.'` has the same effect.)
|
||||
|
||||
#### SourceMapGenerator.prototype.toString()
|
||||
|
||||
Renders the source map being generated to a string.
|
||||
|
||||
```js
|
||||
generator.toString()
|
||||
// '{"version":3,"sources":["module-one.scm"],"names":[],"mappings":"...snip...","file":"my-generated-javascript-file.js","sourceRoot":"http://example.com/app/js/"}'
|
||||
```
|
||||
|
||||
### SourceNode
|
||||
|
||||
SourceNodes provide a way to abstract over interpolating and/or concatenating
|
||||
snippets of generated JavaScript source code, while maintaining the line and
|
||||
column information associated between those snippets and the original source
|
||||
code. This is useful as the final intermediate representation a compiler might
|
||||
use before outputting the generated JS and source map.
|
||||
|
||||
#### new SourceNode([line, column, source[, chunk[, name]]])
|
||||
|
||||
* `line`: The original line number associated with this source node, or null if
|
||||
it isn't associated with an original line. The line number is 1-based.
|
||||
|
||||
* `column`: The original column number associated with this source node, or null
|
||||
if it isn't associated with an original column. The column number
|
||||
is 0-based.
|
||||
|
||||
* `source`: The original source's filename; null if no filename is provided.
|
||||
|
||||
* `chunk`: Optional. Is immediately passed to `SourceNode.prototype.add`, see
|
||||
below.
|
||||
|
||||
* `name`: Optional. The original identifier.
|
||||
|
||||
```js
|
||||
var node = new SourceNode(1, 2, "a.cpp", [
|
||||
new SourceNode(3, 4, "b.cpp", "extern int status;\n"),
|
||||
new SourceNode(5, 6, "c.cpp", "std::string* make_string(size_t n);\n"),
|
||||
new SourceNode(7, 8, "d.cpp", "int main(int argc, char** argv) {}\n"),
|
||||
]);
|
||||
```
|
||||
|
||||
#### SourceNode.fromStringWithSourceMap(code, sourceMapConsumer[, relativePath])
|
||||
|
||||
Creates a SourceNode from generated code and a SourceMapConsumer.
|
||||
|
||||
* `code`: The generated code
|
||||
|
||||
* `sourceMapConsumer` The SourceMap for the generated code
|
||||
|
||||
* `relativePath` The optional path that relative sources in `sourceMapConsumer`
|
||||
should be relative to.
|
||||
|
||||
```js
|
||||
const consumer = await new SourceMapConsumer(fs.readFileSync("path/to/my-file.js.map", "utf8"));
|
||||
const node = SourceNode.fromStringWithSourceMap(fs.readFileSync("path/to/my-file.js"), consumer);
|
||||
```
|
||||
|
||||
#### SourceNode.prototype.add(chunk)
|
||||
|
||||
Add a chunk of generated JS to this source node.
|
||||
|
||||
* `chunk`: A string snippet of generated JS code, another instance of
|
||||
`SourceNode`, or an array where each member is one of those things.
|
||||
|
||||
```js
|
||||
node.add(" + ");
|
||||
node.add(otherNode);
|
||||
node.add([leftHandOperandNode, " + ", rightHandOperandNode]);
|
||||
```
|
||||
|
||||
#### SourceNode.prototype.prepend(chunk)
|
||||
|
||||
Prepend a chunk of generated JS to this source node.
|
||||
|
||||
* `chunk`: A string snippet of generated JS code, another instance of
|
||||
`SourceNode`, or an array where each member is one of those things.
|
||||
|
||||
```js
|
||||
node.prepend("/** Build Id: f783haef86324gf **/\n\n");
|
||||
```
|
||||
|
||||
#### SourceNode.prototype.setSourceContent(sourceFile, sourceContent)
|
||||
|
||||
Set the source content for a source file. This will be added to the
|
||||
`SourceMap` in the `sourcesContent` field.
|
||||
|
||||
* `sourceFile`: The filename of the source file
|
||||
|
||||
* `sourceContent`: The content of the source file
|
||||
|
||||
```js
|
||||
node.setSourceContent("module-one.scm",
|
||||
fs.readFileSync("path/to/module-one.scm"))
|
||||
```
|
||||
|
||||
#### SourceNode.prototype.walk(fn)
|
||||
|
||||
Walk over the tree of JS snippets in this node and its children. The walking
|
||||
function is called once for each snippet of JS and is passed that snippet and
|
||||
the its original associated source's line/column location.
|
||||
|
||||
* `fn`: The traversal function.

```js
var node = new SourceNode(1, 2, "a.js", [
  new SourceNode(3, 4, "b.js", "uno"),
  "dos",
  [
    "tres",
    new SourceNode(5, 6, "c.js", "quatro")
  ]
]);

node.walk(function (code, loc) { console.log("WALK:", code, loc); })
// WALK: uno { source: 'b.js', line: 3, column: 4, name: null }
// WALK: dos { source: 'a.js', line: 1, column: 2, name: null }
// WALK: tres { source: 'a.js', line: 1, column: 2, name: null }
// WALK: quatro { source: 'c.js', line: 5, column: 6, name: null }
```

#### SourceNode.prototype.walkSourceContents(fn)

Walk over the tree of SourceNodes. The walking function is called for each
source file content and is passed the filename and source content.

* `fn`: The traversal function.

```js
var a = new SourceNode(1, 2, "a.js", "generated from a");
a.setSourceContent("a.js", "original a");
var b = new SourceNode(1, 2, "b.js", "generated from b");
b.setSourceContent("b.js", "original b");
var c = new SourceNode(1, 2, "c.js", "generated from c");
c.setSourceContent("c.js", "original c");

var node = new SourceNode(null, null, null, [a, b, c]);
node.walkSourceContents(function (source, contents) { console.log("WALK:", source, ":", contents); })
// WALK: a.js : original a
// WALK: b.js : original b
// WALK: c.js : original c
```

#### SourceNode.prototype.join(sep)

Like `Array.prototype.join` except for SourceNodes. Inserts the separator
between each of this source node's children.

* `sep`: The separator.

```js
var lhs = new SourceNode(1, 2, "a.rs", "my_copy");
var operand = new SourceNode(3, 4, "a.rs", "=");
var rhs = new SourceNode(5, 6, "a.rs", "orig.clone()");

var node = new SourceNode(null, null, null, [ lhs, operand, rhs ]);
var joinedNode = node.join(" ");
```

#### SourceNode.prototype.replaceRight(pattern, replacement)

Call `String.prototype.replace` on the very right-most source snippet. Useful
for trimming white space from the end of a source node, etc.

* `pattern`: The pattern to replace.

* `replacement`: The thing to replace the pattern with.

```js
// Trim trailing white space.
node.replaceRight(/\s*$/, "");
```

#### SourceNode.prototype.toString()

Return the string representation of this source node. Walks over the tree and
concatenates all the various snippets together into one string.

```js
var node = new SourceNode(1, 2, "a.js", [
  new SourceNode(3, 4, "b.js", "uno"),
  "dos",
  [
    "tres",
    new SourceNode(5, 6, "c.js", "quatro")
  ]
]);

node.toString()
// 'unodostresquatro'
```

#### SourceNode.prototype.toStringWithSourceMap([startOfSourceMap])

Returns the string representation of this tree of source nodes, plus a
SourceMapGenerator which contains all the mappings between the generated and
original sources.

The arguments are the same as those to `new SourceMapGenerator`.

```js
var node = new SourceNode(1, 2, "a.js", [
  new SourceNode(3, 4, "b.js", "uno"),
  "dos",
  [
    "tres",
    new SourceNode(5, 6, "c.js", "quatro")
  ]
]);

node.toStringWithSourceMap({ file: "my-output-file.js" })
// { code: 'unodostresquatro',
//   map: [object SourceMapGenerator] }
```
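
The returned `map` is a `SourceMapGenerator`, so it can be serialized with its `toString()` method. A minimal sketch of writing the generated code and its source map to disk (the output file names here are only illustrative):

```js
const fs = require("fs");

// node is the SourceNode built in the example above.
const { code, map } = node.toStringWithSourceMap({ file: "my-output-file.js" });

// SourceMapGenerator#toString() returns the JSON serialization of the map.
fs.writeFileSync("my-output-file.js", code + "\n//# sourceMappingURL=my-output-file.js.map\n");
fs.writeFileSync("my-output-file.js.map", map.toString());
```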
3351 node_modules/terser/node_modules/source-map/dist/source-map.js (generated, vendored): file diff suppressed because it is too large
100 node_modules/terser/node_modules/source-map/lib/array-set.js (generated, vendored)
@@ -1,100 +0,0 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*/
|
||||
|
||||
/**
|
||||
* A data structure which is a combination of an array and a set. Adding a new
|
||||
* member is O(1), testing for membership is O(1), and finding the index of an
|
||||
* element is O(1). Removing elements from the set is not supported. Only
|
||||
* strings are supported for membership.
|
||||
*/
|
||||
class ArraySet {
|
||||
constructor() {
|
||||
this._array = [];
|
||||
this._set = new Map();
|
||||
}
|
||||
|
||||
/**
|
||||
* Static method for creating ArraySet instances from an existing array.
|
||||
*/
|
||||
static fromArray(aArray, aAllowDuplicates) {
|
||||
const set = new ArraySet();
|
||||
for (let i = 0, len = aArray.length; i < len; i++) {
|
||||
set.add(aArray[i], aAllowDuplicates);
|
||||
}
|
||||
return set;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return how many unique items are in this ArraySet. If duplicates have been
|
||||
* added, than those do not count towards the size.
|
||||
*
|
||||
* @returns Number
|
||||
*/
|
||||
size() {
|
||||
return this._set.size;
|
||||
}
|
||||
|
||||
/**
|
||||
* Add the given string to this set.
|
||||
*
|
||||
* @param String aStr
|
||||
*/
|
||||
add(aStr, aAllowDuplicates) {
|
||||
const isDuplicate = this.has(aStr);
|
||||
const idx = this._array.length;
|
||||
if (!isDuplicate || aAllowDuplicates) {
|
||||
this._array.push(aStr);
|
||||
}
|
||||
if (!isDuplicate) {
|
||||
this._set.set(aStr, idx);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Is the given string a member of this set?
|
||||
*
|
||||
* @param String aStr
|
||||
*/
|
||||
has(aStr) {
|
||||
return this._set.has(aStr);
|
||||
}
|
||||
|
||||
/**
|
||||
* What is the index of the given string in the array?
|
||||
*
|
||||
* @param String aStr
|
||||
*/
|
||||
indexOf(aStr) {
|
||||
const idx = this._set.get(aStr);
|
||||
if (idx >= 0) {
|
||||
return idx;
|
||||
}
|
||||
throw new Error('"' + aStr + '" is not in the set.');
|
||||
}
|
||||
|
||||
/**
|
||||
* What is the element at the given index?
|
||||
*
|
||||
* @param Number aIdx
|
||||
*/
|
||||
at(aIdx) {
|
||||
if (aIdx >= 0 && aIdx < this._array.length) {
|
||||
return this._array[aIdx];
|
||||
}
|
||||
throw new Error("No element indexed by " + aIdx);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the array representation of this set (which has the proper indices
|
||||
* indicated by indexOf). Note that this is a copy of the internal array used
|
||||
* for storing the members so that no one can mess with internal state.
|
||||
*/
|
||||
toArray() {
|
||||
return this._array.slice();
|
||||
}
|
||||
}
|
||||
exports.ArraySet = ArraySet;
|
111 node_modules/terser/node_modules/source-map/lib/base64-vlq.js (generated, vendored)
@@ -1,111 +0,0 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*
|
||||
* Based on the Base 64 VLQ implementation in Closure Compiler:
|
||||
* https://code.google.com/p/closure-compiler/source/browse/trunk/src/com/google/debugging/sourcemap/Base64VLQ.java
|
||||
*
|
||||
* Copyright 2011 The Closure Compiler Authors. All rights reserved.
|
||||
* Redistribution and use in source and binary forms, with or without
|
||||
* modification, are permitted provided that the following conditions are
|
||||
* met:
|
||||
*
|
||||
* * Redistributions of source code must retain the above copyright
|
||||
* notice, this list of conditions and the following disclaimer.
|
||||
* * Redistributions in binary form must reproduce the above
|
||||
* copyright notice, this list of conditions and the following
|
||||
* disclaimer in the documentation and/or other materials provided
|
||||
* with the distribution.
|
||||
* * Neither the name of Google Inc. nor the names of its
|
||||
* contributors may be used to endorse or promote products derived
|
||||
* from this software without specific prior written permission.
|
||||
*
|
||||
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
*/
|
||||
|
||||
const base64 = require("./base64");
|
||||
|
||||
// A single base 64 digit can contain 6 bits of data. For the base 64 variable
|
||||
// length quantities we use in the source map spec, the first bit is the sign,
|
||||
// the next four bits are the actual value, and the 6th bit is the
|
||||
// continuation bit. The continuation bit tells us whether there are more
|
||||
// digits in this value following this digit.
|
||||
//
|
||||
// Continuation
|
||||
// | Sign
|
||||
// | |
|
||||
// V V
|
||||
// 101011
|
||||
|
||||
const VLQ_BASE_SHIFT = 5;
|
||||
|
||||
// binary: 100000
|
||||
const VLQ_BASE = 1 << VLQ_BASE_SHIFT;
|
||||
|
||||
// binary: 011111
|
||||
const VLQ_BASE_MASK = VLQ_BASE - 1;
|
||||
|
||||
// binary: 100000
|
||||
const VLQ_CONTINUATION_BIT = VLQ_BASE;
|
||||
|
||||
/**
|
||||
* Converts from a two-complement value to a value where the sign bit is
|
||||
* placed in the least significant bit. For example, as decimals:
|
||||
* 1 becomes 2 (10 binary), -1 becomes 3 (11 binary)
|
||||
* 2 becomes 4 (100 binary), -2 becomes 5 (101 binary)
|
||||
*/
|
||||
function toVLQSigned(aValue) {
|
||||
return aValue < 0
|
||||
? ((-aValue) << 1) + 1
|
||||
: (aValue << 1) + 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts to a two-complement value from a value where the sign bit is
|
||||
* placed in the least significant bit. For example, as decimals:
|
||||
* 2 (10 binary) becomes 1, 3 (11 binary) becomes -1
|
||||
* 4 (100 binary) becomes 2, 5 (101 binary) becomes -2
|
||||
*/
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
function fromVLQSigned(aValue) {
|
||||
const isNegative = (aValue & 1) === 1;
|
||||
const shifted = aValue >> 1;
|
||||
return isNegative
|
||||
? -shifted
|
||||
: shifted;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the base 64 VLQ encoded value.
|
||||
*/
|
||||
exports.encode = function base64VLQ_encode(aValue) {
|
||||
let encoded = "";
|
||||
let digit;
|
||||
|
||||
let vlq = toVLQSigned(aValue);
|
||||
|
||||
do {
|
||||
digit = vlq & VLQ_BASE_MASK;
|
||||
vlq >>>= VLQ_BASE_SHIFT;
|
||||
if (vlq > 0) {
|
||||
// There are still more digits in this value, so we must make sure the
|
||||
// continuation bit is marked.
|
||||
digit |= VLQ_CONTINUATION_BIT;
|
||||
}
|
||||
encoded += base64.encode(digit);
|
||||
} while (vlq > 0);
|
||||
|
||||
return encoded;
|
||||
};
|
18 node_modules/terser/node_modules/source-map/lib/base64.js (generated, vendored)
@@ -1,18 +0,0 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*/
|
||||
|
||||
const intToCharMap = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".split("");
|
||||
|
||||
/**
|
||||
* Encode an integer in the range of 0 to 63 to a single base 64 digit.
|
||||
*/
|
||||
exports.encode = function(number) {
|
||||
if (0 <= number && number < intToCharMap.length) {
|
||||
return intToCharMap[number];
|
||||
}
|
||||
throw new TypeError("Must be between 0 and 63: " + number);
|
||||
};
|
107 node_modules/terser/node_modules/source-map/lib/binary-search.js (generated, vendored)
@@ -1,107 +0,0 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*/
|
||||
|
||||
exports.GREATEST_LOWER_BOUND = 1;
|
||||
exports.LEAST_UPPER_BOUND = 2;
|
||||
|
||||
/**
|
||||
* Recursive implementation of binary search.
|
||||
*
|
||||
* @param aLow Indices here and lower do not contain the needle.
|
||||
* @param aHigh Indices here and higher do not contain the needle.
|
||||
* @param aNeedle The element being searched for.
|
||||
* @param aHaystack The non-empty array being searched.
|
||||
* @param aCompare Function which takes two elements and returns -1, 0, or 1.
|
||||
* @param aBias Either 'binarySearch.GREATEST_LOWER_BOUND' or
|
||||
* 'binarySearch.LEAST_UPPER_BOUND'. Specifies whether to return the
|
||||
* closest element that is smaller than or greater than the one we are
|
||||
* searching for, respectively, if the exact element cannot be found.
|
||||
*/
|
||||
function recursiveSearch(aLow, aHigh, aNeedle, aHaystack, aCompare, aBias) {
|
||||
// This function terminates when one of the following is true:
|
||||
//
|
||||
// 1. We find the exact element we are looking for.
|
||||
//
|
||||
// 2. We did not find the exact element, but we can return the index of
|
||||
// the next-closest element.
|
||||
//
|
||||
// 3. We did not find the exact element, and there is no next-closest
|
||||
// element than the one we are searching for, so we return -1.
|
||||
const mid = Math.floor((aHigh - aLow) / 2) + aLow;
|
||||
const cmp = aCompare(aNeedle, aHaystack[mid], true);
|
||||
if (cmp === 0) {
|
||||
// Found the element we are looking for.
|
||||
return mid;
|
||||
} else if (cmp > 0) {
|
||||
// Our needle is greater than aHaystack[mid].
|
||||
if (aHigh - mid > 1) {
|
||||
// The element is in the upper half.
|
||||
return recursiveSearch(mid, aHigh, aNeedle, aHaystack, aCompare, aBias);
|
||||
}
|
||||
|
||||
// The exact needle element was not found in this haystack. Determine if
|
||||
// we are in termination case (3) or (2) and return the appropriate thing.
|
||||
if (aBias == exports.LEAST_UPPER_BOUND) {
|
||||
return aHigh < aHaystack.length ? aHigh : -1;
|
||||
}
|
||||
return mid;
|
||||
}
|
||||
|
||||
// Our needle is less than aHaystack[mid].
|
||||
if (mid - aLow > 1) {
|
||||
// The element is in the lower half.
|
||||
return recursiveSearch(aLow, mid, aNeedle, aHaystack, aCompare, aBias);
|
||||
}
|
||||
|
||||
// we are in termination case (3) or (2) and return the appropriate thing.
|
||||
if (aBias == exports.LEAST_UPPER_BOUND) {
|
||||
return mid;
|
||||
}
|
||||
return aLow < 0 ? -1 : aLow;
|
||||
}
|
||||
|
||||
/**
|
||||
* This is an implementation of binary search which will always try and return
|
||||
* the index of the closest element if there is no exact hit. This is because
|
||||
* mappings between original and generated line/col pairs are single points,
|
||||
* and there is an implicit region between each of them, so a miss just means
|
||||
* that you aren't on the very start of a region.
|
||||
*
|
||||
* @param aNeedle The element you are looking for.
|
||||
* @param aHaystack The array that is being searched.
|
||||
* @param aCompare A function which takes the needle and an element in the
|
||||
* array and returns -1, 0, or 1 depending on whether the needle is less
|
||||
* than, equal to, or greater than the element, respectively.
|
||||
* @param aBias Either 'binarySearch.GREATEST_LOWER_BOUND' or
|
||||
* 'binarySearch.LEAST_UPPER_BOUND'. Specifies whether to return the
|
||||
* closest element that is smaller than or greater than the one we are
|
||||
* searching for, respectively, if the exact element cannot be found.
|
||||
* Defaults to 'binarySearch.GREATEST_LOWER_BOUND'.
|
||||
*/
|
||||
exports.search = function search(aNeedle, aHaystack, aCompare, aBias) {
|
||||
if (aHaystack.length === 0) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
let index = recursiveSearch(-1, aHaystack.length, aNeedle, aHaystack,
|
||||
aCompare, aBias || exports.GREATEST_LOWER_BOUND);
|
||||
if (index < 0) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
// We have found either the exact element, or the next-closest element than
|
||||
// the one we are searching for. However, there may be more than one such
|
||||
// element. Make sure we always return the smallest of these.
|
||||
while (index - 1 >= 0) {
|
||||
if (aCompare(aHaystack[index], aHaystack[index - 1], true) !== 0) {
|
||||
break;
|
||||
}
|
||||
--index;
|
||||
}
|
||||
|
||||
return index;
|
||||
};
|
80 node_modules/terser/node_modules/source-map/lib/mapping-list.js (generated, vendored)
@@ -1,80 +0,0 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2014 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*/
|
||||
|
||||
const util = require("./util");
|
||||
|
||||
/**
|
||||
* Determine whether mappingB is after mappingA with respect to generated
|
||||
* position.
|
||||
*/
|
||||
function generatedPositionAfter(mappingA, mappingB) {
|
||||
// Optimized for most common case
|
||||
const lineA = mappingA.generatedLine;
|
||||
const lineB = mappingB.generatedLine;
|
||||
const columnA = mappingA.generatedColumn;
|
||||
const columnB = mappingB.generatedColumn;
|
||||
return lineB > lineA || lineB == lineA && columnB >= columnA ||
|
||||
util.compareByGeneratedPositionsInflated(mappingA, mappingB) <= 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* A data structure to provide a sorted view of accumulated mappings in a
|
||||
* performance conscious manner. It trades a negligible overhead in general
|
||||
* case for a large speedup in case of mappings being added in order.
|
||||
*/
|
||||
class MappingList {
|
||||
constructor() {
|
||||
this._array = [];
|
||||
this._sorted = true;
|
||||
// Serves as infimum
|
||||
this._last = {generatedLine: -1, generatedColumn: 0};
|
||||
}
|
||||
|
||||
/**
|
||||
* Iterate through internal items. This method takes the same arguments that
|
||||
* `Array.prototype.forEach` takes.
|
||||
*
|
||||
* NOTE: The order of the mappings is NOT guaranteed.
|
||||
*/
|
||||
unsortedForEach(aCallback, aThisArg) {
|
||||
this._array.forEach(aCallback, aThisArg);
|
||||
}
|
||||
|
||||
/**
|
||||
* Add the given source mapping.
|
||||
*
|
||||
* @param Object aMapping
|
||||
*/
|
||||
add(aMapping) {
|
||||
if (generatedPositionAfter(this._last, aMapping)) {
|
||||
this._last = aMapping;
|
||||
this._array.push(aMapping);
|
||||
} else {
|
||||
this._sorted = false;
|
||||
this._array.push(aMapping);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the flat, sorted array of mappings. The mappings are sorted by
|
||||
* generated position.
|
||||
*
|
||||
* WARNING: This method returns internal data without copying, for
|
||||
* performance. The return value must NOT be mutated, and should be treated as
|
||||
* an immutable borrow. If you want to take ownership, you must make your own
|
||||
* copy.
|
||||
*/
|
||||
toArray() {
|
||||
if (!this._sorted) {
|
||||
this._array.sort(util.compareByGeneratedPositionsInflated);
|
||||
this._sorted = true;
|
||||
}
|
||||
return this._array;
|
||||
}
|
||||
}
|
||||
|
||||
exports.MappingList = MappingList;
|
BIN node_modules/terser/node_modules/source-map/lib/mappings.wasm (generated, vendored): binary file not shown
40 node_modules/terser/node_modules/source-map/lib/read-wasm.js (generated, vendored)
@@ -1,40 +0,0 @@
|
||||
if (typeof fetch === "function") {
|
||||
// Web version of reading a wasm file into an array buffer.
|
||||
|
||||
let mappingsWasmUrl = null;
|
||||
|
||||
module.exports = function readWasm() {
|
||||
if (typeof mappingsWasmUrl !== "string") {
|
||||
throw new Error("You must provide the URL of lib/mappings.wasm by calling " +
|
||||
"SourceMapConsumer.initialize({ 'lib/mappings.wasm': ... }) " +
|
||||
"before using SourceMapConsumer");
|
||||
}
|
||||
|
||||
return fetch(mappingsWasmUrl)
|
||||
.then(response => response.arrayBuffer());
|
||||
};
|
||||
|
||||
module.exports.initialize = url => mappingsWasmUrl = url;
|
||||
} else {
|
||||
// Node version of reading a wasm file into an array buffer.
|
||||
const fs = require("fs");
|
||||
const path = require("path");
|
||||
|
||||
module.exports = function readWasm() {
|
||||
return new Promise((resolve, reject) => {
|
||||
const wasmPath = path.join(__dirname, "mappings.wasm");
|
||||
fs.readFile(wasmPath, null, (error, data) => {
|
||||
if (error) {
|
||||
reject(error);
|
||||
return;
|
||||
}
|
||||
|
||||
resolve(data.buffer);
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
module.exports.initialize = _ => {
|
||||
console.debug("SourceMapConsumer.initialize is a no-op when running in node.js");
|
||||
};
|
||||
}
|
1254 node_modules/terser/node_modules/source-map/lib/source-map-consumer.js (generated, vendored): file diff suppressed because it is too large
413 node_modules/terser/node_modules/source-map/lib/source-map-generator.js (generated, vendored)
@@ -1,413 +0,0 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*/
|
||||
|
||||
const base64VLQ = require("./base64-vlq");
|
||||
const util = require("./util");
|
||||
const ArraySet = require("./array-set").ArraySet;
|
||||
const MappingList = require("./mapping-list").MappingList;
|
||||
|
||||
/**
|
||||
* An instance of the SourceMapGenerator represents a source map which is
|
||||
* being built incrementally. You may pass an object with the following
|
||||
* properties:
|
||||
*
|
||||
* - file: The filename of the generated source.
|
||||
* - sourceRoot: A root for all relative URLs in this source map.
|
||||
*/
|
||||
class SourceMapGenerator {
|
||||
constructor(aArgs) {
|
||||
if (!aArgs) {
|
||||
aArgs = {};
|
||||
}
|
||||
this._file = util.getArg(aArgs, "file", null);
|
||||
this._sourceRoot = util.getArg(aArgs, "sourceRoot", null);
|
||||
this._skipValidation = util.getArg(aArgs, "skipValidation", false);
|
||||
this._sources = new ArraySet();
|
||||
this._names = new ArraySet();
|
||||
this._mappings = new MappingList();
|
||||
this._sourcesContents = null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new SourceMapGenerator based on a SourceMapConsumer
|
||||
*
|
||||
* @param aSourceMapConsumer The SourceMap.
|
||||
*/
|
||||
static fromSourceMap(aSourceMapConsumer) {
|
||||
const sourceRoot = aSourceMapConsumer.sourceRoot;
|
||||
const generator = new SourceMapGenerator({
|
||||
file: aSourceMapConsumer.file,
|
||||
sourceRoot
|
||||
});
|
||||
aSourceMapConsumer.eachMapping(function(mapping) {
|
||||
const newMapping = {
|
||||
generated: {
|
||||
line: mapping.generatedLine,
|
||||
column: mapping.generatedColumn
|
||||
}
|
||||
};
|
||||
|
||||
if (mapping.source != null) {
|
||||
newMapping.source = mapping.source;
|
||||
if (sourceRoot != null) {
|
||||
newMapping.source = util.relative(sourceRoot, newMapping.source);
|
||||
}
|
||||
|
||||
newMapping.original = {
|
||||
line: mapping.originalLine,
|
||||
column: mapping.originalColumn
|
||||
};
|
||||
|
||||
if (mapping.name != null) {
|
||||
newMapping.name = mapping.name;
|
||||
}
|
||||
}
|
||||
|
||||
generator.addMapping(newMapping);
|
||||
});
|
||||
aSourceMapConsumer.sources.forEach(function(sourceFile) {
|
||||
let sourceRelative = sourceFile;
|
||||
if (sourceRoot !== null) {
|
||||
sourceRelative = util.relative(sourceRoot, sourceFile);
|
||||
}
|
||||
|
||||
if (!generator._sources.has(sourceRelative)) {
|
||||
generator._sources.add(sourceRelative);
|
||||
}
|
||||
|
||||
const content = aSourceMapConsumer.sourceContentFor(sourceFile);
|
||||
if (content != null) {
|
||||
generator.setSourceContent(sourceFile, content);
|
||||
}
|
||||
});
|
||||
return generator;
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a single mapping from original source line and column to the generated
|
||||
* source's line and column for this source map being created. The mapping
|
||||
* object should have the following properties:
|
||||
*
|
||||
* - generated: An object with the generated line and column positions.
|
||||
* - original: An object with the original line and column positions.
|
||||
* - source: The original source file (relative to the sourceRoot).
|
||||
* - name: An optional original token name for this mapping.
|
||||
*/
|
||||
addMapping(aArgs) {
|
||||
const generated = util.getArg(aArgs, "generated");
|
||||
const original = util.getArg(aArgs, "original", null);
|
||||
let source = util.getArg(aArgs, "source", null);
|
||||
let name = util.getArg(aArgs, "name", null);
|
||||
|
||||
if (!this._skipValidation) {
|
||||
this._validateMapping(generated, original, source, name);
|
||||
}
|
||||
|
||||
if (source != null) {
|
||||
source = String(source);
|
||||
if (!this._sources.has(source)) {
|
||||
this._sources.add(source);
|
||||
}
|
||||
}
|
||||
|
||||
if (name != null) {
|
||||
name = String(name);
|
||||
if (!this._names.has(name)) {
|
||||
this._names.add(name);
|
||||
}
|
||||
}
|
||||
|
||||
this._mappings.add({
|
||||
generatedLine: generated.line,
|
||||
generatedColumn: generated.column,
|
||||
originalLine: original != null && original.line,
|
||||
originalColumn: original != null && original.column,
|
||||
source,
|
||||
name
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the source content for a source file.
|
||||
*/
|
||||
setSourceContent(aSourceFile, aSourceContent) {
|
||||
let source = aSourceFile;
|
||||
if (this._sourceRoot != null) {
|
||||
source = util.relative(this._sourceRoot, source);
|
||||
}
|
||||
|
||||
if (aSourceContent != null) {
|
||||
// Add the source content to the _sourcesContents map.
|
||||
// Create a new _sourcesContents map if the property is null.
|
||||
if (!this._sourcesContents) {
|
||||
this._sourcesContents = Object.create(null);
|
||||
}
|
||||
this._sourcesContents[util.toSetString(source)] = aSourceContent;
|
||||
} else if (this._sourcesContents) {
|
||||
// Remove the source file from the _sourcesContents map.
|
||||
// If the _sourcesContents map is empty, set the property to null.
|
||||
delete this._sourcesContents[util.toSetString(source)];
|
||||
if (Object.keys(this._sourcesContents).length === 0) {
|
||||
this._sourcesContents = null;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Applies the mappings of a sub-source-map for a specific source file to the
|
||||
* source map being generated. Each mapping to the supplied source file is
|
||||
* rewritten using the supplied source map. Note: The resolution for the
|
||||
* resulting mappings is the minimium of this map and the supplied map.
|
||||
*
|
||||
* @param aSourceMapConsumer The source map to be applied.
|
||||
* @param aSourceFile Optional. The filename of the source file.
|
||||
* If omitted, SourceMapConsumer's file property will be used.
|
||||
* @param aSourceMapPath Optional. The dirname of the path to the source map
|
||||
* to be applied. If relative, it is relative to the SourceMapConsumer.
|
||||
* This parameter is needed when the two source maps aren't in the same
|
||||
* directory, and the source map to be applied contains relative source
|
||||
* paths. If so, those relative source paths need to be rewritten
|
||||
* relative to the SourceMapGenerator.
|
||||
*/
|
||||
applySourceMap(aSourceMapConsumer, aSourceFile, aSourceMapPath) {
|
||||
let sourceFile = aSourceFile;
|
||||
// If aSourceFile is omitted, we will use the file property of the SourceMap
|
||||
if (aSourceFile == null) {
|
||||
if (aSourceMapConsumer.file == null) {
|
||||
throw new Error(
|
||||
"SourceMapGenerator.prototype.applySourceMap requires either an explicit source file, " +
|
||||
'or the source map\'s "file" property. Both were omitted.'
|
||||
);
|
||||
}
|
||||
sourceFile = aSourceMapConsumer.file;
|
||||
}
|
||||
const sourceRoot = this._sourceRoot;
|
||||
// Make "sourceFile" relative if an absolute Url is passed.
|
||||
if (sourceRoot != null) {
|
||||
sourceFile = util.relative(sourceRoot, sourceFile);
|
||||
}
|
||||
// Applying the SourceMap can add and remove items from the sources and
|
||||
// the names array.
|
||||
const newSources = this._mappings.toArray().length > 0
|
||||
? new ArraySet()
|
||||
: this._sources;
|
||||
const newNames = new ArraySet();
|
||||
|
||||
// Find mappings for the "sourceFile"
|
||||
this._mappings.unsortedForEach(function(mapping) {
|
||||
if (mapping.source === sourceFile && mapping.originalLine != null) {
|
||||
// Check if it can be mapped by the source map, then update the mapping.
|
||||
const original = aSourceMapConsumer.originalPositionFor({
|
||||
line: mapping.originalLine,
|
||||
column: mapping.originalColumn
|
||||
});
|
||||
if (original.source != null) {
|
||||
// Copy mapping
|
||||
mapping.source = original.source;
|
||||
if (aSourceMapPath != null) {
|
||||
mapping.source = util.join(aSourceMapPath, mapping.source);
|
||||
}
|
||||
if (sourceRoot != null) {
|
||||
mapping.source = util.relative(sourceRoot, mapping.source);
|
||||
}
|
||||
mapping.originalLine = original.line;
|
||||
mapping.originalColumn = original.column;
|
||||
if (original.name != null) {
|
||||
mapping.name = original.name;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const source = mapping.source;
|
||||
if (source != null && !newSources.has(source)) {
|
||||
newSources.add(source);
|
||||
}
|
||||
|
||||
const name = mapping.name;
|
||||
if (name != null && !newNames.has(name)) {
|
||||
newNames.add(name);
|
||||
}
|
||||
|
||||
}, this);
|
||||
this._sources = newSources;
|
||||
this._names = newNames;
|
||||
|
||||
// Copy sourcesContents of applied map.
|
||||
aSourceMapConsumer.sources.forEach(function(srcFile) {
|
||||
const content = aSourceMapConsumer.sourceContentFor(srcFile);
|
||||
if (content != null) {
|
||||
if (aSourceMapPath != null) {
|
||||
srcFile = util.join(aSourceMapPath, srcFile);
|
||||
}
|
||||
if (sourceRoot != null) {
|
||||
srcFile = util.relative(sourceRoot, srcFile);
|
||||
}
|
||||
this.setSourceContent(srcFile, content);
|
||||
}
|
||||
}, this);
|
||||
}
|
||||
|
||||
/**
|
||||
* A mapping can have one of the three levels of data:
|
||||
*
|
||||
* 1. Just the generated position.
|
||||
* 2. The Generated position, original position, and original source.
|
||||
* 3. Generated and original position, original source, as well as a name
|
||||
* token.
|
||||
*
|
||||
* To maintain consistency, we validate that any new mapping being added falls
|
||||
* in to one of these categories.
|
||||
*/
|
||||
_validateMapping(aGenerated, aOriginal, aSource, aName) {
|
||||
// When aOriginal is truthy but has empty values for .line and .column,
|
||||
// it is most likely a programmer error. In this case we throw a very
|
||||
// specific error message to try to guide them the right way.
|
||||
// For example: https://github.com/Polymer/polymer-bundler/pull/519
|
||||
if (aOriginal && typeof aOriginal.line !== "number" && typeof aOriginal.column !== "number") {
|
||||
throw new Error(
|
||||
"original.line and original.column are not numbers -- you probably meant to omit " +
|
||||
"the original mapping entirely and only map the generated position. If so, pass " +
|
||||
"null for the original mapping instead of an object with empty or null values."
|
||||
);
|
||||
}
|
||||
|
||||
if (aGenerated && "line" in aGenerated && "column" in aGenerated
|
||||
&& aGenerated.line > 0 && aGenerated.column >= 0
|
||||
&& !aOriginal && !aSource && !aName) {
|
||||
// Case 1.
|
||||
|
||||
} else if (aGenerated && "line" in aGenerated && "column" in aGenerated
|
||||
&& aOriginal && "line" in aOriginal && "column" in aOriginal
|
||||
&& aGenerated.line > 0 && aGenerated.column >= 0
|
||||
&& aOriginal.line > 0 && aOriginal.column >= 0
|
||||
&& aSource) {
|
||||
// Cases 2 and 3.
|
||||
|
||||
} else {
|
||||
throw new Error("Invalid mapping: " + JSON.stringify({
|
||||
generated: aGenerated,
|
||||
source: aSource,
|
||||
original: aOriginal,
|
||||
name: aName
|
||||
}));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Serialize the accumulated mappings in to the stream of base 64 VLQs
|
||||
* specified by the source map format.
|
||||
*/
|
||||
_serializeMappings() {
|
||||
let previousGeneratedColumn = 0;
|
||||
let previousGeneratedLine = 1;
|
||||
let previousOriginalColumn = 0;
|
||||
let previousOriginalLine = 0;
|
||||
let previousName = 0;
|
||||
let previousSource = 0;
|
||||
let result = "";
|
||||
let next;
|
||||
let mapping;
|
||||
let nameIdx;
|
||||
let sourceIdx;
|
||||
|
||||
const mappings = this._mappings.toArray();
|
||||
for (let i = 0, len = mappings.length; i < len; i++) {
|
||||
mapping = mappings[i];
|
||||
next = "";
|
||||
|
||||
if (mapping.generatedLine !== previousGeneratedLine) {
|
||||
previousGeneratedColumn = 0;
|
||||
while (mapping.generatedLine !== previousGeneratedLine) {
|
||||
next += ";";
|
||||
previousGeneratedLine++;
|
||||
}
|
||||
} else if (i > 0) {
|
||||
if (!util.compareByGeneratedPositionsInflated(mapping, mappings[i - 1])) {
|
||||
continue;
|
||||
}
|
||||
next += ",";
|
||||
}
|
||||
|
||||
next += base64VLQ.encode(mapping.generatedColumn
|
||||
- previousGeneratedColumn);
|
||||
previousGeneratedColumn = mapping.generatedColumn;
|
||||
|
||||
if (mapping.source != null) {
|
||||
sourceIdx = this._sources.indexOf(mapping.source);
|
||||
next += base64VLQ.encode(sourceIdx - previousSource);
|
||||
previousSource = sourceIdx;
|
||||
|
||||
// lines are stored 0-based in SourceMap spec version 3
|
||||
next += base64VLQ.encode(mapping.originalLine - 1
|
||||
- previousOriginalLine);
|
||||
previousOriginalLine = mapping.originalLine - 1;
|
||||
|
||||
next += base64VLQ.encode(mapping.originalColumn
|
||||
- previousOriginalColumn);
|
||||
previousOriginalColumn = mapping.originalColumn;
|
||||
|
||||
if (mapping.name != null) {
|
||||
nameIdx = this._names.indexOf(mapping.name);
|
||||
next += base64VLQ.encode(nameIdx - previousName);
|
||||
previousName = nameIdx;
|
||||
}
|
||||
}
|
||||
|
||||
result += next;
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
_generateSourcesContent(aSources, aSourceRoot) {
|
||||
return aSources.map(function(source) {
|
||||
if (!this._sourcesContents) {
|
||||
return null;
|
||||
}
|
||||
if (aSourceRoot != null) {
|
||||
source = util.relative(aSourceRoot, source);
|
||||
}
|
||||
const key = util.toSetString(source);
|
||||
return Object.prototype.hasOwnProperty.call(this._sourcesContents, key)
|
||||
? this._sourcesContents[key]
|
||||
: null;
|
||||
}, this);
|
||||
}
|
||||
|
||||
/**
|
||||
* Externalize the source map.
|
||||
*/
|
||||
toJSON() {
|
||||
const map = {
|
||||
version: this._version,
|
||||
sources: this._sources.toArray(),
|
||||
names: this._names.toArray(),
|
||||
mappings: this._serializeMappings()
|
||||
};
|
||||
if (this._file != null) {
|
||||
map.file = this._file;
|
||||
}
|
||||
if (this._sourceRoot != null) {
|
||||
map.sourceRoot = this._sourceRoot;
|
||||
}
|
||||
if (this._sourcesContents) {
|
||||
map.sourcesContent = this._generateSourcesContent(map.sources, map.sourceRoot);
|
||||
}
|
||||
|
||||
return map;
|
||||
}
|
||||
|
||||
/**
|
||||
* Render the source map being generated to a string.
|
||||
*/
|
||||
toString() {
|
||||
return JSON.stringify(this.toJSON());
|
||||
}
|
||||
}
|
||||
|
||||
SourceMapGenerator.prototype._version = 3;
|
||||
exports.SourceMapGenerator = SourceMapGenerator;
|
404 node_modules/terser/node_modules/source-map/lib/source-node.js (generated, vendored)
@@ -1,404 +0,0 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*/
|
||||
|
||||
const SourceMapGenerator = require("./source-map-generator").SourceMapGenerator;
|
||||
const util = require("./util");
|
||||
|
||||
// Matches a Windows-style `\r\n` newline or a `\n` newline used by all other
|
||||
// operating systems these days (capturing the result).
|
||||
const REGEX_NEWLINE = /(\r?\n)/;
|
||||
|
||||
// Newline character code for charCodeAt() comparisons
|
||||
const NEWLINE_CODE = 10;
|
||||
|
||||
// Private symbol for identifying `SourceNode`s when multiple versions of
|
||||
// the source-map library are loaded. This MUST NOT CHANGE across
|
||||
// versions!
|
||||
const isSourceNode = "$$$isSourceNode$$$";
|
||||
|
||||
/**
|
||||
* SourceNodes provide a way to abstract over interpolating/concatenating
|
||||
* snippets of generated JavaScript source code while maintaining the line and
|
||||
* column information associated with the original source code.
|
||||
*
|
||||
* @param aLine The original line number.
|
||||
* @param aColumn The original column number.
|
||||
* @param aSource The original source's filename.
|
||||
* @param aChunks Optional. An array of strings which are snippets of
|
||||
* generated JS, or other SourceNodes.
|
||||
* @param aName The original identifier.
|
||||
*/
|
||||
class SourceNode {
|
||||
constructor(aLine, aColumn, aSource, aChunks, aName) {
|
||||
this.children = [];
|
||||
this.sourceContents = {};
|
||||
this.line = aLine == null ? null : aLine;
|
||||
this.column = aColumn == null ? null : aColumn;
|
||||
this.source = aSource == null ? null : aSource;
|
||||
this.name = aName == null ? null : aName;
|
||||
this[isSourceNode] = true;
|
||||
if (aChunks != null) this.add(aChunks);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a SourceNode from generated code and a SourceMapConsumer.
|
||||
*
|
||||
* @param aGeneratedCode The generated code
|
||||
* @param aSourceMapConsumer The SourceMap for the generated code
|
||||
* @param aRelativePath Optional. The path that relative sources in the
|
||||
* SourceMapConsumer should be relative to.
|
||||
*/
|
||||
static fromStringWithSourceMap(aGeneratedCode, aSourceMapConsumer, aRelativePath) {
|
||||
// The SourceNode we want to fill with the generated code
|
||||
// and the SourceMap
|
||||
const node = new SourceNode();
|
||||
|
||||
// All even indices of this array are one line of the generated code,
|
||||
// while all odd indices are the newlines between two adjacent lines
|
||||
// (since `REGEX_NEWLINE` captures its match).
|
||||
// Processed fragments are accessed by calling `shiftNextLine`.
|
||||
const remainingLines = aGeneratedCode.split(REGEX_NEWLINE);
|
||||
let remainingLinesIndex = 0;
|
||||
const shiftNextLine = function() {
|
||||
const lineContents = getNextLine();
|
||||
// The last line of a file might not have a newline.
|
||||
const newLine = getNextLine() || "";
|
||||
return lineContents + newLine;
|
||||
|
||||
function getNextLine() {
|
||||
return remainingLinesIndex < remainingLines.length ?
|
||||
remainingLines[remainingLinesIndex++] : undefined;
|
||||
}
|
||||
};
|
||||
|
||||
// We need to remember the position of "remainingLines"
|
||||
let lastGeneratedLine = 1, lastGeneratedColumn = 0;
|
||||
|
||||
// The generate SourceNodes we need a code range.
|
||||
// To extract it current and last mapping is used.
|
||||
// Here we store the last mapping.
|
||||
let lastMapping = null;
|
||||
let nextLine;
|
||||
|
||||
aSourceMapConsumer.eachMapping(function(mapping) {
|
||||
if (lastMapping !== null) {
|
||||
// We add the code from "lastMapping" to "mapping":
|
||||
// First check if there is a new line in between.
|
||||
if (lastGeneratedLine < mapping.generatedLine) {
|
||||
// Associate first line with "lastMapping"
|
||||
addMappingWithCode(lastMapping, shiftNextLine());
|
||||
lastGeneratedLine++;
|
||||
lastGeneratedColumn = 0;
|
||||
// The remaining code is added without mapping
|
||||
} else {
|
||||
// There is no new line in between.
|
||||
// Associate the code between "lastGeneratedColumn" and
|
||||
// "mapping.generatedColumn" with "lastMapping"
|
||||
nextLine = remainingLines[remainingLinesIndex] || "";
|
||||
const code = nextLine.substr(0, mapping.generatedColumn -
|
||||
lastGeneratedColumn);
|
||||
remainingLines[remainingLinesIndex] = nextLine.substr(mapping.generatedColumn -
|
||||
lastGeneratedColumn);
|
||||
lastGeneratedColumn = mapping.generatedColumn;
|
||||
addMappingWithCode(lastMapping, code);
|
||||
// No more remaining code, continue
|
||||
lastMapping = mapping;
|
||||
return;
|
||||
}
|
||||
}
|
||||
// We add the generated code until the first mapping
|
||||
// to the SourceNode without any mapping.
|
||||
// Each line is added as separate string.
|
||||
while (lastGeneratedLine < mapping.generatedLine) {
|
||||
node.add(shiftNextLine());
|
||||
lastGeneratedLine++;
|
||||
}
|
||||
if (lastGeneratedColumn < mapping.generatedColumn) {
|
||||
nextLine = remainingLines[remainingLinesIndex] || "";
|
||||
node.add(nextLine.substr(0, mapping.generatedColumn));
|
||||
remainingLines[remainingLinesIndex] = nextLine.substr(mapping.generatedColumn);
|
||||
lastGeneratedColumn = mapping.generatedColumn;
|
||||
}
|
||||
lastMapping = mapping;
|
||||
}, this);
|
||||
// We have processed all mappings.
|
||||
if (remainingLinesIndex < remainingLines.length) {
|
||||
if (lastMapping) {
|
||||
// Associate the remaining code in the current line with "lastMapping"
|
||||
addMappingWithCode(lastMapping, shiftNextLine());
|
||||
}
|
||||
// and add the remaining lines without any mapping
|
||||
node.add(remainingLines.splice(remainingLinesIndex).join(""));
|
||||
}
|
||||
|
||||
// Copy sourcesContent into SourceNode
|
||||
aSourceMapConsumer.sources.forEach(function(sourceFile) {
|
||||
const content = aSourceMapConsumer.sourceContentFor(sourceFile);
|
||||
if (content != null) {
|
||||
if (aRelativePath != null) {
|
||||
sourceFile = util.join(aRelativePath, sourceFile);
|
||||
}
|
||||
node.setSourceContent(sourceFile, content);
|
||||
}
|
||||
});
|
||||
|
||||
return node;
|
||||
|
||||
function addMappingWithCode(mapping, code) {
|
||||
if (mapping === null || mapping.source === undefined) {
|
||||
node.add(code);
|
||||
} else {
|
||||
const source = aRelativePath
|
||||
? util.join(aRelativePath, mapping.source)
|
||||
: mapping.source;
|
||||
node.add(new SourceNode(mapping.originalLine,
|
||||
mapping.originalColumn,
|
||||
source,
|
||||
code,
|
||||
mapping.name));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a chunk of generated JS to this source node.
|
||||
*
|
||||
* @param aChunk A string snippet of generated JS code, another instance of
|
||||
* SourceNode, or an array where each member is one of those things.
|
||||
*/
|
||||
add(aChunk) {
|
||||
if (Array.isArray(aChunk)) {
|
||||
aChunk.forEach(function(chunk) {
|
||||
this.add(chunk);
|
||||
}, this);
|
||||
} else if (aChunk[isSourceNode] || typeof aChunk === "string") {
|
||||
if (aChunk) {
|
||||
this.children.push(aChunk);
|
||||
}
|
||||
} else {
|
||||
throw new TypeError(
|
||||
"Expected a SourceNode, string, or an array of SourceNodes and strings. Got " + aChunk
|
||||
);
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a chunk of generated JS to the beginning of this source node.
|
||||
*
|
||||
* @param aChunk A string snippet of generated JS code, another instance of
|
||||
* SourceNode, or an array where each member is one of those things.
|
||||
*/
|
||||
prepend(aChunk) {
|
||||
if (Array.isArray(aChunk)) {
|
||||
for (let i = aChunk.length - 1; i >= 0; i--) {
|
||||
this.prepend(aChunk[i]);
|
||||
}
|
||||
} else if (aChunk[isSourceNode] || typeof aChunk === "string") {
|
||||
this.children.unshift(aChunk);
|
||||
} else {
|
||||
throw new TypeError(
|
||||
"Expected a SourceNode, string, or an array of SourceNodes and strings. Got " + aChunk
|
||||
);
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Walk over the tree of JS snippets in this node and its children. The
|
||||
* walking function is called once for each snippet of JS and is passed that
|
||||
* snippet and the its original associated source's line/column location.
|
||||
*
|
||||
* @param aFn The traversal function.
|
||||
*/
|
||||
walk(aFn) {
|
||||
let chunk;
|
||||
for (let i = 0, len = this.children.length; i < len; i++) {
|
||||
chunk = this.children[i];
|
||||
if (chunk[isSourceNode]) {
|
||||
chunk.walk(aFn);
|
||||
} else if (chunk !== "") {
|
||||
aFn(chunk, { source: this.source,
|
||||
line: this.line,
|
||||
column: this.column,
|
||||
name: this.name });
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Like `String.prototype.join` except for SourceNodes. Inserts `aStr` between
|
||||
* each of `this.children`.
|
||||
*
|
||||
* @param aSep The separator.
|
||||
*/
|
||||
join(aSep) {
|
||||
let newChildren;
|
||||
let i;
|
||||
const len = this.children.length;
|
||||
if (len > 0) {
|
||||
newChildren = [];
|
||||
for (i = 0; i < len - 1; i++) {
|
||||
newChildren.push(this.children[i]);
|
||||
newChildren.push(aSep);
|
||||
}
|
||||
newChildren.push(this.children[i]);
|
||||
this.children = newChildren;
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Call String.prototype.replace on the very right-most source snippet. Useful
|
||||
* for trimming whitespace from the end of a source node, etc.
|
||||
*
|
||||
* @param aPattern The pattern to replace.
|
||||
* @param aReplacement The thing to replace the pattern with.
|
||||
*/
|
||||
replaceRight(aPattern, aReplacement) {
|
||||
const lastChild = this.children[this.children.length - 1];
|
||||
if (lastChild[isSourceNode]) {
|
||||
lastChild.replaceRight(aPattern, aReplacement);
|
||||
} else if (typeof lastChild === "string") {
|
||||
this.children[this.children.length - 1] = lastChild.replace(aPattern, aReplacement);
|
||||
} else {
|
||||
this.children.push("".replace(aPattern, aReplacement));
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the source content for a source file. This will be added to the SourceMapGenerator
|
||||
* in the sourcesContent field.
|
||||
*
|
||||
* @param aSourceFile The filename of the source file
|
||||
* @param aSourceContent The content of the source file
|
||||
*/
|
||||
setSourceContent(aSourceFile, aSourceContent) {
|
||||
this.sourceContents[util.toSetString(aSourceFile)] = aSourceContent;
|
||||
}
|
||||
|
||||
/**
|
||||
* Walk over the tree of SourceNodes. The walking function is called for each
|
||||
* source file content and is passed the filename and source content.
|
||||
*
|
||||
* @param aFn The traversal function.
|
||||
*/
|
||||
walkSourceContents(aFn) {
|
||||
for (let i = 0, len = this.children.length; i < len; i++) {
|
||||
if (this.children[i][isSourceNode]) {
|
||||
this.children[i].walkSourceContents(aFn);
|
||||
}
|
||||
}
|
||||
|
||||
const sources = Object.keys(this.sourceContents);
|
||||
for (let i = 0, len = sources.length; i < len; i++) {
|
||||
aFn(util.fromSetString(sources[i]), this.sourceContents[sources[i]]);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Return the string representation of this source node. Walks over the tree
|
||||
* and concatenates all the various snippets together to one string.
|
||||
*/
|
||||
toString() {
|
||||
let str = "";
|
||||
this.walk(function(chunk) {
|
||||
str += chunk;
|
||||
});
|
||||
return str;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the string representation of this source node along with a source
|
||||
* map.
|
||||
*/
|
||||
toStringWithSourceMap(aArgs) {
|
||||
const generated = {
|
||||
code: "",
|
||||
line: 1,
|
||||
column: 0
|
||||
};
|
||||
const map = new SourceMapGenerator(aArgs);
|
||||
let sourceMappingActive = false;
|
||||
let lastOriginalSource = null;
|
||||
let lastOriginalLine = null;
|
||||
let lastOriginalColumn = null;
|
||||
let lastOriginalName = null;
|
||||
this.walk(function(chunk, original) {
|
||||
generated.code += chunk;
|
||||
if (original.source !== null
|
||||
&& original.line !== null
|
||||
&& original.column !== null) {
|
||||
if (lastOriginalSource !== original.source
|
||||
|| lastOriginalLine !== original.line
|
||||
|| lastOriginalColumn !== original.column
|
||||
|| lastOriginalName !== original.name) {
|
||||
map.addMapping({
|
||||
source: original.source,
|
||||
original: {
|
||||
line: original.line,
|
||||
column: original.column
|
||||
},
|
||||
generated: {
|
||||
line: generated.line,
|
||||
column: generated.column
|
||||
},
|
||||
name: original.name
|
||||
});
|
||||
}
|
||||
lastOriginalSource = original.source;
|
||||
lastOriginalLine = original.line;
|
||||
lastOriginalColumn = original.column;
|
||||
lastOriginalName = original.name;
|
||||
sourceMappingActive = true;
|
||||
} else if (sourceMappingActive) {
|
||||
map.addMapping({
|
||||
generated: {
|
||||
line: generated.line,
|
||||
column: generated.column
|
||||
}
|
||||
});
|
||||
lastOriginalSource = null;
|
||||
sourceMappingActive = false;
|
||||
}
|
||||
for (let idx = 0, length = chunk.length; idx < length; idx++) {
|
||||
if (chunk.charCodeAt(idx) === NEWLINE_CODE) {
|
||||
generated.line++;
|
||||
generated.column = 0;
|
||||
// Mappings end at eol
|
||||
if (idx + 1 === length) {
|
||||
lastOriginalSource = null;
|
||||
sourceMappingActive = false;
|
||||
} else if (sourceMappingActive) {
|
||||
map.addMapping({
|
||||
source: original.source,
|
||||
original: {
|
||||
line: original.line,
|
||||
column: original.column
|
||||
},
|
||||
generated: {
|
||||
line: generated.line,
|
||||
column: generated.column
|
||||
},
|
||||
name: original.name
|
||||
});
|
||||
}
|
||||
} else {
|
||||
generated.column++;
|
||||
}
|
||||
}
|
||||
});
|
||||
this.walkSourceContents(function(sourceFile, sourceContent) {
|
||||
map.setSourceContent(sourceFile, sourceContent);
|
||||
});
|
||||
|
||||
return { code: generated.code, map };
|
||||
}
|
||||
}
|
||||
|
||||
exports.SourceNode = SourceNode;
|
546 node_modules/terser/node_modules/source-map/lib/util.js (generated, vendored)
@@ -1,546 +0,0 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*/
|
||||
|
||||
/**
|
||||
* This is a helper function for getting values from parameter/options
|
||||
* objects.
|
||||
*
|
||||
* @param args The object we are extracting values from
|
||||
* @param name The name of the property we are getting.
|
||||
* @param defaultValue An optional value to return if the property is missing
|
||||
* from the object. If this is not specified and the property is missing, an
|
||||
* error will be thrown.
|
||||
*/
|
||||
function getArg(aArgs, aName, aDefaultValue) {
|
||||
if (aName in aArgs) {
|
||||
return aArgs[aName];
|
||||
} else if (arguments.length === 3) {
|
||||
return aDefaultValue;
|
||||
}
|
||||
throw new Error('"' + aName + '" is a required argument.');
|
||||
|
||||
}
|
||||
exports.getArg = getArg;
|
||||
|
||||
const urlRegexp = /^(?:([\w+\-.]+):)?\/\/(?:(\w+:\w+)@)?([\w.-]*)(?::(\d+))?(.*)$/;
|
||||
const dataUrlRegexp = /^data:.+\,.+$/;
|
||||
|
||||
function urlParse(aUrl) {
|
||||
const match = aUrl.match(urlRegexp);
|
||||
if (!match) {
|
||||
return null;
|
||||
}
|
||||
return {
|
||||
scheme: match[1],
|
||||
auth: match[2],
|
||||
host: match[3],
|
||||
port: match[4],
|
||||
path: match[5]
|
||||
};
|
||||
}
|
||||
exports.urlParse = urlParse;
|
||||
|
||||
function urlGenerate(aParsedUrl) {
|
||||
let url = "";
|
||||
if (aParsedUrl.scheme) {
|
||||
url += aParsedUrl.scheme + ":";
|
||||
}
|
||||
url += "//";
|
||||
if (aParsedUrl.auth) {
|
||||
url += aParsedUrl.auth + "@";
|
||||
}
|
||||
if (aParsedUrl.host) {
|
||||
url += aParsedUrl.host;
|
||||
}
|
||||
if (aParsedUrl.port) {
|
||||
url += ":" + aParsedUrl.port;
|
||||
}
|
||||
if (aParsedUrl.path) {
|
||||
url += aParsedUrl.path;
|
||||
}
|
||||
return url;
|
||||
}
|
||||
exports.urlGenerate = urlGenerate;
|
||||
|
||||
const MAX_CACHED_INPUTS = 32;
|
||||
|
||||
/**
|
||||
* Takes some function `f(input) -> result` and returns a memoized version of
|
||||
* `f`.
|
||||
*
|
||||
* We keep at most `MAX_CACHED_INPUTS` memoized results of `f` alive. The
|
||||
* memoization is a dumb-simple, linear least-recently-used cache.
|
||||
*/
|
||||
function lruMemoize(f) {
|
||||
const cache = [];
|
||||
|
||||
return function(input) {
|
||||
for (let i = 0; i < cache.length; i++) {
|
||||
if (cache[i].input === input) {
|
||||
const temp = cache[0];
|
||||
cache[0] = cache[i];
|
||||
cache[i] = temp;
|
||||
return cache[0].result;
|
||||
}
|
||||
}
|
||||
|
||||
const result = f(input);
|
||||
|
||||
cache.unshift({
|
||||
input,
|
||||
result,
|
||||
});
|
||||
|
||||
if (cache.length > MAX_CACHED_INPUTS) {
|
||||
cache.pop();
|
||||
}
|
||||
|
||||
return result;
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Normalizes a path, or the path portion of a URL:
|
||||
*
|
||||
* - Replaces consecutive slashes with one slash.
|
||||
* - Removes unnecessary '.' parts.
|
||||
* - Removes unnecessary '<dir>/..' parts.
|
||||
*
|
||||
* Based on code in the Node.js 'path' core module.
|
||||
*
|
||||
* @param aPath The path or url to normalize.
|
||||
*/
|
||||
const normalize = lruMemoize(function normalize(aPath) {
|
||||
let path = aPath;
|
||||
const url = urlParse(aPath);
|
||||
if (url) {
|
||||
if (!url.path) {
|
||||
return aPath;
|
||||
}
|
||||
path = url.path;
|
||||
}
|
||||
const isAbsolute = exports.isAbsolute(path);
|
||||
|
||||
// Split the path into parts between `/` characters. This is much faster than
|
||||
// using `.split(/\/+/g)`.
|
||||
const parts = [];
|
||||
let start = 0;
|
||||
let i = 0;
|
||||
while (true) {
|
||||
start = i;
|
||||
i = path.indexOf("/", start);
|
||||
if (i === -1) {
|
||||
parts.push(path.slice(start));
|
||||
break;
|
||||
} else {
|
||||
parts.push(path.slice(start, i));
|
||||
while (i < path.length && path[i] === "/") {
|
||||
i++;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let up = 0;
|
||||
for (i = parts.length - 1; i >= 0; i--) {
|
||||
const part = parts[i];
|
||||
if (part === ".") {
|
||||
parts.splice(i, 1);
|
||||
} else if (part === "..") {
|
||||
up++;
|
||||
} else if (up > 0) {
|
||||
if (part === "") {
|
||||
// The first part is blank if the path is absolute. Trying to go
|
||||
// above the root is a no-op. Therefore we can remove all '..' parts
|
||||
// directly after the root.
|
||||
parts.splice(i + 1, up);
|
||||
up = 0;
|
||||
} else {
|
||||
parts.splice(i, 2);
|
||||
up--;
|
||||
}
|
||||
}
|
||||
}
|
||||
path = parts.join("/");
|
||||
|
||||
if (path === "") {
|
||||
path = isAbsolute ? "/" : ".";
|
||||
}
|
||||
|
||||
if (url) {
|
||||
url.path = path;
|
||||
return urlGenerate(url);
|
||||
}
|
||||
return path;
|
||||
});
|
||||
exports.normalize = normalize;
|
||||
|
||||
/**
|
||||
* Joins two paths/URLs.
|
||||
*
|
||||
* @param aRoot The root path or URL.
|
||||
* @param aPath The path or URL to be joined with the root.
|
||||
*
|
||||
* - If aPath is a URL or a data URI, aPath is returned, unless aPath is a
|
||||
* scheme-relative URL: Then the scheme of aRoot, if any, is prepended
|
||||
* first.
|
||||
* - Otherwise aPath is a path. If aRoot is a URL, then its path portion
|
||||
* is updated with the result and aRoot is returned. Otherwise the result
|
||||
* is returned.
|
||||
* - If aPath is absolute, the result is aPath.
|
||||
* - Otherwise the two paths are joined with a slash.
|
||||
* - Joining for example 'http://' and 'www.example.com' is also supported.
|
||||
*/
|
||||
function join(aRoot, aPath) {
|
||||
if (aRoot === "") {
|
||||
aRoot = ".";
|
||||
}
|
||||
if (aPath === "") {
|
||||
aPath = ".";
|
||||
}
|
||||
const aPathUrl = urlParse(aPath);
|
||||
const aRootUrl = urlParse(aRoot);
|
||||
if (aRootUrl) {
|
||||
aRoot = aRootUrl.path || "/";
|
||||
}
|
||||
|
||||
// `join(foo, '//www.example.org')`
|
||||
if (aPathUrl && !aPathUrl.scheme) {
|
||||
if (aRootUrl) {
|
||||
aPathUrl.scheme = aRootUrl.scheme;
|
||||
}
|
||||
return urlGenerate(aPathUrl);
|
||||
}
|
||||
|
||||
if (aPathUrl || aPath.match(dataUrlRegexp)) {
|
||||
return aPath;
|
||||
}
|
||||
|
||||
// `join('http://', 'www.example.com')`
|
||||
if (aRootUrl && !aRootUrl.host && !aRootUrl.path) {
|
||||
aRootUrl.host = aPath;
|
||||
return urlGenerate(aRootUrl);
|
||||
}
|
||||
|
||||
const joined = aPath.charAt(0) === "/"
|
||||
? aPath
|
||||
: normalize(aRoot.replace(/\/+$/, "") + "/" + aPath);
|
||||
|
||||
if (aRootUrl) {
|
||||
aRootUrl.path = joined;
|
||||
return urlGenerate(aRootUrl);
|
||||
}
|
||||
return joined;
|
||||
}
|
||||
exports.join = join;
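// A quick sketch of how join() resolves its main cases (illustrative; uses the
// same hypothetical `util` require as the normalize sketch above):
//
//   util.join("/the/root", "one.js");                // => "/the/root/one.js"
//   util.join("/the/root", "/absolute/one.js");      // => "/absolute/one.js"
//   util.join("http://example.com/dir/", "one.js");  // => "http://example.com/dir/one.js"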
|
||||
|
||||
exports.isAbsolute = function(aPath) {
|
||||
return aPath.charAt(0) === "/" || urlRegexp.test(aPath);
|
||||
};
|
||||
|
||||
/**
|
||||
* Make a path relative to a URL or another path.
|
||||
*
|
||||
* @param aRoot The root path or URL.
|
||||
* @param aPath The path or URL to be made relative to aRoot.
|
||||
*/
|
||||
function relative(aRoot, aPath) {
|
||||
if (aRoot === "") {
|
||||
aRoot = ".";
|
||||
}
|
||||
|
||||
aRoot = aRoot.replace(/\/$/, "");
|
||||
|
||||
// It is possible for the path to be above the root. In this case, simply
|
||||
// checking whether the root is a prefix of the path won't work. Instead, we
|
||||
// need to remove components from the root one by one, until either we find
|
||||
// a prefix that fits, or we run out of components to remove.
|
||||
let level = 0;
|
||||
while (aPath.indexOf(aRoot + "/") !== 0) {
|
||||
const index = aRoot.lastIndexOf("/");
|
||||
if (index < 0) {
|
||||
return aPath;
|
||||
}
|
||||
|
||||
// If the only part of the root that is left is the scheme (i.e. http://,
|
||||
// file:///, etc.), one or more slashes (/), or simply nothing at all, we
|
||||
// have exhausted all components, so the path is not relative to the root.
|
||||
aRoot = aRoot.slice(0, index);
|
||||
if (aRoot.match(/^([^\/]+:\/)?\/*$/)) {
|
||||
return aPath;
|
||||
}
|
||||
|
||||
++level;
|
||||
}
|
||||
|
||||
// Make sure we add a "../" for each component we removed from the root.
|
||||
return Array(level + 1).join("../") + aPath.substr(aRoot.length + 1);
|
||||
}
|
||||
exports.relative = relative;
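// A minimal sketch of relative() (illustrative; note that a path that is not
// under the root is returned unchanged):
//
//   util.relative("/the/root", "/the/root/one.js");      // => "one.js"
//   util.relative("/the/root/sub", "/the/root/one.js");  // => "../one.js"
//   util.relative("/the/root", "/unrelated/one.js");     // => "/unrelated/one.js"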
|
||||
|
||||
const supportsNullProto = (function() {
|
||||
const obj = Object.create(null);
|
||||
return !("__proto__" in obj);
|
||||
}());
|
||||
|
||||
function identity(s) {
|
||||
return s;
|
||||
}
|
||||
|
||||
/**
|
||||
* Because behavior goes wacky when you set `__proto__` on objects, we
|
||||
* have to prefix all the strings in our set with an arbitrary character.
|
||||
*
|
||||
* See https://github.com/mozilla/source-map/pull/31 and
|
||||
* https://github.com/mozilla/source-map/issues/30
|
||||
*
|
||||
* @param String aStr
|
||||
*/
|
||||
function toSetString(aStr) {
|
||||
if (isProtoString(aStr)) {
|
||||
return "$" + aStr;
|
||||
}
|
||||
|
||||
return aStr;
|
||||
}
|
||||
exports.toSetString = supportsNullProto ? identity : toSetString;
|
||||
|
||||
function fromSetString(aStr) {
|
||||
if (isProtoString(aStr)) {
|
||||
return aStr.slice(1);
|
||||
}
|
||||
|
||||
return aStr;
|
||||
}
|
||||
exports.fromSetString = supportsNullProto ? identity : fromSetString;
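// A small sketch of the proto-guard pair above (illustrative; on engines
// without null-prototype objects the exported helpers behave like this,
// otherwise they are the identity function):
//
//   toSetString("__proto__");     // => "$__proto__"
//   fromSetString("$__proto__");  // => "__proto__"
//   toSetString("foo");           // => "foo" (only proto-like keys are prefixed)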
|
||||
|
||||
function isProtoString(s) {
|
||||
if (!s) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const length = s.length;
|
||||
|
||||
if (length < 9 /* "__proto__".length */) {
|
||||
return false;
|
||||
}
|
||||
|
||||
/* eslint-disable no-multi-spaces */
|
||||
if (s.charCodeAt(length - 1) !== 95 /* '_' */ ||
|
||||
s.charCodeAt(length - 2) !== 95 /* '_' */ ||
|
||||
s.charCodeAt(length - 3) !== 111 /* 'o' */ ||
|
||||
s.charCodeAt(length - 4) !== 116 /* 't' */ ||
|
||||
s.charCodeAt(length - 5) !== 111 /* 'o' */ ||
|
||||
s.charCodeAt(length - 6) !== 114 /* 'r' */ ||
|
||||
s.charCodeAt(length - 7) !== 112 /* 'p' */ ||
|
||||
s.charCodeAt(length - 8) !== 95 /* '_' */ ||
|
||||
s.charCodeAt(length - 9) !== 95 /* '_' */) {
|
||||
return false;
|
||||
}
|
||||
/* eslint-enable no-multi-spaces */
|
||||
|
||||
for (let i = length - 10; i >= 0; i--) {
|
||||
if (s.charCodeAt(i) !== 36 /* '$' */) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Comparator between two mappings where the original positions are compared.
|
||||
*
|
||||
 * Optionally pass in `true` as `onlyCompareOriginal` to consider two
|
||||
* mappings with the same original source/line/column, but different generated
|
||||
* line and column the same. Useful when searching for a mapping with a
|
||||
* stubbed out mapping.
|
||||
*/
|
||||
function compareByOriginalPositions(mappingA, mappingB, onlyCompareOriginal) {
|
||||
let cmp = strcmp(mappingA.source, mappingB.source);
|
||||
if (cmp !== 0) {
|
||||
return cmp;
|
||||
}
|
||||
|
||||
cmp = mappingA.originalLine - mappingB.originalLine;
|
||||
if (cmp !== 0) {
|
||||
return cmp;
|
||||
}
|
||||
|
||||
cmp = mappingA.originalColumn - mappingB.originalColumn;
|
||||
if (cmp !== 0 || onlyCompareOriginal) {
|
||||
return cmp;
|
||||
}
|
||||
|
||||
cmp = mappingA.generatedColumn - mappingB.generatedColumn;
|
||||
if (cmp !== 0) {
|
||||
return cmp;
|
||||
}
|
||||
|
||||
cmp = mappingA.generatedLine - mappingB.generatedLine;
|
||||
if (cmp !== 0) {
|
||||
return cmp;
|
||||
}
|
||||
|
||||
return strcmp(mappingA.name, mappingB.name);
|
||||
}
|
||||
exports.compareByOriginalPositions = compareByOriginalPositions;
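// A minimal comparison sketch (illustrative; the mapping objects and the
// `mappings` array are hypothetical):
//
//   const a = { source: "a.js", originalLine: 1, originalColumn: 0,
//               generatedLine: 3, generatedColumn: 5, name: null };
//   const b = { source: "a.js", originalLine: 2, originalColumn: 0,
//               generatedLine: 1, generatedColumn: 0, name: null };
//   compareByOriginalPositions(a, b) < 0;        // => true (same source, lower originalLine wins)
//   mappings.sort(compareByOriginalPositions);   // orders by original position first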
|
||||
|
||||
/**
|
||||
* Comparator between two mappings with deflated source and name indices where
|
||||
* the generated positions are compared.
|
||||
*
|
||||
* Optionally pass in `true` as `onlyCompareGenerated` to consider two
|
||||
* mappings with the same generated line and column, but different
|
||||
* source/name/original line and column the same. Useful when searching for a
|
||||
* mapping with a stubbed out mapping.
|
||||
*/
|
||||
function compareByGeneratedPositionsDeflated(mappingA, mappingB, onlyCompareGenerated) {
|
||||
let cmp = mappingA.generatedLine - mappingB.generatedLine;
|
||||
if (cmp !== 0) {
|
||||
return cmp;
|
||||
}
|
||||
|
||||
cmp = mappingA.generatedColumn - mappingB.generatedColumn;
|
||||
if (cmp !== 0 || onlyCompareGenerated) {
|
||||
return cmp;
|
||||
}
|
||||
|
||||
cmp = strcmp(mappingA.source, mappingB.source);
|
||||
if (cmp !== 0) {
|
||||
return cmp;
|
||||
}
|
||||
|
||||
cmp = mappingA.originalLine - mappingB.originalLine;
|
||||
if (cmp !== 0) {
|
||||
return cmp;
|
||||
}
|
||||
|
||||
cmp = mappingA.originalColumn - mappingB.originalColumn;
|
||||
if (cmp !== 0) {
|
||||
return cmp;
|
||||
}
|
||||
|
||||
return strcmp(mappingA.name, mappingB.name);
|
||||
}
|
||||
exports.compareByGeneratedPositionsDeflated = compareByGeneratedPositionsDeflated;
|
||||
|
||||
function strcmp(aStr1, aStr2) {
|
||||
if (aStr1 === aStr2) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
if (aStr1 === null) {
|
||||
return 1; // aStr2 !== null
|
||||
}
|
||||
|
||||
if (aStr2 === null) {
|
||||
return -1; // aStr1 !== null
|
||||
}
|
||||
|
||||
if (aStr1 > aStr2) {
|
||||
return 1;
|
||||
}
|
||||
|
||||
return -1;
|
||||
}
|
||||
|
||||
/**
|
||||
* Comparator between two mappings with inflated source and name strings where
|
||||
* the generated positions are compared.
|
||||
*/
|
||||
function compareByGeneratedPositionsInflated(mappingA, mappingB) {
|
||||
let cmp = mappingA.generatedLine - mappingB.generatedLine;
|
||||
if (cmp !== 0) {
|
||||
return cmp;
|
||||
}
|
||||
|
||||
cmp = mappingA.generatedColumn - mappingB.generatedColumn;
|
||||
if (cmp !== 0) {
|
||||
return cmp;
|
||||
}
|
||||
|
||||
cmp = strcmp(mappingA.source, mappingB.source);
|
||||
if (cmp !== 0) {
|
||||
return cmp;
|
||||
}
|
||||
|
||||
cmp = mappingA.originalLine - mappingB.originalLine;
|
||||
if (cmp !== 0) {
|
||||
return cmp;
|
||||
}
|
||||
|
||||
cmp = mappingA.originalColumn - mappingB.originalColumn;
|
||||
if (cmp !== 0) {
|
||||
return cmp;
|
||||
}
|
||||
|
||||
return strcmp(mappingA.name, mappingB.name);
|
||||
}
|
||||
exports.compareByGeneratedPositionsInflated = compareByGeneratedPositionsInflated;
|
||||
|
||||
/**
|
||||
* Strip any JSON XSSI avoidance prefix from the string (as documented
|
||||
* in the source maps specification), and then parse the string as
|
||||
* JSON.
|
||||
*/
|
||||
function parseSourceMapInput(str) {
|
||||
return JSON.parse(str.replace(/^\)]}'[^\n]*\n/, ""));
|
||||
}
|
||||
exports.parseSourceMapInput = parseSourceMapInput;
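// A minimal sketch (illustrative): the XSSI-avoidance prefix ")]}'" is
// stripped before the string is parsed as JSON.
//
//   parseSourceMapInput(")]}'\n{\"version\":3,\"sources\":[],\"names\":[],\"mappings\":\"\"}").version;
//   // => 3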
|
||||
|
||||
/**
|
||||
 * Compute the URL of a source given the source root, the source's
|
||||
* URL, and the source map's URL.
|
||||
*/
|
||||
function computeSourceURL(sourceRoot, sourceURL, sourceMapURL) {
|
||||
sourceURL = sourceURL || "";
|
||||
|
||||
if (sourceRoot) {
|
||||
// This follows what Chrome does.
|
||||
if (sourceRoot[sourceRoot.length - 1] !== "/" && sourceURL[0] !== "/") {
|
||||
sourceRoot += "/";
|
||||
}
|
||||
// The spec says:
|
||||
// Line 4: An optional source root, useful for relocating source
|
||||
// files on a server or removing repeated values in the
|
||||
// “sources” entry. This value is prepended to the individual
|
||||
// entries in the “source” field.
|
||||
sourceURL = sourceRoot + sourceURL;
|
||||
}
|
||||
|
||||
// Historically, SourceMapConsumer did not take the sourceMapURL as
|
||||
// a parameter. This mode is still somewhat supported, which is why
|
||||
// this code block is conditional. However, it's preferable to pass
|
||||
// the source map URL to SourceMapConsumer, so that this function
|
||||
// can implement the source URL resolution algorithm as outlined in
|
||||
// the spec. This block is basically the equivalent of:
|
||||
// new URL(sourceURL, sourceMapURL).toString()
|
||||
// ... except it avoids using URL, which wasn't available in the
|
||||
// older releases of node still supported by this library.
|
||||
//
|
||||
// The spec says:
|
||||
// If the sources are not absolute URLs after prepending of the
|
||||
// “sourceRoot”, the sources are resolved relative to the
|
||||
// SourceMap (like resolving script src in a html document).
|
||||
if (sourceMapURL) {
|
||||
const parsed = urlParse(sourceMapURL);
|
||||
if (!parsed) {
|
||||
throw new Error("sourceMapURL could not be parsed");
|
||||
}
|
||||
if (parsed.path) {
|
||||
// Strip the last path component, but keep the "/".
|
||||
const index = parsed.path.lastIndexOf("/");
|
||||
if (index >= 0) {
|
||||
parsed.path = parsed.path.substring(0, index + 1);
|
||||
}
|
||||
}
|
||||
sourceURL = join(urlGenerate(parsed), sourceURL);
|
||||
}
|
||||
|
||||
return normalize(sourceURL);
|
||||
}
|
||||
exports.computeSourceURL = computeSourceURL;
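// A minimal resolution sketch (illustrative; the URLs and file names are
// placeholders):
//
//   computeSourceURL("src", "a.js", "http://example.com/app/bundle.js.map");
//   // => "http://example.com/app/src/a.js"
//   computeSourceURL(null, "a.js", "http://example.com/app/bundle.js.map");
//   // => "http://example.com/app/a.js"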
|
node_modules/terser/node_modules/source-map/lib/wasm.js (generated, vendored; 107 lines removed)
@@ -1,107 +0,0 @@
|
||||
const readWasm = require("../lib/read-wasm");
|
||||
|
||||
/**
|
||||
* Provide the JIT with a nice shape / hidden class.
|
||||
*/
|
||||
function Mapping() {
|
||||
this.generatedLine = 0;
|
||||
this.generatedColumn = 0;
|
||||
this.lastGeneratedColumn = null;
|
||||
this.source = null;
|
||||
this.originalLine = null;
|
||||
this.originalColumn = null;
|
||||
this.name = null;
|
||||
}
|
||||
|
||||
let cachedWasm = null;
|
||||
|
||||
module.exports = function wasm() {
|
||||
if (cachedWasm) {
|
||||
return cachedWasm;
|
||||
}
|
||||
|
||||
const callbackStack = [];
|
||||
|
||||
cachedWasm = readWasm().then(buffer => {
|
||||
return WebAssembly.instantiate(buffer, {
|
||||
env: {
|
||||
mapping_callback(
|
||||
generatedLine,
|
||||
generatedColumn,
|
||||
|
||||
hasLastGeneratedColumn,
|
||||
lastGeneratedColumn,
|
||||
|
||||
hasOriginal,
|
||||
source,
|
||||
originalLine,
|
||||
originalColumn,
|
||||
|
||||
hasName,
|
||||
name
|
||||
) {
|
||||
const mapping = new Mapping();
|
||||
// JS uses 1-based line numbers, wasm uses 0-based.
|
||||
mapping.generatedLine = generatedLine + 1;
|
||||
mapping.generatedColumn = generatedColumn;
|
||||
|
||||
if (hasLastGeneratedColumn) {
|
||||
// JS uses inclusive last generated column, wasm uses exclusive.
|
||||
mapping.lastGeneratedColumn = lastGeneratedColumn - 1;
|
||||
}
|
||||
|
||||
if (hasOriginal) {
|
||||
mapping.source = source;
|
||||
// JS uses 1-based line numbers, wasm uses 0-based.
|
||||
mapping.originalLine = originalLine + 1;
|
||||
mapping.originalColumn = originalColumn;
|
||||
|
||||
if (hasName) {
|
||||
mapping.name = name;
|
||||
}
|
||||
}
|
||||
|
||||
callbackStack[callbackStack.length - 1](mapping);
|
||||
},
|
||||
|
||||
start_all_generated_locations_for() { console.time("all_generated_locations_for"); },
|
||||
end_all_generated_locations_for() { console.timeEnd("all_generated_locations_for"); },
|
||||
|
||||
start_compute_column_spans() { console.time("compute_column_spans"); },
|
||||
end_compute_column_spans() { console.timeEnd("compute_column_spans"); },
|
||||
|
||||
start_generated_location_for() { console.time("generated_location_for"); },
|
||||
end_generated_location_for() { console.timeEnd("generated_location_for"); },
|
||||
|
||||
start_original_location_for() { console.time("original_location_for"); },
|
||||
end_original_location_for() { console.timeEnd("original_location_for"); },
|
||||
|
||||
start_parse_mappings() { console.time("parse_mappings"); },
|
||||
end_parse_mappings() { console.timeEnd("parse_mappings"); },
|
||||
|
||||
start_sort_by_generated_location() { console.time("sort_by_generated_location"); },
|
||||
end_sort_by_generated_location() { console.timeEnd("sort_by_generated_location"); },
|
||||
|
||||
start_sort_by_original_location() { console.time("sort_by_original_location"); },
|
||||
end_sort_by_original_location() { console.timeEnd("sort_by_original_location"); },
|
||||
}
|
||||
});
|
||||
}).then(Wasm => {
|
||||
return {
|
||||
exports: Wasm.instance.exports,
|
||||
withMappingCallback: (mappingCallback, f) => {
|
||||
callbackStack.push(mappingCallback);
|
||||
try {
|
||||
f();
|
||||
} finally {
|
||||
callbackStack.pop();
|
||||
}
|
||||
}
|
||||
};
|
||||
}).then(null, e => {
|
||||
cachedWasm = null;
|
||||
throw e;
|
||||
});
|
||||
|
||||
return cachedWasm;
|
||||
};
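// A rough usage sketch (illustrative; this is an internal helper consumed by
// the WASM-backed SourceMapConsumer, and the `exports.*` calls are placeholders):
//
//   wasm().then(({ exports, withMappingCallback }) => {
//     withMappingCallback(mapping => console.log(mapping.generatedLine), () => {
//       // call into exports.* here; each decoded mapping is passed to the callback
//     });
//   });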
|
node_modules/terser/node_modules/source-map/package.json (generated, vendored; 229 lines removed)
@@ -1,229 +0,0 @@
|
||||
{
|
||||
"_from": "source-map@~0.7.2",
|
||||
"_id": "source-map@0.7.3",
|
||||
"_inBundle": false,
|
||||
"_integrity": "sha512-CkCj6giN3S+n9qrYiBTX5gystlENnRW5jZeNLHpe6aue+SrHcG5VYwujhW9s4dY31mEGsxBDrHR6oI69fTXsaQ==",
|
||||
"_location": "/terser/source-map",
|
||||
"_phantomChildren": {},
|
||||
"_requested": {
|
||||
"type": "range",
|
||||
"registry": true,
|
||||
"raw": "source-map@~0.7.2",
|
||||
"name": "source-map",
|
||||
"escapedName": "source-map",
|
||||
"rawSpec": "~0.7.2",
|
||||
"saveSpec": null,
|
||||
"fetchSpec": "~0.7.2"
|
||||
},
|
||||
"_requiredBy": [
|
||||
"/terser"
|
||||
],
|
||||
"_resolved": "https://registry.npmjs.org/source-map/-/source-map-0.7.3.tgz",
|
||||
"_shasum": "5302f8169031735226544092e64981f751750383",
|
||||
"_spec": "source-map@~0.7.2",
|
||||
"_where": "D:\\Projects\\minifyfromhtml\\node_modules\\terser",
|
||||
"author": {
|
||||
"name": "Nick Fitzgerald",
|
||||
"email": "nfitzgerald@mozilla.com"
|
||||
},
|
||||
"bugs": {
|
||||
"url": "https://github.com/mozilla/source-map/issues"
|
||||
},
|
||||
"bundleDependencies": false,
|
||||
"contributors": [
|
||||
{
|
||||
"name": "Tobias Koppers",
|
||||
"email": "tobias.koppers@googlemail.com"
|
||||
},
|
||||
{
|
||||
"name": "Duncan Beevers",
|
||||
"email": "duncan@dweebd.com"
|
||||
},
|
||||
{
|
||||
"name": "Stephen Crane",
|
||||
"email": "scrane@mozilla.com"
|
||||
},
|
||||
{
|
||||
"name": "Ryan Seddon",
|
||||
"email": "seddon.ryan@gmail.com"
|
||||
},
|
||||
{
|
||||
"name": "Miles Elam",
|
||||
"email": "miles.elam@deem.com"
|
||||
},
|
||||
{
|
||||
"name": "Mihai Bazon",
|
||||
"email": "mihai.bazon@gmail.com"
|
||||
},
|
||||
{
|
||||
"name": "Michael Ficarra",
|
||||
"email": "github.public.email@michael.ficarra.me"
|
||||
},
|
||||
{
|
||||
"name": "Todd Wolfson",
|
||||
"email": "todd@twolfson.com"
|
||||
},
|
||||
{
|
||||
"name": "Alexander Solovyov",
|
||||
"email": "alexander@solovyov.net"
|
||||
},
|
||||
{
|
||||
"name": "Felix Gnass",
|
||||
"email": "fgnass@gmail.com"
|
||||
},
|
||||
{
|
||||
"name": "Conrad Irwin",
|
||||
"email": "conrad.irwin@gmail.com"
|
||||
},
|
||||
{
|
||||
"name": "usrbincc",
|
||||
"email": "usrbincc@yahoo.com"
|
||||
},
|
||||
{
|
||||
"name": "David Glasser",
|
||||
"email": "glasser@davidglasser.net"
|
||||
},
|
||||
{
|
||||
"name": "Chase Douglas",
|
||||
"email": "chase@newrelic.com"
|
||||
},
|
||||
{
|
||||
"name": "Evan Wallace",
|
||||
"email": "evan.exe@gmail.com"
|
||||
},
|
||||
{
|
||||
"name": "Heather Arthur",
|
||||
"email": "fayearthur@gmail.com"
|
||||
},
|
||||
{
|
||||
"name": "Hugh Kennedy",
|
||||
"email": "hughskennedy@gmail.com"
|
||||
},
|
||||
{
|
||||
"name": "David Glasser",
|
||||
"email": "glasser@davidglasser.net"
|
||||
},
|
||||
{
|
||||
"name": "Simon Lydell",
|
||||
"email": "simon.lydell@gmail.com"
|
||||
},
|
||||
{
|
||||
"name": "Jmeas Smith",
|
||||
"email": "jellyes2@gmail.com"
|
||||
},
|
||||
{
|
||||
"name": "Michael Z Goddard",
|
||||
"email": "mzgoddard@gmail.com"
|
||||
},
|
||||
{
|
||||
"name": "azu",
|
||||
"email": "azu@users.noreply.github.com"
|
||||
},
|
||||
{
|
||||
"name": "John Gozde",
|
||||
"email": "john@gozde.ca"
|
||||
},
|
||||
{
|
||||
"name": "Adam Kirkton",
|
||||
"email": "akirkton@truefitinnovation.com"
|
||||
},
|
||||
{
|
||||
"name": "Chris Montgomery",
|
||||
"email": "christopher.montgomery@dowjones.com"
|
||||
},
|
||||
{
|
||||
"name": "J. Ryan Stinnett",
|
||||
"email": "jryans@gmail.com"
|
||||
},
|
||||
{
|
||||
"name": "Jack Herrington",
|
||||
"email": "jherrington@walmartlabs.com"
|
||||
},
|
||||
{
|
||||
"name": "Chris Truter",
|
||||
"email": "jeffpalentine@gmail.com"
|
||||
},
|
||||
{
|
||||
"name": "Daniel Espeset",
|
||||
"email": "daniel@danielespeset.com"
|
||||
},
|
||||
{
|
||||
"name": "Jamie Wong",
|
||||
"email": "jamie.lf.wong@gmail.com"
|
||||
},
|
||||
{
|
||||
"name": "Eddy Bruël",
|
||||
"email": "ejpbruel@mozilla.com"
|
||||
},
|
||||
{
|
||||
"name": "Hawken Rives",
|
||||
"email": "hawkrives@gmail.com"
|
||||
},
|
||||
{
|
||||
"name": "Gilad Peleg",
|
||||
"email": "giladp007@gmail.com"
|
||||
},
|
||||
{
|
||||
"name": "djchie",
|
||||
"email": "djchie.dev@gmail.com"
|
||||
},
|
||||
{
|
||||
"name": "Gary Ye",
|
||||
"email": "garysye@gmail.com"
|
||||
},
|
||||
{
|
||||
"name": "Nicolas Lalevée",
|
||||
"email": "nicolas.lalevee@hibnet.org"
|
||||
}
|
||||
],
|
||||
"deprecated": false,
|
||||
"description": "Generates and consumes source maps",
|
||||
"devDependencies": {
|
||||
"doctoc": "^0.15.0",
|
||||
"eslint": "^4.19.1",
|
||||
"live-server": "^1.2.0",
|
||||
"npm-run-all": "^4.1.2",
|
||||
"nyc": "^11.7.1",
|
||||
"watch": "^1.0.2",
|
||||
"webpack": "^3.10"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 8"
|
||||
},
|
||||
"files": [
|
||||
"source-map.js",
|
||||
"source-map.d.ts",
|
||||
"lib/",
|
||||
"dist/source-map.js"
|
||||
],
|
||||
"homepage": "https://github.com/mozilla/source-map",
|
||||
"license": "BSD-3-Clause",
|
||||
"main": "./source-map.js",
|
||||
"name": "source-map",
|
||||
"nyc": {
|
||||
"reporter": "html"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+ssh://git@github.com/mozilla/source-map.git"
|
||||
},
|
||||
"scripts": {
|
||||
"build": "webpack --color",
|
||||
"clean": "rm -rf coverage .nyc_output",
|
||||
"coverage": "nyc node test/run-tests.js",
|
||||
"dev": "npm-run-all -p --silent dev:*",
|
||||
"dev:live": "live-server --port=4103 --ignorePattern='(js|css|png)$' coverage",
|
||||
"dev:watch": "watch 'npm run coverage' lib/ test/",
|
||||
"lint": "eslint *.js lib/ test/",
|
||||
"prebuild": "npm run lint",
|
||||
"precoverage": "npm run build",
|
||||
"predev": "npm run setup",
|
||||
"pretest": "npm run build",
|
||||
"setup": "mkdir -p coverage && cp -n .waiting.html coverage/index.html || true",
|
||||
"test": "node test/run-tests.js",
|
||||
"toc": "doctoc --title '## Table of Contents' README.md && doctoc --title '## Table of Contents' CONTRIBUTING.md"
|
||||
},
|
||||
"types": "./source-map.d.ts",
|
||||
"typings": "source-map",
|
||||
"version": "0.7.3"
|
||||
}
|
node_modules/terser/node_modules/source-map/source-map.d.ts (generated, vendored; 369 lines removed)
@@ -1,369 +0,0 @@
|
||||
// Type definitions for source-map 0.7
|
||||
// Project: https://github.com/mozilla/source-map
|
||||
// Definitions by: Morten Houston Ludvigsen <https://github.com/MortenHoustonLudvigsen>,
|
||||
// Ron Buckton <https://github.com/rbuckton>,
|
||||
// John Vilk <https://github.com/jvilk>
|
||||
// Definitions: https://github.com/mozilla/source-map
|
||||
export type SourceMapUrl = string;
|
||||
|
||||
export interface StartOfSourceMap {
|
||||
file?: string;
|
||||
sourceRoot?: string;
|
||||
skipValidation?: boolean;
|
||||
}
|
||||
|
||||
export interface RawSourceMap {
|
||||
version: number;
|
||||
sources: string[];
|
||||
names: string[];
|
||||
sourceRoot?: string;
|
||||
sourcesContent?: string[];
|
||||
mappings: string;
|
||||
file: string;
|
||||
}
|
||||
|
||||
export interface RawIndexMap extends StartOfSourceMap {
|
||||
version: number;
|
||||
sections: RawSection[];
|
||||
}
|
||||
|
||||
export interface RawSection {
|
||||
offset: Position;
|
||||
map: RawSourceMap;
|
||||
}
|
||||
|
||||
export interface Position {
|
||||
line: number;
|
||||
column: number;
|
||||
}
|
||||
|
||||
export interface NullablePosition {
|
||||
line: number | null;
|
||||
column: number | null;
|
||||
lastColumn: number | null;
|
||||
}
|
||||
|
||||
export interface MappedPosition {
|
||||
source: string;
|
||||
line: number;
|
||||
column: number;
|
||||
name?: string;
|
||||
}
|
||||
|
||||
export interface NullableMappedPosition {
|
||||
source: string | null;
|
||||
line: number | null;
|
||||
column: number | null;
|
||||
name: string | null;
|
||||
}
|
||||
|
||||
export interface MappingItem {
|
||||
source: string;
|
||||
generatedLine: number;
|
||||
generatedColumn: number;
|
||||
originalLine: number;
|
||||
originalColumn: number;
|
||||
name: string;
|
||||
}
|
||||
|
||||
export interface Mapping {
|
||||
generated: Position;
|
||||
original: Position;
|
||||
source: string;
|
||||
name?: string;
|
||||
}
|
||||
|
||||
export interface CodeWithSourceMap {
|
||||
code: string;
|
||||
map: SourceMapGenerator;
|
||||
}
|
||||
|
||||
export interface SourceMapConsumer {
|
||||
/**
|
||||
* Compute the last column for each generated mapping. The last column is
|
||||
* inclusive.
|
||||
*/
|
||||
computeColumnSpans(): void;
|
||||
|
||||
/**
|
||||
* Returns the original source, line, and column information for the generated
|
||||
* source's line and column positions provided. The only argument is an object
|
||||
* with the following properties:
|
||||
*
|
||||
* - line: The line number in the generated source.
|
||||
* - column: The column number in the generated source.
|
||||
* - bias: Either 'SourceMapConsumer.GREATEST_LOWER_BOUND' or
|
||||
* 'SourceMapConsumer.LEAST_UPPER_BOUND'. Specifies whether to return the
|
||||
* closest element that is smaller than or greater than the one we are
|
||||
* searching for, respectively, if the exact element cannot be found.
|
||||
* Defaults to 'SourceMapConsumer.GREATEST_LOWER_BOUND'.
|
||||
*
|
||||
* and an object is returned with the following properties:
|
||||
*
|
||||
* - source: The original source file, or null.
|
||||
* - line: The line number in the original source, or null.
|
||||
* - column: The column number in the original source, or null.
|
||||
* - name: The original identifier, or null.
|
||||
*/
|
||||
originalPositionFor(generatedPosition: Position & { bias?: number }): NullableMappedPosition;
|
||||
|
||||
/**
|
||||
* Returns the generated line and column information for the original source,
|
||||
* line, and column positions provided. The only argument is an object with
|
||||
* the following properties:
|
||||
*
|
||||
* - source: The filename of the original source.
|
||||
* - line: The line number in the original source.
|
||||
* - column: The column number in the original source.
|
||||
* - bias: Either 'SourceMapConsumer.GREATEST_LOWER_BOUND' or
|
||||
* 'SourceMapConsumer.LEAST_UPPER_BOUND'. Specifies whether to return the
|
||||
* closest element that is smaller than or greater than the one we are
|
||||
* searching for, respectively, if the exact element cannot be found.
|
||||
* Defaults to 'SourceMapConsumer.GREATEST_LOWER_BOUND'.
|
||||
*
|
||||
* and an object is returned with the following properties:
|
||||
*
|
||||
* - line: The line number in the generated source, or null.
|
||||
* - column: The column number in the generated source, or null.
|
||||
*/
|
||||
generatedPositionFor(originalPosition: MappedPosition & { bias?: number }): NullablePosition;
|
||||
|
||||
/**
|
||||
* Returns all generated line and column information for the original source,
|
||||
* line, and column provided. If no column is provided, returns all mappings
|
||||
 * corresponding to either the line we are searching for or the next
|
||||
* closest line that has any mappings. Otherwise, returns all mappings
|
||||
* corresponding to the given line and either the column we are searching for
|
||||
* or the next closest column that has any offsets.
|
||||
*
|
||||
* The only argument is an object with the following properties:
|
||||
*
|
||||
* - source: The filename of the original source.
|
||||
* - line: The line number in the original source.
|
||||
* - column: Optional. the column number in the original source.
|
||||
*
|
||||
* and an array of objects is returned, each with the following properties:
|
||||
*
|
||||
* - line: The line number in the generated source, or null.
|
||||
* - column: The column number in the generated source, or null.
|
||||
*/
|
||||
allGeneratedPositionsFor(originalPosition: MappedPosition): NullablePosition[];
|
||||
|
||||
/**
|
||||
* Return true if we have the source content for every source in the source
|
||||
* map, false otherwise.
|
||||
*/
|
||||
hasContentsOfAllSources(): boolean;
|
||||
|
||||
/**
|
||||
* Returns the original source content. The only argument is the url of the
|
||||
* original source file. Returns null if no original source content is
|
||||
* available.
|
||||
*/
|
||||
sourceContentFor(source: string, returnNullOnMissing?: boolean): string | null;
|
||||
|
||||
/**
|
||||
* Iterate over each mapping between an original source/line/column and a
|
||||
* generated line/column in this source map.
|
||||
*
|
||||
* @param callback
|
||||
* The function that is called with each mapping.
|
||||
* @param context
|
||||
* Optional. If specified, this object will be the value of `this` every
|
||||
* time that `aCallback` is called.
|
||||
* @param order
|
||||
* Either `SourceMapConsumer.GENERATED_ORDER` or
|
||||
* `SourceMapConsumer.ORIGINAL_ORDER`. Specifies whether you want to
|
||||
* iterate over the mappings sorted by the generated file's line/column
|
||||
* order or the original's source/line/column order, respectively. Defaults to
|
||||
* `SourceMapConsumer.GENERATED_ORDER`.
|
||||
*/
|
||||
eachMapping(callback: (mapping: MappingItem) => void, context?: any, order?: number): void;
|
||||
/**
|
||||
* Free this source map consumer's associated wasm data that is manually-managed.
|
||||
* Alternatively, you can use SourceMapConsumer.with to avoid needing to remember to call destroy.
|
||||
*/
|
||||
destroy(): void;
|
||||
}
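/*
 * A brief consumption sketch (illustrative; `rawMap` is a hypothetical raw
 * source map object, used with the async 0.7.x consumer declared above):
 *
 * ```js
 * const { SourceMapConsumer } = require("source-map");
 * SourceMapConsumer.with(rawMap, null, consumer => {
 *   consumer.computeColumnSpans();
 *   const orig = consumer.originalPositionFor({ line: 1, column: 10 });
 *   const gen = consumer.generatedPositionFor({ source: "in.js", line: 2, column: 4 });
 *   console.log(orig, gen);
 * });
 * ```
 */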
|
||||
|
||||
export interface SourceMapConsumerConstructor {
|
||||
prototype: SourceMapConsumer;
|
||||
|
||||
GENERATED_ORDER: number;
|
||||
ORIGINAL_ORDER: number;
|
||||
GREATEST_LOWER_BOUND: number;
|
||||
LEAST_UPPER_BOUND: number;
|
||||
|
||||
new (rawSourceMap: RawSourceMap, sourceMapUrl?: SourceMapUrl): Promise<BasicSourceMapConsumer>;
|
||||
new (rawSourceMap: RawIndexMap, sourceMapUrl?: SourceMapUrl): Promise<IndexedSourceMapConsumer>;
|
||||
new (rawSourceMap: RawSourceMap | RawIndexMap | string, sourceMapUrl?: SourceMapUrl): Promise<BasicSourceMapConsumer | IndexedSourceMapConsumer>;
|
||||
|
||||
/**
|
||||
* Create a BasicSourceMapConsumer from a SourceMapGenerator.
|
||||
*
|
||||
* @param sourceMap
|
||||
* The source map that will be consumed.
|
||||
*/
|
||||
fromSourceMap(sourceMap: SourceMapGenerator, sourceMapUrl?: SourceMapUrl): Promise<BasicSourceMapConsumer>;
|
||||
|
||||
/**
|
||||
* Construct a new `SourceMapConsumer` from `rawSourceMap` and `sourceMapUrl`
|
||||
 * (see the `SourceMapConsumer` constructor for details). Then, invoke the `async
|
||||
* function f(SourceMapConsumer) -> T` with the newly constructed consumer, wait
|
||||
* for `f` to complete, call `destroy` on the consumer, and return `f`'s return
|
||||
* value.
|
||||
*
|
||||
* You must not use the consumer after `f` completes!
|
||||
*
|
||||
* By using `with`, you do not have to remember to manually call `destroy` on
|
||||
* the consumer, since it will be called automatically once `f` completes.
|
||||
*
|
||||
* ```js
|
||||
* const xSquared = await SourceMapConsumer.with(
|
||||
* myRawSourceMap,
|
||||
* null,
|
||||
* async function (consumer) {
|
||||
* // Use `consumer` inside here and don't worry about remembering
|
||||
* // to call `destroy`.
|
||||
*
|
||||
* const x = await whatever(consumer);
|
||||
* return x * x;
|
||||
* }
|
||||
* );
|
||||
*
|
||||
* // You may not use that `consumer` anymore out here; it has
|
||||
* // been destroyed. But you can use `xSquared`.
|
||||
* console.log(xSquared);
|
||||
* ```
|
||||
*/
|
||||
with<T>(rawSourceMap: RawSourceMap | RawIndexMap | string, sourceMapUrl: SourceMapUrl | null | undefined, callback: (consumer: BasicSourceMapConsumer | IndexedSourceMapConsumer) => Promise<T> | T): Promise<T>;
|
||||
}
|
||||
|
||||
export const SourceMapConsumer: SourceMapConsumerConstructor;
|
||||
|
||||
export interface BasicSourceMapConsumer extends SourceMapConsumer {
|
||||
file: string;
|
||||
sourceRoot: string;
|
||||
sources: string[];
|
||||
sourcesContent: string[];
|
||||
}
|
||||
|
||||
export interface BasicSourceMapConsumerConstructor {
|
||||
prototype: BasicSourceMapConsumer;
|
||||
|
||||
new (rawSourceMap: RawSourceMap | string): Promise<BasicSourceMapConsumer>;
|
||||
|
||||
/**
|
||||
* Create a BasicSourceMapConsumer from a SourceMapGenerator.
|
||||
*
|
||||
* @param sourceMap
|
||||
* The source map that will be consumed.
|
||||
*/
|
||||
fromSourceMap(sourceMap: SourceMapGenerator): Promise<BasicSourceMapConsumer>;
|
||||
}
|
||||
|
||||
export const BasicSourceMapConsumer: BasicSourceMapConsumerConstructor;
|
||||
|
||||
export interface IndexedSourceMapConsumer extends SourceMapConsumer {
|
||||
sources: string[];
|
||||
}
|
||||
|
||||
export interface IndexedSourceMapConsumerConstructor {
|
||||
prototype: IndexedSourceMapConsumer;
|
||||
|
||||
new (rawSourceMap: RawIndexMap | string): Promise<IndexedSourceMapConsumer>;
|
||||
}
|
||||
|
||||
export const IndexedSourceMapConsumer: IndexedSourceMapConsumerConstructor;
|
||||
|
||||
export class SourceMapGenerator {
|
||||
constructor(startOfSourceMap?: StartOfSourceMap);
|
||||
|
||||
/**
|
||||
* Creates a new SourceMapGenerator based on a SourceMapConsumer
|
||||
*
|
||||
* @param sourceMapConsumer The SourceMap.
|
||||
*/
|
||||
static fromSourceMap(sourceMapConsumer: SourceMapConsumer): SourceMapGenerator;
|
||||
|
||||
/**
|
||||
* Add a single mapping from original source line and column to the generated
|
||||
* source's line and column for this source map being created. The mapping
|
||||
* object should have the following properties:
|
||||
*
|
||||
* - generated: An object with the generated line and column positions.
|
||||
* - original: An object with the original line and column positions.
|
||||
* - source: The original source file (relative to the sourceRoot).
|
||||
* - name: An optional original token name for this mapping.
|
||||
*/
|
||||
addMapping(mapping: Mapping): void;
|
||||
|
||||
/**
|
||||
* Set the source content for a source file.
|
||||
*/
|
||||
setSourceContent(sourceFile: string, sourceContent: string): void;
|
||||
|
||||
/**
|
||||
* Applies the mappings of a sub-source-map for a specific source file to the
|
||||
* source map being generated. Each mapping to the supplied source file is
|
||||
* rewritten using the supplied source map. Note: The resolution for the
|
||||
 * resulting mappings is the minimum of this map and the supplied map.
|
||||
*
|
||||
* @param sourceMapConsumer The source map to be applied.
|
||||
* @param sourceFile Optional. The filename of the source file.
|
||||
* If omitted, SourceMapConsumer's file property will be used.
|
||||
* @param sourceMapPath Optional. The dirname of the path to the source map
|
||||
* to be applied. If relative, it is relative to the SourceMapConsumer.
|
||||
* This parameter is needed when the two source maps aren't in the same
|
||||
* directory, and the source map to be applied contains relative source
|
||||
* paths. If so, those relative source paths need to be rewritten
|
||||
* relative to the SourceMapGenerator.
|
||||
*/
|
||||
applySourceMap(sourceMapConsumer: SourceMapConsumer, sourceFile?: string, sourceMapPath?: string): void;
|
||||
|
||||
toString(): string;
|
||||
|
||||
toJSON(): RawSourceMap;
|
||||
}
|
||||
|
||||
export class SourceNode {
|
||||
children: SourceNode[];
|
||||
sourceContents: any;
|
||||
line: number;
|
||||
column: number;
|
||||
source: string;
|
||||
name: string;
|
||||
|
||||
constructor();
|
||||
constructor(
|
||||
line: number | null,
|
||||
column: number | null,
|
||||
source: string | null,
|
||||
chunks?: Array<(string | SourceNode)> | SourceNode | string,
|
||||
name?: string
|
||||
);
|
||||
|
||||
static fromStringWithSourceMap(
|
||||
code: string,
|
||||
sourceMapConsumer: SourceMapConsumer,
|
||||
relativePath?: string
|
||||
): SourceNode;
|
||||
|
||||
add(chunk: Array<(string | SourceNode)> | SourceNode | string): SourceNode;
|
||||
|
||||
prepend(chunk: Array<(string | SourceNode)> | SourceNode | string): SourceNode;
|
||||
|
||||
setSourceContent(sourceFile: string, sourceContent: string): void;
|
||||
|
||||
walk(fn: (chunk: string, mapping: MappedPosition) => void): void;
|
||||
|
||||
walkSourceContents(fn: (file: string, content: string) => void): void;
|
||||
|
||||
join(sep: string): SourceNode;
|
||||
|
||||
replaceRight(pattern: string, replacement: string): SourceNode;
|
||||
|
||||
toString(): string;
|
||||
|
||||
toStringWithSourceMap(startOfSourceMap?: StartOfSourceMap): CodeWithSourceMap;
|
||||
}
|
node_modules/terser/node_modules/source-map/source-map.js (generated, vendored; 8 lines removed)
@@ -1,8 +0,0 @@
/*
 * Copyright 2009-2011 Mozilla Foundation and contributors
 * Licensed under the New BSD license. See LICENSE.txt or:
 * http://opensource.org/licenses/BSD-3-Clause
 */
exports.SourceMapGenerator = require("./lib/source-map-generator").SourceMapGenerator;
exports.SourceMapConsumer = require("./lib/source-map-consumer").SourceMapConsumer;
exports.SourceNode = require("./lib/source-node").SourceNode;
|
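// A quick sketch of the generator entry point re-exported above (illustrative;
// uses the 0.7.x API of this vendored package, and the file names are placeholders):
//
//   const { SourceMapGenerator } = require("source-map");
//   const gen = new SourceMapGenerator({ file: "out.js" });
//   gen.addMapping({
//     generated: { line: 1, column: 0 },
//     original: { line: 1, column: 0 },
//     source: "in.js"
//   });
//   JSON.parse(gen.toString()).version;  // => 3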
node_modules/terser/package.json (generated, vendored; 48 lines changed)
@@ -1,27 +1,27 @@
|
||||
{
|
||||
"_from": "terser@5.7.0",
|
||||
"_id": "terser@5.7.0",
|
||||
"_from": "terser@^5.7.0",
|
||||
"_id": "terser@5.14.2",
|
||||
"_inBundle": false,
|
||||
"_integrity": "sha512-HP5/9hp2UaZt5fYkuhNBR8YyRcT8juw8+uFbAme53iN9hblvKnLUTKkmwJG6ocWpIKf8UK4DoeWG4ty0J6S6/g==",
|
||||
"_integrity": "sha512-oL0rGeM/WFQCUd0y2QrWxYnq7tfSuKBiqTjRPWrRgB46WD/kiwHwF8T23z78H6Q6kGCuuHcPB+KULHRdxvVGQA==",
|
||||
"_location": "/terser",
|
||||
"_phantomChildren": {},
|
||||
"_requested": {
|
||||
"type": "version",
|
||||
"type": "range",
|
||||
"registry": true,
|
||||
"raw": "terser@5.7.0",
|
||||
"raw": "terser@^5.7.0",
|
||||
"name": "terser",
|
||||
"escapedName": "terser",
|
||||
"rawSpec": "5.7.0",
|
||||
"rawSpec": "^5.7.0",
|
||||
"saveSpec": null,
|
||||
"fetchSpec": "5.7.0"
|
||||
"fetchSpec": "^5.7.0"
|
||||
},
|
||||
"_requiredBy": [
|
||||
"#USER",
|
||||
"/"
|
||||
],
|
||||
"_resolved": "https://registry.npmjs.org/terser/-/terser-5.7.0.tgz",
|
||||
"_shasum": "a761eeec206bc87b605ab13029876ead938ae693",
|
||||
"_spec": "terser@5.7.0",
|
||||
"_resolved": "https://registry.npmjs.org/terser/-/terser-5.14.2.tgz",
|
||||
"_shasum": "9ac9f22b06994d736174f4091aa368db896f1c10",
|
||||
"_spec": "terser@^5.7.0",
|
||||
"_where": "D:\\Projects\\minifyfromhtml",
|
||||
"author": {
|
||||
"name": "Mihai Bazon",
|
||||
@@ -36,24 +36,25 @@
|
||||
},
|
||||
"bundleDependencies": false,
|
||||
"dependencies": {
|
||||
"@jridgewell/source-map": "^0.3.2",
|
||||
"acorn": "^8.5.0",
|
||||
"commander": "^2.20.0",
|
||||
"source-map": "~0.7.2",
|
||||
"source-map-support": "~0.5.19"
|
||||
"source-map-support": "~0.5.20"
|
||||
},
|
||||
"deprecated": false,
|
||||
"description": "JavaScript parser, mangler/compressor and beautifier toolkit for ES6+",
|
||||
"devDependencies": {
|
||||
"@ls-lint/ls-lint": "^1.9.2",
|
||||
"acorn": "^8.0.5",
|
||||
"astring": "^1.6.2",
|
||||
"eslint": "^7.19.0",
|
||||
"eslump": "^2.0.0",
|
||||
"@ls-lint/ls-lint": "^1.10.0",
|
||||
"astring": "^1.7.5",
|
||||
"eslint": "^7.32.0",
|
||||
"eslump": "^3.0.0",
|
||||
"esm": "^3.2.25",
|
||||
"mocha": "^8.2.1",
|
||||
"mocha": "^9.2.0",
|
||||
"pre-commit": "^1.2.2",
|
||||
"rimraf": "^3.0.2",
|
||||
"rollup": "2.38.4",
|
||||
"semver": "^7.3.4"
|
||||
"rollup": "2.56.3",
|
||||
"semver": "^7.3.4",
|
||||
"source-map": "~0.8.0-beta.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=10"
|
||||
@@ -61,7 +62,7 @@
|
||||
"eslintConfig": {
|
||||
"parserOptions": {
|
||||
"sourceType": "module",
|
||||
"ecmaVersion": "2020"
|
||||
"ecmaVersion": 2020
|
||||
},
|
||||
"env": {
|
||||
"node": true,
|
||||
@@ -72,6 +73,8 @@
|
||||
"describe": false,
|
||||
"it": false,
|
||||
"require": false,
|
||||
"before": false,
|
||||
"after": false,
|
||||
"global": false,
|
||||
"process": false
|
||||
},
|
||||
@@ -112,6 +115,7 @@
|
||||
"exports": {
|
||||
".": [
|
||||
{
|
||||
"types": "./tools/terser.d.ts",
|
||||
"import": "./main.js",
|
||||
"require": "./dist/bundle.min.js"
|
||||
},
|
||||
@@ -185,5 +189,5 @@
|
||||
},
|
||||
"type": "module",
|
||||
"types": "tools/terser.d.ts",
|
||||
"version": "5.7.0"
|
||||
"version": "5.14.2"
|
||||
}
|
||||
|
node_modules/terser/tools/domprops.js (generated, vendored; 12 lines changed)
@@ -320,6 +320,7 @@ export var domprops = [
|
||||
"COMMENT_NODE",
|
||||
"COMPARE_REF_TO_TEXTURE",
|
||||
"COMPILE_STATUS",
|
||||
"COMPLETION_STATUS_KHR",
|
||||
"COMPRESSED_RGBA_S3TC_DXT1_EXT",
|
||||
"COMPRESSED_RGBA_S3TC_DXT3_EXT",
|
||||
"COMPRESSED_RGBA_S3TC_DXT5_EXT",
|
||||
@@ -2979,6 +2980,7 @@ export var domprops = [
|
||||
"applyElement",
|
||||
"arc",
|
||||
"arcTo",
|
||||
"architecture",
|
||||
"archive",
|
||||
"areas",
|
||||
"arguments",
|
||||
@@ -3153,6 +3155,7 @@ export var domprops = [
|
||||
"bindTexture",
|
||||
"bindTransformFeedback",
|
||||
"bindVertexArray",
|
||||
"bitness",
|
||||
"blendColor",
|
||||
"blendEquation",
|
||||
"blendEquationSeparate",
|
||||
@@ -3314,6 +3317,8 @@ export var domprops = [
|
||||
"boxDecorationBreak",
|
||||
"boxShadow",
|
||||
"boxSizing",
|
||||
"brand",
|
||||
"brands",
|
||||
"break-after",
|
||||
"break-before",
|
||||
"break-inside",
|
||||
@@ -4312,6 +4317,7 @@ export var domprops = [
|
||||
"fround",
|
||||
"fullPath",
|
||||
"fullScreen",
|
||||
"fullVersionList",
|
||||
"fullscreen",
|
||||
"fullscreenElement",
|
||||
"fullscreenEnabled",
|
||||
@@ -4437,6 +4443,7 @@ export var domprops = [
|
||||
"getFrequencyResponse",
|
||||
"getFullYear",
|
||||
"getGamepads",
|
||||
"getHighEntropyValues",
|
||||
"getHitTestResults",
|
||||
"getHitTestResultsForTransientInput",
|
||||
"getHours",
|
||||
@@ -5277,7 +5284,9 @@ export var domprops = [
|
||||
"mix-blend-mode",
|
||||
"mixBlendMode",
|
||||
"mm",
|
||||
"mobile",
|
||||
"mode",
|
||||
"model",
|
||||
"modify",
|
||||
"mount",
|
||||
"move",
|
||||
@@ -6183,6 +6192,7 @@ export var domprops = [
|
||||
"placeItems",
|
||||
"placeSelf",
|
||||
"placeholder",
|
||||
"platformVersion",
|
||||
"platform",
|
||||
"platforms",
|
||||
"play",
|
||||
@@ -7421,6 +7431,7 @@ export var domprops = [
|
||||
"user-select",
|
||||
"userActivation",
|
||||
"userAgent",
|
||||
"userAgentData",
|
||||
"userChoice",
|
||||
"userHandle",
|
||||
"userHint",
|
||||
@@ -7734,6 +7745,7 @@ export var domprops = [
|
||||
"wordSpacing",
|
||||
"wordWrap",
|
||||
"workerStart",
|
||||
"wow64",
|
||||
"wrap",
|
||||
"wrapKey",
|
||||
"writable",
|
||||
|
node_modules/terser/tools/terser.d.ts (generated, vendored; 46 lines changed)
@@ -1,11 +1,12 @@
|
||||
/// <reference lib="es2015" />
|
||||
|
||||
import { RawSourceMap } from 'source-map';
|
||||
import { SectionedSourceMapInput, EncodedSourceMap, DecodedSourceMap } from '@jridgewell/source-map';
|
||||
|
||||
export type ECMA = 5 | 2015 | 2016 | 2017 | 2018 | 2019 | 2020;
|
||||
|
||||
export interface ParseOptions {
|
||||
bare_returns?: boolean;
|
||||
/** @deprecated legacy option. Currently, all supported EcmaScript is valid to parse. */
|
||||
ecma?: ECMA;
|
||||
html5_comments?: boolean;
|
||||
shebang?: boolean;
|
||||
@@ -80,16 +81,52 @@ export interface MangleOptions {
|
||||
keep_classnames?: boolean | RegExp;
|
||||
keep_fnames?: boolean | RegExp;
|
||||
module?: boolean;
|
||||
nth_identifier?: SimpleIdentifierMangler | WeightedIdentifierMangler;
|
||||
properties?: boolean | ManglePropertiesOptions;
|
||||
reserved?: string[];
|
||||
safari10?: boolean;
|
||||
toplevel?: boolean;
|
||||
}
|
||||
|
||||
/**
|
||||
* An identifier mangler for which the output is invariant with respect to the source code.
|
||||
*/
|
||||
export interface SimpleIdentifierMangler {
|
||||
/**
|
||||
* Obtains the nth most favored (usually shortest) identifier to rename a variable to.
|
||||
* The mangler will increment n and retry until the return value is not in use in scope, and is not a reserved word.
|
||||
* This function is expected to be stable; Evaluating get(n) === get(n) should always return true.
|
||||
* @param n The ordinal of the identifier.
|
||||
*/
|
||||
get(n: number): string;
|
||||
}
|
||||
|
||||
/**
|
||||
* An identifier mangler that leverages character frequency analysis to determine identifier precedence.
|
||||
*/
|
||||
export interface WeightedIdentifierMangler extends SimpleIdentifierMangler {
|
||||
/**
|
||||
* Modifies the internal weighting of the input characters by the specified delta.
|
||||
* Will be invoked on the entire printed AST, and then deduct mangleable identifiers.
|
||||
* @param chars The characters to modify the weighting of.
|
||||
* @param delta The numeric weight to add to the characters.
|
||||
*/
|
||||
consider(chars: string, delta: number): number;
|
||||
/**
|
||||
* Resets character weights.
|
||||
*/
|
||||
reset(): void;
|
||||
/**
|
||||
* Sorts identifiers by character frequency, in preparation for calls to get(n).
|
||||
*/
|
||||
sort(): void;
|
||||
}
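/*
 * A minimal nth_identifier sketch (illustrative; the underscore-prefixed
 * base-26 scheme below is an arbitrary choice, not a terser default, and
 * `code` is a placeholder source string):
 *
 * ```js
 * const { minify } = require("terser");
 * const alphabet = "abcdefghijklmnopqrstuvwxyz";
 * const nth_identifier = {
 *   // get(n) must be stable: the same n always yields the same name.
 *   get(n) {
 *     let name = "";
 *     do {
 *       name = alphabet[n % alphabet.length] + name;
 *       n = Math.floor(n / alphabet.length);
 *     } while (n > 0);
 *     return "_" + name;
 *   }
 * };
 * // inside an async function:
 * const result = await minify(code, { mangle: { nth_identifier } });
 * ```
 */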
|
||||
|
||||
export interface ManglePropertiesOptions {
|
||||
builtins?: boolean;
|
||||
debug?: boolean;
|
||||
keep_quoted?: boolean | 'strict';
|
||||
nth_identifier?: SimpleIdentifierMangler | WeightedIdentifierMangler;
|
||||
regex?: RegExp | string;
|
||||
reserved?: string[];
|
||||
}
|
||||
@@ -108,6 +145,7 @@ export interface FormatOptions {
|
||||
}) => boolean );
|
||||
ecma?: ECMA;
|
||||
ie8?: boolean;
|
||||
keep_numbers?: boolean;
|
||||
indent_level?: number;
|
||||
indent_start?: number;
|
||||
inline_script?: boolean;
|
||||
@@ -138,6 +176,7 @@ export enum OutputQuoteStyle {
|
||||
export interface MinifyOptions {
|
||||
compress?: boolean | CompressOptions;
|
||||
ecma?: ECMA;
|
||||
enclose?: boolean | string;
|
||||
ie8?: boolean;
|
||||
keep_classnames?: boolean | RegExp;
|
||||
keep_fnames?: boolean | RegExp;
|
||||
@@ -155,12 +194,13 @@ export interface MinifyOptions {
|
||||
|
||||
export interface MinifyOutput {
|
||||
code?: string;
|
||||
map?: RawSourceMap | string;
|
||||
map?: EncodedSourceMap | string;
|
||||
decoded_map?: DecodedSourceMap | null;
|
||||
}
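/*
 * A minimal minify() call producing a source map (illustrative; the file
 * names and the url value are placeholders):
 *
 * ```js
 * const { minify } = require("terser");
 * // inside an async function:
 * const result = await minify(
 *   { "input.js": "function add(first, second) { return first + second; }" },
 *   { sourceMap: { filename: "input.js", url: "output.js.map" } }
 * );
 * // result.code holds the minified source, result.map the corresponding source map.
 * ```
 */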
|
||||
|
||||
export interface SourceMapOptions {
|
||||
/** Source map object, 'inline' or source map file content */
|
||||
content?: RawSourceMap | string;
|
||||
content?: SectionedSourceMapInput | string;
|
||||
includeSources?: boolean;
|
||||
filename?: string;
|
||||
root?: string;
|