This commit is contained in:
parent b76b953fc2
commit 30ce160b8f
15  .github/workflows/main.yml  vendored
@@ -25,3 +25,18 @@ jobs:
      uses: actions/setup-node@v2
      with:
        node-version: ${{ matrix.node-version }}
    - run: npm ci
    - run: npm run build
    - run: npm test
    - name: Publish to release
      uses: softprops/action-gh-release@v1
      with:
        files: dist/*
        token: ${{ secrets.GITHUB_TOKEN }}
        body: |
          Changes in this Release
          - Added new feature
          - Fixed some bugs
        draft: false
        prerelease: false
        tag: v1.0.0
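A note on the new release step (an editor's sketch, not part of this commit): `softprops/action-gh-release@v1` documents its tag input as `tag_name` rather than `tag`, so publishing under `v1.0.0` with the same artifacts and token as above would look like this:

```yaml
    - name: Publish to release
      uses: softprops/action-gh-release@v1
      with:
        files: dist/*
        token: ${{ secrets.GITHUB_TOKEN }}
        tag_name: v1.0.0  # documented input name; 'tag' is not a recognized input
        draft: false
        prerelease: false
```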
15  node_modules/.bin/acorn  generated  vendored  Normal file
@@ -0,0 +1,15 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")

case `uname` in
    *CYGWIN*) basedir=`cygpath -w "$basedir"`;;
esac

if [ -x "$basedir/node" ]; then
  "$basedir/node" "$basedir/../acorn/bin/acorn" "$@"
  ret=$?
else
  node "$basedir/../acorn/bin/acorn" "$@"
  ret=$?
fi
exit $ret
7  node_modules/.bin/acorn.cmd  generated  vendored  Normal file
@@ -0,0 +1,7 @@
@IF EXIST "%~dp0\node.exe" (
  "%~dp0\node.exe" "%~dp0\..\acorn\bin\acorn" %*
) ELSE (
  @SETLOCAL
  @SET PATHEXT=%PATHEXT:;.JS;=;%
  node "%~dp0\..\acorn\bin\acorn" %*
)
15  node_modules/.bin/ts-node  generated  vendored  Normal file
@@ -0,0 +1,15 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")

case `uname` in
    *CYGWIN*) basedir=`cygpath -w "$basedir"`;;
esac

if [ -x "$basedir/node" ]; then
  "$basedir/node" "$basedir/../ts-node/dist/bin.js" "$@"
  ret=$?
else
  node "$basedir/../ts-node/dist/bin.js" "$@"
  ret=$?
fi
exit $ret
15  node_modules/.bin/ts-node-cwd  generated  vendored  Normal file
@@ -0,0 +1,15 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")

case `uname` in
    *CYGWIN*) basedir=`cygpath -w "$basedir"`;;
esac

if [ -x "$basedir/node" ]; then
  "$basedir/node" "$basedir/../ts-node/dist/bin-cwd.js" "$@"
  ret=$?
else
  node "$basedir/../ts-node/dist/bin-cwd.js" "$@"
  ret=$?
fi
exit $ret
7  node_modules/.bin/ts-node-cwd.cmd  generated  vendored  Normal file
@@ -0,0 +1,7 @@
@IF EXIST "%~dp0\node.exe" (
  "%~dp0\node.exe" "%~dp0\..\ts-node\dist\bin-cwd.js" %*
) ELSE (
  @SETLOCAL
  @SET PATHEXT=%PATHEXT:;.JS;=;%
  node "%~dp0\..\ts-node\dist\bin-cwd.js" %*
)
15  node_modules/.bin/ts-node-esm  generated  vendored  Normal file
@@ -0,0 +1,15 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")

case `uname` in
    *CYGWIN*) basedir=`cygpath -w "$basedir"`;;
esac

if [ -x "$basedir/node" ]; then
  "$basedir/node" "$basedir/../ts-node/dist/bin-esm.js" "$@"
  ret=$?
else
  node "$basedir/../ts-node/dist/bin-esm.js" "$@"
  ret=$?
fi
exit $ret
7  node_modules/.bin/ts-node-esm.cmd  generated  vendored  Normal file
@@ -0,0 +1,7 @@
@IF EXIST "%~dp0\node.exe" (
  "%~dp0\node.exe" "%~dp0\..\ts-node\dist\bin-esm.js" %*
) ELSE (
  @SETLOCAL
  @SET PATHEXT=%PATHEXT:;.JS;=;%
  node "%~dp0\..\ts-node\dist\bin-esm.js" %*
)
15  node_modules/.bin/ts-node-script  generated  vendored  Normal file
@@ -0,0 +1,15 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")

case `uname` in
    *CYGWIN*) basedir=`cygpath -w "$basedir"`;;
esac

if [ -x "$basedir/node" ]; then
  "$basedir/node" "$basedir/../ts-node/dist/bin-script.js" "$@"
  ret=$?
else
  node "$basedir/../ts-node/dist/bin-script.js" "$@"
  ret=$?
fi
exit $ret
7  node_modules/.bin/ts-node-script.cmd  generated  vendored  Normal file
@@ -0,0 +1,7 @@
@IF EXIST "%~dp0\node.exe" (
  "%~dp0\node.exe" "%~dp0\..\ts-node\dist\bin-script.js" %*
) ELSE (
  @SETLOCAL
  @SET PATHEXT=%PATHEXT:;.JS;=;%
  node "%~dp0\..\ts-node\dist\bin-script.js" %*
)
15  node_modules/.bin/ts-node-transpile-only  generated  vendored  Normal file
@@ -0,0 +1,15 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")

case `uname` in
    *CYGWIN*) basedir=`cygpath -w "$basedir"`;;
esac

if [ -x "$basedir/node" ]; then
  "$basedir/node" "$basedir/../ts-node/dist/bin-transpile.js" "$@"
  ret=$?
else
  node "$basedir/../ts-node/dist/bin-transpile.js" "$@"
  ret=$?
fi
exit $ret
7  node_modules/.bin/ts-node-transpile-only.cmd  generated  vendored  Normal file
@@ -0,0 +1,7 @@
@IF EXIST "%~dp0\node.exe" (
  "%~dp0\node.exe" "%~dp0\..\ts-node\dist\bin-transpile.js" %*
) ELSE (
  @SETLOCAL
  @SET PATHEXT=%PATHEXT:;.JS;=;%
  node "%~dp0\..\ts-node\dist\bin-transpile.js" %*
)
7  node_modules/.bin/ts-node.cmd  generated  vendored  Normal file
@@ -0,0 +1,7 @@
@IF EXIST "%~dp0\node.exe" (
  "%~dp0\node.exe" "%~dp0\..\ts-node\dist\bin.js" %*
) ELSE (
  @SETLOCAL
  @SET PATHEXT=%PATHEXT:;.JS;=;%
  node "%~dp0\..\ts-node\dist\bin.js" %*
)
15  node_modules/.bin/ts-script  generated  vendored  Normal file
@@ -0,0 +1,15 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")

case `uname` in
    *CYGWIN*) basedir=`cygpath -w "$basedir"`;;
esac

if [ -x "$basedir/node" ]; then
  "$basedir/node" "$basedir/../ts-node/dist/bin-script-deprecated.js" "$@"
  ret=$?
else
  node "$basedir/../ts-node/dist/bin-script-deprecated.js" "$@"
  ret=$?
fi
exit $ret
7  node_modules/.bin/ts-script.cmd  generated  vendored  Normal file
@@ -0,0 +1,7 @@
@IF EXIST "%~dp0\node.exe" (
  "%~dp0\node.exe" "%~dp0\..\ts-node\dist\bin-script-deprecated.js" %*
) ELSE (
  @SETLOCAL
  @SET PATHEXT=%PATHEXT:;.JS;=;%
  node "%~dp0\..\ts-node\dist\bin-script-deprecated.js" %*
)
15  node_modules/.bin/tsc  generated  vendored  Normal file
@@ -0,0 +1,15 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")

case `uname` in
    *CYGWIN*) basedir=`cygpath -w "$basedir"`;;
esac

if [ -x "$basedir/node" ]; then
  "$basedir/node" "$basedir/../typescript/bin/tsc" "$@"
  ret=$?
else
  node "$basedir/../typescript/bin/tsc" "$@"
  ret=$?
fi
exit $ret
7  node_modules/.bin/tsc.cmd  generated  vendored  Normal file
@@ -0,0 +1,7 @@
@IF EXIST "%~dp0\node.exe" (
  "%~dp0\node.exe" "%~dp0\..\typescript\bin\tsc" %*
) ELSE (
  @SETLOCAL
  @SET PATHEXT=%PATHEXT:;.JS;=;%
  node "%~dp0\..\typescript\bin\tsc" %*
)
15  node_modules/.bin/tsserver  generated  vendored  Normal file
@@ -0,0 +1,15 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")

case `uname` in
    *CYGWIN*) basedir=`cygpath -w "$basedir"`;;
esac

if [ -x "$basedir/node" ]; then
  "$basedir/node" "$basedir/../typescript/bin/tsserver" "$@"
  ret=$?
else
  node "$basedir/../typescript/bin/tsserver" "$@"
  ret=$?
fi
exit $ret
7  node_modules/.bin/tsserver.cmd  generated  vendored  Normal file
@@ -0,0 +1,7 @@
@IF EXIST "%~dp0\node.exe" (
  "%~dp0\node.exe" "%~dp0\..\typescript\bin\tsserver" %*
) ELSE (
  @SETLOCAL
  @SET PATHEXT=%PATHEXT:;.JS;=;%
  node "%~dp0\..\typescript\bin\tsserver" %*
)
34  node_modules/.yarn-integrity  generated  vendored  Normal file
@@ -0,0 +1,34 @@
{
  "systemParams": "win32-x64-108",
  "modulesFolders": [
    "node_modules"
  ],
  "flags": [],
  "linkedModules": [],
  "topLevelPatterns": [
    "ts-node@^10.9.2",
    "typescript@^5.3.3"
  ],
  "lockfileEntries": {
    "@cspotcode/source-map-support@^0.8.0": "https://registry.yarnpkg.com/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz#00629c35a688e05a88b1cda684fb9d5e73f000a1",
    "@jridgewell/resolve-uri@^3.0.3": "https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz#7a0ee601f60f99a20c7c7c5ff0c80388c1189bd6",
    "@jridgewell/sourcemap-codec@^1.4.10": "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.15.tgz#d7c6e6755c78567a951e04ab52ef0fd26de59f32",
    "@jridgewell/trace-mapping@0.3.9": "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.9.tgz#6534fd5933a53ba7cbf3a17615e273a0d1273ff9",
    "@tsconfig/node10@^1.0.7": "https://registry.yarnpkg.com/@tsconfig/node10/-/node10-1.0.9.tgz#df4907fc07a886922637b15e02d4cebc4c0021b2",
    "@tsconfig/node12@^1.0.7": "https://registry.yarnpkg.com/@tsconfig/node12/-/node12-1.0.11.tgz#ee3def1f27d9ed66dac6e46a295cffb0152e058d",
    "@tsconfig/node14@^1.0.0": "https://registry.yarnpkg.com/@tsconfig/node14/-/node14-1.0.3.tgz#e4386316284f00b98435bf40f72f75a09dabf6c1",
    "@tsconfig/node16@^1.0.2": "https://registry.yarnpkg.com/@tsconfig/node16/-/node16-1.0.4.tgz#0b92dcc0cc1c81f6f306a381f28e31b1a56536e9",
    "acorn-walk@^8.1.1": "https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-8.3.2.tgz#7703af9415f1b6db9315d6895503862e231d34aa",
    "acorn@^8.4.1": "https://registry.yarnpkg.com/acorn/-/acorn-8.11.3.tgz#71e0b14e13a4ec160724b38fb7b0f233b1b81d7a",
    "arg@^4.1.0": "https://registry.yarnpkg.com/arg/-/arg-4.1.3.tgz#269fc7ad5b8e42cb63c896d5666017261c144089",
    "create-require@^1.1.0": "https://registry.yarnpkg.com/create-require/-/create-require-1.1.1.tgz#c1d7e8f1e5f6cfc9ff65f9cd352d37348756c333",
    "diff@^4.0.1": "https://registry.yarnpkg.com/diff/-/diff-4.0.2.tgz#60f3aecb89d5fae520c11aa19efc2bb982aade7d",
    "make-error@^1.1.1": "https://registry.yarnpkg.com/make-error/-/make-error-1.3.6.tgz#2eb2e37ea9b67c4891f684a1394799af484cf7a2",
    "ts-node@^10.9.2": "https://registry.yarnpkg.com/ts-node/-/ts-node-10.9.2.tgz#70f021c9e185bccdca820e26dc413805c101c71f",
    "typescript@^5.3.3": "https://registry.yarnpkg.com/typescript/-/typescript-5.3.3.tgz#b3ce6ba258e72e6305ba66f5c9b452aaee3ffe37",
    "v8-compile-cache-lib@^3.0.1": "https://registry.yarnpkg.com/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz#6336e8d71965cb3d35a1bbb7868445a7c05264bf",
    "yn@3.1.1": "https://registry.yarnpkg.com/yn/-/yn-3.1.1.tgz#1e87401a09d767c1d5eab26a6e4c185182d2eb50"
  },
  "files": [],
  "artifacts": {}
}
21  node_modules/@cspotcode/source-map-support/LICENSE.md  generated  vendored  Normal file
@@ -0,0 +1,21 @@
The MIT License (MIT)

Copyright (c) 2014 Evan Wallace

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
289  node_modules/@cspotcode/source-map-support/README.md  generated  vendored  Normal file
@@ -0,0 +1,289 @@
# Source Map Support

[![NPM version](https://img.shields.io/npm/v/@cspotcode/source-map-support.svg?style=flat)](https://npmjs.org/package/@cspotcode/source-map-support)
[![NPM downloads](https://img.shields.io/npm/dm/@cspotcode/source-map-support.svg?style=flat)](https://npmjs.org/package/@cspotcode/source-map-support)
[![Build status](https://img.shields.io/github/workflow/status/cspotcode/node-source-map-support/Continuous%20Integration)](https://github.com/cspotcode/node-source-map-support/actions?query=workflow%3A%22Continuous+Integration%22)

This module provides source map support for stack traces in node via the [V8 stack trace API](https://github.com/v8/v8/wiki/Stack-Trace-API). It uses the [source-map](https://github.com/mozilla/source-map) module to replace the paths and line numbers of source-mapped files with their original paths and line numbers. The output mimics node's stack trace format with the goal of making every compile-to-JS language more of a first-class citizen. Source maps are completely general (not specific to any one language) so you can use source maps with multiple compile-to-JS languages in the same node process.

## Installation and Usage

#### Node support

```
$ npm install @cspotcode/source-map-support
```

Source maps can be generated using libraries such as [source-map-index-generator](https://github.com/twolfson/source-map-index-generator). Once you have a valid source map, place a source mapping comment somewhere in the file (usually done automatically or with an option by your transpiler):

```
//# sourceMappingURL=path/to/source.map
```

If multiple sourceMappingURL comments exist in one file, the last sourceMappingURL comment will be respected (e.g. if a file mentions the comment in code, or went through multiple transpilers). The path should either be absolute or relative to the compiled file.
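For example (an illustrative sketch added by the editor, not part of the upstream README; the file and map names are hypothetical), only the final comment below is used, and its path is resolved relative to `compiled.js`:

```js
// compiled.js -- only the LAST sourceMappingURL comment in a file is honored.
var banner = '//# sourceMappingURL=mentioned-in-a-string.map'; // mentioned in code, ignored
console.log(banner.length);
//# sourceMappingURL=compiled.js.map
```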
From here you have two options.

##### CLI Usage

```bash
node -r @cspotcode/source-map-support/register compiled.js
# Or to enable hookRequire
node -r @cspotcode/source-map-support/register-hook-require compiled.js
```

##### Programmatic Usage

Put the following line at the top of the compiled file.

```js
require('@cspotcode/source-map-support').install();
```

It is also possible to install the source map support directly by requiring the `register` module, which can be handy with ES6:

```js
import '@cspotcode/source-map-support/register'

// Instead of:
import sourceMapSupport from '@cspotcode/source-map-support'
sourceMapSupport.install()
```
Note: if you're using babel-register, it includes source-map-support already.

It is also very useful with Mocha:

```
$ mocha --require @cspotcode/source-map-support/register tests/
```

#### Browser support

This library also works in Chrome. While the DevTools console already supports source maps, the V8 engine doesn't and `Error.prototype.stack` will be incorrect without this library. Everything will just work if you deploy your source files using [browserify](http://browserify.org/). Just make sure to pass the `--debug` flag to the browserify command so your source maps are included in the bundled code.
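For instance (an editor's sketch, not part of the upstream README; `main.js` and `bundle.js` are placeholder names), the flag goes straight on the browserify command line:

```sh
# --debug embeds the source map in the generated bundle
browserify --debug main.js > bundle.js
```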
This library also works if you use another build process or just include the source files directly. In this case, include the file `browser-source-map-support.js` in your page and call `sourceMapSupport.install()`. It contains the whole library already bundled for the browser using browserify.

```html
<script src="browser-source-map-support.js"></script>
<script>sourceMapSupport.install();</script>
```

This library also works if you use AMD (Asynchronous Module Definition), which is used in tools like [RequireJS](http://requirejs.org/). Just list `browser-source-map-support` as a dependency:

```html
<script>
  define(['browser-source-map-support'], function(sourceMapSupport) {
    sourceMapSupport.install();
  });
</script>
```

## Options

This module installs two things: a change to the `stack` property on `Error` objects and a handler for uncaught exceptions that mimics node's default exception handler (the handler can be seen in the demos below). You may want to disable the handler if you have your own uncaught exception handler. This can be done by passing an argument to the installer:

```js
require('@cspotcode/source-map-support').install({
  handleUncaughtExceptions: false
});
```

This module loads source maps from the filesystem by default. You can provide alternate loading behavior through a callback as shown below. For example, [Meteor](https://github.com/meteor) keeps all source maps cached in memory to avoid disk access.

```js
require('@cspotcode/source-map-support').install({
  retrieveSourceMap: function(source) {
    if (source === 'compiled.js') {
      return {
        url: 'original.js',
        map: fs.readFileSync('compiled.js.map', 'utf8')
      };
    }
    return null;
  }
});
```

The module will by default assume a browser environment if XMLHttpRequest and window are defined. If either of these do not exist it will instead assume a node environment. In some rare cases, e.g. when running a browser emulation and where both variables are also set, you can explicitly specify the environment to be either 'browser' or 'node'.

```js
require('@cspotcode/source-map-support').install({
  environment: 'node'
});
```

To support files with inline source maps, the `hookRequire` option can be specified, which will monitor all source files for inline source maps.

```js
require('@cspotcode/source-map-support').install({
  hookRequire: true
});
```

This monkey patches the `require` module loading chain, so it is not enabled by default and is not recommended for any sort of production usage.

## Demos

#### Basic Demo

original.js:

```js
throw new Error('test'); // This is the original code
```

compiled.js:

```js
require('@cspotcode/source-map-support').install();

throw new Error('test'); // This is the compiled code
// The next line defines the sourceMapping.
//# sourceMappingURL=compiled.js.map
```

compiled.js.map:

```json
{
  "version": 3,
  "file": "compiled.js",
  "sources": ["original.js"],
  "names": [],
  "mappings": ";;AAAA,MAAM,IAAI"
}
```

Run compiled.js using node (notice how the stack trace uses original.js instead of compiled.js):

```
$ node compiled.js

original.js:1
throw new Error('test'); // This is the original code
      ^
Error: test
    at Object.<anonymous> (original.js:1:7)
    at Module._compile (module.js:456:26)
    at Object.Module._extensions..js (module.js:474:10)
    at Module.load (module.js:356:32)
    at Function.Module._load (module.js:312:12)
    at Function.Module.runMain (module.js:497:10)
    at startup (node.js:119:16)
    at node.js:901:3
```

#### TypeScript Demo

demo.ts:

```typescript
declare function require(name: string);
require('@cspotcode/source-map-support').install();
class Foo {
  constructor() { this.bar(); }
  bar() { throw new Error('this is a demo'); }
}
new Foo();
```

Compile and run the file using the TypeScript compiler from the terminal:

```
$ npm install source-map-support typescript
$ node_modules/typescript/bin/tsc -sourcemap demo.ts
$ node demo.js

demo.ts:5
  bar() { throw new Error('this is a demo'); }
                ^
Error: this is a demo
    at Foo.bar (demo.ts:5:17)
    at new Foo (demo.ts:4:24)
    at Object.<anonymous> (demo.ts:7:1)
    at Module._compile (module.js:456:26)
    at Object.Module._extensions..js (module.js:474:10)
    at Module.load (module.js:356:32)
    at Function.Module._load (module.js:312:12)
    at Function.Module.runMain (module.js:497:10)
    at startup (node.js:119:16)
    at node.js:901:3
```

There is also the option to use `-r source-map-support/register` with typescript, without the need to add the `require('@cspotcode/source-map-support').install()` call to the code base:

```
$ npm install source-map-support typescript
$ node_modules/typescript/bin/tsc -sourcemap demo.ts
$ node -r source-map-support/register demo.js

demo.ts:5
  bar() { throw new Error('this is a demo'); }
                ^
Error: this is a demo
    at Foo.bar (demo.ts:5:17)
    at new Foo (demo.ts:4:24)
    at Object.<anonymous> (demo.ts:7:1)
    at Module._compile (module.js:456:26)
    at Object.Module._extensions..js (module.js:474:10)
    at Module.load (module.js:356:32)
    at Function.Module._load (module.js:312:12)
    at Function.Module.runMain (module.js:497:10)
    at startup (node.js:119:16)
    at node.js:901:3
```

#### CoffeeScript Demo

demo.coffee:

```coffee
require('@cspotcode/source-map-support').install()
foo = ->
  bar = -> throw new Error 'this is a demo'
  bar()
foo()
```

Compile and run the file using the CoffeeScript compiler from the terminal:

```sh
$ npm install @cspotcode/source-map-support coffeescript
$ node_modules/.bin/coffee --map --compile demo.coffee
$ node demo.js

demo.coffee:3
  bar = -> throw new Error 'this is a demo'
                 ^
Error: this is a demo
    at bar (demo.coffee:3:22)
    at foo (demo.coffee:4:3)
    at Object.<anonymous> (demo.coffee:5:1)
    at Object.<anonymous> (demo.coffee:1:1)
    at Module._compile (module.js:456:26)
    at Object.Module._extensions..js (module.js:474:10)
    at Module.load (module.js:356:32)
    at Function.Module._load (module.js:312:12)
    at Function.Module.runMain (module.js:497:10)
    at startup (node.js:119:16)
```

## Tests

This repo contains both automated tests for node and manual tests for the browser. The automated tests can be run using mocha (type `mocha` in the root directory). To run the manual tests:

* Build the tests using `build.js`
* Launch the HTTP server (`npm run serve-tests`) and visit
  * http://127.0.0.1:1336/amd-test
  * http://127.0.0.1:1336/browser-test
  * http://127.0.0.1:1336/browserify-test - **Currently not working** due to a bug with browserify (see [pull request #66](https://github.com/evanw/node-source-map-support/pull/66) for details).
* For `header-test`, run `server.js` inside that directory and visit http://127.0.0.1:1337/

## License

This code is available under the [MIT license](http://opensource.org/licenses/MIT).
114  node_modules/@cspotcode/source-map-support/browser-source-map-support.js  generated  vendored  Normal file
@@ -0,0 +1,114 @@
/*
* Support for source maps in V8 stack traces
* https://github.com/evanw/node-source-map-support
*/
/*
The buffer module from node.js, for the browser.
@author Feross Aboukhadijeh <feross@feross.org> <http://feross.org>
license MIT
*/
(this.define||function(R,U){this.sourceMapSupport=U()})("browser-source-map-support",function(R){(function e(C,J,A){function p(f,c){if(!J[f]){if(!C[f]){var l="function"==typeof require&&require;if(!c&&l)return l(f,!0);if(t)return t(f,!0);throw Error("Cannot find module '"+f+"'");}l=J[f]={exports:{}};C[f][0].call(l.exports,function(q){var r=C[f][1][q];return p(r?r:q)},l,l.exports,e,C,J,A)}return J[f].exports}for(var t="function"==typeof require&&require,m=0;m<A.length;m++)p(A[m]);return p})({1:[function(C,
J,A){R=C("./source-map-support")},{"./source-map-support":21}],2:[function(C,J,A){(function(e){function p(m){m=m.charCodeAt(0);if(43===m)return 62;if(47===m)return 63;if(48>m)return-1;if(58>m)return m-48+52;if(91>m)return m-65;if(123>m)return m-97+26}var t="undefined"!==typeof Uint8Array?Uint8Array:Array;e.toByteArray=function(m){function f(d){q[k++]=d}if(0<m.length%4)throw Error("Invalid string. Length must be a multiple of 4");var c=m.length;var l="="===m.charAt(c-2)?2:"="===m.charAt(c-1)?1:0;var q=
new t(3*m.length/4-l);var r=0<l?m.length-4:m.length;var k=0;for(c=0;c<r;c+=4){var u=p(m.charAt(c))<<18|p(m.charAt(c+1))<<12|p(m.charAt(c+2))<<6|p(m.charAt(c+3));f((u&16711680)>>16);f((u&65280)>>8);f(u&255)}2===l?(u=p(m.charAt(c))<<2|p(m.charAt(c+1))>>4,f(u&255)):1===l&&(u=p(m.charAt(c))<<10|p(m.charAt(c+1))<<4|p(m.charAt(c+2))>>2,f(u>>8&255),f(u&255));return q};e.fromByteArray=function(m){var f=m.length%3,c="",l;var q=0;for(l=m.length-f;q<l;q+=3){var r=(m[q]<<16)+(m[q+1]<<8)+m[q+2];r="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".charAt(r>>
18&63)+"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".charAt(r>>12&63)+"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".charAt(r>>6&63)+"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".charAt(r&63);c+=r}switch(f){case 1:r=m[m.length-1];c+="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".charAt(r>>2);c+="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".charAt(r<<4&63);c+="==";break;case 2:r=(m[m.length-2]<<8)+
m[m.length-1],c+="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".charAt(r>>10),c+="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".charAt(r>>4&63),c+="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".charAt(r<<2&63),c+="="}return c}})("undefined"===typeof A?this.base64js={}:A)},{}],3:[function(C,J,A){},{}],4:[function(C,J,A){(function(e){var p=Object.prototype.toString,t="function"===typeof e.alloc&&"function"===typeof e.allocUnsafe&&"function"===
typeof e.from;J.exports=function(m,f,c){if("number"===typeof m)throw new TypeError('"value" argument must not be a number');if("ArrayBuffer"===p.call(m).slice(8,-1)){f>>>=0;var l=m.byteLength-f;if(0>l)throw new RangeError("'offset' is out of bounds");if(void 0===c)c=l;else if(c>>>=0,c>l)throw new RangeError("'length' is out of bounds");return t?e.from(m.slice(f,f+c)):new e(new Uint8Array(m.slice(f,f+c)))}if("string"===typeof m){c=f;if("string"!==typeof c||""===c)c="utf8";if(!e.isEncoding(c))throw new TypeError('"encoding" must be a valid string encoding');
return t?e.from(m,c):new e(m,c)}return t?e.from(m):new e(m)}}).call(this,C("buffer").Buffer)},{buffer:5}],5:[function(C,J,A){function e(a,b,h){if(!(this instanceof e))return new e(a,b,h);var w=typeof a;if("number"===w)var y=0<a?a>>>0:0;else if("string"===w){if("base64"===b)for(a=(a.trim?a.trim():a.replace(/^\s+|\s+$/g,"")).replace(L,"");0!==a.length%4;)a+="=";y=e.byteLength(a,b)}else if("object"===w&&null!==a)"Buffer"===a.type&&z(a.data)&&(a=a.data),y=0<+a.length?Math.floor(+a.length):0;else throw new TypeError("must start with number, buffer, array or string");
if(this.length>G)throw new RangeError("Attempt to allocate Buffer larger than maximum size: 0x"+G.toString(16)+" bytes");if(e.TYPED_ARRAY_SUPPORT)var I=e._augment(new Uint8Array(y));else I=this,I.length=y,I._isBuffer=!0;if(e.TYPED_ARRAY_SUPPORT&&"number"===typeof a.byteLength)I._set(a);else{var K=a;if(z(K)||e.isBuffer(K)||K&&"object"===typeof K&&"number"===typeof K.length)if(e.isBuffer(a))for(b=0;b<y;b++)I[b]=a.readUInt8(b);else for(b=0;b<y;b++)I[b]=(a[b]%256+256)%256;else if("string"===w)I.write(a,
0,b);else if("number"===w&&!e.TYPED_ARRAY_SUPPORT&&!h)for(b=0;b<y;b++)I[b]=0}return I}function p(a,b,h){var w="";for(h=Math.min(a.length,h);b<h;b++)w+=String.fromCharCode(a[b]);return w}function t(a,b,h){if(0!==a%1||0>a)throw new RangeError("offset is not uint");if(a+b>h)throw new RangeError("Trying to access beyond buffer length");}function m(a,b,h,w,y,I){if(!e.isBuffer(a))throw new TypeError("buffer must be a Buffer instance");if(b>y||b<I)throw new TypeError("value is out of bounds");if(h+w>a.length)throw new TypeError("index out of range");
}function f(a,b,h,w){0>b&&(b=65535+b+1);for(var y=0,I=Math.min(a.length-h,2);y<I;y++)a[h+y]=(b&255<<8*(w?y:1-y))>>>8*(w?y:1-y)}function c(a,b,h,w){0>b&&(b=4294967295+b+1);for(var y=0,I=Math.min(a.length-h,4);y<I;y++)a[h+y]=b>>>8*(w?y:3-y)&255}function l(a,b,h,w,y,I){if(b>y||b<I)throw new TypeError("value is out of bounds");if(h+w>a.length)throw new TypeError("index out of range");}function q(a,b,h,w,y){y||l(a,b,h,4,3.4028234663852886E38,-3.4028234663852886E38);v.write(a,b,h,w,23,4);return h+4}function r(a,
b,h,w,y){y||l(a,b,h,8,1.7976931348623157E308,-1.7976931348623157E308);v.write(a,b,h,w,52,8);return h+8}function k(a){for(var b=[],h=0;h<a.length;h++){var w=a.charCodeAt(h);if(127>=w)b.push(w);else{var y=h;55296<=w&&57343>=w&&h++;w=encodeURIComponent(a.slice(y,h+1)).substr(1).split("%");for(y=0;y<w.length;y++)b.push(parseInt(w[y],16))}}return b}function u(a){for(var b=[],h=0;h<a.length;h++)b.push(a.charCodeAt(h)&255);return b}function d(a,b,h,w,y){y&&(w-=w%y);for(y=0;y<w&&!(y+h>=b.length||y>=a.length);y++)b[y+
h]=a[y];return y}function g(a){try{return decodeURIComponent(a)}catch(b){return String.fromCharCode(65533)}}var n=C("base64-js"),v=C("ieee754"),z=C("is-array");A.Buffer=e;A.SlowBuffer=e;A.INSPECT_MAX_BYTES=50;e.poolSize=8192;var G=1073741823;e.TYPED_ARRAY_SUPPORT=function(){try{var a=new ArrayBuffer(0),b=new Uint8Array(a);b.foo=function(){return 42};return 42===b.foo()&&"function"===typeof b.subarray&&0===(new Uint8Array(1)).subarray(1,1).byteLength}catch(h){return!1}}();e.isBuffer=function(a){return!(null==
a||!a._isBuffer)};e.compare=function(a,b){if(!e.isBuffer(a)||!e.isBuffer(b))throw new TypeError("Arguments must be Buffers");for(var h=a.length,w=b.length,y=0,I=Math.min(h,w);y<I&&a[y]===b[y];y++);y!==I&&(h=a[y],w=b[y]);return h<w?-1:w<h?1:0};e.isEncoding=function(a){switch(String(a).toLowerCase()){case "hex":case "utf8":case "utf-8":case "ascii":case "binary":case "base64":case "raw":case "ucs2":case "ucs-2":case "utf16le":case "utf-16le":return!0;default:return!1}};e.concat=function(a,b){if(!z(a))throw new TypeError("Usage: Buffer.concat(list[, length])");
if(0===a.length)return new e(0);if(1===a.length)return a[0];var h;if(void 0===b)for(h=b=0;h<a.length;h++)b+=a[h].length;var w=new e(b),y=0;for(h=0;h<a.length;h++){var I=a[h];I.copy(w,y);y+=I.length}return w};e.byteLength=function(a,b){a+="";switch(b||"utf8"){case "ascii":case "binary":case "raw":var h=a.length;break;case "ucs2":case "ucs-2":case "utf16le":case "utf-16le":h=2*a.length;break;case "hex":h=a.length>>>1;break;case "utf8":case "utf-8":h=k(a).length;break;case "base64":h=n.toByteArray(a).length;
break;default:h=a.length}return h};e.prototype.length=void 0;e.prototype.parent=void 0;e.prototype.toString=function(a,b,h){var w=!1;b>>>=0;h=void 0===h||Infinity===h?this.length:h>>>0;a||(a="utf8");0>b&&(b=0);h>this.length&&(h=this.length);if(h<=b)return"";for(;;)switch(a){case "hex":a=b;b=h;h=this.length;if(!a||0>a)a=0;if(!b||0>b||b>h)b=h;w="";for(h=a;h<b;h++)a=w,w=this[h],w=16>w?"0"+w.toString(16):w.toString(16),w=a+w;return w;case "utf8":case "utf-8":w=a="";for(h=Math.min(this.length,h);b<h;b++)127>=
this[b]?(a+=g(w)+String.fromCharCode(this[b]),w=""):w+="%"+this[b].toString(16);return a+g(w);case "ascii":return p(this,b,h);case "binary":return p(this,b,h);case "base64":return b=0===b&&h===this.length?n.fromByteArray(this):n.fromByteArray(this.slice(b,h)),b;case "ucs2":case "ucs-2":case "utf16le":case "utf-16le":b=this.slice(b,h);h="";for(a=0;a<b.length;a+=2)h+=String.fromCharCode(b[a]+256*b[a+1]);return h;default:if(w)throw new TypeError("Unknown encoding: "+a);a=(a+"").toLowerCase();w=!0}};
e.prototype.equals=function(a){if(!e.isBuffer(a))throw new TypeError("Argument must be a Buffer");return 0===e.compare(this,a)};e.prototype.inspect=function(){var a="",b=A.INSPECT_MAX_BYTES;0<this.length&&(a=this.toString("hex",0,b).match(/.{2}/g).join(" "),this.length>b&&(a+=" ... "));return"<Buffer "+a+">"};e.prototype.compare=function(a){if(!e.isBuffer(a))throw new TypeError("Argument must be a Buffer");return e.compare(this,a)};e.prototype.get=function(a){console.log(".get() is deprecated. Access using array indexes instead.");
return this.readUInt8(a)};e.prototype.set=function(a,b){console.log(".set() is deprecated. Access using array indexes instead.");return this.writeUInt8(a,b)};e.prototype.write=function(a,b,h,w){if(isFinite(b))isFinite(h)||(w=h,h=void 0);else{var y=w;w=b;b=h;h=y}b=Number(b)||0;y=this.length-b;h?(h=Number(h),h>y&&(h=y)):h=y;w=String(w||"utf8").toLowerCase();switch(w){case "hex":b=Number(b)||0;w=this.length-b;h?(h=Number(h),h>w&&(h=w)):h=w;w=a.length;if(0!==w%2)throw Error("Invalid hex string");h>w/
2&&(h=w/2);for(w=0;w<h;w++){y=parseInt(a.substr(2*w,2),16);if(isNaN(y))throw Error("Invalid hex string");this[b+w]=y}a=w;break;case "utf8":case "utf-8":a=d(k(a),this,b,h);break;case "ascii":a=d(u(a),this,b,h);break;case "binary":a=d(u(a),this,b,h);break;case "base64":a=d(n.toByteArray(a),this,b,h);break;case "ucs2":case "ucs-2":case "utf16le":case "utf-16le":y=[];for(var I=0;I<a.length;I++){var K=a.charCodeAt(I);w=K>>8;K%=256;y.push(K);y.push(w)}a=d(y,this,b,h,2);break;default:throw new TypeError("Unknown encoding: "+
w);}return a};e.prototype.toJSON=function(){return{type:"Buffer",data:Array.prototype.slice.call(this._arr||this,0)}};e.prototype.slice=function(a,b){var h=this.length;a=~~a;b=void 0===b?h:~~b;0>a?(a+=h,0>a&&(a=0)):a>h&&(a=h);0>b?(b+=h,0>b&&(b=0)):b>h&&(b=h);b<a&&(b=a);if(e.TYPED_ARRAY_SUPPORT)return e._augment(this.subarray(a,b));h=b-a;for(var w=new e(h,void 0,!0),y=0;y<h;y++)w[y]=this[y+a];return w};e.prototype.readUInt8=function(a,b){b||t(a,1,this.length);return this[a]};e.prototype.readUInt16LE=
function(a,b){b||t(a,2,this.length);return this[a]|this[a+1]<<8};e.prototype.readUInt16BE=function(a,b){b||t(a,2,this.length);return this[a]<<8|this[a+1]};e.prototype.readUInt32LE=function(a,b){b||t(a,4,this.length);return(this[a]|this[a+1]<<8|this[a+2]<<16)+16777216*this[a+3]};e.prototype.readUInt32BE=function(a,b){b||t(a,4,this.length);return 16777216*this[a]+(this[a+1]<<16|this[a+2]<<8|this[a+3])};e.prototype.readInt8=function(a,b){b||t(a,1,this.length);return this[a]&128?-1*(255-this[a]+1):this[a]};
e.prototype.readInt16LE=function(a,b){b||t(a,2,this.length);var h=this[a]|this[a+1]<<8;return h&32768?h|4294901760:h};e.prototype.readInt16BE=function(a,b){b||t(a,2,this.length);var h=this[a+1]|this[a]<<8;return h&32768?h|4294901760:h};e.prototype.readInt32LE=function(a,b){b||t(a,4,this.length);return this[a]|this[a+1]<<8|this[a+2]<<16|this[a+3]<<24};e.prototype.readInt32BE=function(a,b){b||t(a,4,this.length);return this[a]<<24|this[a+1]<<16|this[a+2]<<8|this[a+3]};e.prototype.readFloatLE=function(a,
b){b||t(a,4,this.length);return v.read(this,a,!0,23,4)};e.prototype.readFloatBE=function(a,b){b||t(a,4,this.length);return v.read(this,a,!1,23,4)};e.prototype.readDoubleLE=function(a,b){b||t(a,8,this.length);return v.read(this,a,!0,52,8)};e.prototype.readDoubleBE=function(a,b){b||t(a,8,this.length);return v.read(this,a,!1,52,8)};e.prototype.writeUInt8=function(a,b,h){a=+a;b>>>=0;h||m(this,a,b,1,255,0);e.TYPED_ARRAY_SUPPORT||(a=Math.floor(a));this[b]=a;return b+1};e.prototype.writeUInt16LE=function(a,
b,h){a=+a;b>>>=0;h||m(this,a,b,2,65535,0);e.TYPED_ARRAY_SUPPORT?(this[b]=a,this[b+1]=a>>>8):f(this,a,b,!0);return b+2};e.prototype.writeUInt16BE=function(a,b,h){a=+a;b>>>=0;h||m(this,a,b,2,65535,0);e.TYPED_ARRAY_SUPPORT?(this[b]=a>>>8,this[b+1]=a):f(this,a,b,!1);return b+2};e.prototype.writeUInt32LE=function(a,b,h){a=+a;b>>>=0;h||m(this,a,b,4,4294967295,0);e.TYPED_ARRAY_SUPPORT?(this[b+3]=a>>>24,this[b+2]=a>>>16,this[b+1]=a>>>8,this[b]=a):c(this,a,b,!0);return b+4};e.prototype.writeUInt32BE=function(a,
b,h){a=+a;b>>>=0;h||m(this,a,b,4,4294967295,0);e.TYPED_ARRAY_SUPPORT?(this[b]=a>>>24,this[b+1]=a>>>16,this[b+2]=a>>>8,this[b+3]=a):c(this,a,b,!1);return b+4};e.prototype.writeInt8=function(a,b,h){a=+a;b>>>=0;h||m(this,a,b,1,127,-128);e.TYPED_ARRAY_SUPPORT||(a=Math.floor(a));0>a&&(a=255+a+1);this[b]=a;return b+1};e.prototype.writeInt16LE=function(a,b,h){a=+a;b>>>=0;h||m(this,a,b,2,32767,-32768);e.TYPED_ARRAY_SUPPORT?(this[b]=a,this[b+1]=a>>>8):f(this,a,b,!0);return b+2};e.prototype.writeInt16BE=function(a,
b,h){a=+a;b>>>=0;h||m(this,a,b,2,32767,-32768);e.TYPED_ARRAY_SUPPORT?(this[b]=a>>>8,this[b+1]=a):f(this,a,b,!1);return b+2};e.prototype.writeInt32LE=function(a,b,h){a=+a;b>>>=0;h||m(this,a,b,4,2147483647,-2147483648);e.TYPED_ARRAY_SUPPORT?(this[b]=a,this[b+1]=a>>>8,this[b+2]=a>>>16,this[b+3]=a>>>24):c(this,a,b,!0);return b+4};e.prototype.writeInt32BE=function(a,b,h){a=+a;b>>>=0;h||m(this,a,b,4,2147483647,-2147483648);0>a&&(a=4294967295+a+1);e.TYPED_ARRAY_SUPPORT?(this[b]=a>>>24,this[b+1]=a>>>16,this[b+
2]=a>>>8,this[b+3]=a):c(this,a,b,!1);return b+4};e.prototype.writeFloatLE=function(a,b,h){return q(this,a,b,!0,h)};e.prototype.writeFloatBE=function(a,b,h){return q(this,a,b,!1,h)};e.prototype.writeDoubleLE=function(a,b,h){return r(this,a,b,!0,h)};e.prototype.writeDoubleBE=function(a,b,h){return r(this,a,b,!1,h)};e.prototype.copy=function(a,b,h,w){h||(h=0);w||0===w||(w=this.length);b||(b=0);if(w!==h&&0!==a.length&&0!==this.length){if(w<h)throw new TypeError("sourceEnd < sourceStart");if(0>b||b>=a.length)throw new TypeError("targetStart out of bounds");
if(0>h||h>=this.length)throw new TypeError("sourceStart out of bounds");if(0>w||w>this.length)throw new TypeError("sourceEnd out of bounds");w>this.length&&(w=this.length);a.length-b<w-h&&(w=a.length-b+h);w-=h;if(1E3>w||!e.TYPED_ARRAY_SUPPORT)for(var y=0;y<w;y++)a[y+b]=this[y+h];else a._set(this.subarray(h,h+w),b)}};e.prototype.fill=function(a,b,h){a||(a=0);b||(b=0);h||(h=this.length);if(h<b)throw new TypeError("end < start");if(h!==b&&0!==this.length){if(0>b||b>=this.length)throw new TypeError("start out of bounds");
if(0>h||h>this.length)throw new TypeError("end out of bounds");if("number"===typeof a)for(;b<h;b++)this[b]=a;else{a=k(a.toString());for(var w=a.length;b<h;b++)this[b]=a[b%w]}return this}};e.prototype.toArrayBuffer=function(){if("undefined"!==typeof Uint8Array){if(e.TYPED_ARRAY_SUPPORT)return(new e(this)).buffer;for(var a=new Uint8Array(this.length),b=0,h=a.length;b<h;b+=1)a[b]=this[b];return a.buffer}throw new TypeError("Buffer.toArrayBuffer not supported in this browser");};var D=e.prototype;e._augment=
function(a){a.constructor=e;a._isBuffer=!0;a._get=a.get;a._set=a.set;a.get=D.get;a.set=D.set;a.write=D.write;a.toString=D.toString;a.toLocaleString=D.toString;a.toJSON=D.toJSON;a.equals=D.equals;a.compare=D.compare;a.copy=D.copy;a.slice=D.slice;a.readUInt8=D.readUInt8;a.readUInt16LE=D.readUInt16LE;a.readUInt16BE=D.readUInt16BE;a.readUInt32LE=D.readUInt32LE;a.readUInt32BE=D.readUInt32BE;a.readInt8=D.readInt8;a.readInt16LE=D.readInt16LE;a.readInt16BE=D.readInt16BE;a.readInt32LE=D.readInt32LE;a.readInt32BE=
D.readInt32BE;a.readFloatLE=D.readFloatLE;a.readFloatBE=D.readFloatBE;a.readDoubleLE=D.readDoubleLE;a.readDoubleBE=D.readDoubleBE;a.writeUInt8=D.writeUInt8;a.writeUInt16LE=D.writeUInt16LE;a.writeUInt16BE=D.writeUInt16BE;a.writeUInt32LE=D.writeUInt32LE;a.writeUInt32BE=D.writeUInt32BE;a.writeInt8=D.writeInt8;a.writeInt16LE=D.writeInt16LE;a.writeInt16BE=D.writeInt16BE;a.writeInt32LE=D.writeInt32LE;a.writeInt32BE=D.writeInt32BE;a.writeFloatLE=D.writeFloatLE;a.writeFloatBE=D.writeFloatBE;a.writeDoubleLE=
D.writeDoubleLE;a.writeDoubleBE=D.writeDoubleBE;a.fill=D.fill;a.inspect=D.inspect;a.toArrayBuffer=D.toArrayBuffer;return a};var L=/[^+\/0-9A-z]/g},{"base64-js":2,ieee754:6,"is-array":7}],6:[function(C,J,A){A.read=function(e,p,t,m,f){var c=8*f-m-1;var l=(1<<c)-1,q=l>>1,r=-7;f=t?f-1:0;var k=t?-1:1,u=e[p+f];f+=k;t=u&(1<<-r)-1;u>>=-r;for(r+=c;0<r;t=256*t+e[p+f],f+=k,r-=8);c=t&(1<<-r)-1;t>>=-r;for(r+=m;0<r;c=256*c+e[p+f],f+=k,r-=8);if(0===t)t=1-q;else{if(t===l)return c?NaN:Infinity*(u?-1:1);c+=Math.pow(2,
m);t-=q}return(u?-1:1)*c*Math.pow(2,t-m)};A.write=function(e,p,t,m,f,c){var l,q=8*c-f-1,r=(1<<q)-1,k=r>>1,u=23===f?Math.pow(2,-24)-Math.pow(2,-77):0;c=m?0:c-1;var d=m?1:-1,g=0>p||0===p&&0>1/p?1:0;p=Math.abs(p);isNaN(p)||Infinity===p?(p=isNaN(p)?1:0,m=r):(m=Math.floor(Math.log(p)/Math.LN2),1>p*(l=Math.pow(2,-m))&&(m--,l*=2),p=1<=m+k?p+u/l:p+u*Math.pow(2,1-k),2<=p*l&&(m++,l/=2),m+k>=r?(p=0,m=r):1<=m+k?(p=(p*l-1)*Math.pow(2,f),m+=k):(p=p*Math.pow(2,k-1)*Math.pow(2,f),m=0));for(;8<=f;e[t+c]=p&255,c+=
d,p/=256,f-=8);m=m<<f|p;for(q+=f;0<q;e[t+c]=m&255,c+=d,m/=256,q-=8);e[t+c-d]|=128*g}},{}],7:[function(C,J,A){var e=Object.prototype.toString;J.exports=Array.isArray||function(p){return!!p&&"[object Array]"==e.call(p)}},{}],8:[function(C,J,A){(function(e){function p(c,l){for(var q=0,r=c.length-1;0<=r;r--){var k=c[r];"."===k?c.splice(r,1):".."===k?(c.splice(r,1),q++):q&&(c.splice(r,1),q--)}if(l)for(;q--;q)c.unshift("..");return c}function t(c,l){if(c.filter)return c.filter(l);for(var q=[],r=0;r<c.length;r++)l(c[r],
r,c)&&q.push(c[r]);return q}var m=/^(\/?|)([\s\S]*?)((?:\.{1,2}|[^\/]+?|)(\.[^.\/]*|))(?:[\/]*)$/;A.resolve=function(){for(var c="",l=!1,q=arguments.length-1;-1<=q&&!l;q--){var r=0<=q?arguments[q]:e.cwd();if("string"!==typeof r)throw new TypeError("Arguments to path.resolve must be strings");r&&(c=r+"/"+c,l="/"===r.charAt(0))}c=p(t(c.split("/"),function(k){return!!k}),!l).join("/");return(l?"/":"")+c||"."};A.normalize=function(c){var l=A.isAbsolute(c),q="/"===f(c,-1);(c=p(t(c.split("/"),function(r){return!!r}),
!l).join("/"))||l||(c=".");c&&q&&(c+="/");return(l?"/":"")+c};A.isAbsolute=function(c){return"/"===c.charAt(0)};A.join=function(){var c=Array.prototype.slice.call(arguments,0);return A.normalize(t(c,function(l,q){if("string"!==typeof l)throw new TypeError("Arguments to path.join must be strings");return l}).join("/"))};A.relative=function(c,l){function q(n){for(var v=0;v<n.length&&""===n[v];v++);for(var z=n.length-1;0<=z&&""===n[z];z--);return v>z?[]:n.slice(v,z-v+1)}c=A.resolve(c).substr(1);l=A.resolve(l).substr(1);
for(var r=q(c.split("/")),k=q(l.split("/")),u=Math.min(r.length,k.length),d=u,g=0;g<u;g++)if(r[g]!==k[g]){d=g;break}u=[];for(g=d;g<r.length;g++)u.push("..");u=u.concat(k.slice(d));return u.join("/")};A.sep="/";A.delimiter=":";A.dirname=function(c){var l=m.exec(c).slice(1);c=l[0];l=l[1];if(!c&&!l)return".";l&&(l=l.substr(0,l.length-1));return c+l};A.basename=function(c,l){var q=m.exec(c).slice(1)[2];l&&q.substr(-1*l.length)===l&&(q=q.substr(0,q.length-l.length));return q};A.extname=function(c){return m.exec(c).slice(1)[3]};
var f="b"==="ab".substr(-1)?function(c,l,q){return c.substr(l,q)}:function(c,l,q){0>l&&(l=c.length+l);return c.substr(l,q)}}).call(this,C("g5I+bs"))},{"g5I+bs":9}],9:[function(C,J,A){function e(){}C=J.exports={};C.nextTick=function(){if("undefined"!==typeof window&&window.setImmediate)return function(t){return window.setImmediate(t)};if("undefined"!==typeof window&&window.postMessage&&window.addEventListener){var p=[];window.addEventListener("message",function(t){var m=t.source;m!==window&&null!==
m||"process-tick"!==t.data||(t.stopPropagation(),0<p.length&&p.shift()())},!0);return function(t){p.push(t);window.postMessage("process-tick","*")}}return function(t){setTimeout(t,0)}}();C.title="browser";C.browser=!0;C.env={};C.argv=[];C.on=e;C.addListener=e;C.once=e;C.off=e;C.removeListener=e;C.removeAllListeners=e;C.emit=e;C.binding=function(p){throw Error("process.binding is not supported");};C.cwd=function(){return"/"};C.chdir=function(p){throw Error("process.chdir is not supported");}},{}],
10:[function(C,J,A){function e(){this._array=[];this._set=m?new Map:Object.create(null)}var p=C("./util"),t=Object.prototype.hasOwnProperty,m="undefined"!==typeof Map;e.fromArray=function(f,c){for(var l=new e,q=0,r=f.length;q<r;q++)l.add(f[q],c);return l};e.prototype.size=function(){return m?this._set.size:Object.getOwnPropertyNames(this._set).length};e.prototype.add=function(f,c){var l=m?f:p.toSetString(f),q=m?this.has(f):t.call(this._set,l),r=this._array.length;q&&!c||this._array.push(f);q||(m?
this._set.set(f,r):this._set[l]=r)};e.prototype.has=function(f){if(m)return this._set.has(f);f=p.toSetString(f);return t.call(this._set,f)};e.prototype.indexOf=function(f){if(m){var c=this._set.get(f);if(0<=c)return c}else if(c=p.toSetString(f),t.call(this._set,c))return this._set[c];throw Error('"'+f+'" is not in the set.');};e.prototype.at=function(f){if(0<=f&&f<this._array.length)return this._array[f];throw Error("No element indexed by "+f);};e.prototype.toArray=function(){return this._array.slice()};
A.ArraySet=e},{"./util":19}],11:[function(C,J,A){var e=C("./base64");A.encode=function(p){var t="",m=0>p?(-p<<1)+1:p<<1;do p=m&31,m>>>=5,0<m&&(p|=32),t+=e.encode(p);while(0<m);return t};A.decode=function(p,t,m){var f=p.length,c=0,l=0;do{if(t>=f)throw Error("Expected more digits in base 64 VLQ value.");var q=e.decode(p.charCodeAt(t++));if(-1===q)throw Error("Invalid base64 digit: "+p.charAt(t-1));var r=!!(q&32);q&=31;c+=q<<l;l+=5}while(r);p=c>>1;m.value=1===(c&1)?-p:p;m.rest=t}},{"./base64":12}],12:[function(C,
J,A){var e="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".split("");A.encode=function(p){if(0<=p&&p<e.length)return e[p];throw new TypeError("Must be between 0 and 63: "+p);};A.decode=function(p){return 65<=p&&90>=p?p-65:97<=p&&122>=p?p-97+26:48<=p&&57>=p?p-48+52:43==p?62:47==p?63:-1}},{}],13:[function(C,J,A){function e(p,t,m,f,c,l){var q=Math.floor((t-p)/2)+p,r=c(m,f[q],!0);return 0===r?q:0<r?1<t-q?e(q,t,m,f,c,l):l==A.LEAST_UPPER_BOUND?t<f.length?t:-1:q:1<q-p?e(p,q,m,f,c,l):l==
A.LEAST_UPPER_BOUND?q:0>p?-1:p}A.GREATEST_LOWER_BOUND=1;A.LEAST_UPPER_BOUND=2;A.search=function(p,t,m,f){if(0===t.length)return-1;p=e(-1,t.length,p,t,m,f||A.GREATEST_LOWER_BOUND);if(0>p)return-1;for(;0<=p-1&&0===m(t[p],t[p-1],!0);)--p;return p}},{}],14:[function(C,J,A){function e(){this._array=[];this._sorted=!0;this._last={generatedLine:-1,generatedColumn:0}}var p=C("./util");e.prototype.unsortedForEach=function(t,m){this._array.forEach(t,m)};e.prototype.add=function(t){var m=this._last,f=m.generatedLine,
c=t.generatedLine,l=m.generatedColumn,q=t.generatedColumn;c>f||c==f&&q>=l||0>=p.compareByGeneratedPositionsInflated(m,t)?this._last=t:this._sorted=!1;this._array.push(t)};e.prototype.toArray=function(){this._sorted||(this._array.sort(p.compareByGeneratedPositionsInflated),this._sorted=!0);return this._array};A.MappingList=e},{"./util":19}],15:[function(C,J,A){function e(t,m,f){var c=t[m];t[m]=t[f];t[f]=c}function p(t,m,f,c){if(f<c){var l=f-1;e(t,Math.round(f+Math.random()*(c-f)),c);for(var q=t[c],
r=f;r<c;r++)0>=m(t[r],q)&&(l+=1,e(t,l,r));e(t,l+1,r);l+=1;p(t,m,f,l-1);p(t,m,l+1,c)}}A.quickSort=function(t,m){p(t,m,0,t.length-1)}},{}],16:[function(C,J,A){function e(k,u){var d=k;"string"===typeof k&&(d=f.parseSourceMapInput(k));return null!=d.sections?new m(d,u):new p(d,u)}function p(k,u){var d=k;"string"===typeof k&&(d=f.parseSourceMapInput(k));var g=f.getArg(d,"version"),n=f.getArg(d,"sources"),v=f.getArg(d,"names",[]),z=f.getArg(d,"sourceRoot",null),G=f.getArg(d,"sourcesContent",null),D=f.getArg(d,
"mappings");d=f.getArg(d,"file",null);if(g!=this._version)throw Error("Unsupported version: "+g);z&&(z=f.normalize(z));n=n.map(String).map(f.normalize).map(function(L){return z&&f.isAbsolute(z)&&f.isAbsolute(L)?f.relative(z,L):L});this._names=l.fromArray(v.map(String),!0);this._sources=l.fromArray(n,!0);this.sourceRoot=z;this.sourcesContent=G;this._mappings=D;this._sourceMapURL=u;this.file=d}function t(){this.generatedColumn=this.generatedLine=0;this.name=this.originalColumn=this.originalLine=this.source=
null}function m(k,u){var d=k;"string"===typeof k&&(d=f.parseSourceMapInput(k));var g=f.getArg(d,"version");d=f.getArg(d,"sections");if(g!=this._version)throw Error("Unsupported version: "+g);this._sources=new l;this._names=new l;var n={line:-1,column:0};this._sections=d.map(function(v){if(v.url)throw Error("Support for url field in sections not implemented.");var z=f.getArg(v,"offset"),G=f.getArg(z,"line"),D=f.getArg(z,"column");if(G<n.line||G===n.line&&D<n.column)throw Error("Section offsets must be ordered and non-overlapping.");
n=z;return{generatedOffset:{generatedLine:G+1,generatedColumn:D+1},consumer:new e(f.getArg(v,"map"),u)}})}var f=C("./util"),c=C("./binary-search"),l=C("./array-set").ArraySet,q=C("./base64-vlq"),r=C("./quick-sort").quickSort;e.fromSourceMap=function(k){return p.fromSourceMap(k)};e.prototype._version=3;e.prototype.__generatedMappings=null;Object.defineProperty(e.prototype,"_generatedMappings",{configurable:!0,enumerable:!0,get:function(){this.__generatedMappings||this._parseMappings(this._mappings,
this.sourceRoot);return this.__generatedMappings}});e.prototype.__originalMappings=null;Object.defineProperty(e.prototype,"_originalMappings",{configurable:!0,enumerable:!0,get:function(){this.__originalMappings||this._parseMappings(this._mappings,this.sourceRoot);return this.__originalMappings}});e.prototype._charIsMappingSeparator=function(k,u){var d=k.charAt(u);return";"===d||","===d};e.prototype._parseMappings=function(k,u){throw Error("Subclasses must implement _parseMappings");};e.GENERATED_ORDER=
1;e.ORIGINAL_ORDER=2;e.GREATEST_LOWER_BOUND=1;e.LEAST_UPPER_BOUND=2;e.prototype.eachMapping=function(k,u,d){u=u||null;switch(d||e.GENERATED_ORDER){case e.GENERATED_ORDER:d=this._generatedMappings;break;case e.ORIGINAL_ORDER:d=this._originalMappings;break;default:throw Error("Unknown order of iteration.");}var g=this.sourceRoot;d.map(function(n){var v=null===n.source?null:this._sources.at(n.source);v=f.computeSourceURL(g,v,this._sourceMapURL);return{source:v,generatedLine:n.generatedLine,generatedColumn:n.generatedColumn,
originalLine:n.originalLine,originalColumn:n.originalColumn,name:null===n.name?null:this._names.at(n.name)}},this).forEach(k,u)};e.prototype.allGeneratedPositionsFor=function(k){var u=f.getArg(k,"line"),d={source:f.getArg(k,"source"),originalLine:u,originalColumn:f.getArg(k,"column",0)};null!=this.sourceRoot&&(d.source=f.relative(this.sourceRoot,d.source));if(!this._sources.has(d.source))return[];d.source=this._sources.indexOf(d.source);var g=[];d=this._findMapping(d,this._originalMappings,"originalLine",
"originalColumn",f.compareByOriginalPositions,c.LEAST_UPPER_BOUND);if(0<=d){var n=this._originalMappings[d];if(void 0===k.column)for(u=n.originalLine;n&&n.originalLine===u;)g.push({line:f.getArg(n,"generatedLine",null),column:f.getArg(n,"generatedColumn",null),lastColumn:f.getArg(n,"lastGeneratedColumn",null)}),n=this._originalMappings[++d];else for(k=n.originalColumn;n&&n.originalLine===u&&n.originalColumn==k;)g.push({line:f.getArg(n,"generatedLine",null),column:f.getArg(n,"generatedColumn",null),
lastColumn:f.getArg(n,"lastGeneratedColumn",null)}),n=this._originalMappings[++d]}return g};A.SourceMapConsumer=e;p.prototype=Object.create(e.prototype);p.prototype.consumer=e;p.fromSourceMap=function(k,u){var d=Object.create(p.prototype),g=d._names=l.fromArray(k._names.toArray(),!0),n=d._sources=l.fromArray(k._sources.toArray(),!0);d.sourceRoot=k._sourceRoot;d.sourcesContent=k._generateSourcesContent(d._sources.toArray(),d.sourceRoot);d.file=k._file;d._sourceMapURL=u;for(var v=k._mappings.toArray().slice(),
z=d.__generatedMappings=[],G=d.__originalMappings=[],D=0,L=v.length;D<L;D++){var a=v[D],b=new t;b.generatedLine=a.generatedLine;b.generatedColumn=a.generatedColumn;a.source&&(b.source=n.indexOf(a.source),b.originalLine=a.originalLine,b.originalColumn=a.originalColumn,a.name&&(b.name=g.indexOf(a.name)),G.push(b));z.push(b)}r(d.__originalMappings,f.compareByOriginalPositions);return d};p.prototype._version=3;Object.defineProperty(p.prototype,"sources",{get:function(){return this._sources.toArray().map(function(k){return f.computeSourceURL(this.sourceRoot,
k,this._sourceMapURL)},this)}});p.prototype._parseMappings=function(k,u){for(var d=1,g=0,n=0,v=0,z=0,G=0,D=k.length,L=0,a={},b={},h=[],w=[],y,I,K,N,P;L<D;)if(";"===k.charAt(L))d++,L++,g=0;else if(","===k.charAt(L))L++;else{y=new t;y.generatedLine=d;for(N=L;N<D&&!this._charIsMappingSeparator(k,N);N++);I=k.slice(L,N);if(K=a[I])L+=I.length;else{for(K=[];L<N;)q.decode(k,L,b),P=b.value,L=b.rest,K.push(P);if(2===K.length)throw Error("Found a source, but no line and column");if(3===K.length)throw Error("Found a source and line, but no column");
a[I]=K}y.generatedColumn=g+K[0];g=y.generatedColumn;1<K.length&&(y.source=z+K[1],z+=K[1],y.originalLine=n+K[2],n=y.originalLine,y.originalLine+=1,y.originalColumn=v+K[3],v=y.originalColumn,4<K.length&&(y.name=G+K[4],G+=K[4]));w.push(y);"number"===typeof y.originalLine&&h.push(y)}r(w,f.compareByGeneratedPositionsDeflated);this.__generatedMappings=w;r(h,f.compareByOriginalPositions);this.__originalMappings=h};p.prototype._findMapping=function(k,u,d,g,n,v){if(0>=k[d])throw new TypeError("Line must be greater than or equal to 1, got "+
k[d]);if(0>k[g])throw new TypeError("Column must be greater than or equal to 0, got "+k[g]);return c.search(k,u,n,v)};p.prototype.computeColumnSpans=function(){for(var k=0;k<this._generatedMappings.length;++k){var u=this._generatedMappings[k];if(k+1<this._generatedMappings.length){var d=this._generatedMappings[k+1];if(u.generatedLine===d.generatedLine){u.lastGeneratedColumn=d.generatedColumn-1;continue}}u.lastGeneratedColumn=Infinity}};p.prototype.originalPositionFor=function(k){var u={generatedLine:f.getArg(k,
"line"),generatedColumn:f.getArg(k,"column")};k=this._findMapping(u,this._generatedMappings,"generatedLine","generatedColumn",f.compareByGeneratedPositionsDeflated,f.getArg(k,"bias",e.GREATEST_LOWER_BOUND));if(0<=k&&(k=this._generatedMappings[k],k.generatedLine===u.generatedLine)){u=f.getArg(k,"source",null);null!==u&&(u=this._sources.at(u),u=f.computeSourceURL(this.sourceRoot,u,this._sourceMapURL));var d=f.getArg(k,"name",null);null!==d&&(d=this._names.at(d));return{source:u,line:f.getArg(k,"originalLine",
null),column:f.getArg(k,"originalColumn",null),name:d}}return{source:null,line:null,column:null,name:null}};p.prototype.hasContentsOfAllSources=function(){return this.sourcesContent?this.sourcesContent.length>=this._sources.size()&&!this.sourcesContent.some(function(k){return null==k}):!1};p.prototype.sourceContentFor=function(k,u){if(!this.sourcesContent)return null;var d=k;null!=this.sourceRoot&&(d=f.relative(this.sourceRoot,d));if(this._sources.has(d))return this.sourcesContent[this._sources.indexOf(d)];
var g=this.sources,n;for(n=0;n<g.length;++n)if(g[n]==k)return this.sourcesContent[n];var v;if(null!=this.sourceRoot&&(v=f.urlParse(this.sourceRoot))){g=d.replace(/^file:\/\//,"");if("file"==v.scheme&&this._sources.has(g))return this.sourcesContent[this._sources.indexOf(g)];if((!v.path||"/"==v.path)&&this._sources.has("/"+d))return this.sourcesContent[this._sources.indexOf("/"+d)]}if(u)return null;throw Error('"'+d+'" is not in the SourceMap.');};p.prototype.generatedPositionFor=function(k){var u=
f.getArg(k,"source");null!=this.sourceRoot&&(u=f.relative(this.sourceRoot,u));if(!this._sources.has(u))return{line:null,column:null,lastColumn:null};u=this._sources.indexOf(u);u={source:u,originalLine:f.getArg(k,"line"),originalColumn:f.getArg(k,"column")};k=this._findMapping(u,this._originalMappings,"originalLine","originalColumn",f.compareByOriginalPositions,f.getArg(k,"bias",e.GREATEST_LOWER_BOUND));return 0<=k&&(k=this._originalMappings[k],k.source===u.source)?{line:f.getArg(k,"generatedLine",
null),column:f.getArg(k,"generatedColumn",null),lastColumn:f.getArg(k,"lastGeneratedColumn",null)}:{line:null,column:null,lastColumn:null}};A.BasicSourceMapConsumer=p;m.prototype=Object.create(e.prototype);m.prototype.constructor=e;m.prototype._version=3;Object.defineProperty(m.prototype,"sources",{get:function(){for(var k=[],u=0;u<this._sections.length;u++)for(var d=0;d<this._sections[u].consumer.sources.length;d++)k.push(this._sections[u].consumer.sources[d]);return k}});m.prototype.originalPositionFor=
function(k){var u={generatedLine:f.getArg(k,"line"),generatedColumn:f.getArg(k,"column")},d=c.search(u,this._sections,function(g,n){var v=g.generatedLine-n.generatedOffset.generatedLine;return v?v:g.generatedColumn-n.generatedOffset.generatedColumn});return(d=this._sections[d])?d.consumer.originalPositionFor({line:u.generatedLine-(d.generatedOffset.generatedLine-1),column:u.generatedColumn-(d.generatedOffset.generatedLine===u.generatedLine?d.generatedOffset.generatedColumn-1:0),bias:k.bias}):{source:null,
line:null,column:null,name:null}};m.prototype.hasContentsOfAllSources=function(){return this._sections.every(function(k){return k.consumer.hasContentsOfAllSources()})};m.prototype.sourceContentFor=function(k,u){for(var d=0;d<this._sections.length;d++){var g=this._sections[d].consumer.sourceContentFor(k,!0);if(g)return g}if(u)return null;throw Error('"'+k+'" is not in the SourceMap.');};m.prototype.generatedPositionFor=function(k){for(var u=0;u<this._sections.length;u++){var d=this._sections[u];if(-1!==
d.consumer.sources.indexOf(f.getArg(k,"source"))){var g=d.consumer.generatedPositionFor(k);if(g)return{line:g.line+(d.generatedOffset.generatedLine-1),column:g.column+(d.generatedOffset.generatedLine===g.line?d.generatedOffset.generatedColumn-1:0)}}}return{line:null,column:null}};m.prototype._parseMappings=function(k,u){this.__generatedMappings=[];this.__originalMappings=[];for(var d=0;d<this._sections.length;d++)for(var g=this._sections[d],n=g.consumer._generatedMappings,v=0;v<n.length;v++){var z=
n[v],G=g.consumer._sources.at(z.source);G=f.computeSourceURL(g.consumer.sourceRoot,G,this._sourceMapURL);this._sources.add(G);G=this._sources.indexOf(G);var D=null;z.name&&(D=g.consumer._names.at(z.name),this._names.add(D),D=this._names.indexOf(D));z={source:G,generatedLine:z.generatedLine+(g.generatedOffset.generatedLine-1),generatedColumn:z.generatedColumn+(g.generatedOffset.generatedLine===z.generatedLine?g.generatedOffset.generatedColumn-1:0),originalLine:z.originalLine,originalColumn:z.originalColumn,
name:D};this.__generatedMappings.push(z);"number"===typeof z.originalLine&&this.__originalMappings.push(z)}r(this.__generatedMappings,f.compareByGeneratedPositionsDeflated);r(this.__originalMappings,f.compareByOriginalPositions)};A.IndexedSourceMapConsumer=m},{"./array-set":10,"./base64-vlq":11,"./binary-search":13,"./quick-sort":15,"./util":19}],17:[function(C,J,A){function e(c){c||(c={});this._file=t.getArg(c,"file",null);this._sourceRoot=t.getArg(c,"sourceRoot",null);this._skipValidation=t.getArg(c,
"skipValidation",!1);this._sources=new m;this._names=new m;this._mappings=new f;this._sourcesContents=null}var p=C("./base64-vlq"),t=C("./util"),m=C("./array-set").ArraySet,f=C("./mapping-list").MappingList;e.prototype._version=3;e.fromSourceMap=function(c){var l=c.sourceRoot,q=new e({file:c.file,sourceRoot:l});c.eachMapping(function(r){var k={generated:{line:r.generatedLine,column:r.generatedColumn}};null!=r.source&&(k.source=r.source,null!=l&&(k.source=t.relative(l,k.source)),k.original={line:r.originalLine,
column:r.originalColumn},null!=r.name&&(k.name=r.name));q.addMapping(k)});c.sources.forEach(function(r){var k=r;null!==l&&(k=t.relative(l,r));q._sources.has(k)||q._sources.add(k);k=c.sourceContentFor(r);null!=k&&q.setSourceContent(r,k)});return q};e.prototype.addMapping=function(c){var l=t.getArg(c,"generated"),q=t.getArg(c,"original",null),r=t.getArg(c,"source",null);c=t.getArg(c,"name",null);this._skipValidation||this._validateMapping(l,q,r,c);null!=r&&(r=String(r),this._sources.has(r)||this._sources.add(r));
null!=c&&(c=String(c),this._names.has(c)||this._names.add(c));this._mappings.add({generatedLine:l.line,generatedColumn:l.column,originalLine:null!=q&&q.line,originalColumn:null!=q&&q.column,source:r,name:c})};e.prototype.setSourceContent=function(c,l){var q=c;null!=this._sourceRoot&&(q=t.relative(this._sourceRoot,q));null!=l?(this._sourcesContents||(this._sourcesContents=Object.create(null)),this._sourcesContents[t.toSetString(q)]=l):this._sourcesContents&&(delete this._sourcesContents[t.toSetString(q)],
0===Object.keys(this._sourcesContents).length&&(this._sourcesContents=null))};e.prototype.applySourceMap=function(c,l,q){var r=l;if(null==l){if(null==c.file)throw Error('SourceMapGenerator.prototype.applySourceMap requires either an explicit source file, or the source map\'s "file" property. Both were omitted.');r=c.file}var k=this._sourceRoot;null!=k&&(r=t.relative(k,r));var u=new m,d=new m;this._mappings.unsortedForEach(function(g){if(g.source===r&&null!=g.originalLine){var n=c.originalPositionFor({line:g.originalLine,
column:g.originalColumn});null!=n.source&&(g.source=n.source,null!=q&&(g.source=t.join(q,g.source)),null!=k&&(g.source=t.relative(k,g.source)),g.originalLine=n.line,g.originalColumn=n.column,null!=n.name&&(g.name=n.name))}n=g.source;null==n||u.has(n)||u.add(n);g=g.name;null==g||d.has(g)||d.add(g)},this);this._sources=u;this._names=d;c.sources.forEach(function(g){var n=c.sourceContentFor(g);null!=n&&(null!=q&&(g=t.join(q,g)),null!=k&&(g=t.relative(k,g)),this.setSourceContent(g,n))},this)};e.prototype._validateMapping=
function(c,l,q,r){if(l&&"number"!==typeof l.line&&"number"!==typeof l.column)throw Error("original.line and original.column are not numbers -- you probably meant to omit the original mapping entirely and only map the generated position. If so, pass null for the original mapping instead of an object with empty or null values.");if(!(c&&"line"in c&&"column"in c&&0<c.line&&0<=c.column&&!l&&!q&&!r||c&&"line"in c&&"column"in c&&l&&"line"in l&&"column"in l&&0<c.line&&0<=c.column&&0<l.line&&0<=l.column&&
q))throw Error("Invalid mapping: "+JSON.stringify({generated:c,source:q,original:l,name:r}));};e.prototype._serializeMappings=function(){for(var c=0,l=1,q=0,r=0,k=0,u=0,d="",g,n,v,z=this._mappings.toArray(),G=0,D=z.length;G<D;G++){n=z[G];g="";if(n.generatedLine!==l)for(c=0;n.generatedLine!==l;)g+=";",l++;else if(0<G){if(!t.compareByGeneratedPositionsInflated(n,z[G-1]))continue;g+=","}g+=p.encode(n.generatedColumn-c);c=n.generatedColumn;null!=n.source&&(v=this._sources.indexOf(n.source),g+=p.encode(v-
u),u=v,g+=p.encode(n.originalLine-1-r),r=n.originalLine-1,g+=p.encode(n.originalColumn-q),q=n.originalColumn,null!=n.name&&(n=this._names.indexOf(n.name),g+=p.encode(n-k),k=n));d+=g}return d};e.prototype._generateSourcesContent=function(c,l){return c.map(function(q){if(!this._sourcesContents)return null;null!=l&&(q=t.relative(l,q));q=t.toSetString(q);return Object.prototype.hasOwnProperty.call(this._sourcesContents,q)?this._sourcesContents[q]:null},this)};e.prototype.toJSON=function(){var c={version:this._version,
sources:this._sources.toArray(),names:this._names.toArray(),mappings:this._serializeMappings()};null!=this._file&&(c.file=this._file);null!=this._sourceRoot&&(c.sourceRoot=this._sourceRoot);this._sourcesContents&&(c.sourcesContent=this._generateSourcesContent(c.sources,c.sourceRoot));return c};e.prototype.toString=function(){return JSON.stringify(this.toJSON())};A.SourceMapGenerator=e},{"./array-set":10,"./base64-vlq":11,"./mapping-list":14,"./util":19}],18:[function(C,J,A){function e(f,c,l,q,r){this.children=
[];this.sourceContents={};this.line=null==f?null:f;this.column=null==c?null:c;this.source=null==l?null:l;this.name=null==r?null:r;this.$$$isSourceNode$$$=!0;null!=q&&this.add(q)}var p=C("./source-map-generator").SourceMapGenerator,t=C("./util"),m=/(\r?\n)/;e.fromStringWithSourceMap=function(f,c,l){function q(z,G){if(null===z||void 0===z.source)r.add(G);else{var D=l?t.join(l,z.source):z.source;r.add(new e(z.originalLine,z.originalColumn,D,G,z.name))}}var r=new e,k=f.split(m),u=0,d=function(){var z=
u<k.length?k[u++]:void 0,G=(u<k.length?k[u++]:void 0)||"";return z+G},g=1,n=0,v=null;c.eachMapping(function(z){if(null!==v)if(g<z.generatedLine)q(v,d()),g++,n=0;else{var G=k[u]||"",D=G.substr(0,z.generatedColumn-n);k[u]=G.substr(z.generatedColumn-n);n=z.generatedColumn;q(v,D);v=z;return}for(;g<z.generatedLine;)r.add(d()),g++;n<z.generatedColumn&&(G=k[u]||"",r.add(G.substr(0,z.generatedColumn)),k[u]=G.substr(z.generatedColumn),n=z.generatedColumn);v=z},this);u<k.length&&(v&&q(v,d()),r.add(k.splice(u).join("")));
c.sources.forEach(function(z){var G=c.sourceContentFor(z);null!=G&&(null!=l&&(z=t.join(l,z)),r.setSourceContent(z,G))});return r};e.prototype.add=function(f){if(Array.isArray(f))f.forEach(function(c){this.add(c)},this);else if(f.$$$isSourceNode$$$||"string"===typeof f)f&&this.children.push(f);else throw new TypeError("Expected a SourceNode, string, or an array of SourceNodes and strings. Got "+f);return this};e.prototype.prepend=function(f){if(Array.isArray(f))for(var c=f.length-1;0<=c;c--)this.prepend(f[c]);
else if(f.$$$isSourceNode$$$||"string"===typeof f)this.children.unshift(f);else throw new TypeError("Expected a SourceNode, string, or an array of SourceNodes and strings. Got "+f);return this};e.prototype.walk=function(f){for(var c,l=0,q=this.children.length;l<q;l++)c=this.children[l],c.$$$isSourceNode$$$?c.walk(f):""!==c&&f(c,{source:this.source,line:this.line,column:this.column,name:this.name})};e.prototype.join=function(f){var c,l=this.children.length;if(0<l){var q=[];for(c=0;c<l-1;c++)q.push(this.children[c]),
q.push(f);q.push(this.children[c]);this.children=q}return this};e.prototype.replaceRight=function(f,c){var l=this.children[this.children.length-1];l.$$$isSourceNode$$$?l.replaceRight(f,c):"string"===typeof l?this.children[this.children.length-1]=l.replace(f,c):this.children.push("".replace(f,c));return this};e.prototype.setSourceContent=function(f,c){this.sourceContents[t.toSetString(f)]=c};e.prototype.walkSourceContents=function(f){for(var c=0,l=this.children.length;c<l;c++)this.children[c].$$$isSourceNode$$$&&
this.children[c].walkSourceContents(f);var q=Object.keys(this.sourceContents);c=0;for(l=q.length;c<l;c++)f(t.fromSetString(q[c]),this.sourceContents[q[c]])};e.prototype.toString=function(){var f="";this.walk(function(c){f+=c});return f};e.prototype.toStringWithSourceMap=function(f){var c="",l=1,q=0,r=new p(f),k=!1,u=null,d=null,g=null,n=null;this.walk(function(v,z){c+=v;null!==z.source&&null!==z.line&&null!==z.column?(u===z.source&&d===z.line&&g===z.column&&n===z.name||r.addMapping({source:z.source,
original:{line:z.line,column:z.column},generated:{line:l,column:q},name:z.name}),u=z.source,d=z.line,g=z.column,n=z.name,k=!0):k&&(r.addMapping({generated:{line:l,column:q}}),u=null,k=!1);for(var G=0,D=v.length;G<D;G++)10===v.charCodeAt(G)?(l++,q=0,G+1===D?(u=null,k=!1):k&&r.addMapping({source:z.source,original:{line:z.line,column:z.column},generated:{line:l,column:q},name:z.name})):q++});this.walkSourceContents(function(v,z){r.setSourceContent(v,z)});return{code:c,map:r}};A.SourceNode=e},{"./source-map-generator":17,
"./util":19}],19:[function(C,J,A){function e(d){return(d=d.match(k))?{scheme:d[1],auth:d[2],host:d[3],port:d[4],path:d[5]}:null}function p(d){var g="";d.scheme&&(g+=d.scheme+":");g+="//";d.auth&&(g+=d.auth+"@");d.host&&(g+=d.host);d.port&&(g+=":"+d.port);d.path&&(g+=d.path);return g}function t(d){var g=d,n=e(d);if(n){if(!n.path)return d;g=n.path}d=A.isAbsolute(g);g=g.split(/\/+/);for(var v,z=0,G=g.length-1;0<=G;G--)v=g[G],"."===v?g.splice(G,1):".."===v?z++:0<z&&(""===v?(g.splice(G+1,z),z=0):(g.splice(G,
2),z--));g=g.join("/");""===g&&(g=d?"/":".");return n?(n.path=g,p(n)):g}function m(d,g){""===d&&(d=".");""===g&&(g=".");var n=e(g),v=e(d);v&&(d=v.path||"/");if(n&&!n.scheme)return v&&(n.scheme=v.scheme),p(n);if(n||g.match(u))return g;if(v&&!v.host&&!v.path)return v.host=g,p(v);n="/"===g.charAt(0)?g:t(d.replace(/\/+$/,"")+"/"+g);return v?(v.path=n,p(v)):n}function f(d){return d}function c(d){return q(d)?"$"+d:d}function l(d){return q(d)?d.slice(1):d}function q(d){if(!d)return!1;var g=d.length;if(9>
g||95!==d.charCodeAt(g-1)||95!==d.charCodeAt(g-2)||111!==d.charCodeAt(g-3)||116!==d.charCodeAt(g-4)||111!==d.charCodeAt(g-5)||114!==d.charCodeAt(g-6)||112!==d.charCodeAt(g-7)||95!==d.charCodeAt(g-8)||95!==d.charCodeAt(g-9))return!1;for(g-=10;0<=g;g--)if(36!==d.charCodeAt(g))return!1;return!0}function r(d,g){return d===g?0:null===d?1:null===g?-1:d>g?1:-1}A.getArg=function(d,g,n){if(g in d)return d[g];if(3===arguments.length)return n;throw Error('"'+g+'" is a required argument.');};var k=/^(?:([\w+\-.]+):)?\/\/(?:(\w+:\w+)@)?([\w.-]*)(?::(\d+))?(.*)$/,
u=/^data:.+,.+$/;A.urlParse=e;A.urlGenerate=p;A.normalize=t;A.join=m;A.isAbsolute=function(d){return"/"===d.charAt(0)||k.test(d)};A.relative=function(d,g){""===d&&(d=".");d=d.replace(/\/$/,"");for(var n=0;0!==g.indexOf(d+"/");){var v=d.lastIndexOf("/");if(0>v)return g;d=d.slice(0,v);if(d.match(/^([^\/]+:\/)?\/*$/))return g;++n}return Array(n+1).join("../")+g.substr(d.length+1)};C=!("__proto__"in Object.create(null));A.toSetString=C?f:c;A.fromSetString=C?f:l;A.compareByOriginalPositions=function(d,
g,n){var v=r(d.source,g.source);if(0!==v)return v;v=d.originalLine-g.originalLine;if(0!==v)return v;v=d.originalColumn-g.originalColumn;if(0!==v||n)return v;v=d.generatedColumn-g.generatedColumn;if(0!==v)return v;v=d.generatedLine-g.generatedLine;return 0!==v?v:r(d.name,g.name)};A.compareByGeneratedPositionsDeflated=function(d,g,n){var v=d.generatedLine-g.generatedLine;if(0!==v)return v;v=d.generatedColumn-g.generatedColumn;if(0!==v||n)return v;v=r(d.source,g.source);if(0!==v)return v;v=d.originalLine-
g.originalLine;if(0!==v)return v;v=d.originalColumn-g.originalColumn;return 0!==v?v:r(d.name,g.name)};A.compareByGeneratedPositionsInflated=function(d,g){var n=d.generatedLine-g.generatedLine;if(0!==n)return n;n=d.generatedColumn-g.generatedColumn;if(0!==n)return n;n=r(d.source,g.source);if(0!==n)return n;n=d.originalLine-g.originalLine;if(0!==n)return n;n=d.originalColumn-g.originalColumn;return 0!==n?n:r(d.name,g.name)};A.parseSourceMapInput=function(d){return JSON.parse(d.replace(/^\)]}'[^\n]*\n/,
""))};A.computeSourceURL=function(d,g,n){g=g||"";d&&("/"!==d[d.length-1]&&"/"!==g[0]&&(d+="/"),g=d+g);if(n){d=e(n);if(!d)throw Error("sourceMapURL could not be parsed");d.path&&(n=d.path.lastIndexOf("/"),0<=n&&(d.path=d.path.substring(0,n+1)));g=m(p(d),g)}return t(g)}},{}],20:[function(C,J,A){A.SourceMapGenerator=C("./lib/source-map-generator").SourceMapGenerator;A.SourceMapConsumer=C("./lib/source-map-consumer").SourceMapConsumer;A.SourceNode=C("./lib/source-node").SourceNode},{"./lib/source-map-consumer":16,
"./lib/source-map-generator":17,"./lib/source-node":18}],21:[function(C,J,A){(function(e){function p(){return"browser"===a?!0:"node"===a?!1:"undefined"!==typeof window&&"function"===typeof XMLHttpRequest&&!(window.require&&window.module&&window.process&&"renderer"===window.process.type)}function t(x){return function(B){for(var F=0;F<x.length;F++){var E=x[F](B);if(E)return E}return null}}function m(x,B){if(!x)return B;var F=n.dirname(x),E=/^\w+:\/\/[^\/]*/.exec(F);E=E?E[0]:"";var H=F.slice(E.length);
return E&&/^\/\w:/.test(H)?(E+="/",E+n.resolve(F.slice(E.length),B).replace(/\\/g,"/")):E+n.resolve(F.slice(E.length),B)}function f(x){var B=h[x.source];if(!B){var F=N(x.source);F?(B=h[x.source]={url:F.url,map:new g(F.map)},B.map.sourcesContent&&B.map.sources.forEach(function(E,H){var M=B.map.sourcesContent[H];if(M){var S=m(B.url,E);b[S]=M}})):B=h[x.source]={url:null,map:null}}return B&&B.map&&"function"===typeof B.map.originalPositionFor&&(F=B.map.originalPositionFor(x),null!==F.source)?(F.source=
m(B.url,F.source),F):x}function c(x){var B=/^eval at ([^(]+) \((.+):(\d+):(\d+)\)$/.exec(x);return B?(x=f({source:B[2],line:+B[3],column:B[4]-1}),"eval at "+B[1]+" ("+x.source+":"+x.line+":"+(x.column+1)+")"):(B=/^eval at ([^(]+) \((.+)\)$/.exec(x))?"eval at "+B[1]+" ("+c(B[2])+")":x}function l(){var x="";if(this.isNative())x="native";else{var B=this.getScriptNameOrSourceURL();!B&&this.isEval()&&(x=this.getEvalOrigin(),x+=", ");x=B?x+B:x+"<anonymous>";B=this.getLineNumber();null!=B&&(x+=":"+B,(B=
this.getColumnNumber())&&(x+=":"+B))}B="";var F=this.getFunctionName(),E=!0,H=this.isConstructor();if(this.isToplevel()||H)H?B+="new "+(F||"<anonymous>"):F?B+=F:(B+=x,E=!1);else{H=this.getTypeName();"[object Object]"===H&&(H="null");var M=this.getMethodName();F?(H&&0!=F.indexOf(H)&&(B+=H+"."),B+=F,M&&F.indexOf("."+M)!=F.length-M.length-1&&(B+=" [as "+M+"]")):B+=H+"."+(M||"<anonymous>")}E&&(B+=" ("+x+")");return B}function q(x){var B={};Object.getOwnPropertyNames(Object.getPrototypeOf(x)).forEach(function(F){B[F]=
/^(?:is|get)/.test(F)?function(){return x[F].call(x)}:x[F]});B.toString=l;return B}function r(x,B){void 0===B&&(B={nextPosition:null,curPosition:null});if(x.isNative())return B.curPosition=null,x;var F=x.getFileName()||x.getScriptNameOrSourceURL();if(F){var E=x.getLineNumber(),H=x.getColumnNumber()-1,M=/^v(10\.1[6-9]|10\.[2-9][0-9]|10\.[0-9]{3,}|1[2-9]\d*|[2-9]\d|\d{3,}|11\.11)/,S=M.test;var V="object"===typeof e&&null!==e?e.version:"";M=S.call(M,V)?0:62;1===E&&H>M&&!p()&&!x.isEval()&&(H-=M);var O=
f({source:F,line:E,column:H});B.curPosition=O;x=q(x);var T=x.getFunctionName;x.getFunctionName=function(){return null==B.nextPosition?T():B.nextPosition.name||T()};x.getFileName=function(){return O.source};x.getLineNumber=function(){return O.line};x.getColumnNumber=function(){return O.column+1};x.getScriptNameOrSourceURL=function(){return O.source};return x}var Q=x.isEval()&&x.getEvalOrigin();Q&&(Q=c(Q),x=q(x),x.getEvalOrigin=function(){return Q});return x}function k(x,B){L&&(b={},h={});for(var F=
(x.name||"Error")+": "+(x.message||""),E={nextPosition:null,curPosition:null},H=[],M=B.length-1;0<=M;M--)H.push("\n at "+r(B[M],E)),E.nextPosition=E.curPosition;E.curPosition=E.nextPosition=null;return F+H.reverse().join("")}function u(x){var B=/\n at [^(]+ \((.*):(\d+):(\d+)\)/.exec(x.stack);if(B){x=B[1];var F=+B[2];B=+B[3];var E=b[x];if(!E&&v&&v.existsSync(x))try{E=v.readFileSync(x,"utf8")}catch(H){E=""}if(E&&(E=E.split(/(?:\r\n|\r|\n)/)[F-1]))return x+":"+F+"\n"+E+"\n"+Array(B).join(" ")+
"^"}return null}function d(){var x=e.emit;e.emit=function(B){if("uncaughtException"===B){var F=arguments[1]&&arguments[1].stack,E=0<this.listeners(B).length;if(F&&!E){F=arguments[1];E=u(F);var H="object"===typeof e&&null!==e?e.stderr:void 0;H&&H._handle&&H._handle.setBlocking&&H._handle.setBlocking(!0);E&&(console.error(),console.error(E));console.error(F.stack);"object"===typeof e&&null!==e&&"function"===typeof e.exit&&e.exit(1);return}}return x.apply(this,arguments)}}var g=C("source-map").SourceMapConsumer,
n=C("path");try{var v=C("fs");v.existsSync&&v.readFileSync||(v=null)}catch(x){}var z=C("buffer-from"),G=!1,D=!1,L=!1,a="auto",b={},h={},w=/^data:application\/json[^,]+base64,/,y=[],I=[],K=t(y);y.push(function(x){x=x.trim();/^file:/.test(x)&&(x=x.replace(/file:\/\/\/(\w:)?/,function(E,H){return H?"":"/"}));if(x in b)return b[x];var B="";try{if(v)v.existsSync(x)&&(B=v.readFileSync(x,"utf8"));else{var F=new XMLHttpRequest;F.open("GET",x,!1);F.send(null);4===F.readyState&&200===F.status&&(B=F.responseText)}}catch(E){}return b[x]=
B});var N=t(I);I.push(function(x){a:{if(p())try{var B=new XMLHttpRequest;B.open("GET",x,!1);B.send(null);var F=B.getResponseHeader("SourceMap")||B.getResponseHeader("X-SourceMap");if(F){var E=F;break a}}catch(M){}E=K(x);B=/(?:\/\/[@#][\s]*sourceMappingURL=([^\s'"]+)[\s]*$)|(?:\/\*[@#][\s]*sourceMappingURL=([^\s*'"]+)[\s]*(?:\*\/)[\s]*$)/mg;for(var H;F=B.exec(E);)H=F;E=H?H[1]:null}if(!E)return null;w.test(E)?(H=E.slice(E.indexOf(",")+1),H=z(H,"base64").toString(),E=x):(E=m(x,E),H=K(E));return H?{url:E,
map:H}:null});var P=y.slice(0),W=I.slice(0);A.wrapCallSite=r;A.getErrorSource=u;A.mapSourcePosition=f;A.retrieveSourceMap=N;A.install=function(x){x=x||{};if(x.environment&&(a=x.environment,-1===["node","browser","auto"].indexOf(a)))throw Error("environment "+a+" was unknown. Available options are {auto, browser, node}");x.retrieveFile&&(x.overrideRetrieveFile&&(y.length=0),y.unshift(x.retrieveFile));x.retrieveSourceMap&&(x.overrideRetrieveSourceMap&&(I.length=0),I.unshift(x.retrieveSourceMap));if(x.hookRequire&&
!p()){var B=J.require("module"),F=B.prototype._compile;F.__sourceMapSupport||(B.prototype._compile=function(E,H){b[H]=E;h[H]=void 0;return F.call(this,E,H)},B.prototype._compile.__sourceMapSupport=!0)}L||(L="emptyCacheBetweenOperations"in x?x.emptyCacheBetweenOperations:!1);G||(G=!0,Error.prepareStackTrace=k);if(!D){x="handleUncaughtExceptions"in x?x.handleUncaughtExceptions:!0;try{!1===J.require("worker_threads").isMainThread&&(x=!1)}catch(E){}x&&"object"===typeof e&&null!==e&&"function"===typeof e.on&&
(D=!0,d())}};A.resetRetrieveHandlers=function(){y.length=0;I.length=0;y=P.slice(0);I=W.slice(0);N=t(I);K=t(y)}}).call(this,C("g5I+bs"))},{"buffer-from":4,fs:3,"g5I+bs":9,path:8,"source-map":20}]},{},[1]);return R});
50
node_modules/@cspotcode/source-map-support/package.json
generated
vendored
Normal file
@ -0,0 +1,50 @@
{
  "name": "@cspotcode/source-map-support",
  "description": "Fixes stack traces for files with source maps",
  "version": "0.8.1",
  "main": "./source-map-support.js",
  "types": "./source-map-support.d.ts",
  "scripts": {
    "build": "node build.js",
    "serve-tests": "http-server -p 1336",
    "test": "mocha"
  },
  "files": [
    "/register.d.ts",
    "/register.js",
    "/register-hook-require.d.ts",
    "/register-hook-require.js",
    "/source-map-support.d.ts",
    "/source-map-support.js",
    "/browser-source-map-support.js"
  ],
  "dependencies": {
    "@jridgewell/trace-mapping": "0.3.9"
  },
  "devDependencies": {
    "@types/lodash": "^4.14.182",
    "browserify": "^4.2.3",
    "coffeescript": "^1.12.7",
    "http-server": "^0.11.1",
    "lodash": "^4.17.21",
    "mocha": "^3.5.3",
    "semver": "^7.3.7",
    "source-map": "0.6.1",
    "webpack": "^1.15.0"
  },
  "repository": {
    "type": "git",
    "url": "https://github.com/cspotcode/node-source-map-support"
  },
  "bugs": {
    "url": "https://github.com/cspotcode/node-source-map-support/issues"
  },
  "license": "MIT",
  "engines": {
    "node": ">=12"
  },
  "volta": {
    "node": "16.11.0",
    "npm": "7.24.2"
  }
}
7
node_modules/@cspotcode/source-map-support/register-hook-require.d.ts
generated
vendored
Normal file
@ -0,0 +1,7 @@
// tslint:disable:no-useless-files

// For following usage:
// import '@cspotcode/source-map-support/register-hook-require'
// Instead of:
// import sourceMapSupport from '@cspotcode/source-map-support'
// sourceMapSupport.install({hookRequire: true})
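The two entry points described above are interchangeable; a minimal sketch of both styles, assuming the package resolves as @cspotcode/source-map-support:

// Side-effect entry point: one line, installs with require hooking enabled.
require('@cspotcode/source-map-support/register-hook-require');

// Equivalent explicit call, useful when other install() options are needed too.
const sourceMapSupport = require('@cspotcode/source-map-support');
sourceMapSupport.install({ hookRequire: true });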
3
node_modules/@cspotcode/source-map-support/register-hook-require.js
generated
vendored
Normal file
@ -0,0 +1,3 @@
require('./').install({
  hookRequire: true
});
7
node_modules/@cspotcode/source-map-support/register.d.ts
generated
vendored
Normal file
@ -0,0 +1,7 @@
// tslint:disable:no-useless-files

// For following usage:
// import '@cspotcode/source-map-support/register'
// Instead of:
// import sourceMapSupport from '@cspotcode/source-map-support'
// sourceMapSupport.install()
1
node_modules/@cspotcode/source-map-support/register.js
generated
vendored
Normal file
@ -0,0 +1 @@
require('./').install();
76
node_modules/@cspotcode/source-map-support/source-map-support.d.ts
generated
vendored
Normal file
@ -0,0 +1,76 @@
// Type definitions for source-map-support 0.5
// Project: https://github.com/evanw/node-source-map-support
// Definitions by: Bart van der Schoor <https://github.com/Bartvds>
//                 Jason Cheatham <https://github.com/jason0x43>
//                 Alcedo Nathaniel De Guzman Jr <https://github.com/natealcedo>
//                 Griffin Yourick <https://github.com/tough-griff>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped

export interface RawSourceMap {
    version: 3;
    sources: string[];
    names: string[];
    sourceRoot?: string;
    sourcesContent?: string[];
    mappings: string;
    file: string;
}

/**
 * Output of retrieveSourceMap().
 * From source-map-support:
 * The map field may be either a string or the parsed JSON object (i.e.,
 * it must be a valid argument to the SourceMapConsumer constructor).
 */
export interface UrlAndMap {
    url: string;
    map: string | RawSourceMap;
}

/**
 * Options to install().
 */
export interface Options {
    handleUncaughtExceptions?: boolean | undefined;
    hookRequire?: boolean | undefined;
    emptyCacheBetweenOperations?: boolean | undefined;
    environment?: 'auto' | 'browser' | 'node' | undefined;
    overrideRetrieveFile?: boolean | undefined;
    overrideRetrieveSourceMap?: boolean | undefined;
    retrieveFile?(path: string): string;
    retrieveSourceMap?(source: string): UrlAndMap | null;
    /**
     * Set false to disable redirection of require / import `source-map-support` to `@cspotcode/source-map-support`
     */
    redirectConflictingLibrary?: boolean;
    /**
     * Callback will be called every time we redirect due to `redirectConflictingLibrary`
     * This allows consumers to log helpful warnings if they choose.
     * @param parent NodeJS.Module which made the require() or require.resolve() call
     * @param options options object internally passed to node's `_resolveFilename` hook
     */
    onConflictingLibraryRedirect?: (request: string, parent: any, isMain: boolean, options: any, redirectedRequest: string) => void;
}

export interface Position {
    source: string;
    line: number;
    column: number;
}

export function wrapCallSite(frame: any /* StackFrame */): any /* StackFrame */;
export function getErrorSource(error: Error): string | null;
export function mapSourcePosition(position: Position): Position;
export function retrieveSourceMap(source: string): UrlAndMap | null;
export function resetRetrieveHandlers(): void;

/**
 * Install SourceMap support.
 * @param options Can be used to e.g. disable uncaughtException handler.
 */
export function install(options?: Options): void;

/**
 * Uninstall SourceMap support.
 */
export function uninstall(): void;
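A short usage sketch of the install() options declared above; the in-memory map store and file name are hypothetical and only illustrate the declared retrieveSourceMap signature:

const sourceMapSupport = require('@cspotcode/source-map-support');

// Hypothetical store of maps produced by an in-process bundler: { [fileName]: RawSourceMap }
const inMemoryMaps = Object.create(null);

sourceMapSupport.install({
  environment: 'node',
  handleUncaughtExceptions: true,
  retrieveSourceMap: function (source) {
    // Serve maps for in-memory files; returning null falls back to the default handlers.
    if (inMemoryMaps[source]) {
      return { url: source, map: inMemoryMaps[source] };
    }
    return null;
  }
});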
938
node_modules/@cspotcode/source-map-support/source-map-support.js
generated
vendored
Normal file
@ -0,0 +1,938 @@
const { TraceMap, originalPositionFor, AnyMap } = require('@jridgewell/trace-mapping');
|
||||||
|
var path = require('path');
|
||||||
|
const { fileURLToPath, pathToFileURL } = require('url');
|
||||||
|
var util = require('util');
|
||||||
|
|
||||||
|
var fs;
|
||||||
|
try {
|
||||||
|
fs = require('fs');
|
||||||
|
if (!fs.existsSync || !fs.readFileSync) {
|
||||||
|
// fs doesn't have all methods we need
|
||||||
|
fs = null;
|
||||||
|
}
|
||||||
|
} catch (err) {
|
||||||
|
/* nop */
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Requires a module which is protected against bundler minification.
|
||||||
|
*
|
||||||
|
* @param {NodeModule} mod
|
||||||
|
* @param {string} request
|
||||||
|
*/
|
||||||
|
function dynamicRequire(mod, request) {
|
||||||
|
return mod.require(request);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @typedef {{
|
||||||
|
* enabled: boolean;
|
||||||
|
* originalValue: any;
|
||||||
|
* installedValue: any;
|
||||||
|
* }} HookState
|
||||||
|
* Used for installing and uninstalling hooks
|
||||||
|
*/
|
||||||
|
|
||||||
|
// Increment this if the format of sharedData changes in a breaking way.
|
||||||
|
var sharedDataVersion = 1;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @template T
|
||||||
|
* @param {T} defaults
|
||||||
|
* @returns {T}
|
||||||
|
*/
|
||||||
|
function initializeSharedData(defaults) {
|
||||||
|
var sharedDataKey = 'source-map-support/sharedData';
|
||||||
|
if (typeof Symbol !== 'undefined') {
|
||||||
|
sharedDataKey = Symbol.for(sharedDataKey);
|
||||||
|
}
|
||||||
|
var sharedData = this[sharedDataKey];
|
||||||
|
if (!sharedData) {
|
||||||
|
sharedData = { version: sharedDataVersion };
|
||||||
|
if (Object.defineProperty) {
|
||||||
|
Object.defineProperty(this, sharedDataKey, { value: sharedData });
|
||||||
|
} else {
|
||||||
|
this[sharedDataKey] = sharedData;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (sharedDataVersion !== sharedData.version) {
|
||||||
|
throw new Error("Multiple incompatible instances of source-map-support were loaded");
|
||||||
|
}
|
||||||
|
for (var key in defaults) {
|
||||||
|
if (!(key in sharedData)) {
|
||||||
|
sharedData[key] = defaults[key];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return sharedData;
|
||||||
|
}
|
||||||
|
|
||||||
|
// If multiple instances of source-map-support are loaded into the same
|
||||||
|
// context, they shouldn't overwrite each other. By storing handlers, caches,
|
||||||
|
// and other state on a shared object, different instances of
|
||||||
|
// source-map-support can work together in a limited way. This does require
|
||||||
|
// that future versions of source-map-support continue to support the fields on
|
||||||
|
// this object. If this internal contract ever needs to be broken, increment
|
||||||
|
// sharedDataVersion. (This version number is not the same as any of the
|
||||||
|
// package's version numbers, which should reflect the *external* API of
|
||||||
|
// source-map-support.)
|
||||||
|
var sharedData = initializeSharedData({
|
||||||
|
|
||||||
|
// Only install once if called multiple times
|
||||||
|
// Remember how the environment looked before installation so we can restore if able
|
||||||
|
/** @type {HookState} */
|
||||||
|
errorPrepareStackTraceHook: undefined,
|
||||||
|
/** @type {HookState} */
|
||||||
|
processEmitHook: undefined,
|
||||||
|
/** @type {HookState} */
|
||||||
|
moduleResolveFilenameHook: undefined,
|
||||||
|
|
||||||
|
/** @type {Array<(request: string, parent: any, isMain: boolean, options: any, redirectedRequest: string) => void>} */
|
||||||
|
onConflictingLibraryRedirectArr: [],
|
||||||
|
|
||||||
|
// If true, the caches are reset before a stack trace formatting operation
|
||||||
|
emptyCacheBetweenOperations: false,
|
||||||
|
|
||||||
|
// Maps a file path to a string containing the file contents
|
||||||
|
fileContentsCache: Object.create(null),
|
||||||
|
|
||||||
|
// Maps a file path to a source map for that file
|
||||||
|
/** @type {Record<string, {url: string, map: TraceMap}} */
|
||||||
|
sourceMapCache: Object.create(null),
|
||||||
|
|
||||||
|
// Priority list of retrieve handlers
|
||||||
|
retrieveFileHandlers: [],
|
||||||
|
retrieveMapHandlers: [],
|
||||||
|
|
||||||
|
// Priority list of internally-implemented handlers.
|
||||||
|
// When resetting state, we must keep these.
|
||||||
|
internalRetrieveFileHandlers: [],
|
||||||
|
internalRetrieveMapHandlers: [],
|
||||||
|
|
||||||
|
});
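// Sketch of the sharing mechanism described above (illustrative, assumes the usual
// non-strict global `this`): two copies of this module end up on one state object
// because the key is interned with Symbol.for:
//   var keyA = Symbol.for('source-map-support/sharedData'); // computed by copy A
//   var keyB = Symbol.for('source-map-support/sharedData'); // computed by copy B
//   keyA === keyB; // true, so both copies read and write the same global property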
|
||||||
|
|
||||||
|
// Supports {browser, node, auto}
|
||||||
|
var environment = "auto";
|
||||||
|
|
||||||
|
// Regex for detecting source maps
|
||||||
|
var reSourceMap = /^data:application\/json[^,]+base64,/;
|
||||||
|
|
||||||
|
function isInBrowser() {
|
||||||
|
if (environment === "browser")
|
||||||
|
return true;
|
||||||
|
if (environment === "node")
|
||||||
|
return false;
|
||||||
|
return ((typeof window !== 'undefined') && (typeof XMLHttpRequest === 'function') && !(window.require && window.module && window.process && window.process.type === "renderer"));
|
||||||
|
}
|
||||||
|
|
||||||
|
function hasGlobalProcessEventEmitter() {
|
||||||
|
return ((typeof process === 'object') && (process !== null) && (typeof process.on === 'function'));
|
||||||
|
}
|
||||||
|
|
||||||
|
function tryFileURLToPath(v) {
|
||||||
|
if(isFileUrl(v)) {
|
||||||
|
return fileURLToPath(v);
|
||||||
|
}
|
||||||
|
return v;
|
||||||
|
}
|
||||||
|
|
||||||
|
// TODO un-copy these from resolve-uri; see if they can be exported from that lib
|
||||||
|
function isFileUrl(input) {
|
||||||
|
return input.startsWith('file:');
|
||||||
|
}
|
||||||
|
function isAbsoluteUrl(input) {
|
||||||
|
return schemeRegex.test(input);
|
||||||
|
}
|
||||||
|
// Matches the scheme of a URL, eg "http://"
|
||||||
|
const schemeRegex = /^[\w+.-]+:\/\//;
|
||||||
|
function isSchemeRelativeUrl(input) {
|
||||||
|
return input.startsWith('//');
|
||||||
|
}
|
||||||
|
|
||||||
|
// #region Caches
|
||||||
|
/** @param {string} pathOrFileUrl */
|
||||||
|
function getCacheKey(pathOrFileUrl) {
|
||||||
|
if(pathOrFileUrl.startsWith('node:')) return pathOrFileUrl;
|
||||||
|
if(isFileUrl(pathOrFileUrl)) {
|
||||||
|
// Must normalize spaces to %20, stuff like that
|
||||||
|
return new URL(pathOrFileUrl).toString();
|
||||||
|
} else {
|
||||||
|
try {
|
||||||
|
return pathToFileURL(pathOrFileUrl).toString();
|
||||||
|
} catch {
|
||||||
|
return pathOrFileUrl;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
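// Illustrative examples of the normalization above (POSIX paths assumed):
//   getCacheKey('/srv/app/dist/main.js')        -> 'file:///srv/app/dist/main.js'
//   getCacheKey('file:///srv/app/dist/main.js') -> 'file:///srv/app/dist/main.js'
//   getCacheKey('node:internal/modules/cjs')    -> 'node:internal/modules/cjs'
// so path-style and URL-style references to the same file share one cache entry.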
|
||||||
|
function getFileContentsCache(key) {
|
||||||
|
return sharedData.fileContentsCache[getCacheKey(key)];
|
||||||
|
}
|
||||||
|
function hasFileContentsCacheFromKey(key) {
|
||||||
|
return Object.prototype.hasOwnProperty.call(sharedData.fileContentsCache, key);
|
||||||
|
}
|
||||||
|
function getFileContentsCacheFromKey(key) {
|
||||||
|
return sharedData.fileContentsCache[key];
|
||||||
|
}
|
||||||
|
function setFileContentsCache(key, value) {
|
||||||
|
return sharedData.fileContentsCache[getCacheKey(key)] = value;
|
||||||
|
}
|
||||||
|
function getSourceMapCache(key) {
|
||||||
|
return sharedData.sourceMapCache[getCacheKey(key)];
|
||||||
|
}
|
||||||
|
function setSourceMapCache(key, value) {
|
||||||
|
return sharedData.sourceMapCache[getCacheKey(key)] = value;
|
||||||
|
}
|
||||||
|
function clearCaches() {
|
||||||
|
sharedData.fileContentsCache = Object.create(null);
|
||||||
|
sharedData.sourceMapCache = Object.create(null);
|
||||||
|
}
|
||||||
|
// #endregion Caches
|
||||||
|
|
||||||
|
function handlerExec(list, internalList) {
|
||||||
|
return function(arg) {
|
||||||
|
for (var i = 0; i < list.length; i++) {
|
||||||
|
var ret = list[i](arg);
|
||||||
|
if (ret) {
|
||||||
|
return ret;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
for (var i = 0; i < internalList.length; i++) {
|
||||||
|
var ret = internalList[i](arg);
|
||||||
|
if (ret) {
|
||||||
|
return ret;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return null;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
var retrieveFile = handlerExec(sharedData.retrieveFileHandlers, sharedData.internalRetrieveFileHandlers);
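// Illustrative use of the composed retriever (the virtual file name is hypothetical):
// a handler supplied via install({ retrieveFile: ... }) ends up at the front of
// sharedData.retrieveFileHandlers, so it is consulted before the built-in
// filesystem/XHR handler registered below:
//   sharedData.retrieveFileHandlers.unshift(function(path) {
//     return path === 'virtual://bundle.js' ? 'console.log("generated");' : null;
//   });
//   retrieveFile('virtual://bundle.js'); // -> 'console.log("generated");'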
|
||||||
|
|
||||||
|
sharedData.internalRetrieveFileHandlers.push(function(path) {
|
||||||
|
// Trim the path to make sure there is no extra whitespace.
|
||||||
|
path = path.trim();
|
||||||
|
if (/^file:/.test(path)) {
|
||||||
|
// existsSync/readFileSync can't handle file protocol, but once stripped, it works
|
||||||
|
path = path.replace(/file:\/\/\/(\w:)?/, function(protocol, drive) {
|
||||||
|
return drive ?
|
||||||
|
'' : // file:///C:/dir/file -> C:/dir/file
|
||||||
|
'/'; // file:///root-dir/file -> /root-dir/file
|
||||||
|
});
|
||||||
|
}
|
||||||
|
const key = getCacheKey(path);
|
||||||
|
if(hasFileContentsCacheFromKey(key)) {
|
||||||
|
return getFileContentsCacheFromKey(key);
|
||||||
|
}
|
||||||
|
|
||||||
|
var contents = '';
|
||||||
|
try {
|
||||||
|
if (!fs) {
|
||||||
|
// Use SJAX if we are in the browser
|
||||||
|
var xhr = new XMLHttpRequest();
|
||||||
|
xhr.open('GET', path, /** async */ false);
|
||||||
|
xhr.send(null);
|
||||||
|
if (xhr.readyState === 4 && xhr.status === 200) {
|
||||||
|
contents = xhr.responseText;
|
||||||
|
}
|
||||||
|
} else if (fs.existsSync(path)) {
|
||||||
|
// Otherwise, use the filesystem
|
||||||
|
contents = fs.readFileSync(path, 'utf8');
|
||||||
|
}
|
||||||
|
} catch (er) {
|
||||||
|
/* ignore any errors */
|
||||||
|
}
|
||||||
|
|
||||||
|
return setFileContentsCache(path, contents);
|
||||||
|
});
|
||||||
|
|
||||||
|
// Support URLs relative to a directory, but be careful about a protocol prefix
|
||||||
|
// in case we are in the browser (i.e. directories may start with "http://" or "file:///")
|
||||||
|
function supportRelativeURL(file, url) {
|
||||||
|
if(!file) return url;
|
||||||
|
// given that this happens within error formatting codepath, probably best to
|
||||||
|
// fallback instead of throwing if anything goes wrong
|
||||||
|
try {
|
||||||
|
// if should output a URL
|
||||||
|
if(isAbsoluteUrl(file) || isSchemeRelativeUrl(file)) {
|
||||||
|
if(isAbsoluteUrl(url) || isSchemeRelativeUrl(url)) {
|
||||||
|
return new URL(url, file).toString();
|
||||||
|
}
|
||||||
|
if(path.isAbsolute(url)) {
|
||||||
|
return new URL(pathToFileURL(url), file).toString();
|
||||||
|
}
|
||||||
|
// url is relative path or URL
|
||||||
|
return new URL(url.replace(/\\/g, '/'), file).toString();
|
||||||
|
}
|
||||||
|
// if should output a path (unless URL is something like https://)
|
||||||
|
if(path.isAbsolute(file)) {
|
||||||
|
if(isFileUrl(url)) {
|
||||||
|
return fileURLToPath(url);
|
||||||
|
}
|
||||||
|
if(isSchemeRelativeUrl(url)) {
|
||||||
|
return fileURLToPath(new URL(url, 'file://'));
|
||||||
|
}
|
||||||
|
if(isAbsoluteUrl(url)) {
|
||||||
|
// url is a non-file URL
|
||||||
|
// Go with the URL
|
||||||
|
return url;
|
||||||
|
}
|
||||||
|
if(path.isAbsolute(url)) {
|
||||||
|
// Normalize at all? decodeURI or normalize slashes?
|
||||||
|
return path.normalize(url);
|
||||||
|
}
|
||||||
|
// url is relative path or URL
|
||||||
|
return path.join(file, '..', decodeURI(url));
|
||||||
|
}
|
||||||
|
// If we get here, file is relative.
|
||||||
|
// Shouldn't happen since node identifies modules with absolute paths or URLs.
|
||||||
|
// But we can take a stab at returning something meaningful anyway.
|
||||||
|
if(isAbsoluteUrl(url) || isSchemeRelativeUrl(url)) {
|
||||||
|
return url;
|
||||||
|
}
|
||||||
|
return path.join(file, '..', url);
|
||||||
|
} catch(e) {
|
||||||
|
return url;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Return pathOrUrl in the same style as matchStyleOf: either a file URL or a native path
|
||||||
|
function matchStyleOfPathOrUrl(matchStyleOf, pathOrUrl) {
|
||||||
|
try {
|
||||||
|
if(isAbsoluteUrl(matchStyleOf) || isSchemeRelativeUrl(matchStyleOf)) {
|
||||||
|
if(isAbsoluteUrl(pathOrUrl) || isSchemeRelativeUrl(pathOrUrl)) return pathOrUrl;
|
||||||
|
if(path.isAbsolute(pathOrUrl)) return pathToFileURL(pathOrUrl).toString();
|
||||||
|
} else if(path.isAbsolute(matchStyleOf)) {
|
||||||
|
if(isAbsoluteUrl(pathOrUrl) || isSchemeRelativeUrl(pathOrUrl)) {
|
||||||
|
return fileURLToPath(new URL(pathOrUrl, 'file://'));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return pathOrUrl;
|
||||||
|
} catch(e) {
|
||||||
|
return pathOrUrl;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function retrieveSourceMapURL(source) {
|
||||||
|
var fileData;
|
||||||
|
|
||||||
|
if (isInBrowser()) {
|
||||||
|
try {
|
||||||
|
var xhr = new XMLHttpRequest();
|
||||||
|
xhr.open('GET', source, false);
|
||||||
|
xhr.send(null);
|
||||||
|
fileData = xhr.readyState === 4 ? xhr.responseText : null;
|
||||||
|
|
||||||
|
// Support providing a sourceMappingURL via the SourceMap header
|
||||||
|
var sourceMapHeader = xhr.getResponseHeader("SourceMap") ||
|
||||||
|
xhr.getResponseHeader("X-SourceMap");
|
||||||
|
if (sourceMapHeader) {
|
||||||
|
return sourceMapHeader;
|
||||||
|
}
|
||||||
|
} catch (e) {
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get the URL of the source map
|
||||||
|
fileData = retrieveFile(tryFileURLToPath(source));
|
||||||
|
var re = /(?:\/\/[@#][\s]*sourceMappingURL=([^\s'"]+)[\s]*$)|(?:\/\*[@#][\s]*sourceMappingURL=([^\s*'"]+)[\s]*(?:\*\/)[\s]*$)/mg;
|
||||||
|
// Keep executing the search to find the *last* sourceMappingURL to avoid
|
||||||
|
// picking up sourceMappingURLs from comments, strings, etc.
|
||||||
|
var lastMatch, match;
|
||||||
|
while (match = re.exec(fileData)) lastMatch = match;
|
||||||
|
if (!lastMatch) return null;
|
||||||
|
return lastMatch[1];
|
||||||
|
};
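// Examples of the trailing comments matched by the regex above (file names illustrative):
//   //# sourceMappingURL=main.js.map
//   /*# sourceMappingURL=styles.css.map */
// Only the last occurrence in the file is used, per the loop above.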
|
||||||
|
|
||||||
|
// Can be overridden by the retrieveSourceMap option to install. Takes a
|
||||||
|
// generated source filename; returns a {map, optional url} object, or null if
|
||||||
|
// there is no source map. The map field may be either a string or the parsed
|
||||||
|
// JSON object (ie, it must be a valid argument to the SourceMapConsumer
|
||||||
|
// constructor).
|
||||||
|
/** @type {(source: string) => import('./source-map-support').UrlAndMap | null} */
|
||||||
|
var retrieveSourceMap = handlerExec(sharedData.retrieveMapHandlers, sharedData.internalRetrieveMapHandlers);
|
||||||
|
sharedData.internalRetrieveMapHandlers.push(function(source) {
|
||||||
|
var sourceMappingURL = retrieveSourceMapURL(source);
|
||||||
|
if (!sourceMappingURL) return null;
|
||||||
|
|
||||||
|
// Read the contents of the source map
|
||||||
|
var sourceMapData;
|
||||||
|
if (reSourceMap.test(sourceMappingURL)) {
|
||||||
|
// Support source map URL as a data url
|
||||||
|
var rawData = sourceMappingURL.slice(sourceMappingURL.indexOf(',') + 1);
|
||||||
|
sourceMapData = Buffer.from(rawData, "base64").toString();
|
||||||
|
sourceMappingURL = source;
|
||||||
|
} else {
|
||||||
|
// Support source map URLs relative to the source URL
|
||||||
|
sourceMappingURL = supportRelativeURL(source, sourceMappingURL);
|
||||||
|
sourceMapData = retrieveFile(tryFileURLToPath(sourceMappingURL));
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!sourceMapData) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
url: sourceMappingURL,
|
||||||
|
map: sourceMapData
|
||||||
|
};
|
||||||
|
});
|
||||||
|
|
||||||
|
function mapSourcePosition(position) {
|
||||||
|
var sourceMap = getSourceMapCache(position.source);
|
||||||
|
if (!sourceMap) {
|
||||||
|
// Call the (overrideable) retrieveSourceMap function to get the source map.
|
||||||
|
var urlAndMap = retrieveSourceMap(position.source);
|
||||||
|
if (urlAndMap) {
|
||||||
|
sourceMap = setSourceMapCache(position.source, {
|
||||||
|
url: urlAndMap.url,
|
||||||
|
map: new AnyMap(urlAndMap.map, urlAndMap.url)
|
||||||
|
});
|
||||||
|
|
||||||
|
// Overwrite trace-mapping's resolutions, because they do not handle
|
||||||
|
// Windows paths the way we want.
|
||||||
|
// TODO Remove now that windows path support was added to resolve-uri and thus trace-mapping?
|
||||||
|
sourceMap.map.resolvedSources = sourceMap.map.sources.map(s => supportRelativeURL(sourceMap.url, s));
|
||||||
|
|
||||||
|
// Load all sources stored inline with the source map into the file cache
|
||||||
|
// to pretend like they are already loaded. They may not exist on disk.
|
||||||
|
if (sourceMap.map.sourcesContent) {
|
||||||
|
sourceMap.map.resolvedSources.forEach(function(resolvedSource, i) {
|
||||||
|
var contents = sourceMap.map.sourcesContent[i];
|
||||||
|
if (contents) {
|
||||||
|
setFileContentsCache(resolvedSource, contents);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
sourceMap = setSourceMapCache(position.source, {
|
||||||
|
url: null,
|
||||||
|
map: null
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Resolve the source URL relative to the URL of the source map
|
||||||
|
if (sourceMap && sourceMap.map) {
|
||||||
|
var originalPosition = originalPositionFor(sourceMap.map, position);
|
||||||
|
|
||||||
|
// Only return the original position if a matching line was found. If no
|
||||||
|
// matching line is found then we return position instead, which will cause
|
||||||
|
// the stack trace to print the path and line for the compiled file. It is
|
||||||
|
// better to give a precise location in the compiled file than a vague
|
||||||
|
// location in the original file.
|
||||||
|
if (originalPosition.source !== null) {
|
||||||
|
// originalPosition.source has *already* been resolved against sourceMap.url
|
||||||
|
// so is *already* as absolute as possible.
|
||||||
|
// However, we want to ensure we output in same format as input: URL or native path
|
||||||
|
originalPosition.source = matchStyleOfPathOrUrl(
|
||||||
|
position.source, originalPosition.source);
|
||||||
|
return originalPosition;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return position;
|
||||||
|
}
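// Illustrative call (paths and positions hypothetical): a position in a generated
// file is translated back to the original source when a map is found, otherwise the
// input position is returned unchanged:
//   mapSourcePosition({ source: '/srv/app/dist/index.js', line: 1, column: 4521 })
//   // -> { source: '/srv/app/src/index.ts', line: 42, column: 10, ... } when mapped
//   // -> the original { source: '/srv/app/dist/index.js', line: 1, column: 4521 } when not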
|
||||||
|
|
||||||
|
// Parses code generated by FormatEvalOrigin(), a function inside V8:
|
||||||
|
// https://code.google.com/p/v8/source/browse/trunk/src/messages.js
|
||||||
|
function mapEvalOrigin(origin) {
|
||||||
|
// Most eval() calls are in this format
|
||||||
|
var match = /^eval at ([^(]+) \((.+):(\d+):(\d+)\)$/.exec(origin);
|
||||||
|
if (match) {
|
||||||
|
var position = mapSourcePosition({
|
||||||
|
source: match[2],
|
||||||
|
line: +match[3],
|
||||||
|
column: match[4] - 1
|
||||||
|
});
|
||||||
|
return 'eval at ' + match[1] + ' (' + position.source + ':' +
|
||||||
|
position.line + ':' + (position.column + 1) + ')';
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse nested eval() calls using recursion
|
||||||
|
match = /^eval at ([^(]+) \((.+)\)$/.exec(origin);
|
||||||
|
if (match) {
|
||||||
|
return 'eval at ' + match[1] + ' (' + mapEvalOrigin(match[2]) + ')';
|
||||||
|
}
|
||||||
|
|
||||||
|
// Make sure we still return useful information if we didn't find anything
|
||||||
|
return origin;
|
||||||
|
}
|
||||||
|
|
||||||
|
// This is copied almost verbatim from the V8 source code at
|
||||||
|
// https://code.google.com/p/v8/source/browse/trunk/src/messages.js
|
||||||
|
// Update 2022-04-29:
|
||||||
|
// https://github.com/v8/v8/blob/98f6f100c5ab8e390e51422747c4ef644d5ac6f2/src/builtins/builtins-callsite.cc#L175-L179
|
||||||
|
// https://github.com/v8/v8/blob/98f6f100c5ab8e390e51422747c4ef644d5ac6f2/src/objects/call-site-info.cc#L795-L804
|
||||||
|
// https://github.com/v8/v8/blob/98f6f100c5ab8e390e51422747c4ef644d5ac6f2/src/objects/call-site-info.cc#L717-L750
|
||||||
|
// The implementation of wrapCallSite() used to just forward to the actual source
|
||||||
|
// code of CallSite.prototype.toString but unfortunately a new release of V8
|
||||||
|
// did something to the prototype chain and broke the shim. The only fix I
|
||||||
|
// could find was copy/paste.
|
||||||
|
function CallSiteToString() {
|
||||||
|
var fileName;
|
||||||
|
var fileLocation = "";
|
||||||
|
if (this.isNative()) {
|
||||||
|
fileLocation = "native";
|
||||||
|
} else {
|
||||||
|
fileName = this.getScriptNameOrSourceURL();
|
||||||
|
if (!fileName && this.isEval()) {
|
||||||
|
fileLocation = this.getEvalOrigin();
|
||||||
|
fileLocation += ", "; // Expecting source position to follow.
|
||||||
|
}
|
||||||
|
|
||||||
|
if (fileName) {
|
||||||
|
fileLocation += fileName;
|
||||||
|
} else {
|
||||||
|
// Source code does not originate from a file and is not native, but we
|
||||||
|
// can still get the source position inside the source string, e.g. in
|
||||||
|
// an eval string.
|
||||||
|
fileLocation += "<anonymous>";
|
||||||
|
}
|
||||||
|
var lineNumber = this.getLineNumber();
|
||||||
|
if (lineNumber != null) {
|
||||||
|
fileLocation += ":" + lineNumber;
|
||||||
|
      var columnNumber = this.getColumnNumber();
      if (columnNumber) {
        fileLocation += ":" + columnNumber;
      }
    }
  }

  var line = "";
  var isAsync = this.isAsync ? this.isAsync() : false;
  if(isAsync) {
    line += 'async ';
    var isPromiseAll = this.isPromiseAll ? this.isPromiseAll() : false;
    var isPromiseAny = this.isPromiseAny ? this.isPromiseAny() : false;
    if(isPromiseAny || isPromiseAll) {
      line += isPromiseAll ? 'Promise.all (index ' : 'Promise.any (index ';
      var promiseIndex = this.getPromiseIndex();
      line += promiseIndex + ')';
    }
  }
  var functionName = this.getFunctionName();
  var addSuffix = true;
  var isConstructor = this.isConstructor();
  var isMethodCall = !(this.isToplevel() || isConstructor);
  if (isMethodCall) {
    var typeName = this.getTypeName();
    // Fixes shim to be backward compatable with Node v0 to v4
    if (typeName === "[object Object]") {
      typeName = "null";
    }
    var methodName = this.getMethodName();
    if (functionName) {
      if (typeName && functionName.indexOf(typeName) != 0) {
        line += typeName + ".";
      }
      line += functionName;
      if (methodName && functionName.indexOf("." + methodName) != functionName.length - methodName.length - 1) {
        line += " [as " + methodName + "]";
      }
    } else {
      line += typeName + "." + (methodName || "<anonymous>");
    }
  } else if (isConstructor) {
    line += "new " + (functionName || "<anonymous>");
  } else if (functionName) {
    line += functionName;
  } else {
    line += fileLocation;
    addSuffix = false;
  }
  if (addSuffix) {
    line += " (" + fileLocation + ")";
  }
  return line;
}

function cloneCallSite(frame) {
  var object = {};
  Object.getOwnPropertyNames(Object.getPrototypeOf(frame)).forEach(function(name) {
    object[name] = /^(?:is|get)/.test(name) ? function() { return frame[name].call(frame); } : frame[name];
  });
  object.toString = CallSiteToString;
  return object;
}

function wrapCallSite(frame, state) {
  // provides interface backward compatibility
  if (state === undefined) {
    state = { nextPosition: null, curPosition: null }
  }
  if(frame.isNative()) {
    state.curPosition = null;
    return frame;
  }

  // Most call sites will return the source file from getFileName(), but code
  // passed to eval() ending in "//# sourceURL=..." will return the source file
  // from getScriptNameOrSourceURL() instead
  var source = frame.getFileName() || frame.getScriptNameOrSourceURL();
  if (source) {
    // v8 does not expose its internal isWasm, etc methods, so we do this instead.
    if(source.startsWith('wasm://')) {
      state.curPosition = null;
      return frame;
    }

    var line = frame.getLineNumber();
    var column = frame.getColumnNumber() - 1;

    // Fix position in Node where some (internal) code is prepended.
    // See https://github.com/evanw/node-source-map-support/issues/36
    // Header removed in node at ^10.16 || >=11.11.0
    // v11 is not an LTS candidate, we can just test the one version with it.
    // Test node versions for: 10.16-19, 10.20+, 12-19, 20-99, 100+, or 11.11
    var noHeader = /^v(10\.1[6-9]|10\.[2-9][0-9]|10\.[0-9]{3,}|1[2-9]\d*|[2-9]\d|\d{3,}|11\.11)/;
    var headerLength = noHeader.test(process.version) ? 0 : 62;
    if (line === 1 && column > headerLength && !isInBrowser() && !frame.isEval()) {
      column -= headerLength;
    }

    var position = mapSourcePosition({
      source: source,
      line: line,
      column: column
    });
    state.curPosition = position;
    frame = cloneCallSite(frame);
    var originalFunctionName = frame.getFunctionName;
    frame.getFunctionName = function() {
      if (state.nextPosition == null) {
        return originalFunctionName();
      }
      return state.nextPosition.name || originalFunctionName();
    };
    frame.getFileName = function() { return position.source; };
    frame.getLineNumber = function() { return position.line; };
    frame.getColumnNumber = function() { return position.column + 1; };
    frame.getScriptNameOrSourceURL = function() { return position.source; };
    return frame;
  }

  // Code called using eval() needs special handling
  var origin = frame.isEval() && frame.getEvalOrigin();
  if (origin) {
    origin = mapEvalOrigin(origin);
    frame = cloneCallSite(frame);
    frame.getEvalOrigin = function() { return origin; };
    return frame;
  }

  // If we get here then we were unable to change the source position
  return frame;
}

var kIsNodeError = undefined;
try {
  // Get a deliberate ERR_INVALID_ARG_TYPE
  // TODO is there a better way to reliably get an instance of NodeError?
  path.resolve(123);
} catch(e) {
  const symbols = Object.getOwnPropertySymbols(e);
  const symbol = symbols.find(function (s) {return s.toString().indexOf('kIsNodeError') >= 0});
  if(symbol) kIsNodeError = symbol;
}

const ErrorPrototypeToString = (err) =>Error.prototype.toString.call(err);

/** @param {HookState} hookState */
function createPrepareStackTrace(hookState) {
  return prepareStackTrace;

  // This function is part of the V8 stack trace API, for more info see:
  // https://v8.dev/docs/stack-trace-api
  function prepareStackTrace(error, stack) {
    if(!hookState.enabled) return hookState.originalValue.apply(this, arguments);

    if (sharedData.emptyCacheBetweenOperations) {
      clearCaches();
    }

    // node gives its own errors special treatment. Mimic that behavior
    // https://github.com/nodejs/node/blob/3cbaabc4622df1b4009b9d026a1a970bdbae6e89/lib/internal/errors.js#L118-L128
    // https://github.com/nodejs/node/pull/39182
    var errorString;
    if (kIsNodeError) {
      if(kIsNodeError in error) {
        errorString = `${error.name} [${error.code}]: ${error.message}`;
      } else {
        errorString = ErrorPrototypeToString(error);
      }
    } else {
      var name = error.name || 'Error';
      var message = error.message || '';
      errorString = message ? name + ": " + message : name;
    }

    var state = { nextPosition: null, curPosition: null };
    var processedStack = [];
    for (var i = stack.length - 1; i >= 0; i--) {
      processedStack.push('\n    at ' + wrapCallSite(stack[i], state));
      state.nextPosition = state.curPosition;
    }
    state.curPosition = state.nextPosition = null;
    return errorString + processedStack.reverse().join('');
  }
}

// Generate position and snippet of original source with pointer
function getErrorSource(error) {
  var match = /\n    at [^(]+ \((.*):(\d+):(\d+)\)/.exec(error.stack);
  if (match) {
    var source = match[1];
    var line = +match[2];
    var column = +match[3];

    // Support the inline sourceContents inside the source map
    var contents = getFileContentsCache(source);

    const sourceAsPath = tryFileURLToPath(source);

    // Support files on disk
    if (!contents && fs && fs.existsSync(sourceAsPath)) {
      try {
        contents = fs.readFileSync(sourceAsPath, 'utf8');
      } catch (er) {
        contents = '';
      }
    }

    // Format the line from the original source code like node does
    if (contents) {
      var code = contents.split(/(?:\r\n|\r|\n)/)[line - 1];
      if (code) {
        return source + ':' + line + '\n' + code + '\n' +
          new Array(column).join(' ') + '^';
      }
    }
  }
  return null;
}

function printFatalErrorUponExit (error) {
  var source = getErrorSource(error);

  // Ensure error is printed synchronously and not truncated
  if (process.stderr._handle && process.stderr._handle.setBlocking) {
    process.stderr._handle.setBlocking(true);
  }

  if (source) {
    console.error(source);
  }

  // Matches node's behavior for colorized output
  console.error(
    util.inspect(error, {
      customInspect: false,
      colors: process.stderr.isTTY
    })
  );
}

function shimEmitUncaughtException () {
  const originalValue = process.emit;
  var hook = sharedData.processEmitHook = {
    enabled: true,
    originalValue,
    installedValue: undefined
  };
  var isTerminatingDueToFatalException = false;
  var fatalException;

  process.emit = sharedData.processEmitHook.installedValue = function (type) {
    const hadListeners = originalValue.apply(this, arguments);
    if(hook.enabled) {
      if (type === 'uncaughtException' && !hadListeners) {
        isTerminatingDueToFatalException = true;
        fatalException = arguments[1];
        process.exit(1);
      }
      if (type === 'exit' && isTerminatingDueToFatalException) {
        printFatalErrorUponExit(fatalException);
      }
    }
    return hadListeners;
  };
}

var originalRetrieveFileHandlers = sharedData.retrieveFileHandlers.slice(0);
var originalRetrieveMapHandlers = sharedData.retrieveMapHandlers.slice(0);

exports.wrapCallSite = wrapCallSite;
exports.getErrorSource = getErrorSource;
exports.mapSourcePosition = mapSourcePosition;
exports.retrieveSourceMap = retrieveSourceMap;

exports.install = function(options) {
  options = options || {};

  if (options.environment) {
    environment = options.environment;
    if (["node", "browser", "auto"].indexOf(environment) === -1) {
      throw new Error("environment " + environment + " was unknown. Available options are {auto, browser, node}")
    }
  }

  // Use dynamicRequire to avoid including in browser bundles
  var Module = dynamicRequire(module, 'module');

  // Redirect subsequent imports of "source-map-support"
  // to this package
  const {redirectConflictingLibrary = true, onConflictingLibraryRedirect} = options;
  if(redirectConflictingLibrary) {
    if (!sharedData.moduleResolveFilenameHook) {
      const originalValue = Module._resolveFilename;
      const moduleResolveFilenameHook = sharedData.moduleResolveFilenameHook = {
        enabled: true,
        originalValue,
        installedValue: undefined,
      }
      Module._resolveFilename = sharedData.moduleResolveFilenameHook.installedValue = function (request, parent, isMain, options) {
        if (moduleResolveFilenameHook.enabled) {
          // Match all source-map-support entrypoints: source-map-support, source-map-support/register
          let requestRedirect;
          if (request === 'source-map-support') {
            requestRedirect = './';
          } else if (request === 'source-map-support/register') {
            requestRedirect = './register';
          }

          if (requestRedirect !== undefined) {
            const newRequest = require.resolve(requestRedirect);
            for (const cb of sharedData.onConflictingLibraryRedirectArr) {
              cb(request, parent, isMain, options, newRequest);
            }
            request = newRequest;
          }
        }

        return originalValue.call(this, request, parent, isMain, options);
      }
    }
    if (onConflictingLibraryRedirect) {
      sharedData.onConflictingLibraryRedirectArr.push(onConflictingLibraryRedirect);
    }
  }

  // Allow sources to be found by methods other than reading the files
  // directly from disk.
  if (options.retrieveFile) {
    if (options.overrideRetrieveFile) {
      sharedData.retrieveFileHandlers.length = 0;
    }

    sharedData.retrieveFileHandlers.unshift(options.retrieveFile);
  }

  // Allow source maps to be found by methods other than reading the files
  // directly from disk.
  if (options.retrieveSourceMap) {
    if (options.overrideRetrieveSourceMap) {
      sharedData.retrieveMapHandlers.length = 0;
    }

    sharedData.retrieveMapHandlers.unshift(options.retrieveSourceMap);
  }

  // Support runtime transpilers that include inline source maps
  if (options.hookRequire && !isInBrowser()) {
    var $compile = Module.prototype._compile;

    if (!$compile.__sourceMapSupport) {
      Module.prototype._compile = function(content, filename) {
        setFileContentsCache(filename, content);
        setSourceMapCache(filename, undefined);
        return $compile.call(this, content, filename);
      };

      Module.prototype._compile.__sourceMapSupport = true;
    }
  }

  // Configure options
  if (!sharedData.emptyCacheBetweenOperations) {
    sharedData.emptyCacheBetweenOperations = 'emptyCacheBetweenOperations' in options ?
      options.emptyCacheBetweenOperations : false;
  }


  // Install the error reformatter
  if (!sharedData.errorPrepareStackTraceHook) {
    const originalValue = Error.prepareStackTrace;
    sharedData.errorPrepareStackTraceHook = {
      enabled: true,
      originalValue,
      installedValue: undefined
    };
    Error.prepareStackTrace = sharedData.errorPrepareStackTraceHook.installedValue = createPrepareStackTrace(sharedData.errorPrepareStackTraceHook);
  }

  if (!sharedData.processEmitHook) {
    var installHandler = 'handleUncaughtExceptions' in options ?
      options.handleUncaughtExceptions : true;

    // Do not override 'uncaughtException' with our own handler in Node.js
    // Worker threads. Workers pass the error to the main thread as an event,
    // rather than printing something to stderr and exiting.
    try {
      // We need to use `dynamicRequire` because `require` on it's own will be optimized by WebPack/Browserify.
      var worker_threads = dynamicRequire(module, 'worker_threads');
      if (worker_threads.isMainThread === false) {
        installHandler = false;
      }
    } catch(e) {}

    // Provide the option to not install the uncaught exception handler. This is
    // to support other uncaught exception handlers (in test frameworks, for
    // example). If this handler is not installed and there are no other uncaught
    // exception handlers, uncaught exceptions will be caught by node's built-in
    // exception handler and the process will still be terminated. However, the
    // generated JavaScript code will be shown above the stack trace instead of
    // the original source code.
    if (installHandler && hasGlobalProcessEventEmitter()) {
      shimEmitUncaughtException();
    }
  }
};

exports.uninstall = function() {
  if(sharedData.processEmitHook) {
    // Disable behavior
    sharedData.processEmitHook.enabled = false;
    // If possible, remove our hook function. May not be possible if subsequent third-party hooks have wrapped around us.
    if(process.emit === sharedData.processEmitHook.installedValue) {
      process.emit = sharedData.processEmitHook.originalValue;
    }
    sharedData.processEmitHook = undefined;
  }
  if(sharedData.errorPrepareStackTraceHook) {
    // Disable behavior
    sharedData.errorPrepareStackTraceHook.enabled = false;
    // If possible or necessary, remove our hook function.
    // In vanilla environments, prepareStackTrace is `undefined`.
    // We cannot delegate to `undefined` the way we can to a function w/`.apply()`; our only option is to remove the function.
    // If we are the *first* hook installed, and another was installed on top of us, we have no choice but to remove both.
    if(Error.prepareStackTrace === sharedData.errorPrepareStackTraceHook.installedValue || typeof sharedData.errorPrepareStackTraceHook.originalValue !== 'function') {
      Error.prepareStackTrace = sharedData.errorPrepareStackTraceHook.originalValue;
    }
    sharedData.errorPrepareStackTraceHook = undefined;
  }
  if (sharedData.moduleResolveFilenameHook) {
    // Disable behavior
    sharedData.moduleResolveFilenameHook.enabled = false;
    // If possible, remove our hook function. May not be possible if subsequent third-party hooks have wrapped around us.
    var Module = dynamicRequire(module, 'module');
    if(Module._resolveFilename === sharedData.moduleResolveFilenameHook.installedValue) {
      Module._resolveFilename = sharedData.moduleResolveFilenameHook.originalValue;
    }
    sharedData.moduleResolveFilenameHook = undefined;
  }
  sharedData.onConflictingLibraryRedirectArr.length = 0;
}

exports.resetRetrieveHandlers = function() {
  sharedData.retrieveFileHandlers.length = 0;
  sharedData.retrieveMapHandlers.length = 0;
}
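The block above closes out the vendored source-map-support module; `install`, `uninstall`, and `resetRetrieveHandlers` are its public entry points. As a rough usage sketch (the `source-map-support` specifier assumes this vendored copy is resolved by its usual package name, and the option values are purely illustrative; the option names themselves come from the `install` implementation in this diff):

```js
// Minimal sketch of wiring up the hooks exported above.
const sms = require('source-map-support'); // specifier assumed; adjust to how this copy is exposed

// install() swaps Error.prepareStackTrace (and optionally process.emit) so that
// stack traces report positions in the original sources instead of built output.
sms.install({
  environment: 'node',              // 'auto' | 'browser' | 'node'
  handleUncaughtExceptions: false,  // leave uncaught-exception printing to a test runner
  hookRequire: true                 // cache module sources so inline source maps can be read
});

// Both hooks can be removed again later.
sms.uninstall();
```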
19 node_modules/@jridgewell/resolve-uri/LICENSE generated vendored Normal file
@@ -0,0 +1,19 @@
Copyright 2019 Justin Ridgewell <jridgewell@google.com>

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
40 node_modules/@jridgewell/resolve-uri/README.md generated vendored Normal file
@@ -0,0 +1,40 @@
# @jridgewell/resolve-uri

> Resolve a URI relative to an optional base URI

Resolve any combination of absolute URIs, protocol-relative URIs, absolute paths, or relative paths.

## Installation

```sh
npm install @jridgewell/resolve-uri
```

## Usage

```typescript
function resolve(input: string, base?: string): string;
```

```js
import resolve from '@jridgewell/resolve-uri';

resolve('foo', 'https://example.com'); // => 'https://example.com/foo'
```

| Input                 | Base                    | Resolution                     | Explanation                                                  |
|-----------------------|-------------------------|--------------------------------|--------------------------------------------------------------|
| `https://example.com` | _any_                   | `https://example.com/`         | Input is normalized only                                     |
| `//example.com`       | `https://base.com/`     | `https://example.com/`         | Input inherits the base's protocol                           |
| `//example.com`       | _rest_                  | `//example.com/`               | Input is normalized only                                     |
| `/example`            | `https://base.com/`     | `https://base.com/example`     | Input inherits the base's origin                             |
| `/example`            | `//base.com/`           | `//base.com/example`           | Input inherits the base's host and remains protocol relative |
| `/example`            | _rest_                  | `/example`                     | Input is normalized only                                     |
| `example`             | `https://base.com/dir/` | `https://base.com/dir/example` | Input is joined with the base                                |
| `example`             | `https://base.com/file` | `https://base.com/example`     | Input is joined with the base without its file               |
| `example`             | `//base.com/dir/`       | `//base.com/dir/example`       | Input is joined with the base's last directory               |
| `example`             | `//base.com/file`       | `//base.com/example`           | Input is joined with the base without its file               |
| `example`             | `/base/dir/`            | `/base/dir/example`            | Input is joined with the base's last directory               |
| `example`             | `/base/file`            | `/base/example`                | Input is joined with the base without its file               |
| `example`             | `base/dir/`             | `base/dir/example`             | Input is joined with the base's last directory               |
| `example`             | `base/file`             | `base/example`                 | Input is joined with the base without its file               |
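For concreteness, here is a hypothetical script exercising two rows of the table above (same `resolve` import as in the usage snippet; the URLs are made up for illustration):

```js
import resolve from '@jridgewell/resolve-uri';

// "/example" with an https base: the input inherits the base's origin.
console.log(resolve('/example', 'https://base.com/dir/file')); // 'https://base.com/example'

// "example" with base "base/dir/": the input is joined with the base's last directory.
console.log(resolve('example', 'base/dir/'));                  // 'base/dir/example'
```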
232 node_modules/@jridgewell/resolve-uri/dist/resolve-uri.mjs generated vendored Normal file
@@ -0,0 +1,232 @@
|
|||||||
|
// Matches the scheme of a URL, eg "http://"
|
||||||
|
const schemeRegex = /^[\w+.-]+:\/\//;
|
||||||
|
/**
|
||||||
|
* Matches the parts of a URL:
|
||||||
|
* 1. Scheme, including ":", guaranteed.
|
||||||
|
* 2. User/password, including "@", optional.
|
||||||
|
* 3. Host, guaranteed.
|
||||||
|
* 4. Port, including ":", optional.
|
||||||
|
* 5. Path, including "/", optional.
|
||||||
|
* 6. Query, including "?", optional.
|
||||||
|
* 7. Hash, including "#", optional.
|
||||||
|
*/
|
||||||
|
const urlRegex = /^([\w+.-]+:)\/\/([^@/#?]*@)?([^:/#?]*)(:\d+)?(\/[^#?]*)?(\?[^#]*)?(#.*)?/;
|
||||||
|
/**
|
||||||
|
* File URLs are weird. They dont' need the regular `//` in the scheme, they may or may not start
|
||||||
|
* with a leading `/`, they can have a domain (but only if they don't start with a Windows drive).
|
||||||
|
*
|
||||||
|
* 1. Host, optional.
|
||||||
|
* 2. Path, which may include "/", guaranteed.
|
||||||
|
* 3. Query, including "?", optional.
|
||||||
|
* 4. Hash, including "#", optional.
|
||||||
|
*/
|
||||||
|
const fileRegex = /^file:(?:\/\/((?![a-z]:)[^/#?]*)?)?(\/?[^#?]*)(\?[^#]*)?(#.*)?/i;
|
||||||
|
function isAbsoluteUrl(input) {
|
||||||
|
return schemeRegex.test(input);
|
||||||
|
}
|
||||||
|
function isSchemeRelativeUrl(input) {
|
||||||
|
return input.startsWith('//');
|
||||||
|
}
|
||||||
|
function isAbsolutePath(input) {
|
||||||
|
return input.startsWith('/');
|
||||||
|
}
|
||||||
|
function isFileUrl(input) {
|
||||||
|
return input.startsWith('file:');
|
||||||
|
}
|
||||||
|
function isRelative(input) {
|
||||||
|
return /^[.?#]/.test(input);
|
||||||
|
}
|
||||||
|
function parseAbsoluteUrl(input) {
|
||||||
|
const match = urlRegex.exec(input);
|
||||||
|
return makeUrl(match[1], match[2] || '', match[3], match[4] || '', match[5] || '/', match[6] || '', match[7] || '');
|
||||||
|
}
|
||||||
|
function parseFileUrl(input) {
|
||||||
|
const match = fileRegex.exec(input);
|
||||||
|
const path = match[2];
|
||||||
|
return makeUrl('file:', '', match[1] || '', '', isAbsolutePath(path) ? path : '/' + path, match[3] || '', match[4] || '');
|
||||||
|
}
|
||||||
|
function makeUrl(scheme, user, host, port, path, query, hash) {
|
||||||
|
return {
|
||||||
|
scheme,
|
||||||
|
user,
|
||||||
|
host,
|
||||||
|
port,
|
||||||
|
path,
|
||||||
|
query,
|
||||||
|
hash,
|
||||||
|
type: 7 /* Absolute */,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
function parseUrl(input) {
|
||||||
|
if (isSchemeRelativeUrl(input)) {
|
||||||
|
const url = parseAbsoluteUrl('http:' + input);
|
||||||
|
url.scheme = '';
|
||||||
|
url.type = 6 /* SchemeRelative */;
|
||||||
|
return url;
|
||||||
|
}
|
||||||
|
if (isAbsolutePath(input)) {
|
||||||
|
const url = parseAbsoluteUrl('http://foo.com' + input);
|
||||||
|
url.scheme = '';
|
||||||
|
url.host = '';
|
||||||
|
url.type = 5 /* AbsolutePath */;
|
||||||
|
return url;
|
||||||
|
}
|
||||||
|
if (isFileUrl(input))
|
||||||
|
return parseFileUrl(input);
|
||||||
|
if (isAbsoluteUrl(input))
|
||||||
|
return parseAbsoluteUrl(input);
|
||||||
|
const url = parseAbsoluteUrl('http://foo.com/' + input);
|
||||||
|
url.scheme = '';
|
||||||
|
url.host = '';
|
||||||
|
url.type = input
|
||||||
|
? input.startsWith('?')
|
||||||
|
? 3 /* Query */
|
||||||
|
: input.startsWith('#')
|
||||||
|
? 2 /* Hash */
|
||||||
|
: 4 /* RelativePath */
|
||||||
|
: 1 /* Empty */;
|
||||||
|
return url;
|
||||||
|
}
|
||||||
|
function stripPathFilename(path) {
|
||||||
|
// If a path ends with a parent directory "..", then it's a relative path with excess parent
|
||||||
|
// paths. It's not a file, so we can't strip it.
|
||||||
|
if (path.endsWith('/..'))
|
||||||
|
return path;
|
||||||
|
const index = path.lastIndexOf('/');
|
||||||
|
return path.slice(0, index + 1);
|
||||||
|
}
|
||||||
|
function mergePaths(url, base) {
|
||||||
|
normalizePath(base, base.type);
|
||||||
|
// If the path is just a "/", then it was an empty path to begin with (remember, we're a relative
|
||||||
|
// path).
|
||||||
|
if (url.path === '/') {
|
||||||
|
url.path = base.path;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
// Resolution happens relative to the base path's directory, not the file.
|
||||||
|
url.path = stripPathFilename(base.path) + url.path;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* The path can have empty directories "//", unneeded parents "foo/..", or current directory
|
||||||
|
* "foo/.". We need to normalize to a standard representation.
|
||||||
|
*/
|
||||||
|
function normalizePath(url, type) {
|
||||||
|
const rel = type <= 4 /* RelativePath */;
|
||||||
|
const pieces = url.path.split('/');
|
||||||
|
// We need to preserve the first piece always, so that we output a leading slash. The item at
|
||||||
|
// pieces[0] is an empty string.
|
||||||
|
let pointer = 1;
|
||||||
|
// Positive is the number of real directories we've output, used for popping a parent directory.
|
||||||
|
// Eg, "foo/bar/.." will have a positive 2, and we can decrement to be left with just "foo".
|
||||||
|
let positive = 0;
|
||||||
|
// We need to keep a trailing slash if we encounter an empty directory (eg, splitting "foo/" will
|
||||||
|
// generate `["foo", ""]` pieces). And, if we pop a parent directory. But once we encounter a
|
||||||
|
// real directory, we won't need to append, unless the other conditions happen again.
|
||||||
|
let addTrailingSlash = false;
|
||||||
|
for (let i = 1; i < pieces.length; i++) {
|
||||||
|
const piece = pieces[i];
|
||||||
|
// An empty directory, could be a trailing slash, or just a double "//" in the path.
|
||||||
|
if (!piece) {
|
||||||
|
addTrailingSlash = true;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
// If we encounter a real directory, then we don't need to append anymore.
|
||||||
|
addTrailingSlash = false;
|
||||||
|
// A current directory, which we can always drop.
|
||||||
|
if (piece === '.')
|
||||||
|
continue;
|
||||||
|
// A parent directory, we need to see if there are any real directories we can pop. Else, we
|
||||||
|
// have an excess of parents, and we'll need to keep the "..".
|
||||||
|
if (piece === '..') {
|
||||||
|
if (positive) {
|
||||||
|
addTrailingSlash = true;
|
||||||
|
positive--;
|
||||||
|
pointer--;
|
||||||
|
}
|
||||||
|
else if (rel) {
|
||||||
|
// If we're in a relativePath, then we need to keep the excess parents. Else, in an absolute
|
||||||
|
// URL, protocol relative URL, or an absolute path, we don't need to keep excess.
|
||||||
|
pieces[pointer++] = piece;
|
||||||
|
}
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
// We've encountered a real directory. Move it to the next insertion pointer, which accounts for
|
||||||
|
// any popped or dropped directories.
|
||||||
|
pieces[pointer++] = piece;
|
||||||
|
positive++;
|
||||||
|
}
|
||||||
|
let path = '';
|
||||||
|
for (let i = 1; i < pointer; i++) {
|
||||||
|
path += '/' + pieces[i];
|
||||||
|
}
|
||||||
|
if (!path || (addTrailingSlash && !path.endsWith('/..'))) {
|
||||||
|
path += '/';
|
||||||
|
}
|
||||||
|
url.path = path;
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Attempts to resolve `input` URL/path relative to `base`.
|
||||||
|
*/
|
||||||
|
function resolve(input, base) {
|
||||||
|
if (!input && !base)
|
||||||
|
return '';
|
||||||
|
const url = parseUrl(input);
|
||||||
|
let inputType = url.type;
|
||||||
|
if (base && inputType !== 7 /* Absolute */) {
|
||||||
|
const baseUrl = parseUrl(base);
|
||||||
|
const baseType = baseUrl.type;
|
||||||
|
switch (inputType) {
|
||||||
|
case 1 /* Empty */:
|
||||||
|
url.hash = baseUrl.hash;
|
||||||
|
// fall through
|
||||||
|
case 2 /* Hash */:
|
||||||
|
url.query = baseUrl.query;
|
||||||
|
// fall through
|
||||||
|
case 3 /* Query */:
|
||||||
|
case 4 /* RelativePath */:
|
||||||
|
mergePaths(url, baseUrl);
|
||||||
|
// fall through
|
||||||
|
case 5 /* AbsolutePath */:
|
||||||
|
// The host, user, and port are joined, you can't copy one without the others.
|
||||||
|
url.user = baseUrl.user;
|
||||||
|
url.host = baseUrl.host;
|
||||||
|
url.port = baseUrl.port;
|
||||||
|
// fall through
|
||||||
|
case 6 /* SchemeRelative */:
|
||||||
|
// The input doesn't have a schema at least, so we need to copy at least that over.
|
||||||
|
url.scheme = baseUrl.scheme;
|
||||||
|
}
|
||||||
|
if (baseType > inputType)
|
||||||
|
inputType = baseType;
|
||||||
|
}
|
||||||
|
normalizePath(url, inputType);
|
||||||
|
const queryHash = url.query + url.hash;
|
||||||
|
switch (inputType) {
|
||||||
|
// This is impossible, because of the empty checks at the start of the function.
|
||||||
|
// case UrlType.Empty:
|
||||||
|
case 2 /* Hash */:
|
||||||
|
case 3 /* Query */:
|
||||||
|
return queryHash;
|
||||||
|
case 4 /* RelativePath */: {
|
||||||
|
// The first char is always a "/", and we need it to be relative.
|
||||||
|
const path = url.path.slice(1);
|
||||||
|
if (!path)
|
||||||
|
return queryHash || '.';
|
||||||
|
if (isRelative(base || input) && !isRelative(path)) {
|
||||||
|
// If base started with a leading ".", or there is no base and input started with a ".",
|
||||||
|
// then we need to ensure that the relative path starts with a ".". We don't know if
|
||||||
|
// relative starts with a "..", though, so check before prepending.
|
||||||
|
return './' + path + queryHash;
|
||||||
|
}
|
||||||
|
return path + queryHash;
|
||||||
|
}
|
||||||
|
case 5 /* AbsolutePath */:
|
||||||
|
return url.path + queryHash;
|
||||||
|
default:
|
||||||
|
return url.scheme + '//' + url.user + url.host + url.port + url.path + queryHash;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export { resolve as default };
|
||||||
|
//# sourceMappingURL=resolve-uri.mjs.map
|
1 node_modules/@jridgewell/resolve-uri/dist/resolve-uri.mjs.map generated vendored Normal file
File diff suppressed because one or more lines are too long
240 node_modules/@jridgewell/resolve-uri/dist/resolve-uri.umd.js generated vendored Normal file
@@ -0,0 +1,240 @@
|
|||||||
|
(function (global, factory) {
|
||||||
|
typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory() :
|
||||||
|
typeof define === 'function' && define.amd ? define(factory) :
|
||||||
|
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, global.resolveURI = factory());
|
||||||
|
})(this, (function () { 'use strict';
|
||||||
|
|
||||||
|
// Matches the scheme of a URL, eg "http://"
|
||||||
|
const schemeRegex = /^[\w+.-]+:\/\//;
|
||||||
|
/**
|
||||||
|
* Matches the parts of a URL:
|
||||||
|
* 1. Scheme, including ":", guaranteed.
|
||||||
|
* 2. User/password, including "@", optional.
|
||||||
|
* 3. Host, guaranteed.
|
||||||
|
* 4. Port, including ":", optional.
|
||||||
|
* 5. Path, including "/", optional.
|
||||||
|
* 6. Query, including "?", optional.
|
||||||
|
* 7. Hash, including "#", optional.
|
||||||
|
*/
|
||||||
|
const urlRegex = /^([\w+.-]+:)\/\/([^@/#?]*@)?([^:/#?]*)(:\d+)?(\/[^#?]*)?(\?[^#]*)?(#.*)?/;
|
||||||
|
/**
|
||||||
|
* File URLs are weird. They dont' need the regular `//` in the scheme, they may or may not start
|
||||||
|
* with a leading `/`, they can have a domain (but only if they don't start with a Windows drive).
|
||||||
|
*
|
||||||
|
* 1. Host, optional.
|
||||||
|
* 2. Path, which may include "/", guaranteed.
|
||||||
|
* 3. Query, including "?", optional.
|
||||||
|
* 4. Hash, including "#", optional.
|
||||||
|
*/
|
||||||
|
const fileRegex = /^file:(?:\/\/((?![a-z]:)[^/#?]*)?)?(\/?[^#?]*)(\?[^#]*)?(#.*)?/i;
|
||||||
|
function isAbsoluteUrl(input) {
|
||||||
|
return schemeRegex.test(input);
|
||||||
|
}
|
||||||
|
function isSchemeRelativeUrl(input) {
|
||||||
|
return input.startsWith('//');
|
||||||
|
}
|
||||||
|
function isAbsolutePath(input) {
|
||||||
|
return input.startsWith('/');
|
||||||
|
}
|
||||||
|
function isFileUrl(input) {
|
||||||
|
return input.startsWith('file:');
|
||||||
|
}
|
||||||
|
function isRelative(input) {
|
||||||
|
return /^[.?#]/.test(input);
|
||||||
|
}
|
||||||
|
function parseAbsoluteUrl(input) {
|
||||||
|
const match = urlRegex.exec(input);
|
||||||
|
return makeUrl(match[1], match[2] || '', match[3], match[4] || '', match[5] || '/', match[6] || '', match[7] || '');
|
||||||
|
}
|
||||||
|
function parseFileUrl(input) {
|
||||||
|
const match = fileRegex.exec(input);
|
||||||
|
const path = match[2];
|
||||||
|
return makeUrl('file:', '', match[1] || '', '', isAbsolutePath(path) ? path : '/' + path, match[3] || '', match[4] || '');
|
||||||
|
}
|
||||||
|
function makeUrl(scheme, user, host, port, path, query, hash) {
|
||||||
|
return {
|
||||||
|
scheme,
|
||||||
|
user,
|
||||||
|
host,
|
||||||
|
port,
|
||||||
|
path,
|
||||||
|
query,
|
||||||
|
hash,
|
||||||
|
type: 7 /* Absolute */,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
function parseUrl(input) {
|
||||||
|
if (isSchemeRelativeUrl(input)) {
|
||||||
|
const url = parseAbsoluteUrl('http:' + input);
|
||||||
|
url.scheme = '';
|
||||||
|
url.type = 6 /* SchemeRelative */;
|
||||||
|
return url;
|
||||||
|
}
|
||||||
|
if (isAbsolutePath(input)) {
|
||||||
|
const url = parseAbsoluteUrl('http://foo.com' + input);
|
||||||
|
url.scheme = '';
|
||||||
|
url.host = '';
|
||||||
|
url.type = 5 /* AbsolutePath */;
|
||||||
|
return url;
|
||||||
|
}
|
||||||
|
if (isFileUrl(input))
|
||||||
|
return parseFileUrl(input);
|
||||||
|
if (isAbsoluteUrl(input))
|
||||||
|
return parseAbsoluteUrl(input);
|
||||||
|
const url = parseAbsoluteUrl('http://foo.com/' + input);
|
||||||
|
url.scheme = '';
|
||||||
|
url.host = '';
|
||||||
|
url.type = input
|
||||||
|
? input.startsWith('?')
|
||||||
|
? 3 /* Query */
|
||||||
|
: input.startsWith('#')
|
||||||
|
? 2 /* Hash */
|
||||||
|
: 4 /* RelativePath */
|
||||||
|
: 1 /* Empty */;
|
||||||
|
return url;
|
||||||
|
}
|
||||||
|
function stripPathFilename(path) {
|
||||||
|
// If a path ends with a parent directory "..", then it's a relative path with excess parent
|
||||||
|
// paths. It's not a file, so we can't strip it.
|
||||||
|
if (path.endsWith('/..'))
|
||||||
|
return path;
|
||||||
|
const index = path.lastIndexOf('/');
|
||||||
|
return path.slice(0, index + 1);
|
||||||
|
}
|
||||||
|
function mergePaths(url, base) {
|
||||||
|
normalizePath(base, base.type);
|
||||||
|
// If the path is just a "/", then it was an empty path to begin with (remember, we're a relative
|
||||||
|
// path).
|
||||||
|
if (url.path === '/') {
|
||||||
|
url.path = base.path;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
// Resolution happens relative to the base path's directory, not the file.
|
||||||
|
url.path = stripPathFilename(base.path) + url.path;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* The path can have empty directories "//", unneeded parents "foo/..", or current directory
|
||||||
|
* "foo/.". We need to normalize to a standard representation.
|
||||||
|
*/
|
||||||
|
function normalizePath(url, type) {
|
||||||
|
const rel = type <= 4 /* RelativePath */;
|
||||||
|
const pieces = url.path.split('/');
|
||||||
|
// We need to preserve the first piece always, so that we output a leading slash. The item at
|
||||||
|
// pieces[0] is an empty string.
|
||||||
|
let pointer = 1;
|
||||||
|
// Positive is the number of real directories we've output, used for popping a parent directory.
|
||||||
|
// Eg, "foo/bar/.." will have a positive 2, and we can decrement to be left with just "foo".
|
||||||
|
let positive = 0;
|
||||||
|
// We need to keep a trailing slash if we encounter an empty directory (eg, splitting "foo/" will
|
||||||
|
// generate `["foo", ""]` pieces). And, if we pop a parent directory. But once we encounter a
|
||||||
|
// real directory, we won't need to append, unless the other conditions happen again.
|
||||||
|
let addTrailingSlash = false;
|
||||||
|
for (let i = 1; i < pieces.length; i++) {
|
||||||
|
const piece = pieces[i];
|
||||||
|
// An empty directory, could be a trailing slash, or just a double "//" in the path.
|
||||||
|
if (!piece) {
|
||||||
|
addTrailingSlash = true;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
// If we encounter a real directory, then we don't need to append anymore.
|
||||||
|
addTrailingSlash = false;
|
||||||
|
// A current directory, which we can always drop.
|
||||||
|
if (piece === '.')
|
||||||
|
continue;
|
||||||
|
// A parent directory, we need to see if there are any real directories we can pop. Else, we
|
||||||
|
// have an excess of parents, and we'll need to keep the "..".
|
||||||
|
if (piece === '..') {
|
||||||
|
if (positive) {
|
||||||
|
addTrailingSlash = true;
|
||||||
|
positive--;
|
||||||
|
pointer--;
|
||||||
|
}
|
||||||
|
else if (rel) {
|
||||||
|
// If we're in a relativePath, then we need to keep the excess parents. Else, in an absolute
|
||||||
|
// URL, protocol relative URL, or an absolute path, we don't need to keep excess.
|
||||||
|
pieces[pointer++] = piece;
|
||||||
|
}
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
// We've encountered a real directory. Move it to the next insertion pointer, which accounts for
|
||||||
|
// any popped or dropped directories.
|
||||||
|
pieces[pointer++] = piece;
|
||||||
|
positive++;
|
||||||
|
}
|
||||||
|
let path = '';
|
||||||
|
for (let i = 1; i < pointer; i++) {
|
||||||
|
path += '/' + pieces[i];
|
||||||
|
}
|
||||||
|
if (!path || (addTrailingSlash && !path.endsWith('/..'))) {
|
||||||
|
path += '/';
|
||||||
|
}
|
||||||
|
url.path = path;
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Attempts to resolve `input` URL/path relative to `base`.
|
||||||
|
*/
|
||||||
|
function resolve(input, base) {
|
||||||
|
if (!input && !base)
|
||||||
|
return '';
|
||||||
|
const url = parseUrl(input);
|
||||||
|
let inputType = url.type;
|
||||||
|
if (base && inputType !== 7 /* Absolute */) {
|
||||||
|
const baseUrl = parseUrl(base);
|
||||||
|
const baseType = baseUrl.type;
|
||||||
|
switch (inputType) {
|
||||||
|
case 1 /* Empty */:
|
||||||
|
url.hash = baseUrl.hash;
|
||||||
|
// fall through
|
||||||
|
case 2 /* Hash */:
|
||||||
|
url.query = baseUrl.query;
|
||||||
|
// fall through
|
||||||
|
case 3 /* Query */:
|
||||||
|
case 4 /* RelativePath */:
|
||||||
|
mergePaths(url, baseUrl);
|
||||||
|
// fall through
|
||||||
|
case 5 /* AbsolutePath */:
|
||||||
|
// The host, user, and port are joined, you can't copy one without the others.
|
||||||
|
url.user = baseUrl.user;
|
||||||
|
url.host = baseUrl.host;
|
||||||
|
url.port = baseUrl.port;
|
||||||
|
// fall through
|
||||||
|
case 6 /* SchemeRelative */:
|
||||||
|
// The input doesn't have a schema at least, so we need to copy at least that over.
|
||||||
|
url.scheme = baseUrl.scheme;
|
||||||
|
}
|
||||||
|
if (baseType > inputType)
|
||||||
|
inputType = baseType;
|
||||||
|
}
|
||||||
|
normalizePath(url, inputType);
|
||||||
|
const queryHash = url.query + url.hash;
|
||||||
|
switch (inputType) {
|
||||||
|
// This is impossible, because of the empty checks at the start of the function.
|
||||||
|
// case UrlType.Empty:
|
||||||
|
case 2 /* Hash */:
|
||||||
|
case 3 /* Query */:
|
||||||
|
return queryHash;
|
||||||
|
case 4 /* RelativePath */: {
|
||||||
|
// The first char is always a "/", and we need it to be relative.
|
||||||
|
const path = url.path.slice(1);
|
||||||
|
if (!path)
|
||||||
|
return queryHash || '.';
|
||||||
|
if (isRelative(base || input) && !isRelative(path)) {
|
||||||
|
// If base started with a leading ".", or there is no base and input started with a ".",
|
||||||
|
// then we need to ensure that the relative path starts with a ".". We don't know if
|
||||||
|
// relative starts with a "..", though, so check before prepending.
|
||||||
|
return './' + path + queryHash;
|
||||||
|
}
|
||||||
|
return path + queryHash;
|
||||||
|
}
|
||||||
|
case 5 /* AbsolutePath */:
|
||||||
|
return url.path + queryHash;
|
||||||
|
default:
|
||||||
|
return url.scheme + '//' + url.user + url.host + url.port + url.path + queryHash;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return resolve;
|
||||||
|
|
||||||
|
}));
|
||||||
|
//# sourceMappingURL=resolve-uri.umd.js.map
|
1 node_modules/@jridgewell/resolve-uri/dist/resolve-uri.umd.js.map generated vendored Normal file
File diff suppressed because one or more lines are too long
4 node_modules/@jridgewell/resolve-uri/dist/types/resolve-uri.d.ts generated vendored Normal file
@@ -0,0 +1,4 @@
/**
 * Attempts to resolve `input` URL/path relative to `base`.
 */
export default function resolve(input: string, base: string | undefined): string;
69 node_modules/@jridgewell/resolve-uri/package.json generated vendored Normal file
@@ -0,0 +1,69 @@
{
  "name": "@jridgewell/resolve-uri",
  "version": "3.1.2",
  "description": "Resolve a URI relative to an optional base URI",
  "keywords": [
    "resolve",
    "uri",
    "url",
    "path"
  ],
  "author": "Justin Ridgewell <justin@ridgewell.name>",
  "license": "MIT",
  "repository": "https://github.com/jridgewell/resolve-uri",
  "main": "dist/resolve-uri.umd.js",
  "module": "dist/resolve-uri.mjs",
  "types": "dist/types/resolve-uri.d.ts",
  "exports": {
    ".": [
      {
        "types": "./dist/types/resolve-uri.d.ts",
        "browser": "./dist/resolve-uri.umd.js",
        "require": "./dist/resolve-uri.umd.js",
        "import": "./dist/resolve-uri.mjs"
      },
      "./dist/resolve-uri.umd.js"
    ],
    "./package.json": "./package.json"
  },
  "files": [
    "dist"
  ],
  "engines": {
    "node": ">=6.0.0"
  },
  "scripts": {
    "prebuild": "rm -rf dist",
    "build": "run-s -n build:*",
    "build:rollup": "rollup -c rollup.config.js",
    "build:ts": "tsc --project tsconfig.build.json",
    "lint": "run-s -n lint:*",
    "lint:prettier": "npm run test:lint:prettier -- --write",
    "lint:ts": "npm run test:lint:ts -- --fix",
    "pretest": "run-s build:rollup",
    "test": "run-s -n test:lint test:only",
    "test:debug": "mocha --inspect-brk",
    "test:lint": "run-s -n test:lint:*",
    "test:lint:prettier": "prettier --check '{src,test}/**/*.ts'",
    "test:lint:ts": "eslint '{src,test}/**/*.ts'",
    "test:only": "mocha",
    "test:coverage": "c8 mocha",
    "test:watch": "mocha --watch",
    "prepublishOnly": "npm run preversion",
    "preversion": "run-s test build"
  },
  "devDependencies": {
    "@jridgewell/resolve-uri-latest": "npm:@jridgewell/resolve-uri@*",
    "@rollup/plugin-typescript": "8.3.0",
    "@typescript-eslint/eslint-plugin": "5.10.0",
    "@typescript-eslint/parser": "5.10.0",
    "c8": "7.11.0",
    "eslint": "8.7.0",
    "eslint-config-prettier": "8.3.0",
    "mocha": "9.2.0",
    "npm-run-all": "4.1.5",
    "prettier": "2.5.1",
    "rollup": "2.66.0",
    "typescript": "4.5.5"
  }
}
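The `exports` map in this package.json serves a different build per condition; a small sketch of what that means for consumers (the CommonJS form below is runnable, the ESM form is noted in comments):

```js
// In an ES module, the "import" condition resolves to dist/resolve-uri.mjs:
//   import resolve from '@jridgewell/resolve-uri';
// In CommonJS, the "require" condition resolves to dist/resolve-uri.umd.js:
const resolve = require('@jridgewell/resolve-uri');

console.log(resolve('foo', 'https://example.com')); // 'https://example.com/foo'
```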
21 node_modules/@jridgewell/sourcemap-codec/LICENSE generated vendored Normal file
@@ -0,0 +1,21 @@
The MIT License

Copyright (c) 2015 Rich Harris

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
200 node_modules/@jridgewell/sourcemap-codec/README.md generated vendored Normal file
@@ -0,0 +1,200 @@
|
|||||||
|
# @jridgewell/sourcemap-codec
|
||||||
|
|
||||||
|
Encode/decode the `mappings` property of a [sourcemap](https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit).
|
||||||
|
|
||||||
|
|
||||||
|
## Why?
|
||||||
|
|
||||||
|
Sourcemaps are difficult to generate and manipulate, because the `mappings` property – the part that actually links the generated code back to the original source – is encoded using an obscure method called [Variable-length quantity](https://en.wikipedia.org/wiki/Variable-length_quantity). On top of that, each segment in the mapping contains offsets rather than absolute indices, which means that you can't look at a segment in isolation – you have to understand the whole sourcemap.
|
||||||
|
|
||||||
|
This package makes the process slightly easier.
|
||||||
|
|
||||||
|
|
||||||
|
## Installation
|
||||||
|
|
||||||
|
```bash
|
||||||
|
npm install @jridgewell/sourcemap-codec
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { encode, decode } from '@jridgewell/sourcemap-codec';
|
||||||
|
|
||||||
|
var decoded = decode( ';EAEEA,EAAE,EAAC,CAAE;ECQY,UACC' );
|
||||||
|
|
||||||
|
assert.deepEqual( decoded, [
|
||||||
|
// the first line (of the generated code) has no mappings,
|
||||||
|
// as shown by the starting semi-colon (which separates lines)
|
||||||
|
[],
|
||||||
|
|
||||||
|
// the second line contains four (comma-separated) segments
|
||||||
|
[
|
||||||
|
// segments are encoded as you'd expect:
|
||||||
|
// [ generatedCodeColumn, sourceIndex, sourceCodeLine, sourceCodeColumn, nameIndex ]
|
||||||
|
|
||||||
|
// i.e. the first segment begins at column 2, and maps back to the second column
|
||||||
|
// of the second line (both zero-based) of the 0th source, and uses the 0th
|
||||||
|
// name in the `map.names` array
|
||||||
|
[ 2, 0, 2, 2, 0 ],
|
||||||
|
|
||||||
|
// the remaining segments are 4-length rather than 5-length,
|
||||||
|
// because they don't map a name
|
||||||
|
[ 4, 0, 2, 4 ],
|
||||||
|
[ 6, 0, 2, 5 ],
|
||||||
|
[ 7, 0, 2, 7 ]
|
||||||
|
],
|
||||||
|
|
||||||
|
// the final line contains two segments
|
||||||
|
[
|
||||||
|
[ 2, 1, 10, 19 ],
|
||||||
|
[ 12, 1, 11, 20 ]
|
||||||
|
]
|
||||||
|
]);
|
||||||
|
|
||||||
|
var encoded = encode( decoded );
|
||||||
|
assert.equal( encoded, ';EAEEA,EAAE,EAAC,CAAE;ECQY,UACC' );
|
||||||
|
```
|
||||||
|
|
||||||
|
## Benchmarks
|
||||||
|
|
||||||
|
```
|
||||||
|
node v18.0.0
|
||||||
|
|
||||||
|
amp.js.map - 45120 segments
|
||||||
|
|
||||||
|
Decode Memory Usage:
|
||||||
|
@jridgewell/sourcemap-codec 5479160 bytes
|
||||||
|
sourcemap-codec 5659336 bytes
|
||||||
|
source-map-0.6.1 17144440 bytes
|
||||||
|
source-map-0.8.0 6867424 bytes
|
||||||
|
Smallest memory usage is @jridgewell/sourcemap-codec
|
||||||
|
|
||||||
|
Decode speed:
|
||||||
|
decode: @jridgewell/sourcemap-codec x 502 ops/sec ±1.03% (90 runs sampled)
|
||||||
|
decode: sourcemap-codec x 445 ops/sec ±0.97% (92 runs sampled)
|
||||||
|
decode: source-map-0.6.1 x 36.01 ops/sec ±1.64% (49 runs sampled)
|
||||||
|
decode: source-map-0.8.0 x 367 ops/sec ±0.04% (95 runs sampled)
|
||||||
|
Fastest is decode: @jridgewell/sourcemap-codec
|
||||||
|
|
||||||
|
Encode Memory Usage:
|
||||||
|
@jridgewell/sourcemap-codec 1261620 bytes
|
||||||
|
sourcemap-codec 9119248 bytes
|
||||||
|
source-map-0.6.1 8968560 bytes
|
||||||
|
source-map-0.8.0 8952952 bytes
|
||||||
|
Smallest memory usage is @jridgewell/sourcemap-codec
|
||||||
|
|
||||||
|
Encode speed:
|
||||||
|
encode: @jridgewell/sourcemap-codec x 738 ops/sec ±0.42% (98 runs sampled)
|
||||||
|
encode: sourcemap-codec x 238 ops/sec ±0.73% (88 runs sampled)
|
||||||
|
encode: source-map-0.6.1 x 162 ops/sec ±0.43% (84 runs sampled)
|
||||||
|
encode: source-map-0.8.0 x 191 ops/sec ±0.34% (90 runs sampled)
|
||||||
|
Fastest is encode: @jridgewell/sourcemap-codec
|
||||||
|
|
||||||
|
|
||||||
|
***
|
||||||
|
|
||||||
|
|
||||||
|
babel.min.js.map - 347793 segments
|
||||||
|
|
||||||
|
Decode Memory Usage:
|
||||||
|
@jridgewell/sourcemap-codec 35338184 bytes
|
||||||
|
sourcemap-codec 35922736 bytes
|
||||||
|
source-map-0.6.1 62366360 bytes
|
||||||
|
source-map-0.8.0 44337416 bytes
|
||||||
|
Smallest memory usage is @jridgewell/sourcemap-codec
|
||||||
|
|
||||||
|
Decode speed:
|
||||||
|
decode: @jridgewell/sourcemap-codec x 40.35 ops/sec ±4.47% (54 runs sampled)
|
||||||
|
decode: sourcemap-codec x 36.76 ops/sec ±3.67% (51 runs sampled)
|
||||||
|
decode: source-map-0.6.1 x 4.44 ops/sec ±2.15% (16 runs sampled)
|
||||||
|
decode: source-map-0.8.0 x 59.35 ops/sec ±0.05% (78 runs sampled)
|
||||||
|
Fastest is decode: source-map-0.8.0
|
||||||
|
|
||||||
|
Encode Memory Usage:
|
||||||
|
@jridgewell/sourcemap-codec 7212604 bytes
|
||||||
|
sourcemap-codec 21421456 bytes
|
||||||
|
source-map-0.6.1 25286888 bytes
|
||||||
|
source-map-0.8.0 25498744 bytes
|
||||||
|
Smallest memory usage is @jridgewell/sourcemap-codec
|
||||||
|
|
||||||
|
Encode speed:
|
||||||
|
encode: @jridgewell/sourcemap-codec x 112 ops/sec ±0.13% (84 runs sampled)
|
||||||
|
encode: sourcemap-codec x 30.23 ops/sec ±2.76% (53 runs sampled)
|
||||||
|
encode: source-map-0.6.1 x 19.43 ops/sec ±3.70% (37 runs sampled)
|
||||||
|
encode: source-map-0.8.0 x 19.40 ops/sec ±3.26% (37 runs sampled)
|
||||||
|
Fastest is encode: @jridgewell/sourcemap-codec
|
||||||
|
|
||||||
|
|
||||||
|
***
|
||||||
|
|
||||||
|
|
||||||
|
preact.js.map - 1992 segments
|
||||||
|
|
||||||
|
Decode Memory Usage:
|
||||||
|
@jridgewell/sourcemap-codec 500272 bytes
|
||||||
|
sourcemap-codec 516864 bytes
|
||||||
|
source-map-0.6.1 1596672 bytes
|
||||||
|
source-map-0.8.0 517272 bytes
|
||||||
|
Smallest memory usage is @jridgewell/sourcemap-codec
|
||||||
|
|
||||||
|
Decode speed:
|
||||||
|
decode: @jridgewell/sourcemap-codec x 16,137 ops/sec ±0.17% (99 runs sampled)
|
||||||
|
decode: sourcemap-codec x 12,139 ops/sec ±0.13% (99 runs sampled)
|
||||||
|
decode: source-map-0.6.1 x 1,264 ops/sec ±0.12% (100 runs sampled)
|
||||||
|
decode: source-map-0.8.0 x 9,894 ops/sec ±0.08% (101 runs sampled)
|
||||||
|
Fastest is decode: @jridgewell/sourcemap-codec
|
||||||
|
|
||||||
|
Encode Memory Usage:
|
||||||
|
@jridgewell/sourcemap-codec 321026 bytes
|
||||||
|
sourcemap-codec 830832 bytes
|
||||||
|
source-map-0.6.1 586608 bytes
|
||||||
|
source-map-0.8.0 586680 bytes
|
||||||
|
Smallest memory usage is @jridgewell/sourcemap-codec
|
||||||
|
|
||||||
|
Encode speed:
|
||||||
|
encode: @jridgewell/sourcemap-codec x 19,876 ops/sec ±0.78% (95 runs sampled)
|
||||||
|
encode: sourcemap-codec x 6,983 ops/sec ±0.15% (100 runs sampled)
|
||||||
|
encode: source-map-0.6.1 x 5,070 ops/sec ±0.12% (102 runs sampled)
|
||||||
|
encode: source-map-0.8.0 x 5,641 ops/sec ±0.17% (100 runs sampled)
|
||||||
|
Fastest is encode: @jridgewell/sourcemap-codec
|
||||||
|
|
||||||
|
|
||||||
|
***
|
||||||
|
|
||||||
|
|
||||||
|
react.js.map - 5726 segments
|
||||||
|
|
||||||
|
Decode Memory Usage:
|
||||||
|
@jridgewell/sourcemap-codec 734848 bytes
|
||||||
|
sourcemap-codec 954200 bytes
|
||||||
|
source-map-0.6.1 2276432 bytes
|
||||||
|
source-map-0.8.0 955488 bytes
|
||||||
|
Smallest memory usage is @jridgewell/sourcemap-codec
|
||||||
|
|
||||||
|
Decode speed:
|
||||||
|
decode: @jridgewell/sourcemap-codec x 5,723 ops/sec ±0.12% (98 runs sampled)
|
||||||
|
decode: sourcemap-codec x 4,555 ops/sec ±0.09% (101 runs sampled)
|
||||||
|
decode: source-map-0.6.1 x 437 ops/sec ±0.11% (93 runs sampled)
|
||||||
|
decode: source-map-0.8.0 x 3,441 ops/sec ±0.15% (100 runs sampled)
|
||||||
|
Fastest is decode: @jridgewell/sourcemap-codec
|
||||||
|
|
||||||
|
Encode Memory Usage:
|
||||||
|
@jridgewell/sourcemap-codec 638672 bytes
|
||||||
|
sourcemap-codec 1109840 bytes
|
||||||
|
source-map-0.6.1 1321224 bytes
|
||||||
|
source-map-0.8.0 1324448 bytes
|
||||||
|
Smallest memory usage is @jridgewell/sourcemap-codec
|
||||||
|
|
||||||
|
Encode speed:
|
||||||
|
encode: @jridgewell/sourcemap-codec x 6,801 ops/sec ±0.48% (98 runs sampled)
|
||||||
|
encode: sourcemap-codec x 2,533 ops/sec ±0.13% (101 runs sampled)
|
||||||
|
encode: source-map-0.6.1 x 2,248 ops/sec ±0.08% (100 runs sampled)
|
||||||
|
encode: source-map-0.8.0 x 2,303 ops/sec ±0.15% (100 runs sampled)
|
||||||
|
Fastest is encode: @jridgewell/sourcemap-codec
|
||||||
|
```
|
||||||
|
|
||||||
|
# License
|
||||||
|
|
||||||
|
MIT
|
164 node_modules/@jridgewell/sourcemap-codec/dist/sourcemap-codec.mjs generated vendored Normal file
@@ -0,0 +1,164 @@
const comma = ','.charCodeAt(0);
const semicolon = ';'.charCodeAt(0);
const chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/';
const intToChar = new Uint8Array(64); // 64 possible chars.
const charToInt = new Uint8Array(128); // z is 122 in ASCII
for (let i = 0; i < chars.length; i++) {
    const c = chars.charCodeAt(i);
    intToChar[i] = c;
    charToInt[c] = i;
}
// Provide a fallback for older environments.
const td = typeof TextDecoder !== 'undefined'
    ? /* #__PURE__ */ new TextDecoder()
    : typeof Buffer !== 'undefined'
        ? {
            decode(buf) {
                const out = Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength);
                return out.toString();
            },
        }
        : {
            decode(buf) {
                let out = '';
                for (let i = 0; i < buf.length; i++) {
                    out += String.fromCharCode(buf[i]);
                }
                return out;
            },
        };
function decode(mappings) {
    const state = new Int32Array(5);
    const decoded = [];
    let index = 0;
    do {
        const semi = indexOf(mappings, index);
        const line = [];
        let sorted = true;
        let lastCol = 0;
        state[0] = 0;
        for (let i = index; i < semi; i++) {
            let seg;
            i = decodeInteger(mappings, i, state, 0); // genColumn
            const col = state[0];
            if (col < lastCol)
                sorted = false;
            lastCol = col;
            if (hasMoreVlq(mappings, i, semi)) {
                i = decodeInteger(mappings, i, state, 1); // sourcesIndex
                i = decodeInteger(mappings, i, state, 2); // sourceLine
                i = decodeInteger(mappings, i, state, 3); // sourceColumn
                if (hasMoreVlq(mappings, i, semi)) {
                    i = decodeInteger(mappings, i, state, 4); // namesIndex
                    seg = [col, state[1], state[2], state[3], state[4]];
                }
                else {
                    seg = [col, state[1], state[2], state[3]];
                }
            }
            else {
                seg = [col];
            }
            line.push(seg);
        }
        if (!sorted)
            sort(line);
        decoded.push(line);
        index = semi + 1;
    } while (index <= mappings.length);
    return decoded;
}
function indexOf(mappings, index) {
    const idx = mappings.indexOf(';', index);
    return idx === -1 ? mappings.length : idx;
}
function decodeInteger(mappings, pos, state, j) {
    let value = 0;
    let shift = 0;
    let integer = 0;
    do {
        const c = mappings.charCodeAt(pos++);
        integer = charToInt[c];
        value |= (integer & 31) << shift;
        shift += 5;
    } while (integer & 32);
    const shouldNegate = value & 1;
    value >>>= 1;
    if (shouldNegate) {
        value = -0x80000000 | -value;
    }
    state[j] += value;
    return pos;
}
function hasMoreVlq(mappings, i, length) {
    if (i >= length)
        return false;
    return mappings.charCodeAt(i) !== comma;
}
function sort(line) {
    line.sort(sortComparator);
}
function sortComparator(a, b) {
    return a[0] - b[0];
}
function encode(decoded) {
    const state = new Int32Array(5);
    const bufLength = 1024 * 16;
    const subLength = bufLength - 36;
    const buf = new Uint8Array(bufLength);
    const sub = buf.subarray(0, subLength);
    let pos = 0;
    let out = '';
    for (let i = 0; i < decoded.length; i++) {
        const line = decoded[i];
        if (i > 0) {
            if (pos === bufLength) {
                out += td.decode(buf);
                pos = 0;
            }
            buf[pos++] = semicolon;
        }
        if (line.length === 0)
            continue;
        state[0] = 0;
        for (let j = 0; j < line.length; j++) {
            const segment = line[j];
            // We can push up to 5 ints, each int can take at most 7 chars, and we
            // may push a comma.
            if (pos > subLength) {
                out += td.decode(sub);
                buf.copyWithin(0, subLength, pos);
                pos -= subLength;
            }
            if (j > 0)
                buf[pos++] = comma;
            pos = encodeInteger(buf, pos, state, segment, 0); // genColumn
            if (segment.length === 1)
                continue;
            pos = encodeInteger(buf, pos, state, segment, 1); // sourcesIndex
            pos = encodeInteger(buf, pos, state, segment, 2); // sourceLine
            pos = encodeInteger(buf, pos, state, segment, 3); // sourceColumn
            if (segment.length === 4)
                continue;
            pos = encodeInteger(buf, pos, state, segment, 4); // namesIndex
        }
    }
    return out + td.decode(buf.subarray(0, pos));
}
function encodeInteger(buf, pos, state, segment, j) {
    const next = segment[j];
    let num = next - state[j];
    state[j] = next;
    num = num < 0 ? (-num << 1) | 1 : num << 1;
    do {
        let clamped = num & 0b011111;
        num >>>= 5;
        if (num > 0)
            clamped |= 0b100000;
        buf[pos++] = intToChar[clamped];
    } while (num > 0);
    return pos;
}

export { decode, encode };
//# sourceMappingURL=sourcemap-codec.mjs.map
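The `encodeInteger`/`decodeInteger` pair above implements standard base64 VLQ: each field is delta-encoded against the previous segment, the sign is folded into the lowest bit (zigzag), and the magnitude is emitted in 5-bit groups with a continuation bit. A minimal standalone sketch of that encoding step (the `encodeVlq` name is illustrative, not part of the package):

```typescript
const BASE64 = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/';

// Illustrative re-derivation of the VLQ step used by encodeInteger above.
function encodeVlq(value: number): string {
  // Zigzag: fold the sign into the lowest bit.
  let num = value < 0 ? (-value << 1) | 1 : value << 1;
  let out = '';
  do {
    let digit = num & 0b011111; // lower 5 bits of the remaining magnitude
    num >>>= 5;
    if (num > 0) digit |= 0b100000; // continuation bit: more groups follow
    out += BASE64[digit];
  } while (num > 0);
  return out;
}

// 0 -> 'A', -2 -> 'F', 16 -> 'gB' (needs two base64 digits)
console.log(encodeVlq(0), encodeVlq(-2), encodeVlq(16));
```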
1
node_modules/@jridgewell/sourcemap-codec/dist/sourcemap-codec.mjs.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
175
node_modules/@jridgewell/sourcemap-codec/dist/sourcemap-codec.umd.js
generated
vendored
Normal file
@ -0,0 +1,175 @@
|
|||||||
|
(function (global, factory) {
|
||||||
|
typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports) :
|
||||||
|
typeof define === 'function' && define.amd ? define(['exports'], factory) :
|
||||||
|
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.sourcemapCodec = {}));
|
||||||
|
})(this, (function (exports) { 'use strict';
|
||||||
|
|
||||||
|
const comma = ','.charCodeAt(0);
|
||||||
|
const semicolon = ';'.charCodeAt(0);
|
||||||
|
const chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/';
|
||||||
|
const intToChar = new Uint8Array(64); // 64 possible chars.
|
||||||
|
const charToInt = new Uint8Array(128); // z is 122 in ASCII
|
||||||
|
for (let i = 0; i < chars.length; i++) {
|
||||||
|
const c = chars.charCodeAt(i);
|
||||||
|
intToChar[i] = c;
|
||||||
|
charToInt[c] = i;
|
||||||
|
}
|
||||||
|
// Provide a fallback for older environments.
|
||||||
|
const td = typeof TextDecoder !== 'undefined'
|
||||||
|
? /* #__PURE__ */ new TextDecoder()
|
||||||
|
: typeof Buffer !== 'undefined'
|
||||||
|
? {
|
||||||
|
decode(buf) {
|
||||||
|
const out = Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength);
|
||||||
|
return out.toString();
|
||||||
|
},
|
||||||
|
}
|
||||||
|
: {
|
||||||
|
decode(buf) {
|
||||||
|
let out = '';
|
||||||
|
for (let i = 0; i < buf.length; i++) {
|
||||||
|
out += String.fromCharCode(buf[i]);
|
||||||
|
}
|
||||||
|
return out;
|
||||||
|
},
|
||||||
|
};
|
||||||
|
function decode(mappings) {
|
||||||
|
const state = new Int32Array(5);
|
||||||
|
const decoded = [];
|
||||||
|
let index = 0;
|
||||||
|
do {
|
||||||
|
const semi = indexOf(mappings, index);
|
||||||
|
const line = [];
|
||||||
|
let sorted = true;
|
||||||
|
let lastCol = 0;
|
||||||
|
state[0] = 0;
|
||||||
|
for (let i = index; i < semi; i++) {
|
||||||
|
let seg;
|
||||||
|
i = decodeInteger(mappings, i, state, 0); // genColumn
|
||||||
|
const col = state[0];
|
||||||
|
if (col < lastCol)
|
||||||
|
sorted = false;
|
||||||
|
lastCol = col;
|
||||||
|
if (hasMoreVlq(mappings, i, semi)) {
|
||||||
|
i = decodeInteger(mappings, i, state, 1); // sourcesIndex
|
||||||
|
i = decodeInteger(mappings, i, state, 2); // sourceLine
|
||||||
|
i = decodeInteger(mappings, i, state, 3); // sourceColumn
|
||||||
|
if (hasMoreVlq(mappings, i, semi)) {
|
||||||
|
i = decodeInteger(mappings, i, state, 4); // namesIndex
|
||||||
|
seg = [col, state[1], state[2], state[3], state[4]];
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
seg = [col, state[1], state[2], state[3]];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
seg = [col];
|
||||||
|
}
|
||||||
|
line.push(seg);
|
||||||
|
}
|
||||||
|
if (!sorted)
|
||||||
|
sort(line);
|
||||||
|
decoded.push(line);
|
||||||
|
index = semi + 1;
|
||||||
|
} while (index <= mappings.length);
|
||||||
|
return decoded;
|
||||||
|
}
|
||||||
|
function indexOf(mappings, index) {
|
||||||
|
const idx = mappings.indexOf(';', index);
|
||||||
|
return idx === -1 ? mappings.length : idx;
|
||||||
|
}
|
||||||
|
function decodeInteger(mappings, pos, state, j) {
|
||||||
|
let value = 0;
|
||||||
|
let shift = 0;
|
||||||
|
let integer = 0;
|
||||||
|
do {
|
||||||
|
const c = mappings.charCodeAt(pos++);
|
||||||
|
integer = charToInt[c];
|
||||||
|
value |= (integer & 31) << shift;
|
||||||
|
shift += 5;
|
||||||
|
} while (integer & 32);
|
||||||
|
const shouldNegate = value & 1;
|
||||||
|
value >>>= 1;
|
||||||
|
if (shouldNegate) {
|
||||||
|
value = -0x80000000 | -value;
|
||||||
|
}
|
||||||
|
state[j] += value;
|
||||||
|
return pos;
|
||||||
|
}
|
||||||
|
function hasMoreVlq(mappings, i, length) {
|
||||||
|
if (i >= length)
|
||||||
|
return false;
|
||||||
|
return mappings.charCodeAt(i) !== comma;
|
||||||
|
}
|
||||||
|
function sort(line) {
|
||||||
|
line.sort(sortComparator);
|
||||||
|
}
|
||||||
|
function sortComparator(a, b) {
|
||||||
|
return a[0] - b[0];
|
||||||
|
}
|
||||||
|
function encode(decoded) {
|
||||||
|
const state = new Int32Array(5);
|
||||||
|
const bufLength = 1024 * 16;
|
||||||
|
const subLength = bufLength - 36;
|
||||||
|
const buf = new Uint8Array(bufLength);
|
||||||
|
const sub = buf.subarray(0, subLength);
|
||||||
|
let pos = 0;
|
||||||
|
let out = '';
|
||||||
|
for (let i = 0; i < decoded.length; i++) {
|
||||||
|
const line = decoded[i];
|
||||||
|
if (i > 0) {
|
||||||
|
if (pos === bufLength) {
|
||||||
|
out += td.decode(buf);
|
||||||
|
pos = 0;
|
||||||
|
}
|
||||||
|
buf[pos++] = semicolon;
|
||||||
|
}
|
||||||
|
if (line.length === 0)
|
||||||
|
continue;
|
||||||
|
state[0] = 0;
|
||||||
|
for (let j = 0; j < line.length; j++) {
|
||||||
|
const segment = line[j];
|
||||||
|
// We can push up to 5 ints, each int can take at most 7 chars, and we
|
||||||
|
// may push a comma.
|
||||||
|
if (pos > subLength) {
|
||||||
|
out += td.decode(sub);
|
||||||
|
buf.copyWithin(0, subLength, pos);
|
||||||
|
pos -= subLength;
|
||||||
|
}
|
||||||
|
if (j > 0)
|
||||||
|
buf[pos++] = comma;
|
||||||
|
pos = encodeInteger(buf, pos, state, segment, 0); // genColumn
|
||||||
|
if (segment.length === 1)
|
||||||
|
continue;
|
||||||
|
pos = encodeInteger(buf, pos, state, segment, 1); // sourcesIndex
|
||||||
|
pos = encodeInteger(buf, pos, state, segment, 2); // sourceLine
|
||||||
|
pos = encodeInteger(buf, pos, state, segment, 3); // sourceColumn
|
||||||
|
if (segment.length === 4)
|
||||||
|
continue;
|
||||||
|
pos = encodeInteger(buf, pos, state, segment, 4); // namesIndex
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return out + td.decode(buf.subarray(0, pos));
|
||||||
|
}
|
||||||
|
function encodeInteger(buf, pos, state, segment, j) {
|
||||||
|
const next = segment[j];
|
||||||
|
let num = next - state[j];
|
||||||
|
state[j] = next;
|
||||||
|
num = num < 0 ? (-num << 1) | 1 : num << 1;
|
||||||
|
do {
|
||||||
|
let clamped = num & 0b011111;
|
||||||
|
num >>>= 5;
|
||||||
|
if (num > 0)
|
||||||
|
clamped |= 0b100000;
|
||||||
|
buf[pos++] = intToChar[clamped];
|
||||||
|
} while (num > 0);
|
||||||
|
return pos;
|
||||||
|
}
|
||||||
|
|
||||||
|
exports.decode = decode;
|
||||||
|
exports.encode = encode;
|
||||||
|
|
||||||
|
Object.defineProperty(exports, '__esModule', { value: true });
|
||||||
|
|
||||||
|
}));
|
||||||
|
//# sourceMappingURL=sourcemap-codec.umd.js.map
|
1
node_modules/@jridgewell/sourcemap-codec/dist/sourcemap-codec.umd.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
6
node_modules/@jridgewell/sourcemap-codec/dist/types/sourcemap-codec.d.ts
generated
vendored
Normal file
@ -0,0 +1,6 @@
export declare type SourceMapSegment = [number] | [number, number, number, number] | [number, number, number, number, number];
export declare type SourceMapLine = SourceMapSegment[];
export declare type SourceMapMappings = SourceMapLine[];
export declare function decode(mappings: string): SourceMapMappings;
export declare function encode(decoded: SourceMapMappings): string;
export declare function encode(decoded: Readonly<SourceMapMappings>): string;
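The declarations above are the whole public surface: `decode` turns a `mappings` string into nested arrays of segments, and `encode` reverses it. A minimal round-trip sketch against that API; the sample string reuses `'KAyCIA'` from the trace-mapping README further down, and the expected segment array is an assumption from hand-decoding it:

```typescript
import { decode, encode } from '@jridgewell/sourcemap-codec';
import assert from 'node:assert';

// One generated line with one segment:
// [genColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex]
const decoded = decode('KAyCIA');
assert.deepEqual(decoded, [[[5, 0, 41, 4, 0]]]);

// Re-encoding the decoded structure yields the original VLQ string.
assert.equal(encode(decoded), 'KAyCIA');
```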
74
node_modules/@jridgewell/sourcemap-codec/package.json
generated
vendored
Normal file
@ -0,0 +1,74 @@
{
  "name": "@jridgewell/sourcemap-codec",
  "version": "1.4.15",
  "description": "Encode/decode sourcemap mappings",
  "keywords": [
    "sourcemap",
    "vlq"
  ],
  "main": "dist/sourcemap-codec.umd.js",
  "module": "dist/sourcemap-codec.mjs",
  "types": "dist/types/sourcemap-codec.d.ts",
  "files": [
    "dist"
  ],
  "exports": {
    ".": [
      {
        "types": "./dist/types/sourcemap-codec.d.ts",
        "browser": "./dist/sourcemap-codec.umd.js",
        "require": "./dist/sourcemap-codec.umd.js",
        "import": "./dist/sourcemap-codec.mjs"
      },
      "./dist/sourcemap-codec.umd.js"
    ],
    "./package.json": "./package.json"
  },
  "scripts": {
    "benchmark": "run-s build:rollup benchmark:*",
    "benchmark:install": "cd benchmark && npm install",
    "benchmark:only": "node --expose-gc benchmark/index.js",
    "build": "run-s -n build:*",
    "build:rollup": "rollup -c rollup.config.js",
    "build:ts": "tsc --project tsconfig.build.json",
    "lint": "run-s -n lint:*",
    "lint:prettier": "npm run test:lint:prettier -- --write",
    "lint:ts": "npm run test:lint:ts -- --fix",
    "prebuild": "rm -rf dist",
    "prepublishOnly": "npm run preversion",
    "preversion": "run-s test build",
    "pretest": "run-s build:rollup",
    "test": "run-s -n test:lint test:only",
    "test:debug": "mocha --inspect-brk",
    "test:lint": "run-s -n test:lint:*",
    "test:lint:prettier": "prettier --check '{src,test}/**/*.ts'",
    "test:lint:ts": "eslint '{src,test}/**/*.ts'",
    "test:only": "mocha",
    "test:coverage": "c8 mocha",
    "test:watch": "mocha --watch"
  },
  "repository": {
    "type": "git",
    "url": "git+https://github.com/jridgewell/sourcemap-codec.git"
  },
  "author": "Rich Harris",
  "license": "MIT",
  "devDependencies": {
    "@rollup/plugin-typescript": "8.3.0",
    "@types/node": "17.0.15",
    "@typescript-eslint/eslint-plugin": "5.10.0",
    "@typescript-eslint/parser": "5.10.0",
    "benchmark": "2.1.4",
    "c8": "7.11.2",
    "eslint": "8.7.0",
    "eslint-config-prettier": "8.3.0",
    "mocha": "9.2.0",
    "npm-run-all": "4.1.5",
    "prettier": "2.5.1",
    "rollup": "2.64.0",
    "source-map": "0.6.1",
    "source-map-js": "1.0.2",
    "sourcemap-codec": "1.4.8",
    "typescript": "4.5.4"
  }
}
19
node_modules/@jridgewell/trace-mapping/LICENSE
generated
vendored
Normal file
@ -0,0 +1,19 @@
Copyright 2022 Justin Ridgewell <justin@ridgewell.name>

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
193
node_modules/@jridgewell/trace-mapping/README.md
generated
vendored
Normal file
@ -0,0 +1,193 @@
# @jridgewell/trace-mapping

> Trace the original position through a source map

`trace-mapping` allows you to take the line and column of an output file and trace it to the
original location in the source file through a source map.

You may already be familiar with the [`source-map`][source-map] package's `SourceMapConsumer`. This
provides the same `originalPositionFor` and `generatedPositionFor` API, without requiring WASM.

## Installation

```sh
npm install @jridgewell/trace-mapping
```

## Usage

```typescript
import { TraceMap, originalPositionFor, generatedPositionFor } from '@jridgewell/trace-mapping';

const tracer = new TraceMap({
  version: 3,
  sources: ['input.js'],
  names: ['foo'],
  mappings: 'KAyCIA',
});

// Lines start at line 1, columns at column 0.
const traced = originalPositionFor(tracer, { line: 1, column: 5 });
assert.deepEqual(traced, {
  source: 'input.js',
  line: 42,
  column: 4,
  name: 'foo',
});

const generated = generatedPositionFor(tracer, {
  source: 'input.js',
  line: 42,
  column: 4,
});
assert.deepEqual(generated, {
  line: 1,
  column: 5,
});
```

We also provide a lower level API to get the actual segment that matches our line and column. Unlike
`originalPositionFor`, `traceSegment` uses a 0-base for `line`:

```typescript
import { traceSegment } from '@jridgewell/trace-mapping';

// line is 0-base.
const traced = traceSegment(tracer, /* line */ 0, /* column */ 5);

// Segments are [outputColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex]
// Again, line is 0-base and so is sourceLine
assert.deepEqual(traced, [5, 0, 41, 4, 0]);
```

### SectionedSourceMaps

The sourcemap spec defines a special `sections` field that's designed to handle concatenation of
output code with associated sourcemaps. This type of sourcemap is rarely used (no major build tool
produces it), but if you are hand coding a concatenation you may need it. We provide an `AnyMap`
helper that can receive either a regular sourcemap or a `SectionedSourceMap` and returns a
`TraceMap` instance:

```typescript
import { AnyMap } from '@jridgewell/trace-mapping';
const fooOutput = 'foo';
const barOutput = 'bar';
const output = [fooOutput, barOutput].join('\n');

const sectioned = new AnyMap({
  version: 3,
  sections: [
    {
      // 0-base line and column
      offset: { line: 0, column: 0 },
      // fooOutput's sourcemap
      map: {
        version: 3,
        sources: ['foo.js'],
        names: ['foo'],
        mappings: 'AAAAA',
      },
    },
    {
      // barOutput's sourcemap will not affect the first line, only the second
      offset: { line: 1, column: 0 },
      map: {
        version: 3,
        sources: ['bar.js'],
        names: ['bar'],
        mappings: 'AAAAA',
      },
    },
  ],
});

const traced = originalPositionFor(sectioned, {
  line: 2,
  column: 0,
});

assert.deepEqual(traced, {
  source: 'bar.js',
  line: 1,
  column: 0,
  name: 'bar',
});
```

## Benchmarks

```
node v18.0.0

amp.js.map
trace-mapping: decoded JSON input x 183 ops/sec ±0.41% (87 runs sampled)
trace-mapping: encoded JSON input x 384 ops/sec ±0.89% (89 runs sampled)
trace-mapping: decoded Object input x 3,085 ops/sec ±0.24% (100 runs sampled)
trace-mapping: encoded Object input x 452 ops/sec ±0.80% (84 runs sampled)
source-map-js: encoded Object input x 88.82 ops/sec ±0.45% (77 runs sampled)
source-map-0.6.1: encoded Object input x 38.39 ops/sec ±1.88% (52 runs sampled)
Fastest is trace-mapping: decoded Object input

trace-mapping: decoded originalPositionFor x 4,025,347 ops/sec ±0.15% (97 runs sampled)
trace-mapping: encoded originalPositionFor x 3,333,136 ops/sec ±1.26% (90 runs sampled)
source-map-js: encoded originalPositionFor x 824,978 ops/sec ±1.06% (94 runs sampled)
source-map-0.6.1: encoded originalPositionFor x 741,300 ops/sec ±0.93% (92 runs sampled)
source-map-0.8.0: encoded originalPositionFor x 2,587,603 ops/sec ±0.75% (97 runs sampled)
Fastest is trace-mapping: decoded originalPositionFor

***

babel.min.js.map
trace-mapping: decoded JSON input x 17.43 ops/sec ±8.81% (33 runs sampled)
trace-mapping: encoded JSON input x 34.18 ops/sec ±4.67% (50 runs sampled)
trace-mapping: decoded Object input x 1,010 ops/sec ±0.41% (98 runs sampled)
trace-mapping: encoded Object input x 39.45 ops/sec ±4.01% (52 runs sampled)
source-map-js: encoded Object input x 6.57 ops/sec ±3.04% (21 runs sampled)
source-map-0.6.1: encoded Object input x 4.23 ops/sec ±2.93% (15 runs sampled)
Fastest is trace-mapping: decoded Object input

trace-mapping: decoded originalPositionFor x 7,576,265 ops/sec ±0.74% (96 runs sampled)
trace-mapping: encoded originalPositionFor x 5,019,743 ops/sec ±0.74% (94 runs sampled)
source-map-js: encoded originalPositionFor x 3,396,137 ops/sec ±42.32% (95 runs sampled)
source-map-0.6.1: encoded originalPositionFor x 3,753,176 ops/sec ±0.72% (95 runs sampled)
source-map-0.8.0: encoded originalPositionFor x 6,423,633 ops/sec ±0.74% (95 runs sampled)
Fastest is trace-mapping: decoded originalPositionFor

***

preact.js.map
trace-mapping: decoded JSON input x 3,499 ops/sec ±0.18% (98 runs sampled)
trace-mapping: encoded JSON input x 6,078 ops/sec ±0.25% (99 runs sampled)
trace-mapping: decoded Object input x 254,788 ops/sec ±0.13% (100 runs sampled)
trace-mapping: encoded Object input x 14,063 ops/sec ±0.27% (94 runs sampled)
source-map-js: encoded Object input x 2,465 ops/sec ±0.25% (98 runs sampled)
source-map-0.6.1: encoded Object input x 1,174 ops/sec ±1.90% (95 runs sampled)
Fastest is trace-mapping: decoded Object input

trace-mapping: decoded originalPositionFor x 7,720,171 ops/sec ±0.14% (97 runs sampled)
trace-mapping: encoded originalPositionFor x 6,864,485 ops/sec ±0.16% (101 runs sampled)
source-map-js: encoded originalPositionFor x 2,387,219 ops/sec ±0.28% (98 runs sampled)
source-map-0.6.1: encoded originalPositionFor x 1,565,339 ops/sec ±0.32% (101 runs sampled)
source-map-0.8.0: encoded originalPositionFor x 3,819,732 ops/sec ±0.38% (98 runs sampled)
Fastest is trace-mapping: decoded originalPositionFor

***

react.js.map
trace-mapping: decoded JSON input x 1,719 ops/sec ±0.19% (99 runs sampled)
trace-mapping: encoded JSON input x 4,284 ops/sec ±0.51% (99 runs sampled)
trace-mapping: decoded Object input x 94,668 ops/sec ±0.08% (99 runs sampled)
trace-mapping: encoded Object input x 5,287 ops/sec ±0.24% (99 runs sampled)
source-map-js: encoded Object input x 814 ops/sec ±0.20% (98 runs sampled)
source-map-0.6.1: encoded Object input x 429 ops/sec ±0.24% (94 runs sampled)
Fastest is trace-mapping: decoded Object input

trace-mapping: decoded originalPositionFor x 28,927,989 ops/sec ±0.61% (94 runs sampled)
trace-mapping: encoded originalPositionFor x 27,394,475 ops/sec ±0.55% (97 runs sampled)
source-map-js: encoded originalPositionFor x 16,856,730 ops/sec ±0.45% (96 runs sampled)
source-map-0.6.1: encoded originalPositionFor x 12,258,950 ops/sec ±0.41% (97 runs sampled)
source-map-0.8.0: encoded originalPositionFor x 22,272,990 ops/sec ±0.58% (95 runs sampled)
Fastest is trace-mapping: decoded originalPositionFor
```

[source-map]: https://www.npmjs.com/package/source-map
514
node_modules/@jridgewell/trace-mapping/dist/trace-mapping.mjs
generated
vendored
Normal file
@ -0,0 +1,514 @@
import { encode, decode } from '@jridgewell/sourcemap-codec';
import resolveUri from '@jridgewell/resolve-uri';

function resolve(input, base) {
    // The base is always treated as a directory, if it's not empty.
    // https://github.com/mozilla/source-map/blob/8cb3ee57/lib/util.js#L327
    // https://github.com/chromium/chromium/blob/da4adbb3/third_party/blink/renderer/devtools/front_end/sdk/SourceMap.js#L400-L401
    if (base && !base.endsWith('/'))
        base += '/';
    return resolveUri(input, base);
}

/**
 * Removes everything after the last "/", but leaves the slash.
 */
function stripFilename(path) {
    if (!path)
        return '';
    const index = path.lastIndexOf('/');
    return path.slice(0, index + 1);
}

const COLUMN = 0;
const SOURCES_INDEX = 1;
const SOURCE_LINE = 2;
const SOURCE_COLUMN = 3;
const NAMES_INDEX = 4;
const REV_GENERATED_LINE = 1;
const REV_GENERATED_COLUMN = 2;

function maybeSort(mappings, owned) {
    const unsortedIndex = nextUnsortedSegmentLine(mappings, 0);
    if (unsortedIndex === mappings.length)
        return mappings;
    // If we own the array (meaning we parsed it from JSON), then we're free to directly mutate it. If
    // not, we do not want to modify the consumer's input array.
    if (!owned)
        mappings = mappings.slice();
    for (let i = unsortedIndex; i < mappings.length; i = nextUnsortedSegmentLine(mappings, i + 1)) {
        mappings[i] = sortSegments(mappings[i], owned);
    }
    return mappings;
}
function nextUnsortedSegmentLine(mappings, start) {
    for (let i = start; i < mappings.length; i++) {
        if (!isSorted(mappings[i]))
            return i;
    }
    return mappings.length;
}
function isSorted(line) {
    for (let j = 1; j < line.length; j++) {
        if (line[j][COLUMN] < line[j - 1][COLUMN]) {
            return false;
        }
    }
    return true;
}
function sortSegments(line, owned) {
    if (!owned)
        line = line.slice();
    return line.sort(sortComparator);
}
function sortComparator(a, b) {
    return a[COLUMN] - b[COLUMN];
}

let found = false;
/**
 * A binary search implementation that returns the index if a match is found.
 * If no match is found, then the left-index (the index associated with the item that comes just
 * before the desired index) is returned. To maintain proper sort order, a splice would happen at
 * the next index:
 *
 * ```js
 * const array = [1, 3];
 * const needle = 2;
 * const index = binarySearch(array, needle, (item, needle) => item - needle);
 *
 * assert.equal(index, 0);
 * array.splice(index + 1, 0, needle);
 * assert.deepEqual(array, [1, 2, 3]);
 * ```
 */
function binarySearch(haystack, needle, low, high) {
    while (low <= high) {
        const mid = low + ((high - low) >> 1);
        const cmp = haystack[mid][COLUMN] - needle;
        if (cmp === 0) {
            found = true;
            return mid;
        }
        if (cmp < 0) {
            low = mid + 1;
        }
        else {
            high = mid - 1;
        }
    }
    found = false;
    return low - 1;
}
function upperBound(haystack, needle, index) {
    for (let i = index + 1; i < haystack.length; i++, index++) {
        if (haystack[i][COLUMN] !== needle)
            break;
    }
    return index;
}
function lowerBound(haystack, needle, index) {
    for (let i = index - 1; i >= 0; i--, index--) {
        if (haystack[i][COLUMN] !== needle)
            break;
    }
    return index;
}
function memoizedState() {
    return {
        lastKey: -1,
        lastNeedle: -1,
        lastIndex: -1,
    };
}
/**
 * This overly complicated beast is just to record the last tested line/column and the resulting
 * index, allowing us to skip a few tests if mappings are monotonically increasing.
 */
function memoizedBinarySearch(haystack, needle, state, key) {
    const { lastKey, lastNeedle, lastIndex } = state;
    let low = 0;
    let high = haystack.length - 1;
    if (key === lastKey) {
        if (needle === lastNeedle) {
            found = lastIndex !== -1 && haystack[lastIndex][COLUMN] === needle;
            return lastIndex;
        }
        if (needle >= lastNeedle) {
            // lastIndex may be -1 if the previous needle was not found.
            low = lastIndex === -1 ? 0 : lastIndex;
        }
        else {
            high = lastIndex;
        }
    }
    state.lastKey = key;
    state.lastNeedle = needle;
    return (state.lastIndex = binarySearch(haystack, needle, low, high));
}

// Rebuilds the original source files, with mappings that are ordered by source line/column instead
// of generated line/column.
function buildBySources(decoded, memos) {
    const sources = memos.map(buildNullArray);
    for (let i = 0; i < decoded.length; i++) {
        const line = decoded[i];
        for (let j = 0; j < line.length; j++) {
            const seg = line[j];
            if (seg.length === 1)
                continue;
            const sourceIndex = seg[SOURCES_INDEX];
            const sourceLine = seg[SOURCE_LINE];
            const sourceColumn = seg[SOURCE_COLUMN];
            const originalSource = sources[sourceIndex];
            const originalLine = (originalSource[sourceLine] || (originalSource[sourceLine] = []));
            const memo = memos[sourceIndex];
            // The binary search either found a match, or it found the left-index just before where the
            // segment should go. Either way, we want to insert after that. And there may be multiple
            // generated segments associated with an original location, so there may need to move several
            // indexes before we find where we need to insert.
            const index = upperBound(originalLine, sourceColumn, memoizedBinarySearch(originalLine, sourceColumn, memo, sourceLine));
            insert(originalLine, (memo.lastIndex = index + 1), [sourceColumn, i, seg[COLUMN]]);
        }
    }
    return sources;
}
function insert(array, index, value) {
    for (let i = array.length; i > index; i--) {
        array[i] = array[i - 1];
    }
    array[index] = value;
}
// Null arrays allow us to use ordered index keys without actually allocating contiguous memory like
// a real array. We use a null-prototype object to avoid prototype pollution and deoptimizations.
// Numeric properties on objects are magically sorted in ascending order by the engine regardless of
// the insertion order. So, by setting any numeric keys, even out of order, we'll get ascending
// order when iterating with for-in.
function buildNullArray() {
    return { __proto__: null };
}

const AnyMap = function (map, mapUrl) {
    const parsed = typeof map === 'string' ? JSON.parse(map) : map;
    if (!('sections' in parsed))
        return new TraceMap(parsed, mapUrl);
    const mappings = [];
    const sources = [];
    const sourcesContent = [];
    const names = [];
    const { sections } = parsed;
    let i = 0;
    for (; i < sections.length - 1; i++) {
        const no = sections[i + 1].offset;
        addSection(sections[i], mapUrl, mappings, sources, sourcesContent, names, no.line, no.column);
    }
    if (sections.length > 0) {
        addSection(sections[i], mapUrl, mappings, sources, sourcesContent, names, Infinity, Infinity);
    }
    const joined = {
        version: 3,
        file: parsed.file,
        names,
        sources,
        sourcesContent,
        mappings,
    };
    return presortedDecodedMap(joined);
};
function addSection(section, mapUrl, mappings, sources, sourcesContent, names, stopLine, stopColumn) {
    const map = AnyMap(section.map, mapUrl);
    const { line: lineOffset, column: columnOffset } = section.offset;
    const sourcesOffset = sources.length;
    const namesOffset = names.length;
    const decoded = decodedMappings(map);
    const { resolvedSources } = map;
    append(sources, resolvedSources);
    append(sourcesContent, map.sourcesContent || fillSourcesContent(resolvedSources.length));
    append(names, map.names);
    // If this section jumps forwards several lines, we need to add lines to the output mappings catch up.
    for (let i = mappings.length; i <= lineOffset; i++)
        mappings.push([]);
    // We can only add so many lines before we step into the range that the next section's map
    // controls. When we get to the last line, then we'll start checking the segments to see if
    // they've crossed into the column range.
    const stopI = stopLine - lineOffset;
    const len = Math.min(decoded.length, stopI + 1);
    for (let i = 0; i < len; i++) {
        const line = decoded[i];
        // On the 0th loop, the line will already exist due to a previous section, or the line catch up
        // loop above.
        const out = i === 0 ? mappings[lineOffset] : (mappings[lineOffset + i] = []);
        // On the 0th loop, the section's column offset shifts us forward. On all other lines (since the
        // map can be multiple lines), it doesn't.
        const cOffset = i === 0 ? columnOffset : 0;
        for (let j = 0; j < line.length; j++) {
            const seg = line[j];
            const column = cOffset + seg[COLUMN];
            // If this segment steps into the column range that the next section's map controls, we need
            // to stop early.
            if (i === stopI && column >= stopColumn)
                break;
            if (seg.length === 1) {
                out.push([column]);
                continue;
            }
            const sourcesIndex = sourcesOffset + seg[SOURCES_INDEX];
            const sourceLine = seg[SOURCE_LINE];
            const sourceColumn = seg[SOURCE_COLUMN];
            if (seg.length === 4) {
                out.push([column, sourcesIndex, sourceLine, sourceColumn]);
                continue;
            }
            out.push([column, sourcesIndex, sourceLine, sourceColumn, namesOffset + seg[NAMES_INDEX]]);
        }
    }
}
function append(arr, other) {
    for (let i = 0; i < other.length; i++)
        arr.push(other[i]);
}
// Sourcemaps don't need to have sourcesContent, and if they don't, we need to create an array of
// equal length to the sources. This is because the sources and sourcesContent are paired arrays,
// where `sourcesContent[i]` is the content of the `sources[i]` file. If we didn't, then joined
// sourcemap would desynchronize the sources/contents.
function fillSourcesContent(len) {
    const sourcesContent = [];
    for (let i = 0; i < len; i++)
        sourcesContent[i] = null;
    return sourcesContent;
}

const INVALID_ORIGINAL_MAPPING = Object.freeze({
    source: null,
    line: null,
    column: null,
    name: null,
});
const INVALID_GENERATED_MAPPING = Object.freeze({
    line: null,
    column: null,
});
const LINE_GTR_ZERO = '`line` must be greater than 0 (lines start at line 1)';
const COL_GTR_EQ_ZERO = '`column` must be greater than or equal to 0 (columns start at column 0)';
const LEAST_UPPER_BOUND = -1;
const GREATEST_LOWER_BOUND = 1;
/**
 * Returns the encoded (VLQ string) form of the SourceMap's mappings field.
 */
let encodedMappings;
/**
 * Returns the decoded (array of lines of segments) form of the SourceMap's mappings field.
 */
let decodedMappings;
/**
 * A low-level API to find the segment associated with a generated line/column (think, from a
 * stack trace). Line and column here are 0-based, unlike `originalPositionFor`.
 */
let traceSegment;
/**
 * A higher-level API to find the source/line/column associated with a generated line/column
 * (think, from a stack trace). Line is 1-based, but column is 0-based, due to legacy behavior in
 * `source-map` library.
 */
let originalPositionFor;
/**
 * Finds the source/line/column directly after the mapping returned by originalPositionFor, provided
 * the found mapping is from the same source and line as the originalPositionFor mapping.
 *
 * Eg, in the code `let id = 1`, `originalPositionAfter` could find the mapping associated with `1`
 * using the same needle that would return `id` when calling `originalPositionFor`.
 */
let generatedPositionFor;
/**
 * Iterates each mapping in generated position order.
 */
let eachMapping;
/**
 * A helper that skips sorting of the input map's mappings array, which can be expensive for larger
 * maps.
 */
let presortedDecodedMap;
/**
 * Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
 * a sourcemap, or to JSON.stringify.
 */
let decodedMap;
/**
 * Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
 * a sourcemap, or to JSON.stringify.
 */
let encodedMap;
class TraceMap {
    constructor(map, mapUrl) {
        this._decodedMemo = memoizedState();
        this._bySources = undefined;
        this._bySourceMemos = undefined;
        const isString = typeof map === 'string';
        if (!isString && map.constructor === TraceMap)
            return map;
        const parsed = (isString ? JSON.parse(map) : map);
        const { version, file, names, sourceRoot, sources, sourcesContent } = parsed;
        this.version = version;
        this.file = file;
        this.names = names;
        this.sourceRoot = sourceRoot;
        this.sources = sources;
        this.sourcesContent = sourcesContent;
        if (sourceRoot || mapUrl) {
            const from = resolve(sourceRoot || '', stripFilename(mapUrl));
            this.resolvedSources = sources.map((s) => resolve(s || '', from));
        }
        else {
            this.resolvedSources = sources.map((s) => s || '');
        }
        const { mappings } = parsed;
        if (typeof mappings === 'string') {
            this._encoded = mappings;
            this._decoded = undefined;
        }
        else {
            this._encoded = undefined;
            this._decoded = maybeSort(mappings, isString);
        }
    }
}
(() => {
    encodedMappings = (map) => {
        var _a;
        return ((_a = map._encoded) !== null && _a !== void 0 ? _a : (map._encoded = encode(map._decoded)));
    };
    decodedMappings = (map) => {
        return (map._decoded || (map._decoded = decode(map._encoded)));
    };
    traceSegment = (map, line, column) => {
        const decoded = decodedMappings(map);
        // It's common for parent source maps to have pointers to lines that have no
        // mapping (like a "//# sourceMappingURL=") at the end of the child file.
        if (line >= decoded.length)
            return null;
        return traceSegmentInternal(decoded[line], map._decodedMemo, line, column, GREATEST_LOWER_BOUND);
    };
    originalPositionFor = (map, { line, column, bias }) => {
        line--;
        if (line < 0)
            throw new Error(LINE_GTR_ZERO);
        if (column < 0)
            throw new Error(COL_GTR_EQ_ZERO);
        const decoded = decodedMappings(map);
        // It's common for parent source maps to have pointers to lines that have no
        // mapping (like a "//# sourceMappingURL=") at the end of the child file.
        if (line >= decoded.length)
            return INVALID_ORIGINAL_MAPPING;
        const segment = traceSegmentInternal(decoded[line], map._decodedMemo, line, column, bias || GREATEST_LOWER_BOUND);
        if (segment == null)
            return INVALID_ORIGINAL_MAPPING;
        if (segment.length == 1)
            return INVALID_ORIGINAL_MAPPING;
        const { names, resolvedSources } = map;
        return {
            source: resolvedSources[segment[SOURCES_INDEX]],
            line: segment[SOURCE_LINE] + 1,
            column: segment[SOURCE_COLUMN],
            name: segment.length === 5 ? names[segment[NAMES_INDEX]] : null,
        };
    };
    generatedPositionFor = (map, { source, line, column, bias }) => {
        line--;
        if (line < 0)
            throw new Error(LINE_GTR_ZERO);
        if (column < 0)
            throw new Error(COL_GTR_EQ_ZERO);
        const { sources, resolvedSources } = map;
        let sourceIndex = sources.indexOf(source);
        if (sourceIndex === -1)
            sourceIndex = resolvedSources.indexOf(source);
        if (sourceIndex === -1)
            return INVALID_GENERATED_MAPPING;
        const generated = (map._bySources || (map._bySources = buildBySources(decodedMappings(map), (map._bySourceMemos = sources.map(memoizedState)))));
        const memos = map._bySourceMemos;
        const segments = generated[sourceIndex][line];
        if (segments == null)
            return INVALID_GENERATED_MAPPING;
        const segment = traceSegmentInternal(segments, memos[sourceIndex], line, column, bias || GREATEST_LOWER_BOUND);
        if (segment == null)
            return INVALID_GENERATED_MAPPING;
        return {
            line: segment[REV_GENERATED_LINE] + 1,
            column: segment[REV_GENERATED_COLUMN],
        };
    };
    eachMapping = (map, cb) => {
        const decoded = decodedMappings(map);
        const { names, resolvedSources } = map;
        for (let i = 0; i < decoded.length; i++) {
            const line = decoded[i];
            for (let j = 0; j < line.length; j++) {
                const seg = line[j];
                const generatedLine = i + 1;
                const generatedColumn = seg[0];
                let source = null;
                let originalLine = null;
                let originalColumn = null;
                let name = null;
                if (seg.length !== 1) {
                    source = resolvedSources[seg[1]];
                    originalLine = seg[2] + 1;
                    originalColumn = seg[3];
                }
                if (seg.length === 5)
                    name = names[seg[4]];
                cb({
                    generatedLine,
                    generatedColumn,
                    source,
                    originalLine,
                    originalColumn,
                    name,
                });
            }
        }
    };
    presortedDecodedMap = (map, mapUrl) => {
        const clone = Object.assign({}, map);
        clone.mappings = [];
        const tracer = new TraceMap(clone, mapUrl);
        tracer._decoded = map.mappings;
        return tracer;
    };
    decodedMap = (map) => {
        return {
            version: 3,
            file: map.file,
            names: map.names,
            sourceRoot: map.sourceRoot,
            sources: map.sources,
            sourcesContent: map.sourcesContent,
            mappings: decodedMappings(map),
        };
    };
    encodedMap = (map) => {
        return {
            version: 3,
            file: map.file,
            names: map.names,
            sourceRoot: map.sourceRoot,
            sources: map.sources,
            sourcesContent: map.sourcesContent,
            mappings: encodedMappings(map),
        };
    };
})();
function traceSegmentInternal(segments, memo, line, column, bias) {
    let index = memoizedBinarySearch(segments, column, memo, line);
    if (found) {
        index = (bias === LEAST_UPPER_BOUND ? upperBound : lowerBound)(segments, column, index);
    }
    else if (bias === LEAST_UPPER_BOUND)
        index++;
    if (index === -1 || index === segments.length)
        return null;
    return segments[index];
}

export { AnyMap, GREATEST_LOWER_BOUND, LEAST_UPPER_BOUND, TraceMap, decodedMap, decodedMappings, eachMapping, encodedMap, encodedMappings, generatedPositionFor, originalPositionFor, presortedDecodedMap, traceSegment };
//# sourceMappingURL=trace-mapping.mjs.map
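One exported helper the README above does not demonstrate is `eachMapping`, which walks every segment in generated order and hands the callback 1-based lines and 0-based columns (as the implementation above shows). A minimal sketch, assuming the same `tracer` as in the README's usage example:

```typescript
import { TraceMap, eachMapping } from '@jridgewell/trace-mapping';

const tracer = new TraceMap({
  version: 3,
  sources: ['input.js'],
  names: ['foo'],
  mappings: 'KAyCIA',
});

// Visits each segment; the fields mirror the callback object built in eachMapping above.
eachMapping(tracer, (mapping) => {
  console.log(
    `generated ${mapping.generatedLine}:${mapping.generatedColumn}`,
    `-> ${mapping.source} ${mapping.originalLine}:${mapping.originalColumn}`,
    mapping.name ? `(${mapping.name})` : '',
  );
});
```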
1
node_modules/@jridgewell/trace-mapping/dist/trace-mapping.mjs.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
528
node_modules/@jridgewell/trace-mapping/dist/trace-mapping.umd.js
generated
vendored
Normal file
@ -0,0 +1,528 @@
|
|||||||
|
(function (global, factory) {
|
||||||
|
typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports, require('@jridgewell/sourcemap-codec'), require('@jridgewell/resolve-uri')) :
|
||||||
|
typeof define === 'function' && define.amd ? define(['exports', '@jridgewell/sourcemap-codec', '@jridgewell/resolve-uri'], factory) :
|
||||||
|
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.traceMapping = {}, global.sourcemapCodec, global.resolveURI));
|
||||||
|
})(this, (function (exports, sourcemapCodec, resolveUri) { 'use strict';
|
||||||
|
|
||||||
|
function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }
|
||||||
|
|
||||||
|
var resolveUri__default = /*#__PURE__*/_interopDefaultLegacy(resolveUri);
|
||||||
|
|
||||||
|
function resolve(input, base) {
|
||||||
|
// The base is always treated as a directory, if it's not empty.
|
||||||
|
// https://github.com/mozilla/source-map/blob/8cb3ee57/lib/util.js#L327
|
||||||
|
// https://github.com/chromium/chromium/blob/da4adbb3/third_party/blink/renderer/devtools/front_end/sdk/SourceMap.js#L400-L401
|
||||||
|
if (base && !base.endsWith('/'))
|
||||||
|
base += '/';
|
||||||
|
return resolveUri__default["default"](input, base);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Removes everything after the last "/", but leaves the slash.
|
||||||
|
*/
|
||||||
|
function stripFilename(path) {
|
||||||
|
if (!path)
|
||||||
|
return '';
|
||||||
|
const index = path.lastIndexOf('/');
|
||||||
|
return path.slice(0, index + 1);
|
||||||
|
}
|
||||||
|
|
||||||
|
const COLUMN = 0;
|
||||||
|
const SOURCES_INDEX = 1;
|
||||||
|
const SOURCE_LINE = 2;
|
||||||
|
const SOURCE_COLUMN = 3;
|
||||||
|
const NAMES_INDEX = 4;
|
||||||
|
const REV_GENERATED_LINE = 1;
|
||||||
|
const REV_GENERATED_COLUMN = 2;
|
||||||
|
|
||||||
|
function maybeSort(mappings, owned) {
|
||||||
|
const unsortedIndex = nextUnsortedSegmentLine(mappings, 0);
|
||||||
|
if (unsortedIndex === mappings.length)
|
||||||
|
return mappings;
|
||||||
|
// If we own the array (meaning we parsed it from JSON), then we're free to directly mutate it. If
|
||||||
|
// not, we do not want to modify the consumer's input array.
|
||||||
|
if (!owned)
|
||||||
|
mappings = mappings.slice();
|
||||||
|
for (let i = unsortedIndex; i < mappings.length; i = nextUnsortedSegmentLine(mappings, i + 1)) {
|
||||||
|
mappings[i] = sortSegments(mappings[i], owned);
|
||||||
|
}
|
||||||
|
return mappings;
|
||||||
|
}
|
||||||
|
function nextUnsortedSegmentLine(mappings, start) {
|
||||||
|
for (let i = start; i < mappings.length; i++) {
|
||||||
|
if (!isSorted(mappings[i]))
|
||||||
|
return i;
|
||||||
|
}
|
||||||
|
return mappings.length;
|
||||||
|
}
|
||||||
|
function isSorted(line) {
|
||||||
|
for (let j = 1; j < line.length; j++) {
|
||||||
|
if (line[j][COLUMN] < line[j - 1][COLUMN]) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
function sortSegments(line, owned) {
|
||||||
|
if (!owned)
|
||||||
|
line = line.slice();
|
||||||
|
return line.sort(sortComparator);
|
||||||
|
}
|
||||||
|
function sortComparator(a, b) {
|
||||||
|
return a[COLUMN] - b[COLUMN];
|
||||||
|
}
|
||||||
|
|
||||||
|
let found = false;
|
||||||
|
/**
|
||||||
|
* A binary search implementation that returns the index if a match is found.
|
||||||
|
* If no match is found, then the left-index (the index associated with the item that comes just
|
||||||
|
* before the desired index) is returned. To maintain proper sort order, a splice would happen at
|
||||||
|
* the next index:
|
||||||
|
*
|
||||||
|
* ```js
|
||||||
|
* const array = [1, 3];
|
||||||
|
* const needle = 2;
|
||||||
|
* const index = binarySearch(array, needle, (item, needle) => item - needle);
|
||||||
|
*
|
||||||
|
* assert.equal(index, 0);
|
||||||
|
* array.splice(index + 1, 0, needle);
|
||||||
|
* assert.deepEqual(array, [1, 2, 3]);
|
||||||
|
* ```
|
||||||
|
*/
|
||||||
|
function binarySearch(haystack, needle, low, high) {
|
||||||
|
while (low <= high) {
|
||||||
|
const mid = low + ((high - low) >> 1);
|
||||||
|
const cmp = haystack[mid][COLUMN] - needle;
|
||||||
|
if (cmp === 0) {
|
||||||
|
found = true;
|
||||||
|
return mid;
|
||||||
|
}
|
||||||
|
if (cmp < 0) {
|
||||||
|
low = mid + 1;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
high = mid - 1;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
found = false;
|
||||||
|
return low - 1;
|
||||||
|
}
|
||||||
|
function upperBound(haystack, needle, index) {
|
||||||
|
for (let i = index + 1; i < haystack.length; i++, index++) {
|
||||||
|
if (haystack[i][COLUMN] !== needle)
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
return index;
|
||||||
|
}
|
||||||
|
function lowerBound(haystack, needle, index) {
|
||||||
|
for (let i = index - 1; i >= 0; i--, index--) {
|
||||||
|
if (haystack[i][COLUMN] !== needle)
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
return index;
|
||||||
|
}
|
||||||
|
function memoizedState() {
|
||||||
|
return {
|
||||||
|
lastKey: -1,
|
||||||
|
lastNeedle: -1,
|
||||||
|
lastIndex: -1,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* This overly complicated beast is just to record the last tested line/column and the resulting
|
||||||
|
* index, allowing us to skip a few tests if mappings are monotonically increasing.
|
||||||
|
*/
|
||||||
|
function memoizedBinarySearch(haystack, needle, state, key) {
|
||||||
|
const { lastKey, lastNeedle, lastIndex } = state;
|
||||||
|
let low = 0;
|
||||||
|
let high = haystack.length - 1;
|
||||||
|
if (key === lastKey) {
|
||||||
|
if (needle === lastNeedle) {
|
||||||
|
found = lastIndex !== -1 && haystack[lastIndex][COLUMN] === needle;
|
||||||
|
return lastIndex;
|
||||||
|
}
|
||||||
|
if (needle >= lastNeedle) {
|
||||||
|
// lastIndex may be -1 if the previous needle was not found.
|
||||||
|
low = lastIndex === -1 ? 0 : lastIndex;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
high = lastIndex;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
state.lastKey = key;
|
||||||
|
state.lastNeedle = needle;
|
||||||
|
return (state.lastIndex = binarySearch(haystack, needle, low, high));
|
||||||
|
}
|
||||||
|
|
||||||
|
// Rebuilds the original source files, with mappings that are ordered by source line/column instead
|
||||||
|
// of generated line/column.
|
||||||
|
function buildBySources(decoded, memos) {
|
||||||
|
const sources = memos.map(buildNullArray);
|
||||||
|
for (let i = 0; i < decoded.length; i++) {
|
||||||
|
const line = decoded[i];
|
||||||
|
for (let j = 0; j < line.length; j++) {
|
||||||
|
const seg = line[j];
|
||||||
|
if (seg.length === 1)
|
||||||
|
continue;
|
||||||
|
const sourceIndex = seg[SOURCES_INDEX];
|
||||||
|
const sourceLine = seg[SOURCE_LINE];
|
||||||
|
const sourceColumn = seg[SOURCE_COLUMN];
|
||||||
|
const originalSource = sources[sourceIndex];
|
||||||
|
const originalLine = (originalSource[sourceLine] || (originalSource[sourceLine] = []));
|
||||||
|
const memo = memos[sourceIndex];
|
||||||
|
// The binary search either found a match, or it found the left-index just before where the
|
||||||
|
// segment should go. Either way, we want to insert after that. And there may be multiple
|
||||||
|
// generated segments associated with an original location, so there may need to move several
|
||||||
|
// indexes before we find where we need to insert.
|
||||||
|
const index = upperBound(originalLine, sourceColumn, memoizedBinarySearch(originalLine, sourceColumn, memo, sourceLine));
|
||||||
|
insert(originalLine, (memo.lastIndex = index + 1), [sourceColumn, i, seg[COLUMN]]);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return sources;
|
||||||
|
}
|
||||||
|
function insert(array, index, value) {
|
||||||
|
for (let i = array.length; i > index; i--) {
|
||||||
|
array[i] = array[i - 1];
|
||||||
|
}
|
||||||
|
array[index] = value;
|
||||||
|
}
|
||||||
|
// Null arrays allow us to use ordered index keys without actually allocating contiguous memory like
|
||||||
|
// a real array. We use a null-prototype object to avoid prototype pollution and deoptimizations.
|
||||||
|
// Numeric properties on objects are magically sorted in ascending order by the engine regardless of
|
||||||
|
// the insertion order. So, by setting any numeric keys, even out of order, we'll get ascending
|
||||||
|
// order when iterating with for-in.
|
||||||
|
function buildNullArray() {
|
||||||
|
return { __proto__: null };
|
||||||
|
}

const AnyMap = function (map, mapUrl) {
    const parsed = typeof map === 'string' ? JSON.parse(map) : map;
    if (!('sections' in parsed))
        return new TraceMap(parsed, mapUrl);
    const mappings = [];
    const sources = [];
    const sourcesContent = [];
    const names = [];
    const { sections } = parsed;
    let i = 0;
    for (; i < sections.length - 1; i++) {
        const no = sections[i + 1].offset;
        addSection(sections[i], mapUrl, mappings, sources, sourcesContent, names, no.line, no.column);
    }
    if (sections.length > 0) {
        addSection(sections[i], mapUrl, mappings, sources, sourcesContent, names, Infinity, Infinity);
    }
    const joined = {
        version: 3,
        file: parsed.file,
        names,
        sources,
        sourcesContent,
        mappings,
    };
    return exports.presortedDecodedMap(joined);
};
function addSection(section, mapUrl, mappings, sources, sourcesContent, names, stopLine, stopColumn) {
    const map = AnyMap(section.map, mapUrl);
    const { line: lineOffset, column: columnOffset } = section.offset;
    const sourcesOffset = sources.length;
    const namesOffset = names.length;
    const decoded = exports.decodedMappings(map);
    const { resolvedSources } = map;
    append(sources, resolvedSources);
    append(sourcesContent, map.sourcesContent || fillSourcesContent(resolvedSources.length));
    append(names, map.names);
    // If this section jumps forwards several lines, we need to add lines to the output mappings catch up.
    for (let i = mappings.length; i <= lineOffset; i++)
        mappings.push([]);
    // We can only add so many lines before we step into the range that the next section's map
    // controls. When we get to the last line, then we'll start checking the segments to see if
    // they've crossed into the column range.
    const stopI = stopLine - lineOffset;
    const len = Math.min(decoded.length, stopI + 1);
    for (let i = 0; i < len; i++) {
        const line = decoded[i];
        // On the 0th loop, the line will already exist due to a previous section, or the line catch up
        // loop above.
        const out = i === 0 ? mappings[lineOffset] : (mappings[lineOffset + i] = []);
        // On the 0th loop, the section's column offset shifts us forward. On all other lines (since the
        // map can be multiple lines), it doesn't.
        const cOffset = i === 0 ? columnOffset : 0;
        for (let j = 0; j < line.length; j++) {
            const seg = line[j];
            const column = cOffset + seg[COLUMN];
            // If this segment steps into the column range that the next section's map controls, we need
            // to stop early.
            if (i === stopI && column >= stopColumn)
                break;
            if (seg.length === 1) {
                out.push([column]);
                continue;
            }
            const sourcesIndex = sourcesOffset + seg[SOURCES_INDEX];
            const sourceLine = seg[SOURCE_LINE];
            const sourceColumn = seg[SOURCE_COLUMN];
            if (seg.length === 4) {
                out.push([column, sourcesIndex, sourceLine, sourceColumn]);
                continue;
            }
            out.push([column, sourcesIndex, sourceLine, sourceColumn, namesOffset + seg[NAMES_INDEX]]);
        }
    }
}
function append(arr, other) {
    for (let i = 0; i < other.length; i++)
        arr.push(other[i]);
}
// Sourcemaps don't need to have sourcesContent, and if they don't, we need to create an array of
// equal length to the sources. This is because the sources and sourcesContent are paired arrays,
// where `sourcesContent[i]` is the content of the `sources[i]` file. If we didn't, then joined
// sourcemap would desynchronize the sources/contents.
function fillSourcesContent(len) {
    const sourcesContent = [];
    for (let i = 0; i < len; i++)
        sourcesContent[i] = null;
    return sourcesContent;
}

const INVALID_ORIGINAL_MAPPING = Object.freeze({
    source: null,
    line: null,
    column: null,
    name: null,
});
const INVALID_GENERATED_MAPPING = Object.freeze({
    line: null,
    column: null,
});
const LINE_GTR_ZERO = '`line` must be greater than 0 (lines start at line 1)';
const COL_GTR_EQ_ZERO = '`column` must be greater than or equal to 0 (columns start at column 0)';
const LEAST_UPPER_BOUND = -1;
const GREATEST_LOWER_BOUND = 1;
/**
 * Returns the encoded (VLQ string) form of the SourceMap's mappings field.
 */
exports.encodedMappings = void 0;
/**
 * Returns the decoded (array of lines of segments) form of the SourceMap's mappings field.
 */
exports.decodedMappings = void 0;
/**
 * A low-level API to find the segment associated with a generated line/column (think, from a
 * stack trace). Line and column here are 0-based, unlike `originalPositionFor`.
 */
exports.traceSegment = void 0;
/**
 * A higher-level API to find the source/line/column associated with a generated line/column
 * (think, from a stack trace). Line is 1-based, but column is 0-based, due to legacy behavior in
 * `source-map` library.
 */
exports.originalPositionFor = void 0;
/**
 * Finds the source/line/column directly after the mapping returned by originalPositionFor, provided
 * the found mapping is from the same source and line as the originalPositionFor mapping.
 *
 * Eg, in the code `let id = 1`, `originalPositionAfter` could find the mapping associated with `1`
 * using the same needle that would return `id` when calling `originalPositionFor`.
 */
exports.generatedPositionFor = void 0;
/**
 * Iterates each mapping in generated position order.
 */
exports.eachMapping = void 0;
/**
 * A helper that skips sorting of the input map's mappings array, which can be expensive for larger
 * maps.
 */
exports.presortedDecodedMap = void 0;
/**
 * Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
 * a sourcemap, or to JSON.stringify.
 */
exports.decodedMap = void 0;
/**
 * Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
 * a sourcemap, or to JSON.stringify.
 */
exports.encodedMap = void 0;
class TraceMap {
    constructor(map, mapUrl) {
        this._decodedMemo = memoizedState();
        this._bySources = undefined;
        this._bySourceMemos = undefined;
        const isString = typeof map === 'string';
        if (!isString && map.constructor === TraceMap)
            return map;
        const parsed = (isString ? JSON.parse(map) : map);
        const { version, file, names, sourceRoot, sources, sourcesContent } = parsed;
        this.version = version;
        this.file = file;
        this.names = names;
        this.sourceRoot = sourceRoot;
        this.sources = sources;
        this.sourcesContent = sourcesContent;
        if (sourceRoot || mapUrl) {
            const from = resolve(sourceRoot || '', stripFilename(mapUrl));
            this.resolvedSources = sources.map((s) => resolve(s || '', from));
        }
        else {
            this.resolvedSources = sources.map((s) => s || '');
        }
        const { mappings } = parsed;
        if (typeof mappings === 'string') {
            this._encoded = mappings;
            this._decoded = undefined;
        }
        else {
            this._encoded = undefined;
            this._decoded = maybeSort(mappings, isString);
        }
    }
}
(() => {
    exports.encodedMappings = (map) => {
        var _a;
        return ((_a = map._encoded) !== null && _a !== void 0 ? _a : (map._encoded = sourcemapCodec.encode(map._decoded)));
    };
    exports.decodedMappings = (map) => {
        return (map._decoded || (map._decoded = sourcemapCodec.decode(map._encoded)));
    };
    exports.traceSegment = (map, line, column) => {
        const decoded = exports.decodedMappings(map);
        // It's common for parent source maps to have pointers to lines that have no
        // mapping (like a "//# sourceMappingURL=") at the end of the child file.
        if (line >= decoded.length)
            return null;
        return traceSegmentInternal(decoded[line], map._decodedMemo, line, column, GREATEST_LOWER_BOUND);
    };
    exports.originalPositionFor = (map, { line, column, bias }) => {
        line--;
        if (line < 0)
            throw new Error(LINE_GTR_ZERO);
        if (column < 0)
            throw new Error(COL_GTR_EQ_ZERO);
        const decoded = exports.decodedMappings(map);
        // It's common for parent source maps to have pointers to lines that have no
        // mapping (like a "//# sourceMappingURL=") at the end of the child file.
        if (line >= decoded.length)
            return INVALID_ORIGINAL_MAPPING;
        const segment = traceSegmentInternal(decoded[line], map._decodedMemo, line, column, bias || GREATEST_LOWER_BOUND);
        if (segment == null)
            return INVALID_ORIGINAL_MAPPING;
        if (segment.length == 1)
            return INVALID_ORIGINAL_MAPPING;
        const { names, resolvedSources } = map;
        return {
            source: resolvedSources[segment[SOURCES_INDEX]],
            line: segment[SOURCE_LINE] + 1,
            column: segment[SOURCE_COLUMN],
            name: segment.length === 5 ? names[segment[NAMES_INDEX]] : null,
        };
    };
    exports.generatedPositionFor = (map, { source, line, column, bias }) => {
        line--;
        if (line < 0)
            throw new Error(LINE_GTR_ZERO);
        if (column < 0)
            throw new Error(COL_GTR_EQ_ZERO);
        const { sources, resolvedSources } = map;
        let sourceIndex = sources.indexOf(source);
        if (sourceIndex === -1)
            sourceIndex = resolvedSources.indexOf(source);
        if (sourceIndex === -1)
            return INVALID_GENERATED_MAPPING;
        const generated = (map._bySources || (map._bySources = buildBySources(exports.decodedMappings(map), (map._bySourceMemos = sources.map(memoizedState)))));
        const memos = map._bySourceMemos;
        const segments = generated[sourceIndex][line];
        if (segments == null)
            return INVALID_GENERATED_MAPPING;
        const segment = traceSegmentInternal(segments, memos[sourceIndex], line, column, bias || GREATEST_LOWER_BOUND);
        if (segment == null)
            return INVALID_GENERATED_MAPPING;
        return {
            line: segment[REV_GENERATED_LINE] + 1,
            column: segment[REV_GENERATED_COLUMN],
        };
    };
    exports.eachMapping = (map, cb) => {
        const decoded = exports.decodedMappings(map);
        const { names, resolvedSources } = map;
        for (let i = 0; i < decoded.length; i++) {
            const line = decoded[i];
            for (let j = 0; j < line.length; j++) {
                const seg = line[j];
                const generatedLine = i + 1;
                const generatedColumn = seg[0];
                let source = null;
                let originalLine = null;
                let originalColumn = null;
                let name = null;
                if (seg.length !== 1) {
                    source = resolvedSources[seg[1]];
                    originalLine = seg[2] + 1;
                    originalColumn = seg[3];
                }
                if (seg.length === 5)
                    name = names[seg[4]];
                cb({
                    generatedLine,
                    generatedColumn,
                    source,
                    originalLine,
                    originalColumn,
                    name,
                });
            }
        }
    };
    exports.presortedDecodedMap = (map, mapUrl) => {
        const clone = Object.assign({}, map);
        clone.mappings = [];
        const tracer = new TraceMap(clone, mapUrl);
        tracer._decoded = map.mappings;
        return tracer;
    };
    exports.decodedMap = (map) => {
        return {
            version: 3,
            file: map.file,
            names: map.names,
            sourceRoot: map.sourceRoot,
            sources: map.sources,
            sourcesContent: map.sourcesContent,
            mappings: exports.decodedMappings(map),
        };
    };
    exports.encodedMap = (map) => {
        return {
            version: 3,
            file: map.file,
            names: map.names,
            sourceRoot: map.sourceRoot,
            sources: map.sources,
            sourcesContent: map.sourcesContent,
            mappings: exports.encodedMappings(map),
        };
    };
})();
function traceSegmentInternal(segments, memo, line, column, bias) {
    let index = memoizedBinarySearch(segments, column, memo, line);
    if (found) {
        index = (bias === LEAST_UPPER_BOUND ? upperBound : lowerBound)(segments, column, index);
    }
    else if (bias === LEAST_UPPER_BOUND)
        index++;
    if (index === -1 || index === segments.length)
        return null;
    return segments[index];
}

exports.AnyMap = AnyMap;
exports.GREATEST_LOWER_BOUND = GREATEST_LOWER_BOUND;
exports.LEAST_UPPER_BOUND = LEAST_UPPER_BOUND;
exports.TraceMap = TraceMap;

Object.defineProperty(exports, '__esModule', { value: true });

}));
//# sourceMappingURL=trace-mapping.umd.js.map
1
node_modules/@jridgewell/trace-mapping/dist/trace-mapping.umd.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
8
node_modules/@jridgewell/trace-mapping/dist/types/any-map.d.ts
generated
vendored
Normal file
@ -0,0 +1,8 @@
import { TraceMap } from './trace-mapping';
import type { SectionedSourceMapInput } from './types';
declare type AnyMap = {
    new (map: SectionedSourceMapInput, mapUrl?: string | null): TraceMap;
    (map: SectionedSourceMapInput, mapUrl?: string | null): TraceMap;
};
export declare const AnyMap: AnyMap;
export {};
32
node_modules/@jridgewell/trace-mapping/dist/types/binary-search.d.ts
generated
vendored
Normal file
@ -0,0 +1,32 @@
import type { SourceMapSegment, ReverseSegment } from './sourcemap-segment';
export declare type MemoState = {
    lastKey: number;
    lastNeedle: number;
    lastIndex: number;
};
export declare let found: boolean;
/**
 * A binary search implementation that returns the index if a match is found.
 * If no match is found, then the left-index (the index associated with the item that comes just
 * before the desired index) is returned. To maintain proper sort order, a splice would happen at
 * the next index:
 *
 * ```js
 * const array = [1, 3];
 * const needle = 2;
 * const index = binarySearch(array, needle, (item, needle) => item - needle);
 *
 * assert.equal(index, 0);
 * array.splice(index + 1, 0, needle);
 * assert.deepEqual(array, [1, 2, 3]);
 * ```
 */
export declare function binarySearch(haystack: SourceMapSegment[] | ReverseSegment[], needle: number, low: number, high: number): number;
export declare function upperBound(haystack: SourceMapSegment[] | ReverseSegment[], needle: number, index: number): number;
export declare function lowerBound(haystack: SourceMapSegment[] | ReverseSegment[], needle: number, index: number): number;
export declare function memoizedState(): MemoState;
/**
 * This overly complicated beast is just to record the last tested line/column and the resulting
 * index, allowing us to skip a few tests if mappings are monotonically increasing.
 */
export declare function memoizedBinarySearch(haystack: SourceMapSegment[] | ReverseSegment[], needle: number, state: MemoState, key: number): number;
7
node_modules/@jridgewell/trace-mapping/dist/types/by-source.d.ts
generated
vendored
Normal file
@ -0,0 +1,7 @@
import type { ReverseSegment, SourceMapSegment } from './sourcemap-segment';
import type { MemoState } from './binary-search';
export declare type Source = {
    __proto__: null;
    [line: number]: Exclude<ReverseSegment, [number]>[];
};
export default function buildBySources(decoded: readonly SourceMapSegment[][], memos: MemoState[]): Source[];
1
node_modules/@jridgewell/trace-mapping/dist/types/resolve.d.ts
generated
vendored
Normal file
@ -0,0 +1 @@
export default function resolve(input: string, base: string | undefined): string;
2
node_modules/@jridgewell/trace-mapping/dist/types/sort.d.ts
generated
vendored
Normal file
@ -0,0 +1,2 @@
import type { SourceMapSegment } from './sourcemap-segment';
export default function maybeSort(mappings: SourceMapSegment[][], owned: boolean): SourceMapSegment[][];
16
node_modules/@jridgewell/trace-mapping/dist/types/sourcemap-segment.d.ts
generated
vendored
Normal file
@ -0,0 +1,16 @@
declare type GeneratedColumn = number;
declare type SourcesIndex = number;
declare type SourceLine = number;
declare type SourceColumn = number;
declare type NamesIndex = number;
declare type GeneratedLine = number;
export declare type SourceMapSegment = [GeneratedColumn] | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn] | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn, NamesIndex];
export declare type ReverseSegment = [SourceColumn, GeneratedLine, GeneratedColumn];
export declare const COLUMN = 0;
export declare const SOURCES_INDEX = 1;
export declare const SOURCE_LINE = 2;
export declare const SOURCE_COLUMN = 3;
export declare const NAMES_INDEX = 4;
export declare const REV_GENERATED_LINE = 1;
export declare const REV_GENERATED_COLUMN = 2;
export {};
4
node_modules/@jridgewell/trace-mapping/dist/types/strip-filename.d.ts
generated
vendored
Normal file
@ -0,0 +1,4 @@
/**
 * Removes everything after the last "/", but leaves the slash.
 */
export default function stripFilename(path: string | undefined | null): string;
70
node_modules/@jridgewell/trace-mapping/dist/types/trace-mapping.d.ts
generated
vendored
Normal file
70
node_modules/@jridgewell/trace-mapping/dist/types/trace-mapping.d.ts
generated
vendored
Normal file
@ -0,0 +1,70 @@
|
|||||||
|
import type { SourceMapSegment } from './sourcemap-segment';
|
||||||
|
import type { SourceMapV3, DecodedSourceMap, EncodedSourceMap, InvalidOriginalMapping, OriginalMapping, InvalidGeneratedMapping, GeneratedMapping, SourceMapInput, Needle, SourceNeedle, SourceMap, EachMapping } from './types';
|
||||||
|
export type { SourceMapSegment } from './sourcemap-segment';
|
||||||
|
export type { SourceMapInput, SectionedSourceMapInput, DecodedSourceMap, EncodedSourceMap, SectionedSourceMap, InvalidOriginalMapping, OriginalMapping as Mapping, OriginalMapping, InvalidGeneratedMapping, GeneratedMapping, EachMapping, } from './types';
|
||||||
|
export declare const LEAST_UPPER_BOUND = -1;
|
||||||
|
export declare const GREATEST_LOWER_BOUND = 1;
|
||||||
|
/**
|
||||||
|
* Returns the encoded (VLQ string) form of the SourceMap's mappings field.
|
||||||
|
*/
|
||||||
|
export declare let encodedMappings: (map: TraceMap) => EncodedSourceMap['mappings'];
|
||||||
|
/**
|
||||||
|
* Returns the decoded (array of lines of segments) form of the SourceMap's mappings field.
|
||||||
|
*/
|
||||||
|
export declare let decodedMappings: (map: TraceMap) => Readonly<DecodedSourceMap['mappings']>;
|
||||||
|
/**
|
||||||
|
* A low-level API to find the segment associated with a generated line/column (think, from a
|
||||||
|
* stack trace). Line and column here are 0-based, unlike `originalPositionFor`.
|
||||||
|
*/
|
||||||
|
export declare let traceSegment: (map: TraceMap, line: number, column: number) => Readonly<SourceMapSegment> | null;
|
||||||
|
/**
|
||||||
|
* A higher-level API to find the source/line/column associated with a generated line/column
|
||||||
|
* (think, from a stack trace). Line is 1-based, but column is 0-based, due to legacy behavior in
|
||||||
|
* `source-map` library.
|
||||||
|
*/
|
||||||
|
export declare let originalPositionFor: (map: TraceMap, needle: Needle) => OriginalMapping | InvalidOriginalMapping;
|
||||||
|
/**
|
||||||
|
* Finds the source/line/column directly after the mapping returned by originalPositionFor, provided
|
||||||
|
* the found mapping is from the same source and line as the originalPositionFor mapping.
|
||||||
|
*
|
||||||
|
* Eg, in the code `let id = 1`, `originalPositionAfter` could find the mapping associated with `1`
|
||||||
|
* using the same needle that would return `id` when calling `originalPositionFor`.
|
||||||
|
*/
|
||||||
|
export declare let generatedPositionFor: (map: TraceMap, needle: SourceNeedle) => GeneratedMapping | InvalidGeneratedMapping;
|
||||||
|
/**
|
||||||
|
* Iterates each mapping in generated position order.
|
||||||
|
*/
|
||||||
|
export declare let eachMapping: (map: TraceMap, cb: (mapping: EachMapping) => void) => void;
|
||||||
|
/**
|
||||||
|
* A helper that skips sorting of the input map's mappings array, which can be expensive for larger
|
||||||
|
* maps.
|
||||||
|
*/
|
||||||
|
export declare let presortedDecodedMap: (map: DecodedSourceMap, mapUrl?: string) => TraceMap;
|
||||||
|
/**
|
||||||
|
* Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
|
||||||
|
* a sourcemap, or to JSON.stringify.
|
||||||
|
*/
|
||||||
|
export declare let decodedMap: (map: TraceMap) => Omit<DecodedSourceMap, 'mappings'> & {
|
||||||
|
mappings: readonly SourceMapSegment[][];
|
||||||
|
};
|
||||||
|
/**
|
||||||
|
* Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
|
||||||
|
* a sourcemap, or to JSON.stringify.
|
||||||
|
*/
|
||||||
|
export declare let encodedMap: (map: TraceMap) => EncodedSourceMap;
|
||||||
|
export { AnyMap } from './any-map';
|
||||||
|
export declare class TraceMap implements SourceMap {
|
||||||
|
version: SourceMapV3['version'];
|
||||||
|
file: SourceMapV3['file'];
|
||||||
|
names: SourceMapV3['names'];
|
||||||
|
sourceRoot: SourceMapV3['sourceRoot'];
|
||||||
|
sources: SourceMapV3['sources'];
|
||||||
|
sourcesContent: SourceMapV3['sourcesContent'];
|
||||||
|
resolvedSources: string[];
|
||||||
|
private _encoded;
|
||||||
|
private _decoded;
|
||||||
|
private _decodedMemo;
|
||||||
|
private _bySources;
|
||||||
|
private _bySourceMemos;
|
||||||
|
constructor(map: SourceMapInput, mapUrl?: string | null);
|
||||||
|
}
|
85
node_modules/@jridgewell/trace-mapping/dist/types/types.d.ts
generated
vendored
Normal file
85
node_modules/@jridgewell/trace-mapping/dist/types/types.d.ts
generated
vendored
Normal file
@ -0,0 +1,85 @@
|
|||||||
|
import type { SourceMapSegment } from './sourcemap-segment';
|
||||||
|
import type { TraceMap } from './trace-mapping';
|
||||||
|
export interface SourceMapV3 {
|
||||||
|
file?: string | null;
|
||||||
|
names: string[];
|
||||||
|
sourceRoot?: string;
|
||||||
|
sources: (string | null)[];
|
||||||
|
sourcesContent?: (string | null)[];
|
||||||
|
version: 3;
|
||||||
|
}
|
||||||
|
export interface EncodedSourceMap extends SourceMapV3 {
|
||||||
|
mappings: string;
|
||||||
|
}
|
||||||
|
export interface DecodedSourceMap extends SourceMapV3 {
|
||||||
|
mappings: SourceMapSegment[][];
|
||||||
|
}
|
||||||
|
export interface Section {
|
||||||
|
offset: {
|
||||||
|
line: number;
|
||||||
|
column: number;
|
||||||
|
};
|
||||||
|
map: EncodedSourceMap | DecodedSourceMap | SectionedSourceMap;
|
||||||
|
}
|
||||||
|
export interface SectionedSourceMap {
|
||||||
|
file?: string | null;
|
||||||
|
sections: Section[];
|
||||||
|
version: 3;
|
||||||
|
}
|
||||||
|
export declare type OriginalMapping = {
|
||||||
|
source: string | null;
|
||||||
|
line: number;
|
||||||
|
column: number;
|
||||||
|
name: string | null;
|
||||||
|
};
|
||||||
|
export declare type InvalidOriginalMapping = {
|
||||||
|
source: null;
|
||||||
|
line: null;
|
||||||
|
column: null;
|
||||||
|
name: null;
|
||||||
|
};
|
||||||
|
export declare type GeneratedMapping = {
|
||||||
|
line: number;
|
||||||
|
column: number;
|
||||||
|
};
|
||||||
|
export declare type InvalidGeneratedMapping = {
|
||||||
|
line: null;
|
||||||
|
column: null;
|
||||||
|
};
|
||||||
|
export declare type SourceMapInput = string | EncodedSourceMap | DecodedSourceMap | TraceMap;
|
||||||
|
export declare type SectionedSourceMapInput = SourceMapInput | SectionedSourceMap;
|
||||||
|
export declare type Needle = {
|
||||||
|
line: number;
|
||||||
|
column: number;
|
||||||
|
bias?: 1 | -1;
|
||||||
|
};
|
||||||
|
export declare type SourceNeedle = {
|
||||||
|
source: string;
|
||||||
|
line: number;
|
||||||
|
column: number;
|
||||||
|
bias?: 1 | -1;
|
||||||
|
};
|
||||||
|
export declare type EachMapping = {
|
||||||
|
generatedLine: number;
|
||||||
|
generatedColumn: number;
|
||||||
|
source: null;
|
||||||
|
originalLine: null;
|
||||||
|
originalColumn: null;
|
||||||
|
name: null;
|
||||||
|
} | {
|
||||||
|
generatedLine: number;
|
||||||
|
generatedColumn: number;
|
||||||
|
source: string | null;
|
||||||
|
originalLine: number;
|
||||||
|
originalColumn: number;
|
||||||
|
name: string | null;
|
||||||
|
};
|
||||||
|
export declare abstract class SourceMap {
|
||||||
|
version: SourceMapV3['version'];
|
||||||
|
file: SourceMapV3['file'];
|
||||||
|
names: SourceMapV3['names'];
|
||||||
|
sourceRoot: SourceMapV3['sourceRoot'];
|
||||||
|
sources: SourceMapV3['sources'];
|
||||||
|
sourcesContent: SourceMapV3['sourcesContent'];
|
||||||
|
resolvedSources: SourceMapV3['sources'];
|
||||||
|
}
|
70
node_modules/@jridgewell/trace-mapping/package.json
generated
vendored
Normal file
70
node_modules/@jridgewell/trace-mapping/package.json
generated
vendored
Normal file
@ -0,0 +1,70 @@
|
|||||||
|
{
|
||||||
|
"name": "@jridgewell/trace-mapping",
|
||||||
|
"version": "0.3.9",
|
||||||
|
"description": "Trace the original position through a source map",
|
||||||
|
"keywords": [
|
||||||
|
"source",
|
||||||
|
"map"
|
||||||
|
],
|
||||||
|
"main": "dist/trace-mapping.umd.js",
|
||||||
|
"module": "dist/trace-mapping.mjs",
|
||||||
|
"typings": "dist/types/trace-mapping.d.ts",
|
||||||
|
"files": [
|
||||||
|
"dist"
|
||||||
|
],
|
||||||
|
"exports": {
|
||||||
|
".": {
|
||||||
|
"browser": "./dist/trace-mapping.umd.js",
|
||||||
|
"require": "./dist/trace-mapping.umd.js",
|
||||||
|
"import": "./dist/trace-mapping.mjs"
|
||||||
|
},
|
||||||
|
"./package.json": "./package.json"
|
||||||
|
},
|
||||||
|
"author": "Justin Ridgewell <justin@ridgewell.name>",
|
||||||
|
"repository": {
|
||||||
|
"type": "git",
|
||||||
|
"url": "git+https://github.com/jridgewell/trace-mapping.git"
|
||||||
|
},
|
||||||
|
"license": "MIT",
|
||||||
|
"scripts": {
|
||||||
|
"benchmark": "run-s build:rollup benchmark:*",
|
||||||
|
"benchmark:install": "cd benchmark && npm install",
|
||||||
|
"benchmark:only": "node benchmark/index.mjs",
|
||||||
|
"build": "run-s -n build:*",
|
||||||
|
"build:rollup": "rollup -c rollup.config.js",
|
||||||
|
"build:ts": "tsc --project tsconfig.build.json",
|
||||||
|
"lint": "run-s -n lint:*",
|
||||||
|
"lint:prettier": "npm run test:lint:prettier -- --write",
|
||||||
|
"lint:ts": "npm run test:lint:ts -- --fix",
|
||||||
|
"prebuild": "rm -rf dist",
|
||||||
|
"prepublishOnly": "npm run preversion",
|
||||||
|
"preversion": "run-s test build",
|
||||||
|
"test": "run-s -n test:lint test:only",
|
||||||
|
"test:debug": "ava debug",
|
||||||
|
"test:lint": "run-s -n test:lint:*",
|
||||||
|
"test:lint:prettier": "prettier --check '{src,test}/**/*.ts' '**/*.md'",
|
||||||
|
"test:lint:ts": "eslint '{src,test}/**/*.ts'",
|
||||||
|
"test:only": "c8 ava",
|
||||||
|
"test:watch": "ava --watch"
|
||||||
|
},
|
||||||
|
"devDependencies": {
|
||||||
|
"@rollup/plugin-typescript": "8.3.0",
|
||||||
|
"@typescript-eslint/eslint-plugin": "5.10.0",
|
||||||
|
"@typescript-eslint/parser": "5.10.0",
|
||||||
|
"ava": "4.0.1",
|
||||||
|
"benchmark": "2.1.4",
|
||||||
|
"c8": "7.11.0",
|
||||||
|
"esbuild": "0.14.14",
|
||||||
|
"esbuild-node-loader": "0.6.4",
|
||||||
|
"eslint": "8.7.0",
|
||||||
|
"eslint-config-prettier": "8.3.0",
|
||||||
|
"npm-run-all": "4.1.5",
|
||||||
|
"prettier": "2.5.1",
|
||||||
|
"rollup": "2.64.0",
|
||||||
|
"typescript": "4.5.4"
|
||||||
|
},
|
||||||
|
"dependencies": {
|
||||||
|
"@jridgewell/resolve-uri": "^3.0.3",
|
||||||
|
"@jridgewell/sourcemap-codec": "^1.4.10"
|
||||||
|
}
|
||||||
|
}
|
21
node_modules/@tsconfig/node10/LICENSE
generated
vendored
Normal file
21
node_modules/@tsconfig/node10/LICENSE
generated
vendored
Normal file
@ -0,0 +1,21 @@
|
|||||||
|
MIT License
|
||||||
|
|
||||||
|
Copyright (c) Microsoft Corporation.
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in all
|
||||||
|
copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||||
|
SOFTWARE
|
40
node_modules/@tsconfig/node10/README.md
generated
vendored
Normal file
40
node_modules/@tsconfig/node10/README.md
generated
vendored
Normal file
@ -0,0 +1,40 @@
|
|||||||
|
### A base TSConfig for working with Node 10.
|
||||||
|
|
||||||
|
Add the package to your `"devDependencies"`:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
npm install --save-dev @tsconfig/node10
|
||||||
|
yarn add --dev @tsconfig/node10
|
||||||
|
```
|
||||||
|
|
||||||
|
Add to your `tsconfig.json`:
|
||||||
|
|
||||||
|
```json
|
||||||
|
"extends": "@tsconfig/node10/tsconfig.json"
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
The `tsconfig.json`:
|
||||||
|
|
||||||
|
```jsonc
|
||||||
|
{
|
||||||
|
"$schema": "https://json.schemastore.org/tsconfig",
|
||||||
|
"display": "Node 10",
|
||||||
|
|
||||||
|
"compilerOptions": {
|
||||||
|
"lib": ["es2018"],
|
||||||
|
"module": "commonjs",
|
||||||
|
"target": "es2018",
|
||||||
|
|
||||||
|
"strict": true,
|
||||||
|
"esModuleInterop": true,
|
||||||
|
"skipLibCheck": true,
|
||||||
|
"forceConsistentCasingInFileNames": true,
|
||||||
|
"moduleResolution": "node"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
```
|
||||||
|
|
||||||
|
You can find the [code here](https://github.com/tsconfig/bases/blob/master/bases/node10.json).
|
1
node_modules/@tsconfig/node10/package.json
generated
vendored
Normal file
1
node_modules/@tsconfig/node10/package.json
generated
vendored
Normal file
@ -0,0 +1 @@
|
|||||||
|
{"name":"@tsconfig/node10","repository":{"type":"git","url":"https://github.com/tsconfig/bases.git","directory":"bases"},"license":"MIT","description":"A base TSConfig for working with Node 10.","keywords":["tsconfig","node10"],"version":"1.0.9"}
|
16
node_modules/@tsconfig/node10/tsconfig.json
generated
vendored
Normal file
16
node_modules/@tsconfig/node10/tsconfig.json
generated
vendored
Normal file
@ -0,0 +1,16 @@
|
|||||||
|
{
|
||||||
|
"$schema": "https://json.schemastore.org/tsconfig",
|
||||||
|
"display": "Node 10",
|
||||||
|
|
||||||
|
"compilerOptions": {
|
||||||
|
"lib": ["es2018"],
|
||||||
|
"module": "commonjs",
|
||||||
|
"target": "es2018",
|
||||||
|
|
||||||
|
"strict": true,
|
||||||
|
"esModuleInterop": true,
|
||||||
|
"skipLibCheck": true,
|
||||||
|
"forceConsistentCasingInFileNames": true,
|
||||||
|
"moduleResolution": "node"
|
||||||
|
}
|
||||||
|
}
|
21
node_modules/@tsconfig/node12/LICENSE
generated
vendored
Normal file
21
node_modules/@tsconfig/node12/LICENSE
generated
vendored
Normal file
@ -0,0 +1,21 @@
|
|||||||
|
MIT License
|
||||||
|
|
||||||
|
Copyright (c) Microsoft Corporation.
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in all
|
||||||
|
copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||||
|
SOFTWARE
|
40
node_modules/@tsconfig/node12/README.md
generated
vendored
Normal file
40
node_modules/@tsconfig/node12/README.md
generated
vendored
Normal file
@ -0,0 +1,40 @@
|
|||||||
|
### A base TSConfig for working with Node 12.
|
||||||
|
|
||||||
|
Add the package to your `"devDependencies"`:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
npm install --save-dev @tsconfig/node12
|
||||||
|
yarn add --dev @tsconfig/node12
|
||||||
|
```
|
||||||
|
|
||||||
|
Add to your `tsconfig.json`:
|
||||||
|
|
||||||
|
```json
|
||||||
|
"extends": "@tsconfig/node12/tsconfig.json"
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
The `tsconfig.json`:
|
||||||
|
|
||||||
|
```jsonc
|
||||||
|
{
|
||||||
|
"$schema": "https://json.schemastore.org/tsconfig",
|
||||||
|
"display": "Node 12",
|
||||||
|
|
||||||
|
"compilerOptions": {
|
||||||
|
"lib": ["es2019", "es2020.promise", "es2020.bigint", "es2020.string"],
|
||||||
|
"module": "commonjs",
|
||||||
|
"target": "es2019",
|
||||||
|
|
||||||
|
"strict": true,
|
||||||
|
"esModuleInterop": true,
|
||||||
|
"skipLibCheck": true,
|
||||||
|
"forceConsistentCasingInFileNames": true,
|
||||||
|
"moduleResolution": "node"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
```
|
||||||
|
|
||||||
|
You can find the [code here](https://github.com/tsconfig/bases/blob/master/bases/node12.json).
|
1
node_modules/@tsconfig/node12/package.json
generated
vendored
Normal file
1
node_modules/@tsconfig/node12/package.json
generated
vendored
Normal file
@ -0,0 +1 @@
|
|||||||
|
{"name":"@tsconfig/node12","repository":{"type":"git","url":"https://github.com/tsconfig/bases.git","directory":"bases"},"license":"MIT","description":"A base TSConfig for working with Node 12.","keywords":["tsconfig","node12"],"version":"1.0.11"}
|
16
node_modules/@tsconfig/node12/tsconfig.json
generated
vendored
Normal file
16
node_modules/@tsconfig/node12/tsconfig.json
generated
vendored
Normal file
@ -0,0 +1,16 @@
|
|||||||
|
{
|
||||||
|
"$schema": "https://json.schemastore.org/tsconfig",
|
||||||
|
"display": "Node 12",
|
||||||
|
|
||||||
|
"compilerOptions": {
|
||||||
|
"lib": ["es2019", "es2020.promise", "es2020.bigint", "es2020.string"],
|
||||||
|
"module": "commonjs",
|
||||||
|
"target": "es2019",
|
||||||
|
|
||||||
|
"strict": true,
|
||||||
|
"esModuleInterop": true,
|
||||||
|
"skipLibCheck": true,
|
||||||
|
"forceConsistentCasingInFileNames": true,
|
||||||
|
"moduleResolution": "node"
|
||||||
|
}
|
||||||
|
}
|
21
node_modules/@tsconfig/node14/LICENSE
generated
vendored
Normal file
21
node_modules/@tsconfig/node14/LICENSE
generated
vendored
Normal file
@ -0,0 +1,21 @@
|
|||||||
|
MIT License
|
||||||
|
|
||||||
|
Copyright (c) Microsoft Corporation.
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in all
|
||||||
|
copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||||
|
SOFTWARE
|
40
node_modules/@tsconfig/node14/README.md
generated
vendored
Normal file
40
node_modules/@tsconfig/node14/README.md
generated
vendored
Normal file
@ -0,0 +1,40 @@
|
|||||||
|
### A base TSConfig for working with Node 14.
|
||||||
|
|
||||||
|
Add the package to your `"devDependencies"`:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
npm install --save-dev @tsconfig/node14
|
||||||
|
yarn add --dev @tsconfig/node14
|
||||||
|
```
|
||||||
|
|
||||||
|
Add to your `tsconfig.json`:
|
||||||
|
|
||||||
|
```json
|
||||||
|
"extends": "@tsconfig/node14/tsconfig.json"
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
The `tsconfig.json`:
|
||||||
|
|
||||||
|
```jsonc
|
||||||
|
{
|
||||||
|
"$schema": "https://json.schemastore.org/tsconfig",
|
||||||
|
"display": "Node 14",
|
||||||
|
|
||||||
|
"compilerOptions": {
|
||||||
|
"lib": ["es2020"],
|
||||||
|
"module": "commonjs",
|
||||||
|
"target": "es2020",
|
||||||
|
|
||||||
|
"strict": true,
|
||||||
|
"esModuleInterop": true,
|
||||||
|
"skipLibCheck": true,
|
||||||
|
"forceConsistentCasingInFileNames": true,
|
||||||
|
"moduleResolution": "node"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
```
|
||||||
|
|
||||||
|
You can find the [code here](https://github.com/tsconfig/bases/blob/master/bases/node14.json).
|
1
node_modules/@tsconfig/node14/package.json
generated
vendored
Normal file
1
node_modules/@tsconfig/node14/package.json
generated
vendored
Normal file
@ -0,0 +1 @@
|
|||||||
|
{"name":"@tsconfig/node14","repository":{"type":"git","url":"https://github.com/tsconfig/bases.git","directory":"bases"},"license":"MIT","description":"A base TSConfig for working with Node 14.","keywords":["tsconfig","node14"],"version":"1.0.3"}
|
16
node_modules/@tsconfig/node14/tsconfig.json
generated
vendored
Normal file
16
node_modules/@tsconfig/node14/tsconfig.json
generated
vendored
Normal file
@ -0,0 +1,16 @@
|
|||||||
|
{
|
||||||
|
"$schema": "https://json.schemastore.org/tsconfig",
|
||||||
|
"display": "Node 14",
|
||||||
|
|
||||||
|
"compilerOptions": {
|
||||||
|
"lib": ["es2020"],
|
||||||
|
"module": "commonjs",
|
||||||
|
"target": "es2020",
|
||||||
|
|
||||||
|
"strict": true,
|
||||||
|
"esModuleInterop": true,
|
||||||
|
"skipLibCheck": true,
|
||||||
|
"forceConsistentCasingInFileNames": true,
|
||||||
|
"moduleResolution": "node"
|
||||||
|
}
|
||||||
|
}
|
21
node_modules/@tsconfig/node16/LICENSE
generated
vendored
Normal file
21
node_modules/@tsconfig/node16/LICENSE
generated
vendored
Normal file
@ -0,0 +1,21 @@
|
|||||||
|
MIT License
|
||||||
|
|
||||||
|
Copyright (c) Microsoft Corporation.
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in all
|
||||||
|
copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||||
|
SOFTWARE
|
40
node_modules/@tsconfig/node16/README.md
generated
vendored
Normal file
40
node_modules/@tsconfig/node16/README.md
generated
vendored
Normal file
@ -0,0 +1,40 @@
|
|||||||
|
### A base TSConfig for working with Node 16.
|
||||||
|
|
||||||
|
Add the package to your `"devDependencies"`:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
npm install --save-dev @tsconfig/node16
|
||||||
|
yarn add --dev @tsconfig/node16
|
||||||
|
```
|
||||||
|
|
||||||
|
Add to your `tsconfig.json`:
|
||||||
|
|
||||||
|
```json
|
||||||
|
"extends": "@tsconfig/node16/tsconfig.json"
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
The `tsconfig.json`:
|
||||||
|
|
||||||
|
```jsonc
|
||||||
|
{
|
||||||
|
"$schema": "https://json.schemastore.org/tsconfig",
|
||||||
|
"display": "Node 16",
|
||||||
|
|
||||||
|
"compilerOptions": {
|
||||||
|
"lib": ["es2021"],
|
||||||
|
"module": "Node16",
|
||||||
|
"target": "es2021",
|
||||||
|
|
||||||
|
"strict": true,
|
||||||
|
"esModuleInterop": true,
|
||||||
|
"skipLibCheck": true,
|
||||||
|
"forceConsistentCasingInFileNames": true,
|
||||||
|
"moduleResolution": "node"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
```
|
||||||
|
|
||||||
|
You can find the [code here](https://github.com/tsconfig/bases/blob/master/bases/node16.json).
|
15
node_modules/@tsconfig/node16/package.json
generated
vendored
Normal file
15
node_modules/@tsconfig/node16/package.json
generated
vendored
Normal file
@ -0,0 +1,15 @@
|
|||||||
|
{
|
||||||
|
"name": "@tsconfig/node16",
|
||||||
|
"repository": {
|
||||||
|
"type": "git",
|
||||||
|
"url": "https://github.com/tsconfig/bases.git",
|
||||||
|
"directory": "bases"
|
||||||
|
},
|
||||||
|
"license": "MIT",
|
||||||
|
"description": "A base TSConfig for working with Node 16.",
|
||||||
|
"keywords": [
|
||||||
|
"tsconfig",
|
||||||
|
"node16"
|
||||||
|
],
|
||||||
|
"version": "1.0.4"
|
||||||
|
}
|
16
node_modules/@tsconfig/node16/tsconfig.json
generated
vendored
Normal file
16
node_modules/@tsconfig/node16/tsconfig.json
generated
vendored
Normal file
@ -0,0 +1,16 @@
|
|||||||
|
{
|
||||||
|
"$schema": "https://json.schemastore.org/tsconfig",
|
||||||
|
"display": "Node 16",
|
||||||
|
|
||||||
|
"compilerOptions": {
|
||||||
|
"lib": ["es2021"],
|
||||||
|
"module": "Node16",
|
||||||
|
"target": "es2021",
|
||||||
|
|
||||||
|
"strict": true,
|
||||||
|
"esModuleInterop": true,
|
||||||
|
"skipLibCheck": true,
|
||||||
|
"forceConsistentCasingInFileNames": true,
|
||||||
|
"moduleResolution": "node"
|
||||||
|
}
|
||||||
|
}
|
181
node_modules/acorn-walk/CHANGELOG.md
generated
vendored
Normal file
181
node_modules/acorn-walk/CHANGELOG.md
generated
vendored
Normal file
@ -0,0 +1,181 @@
|
|||||||
|
## 8.3.1 (2023-12-06)
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
Add `Function` and `Class` to the `AggregateType` type, so that they can be used in walkers without raising a type error.
|
||||||
|
|
||||||
|
Visitor functions are now called in such a way that their `this` refers to the object they are part of.
|
||||||
|
|
||||||
|
## 8.3.0 (2023-10-26)
|
||||||
|
|
||||||
|
### New features
|
||||||
|
|
||||||
|
Use a set of new, much more precise, TypeScript types.
|
||||||
|
|
||||||
|
## 8.2.0 (2021-09-06)
|
||||||
|
|
||||||
|
### New features
|
||||||
|
|
||||||
|
Add support for walking ES2022 class static blocks.
|
||||||
|
|
||||||
|
## 8.1.1 (2021-06-29)
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
Include `base` in the type declarations.
|
||||||
|
|
||||||
|
## 8.1.0 (2021-04-24)
|
||||||
|
|
||||||
|
### New features
|
||||||
|
|
||||||
|
Support node types for class fields and private methods.
|
||||||
|
|
||||||
|
## 8.0.2 (2021-01-25)
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
Adjust package.json to work with Node 12.16.0 and 13.0-13.6.
|
||||||
|
|
||||||
|
## 8.0.0 (2021-01-05)
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
Fix a bug where `full` and `fullAncestor` would skip nodes with overridden types.
|
||||||
|
|
||||||
|
## 8.0.0 (2020-08-12)
|
||||||
|
|
||||||
|
### New features
|
||||||
|
|
||||||
|
The package can now be loaded directly as an ECMAScript module in node 13+.
|
||||||
|
|
||||||
|
## 7.2.0 (2020-06-17)
|
||||||
|
|
||||||
|
### New features
|
||||||
|
|
||||||
|
Support optional chaining and nullish coalescing.
|
||||||
|
|
||||||
|
Support `import.meta`.
|
||||||
|
|
||||||
|
Add support for `export * as ns from "source"`.
|
||||||
|
|
||||||
|
## 7.1.1 (2020-02-13)
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
Clean up the type definitions to actually work well with the main parser.
|
||||||
|
|
||||||
|
## 7.1.0 (2020-02-11)
|
||||||
|
|
||||||
|
### New features
|
||||||
|
|
||||||
|
Add a TypeScript definition file for the library.
|
||||||
|
|
||||||
|
## 7.0.0 (2017-08-12)
|
||||||
|
|
||||||
|
### New features
|
||||||
|
|
||||||
|
Support walking `ImportExpression` nodes.
|
||||||
|
|
||||||
|
## 6.2.0 (2017-07-04)
|
||||||
|
|
||||||
|
### New features
|
||||||
|
|
||||||
|
Add support for `Import` nodes.
|
||||||
|
|
||||||
|
## 6.1.0 (2018-09-28)
|
||||||
|
|
||||||
|
### New features
|
||||||
|
|
||||||
|
The walker now walks `TemplateElement` nodes.
|
||||||
|
|
||||||
|
## 6.0.1 (2018-09-14)
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
Fix bad "main" field in package.json.
|
||||||
|
|
||||||
|
## 6.0.0 (2018-09-14)
|
||||||
|
|
||||||
|
### Breaking changes
|
||||||
|
|
||||||
|
This is now a separate package, `acorn-walk`, rather than part of the main `acorn` package.
|
||||||
|
|
||||||
|
The `ScopeBody` and `ScopeExpression` meta-node-types are no longer supported.
|
||||||
|
|
||||||
|
## 5.7.1 (2018-06-15)
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
Make sure the walker and bin files are rebuilt on release (the previous release didn't get the up-to-date versions).
|
||||||
|
|
||||||
|
## 5.7.0 (2018-06-15)
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
Fix crash in walker when walking a binding-less catch node.
|
||||||
|
|
||||||
|
## 5.6.2 (2018-06-05)
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
In the walker, go back to allowing the `baseVisitor` argument to be null to default to the default base everywhere.
|
||||||
|
|
||||||
|
## 5.6.1 (2018-06-01)
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
Fix regression when passing `null` as fourth argument to `walk.recursive`.
|
||||||
|
|
||||||
|
## 5.6.0 (2018-05-31)
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
Fix a bug in the walker that caused a crash when walking an object pattern spread.
|
||||||
|
|
||||||
|
## 5.5.1 (2018-03-06)
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
Fix regression in walker causing property values in object patterns to be walked as expressions.
|
||||||
|
|
||||||
|
## 5.5.0 (2018-02-27)
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
Support object spread in the AST walker.
|
||||||
|
|
||||||
|
## 5.4.1 (2018-02-02)
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
5.4.0 somehow accidentally included an old version of walk.js.
|
||||||
|
|
||||||
|
## 5.2.0 (2017-10-30)
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
The `full` and `fullAncestor` walkers no longer visit nodes multiple times.
|
||||||
|
|
||||||
|
## 5.1.0 (2017-07-05)
|
||||||
|
|
||||||
|
### New features
|
||||||
|
|
||||||
|
New walker functions `full` and `fullAncestor`.
|
||||||
|
|
||||||
|
## 3.2.0 (2016-06-07)
|
||||||
|
|
||||||
|
### New features
|
||||||
|
|
||||||
|
Make it possible to use `visit.ancestor` with a walk state.
|
||||||
|
|
||||||
|
## 3.1.0 (2016-04-18)
|
||||||
|
|
||||||
|
### New features
|
||||||
|
|
||||||
|
The walker now allows defining handlers for `CatchClause` nodes.
|
||||||
|
|
||||||
|
## 2.5.2 (2015-10-27)
|
||||||
|
|
||||||
|
### Fixes
|
||||||
|
|
||||||
|
Fix bug where the walker walked an exported `let` statement as an expression.
|
21
node_modules/acorn-walk/LICENSE
generated
vendored
Normal file
21
node_modules/acorn-walk/LICENSE
generated
vendored
Normal file
@ -0,0 +1,21 @@
|
|||||||
|
MIT License
|
||||||
|
|
||||||
|
Copyright (C) 2012-2020 by various contributors (see AUTHORS)
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in
|
||||||
|
all copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||||
|
THE SOFTWARE.
|
124
node_modules/acorn-walk/README.md
generated
vendored
Normal file
@ -0,0 +1,124 @@
# Acorn AST walker

An abstract syntax tree walker for the
[ESTree](https://github.com/estree/estree) format.

## Community

Acorn is open source software released under an
[MIT license](https://github.com/acornjs/acorn/blob/master/acorn-walk/LICENSE).

You are welcome to
[report bugs](https://github.com/acornjs/acorn/issues) or create pull
requests on [github](https://github.com/acornjs/acorn).

## Installation

The easiest way to install acorn is from [`npm`](https://www.npmjs.com/):

```sh
npm install acorn-walk
```

Alternately, you can download the source and build acorn yourself:

```sh
git clone https://github.com/acornjs/acorn.git
cd acorn
npm install
```

## Interface

An algorithm for recursing through a syntax tree is stored as an
object, with a property for each tree node type holding a function
that will recurse through such a node. There are several ways to run
such a walker.

**simple**`(node, visitors, base, state)` does a 'simple' walk over a
tree. `node` should be the AST node to walk, and `visitors` an object
with properties whose names correspond to node types in the [ESTree
spec](https://github.com/estree/estree). The properties should contain
functions that will be called with the node object and, if applicable
the state at that point. The last two arguments are optional. `base`
is a walker algorithm, and `state` is a start state. The default
walker will simply visit all statements and expressions and not
produce a meaningful state. (An example of a use of state is to track
scope at each point in the tree.)

```js
const acorn = require("acorn")
const walk = require("acorn-walk")

walk.simple(acorn.parse("let x = 10"), {
  Literal(node) {
    console.log(`Found a literal: ${node.value}`)
  }
})
```

**ancestor**`(node, visitors, base, state)` does a 'simple' walk over
a tree, building up an array of ancestor nodes (including the current node)
and passing the array to the callbacks as a third parameter.

```js
const acorn = require("acorn")
const walk = require("acorn-walk")

walk.ancestor(acorn.parse("foo('hi')"), {
  Literal(_node, _state, ancestors) {
    console.log("This literal's ancestors are:", ancestors.map(n => n.type))
  }
})
```

**recursive**`(node, state, functions, base)` does a 'recursive'
walk, where the walker functions are responsible for continuing the
walk on the child nodes of their target node. `state` is the start
state, and `functions` should contain an object that maps node types
to walker functions. Such functions are called with `(node, state, c)`
arguments, and can cause the walk to continue on a sub-node by calling
the `c` argument on it with `(node, state)` arguments. The optional
`base` argument provides the fallback walker functions for node types
that aren't handled in the `functions` object. If not given, the
default walkers will be used.
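
For instance, a recursive walker can prune a subtree simply by not calling `c` on it. The sketch below is illustrative only (it is not part of the upstream README, and it assumes an `acorn` version that takes an `ecmaVersion` option):

```js
const acorn = require("acorn")
const walk = require("acorn-walk")

// Count function declarations, but never descend into their bodies,
// so functions nested inside other functions are not counted.
const state = { count: 0 }
walk.recursive(acorn.parse("function a() { function b() {} } function c() {}", { ecmaVersion: 2020 }), state, {
  FunctionDeclaration(node, st, c) {
    st.count++
    // No call to c(node.body, st) here, so the walk stops at this node.
  }
})
console.log(state.count) // 2 (a and c; b is never visited)
```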
|
||||||
|
|
||||||
|
**make**`(functions, base)` builds a new walker object by using the
|
||||||
|
walker functions in `functions` and filling in the missing ones by
|
||||||
|
taking defaults from `base`.

**full**`(node, callback, base, state)` does a 'full' walk over a
tree, calling the callback with the arguments `(node, state, type)` for
each node.

**fullAncestor**`(node, callback, base, state)` does a 'full' walk
over a tree, building up an array of ancestor nodes (including the
current node) and passing the array to the callbacks as a third
parameter.

```js
const acorn = require("acorn")
const walk = require("acorn-walk")

walk.full(acorn.parse("1 + 1"), node => {
  console.log(`There's a ${node.type} node at ${node.start}`)
})
```
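
The following sketch (not part of the original README) shows `fullAncestor`, printing every node together with the types of its ancestors:

```js
const acorn = require("acorn")
const walk = require("acorn-walk")

// The ancestors array includes the current node as its last element,
// so it is sliced off before printing.
walk.fullAncestor(acorn.parse("foo()"), (node, _state, ancestors) => {
  console.log(node.type, "inside", ancestors.slice(0, -1).map(n => n.type))
})
```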

**findNodeAt**`(node, start, end, test, base, state)` tries to locate
a node in a tree at the given start and/or end offsets, which
satisfies the predicate `test`. `start` and `end` can be either `null`
(as wildcard) or a number. `test` may be a string (indicating a node
type) or a function that takes `(nodeType, node)` arguments and
returns a boolean indicating whether this node is interesting. `base`
and `state` are optional, and can be used to specify a custom walker.
Nodes are tested from inner to outer, so if two nodes match the
boundaries, the inner one will be preferred.
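
For example (an illustrative sketch, not from the original README), in the source below both the binary expression `40 + 2` and the literal `40` start at offset 10, and the inner node wins unless the test rules it out:

```js
const acorn = require("acorn")
const walk = require("acorn-walk")

const ast = acorn.parse("const n = 40 + 2")

// Inner nodes are tested first, so the Literal beats the enclosing
// BinaryExpression that starts at the same offset.
console.log(walk.findNodeAt(ast, 10).node.type) // "Literal"

// A node-type test restricts which nodes may match.
console.log(walk.findNodeAt(ast, 10, null, "BinaryExpression").node.type) // "BinaryExpression"
```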

**findNodeAround**`(node, pos, test, base, state)` is a lot like
`findNodeAt`, but will match any node that exists 'around' (spanning)
the given position.

**findNodeAfter**`(node, pos, test, base, state)` is similar to
`findNodeAround`, but will match all nodes *after* the given position
(testing outer nodes before inner nodes).
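
A brief sketch of the position-based helpers (illustrative, not from the original README):

```js
const acorn = require("acorn")
const walk = require("acorn-walk")

const ast = acorn.parse("const n = 40 + 2")

// Offset 13 is the `+`; the innermost matching node spanning it is
// the BinaryExpression `40 + 2`.
console.log(walk.findNodeAround(ast, 13, "BinaryExpression").node.type) // "BinaryExpression"

// The first matching node that starts at or after offset 13 is the literal 2.
console.log(walk.findNodeAfter(ast, 13, "Literal").node.value) // 2
```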

177 node_modules/acorn-walk/dist/walk.d.mts generated vendored Normal file
@@ -0,0 +1,177 @@
import * as acorn from "acorn"
|
||||||
|
|
||||||
|
export type FullWalkerCallback<TState> = (
|
||||||
|
node: acorn.Node,
|
||||||
|
state: TState,
|
||||||
|
type: string
|
||||||
|
) => void
|
||||||
|
|
||||||
|
export type FullAncestorWalkerCallback<TState> = (
|
||||||
|
node: acorn.Node,
|
||||||
|
state: TState,
|
||||||
|
ancestors: acorn.Node[],
|
||||||
|
type: string
|
||||||
|
) => void
|
||||||
|
|
||||||
|
type AggregateType = {
|
||||||
|
Expression: acorn.Expression,
|
||||||
|
Statement: acorn.Statement,
|
||||||
|
Function: acorn.Function,
|
||||||
|
Class: acorn.Class,
|
||||||
|
Pattern: acorn.Pattern,
|
||||||
|
ForInit: acorn.VariableDeclaration | acorn.Expression
|
||||||
|
}
|
||||||
|
|
||||||
|
export type SimpleVisitors<TState> = {
|
||||||
|
[type in acorn.AnyNode["type"]]?: (node: Extract<acorn.AnyNode, { type: type }>, state: TState) => void
|
||||||
|
} & {
|
||||||
|
[type in keyof AggregateType]?: (node: AggregateType[type], state: TState) => void
|
||||||
|
}
|
||||||
|
|
||||||
|
export type AncestorVisitors<TState> = {
|
||||||
|
[type in acorn.AnyNode["type"]]?: ( node: Extract<acorn.AnyNode, { type: type }>, state: TState, ancestors: acorn.Node[]
|
||||||
|
) => void
|
||||||
|
} & {
|
||||||
|
[type in keyof AggregateType]?: (node: AggregateType[type], state: TState, ancestors: acorn.Node[]) => void
|
||||||
|
}
|
||||||
|
|
||||||
|
export type WalkerCallback<TState> = (node: acorn.Node, state: TState) => void
|
||||||
|
|
||||||
|
export type RecursiveVisitors<TState> = {
|
||||||
|
[type in acorn.AnyNode["type"]]?: ( node: Extract<acorn.AnyNode, { type: type }>, state: TState, callback: WalkerCallback<TState>) => void
|
||||||
|
} & {
|
||||||
|
[type in keyof AggregateType]?: (node: AggregateType[type], state: TState, callback: WalkerCallback<TState>) => void
|
||||||
|
}
|
||||||
|
|
||||||
|
export type FindPredicate = (type: string, node: acorn.Node) => boolean
|
||||||
|
|
||||||
|
export interface Found<TState> {
|
||||||
|
node: acorn.Node,
|
||||||
|
state: TState
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* does a 'simple' walk over a tree
|
||||||
|
* @param node the AST node to walk
|
||||||
|
* @param visitors an object with properties whose names correspond to node types in the {@link https://github.com/estree/estree | ESTree spec}. The properties should contain functions that will be called with the node object and, if applicable the state at that point.
|
||||||
|
* @param base a walker algorithm
|
||||||
|
* @param state a start state. The default walker will simply visit all statements and expressions and not produce a meaningful state. (An example of a use of state is to track scope at each point in the tree.)
|
||||||
|
*/
|
||||||
|
export function simple<TState>(
|
||||||
|
node: acorn.Node,
|
||||||
|
visitors: SimpleVisitors<TState>,
|
||||||
|
base?: RecursiveVisitors<TState>,
|
||||||
|
state?: TState
|
||||||
|
): void
|
||||||
|
|
||||||
|
/**
|
||||||
|
* does a 'simple' walk over a tree, building up an array of ancestor nodes (including the current node) and passing the array to the callbacks as a third parameter.
|
||||||
|
* @param node
|
||||||
|
* @param visitors
|
||||||
|
* @param base
|
||||||
|
* @param state
|
||||||
|
*/
|
||||||
|
export function ancestor<TState>(
|
||||||
|
node: acorn.Node,
|
||||||
|
visitors: AncestorVisitors<TState>,
|
||||||
|
base?: RecursiveVisitors<TState>,
|
||||||
|
state?: TState
|
||||||
|
): void
|
||||||
|
|
||||||
|
/**
|
||||||
|
* does a 'recursive' walk, where the walker functions are responsible for continuing the walk on the child nodes of their target node.
|
||||||
|
* @param node
|
||||||
|
* @param state the start state
|
||||||
|
* @param functions contain an object that maps node types to walker functions
|
||||||
|
* @param base provides the fallback walker functions for node types that aren't handled in the {@link functions} object. If not given, the default walkers will be used.
|
||||||
|
*/
|
||||||
|
export function recursive<TState>(
|
||||||
|
node: acorn.Node,
|
||||||
|
state: TState,
|
||||||
|
functions: RecursiveVisitors<TState>,
|
||||||
|
base?: RecursiveVisitors<TState>
|
||||||
|
): void
|
||||||
|
|
||||||
|
/**
|
||||||
|
* does a 'full' walk over a tree, calling the {@link callback} with the arguments (node, state, type) for each node
|
||||||
|
* @param node
|
||||||
|
* @param callback
|
||||||
|
* @param base
|
||||||
|
* @param state
|
||||||
|
*/
|
||||||
|
export function full<TState>(
|
||||||
|
node: acorn.Node,
|
||||||
|
callback: FullWalkerCallback<TState>,
|
||||||
|
base?: RecursiveVisitors<TState>,
|
||||||
|
state?: TState
|
||||||
|
): void
|
||||||
|
|
||||||
|
/**
|
||||||
|
* does a 'full' walk over a tree, building up an array of ancestor nodes (including the current node) and passing the array to the callbacks as a third parameter.
|
||||||
|
* @param node
|
||||||
|
* @param callback
|
||||||
|
* @param base
|
||||||
|
* @param state
|
||||||
|
*/
|
||||||
|
export function fullAncestor<TState>(
|
||||||
|
node: acorn.Node,
|
||||||
|
callback: FullAncestorWalkerCallback<TState>,
|
||||||
|
base?: RecursiveVisitors<TState>,
|
||||||
|
state?: TState
|
||||||
|
): void
|
||||||
|
|
||||||
|
/**
|
||||||
|
* builds a new walker object by using the walker functions in {@link functions} and filling in the missing ones by taking defaults from {@link base}.
|
||||||
|
* @param functions
|
||||||
|
* @param base
|
||||||
|
*/
|
||||||
|
export function make<TState>(
|
||||||
|
functions: RecursiveVisitors<TState>,
|
||||||
|
base?: RecursiveVisitors<TState>
|
||||||
|
): RecursiveVisitors<TState>
|
||||||
|
|
||||||
|
/**
|
||||||
|
* tries to locate a node in a tree at the given start and/or end offsets, which satisfies the predicate test. {@link start} and {@link end} can be either `null` (as wildcard) or a `number`. {@link test} may be a string (indicating a node type) or a function that takes (nodeType, node) arguments and returns a boolean indicating whether this node is interesting. {@link base} and {@link state} are optional, and can be used to specify a custom walker. Nodes are tested from inner to outer, so if two nodes match the boundaries, the inner one will be preferred.
|
||||||
|
* @param node
|
||||||
|
* @param start
|
||||||
|
* @param end
|
||||||
|
* @param type
|
||||||
|
* @param base
|
||||||
|
* @param state
|
||||||
|
*/
|
||||||
|
export function findNodeAt<TState>(
|
||||||
|
node: acorn.Node,
|
||||||
|
start: number | undefined,
|
||||||
|
end?: number | undefined,
|
||||||
|
type?: FindPredicate | string,
|
||||||
|
base?: RecursiveVisitors<TState>,
|
||||||
|
state?: TState
|
||||||
|
): Found<TState> | undefined
|
||||||
|
|
||||||
|
/**
|
||||||
|
* like {@link findNodeAt}, but will match any node that exists 'around' (spanning) the given position.
|
||||||
|
* @param node
|
||||||
|
* @param start
|
||||||
|
* @param type
|
||||||
|
* @param base
|
||||||
|
* @param state
|
||||||
|
*/
|
||||||
|
export function findNodeAround<TState>(
|
||||||
|
node: acorn.Node,
|
||||||
|
start: number | undefined,
|
||||||
|
type?: FindPredicate | string,
|
||||||
|
base?: RecursiveVisitors<TState>,
|
||||||
|
state?: TState
|
||||||
|
): Found<TState> | undefined
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Find the outermost matching node after a given position.
|
||||||
|
*/
|
||||||
|
export const findNodeAfter: typeof findNodeAround
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Find the outermost matching node before a given position.
|
||||||
|
*/
|
||||||
|
export const findNodeBefore: typeof findNodeAround
|
||||||
|
|
||||||
|
export const base: RecursiveVisitors<any>

177 node_modules/acorn-walk/dist/walk.d.ts generated vendored Normal file
@@ -0,0 +1,177 @@
import * as acorn from "acorn"
|
||||||
|
|
||||||
|
export type FullWalkerCallback<TState> = (
|
||||||
|
node: acorn.Node,
|
||||||
|
state: TState,
|
||||||
|
type: string
|
||||||
|
) => void
|
||||||
|
|
||||||
|
export type FullAncestorWalkerCallback<TState> = (
|
||||||
|
node: acorn.Node,
|
||||||
|
state: TState,
|
||||||
|
ancestors: acorn.Node[],
|
||||||
|
type: string
|
||||||
|
) => void
|
||||||
|
|
||||||
|
type AggregateType = {
|
||||||
|
Expression: acorn.Expression,
|
||||||
|
Statement: acorn.Statement,
|
||||||
|
Function: acorn.Function,
|
||||||
|
Class: acorn.Class,
|
||||||
|
Pattern: acorn.Pattern,
|
||||||
|
ForInit: acorn.VariableDeclaration | acorn.Expression
|
||||||
|
}
|
||||||
|
|
||||||
|
export type SimpleVisitors<TState> = {
|
||||||
|
[type in acorn.AnyNode["type"]]?: (node: Extract<acorn.AnyNode, { type: type }>, state: TState) => void
|
||||||
|
} & {
|
||||||
|
[type in keyof AggregateType]?: (node: AggregateType[type], state: TState) => void
|
||||||
|
}
|
||||||
|
|
||||||
|
export type AncestorVisitors<TState> = {
|
||||||
|
[type in acorn.AnyNode["type"]]?: ( node: Extract<acorn.AnyNode, { type: type }>, state: TState, ancestors: acorn.Node[]
|
||||||
|
) => void
|
||||||
|
} & {
|
||||||
|
[type in keyof AggregateType]?: (node: AggregateType[type], state: TState, ancestors: acorn.Node[]) => void
|
||||||
|
}
|
||||||
|
|
||||||
|
export type WalkerCallback<TState> = (node: acorn.Node, state: TState) => void
|
||||||
|
|
||||||
|
export type RecursiveVisitors<TState> = {
|
||||||
|
[type in acorn.AnyNode["type"]]?: ( node: Extract<acorn.AnyNode, { type: type }>, state: TState, callback: WalkerCallback<TState>) => void
|
||||||
|
} & {
|
||||||
|
[type in keyof AggregateType]?: (node: AggregateType[type], state: TState, callback: WalkerCallback<TState>) => void
|
||||||
|
}
|
||||||
|
|
||||||
|
export type FindPredicate = (type: string, node: acorn.Node) => boolean
|
||||||
|
|
||||||
|
export interface Found<TState> {
|
||||||
|
node: acorn.Node,
|
||||||
|
state: TState
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* does a 'simple' walk over a tree
|
||||||
|
* @param node the AST node to walk
|
||||||
|
* @param visitors an object with properties whose names correspond to node types in the {@link https://github.com/estree/estree | ESTree spec}. The properties should contain functions that will be called with the node object and, if applicable the state at that point.
|
||||||
|
* @param base a walker algorithm
|
||||||
|
* @param state a start state. The default walker will simply visit all statements and expressions and not produce a meaningful state. (An example of a use of state is to track scope at each point in the tree.)
|
||||||
|
*/
|
||||||
|
export function simple<TState>(
|
||||||
|
node: acorn.Node,
|
||||||
|
visitors: SimpleVisitors<TState>,
|
||||||
|
base?: RecursiveVisitors<TState>,
|
||||||
|
state?: TState
|
||||||
|
): void
|
||||||
|
|
||||||
|
/**
|
||||||
|
* does a 'simple' walk over a tree, building up an array of ancestor nodes (including the current node) and passing the array to the callbacks as a third parameter.
|
||||||
|
* @param node
|
||||||
|
* @param visitors
|
||||||
|
* @param base
|
||||||
|
* @param state
|
||||||
|
*/
|
||||||
|
export function ancestor<TState>(
|
||||||
|
node: acorn.Node,
|
||||||
|
visitors: AncestorVisitors<TState>,
|
||||||
|
base?: RecursiveVisitors<TState>,
|
||||||
|
state?: TState
|
||||||
|
): void
|
||||||
|
|
||||||
|
/**
|
||||||
|
* does a 'recursive' walk, where the walker functions are responsible for continuing the walk on the child nodes of their target node.
|
||||||
|
* @param node
|
||||||
|
* @param state the start state
|
||||||
|
* @param functions contain an object that maps node types to walker functions
|
||||||
|
* @param base provides the fallback walker functions for node types that aren't handled in the {@link functions} object. If not given, the default walkers will be used.
|
||||||
|
*/
|
||||||
|
export function recursive<TState>(
|
||||||
|
node: acorn.Node,
|
||||||
|
state: TState,
|
||||||
|
functions: RecursiveVisitors<TState>,
|
||||||
|
base?: RecursiveVisitors<TState>
|
||||||
|
): void
|
||||||
|
|
||||||
|
/**
|
||||||
|
* does a 'full' walk over a tree, calling the {@link callback} with the arguments (node, state, type) for each node
|
||||||
|
* @param node
|
||||||
|
* @param callback
|
||||||
|
* @param base
|
||||||
|
* @param state
|
||||||
|
*/
|
||||||
|
export function full<TState>(
|
||||||
|
node: acorn.Node,
|
||||||
|
callback: FullWalkerCallback<TState>,
|
||||||
|
base?: RecursiveVisitors<TState>,
|
||||||
|
state?: TState
|
||||||
|
): void
|
||||||
|
|
||||||
|
/**
|
||||||
|
* does a 'full' walk over a tree, building up an array of ancestor nodes (including the current node) and passing the array to the callbacks as a third parameter.
|
||||||
|
* @param node
|
||||||
|
* @param callback
|
||||||
|
* @param base
|
||||||
|
* @param state
|
||||||
|
*/
|
||||||
|
export function fullAncestor<TState>(
|
||||||
|
node: acorn.Node,
|
||||||
|
callback: FullAncestorWalkerCallback<TState>,
|
||||||
|
base?: RecursiveVisitors<TState>,
|
||||||
|
state?: TState
|
||||||
|
): void
|
||||||
|
|
||||||
|
/**
|
||||||
|
* builds a new walker object by using the walker functions in {@link functions} and filling in the missing ones by taking defaults from {@link base}.
|
||||||
|
* @param functions
|
||||||
|
* @param base
|
||||||
|
*/
|
||||||
|
export function make<TState>(
|
||||||
|
functions: RecursiveVisitors<TState>,
|
||||||
|
base?: RecursiveVisitors<TState>
|
||||||
|
): RecursiveVisitors<TState>
|
||||||
|
|
||||||
|
/**
|
||||||
|
* tries to locate a node in a tree at the given start and/or end offsets, which satisfies the predicate test. {@link start} and {@link end} can be either `null` (as wildcard) or a `number`. {@link test} may be a string (indicating a node type) or a function that takes (nodeType, node) arguments and returns a boolean indicating whether this node is interesting. {@link base} and {@link state} are optional, and can be used to specify a custom walker. Nodes are tested from inner to outer, so if two nodes match the boundaries, the inner one will be preferred.
|
||||||
|
* @param node
|
||||||
|
* @param start
|
||||||
|
* @param end
|
||||||
|
* @param type
|
||||||
|
* @param base
|
||||||
|
* @param state
|
||||||
|
*/
|
||||||
|
export function findNodeAt<TState>(
|
||||||
|
node: acorn.Node,
|
||||||
|
start: number | undefined,
|
||||||
|
end?: number | undefined,
|
||||||
|
type?: FindPredicate | string,
|
||||||
|
base?: RecursiveVisitors<TState>,
|
||||||
|
state?: TState
|
||||||
|
): Found<TState> | undefined
|
||||||
|
|
||||||
|
/**
|
||||||
|
* like {@link findNodeAt}, but will match any node that exists 'around' (spanning) the given position.
|
||||||
|
* @param node
|
||||||
|
* @param start
|
||||||
|
* @param type
|
||||||
|
* @param base
|
||||||
|
* @param state
|
||||||
|
*/
|
||||||
|
export function findNodeAround<TState>(
|
||||||
|
node: acorn.Node,
|
||||||
|
start: number | undefined,
|
||||||
|
type?: FindPredicate | string,
|
||||||
|
base?: RecursiveVisitors<TState>,
|
||||||
|
state?: TState
|
||||||
|
): Found<TState> | undefined
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Find the outermost matching node after a given position.
|
||||||
|
*/
|
||||||
|
export const findNodeAfter: typeof findNodeAround
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Find the outermost matching node before a given position.
|
||||||
|
*/
|
||||||
|
export const findNodeBefore: typeof findNodeAround
|
||||||
|
|
||||||
|
export const base: RecursiveVisitors<any>

461 node_modules/acorn-walk/dist/walk.js generated vendored Normal file
@@ -0,0 +1,461 @@
(function (global, factory) {
|
||||||
|
typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports) :
|
||||||
|
typeof define === 'function' && define.amd ? define(['exports'], factory) :
|
||||||
|
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory((global.acorn = global.acorn || {}, global.acorn.walk = {})));
|
||||||
|
})(this, (function (exports) { 'use strict';
|
||||||
|
|
||||||
|
// AST walker module for ESTree compatible trees
|
||||||
|
|
||||||
|
// A simple walk is one where you simply specify callbacks to be
|
||||||
|
// called on specific nodes. The last two arguments are optional. A
|
||||||
|
// simple use would be
|
||||||
|
//
|
||||||
|
// walk.simple(myTree, {
|
||||||
|
// Expression: function(node) { ... }
|
||||||
|
// });
|
||||||
|
//
|
||||||
|
// to do something with all expressions. All ESTree node types
|
||||||
|
// can be used to identify node types, as well as Expression and
|
||||||
|
// Statement, which denote categories of nodes.
|
||||||
|
//
|
||||||
|
// The base argument can be used to pass a custom (recursive)
|
||||||
|
// walker, and state can be used to give this walked an initial
|
||||||
|
// state.
|
||||||
|
|
||||||
|
function simple(node, visitors, baseVisitor, state, override) {
|
||||||
|
if (!baseVisitor) { baseVisitor = base
|
||||||
|
; }(function c(node, st, override) {
|
||||||
|
var type = override || node.type;
|
||||||
|
baseVisitor[type](node, st, c);
|
||||||
|
if (visitors[type]) { visitors[type](node, st); }
|
||||||
|
})(node, state, override);
|
||||||
|
}
|
||||||
|
|
||||||
|
// An ancestor walk keeps an array of ancestor nodes (including the
|
||||||
|
// current node) and passes them to the callback as third parameter
|
||||||
|
// (and also as state parameter when no other state is present).
|
||||||
|
function ancestor(node, visitors, baseVisitor, state, override) {
|
||||||
|
var ancestors = [];
|
||||||
|
if (!baseVisitor) { baseVisitor = base
|
||||||
|
; }(function c(node, st, override) {
|
||||||
|
var type = override || node.type;
|
||||||
|
var isNew = node !== ancestors[ancestors.length - 1];
|
||||||
|
if (isNew) { ancestors.push(node); }
|
||||||
|
baseVisitor[type](node, st, c);
|
||||||
|
if (visitors[type]) { visitors[type](node, st || ancestors, ancestors); }
|
||||||
|
if (isNew) { ancestors.pop(); }
|
||||||
|
})(node, state, override);
|
||||||
|
}
|
||||||
|
|
||||||
|
// A recursive walk is one where your functions override the default
|
||||||
|
// walkers. They can modify and replace the state parameter that's
|
||||||
|
// threaded through the walk, and can opt how and whether to walk
|
||||||
|
// their child nodes (by calling their third argument on these
|
||||||
|
// nodes).
|
||||||
|
function recursive(node, state, funcs, baseVisitor, override) {
|
||||||
|
var visitor = funcs ? make(funcs, baseVisitor || undefined) : baseVisitor
|
||||||
|
;(function c(node, st, override) {
|
||||||
|
visitor[override || node.type](node, st, c);
|
||||||
|
})(node, state, override);
|
||||||
|
}
|
||||||
|
|
||||||
|
function makeTest(test) {
|
||||||
|
if (typeof test === "string")
|
||||||
|
{ return function (type) { return type === test; } }
|
||||||
|
else if (!test)
|
||||||
|
{ return function () { return true; } }
|
||||||
|
else
|
||||||
|
{ return test }
|
||||||
|
}
|
||||||
|
|
||||||
|
var Found = function Found(node, state) { this.node = node; this.state = state; };
|
||||||
|
|
||||||
|
// A full walk triggers the callback on each node
|
||||||
|
function full(node, callback, baseVisitor, state, override) {
|
||||||
|
if (!baseVisitor) { baseVisitor = base; }
|
||||||
|
var last
|
||||||
|
;(function c(node, st, override) {
|
||||||
|
var type = override || node.type;
|
||||||
|
baseVisitor[type](node, st, c);
|
||||||
|
if (last !== node) {
|
||||||
|
callback(node, st, type);
|
||||||
|
last = node;
|
||||||
|
}
|
||||||
|
})(node, state, override);
|
||||||
|
}
|
||||||
|
|
||||||
|
// An fullAncestor walk is like an ancestor walk, but triggers
|
||||||
|
// the callback on each node
|
||||||
|
function fullAncestor(node, callback, baseVisitor, state) {
|
||||||
|
if (!baseVisitor) { baseVisitor = base; }
|
||||||
|
var ancestors = [], last
|
||||||
|
;(function c(node, st, override) {
|
||||||
|
var type = override || node.type;
|
||||||
|
var isNew = node !== ancestors[ancestors.length - 1];
|
||||||
|
if (isNew) { ancestors.push(node); }
|
||||||
|
baseVisitor[type](node, st, c);
|
||||||
|
if (last !== node) {
|
||||||
|
callback(node, st || ancestors, ancestors, type);
|
||||||
|
last = node;
|
||||||
|
}
|
||||||
|
if (isNew) { ancestors.pop(); }
|
||||||
|
})(node, state);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Find a node with a given start, end, and type (all are optional,
|
||||||
|
// null can be used as wildcard). Returns a {node, state} object, or
|
||||||
|
// undefined when it doesn't find a matching node.
|
||||||
|
function findNodeAt(node, start, end, test, baseVisitor, state) {
|
||||||
|
if (!baseVisitor) { baseVisitor = base; }
|
||||||
|
test = makeTest(test);
|
||||||
|
try {
|
||||||
|
(function c(node, st, override) {
|
||||||
|
var type = override || node.type;
|
||||||
|
if ((start == null || node.start <= start) &&
|
||||||
|
(end == null || node.end >= end))
|
||||||
|
{ baseVisitor[type](node, st, c); }
|
||||||
|
if ((start == null || node.start === start) &&
|
||||||
|
(end == null || node.end === end) &&
|
||||||
|
test(type, node))
|
||||||
|
{ throw new Found(node, st) }
|
||||||
|
})(node, state);
|
||||||
|
} catch (e) {
|
||||||
|
if (e instanceof Found) { return e }
|
||||||
|
throw e
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Find the innermost node of a given type that contains the given
|
||||||
|
// position. Interface similar to findNodeAt.
|
||||||
|
function findNodeAround(node, pos, test, baseVisitor, state) {
|
||||||
|
test = makeTest(test);
|
||||||
|
if (!baseVisitor) { baseVisitor = base; }
|
||||||
|
try {
|
||||||
|
(function c(node, st, override) {
|
||||||
|
var type = override || node.type;
|
||||||
|
if (node.start > pos || node.end < pos) { return }
|
||||||
|
baseVisitor[type](node, st, c);
|
||||||
|
if (test(type, node)) { throw new Found(node, st) }
|
||||||
|
})(node, state);
|
||||||
|
} catch (e) {
|
||||||
|
if (e instanceof Found) { return e }
|
||||||
|
throw e
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Find the outermost matching node after a given position.
|
||||||
|
function findNodeAfter(node, pos, test, baseVisitor, state) {
|
||||||
|
test = makeTest(test);
|
||||||
|
if (!baseVisitor) { baseVisitor = base; }
|
||||||
|
try {
|
||||||
|
(function c(node, st, override) {
|
||||||
|
if (node.end < pos) { return }
|
||||||
|
var type = override || node.type;
|
||||||
|
if (node.start >= pos && test(type, node)) { throw new Found(node, st) }
|
||||||
|
baseVisitor[type](node, st, c);
|
||||||
|
})(node, state);
|
||||||
|
} catch (e) {
|
||||||
|
if (e instanceof Found) { return e }
|
||||||
|
throw e
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Find the outermost matching node before a given position.
|
||||||
|
function findNodeBefore(node, pos, test, baseVisitor, state) {
|
||||||
|
test = makeTest(test);
|
||||||
|
if (!baseVisitor) { baseVisitor = base; }
|
||||||
|
var max
|
||||||
|
;(function c(node, st, override) {
|
||||||
|
if (node.start > pos) { return }
|
||||||
|
var type = override || node.type;
|
||||||
|
if (node.end <= pos && (!max || max.node.end < node.end) && test(type, node))
|
||||||
|
{ max = new Found(node, st); }
|
||||||
|
baseVisitor[type](node, st, c);
|
||||||
|
})(node, state);
|
||||||
|
return max
|
||||||
|
}
|
||||||
|
|
||||||
|
// Used to create a custom walker. Will fill in all missing node
|
||||||
|
// type properties with the defaults.
|
||||||
|
function make(funcs, baseVisitor) {
|
||||||
|
var visitor = Object.create(baseVisitor || base);
|
||||||
|
for (var type in funcs) { visitor[type] = funcs[type]; }
|
||||||
|
return visitor
|
||||||
|
}
|
||||||
|
|
||||||
|
function skipThrough(node, st, c) { c(node, st); }
|
||||||
|
function ignore(_node, _st, _c) {}
|
||||||
|
|
||||||
|
// Node walkers.
|
||||||
|
|
||||||
|
var base = {};
|
||||||
|
|
||||||
|
base.Program = base.BlockStatement = base.StaticBlock = function (node, st, c) {
|
||||||
|
for (var i = 0, list = node.body; i < list.length; i += 1)
|
||||||
|
{
|
||||||
|
var stmt = list[i];
|
||||||
|
|
||||||
|
c(stmt, st, "Statement");
|
||||||
|
}
|
||||||
|
};
|
||||||
|
base.Statement = skipThrough;
|
||||||
|
base.EmptyStatement = ignore;
|
||||||
|
base.ExpressionStatement = base.ParenthesizedExpression = base.ChainExpression =
|
||||||
|
function (node, st, c) { return c(node.expression, st, "Expression"); };
|
||||||
|
base.IfStatement = function (node, st, c) {
|
||||||
|
c(node.test, st, "Expression");
|
||||||
|
c(node.consequent, st, "Statement");
|
||||||
|
if (node.alternate) { c(node.alternate, st, "Statement"); }
|
||||||
|
};
|
||||||
|
base.LabeledStatement = function (node, st, c) { return c(node.body, st, "Statement"); };
|
||||||
|
base.BreakStatement = base.ContinueStatement = ignore;
|
||||||
|
base.WithStatement = function (node, st, c) {
|
||||||
|
c(node.object, st, "Expression");
|
||||||
|
c(node.body, st, "Statement");
|
||||||
|
};
|
||||||
|
base.SwitchStatement = function (node, st, c) {
|
||||||
|
c(node.discriminant, st, "Expression");
|
||||||
|
for (var i$1 = 0, list$1 = node.cases; i$1 < list$1.length; i$1 += 1) {
|
||||||
|
var cs = list$1[i$1];
|
||||||
|
|
||||||
|
if (cs.test) { c(cs.test, st, "Expression"); }
|
||||||
|
for (var i = 0, list = cs.consequent; i < list.length; i += 1)
|
||||||
|
{
|
||||||
|
var cons = list[i];
|
||||||
|
|
||||||
|
c(cons, st, "Statement");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
base.SwitchCase = function (node, st, c) {
|
||||||
|
if (node.test) { c(node.test, st, "Expression"); }
|
||||||
|
for (var i = 0, list = node.consequent; i < list.length; i += 1)
|
||||||
|
{
|
||||||
|
var cons = list[i];
|
||||||
|
|
||||||
|
c(cons, st, "Statement");
|
||||||
|
}
|
||||||
|
};
|
||||||
|
base.ReturnStatement = base.YieldExpression = base.AwaitExpression = function (node, st, c) {
|
||||||
|
if (node.argument) { c(node.argument, st, "Expression"); }
|
||||||
|
};
|
||||||
|
base.ThrowStatement = base.SpreadElement =
|
||||||
|
function (node, st, c) { return c(node.argument, st, "Expression"); };
|
||||||
|
base.TryStatement = function (node, st, c) {
|
||||||
|
c(node.block, st, "Statement");
|
||||||
|
if (node.handler) { c(node.handler, st); }
|
||||||
|
if (node.finalizer) { c(node.finalizer, st, "Statement"); }
|
||||||
|
};
|
||||||
|
base.CatchClause = function (node, st, c) {
|
||||||
|
if (node.param) { c(node.param, st, "Pattern"); }
|
||||||
|
c(node.body, st, "Statement");
|
||||||
|
};
|
||||||
|
base.WhileStatement = base.DoWhileStatement = function (node, st, c) {
|
||||||
|
c(node.test, st, "Expression");
|
||||||
|
c(node.body, st, "Statement");
|
||||||
|
};
|
||||||
|
base.ForStatement = function (node, st, c) {
|
||||||
|
if (node.init) { c(node.init, st, "ForInit"); }
|
||||||
|
if (node.test) { c(node.test, st, "Expression"); }
|
||||||
|
if (node.update) { c(node.update, st, "Expression"); }
|
||||||
|
c(node.body, st, "Statement");
|
||||||
|
};
|
||||||
|
base.ForInStatement = base.ForOfStatement = function (node, st, c) {
|
||||||
|
c(node.left, st, "ForInit");
|
||||||
|
c(node.right, st, "Expression");
|
||||||
|
c(node.body, st, "Statement");
|
||||||
|
};
|
||||||
|
base.ForInit = function (node, st, c) {
|
||||||
|
if (node.type === "VariableDeclaration") { c(node, st); }
|
||||||
|
else { c(node, st, "Expression"); }
|
||||||
|
};
|
||||||
|
base.DebuggerStatement = ignore;
|
||||||
|
|
||||||
|
base.FunctionDeclaration = function (node, st, c) { return c(node, st, "Function"); };
|
||||||
|
base.VariableDeclaration = function (node, st, c) {
|
||||||
|
for (var i = 0, list = node.declarations; i < list.length; i += 1)
|
||||||
|
{
|
||||||
|
var decl = list[i];
|
||||||
|
|
||||||
|
c(decl, st);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
base.VariableDeclarator = function (node, st, c) {
|
||||||
|
c(node.id, st, "Pattern");
|
||||||
|
if (node.init) { c(node.init, st, "Expression"); }
|
||||||
|
};
|
||||||
|
|
||||||
|
base.Function = function (node, st, c) {
|
||||||
|
if (node.id) { c(node.id, st, "Pattern"); }
|
||||||
|
for (var i = 0, list = node.params; i < list.length; i += 1)
|
||||||
|
{
|
||||||
|
var param = list[i];
|
||||||
|
|
||||||
|
c(param, st, "Pattern");
|
||||||
|
}
|
||||||
|
c(node.body, st, node.expression ? "Expression" : "Statement");
|
||||||
|
};
|
||||||
|
|
||||||
|
base.Pattern = function (node, st, c) {
|
||||||
|
if (node.type === "Identifier")
|
||||||
|
{ c(node, st, "VariablePattern"); }
|
||||||
|
else if (node.type === "MemberExpression")
|
||||||
|
{ c(node, st, "MemberPattern"); }
|
||||||
|
else
|
||||||
|
{ c(node, st); }
|
||||||
|
};
|
||||||
|
base.VariablePattern = ignore;
|
||||||
|
base.MemberPattern = skipThrough;
|
||||||
|
base.RestElement = function (node, st, c) { return c(node.argument, st, "Pattern"); };
|
||||||
|
base.ArrayPattern = function (node, st, c) {
|
||||||
|
for (var i = 0, list = node.elements; i < list.length; i += 1) {
|
||||||
|
var elt = list[i];
|
||||||
|
|
||||||
|
if (elt) { c(elt, st, "Pattern"); }
|
||||||
|
}
|
||||||
|
};
|
||||||
|
base.ObjectPattern = function (node, st, c) {
|
||||||
|
for (var i = 0, list = node.properties; i < list.length; i += 1) {
|
||||||
|
var prop = list[i];
|
||||||
|
|
||||||
|
if (prop.type === "Property") {
|
||||||
|
if (prop.computed) { c(prop.key, st, "Expression"); }
|
||||||
|
c(prop.value, st, "Pattern");
|
||||||
|
} else if (prop.type === "RestElement") {
|
||||||
|
c(prop.argument, st, "Pattern");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
base.Expression = skipThrough;
|
||||||
|
base.ThisExpression = base.Super = base.MetaProperty = ignore;
|
||||||
|
base.ArrayExpression = function (node, st, c) {
|
||||||
|
for (var i = 0, list = node.elements; i < list.length; i += 1) {
|
||||||
|
var elt = list[i];
|
||||||
|
|
||||||
|
if (elt) { c(elt, st, "Expression"); }
|
||||||
|
}
|
||||||
|
};
|
||||||
|
base.ObjectExpression = function (node, st, c) {
|
||||||
|
for (var i = 0, list = node.properties; i < list.length; i += 1)
|
||||||
|
{
|
||||||
|
var prop = list[i];
|
||||||
|
|
||||||
|
c(prop, st);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
base.FunctionExpression = base.ArrowFunctionExpression = base.FunctionDeclaration;
|
||||||
|
base.SequenceExpression = function (node, st, c) {
|
||||||
|
for (var i = 0, list = node.expressions; i < list.length; i += 1)
|
||||||
|
{
|
||||||
|
var expr = list[i];
|
||||||
|
|
||||||
|
c(expr, st, "Expression");
|
||||||
|
}
|
||||||
|
};
|
||||||
|
base.TemplateLiteral = function (node, st, c) {
|
||||||
|
for (var i = 0, list = node.quasis; i < list.length; i += 1)
|
||||||
|
{
|
||||||
|
var quasi = list[i];
|
||||||
|
|
||||||
|
c(quasi, st);
|
||||||
|
}
|
||||||
|
|
||||||
|
for (var i$1 = 0, list$1 = node.expressions; i$1 < list$1.length; i$1 += 1)
|
||||||
|
{
|
||||||
|
var expr = list$1[i$1];
|
||||||
|
|
||||||
|
c(expr, st, "Expression");
|
||||||
|
}
|
||||||
|
};
|
||||||
|
base.TemplateElement = ignore;
|
||||||
|
base.UnaryExpression = base.UpdateExpression = function (node, st, c) {
|
||||||
|
c(node.argument, st, "Expression");
|
||||||
|
};
|
||||||
|
base.BinaryExpression = base.LogicalExpression = function (node, st, c) {
|
||||||
|
c(node.left, st, "Expression");
|
||||||
|
c(node.right, st, "Expression");
|
||||||
|
};
|
||||||
|
base.AssignmentExpression = base.AssignmentPattern = function (node, st, c) {
|
||||||
|
c(node.left, st, "Pattern");
|
||||||
|
c(node.right, st, "Expression");
|
||||||
|
};
|
||||||
|
base.ConditionalExpression = function (node, st, c) {
|
||||||
|
c(node.test, st, "Expression");
|
||||||
|
c(node.consequent, st, "Expression");
|
||||||
|
c(node.alternate, st, "Expression");
|
||||||
|
};
|
||||||
|
base.NewExpression = base.CallExpression = function (node, st, c) {
|
||||||
|
c(node.callee, st, "Expression");
|
||||||
|
if (node.arguments)
|
||||||
|
{ for (var i = 0, list = node.arguments; i < list.length; i += 1)
|
||||||
|
{
|
||||||
|
var arg = list[i];
|
||||||
|
|
||||||
|
c(arg, st, "Expression");
|
||||||
|
} }
|
||||||
|
};
|
||||||
|
base.MemberExpression = function (node, st, c) {
|
||||||
|
c(node.object, st, "Expression");
|
||||||
|
if (node.computed) { c(node.property, st, "Expression"); }
|
||||||
|
};
|
||||||
|
base.ExportNamedDeclaration = base.ExportDefaultDeclaration = function (node, st, c) {
|
||||||
|
if (node.declaration)
|
||||||
|
{ c(node.declaration, st, node.type === "ExportNamedDeclaration" || node.declaration.id ? "Statement" : "Expression"); }
|
||||||
|
if (node.source) { c(node.source, st, "Expression"); }
|
||||||
|
};
|
||||||
|
base.ExportAllDeclaration = function (node, st, c) {
|
||||||
|
if (node.exported)
|
||||||
|
{ c(node.exported, st); }
|
||||||
|
c(node.source, st, "Expression");
|
||||||
|
};
|
||||||
|
base.ImportDeclaration = function (node, st, c) {
|
||||||
|
for (var i = 0, list = node.specifiers; i < list.length; i += 1)
|
||||||
|
{
|
||||||
|
var spec = list[i];
|
||||||
|
|
||||||
|
c(spec, st);
|
||||||
|
}
|
||||||
|
c(node.source, st, "Expression");
|
||||||
|
};
|
||||||
|
base.ImportExpression = function (node, st, c) {
|
||||||
|
c(node.source, st, "Expression");
|
||||||
|
};
|
||||||
|
base.ImportSpecifier = base.ImportDefaultSpecifier = base.ImportNamespaceSpecifier = base.Identifier = base.PrivateIdentifier = base.Literal = ignore;
|
||||||
|
|
||||||
|
base.TaggedTemplateExpression = function (node, st, c) {
|
||||||
|
c(node.tag, st, "Expression");
|
||||||
|
c(node.quasi, st, "Expression");
|
||||||
|
};
|
||||||
|
base.ClassDeclaration = base.ClassExpression = function (node, st, c) { return c(node, st, "Class"); };
|
||||||
|
base.Class = function (node, st, c) {
|
||||||
|
if (node.id) { c(node.id, st, "Pattern"); }
|
||||||
|
if (node.superClass) { c(node.superClass, st, "Expression"); }
|
||||||
|
c(node.body, st);
|
||||||
|
};
|
||||||
|
base.ClassBody = function (node, st, c) {
|
||||||
|
for (var i = 0, list = node.body; i < list.length; i += 1)
|
||||||
|
{
|
||||||
|
var elt = list[i];
|
||||||
|
|
||||||
|
c(elt, st);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
base.MethodDefinition = base.PropertyDefinition = base.Property = function (node, st, c) {
|
||||||
|
if (node.computed) { c(node.key, st, "Expression"); }
|
||||||
|
if (node.value) { c(node.value, st, "Expression"); }
|
||||||
|
};
|
||||||
|
|
||||||
|
exports.ancestor = ancestor;
|
||||||
|
exports.base = base;
|
||||||
|
exports.findNodeAfter = findNodeAfter;
|
||||||
|
exports.findNodeAround = findNodeAround;
|
||||||
|
exports.findNodeAt = findNodeAt;
|
||||||
|
exports.findNodeBefore = findNodeBefore;
|
||||||
|
exports.full = full;
|
||||||
|
exports.fullAncestor = fullAncestor;
|
||||||
|
exports.make = make;
|
||||||
|
exports.recursive = recursive;
|
||||||
|
exports.simple = simple;
|
||||||
|
|
||||||
|
}));

443 node_modules/acorn-walk/dist/walk.mjs generated vendored Normal file
@@ -0,0 +1,443 @@
// AST walker module for ESTree compatible trees
|
||||||
|
|
||||||
|
// A simple walk is one where you simply specify callbacks to be
|
||||||
|
// called on specific nodes. The last two arguments are optional. A
|
||||||
|
// simple use would be
|
||||||
|
//
|
||||||
|
// walk.simple(myTree, {
|
||||||
|
// Expression: function(node) { ... }
|
||||||
|
// });
|
||||||
|
//
|
||||||
|
// to do something with all expressions. All ESTree node types
|
||||||
|
// can be used to identify node types, as well as Expression and
|
||||||
|
// Statement, which denote categories of nodes.
|
||||||
|
//
|
||||||
|
// The base argument can be used to pass a custom (recursive)
|
||||||
|
// walker, and state can be used to give this walked an initial
|
||||||
|
// state.
|
||||||
|
|
||||||
|
function simple(node, visitors, baseVisitor, state, override) {
|
||||||
|
if (!baseVisitor) { baseVisitor = base
|
||||||
|
; }(function c(node, st, override) {
|
||||||
|
var type = override || node.type;
|
||||||
|
baseVisitor[type](node, st, c);
|
||||||
|
if (visitors[type]) { visitors[type](node, st); }
|
||||||
|
})(node, state, override);
|
||||||
|
}
|
||||||
|
|
||||||
|
// An ancestor walk keeps an array of ancestor nodes (including the
|
||||||
|
// current node) and passes them to the callback as third parameter
|
||||||
|
// (and also as state parameter when no other state is present).
|
||||||
|
function ancestor(node, visitors, baseVisitor, state, override) {
|
||||||
|
var ancestors = [];
|
||||||
|
if (!baseVisitor) { baseVisitor = base
|
||||||
|
; }(function c(node, st, override) {
|
||||||
|
var type = override || node.type;
|
||||||
|
var isNew = node !== ancestors[ancestors.length - 1];
|
||||||
|
if (isNew) { ancestors.push(node); }
|
||||||
|
baseVisitor[type](node, st, c);
|
||||||
|
if (visitors[type]) { visitors[type](node, st || ancestors, ancestors); }
|
||||||
|
if (isNew) { ancestors.pop(); }
|
||||||
|
})(node, state, override);
|
||||||
|
}
|
||||||
|
|
||||||
|
// A recursive walk is one where your functions override the default
|
||||||
|
// walkers. They can modify and replace the state parameter that's
|
||||||
|
// threaded through the walk, and can opt how and whether to walk
|
||||||
|
// their child nodes (by calling their third argument on these
|
||||||
|
// nodes).
|
||||||
|
function recursive(node, state, funcs, baseVisitor, override) {
|
||||||
|
var visitor = funcs ? make(funcs, baseVisitor || undefined) : baseVisitor
|
||||||
|
;(function c(node, st, override) {
|
||||||
|
visitor[override || node.type](node, st, c);
|
||||||
|
})(node, state, override);
|
||||||
|
}
|
||||||
|
|
||||||
|
function makeTest(test) {
|
||||||
|
if (typeof test === "string")
|
||||||
|
{ return function (type) { return type === test; } }
|
||||||
|
else if (!test)
|
||||||
|
{ return function () { return true; } }
|
||||||
|
else
|
||||||
|
{ return test }
|
||||||
|
}
|
||||||
|
|
||||||
|
var Found = function Found(node, state) { this.node = node; this.state = state; };
|
||||||
|
|
||||||
|
// A full walk triggers the callback on each node
|
||||||
|
function full(node, callback, baseVisitor, state, override) {
|
||||||
|
if (!baseVisitor) { baseVisitor = base; }
|
||||||
|
var last
|
||||||
|
;(function c(node, st, override) {
|
||||||
|
var type = override || node.type;
|
||||||
|
baseVisitor[type](node, st, c);
|
||||||
|
if (last !== node) {
|
||||||
|
callback(node, st, type);
|
||||||
|
last = node;
|
||||||
|
}
|
||||||
|
})(node, state, override);
|
||||||
|
}
|
||||||
|
|
||||||
|
// An fullAncestor walk is like an ancestor walk, but triggers
|
||||||
|
// the callback on each node
|
||||||
|
function fullAncestor(node, callback, baseVisitor, state) {
|
||||||
|
if (!baseVisitor) { baseVisitor = base; }
|
||||||
|
var ancestors = [], last
|
||||||
|
;(function c(node, st, override) {
|
||||||
|
var type = override || node.type;
|
||||||
|
var isNew = node !== ancestors[ancestors.length - 1];
|
||||||
|
if (isNew) { ancestors.push(node); }
|
||||||
|
baseVisitor[type](node, st, c);
|
||||||
|
if (last !== node) {
|
||||||
|
callback(node, st || ancestors, ancestors, type);
|
||||||
|
last = node;
|
||||||
|
}
|
||||||
|
if (isNew) { ancestors.pop(); }
|
||||||
|
})(node, state);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Find a node with a given start, end, and type (all are optional,
|
||||||
|
// null can be used as wildcard). Returns a {node, state} object, or
|
||||||
|
// undefined when it doesn't find a matching node.
|
||||||
|
function findNodeAt(node, start, end, test, baseVisitor, state) {
|
||||||
|
if (!baseVisitor) { baseVisitor = base; }
|
||||||
|
test = makeTest(test);
|
||||||
|
try {
|
||||||
|
(function c(node, st, override) {
|
||||||
|
var type = override || node.type;
|
||||||
|
if ((start == null || node.start <= start) &&
|
||||||
|
(end == null || node.end >= end))
|
||||||
|
{ baseVisitor[type](node, st, c); }
|
||||||
|
if ((start == null || node.start === start) &&
|
||||||
|
(end == null || node.end === end) &&
|
||||||
|
test(type, node))
|
||||||
|
{ throw new Found(node, st) }
|
||||||
|
})(node, state);
|
||||||
|
} catch (e) {
|
||||||
|
if (e instanceof Found) { return e }
|
||||||
|
throw e
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Find the innermost node of a given type that contains the given
|
||||||
|
// position. Interface similar to findNodeAt.
|
||||||
|
function findNodeAround(node, pos, test, baseVisitor, state) {
|
||||||
|
test = makeTest(test);
|
||||||
|
if (!baseVisitor) { baseVisitor = base; }
|
||||||
|
try {
|
||||||
|
(function c(node, st, override) {
|
||||||
|
var type = override || node.type;
|
||||||
|
if (node.start > pos || node.end < pos) { return }
|
||||||
|
baseVisitor[type](node, st, c);
|
||||||
|
if (test(type, node)) { throw new Found(node, st) }
|
||||||
|
})(node, state);
|
||||||
|
} catch (e) {
|
||||||
|
if (e instanceof Found) { return e }
|
||||||
|
throw e
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Find the outermost matching node after a given position.
|
||||||
|
function findNodeAfter(node, pos, test, baseVisitor, state) {
|
||||||
|
test = makeTest(test);
|
||||||
|
if (!baseVisitor) { baseVisitor = base; }
|
||||||
|
try {
|
||||||
|
(function c(node, st, override) {
|
||||||
|
if (node.end < pos) { return }
|
||||||
|
var type = override || node.type;
|
||||||
|
if (node.start >= pos && test(type, node)) { throw new Found(node, st) }
|
||||||
|
baseVisitor[type](node, st, c);
|
||||||
|
})(node, state);
|
||||||
|
} catch (e) {
|
||||||
|
if (e instanceof Found) { return e }
|
||||||
|
throw e
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Find the outermost matching node before a given position.
|
||||||
|
function findNodeBefore(node, pos, test, baseVisitor, state) {
|
||||||
|
test = makeTest(test);
|
||||||
|
if (!baseVisitor) { baseVisitor = base; }
|
||||||
|
var max
|
||||||
|
;(function c(node, st, override) {
|
||||||
|
if (node.start > pos) { return }
|
||||||
|
var type = override || node.type;
|
||||||
|
if (node.end <= pos && (!max || max.node.end < node.end) && test(type, node))
|
||||||
|
{ max = new Found(node, st); }
|
||||||
|
baseVisitor[type](node, st, c);
|
||||||
|
})(node, state);
|
||||||
|
return max
|
||||||
|
}
|
||||||
|
|
||||||
|
// Used to create a custom walker. Will fill in all missing node
|
||||||
|
// type properties with the defaults.
|
||||||
|
function make(funcs, baseVisitor) {
|
||||||
|
var visitor = Object.create(baseVisitor || base);
|
||||||
|
for (var type in funcs) { visitor[type] = funcs[type]; }
|
||||||
|
return visitor
|
||||||
|
}
|
||||||
|
|
||||||
|
function skipThrough(node, st, c) { c(node, st); }
|
||||||
|
function ignore(_node, _st, _c) {}
|
||||||
|
|
||||||
|
// Node walkers.
|
||||||
|
|
||||||
|
var base = {};
|
||||||
|
|
||||||
|
base.Program = base.BlockStatement = base.StaticBlock = function (node, st, c) {
|
||||||
|
for (var i = 0, list = node.body; i < list.length; i += 1)
|
||||||
|
{
|
||||||
|
var stmt = list[i];
|
||||||
|
|
||||||
|
c(stmt, st, "Statement");
|
||||||
|
}
|
||||||
|
};
|
||||||
|
base.Statement = skipThrough;
|
||||||
|
base.EmptyStatement = ignore;
|
||||||
|
base.ExpressionStatement = base.ParenthesizedExpression = base.ChainExpression =
|
||||||
|
function (node, st, c) { return c(node.expression, st, "Expression"); };
|
||||||
|
base.IfStatement = function (node, st, c) {
|
||||||
|
c(node.test, st, "Expression");
|
||||||
|
c(node.consequent, st, "Statement");
|
||||||
|
if (node.alternate) { c(node.alternate, st, "Statement"); }
|
||||||
|
};
|
||||||
|
base.LabeledStatement = function (node, st, c) { return c(node.body, st, "Statement"); };
|
||||||
|
base.BreakStatement = base.ContinueStatement = ignore;
|
||||||
|
base.WithStatement = function (node, st, c) {
|
||||||
|
c(node.object, st, "Expression");
|
||||||
|
c(node.body, st, "Statement");
|
||||||
|
};
|
||||||
|
base.SwitchStatement = function (node, st, c) {
|
||||||
|
c(node.discriminant, st, "Expression");
|
||||||
|
for (var i$1 = 0, list$1 = node.cases; i$1 < list$1.length; i$1 += 1) {
|
||||||
|
var cs = list$1[i$1];
|
||||||
|
|
||||||
|
if (cs.test) { c(cs.test, st, "Expression"); }
|
||||||
|
for (var i = 0, list = cs.consequent; i < list.length; i += 1)
|
||||||
|
{
|
||||||
|
var cons = list[i];
|
||||||
|
|
||||||
|
c(cons, st, "Statement");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
base.SwitchCase = function (node, st, c) {
|
||||||
|
if (node.test) { c(node.test, st, "Expression"); }
|
||||||
|
for (var i = 0, list = node.consequent; i < list.length; i += 1)
|
||||||
|
{
|
||||||
|
var cons = list[i];
|
||||||
|
|
||||||
|
c(cons, st, "Statement");
|
||||||
|
}
|
||||||
|
};
|
||||||
|
base.ReturnStatement = base.YieldExpression = base.AwaitExpression = function (node, st, c) {
|
||||||
|
if (node.argument) { c(node.argument, st, "Expression"); }
|
||||||
|
};
|
||||||
|
base.ThrowStatement = base.SpreadElement =
|
||||||
|
function (node, st, c) { return c(node.argument, st, "Expression"); };
|
||||||
|
base.TryStatement = function (node, st, c) {
|
||||||
|
c(node.block, st, "Statement");
|
||||||
|
if (node.handler) { c(node.handler, st); }
|
||||||
|
if (node.finalizer) { c(node.finalizer, st, "Statement"); }
|
||||||
|
};
|
||||||
|
base.CatchClause = function (node, st, c) {
|
||||||
|
if (node.param) { c(node.param, st, "Pattern"); }
|
||||||
|
c(node.body, st, "Statement");
|
||||||
|
};
|
||||||
|
base.WhileStatement = base.DoWhileStatement = function (node, st, c) {
|
||||||
|
c(node.test, st, "Expression");
|
||||||
|
c(node.body, st, "Statement");
|
||||||
|
};
|
||||||
|
base.ForStatement = function (node, st, c) {
|
||||||
|
if (node.init) { c(node.init, st, "ForInit"); }
|
||||||
|
if (node.test) { c(node.test, st, "Expression"); }
|
||||||
|
if (node.update) { c(node.update, st, "Expression"); }
|
||||||
|
c(node.body, st, "Statement");
|
||||||
|
};
|
||||||
|
base.ForInStatement = base.ForOfStatement = function (node, st, c) {
|
||||||
|
c(node.left, st, "ForInit");
|
||||||
|
c(node.right, st, "Expression");
|
||||||
|
c(node.body, st, "Statement");
|
||||||
|
};
|
||||||
|
base.ForInit = function (node, st, c) {
|
||||||
|
if (node.type === "VariableDeclaration") { c(node, st); }
|
||||||
|
else { c(node, st, "Expression"); }
|
||||||
|
};
|
||||||
|
base.DebuggerStatement = ignore;
|
||||||
|
|
||||||
|
base.FunctionDeclaration = function (node, st, c) { return c(node, st, "Function"); };
|
||||||
|
base.VariableDeclaration = function (node, st, c) {
|
||||||
|
for (var i = 0, list = node.declarations; i < list.length; i += 1)
|
||||||
|
{
|
||||||
|
var decl = list[i];
|
||||||
|
|
||||||
|
c(decl, st);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
base.VariableDeclarator = function (node, st, c) {
|
||||||
|
c(node.id, st, "Pattern");
|
||||||
|
if (node.init) { c(node.init, st, "Expression"); }
|
||||||
|
};
|
||||||
|
|
||||||
|
base.Function = function (node, st, c) {
|
||||||
|
if (node.id) { c(node.id, st, "Pattern"); }
|
||||||
|
for (var i = 0, list = node.params; i < list.length; i += 1)
|
||||||
|
{
|
||||||
|
var param = list[i];
|
||||||
|
|
||||||
|
c(param, st, "Pattern");
|
||||||
|
}
|
||||||
|
c(node.body, st, node.expression ? "Expression" : "Statement");
|
||||||
|
};
|
||||||
|
|
||||||
|
base.Pattern = function (node, st, c) {
|
||||||
|
if (node.type === "Identifier")
|
||||||
|
{ c(node, st, "VariablePattern"); }
|
||||||
|
else if (node.type === "MemberExpression")
|
||||||
|
{ c(node, st, "MemberPattern"); }
|
||||||
|
else
|
||||||
|
{ c(node, st); }
|
||||||
|
};
|
||||||
|
base.VariablePattern = ignore;
|
||||||
|
base.MemberPattern = skipThrough;
|
||||||
|
base.RestElement = function (node, st, c) { return c(node.argument, st, "Pattern"); };
|
||||||
|
base.ArrayPattern = function (node, st, c) {
|
||||||
|
for (var i = 0, list = node.elements; i < list.length; i += 1) {
|
||||||
|
var elt = list[i];
|
||||||
|
|
||||||
|
if (elt) { c(elt, st, "Pattern"); }
|
||||||
|
}
|
||||||
|
};
|
||||||
|
base.ObjectPattern = function (node, st, c) {
|
||||||
|
for (var i = 0, list = node.properties; i < list.length; i += 1) {
|
||||||
|
var prop = list[i];
|
||||||
|
|
||||||
|
if (prop.type === "Property") {
|
||||||
|
if (prop.computed) { c(prop.key, st, "Expression"); }
|
||||||
|
c(prop.value, st, "Pattern");
|
||||||
|
} else if (prop.type === "RestElement") {
|
||||||
|
c(prop.argument, st, "Pattern");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
base.Expression = skipThrough;
|
||||||
|
base.ThisExpression = base.Super = base.MetaProperty = ignore;
|
||||||
|
base.ArrayExpression = function (node, st, c) {
|
||||||
|
for (var i = 0, list = node.elements; i < list.length; i += 1) {
|
||||||
|
var elt = list[i];
|
||||||
|
|
||||||
|
if (elt) { c(elt, st, "Expression"); }
|
||||||
|
}
|
||||||
|
};
|
||||||
|
base.ObjectExpression = function (node, st, c) {
|
||||||
|
for (var i = 0, list = node.properties; i < list.length; i += 1)
|
||||||
|
{
|
||||||
|
var prop = list[i];
|
||||||
|
|
||||||
|
c(prop, st);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
base.FunctionExpression = base.ArrowFunctionExpression = base.FunctionDeclaration;
|
||||||
|
base.SequenceExpression = function (node, st, c) {
|
||||||
|
for (var i = 0, list = node.expressions; i < list.length; i += 1)
|
||||||
|
{
|
||||||
|
var expr = list[i];
|
||||||
|
|
||||||
|
c(expr, st, "Expression");
|
||||||
|
}
|
||||||
|
};
|
||||||
|
base.TemplateLiteral = function (node, st, c) {
|
||||||
|
for (var i = 0, list = node.quasis; i < list.length; i += 1)
|
||||||
|
{
|
||||||
|
var quasi = list[i];
|
||||||
|
|
||||||
|
c(quasi, st);
|
||||||
|
}
|
||||||
|
|
||||||
|
for (var i$1 = 0, list$1 = node.expressions; i$1 < list$1.length; i$1 += 1)
|
||||||
|
{
|
||||||
|
var expr = list$1[i$1];
|
||||||
|
|
||||||
|
c(expr, st, "Expression");
|
||||||
|
}
|
||||||
|
};
|
||||||
|
base.TemplateElement = ignore;
|
||||||
|
base.UnaryExpression = base.UpdateExpression = function (node, st, c) {
|
||||||
|
c(node.argument, st, "Expression");
|
||||||
|
};
|
||||||
|
base.BinaryExpression = base.LogicalExpression = function (node, st, c) {
|
||||||
|
c(node.left, st, "Expression");
|
||||||
|
c(node.right, st, "Expression");
|
||||||
|
};
|
||||||
|
base.AssignmentExpression = base.AssignmentPattern = function (node, st, c) {
|
||||||
|
c(node.left, st, "Pattern");
|
||||||
|
c(node.right, st, "Expression");
|
||||||
|
};
|
||||||
|
base.ConditionalExpression = function (node, st, c) {
|
||||||
|
c(node.test, st, "Expression");
|
||||||
|
c(node.consequent, st, "Expression");
|
||||||
|
c(node.alternate, st, "Expression");
|
||||||
|
};
|
||||||
|
base.NewExpression = base.CallExpression = function (node, st, c) {
|
||||||
|
c(node.callee, st, "Expression");
|
||||||
|
if (node.arguments)
|
||||||
|
{ for (var i = 0, list = node.arguments; i < list.length; i += 1)
|
||||||
|
{
|
||||||
|
var arg = list[i];
|
||||||
|
|
||||||
|
c(arg, st, "Expression");
|
||||||
|
} }
|
||||||
|
};
|
||||||
|
base.MemberExpression = function (node, st, c) {
|
||||||
|
c(node.object, st, "Expression");
|
||||||
|
if (node.computed) { c(node.property, st, "Expression"); }
|
||||||
|
};
|
||||||
|
base.ExportNamedDeclaration = base.ExportDefaultDeclaration = function (node, st, c) {
|
||||||
|
if (node.declaration)
|
||||||
|
{ c(node.declaration, st, node.type === "ExportNamedDeclaration" || node.declaration.id ? "Statement" : "Expression"); }
|
||||||
|
if (node.source) { c(node.source, st, "Expression"); }
|
||||||
|
};
|
||||||
|
base.ExportAllDeclaration = function (node, st, c) {
|
||||||
|
if (node.exported)
|
||||||
|
{ c(node.exported, st); }
|
||||||
|
c(node.source, st, "Expression");
|
||||||
|
};
|
||||||
|
base.ImportDeclaration = function (node, st, c) {
|
||||||
|
for (var i = 0, list = node.specifiers; i < list.length; i += 1)
|
||||||
|
{
|
||||||
|
var spec = list[i];
|
||||||
|
|
||||||
|
c(spec, st);
|
||||||
|
}
|
||||||
|
c(node.source, st, "Expression");
|
||||||
|
};
|
||||||
|
base.ImportExpression = function (node, st, c) {
|
||||||
|
c(node.source, st, "Expression");
|
||||||
|
};
|
||||||
|
base.ImportSpecifier = base.ImportDefaultSpecifier = base.ImportNamespaceSpecifier = base.Identifier = base.PrivateIdentifier = base.Literal = ignore;
|
||||||
|
|
||||||
|
base.TaggedTemplateExpression = function (node, st, c) {
|
||||||
|
c(node.tag, st, "Expression");
|
||||||
|
c(node.quasi, st, "Expression");
|
||||||
|
};
|
||||||
|
base.ClassDeclaration = base.ClassExpression = function (node, st, c) { return c(node, st, "Class"); };
|
||||||
|
base.Class = function (node, st, c) {
|
||||||
|
if (node.id) { c(node.id, st, "Pattern"); }
|
||||||
|
if (node.superClass) { c(node.superClass, st, "Expression"); }
|
||||||
|
c(node.body, st);
|
||||||
|
};
|
||||||
|
base.ClassBody = function (node, st, c) {
|
||||||
|
for (var i = 0, list = node.body; i < list.length; i += 1)
|
||||||
|
{
|
||||||
|
var elt = list[i];
|
||||||
|
|
||||||
|
c(elt, st);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
base.MethodDefinition = base.PropertyDefinition = base.Property = function (node, st, c) {
|
||||||
|
if (node.computed) { c(node.key, st, "Expression"); }
|
||||||
|
if (node.value) { c(node.value, st, "Expression"); }
|
||||||
|
};
|
||||||
|
|
||||||
|
export { ancestor, base, findNodeAfter, findNodeAround, findNodeAt, findNodeBefore, full, fullAncestor, make, recursive, simple };

47 node_modules/acorn-walk/package.json generated vendored Normal file
@@ -0,0 +1,47 @@
{
  "name": "acorn-walk",
  "description": "ECMAScript (ESTree) AST walker",
  "homepage": "https://github.com/acornjs/acorn",
  "main": "dist/walk.js",
  "types": "dist/walk.d.ts",
  "module": "dist/walk.mjs",
  "exports": {
    ".": [
      {
        "import": "./dist/walk.mjs",
        "require": "./dist/walk.js",
        "default": "./dist/walk.js"
      },
      "./dist/walk.js"
    ],
    "./package.json": "./package.json"
  },
  "version": "8.3.2",
  "engines": {
    "node": ">=0.4.0"
  },
  "maintainers": [
    {
      "name": "Marijn Haverbeke",
      "email": "marijnh@gmail.com",
      "web": "https://marijnhaverbeke.nl"
    },
    {
      "name": "Ingvar Stepanyan",
      "email": "me@rreverser.com",
      "web": "https://rreverser.com/"
    },
    {
      "name": "Adrian Heine",
      "web": "http://adrianheine.de"
    }
  ],
  "repository": {
    "type": "git",
    "url": "https://github.com/acornjs/acorn.git"
  },
  "scripts": {
    "prepare": "cd ..; npm run build:walk"
  },
  "license": "MIT"
}

880  node_modules/acorn/CHANGELOG.md  generated  vendored  Normal file
@@ -0,0 +1,880 @@
|
|||||||
|
## 8.11.3 (2023-12-29)
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
Add `Function` and `Class` to the `AggregateType` type, so that they can be used in walkers without raising a type error.
|
||||||
|
|
||||||
|
Make sure `onToken` gets an `import` keyword token when parsing `import.meta`.
|
||||||
|
|
||||||
|
Fix a bug where `.loc.start` could be undefined for `new.target` `meta` nodes.
|
||||||
|
|
||||||
|
## 8.11.2 (2023-10-27)
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
Fix a bug that caused regular expressions after colon tokens to not be properly tokenized in some circumstances.
|
||||||
|
|
||||||
|
## 8.11.1 (2023-10-26)
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
Fix a regression where `onToken` would receive 'name' tokens for 'new' keyword tokens.
|
||||||
|
|
||||||
|
## 8.11.0 (2023-10-26)
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
Fix an issue where tokenizing (without parsing) an object literal with a property named `class` or `function` could, in some circumstance, put the tokenizer into an invalid state.
|
||||||
|
|
||||||
|
Fix an issue where a slash after a call to a property named the same as some keywords would be tokenized as a regular expression.
|
||||||
|
|
||||||
|
### New features
|
||||||
|
|
||||||
|
Upgrade to Unicode 15.1.
|
||||||
|
|
||||||
|
Use a set of new, much more precise, TypeScript types.
|
||||||
|
|
||||||
|
## 8.10.0 (2023-07-05)
|
||||||
|
|
||||||
|
### New features
|
||||||
|
|
||||||
|
Add a `checkPrivateFields` option that disables strict checking of private property use.
|
||||||
|
|
||||||
|
## 8.9.0 (2023-06-16)
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
Forbid dynamic import after `new`, even when part of a member expression.
|
||||||
|
|
||||||
|
### New features
|
||||||
|
|
||||||
|
Add Unicode properties for ES2023.
|
||||||
|
|
||||||
|
Add support for the `v` flag to regular expressions.
|
||||||
|
|
||||||
|
## 8.8.2 (2023-01-23)
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
Fix a bug that caused `allowHashBang` to be set to false when not provided, even with `ecmaVersion >= 14`.
|
||||||
|
|
||||||
|
Fix an exception when passing no option object to `parse` or `new Parser`.
|
||||||
|
|
||||||
|
Fix incorrect parse error on `if (0) let\n[astral identifier char]`.
|
||||||
|
|
||||||
|
## 8.8.1 (2022-10-24)
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
Make type for `Comment` compatible with estree types.
|
||||||
|
|
||||||
|
## 8.8.0 (2022-07-21)
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
Allow parentheses around spread args in destructuring object assignment.
|
||||||
|
|
||||||
|
Fix an issue where the tree contained `directive` properties when parsing with a language version that doesn't support them.
|
||||||
|
|
||||||
|
### New features
|
||||||
|
|
||||||
|
Support hashbang comments by default in ECMAScript 2023 and later.
|
||||||
|
|
||||||
|
## 8.7.1 (2021-04-26)
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
Stop handling `"use strict"` directives in ECMAScript versions before 5.
|
||||||
|
|
||||||
|
Fix an issue where duplicate quoted export names in `export *` syntax were incorrectly checked.
|
||||||
|
|
||||||
|
Add missing type for `tokTypes`.
|
||||||
|
|
||||||
|
## 8.7.0 (2021-12-27)
|
||||||
|
|
||||||
|
### New features
|
||||||
|
|
||||||
|
Support quoted export names.
|
||||||
|
|
||||||
|
Upgrade to Unicode 14.
|
||||||
|
|
||||||
|
Add support for Unicode 13 properties in regular expressions.
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
Use a loop to find line breaks, because the existing regexp search would overrun the end of the searched range and waste a lot of time in minified code.
|
||||||
|
|
||||||
|
## 8.6.0 (2021-11-18)
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
Fix a bug where an object literal with multiple `__proto__` properties would incorrectly be accepted if a later property value held an assignment.
|
||||||
|
|
||||||
|
### New features
|
||||||
|
|
||||||
|
Support class private fields with the `in` operator.
|
||||||
|
|
||||||
|
## 8.5.0 (2021-09-06)
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
Improve context-dependent tokenization in a number of corner cases.
|
||||||
|
|
||||||
|
Fix location tracking after a 0x2028 or 0x2029 character in a string literal (which before did not increase the line number).
|
||||||
|
|
||||||
|
Fix an issue where arrow function bodies in for loop context would inappropriately consume `in` operators.
|
||||||
|
|
||||||
|
Fix wrong end locations stored on SequenceExpression nodes.
|
||||||
|
|
||||||
|
Implement restriction that `for`/`of` loop LHS can't start with `let`.
|
||||||
|
|
||||||
|
### New features
|
||||||
|
|
||||||
|
Add support for ES2022 class static blocks.
|
||||||
|
|
||||||
|
Allow multiple input files to be passed to the CLI tool.
|
||||||
|
|
||||||
|
## 8.4.1 (2021-06-24)
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
Fix a bug where `allowAwaitOutsideFunction` would allow `await` in class field initializers, and setting `ecmaVersion` to 13 or higher would allow top-level await in non-module sources.
|
||||||
|
|
||||||
|
## 8.4.0 (2021-06-11)
|
||||||
|
|
||||||
|
### New features
|
||||||
|
|
||||||
|
A new option, `allowSuperOutsideMethod`, can be used to suppress the error when `super` is used in the wrong context.
|
||||||
|
|
||||||
|
## 8.3.0 (2021-05-31)
|
||||||
|
|
||||||
|
### New features
|
||||||
|
|
||||||
|
Default `allowAwaitOutsideFunction` to true for ECMAScript 2022 and higher.
|
||||||
|
|
||||||
|
Add support for the `d` ([indices](https://github.com/tc39/proposal-regexp-match-indices)) regexp flag.
|
||||||
|
|
||||||
|
## 8.2.4 (2021-05-04)
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
Fix spec conformity in corner case 'for await (async of ...)'.
|
||||||
|
|
||||||
|
## 8.2.3 (2021-05-04)
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
Fix an issue where the library couldn't parse 'for (async of ...)'.
|
||||||
|
|
||||||
|
Fix a bug in UTF-16 decoding that would read characters incorrectly in some circumstances.
|
||||||
|
|
||||||
|
## 8.2.2 (2021-04-29)
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
Fix a bug where a class field initialized to an async arrow function wouldn't allow await inside it. Same issue existed for generator arrow functions with yield.
|
||||||
|
|
||||||
|
## 8.2.1 (2021-04-24)
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
Fix a regression introduced in 8.2.0 where static or async class methods with keyword names fail to parse.
|
||||||
|
|
||||||
|
## 8.2.0 (2021-04-24)
|
||||||
|
|
||||||
|
### New features
|
||||||
|
|
||||||
|
Add support for ES2022 class fields and private methods.
|
||||||
|
|
||||||
|
## 8.1.1 (2021-04-12)
|
||||||
|
|
||||||
|
### Various
|
||||||
|
|
||||||
|
Stop shipping source maps in the NPM package.
|
||||||
|
|
||||||
|
## 8.1.0 (2021-03-09)
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
Fix a spurious error in nested destructuring arrays.
|
||||||
|
|
||||||
|
### New features
|
||||||
|
|
||||||
|
Expose `allowAwaitOutsideFunction` in CLI interface.
|
||||||
|
|
||||||
|
Make `allowImportExportAnywhere` also apply to `import.meta`.
|
||||||
|
|
||||||
|
## 8.0.5 (2021-01-25)
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
Adjust package.json to work with Node 12.16.0 and 13.0-13.6.
|
||||||
|
|
||||||
|
## 8.0.4 (2020-10-05)
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
Make `await x ** y` an error, following the spec.
|
||||||
|
|
||||||
|
Fix potentially exponential regular expression.
|
||||||
|
|
||||||
|
## 8.0.3 (2020-10-02)
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
Fix a wasteful loop during `Parser` creation when setting `ecmaVersion` to `"latest"`.
|
||||||
|
|
||||||
|
## 8.0.2 (2020-09-30)
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
Make the TypeScript types reflect the current allowed values for `ecmaVersion`.
|
||||||
|
|
||||||
|
Fix another regexp/division tokenizer issue.
|
||||||
|
|
||||||
|
## 8.0.1 (2020-08-12)
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
Provide the correct value in the `version` export.
|
||||||
|
|
||||||
|
## 8.0.0 (2020-08-12)
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
Disallow expressions like `(a = b) = c`.
|
||||||
|
|
||||||
|
Make non-octal escape sequences a syntax error in strict mode.
|
||||||
|
|
||||||
|
### New features
|
||||||
|
|
||||||
|
The package can now be loaded directly as an ECMAScript module in node 13+.
|
||||||
|
|
||||||
|
Update to the set of Unicode properties from ES2021.
|
||||||
|
|
||||||
|
### Breaking changes
|
||||||
|
|
||||||
|
The `ecmaVersion` option is now required. For the moment, omitting it will still work with a warning, but that will change in a future release.
|
||||||
|
|
||||||
|
Some changes to method signatures that may be used by plugins.
|
||||||
|
|
||||||
|
## 7.4.0 (2020-08-03)
|
||||||
|
|
||||||
|
### New features
|
||||||
|
|
||||||
|
Add support for logical assignment operators.
|
||||||
|
|
||||||
|
Add support for numeric separators.
|
||||||
|
|
||||||
|
## 7.3.1 (2020-06-11)
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
Make the string in the `version` export match the actual library version.
|
||||||
|
|
||||||
|
## 7.3.0 (2020-06-11)
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
Fix a bug that caused parsing of object patterns with a property named `set` that had a default value to fail.
|
||||||
|
|
||||||
|
### New features
|
||||||
|
|
||||||
|
Add support for optional chaining (`?.`).
|
||||||
|
|
||||||
|
## 7.2.0 (2020-05-09)
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
Fix precedence issue in parsing of async arrow functions.
|
||||||
|
|
||||||
|
### New features
|
||||||
|
|
||||||
|
Add support for nullish coalescing.
|
||||||
|
|
||||||
|
Add support for `import.meta`.
|
||||||
|
|
||||||
|
Support `export * as ...` syntax.
|
||||||
|
|
||||||
|
Upgrade to Unicode 13.
|
||||||
|
|
||||||
|
## 6.4.1 (2020-03-09)
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
More carefully check for valid UTF16 surrogate pairs in regexp validator.
|
||||||
|
|
||||||
|
## 7.1.1 (2020-03-01)
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
Treat `\8` and `\9` as invalid escapes in template strings.
|
||||||
|
|
||||||
|
Allow unicode escapes in property names that are keywords.
|
||||||
|
|
||||||
|
Don't error on an exponential operator expression as argument to `await`.
|
||||||
|
|
||||||
|
More carefully check for valid UTF16 surrogate pairs in regexp validator.
|
||||||
|
|
||||||
|
## 7.1.0 (2019-09-24)
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
Disallow trailing object literal commas when ecmaVersion is less than 5.
|
||||||
|
|
||||||
|
### New features
|
||||||
|
|
||||||
|
Add a static `acorn` property to the `Parser` class that contains the entire module interface, to allow plugins to access the instance of the library that they are acting on.
|
||||||
|
|
||||||
|
## 7.0.0 (2019-08-13)
|
||||||
|
|
||||||
|
### Breaking changes
|
||||||
|
|
||||||
|
Changes the node format for dynamic imports to use the `ImportExpression` node type, as defined in [ESTree](https://github.com/estree/estree/blob/master/es2020.md#importexpression).
|
||||||
|
|
||||||
|
Makes 10 (ES2019) the default value for the `ecmaVersion` option.
|
||||||
|
|
||||||
|
## 6.3.0 (2019-08-12)
|
||||||
|
|
||||||
|
### New features
|
||||||
|
|
||||||
|
`sourceType: "module"` can now be used even when `ecmaVersion` is less than 6, to parse module-style code that otherwise conforms to an older standard.
|
||||||
|
|
||||||
|
## 6.2.1 (2019-07-21)
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
Fix bug causing Acorn to treat some characters as identifier characters that shouldn't be treated as such.
|
||||||
|
|
||||||
|
Fix issue where setting the `allowReserved` option to `"never"` allowed reserved words in some circumstances.
|
||||||
|
|
||||||
|
## 6.2.0 (2019-07-04)
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
Improve valid assignment checking in `for`/`in` and `for`/`of` loops.
|
||||||
|
|
||||||
|
Disallow binding `let` in patterns.
|
||||||
|
|
||||||
|
### New features
|
||||||
|
|
||||||
|
Support bigint syntax with `ecmaVersion` >= 11.
|
||||||
|
|
||||||
|
Support dynamic `import` syntax with `ecmaVersion` >= 11.
|
||||||
|
|
||||||
|
Upgrade to Unicode version 12.
|
||||||
|
|
||||||
|
## 6.1.1 (2019-02-27)
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
Fix bug that caused parsing of default exports with names to fail.
|
||||||
|
|
||||||
|
## 6.1.0 (2019-02-08)
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
Fix scope checking when redefining a `var` as a lexical binding.
|
||||||
|
|
||||||
|
### New features
|
||||||
|
|
||||||
|
Split up `parseSubscripts` to use an internal `parseSubscript` method to make it easier to extend with plugins.
|
||||||
|
|
||||||
|
## 6.0.7 (2019-02-04)
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
Check that exported bindings are defined.
|
||||||
|
|
||||||
|
Don't treat `\u180e` as a whitespace character.
|
||||||
|
|
||||||
|
Check for duplicate parameter names in methods.
|
||||||
|
|
||||||
|
Don't allow shorthand properties when they are generators or async methods.
|
||||||
|
|
||||||
|
Forbid binding `await` in async arrow function's parameter list.
|
||||||
|
|
||||||
|
## 6.0.6 (2019-01-30)
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
The content of class declarations and expressions is now always parsed in strict mode.
|
||||||
|
|
||||||
|
Don't allow `let` or `const` to bind the variable name `let`.
|
||||||
|
|
||||||
|
Treat class declarations as lexical.
|
||||||
|
|
||||||
|
Don't allow a generator function declaration as the sole body of an `if` or `else`.
|
||||||
|
|
||||||
|
Ignore `"use strict"` when after an empty statement.
|
||||||
|
|
||||||
|
Allow string line continuations with special line terminator characters.
|
||||||
|
|
||||||
|
Treat `for` bodies as part of the `for` scope when checking for conflicting bindings.
|
||||||
|
|
||||||
|
Fix bug with parsing `yield` in a `for` loop initializer.
|
||||||
|
|
||||||
|
Implement special cases around scope checking for functions.
|
||||||
|
|
||||||
|
## 6.0.5 (2019-01-02)
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
Fix TypeScript type for `Parser.extend` and add `allowAwaitOutsideFunction` to options type.
|
||||||
|
|
||||||
|
Don't treat `let` as a keyword when the next token is `{` on the next line.
|
||||||
|
|
||||||
|
Fix bug that broke checking for parentheses around an object pattern in a destructuring assignment when `preserveParens` was on.
|
||||||
|
|
||||||
|
## 6.0.4 (2018-11-05)
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
Further improvements to tokenizing regular expressions in corner cases.
|
||||||
|
|
||||||
|
## 6.0.3 (2018-11-04)
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
Fix bug in tokenizing an expression-less return followed by a function followed by a regular expression.
|
||||||
|
|
||||||
|
Remove stray symlink in the package tarball.
|
||||||
|
|
||||||
|
## 6.0.2 (2018-09-26)
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
Fix bug where default expressions could fail to parse inside an object destructuring assignment expression.
|
||||||
|
|
||||||
|
## 6.0.1 (2018-09-14)
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
Fix wrong value in `version` export.
|
||||||
|
|
||||||
|
## 6.0.0 (2018-09-14)
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
Better handle variable-redefinition checks for catch bindings and functions directly under if statements.
|
||||||
|
|
||||||
|
Forbid `new.target` in top-level arrow functions.
|
||||||
|
|
||||||
|
Fix issue with parsing a regexp after `yield` in some contexts.
|
||||||
|
|
||||||
|
### New features
|
||||||
|
|
||||||
|
The package now comes with TypeScript definitions.
|
||||||
|
|
||||||
|
### Breaking changes
|
||||||
|
|
||||||
|
The default value of the `ecmaVersion` option is now 9 (2018).
|
||||||
|
|
||||||
|
Plugins work differently, and will have to be rewritten to work with this version.
|
||||||
|
|
||||||
|
The loose parser and walker have been moved into separate packages (`acorn-loose` and `acorn-walk`).
|
||||||
|
|
||||||
|
## 5.7.3 (2018-09-10)
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
Fix failure to tokenize regexps after expressions like `x.of`.
|
||||||
|
|
||||||
|
Better error message for unterminated template literals.
|
||||||
|
|
||||||
|
## 5.7.2 (2018-08-24)
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
Properly handle `allowAwaitOutsideFunction` in for statements.
|
||||||
|
|
||||||
|
Treat function declarations at the top level of modules like let bindings.
|
||||||
|
|
||||||
|
Don't allow async function declarations as the only statement under a label.
|
||||||
|
|
||||||
|
## 5.7.0 (2018-06-15)
|
||||||
|
|
||||||
|
### New features
|
||||||
|
|
||||||
|
Upgraded to Unicode 11.
|
||||||
|
|
||||||
|
## 5.6.0 (2018-05-31)
|
||||||
|
|
||||||
|
### New features
|
||||||
|
|
||||||
|
Allow U+2028 and U+2029 in string when ECMAVersion >= 10.
|
||||||
|
|
||||||
|
Allow binding-less catch statements when ECMAVersion >= 10.
|
||||||
|
|
||||||
|
Add `allowAwaitOutsideFunction` option for parsing top-level `await`.
|
||||||
|
|
||||||
|
## 5.5.3 (2018-03-08)
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
A _second_ republish of the code in 5.5.1, this time with yarn, to hopefully get valid timestamps.
|
||||||
|
|
||||||
|
## 5.5.2 (2018-03-08)
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
A republish of the code in 5.5.1 in an attempt to solve an issue with the file timestamps in the npm package being 0.
|
||||||
|
|
||||||
|
## 5.5.1 (2018-03-06)
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
Fix misleading error message for octal escapes in template strings.
|
||||||
|
|
||||||
|
## 5.5.0 (2018-02-27)
|
||||||
|
|
||||||
|
### New features
|
||||||
|
|
||||||
|
The identifier character categorization is now based on Unicode version 10.
|
||||||
|
|
||||||
|
Acorn will now validate the content of regular expressions, including new ES9 features.
|
||||||
|
|
||||||
|
## 5.4.0 (2018-02-01)
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
Disallow duplicate or escaped flags on regular expressions.
|
||||||
|
|
||||||
|
Disallow octal escapes in strings in strict mode.
|
||||||
|
|
||||||
|
### New features
|
||||||
|
|
||||||
|
Add support for async iteration.
|
||||||
|
|
||||||
|
Add support for object spread and rest.
|
||||||
|
|
||||||
|
## 5.3.0 (2017-12-28)
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
Fix parsing of floating point literals with leading zeroes in loose mode.
|
||||||
|
|
||||||
|
Allow duplicate property names in object patterns.
|
||||||
|
|
||||||
|
Don't allow static class methods named `prototype`.
|
||||||
|
|
||||||
|
Disallow async functions directly under `if` or `else`.
|
||||||
|
|
||||||
|
Parse right-hand-side of `for`/`of` as an assignment expression.
|
||||||
|
|
||||||
|
Stricter parsing of `for`/`in`.
|
||||||
|
|
||||||
|
Don't allow unicode escapes in contextual keywords.
|
||||||
|
|
||||||
|
### New features
|
||||||
|
|
||||||
|
Parsing class members was factored into smaller methods to allow plugins to hook into it.
|
||||||
|
|
||||||
|
## 5.2.1 (2017-10-30)
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
Fix a token context corruption bug.
|
||||||
|
|
||||||
|
## 5.2.0 (2017-10-30)
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
Fix token context tracking for `class` and `function` in property-name position.
|
||||||
|
|
||||||
|
Make sure `%*` isn't parsed as a valid operator.
|
||||||
|
|
||||||
|
Allow shorthand properties `get` and `set` to be followed by default values.
|
||||||
|
|
||||||
|
Disallow `super` when not in callee or object position.
|
||||||
|
|
||||||
|
### New features
|
||||||
|
|
||||||
|
Support [`directive` property](https://github.com/estree/estree/compare/b3de58c9997504d6fba04b72f76e6dd1619ee4eb...1da8e603237144f44710360f8feb7a9977e905e0) on directive expression statements.
|
||||||
|
|
||||||
|
## 5.1.2 (2017-09-04)
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
Disable parsing of legacy HTML-style comments in modules.
|
||||||
|
|
||||||
|
Fix parsing of async methods whose names are keywords.
|
||||||
|
|
||||||
|
## 5.1.1 (2017-07-06)
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
Fix problem with disambiguating regexp and division after a class.
|
||||||
|
|
||||||
|
## 5.1.0 (2017-07-05)
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
Fix tokenizing of regexps in an object-destructuring `for`/`of` loop and after `yield`.
|
||||||
|
|
||||||
|
Parse zero-prefixed numbers with non-octal digits as decimal.
|
||||||
|
|
||||||
|
Allow object/array patterns in rest parameters.
|
||||||
|
|
||||||
|
Don't error when `yield` is used as a property name.
|
||||||
|
|
||||||
|
Allow `async` as a shorthand object property.
|
||||||
|
|
||||||
|
### New features
|
||||||
|
|
||||||
|
Implement the [template literal revision proposal](https://github.com/tc39/proposal-template-literal-revision) for ES9.
|
||||||
|
|
||||||
|
## 5.0.3 (2017-04-01)
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
Fix spurious duplicate variable definition errors for named functions.
|
||||||
|
|
||||||
|
## 5.0.2 (2017-03-30)
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
A binary operator after a parenthesized arrow expression is no longer incorrectly treated as an error.
|
||||||
|
|
||||||
|
## 5.0.0 (2017-03-28)
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
Raise an error for duplicated lexical bindings.
|
||||||
|
|
||||||
|
Fix spurious error when an assignment expression occurred after a spread expression.
|
||||||
|
|
||||||
|
Accept regular expressions after `of` (in `for`/`of`), `yield` (in a generator), and braced arrow functions.
|
||||||
|
|
||||||
|
Allow labels in front of `var` declarations, even in strict mode.
|
||||||
|
|
||||||
|
### Breaking changes
|
||||||
|
|
||||||
|
Parse declarations following `export default` as declaration nodes, not expressions. This means that class and function declarations nodes can now have `null` as their `id`.
|
||||||
|
|
||||||
|
## 4.0.11 (2017-02-07)
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
Allow all forms of member expressions to be parenthesized as lvalue.
|
||||||
|
|
||||||
|
## 4.0.10 (2017-02-07)
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
Don't expect semicolons after default-exported functions or classes, even when they are expressions.
|
||||||
|
|
||||||
|
Check for use of `'use strict'` directives in non-simple parameter functions, even when already in strict mode.
|
||||||
|
|
||||||
|
## 4.0.9 (2017-02-06)
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
Fix incorrect error raised for parenthesized simple assignment targets, so that `(x) = 1` parses again.
|
||||||
|
|
||||||
|
## 4.0.8 (2017-02-03)
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
Solve spurious parenthesized pattern errors by temporarily erring on the side of accepting programs that our delayed errors don't handle correctly yet.
|
||||||
|
|
||||||
|
## 4.0.7 (2017-02-02)
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
Accept invalidly rejected code like `(x).y = 2` again.
|
||||||
|
|
||||||
|
Don't raise an error when a function _inside_ strict code has a non-simple parameter list.
|
||||||
|
|
||||||
|
## 4.0.6 (2017-02-02)
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
Fix exponential behavior (manifesting itself as a complete hang for even relatively small source files) introduced by the new 'use strict' check.
|
||||||
|
|
||||||
|
## 4.0.5 (2017-02-02)
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
Disallow parenthesized pattern expressions.
|
||||||
|
|
||||||
|
Allow keywords as export names.
|
||||||
|
|
||||||
|
Don't allow the `async` keyword to be parenthesized.
|
||||||
|
|
||||||
|
Properly raise an error when a keyword contains a character escape.
|
||||||
|
|
||||||
|
Allow `"use strict"` to appear after other string literal expressions.
|
||||||
|
|
||||||
|
Disallow labeled declarations.
|
||||||
|
|
||||||
|
## 4.0.4 (2016-12-19)
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
Fix crash when `export` was followed by a keyword that can't be
|
||||||
|
exported.
|
||||||
|
|
||||||
|
## 4.0.3 (2016-08-16)
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
Allow regular function declarations inside single-statement `if` branches in loose mode. Forbid them entirely in strict mode.
|
||||||
|
|
||||||
|
Properly parse properties named `async` in ES2017 mode.
|
||||||
|
|
||||||
|
Fix bug where reserved words were broken in ES2017 mode.
|
||||||
|
|
||||||
|
## 4.0.2 (2016-08-11)
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
Don't ignore period or 'e' characters after octal numbers.
|
||||||
|
|
||||||
|
Fix broken parsing for call expressions in default parameter values of arrow functions.
|
||||||
|
|
||||||
|
## 4.0.1 (2016-08-08)
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
Fix false positives in duplicated export name errors.
|
||||||
|
|
||||||
|
## 4.0.0 (2016-08-07)
|
||||||
|
|
||||||
|
### Breaking changes
|
||||||
|
|
||||||
|
The default `ecmaVersion` option value is now 7.
|
||||||
|
|
||||||
|
A number of internal method signatures changed, so plugins might need to be updated.
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
The parser now raises errors on duplicated export names.
|
||||||
|
|
||||||
|
`arguments` and `eval` can now be used in shorthand properties.
|
||||||
|
|
||||||
|
Duplicate parameter names in non-simple argument lists now always produce an error.
|
||||||
|
|
||||||
|
### New features
|
||||||
|
|
||||||
|
The `ecmaVersion` option now also accepts year-style version numbers
|
||||||
|
(2015, etc).
|
||||||
|
|
||||||
|
Support for `async`/`await` syntax when `ecmaVersion` is >= 8.
|
||||||
|
|
||||||
|
Support for trailing commas in call expressions when `ecmaVersion` is >= 8.
|
||||||
|
|
||||||
|
## 3.3.0 (2016-07-25)
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
Fix bug in tokenizing of regexp operator after a function declaration.
|
||||||
|
|
||||||
|
Fix parser crash when parsing an array pattern with a hole.
|
||||||
|
|
||||||
|
### New features
|
||||||
|
|
||||||
|
Implement check against complex argument lists in functions that enable strict mode in ES7.
|
||||||
|
|
||||||
|
## 3.2.0 (2016-06-07)
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
Improve handling of lack of unicode regexp support in host
|
||||||
|
environment.
|
||||||
|
|
||||||
|
Properly reject shorthand properties whose name is a keyword.
|
||||||
|
|
||||||
|
### New features
|
||||||
|
|
||||||
|
Visitors created with `visit.make` now have their base as _prototype_, rather than copying properties into a fresh object.
|
||||||
|
|
||||||
|
## 3.1.0 (2016-04-18)
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
Properly tokenize the division operator directly after a function expression.
|
||||||
|
|
||||||
|
Allow trailing comma in destructuring arrays.
|
||||||
|
|
||||||
|
## 3.0.4 (2016-02-25)
|
||||||
|
|
||||||
|
### Fixes
|
||||||
|
|
||||||
|
Allow update expressions as left-hand-side of the ES7 exponential operator.
|
||||||
|
|
||||||
|
## 3.0.2 (2016-02-10)
|
||||||
|
|
||||||
|
### Fixes
|
||||||
|
|
||||||
|
Fix bug that accidentally made `undefined` a reserved word when parsing ES7.
|
||||||
|
|
||||||
|
## 3.0.0 (2016-02-10)
|
||||||
|
|
||||||
|
### Breaking changes
|
||||||
|
|
||||||
|
The default value of the `ecmaVersion` option is now 6 (used to be 5).
|
||||||
|
|
||||||
|
Support for comprehension syntax (which was dropped from the draft spec) has been removed.
|
||||||
|
|
||||||
|
### Fixes
|
||||||
|
|
||||||
|
`let` and `yield` are now “contextual keywords”, meaning you can mostly use them as identifiers in ES5 non-strict code.
|
||||||
|
|
||||||
|
A parenthesized class or function expression after `export default` is now parsed correctly.
|
||||||
|
|
||||||
|
### New features
|
||||||
|
|
||||||
|
When `ecmaVersion` is set to 7, Acorn will parse the exponentiation operator (`**`).
|
||||||
|
|
||||||
|
The identifier character ranges are now based on Unicode 8.0.0.
|
||||||
|
|
||||||
|
Plugins can now override the `raiseRecoverable` method to override the way non-critical errors are handled.
|
||||||
|
|
||||||
|
## 2.7.0 (2016-01-04)
|
||||||
|
|
||||||
|
### Fixes
|
||||||
|
|
||||||
|
Stop allowing rest parameters in setters.
|
||||||
|
|
||||||
|
Disallow `y` regexp flag in ES5.
|
||||||
|
|
||||||
|
Disallow `\00` and `\000` escapes in strict mode.
|
||||||
|
|
||||||
|
Raise an error when an import name is a reserved word.
|
||||||
|
|
||||||
|
## 2.6.2 (2015-11-10)
|
||||||
|
|
||||||
|
### Fixes
|
||||||
|
|
||||||
|
Don't crash when no options object is passed.
|
||||||
|
|
||||||
|
## 2.6.0 (2015-11-09)
|
||||||
|
|
||||||
|
### Fixes
|
||||||
|
|
||||||
|
Add `await` as a reserved word in module sources.
|
||||||
|
|
||||||
|
Disallow `yield` in a parameter default value for a generator.
|
||||||
|
|
||||||
|
Forbid using a comma after a rest pattern in an array destructuring.
|
||||||
|
|
||||||
|
### New features
|
||||||
|
|
||||||
|
Support parsing stdin in command-line tool.
|
||||||
|
|
||||||
|
## 2.5.0 (2015-10-27)
|
||||||
|
|
||||||
|
### Fixes
|
||||||
|
|
||||||
|
Fix tokenizer support in the command-line tool.
|
||||||
|
|
||||||
|
Stop allowing `new.target` outside of functions.
|
||||||
|
|
||||||
|
Remove legacy `guard` and `guardedHandler` properties from try nodes.
|
||||||
|
|
||||||
|
Stop allowing multiple `__proto__` properties on an object literal in strict mode.
|
||||||
|
|
||||||
|
Don't allow rest parameters to be non-identifier patterns.
|
||||||
|
|
||||||
|
Check for duplicate parameter names in arrow functions.

21  node_modules/acorn/LICENSE  generated  vendored  Normal file
@@ -0,0 +1,21 @@
MIT License

Copyright (C) 2012-2022 by various contributors (see AUTHORS)

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

283  node_modules/acorn/README.md  generated  vendored  Normal file
@@ -0,0 +1,283 @@
# Acorn

A tiny, fast JavaScript parser written in JavaScript.

## Community

Acorn is open source software released under an
[MIT license](https://github.com/acornjs/acorn/blob/master/acorn/LICENSE).

You are welcome to
[report bugs](https://github.com/acornjs/acorn/issues) or create pull
requests on [github](https://github.com/acornjs/acorn).

## Installation

The easiest way to install acorn is from [`npm`](https://www.npmjs.com/):

```sh
npm install acorn
```

Alternately, you can download the source and build acorn yourself:

```sh
git clone https://github.com/acornjs/acorn.git
cd acorn
npm install
```

## Interface

**parse**`(input, options)` is the main interface to the library. The
`input` parameter is a string, `options` must be an object setting
some of the options listed below. The return value will be an abstract
syntax tree object as specified by the [ESTree
spec](https://github.com/estree/estree).

```javascript
let acorn = require("acorn");
console.log(acorn.parse("1 + 1", {ecmaVersion: 2020}));
```

When encountering a syntax error, the parser will raise a
`SyntaxError` object with a meaningful message. The error object will
have a `pos` property that indicates the string offset at which the
error occurred, and a `loc` object that contains a `{line, column}`
object referring to that same position.
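
As a small illustration (not part of the original README prose), the position information can be read like this:

```javascript
const acorn = require("acorn");

try {
  acorn.parse("1 +", {ecmaVersion: 2020});
} catch (err) {
  // The raised SyntaxError carries the extra properties described above.
  console.log(err.message); // e.g. "Unexpected token (1:3)"
  console.log(err.pos);     // 3 (string offset of the error)
  console.log(err.loc);     // { line: 1, column: 3 }
}
```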

Options are provided in a second argument, which should be an
object containing any of these fields (only `ecmaVersion` is
required):

- **ecmaVersion**: Indicates the ECMAScript version to parse. Must be
  either 3, 5, 6 (or 2015), 7 (2016), 8 (2017), 9 (2018), 10 (2019),
  11 (2020), 12 (2021), 13 (2022), 14 (2023), or `"latest"` (the
  latest the library supports). This influences support for strict
  mode, the set of reserved words, and support for new syntax
  features.

  **NOTE**: Only 'stage 4' (finalized) ECMAScript features are being
  implemented by Acorn. Other proposed new features must be
  implemented through plugins.

- **sourceType**: Indicates the mode the code should be parsed in. Can be
  either `"script"` or `"module"`. This influences global strict mode
  and parsing of `import` and `export` declarations.

  **NOTE**: If set to `"module"`, then static `import` / `export` syntax
  will be valid, even if `ecmaVersion` is less than 6.

- **onInsertedSemicolon**: If given a callback, that callback will be
  called whenever a missing semicolon is inserted by the parser. The
  callback will be given the character offset of the point where the
  semicolon is inserted as argument, and if `locations` is on, also a
  `{line, column}` object representing this position.

- **onTrailingComma**: Like `onInsertedSemicolon`, but for trailing
  commas.

- **allowReserved**: If `false`, using a reserved word will generate
  an error. Defaults to `true` for `ecmaVersion` 3, `false` for higher
  versions. When given the value `"never"`, reserved words and
  keywords can also not be used as property names (as in Internet
  Explorer's old parser).

- **allowReturnOutsideFunction**: By default, a return statement at
  the top level raises an error. Set this to `true` to accept such
  code.

- **allowImportExportEverywhere**: By default, `import` and `export`
  declarations can only appear at a program's top level. Setting this
  option to `true` allows them anywhere where a statement is allowed,
  and also allows `import.meta` expressions to appear in scripts
  (when `sourceType` is not `"module"`).

- **allowAwaitOutsideFunction**: If `false`, `await` expressions can
  only appear inside `async` functions. Defaults to `true` in modules
  for `ecmaVersion` 2022 and later, `false` for lower versions.
  Setting this option to `true` allows top-level `await`
  expressions. They are still not allowed in non-`async` functions,
  though.

- **allowSuperOutsideMethod**: By default, `super` outside a method
  raises an error. Set this to `true` to accept such code.

- **allowHashBang**: When this is enabled, if the code starts with the
  characters `#!` (as in a shellscript), the first line will be
  treated as a comment. Defaults to `true` when `ecmaVersion` >= 2023.

- **checkPrivateFields**: By default, the parser will verify that
  private properties are only used in places where they are valid and
  have been declared. Set this to `false` to turn such checks off.

- **locations**: When `true`, each node has a `loc` object attached
  with `start` and `end` subobjects, each of which contains the
  one-based line and zero-based column numbers in `{line, column}`
  form. Default is `false`.

- **onToken**: If a function is passed for this option, each found
  token will be passed in the same format as tokens returned from
  `tokenizer().getToken()`.

  If an array is passed, each found token is pushed to it.

  Note that you are not allowed to call the parser from the
  callback—that will corrupt its internal state.

- **onComment**: If a function is passed for this option, whenever a
  comment is encountered the function will be called with the
  following parameters:

  - `block`: `true` if the comment is a block comment, false if it
    is a line comment.
  - `text`: The content of the comment.
  - `start`: Character offset of the start of the comment.
  - `end`: Character offset of the end of the comment.

  When the `locations` option is on, the `{line, column}` locations
  of the comment's start and end are passed as two additional
  parameters.

  If an array is passed for this option, each found comment is pushed
  to it as an object in Esprima format:

  ```javascript
  {
    "type": "Line" | "Block",
    "value": "comment text",
    "start": Number,
    "end": Number,
    // If `locations` option is on:
    "loc": {
      "start": {line: Number, column: Number}
      "end": {line: Number, column: Number}
    },
    // If `ranges` option is on:
    "range": [Number, Number]
  }
  ```

  Note that you are not allowed to call the parser from the
  callback—that will corrupt its internal state. A short example that
  passes an array for this option appears right after this list.

- **ranges**: Nodes have their start and end character offsets
  recorded in `start` and `end` properties (directly on the node,
  rather than the `loc` object, which holds line/column data). To also
  add a
  [semi-standardized](https://bugzilla.mozilla.org/show_bug.cgi?id=745678)
  `range` property holding a `[start, end]` array with the same
  numbers, set the `ranges` option to `true`.

- **program**: It is possible to parse multiple files into a single
  AST by passing the tree produced by parsing the first file as the
  `program` option in subsequent parses. This will add the toplevel
  forms of the parsed file to the "Program" (top) node of an existing
  parse tree.

- **sourceFile**: When the `locations` option is `true`, you can pass
  this option to add a `source` attribute in every node's `loc`
  object. Note that the contents of this option are not examined or
  processed in any way; you are free to use whatever format you
  choose.

- **directSourceFile**: Like `sourceFile`, but a `sourceFile` property
  will be added (regardless of the `locations` option) directly to the
  nodes, rather than the `loc` object.

- **preserveParens**: If this option is `true`, parenthesized expressions
  are represented by (non-standard) `ParenthesizedExpression` nodes
  that have a single `expression` property containing the expression
  inside parentheses.
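
As a quick, combined illustration of a few of the options above (this sketch is not from the original README), one call can collect comments into an array, record locations and ranges, and accept a top-level `return`:

```javascript
const acorn = require("acorn");

const comments = [];
const ast = acorn.parse("// note\nreturn 1;", {
  ecmaVersion: 2020,
  locations: true,
  ranges: true,
  onComment: comments,              // array form: comment objects are pushed to it
  allowReturnOutsideFunction: true  // accept the top-level return statement
});

console.log(comments[0].value);     // " note"
console.log(ast.body[0].type);      // "ReturnStatement"
console.log(ast.body[0].range);     // [8, 17]
console.log(ast.body[0].loc.start); // { line: 2, column: 0 }
```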

**parseExpressionAt**`(input, offset, options)` will parse a single
expression in a string, and return its AST. It will not complain if
there is more of the string left after the expression.
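
For example (a hedged sketch, not from the original README):

```javascript
const acorn = require("acorn");

// Parse only the expression that starts at offset 8; the trailing
// "; done" is left alone.
const node = acorn.parseExpressionAt("let x = 40 + 2; done", 8, {ecmaVersion: 2020});
console.log(node.type); // "BinaryExpression"
console.log(node.end);  // 14, i.e. parsing stops before the semicolon
```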

**tokenizer**`(input, options)` returns an object with a `getToken`
method that can be called repeatedly to get the next token, a `{start,
end, type, value}` object (with added `loc` property when the
`locations` option is enabled and `range` property when the `ranges`
option is enabled). When the token's type is `tokTypes.eof`, you
should stop calling the method, since it will keep returning that same
token forever.

Note that tokenizing JavaScript without parsing it is, in modern
versions of the language, not really possible, because the syntax is
overloaded in ways that can only be disambiguated by the parse
context. This package applies a bunch of heuristics to try and do a
reasonable job, but you are advised to use `parse` with the `onToken`
option instead of this.

In an ES6 environment, the returned result can be used as any other
protocol-compliant iterable:

```javascript
for (let token of acorn.tokenizer(str)) {
  // iterate over the tokens
}

// transform code to an array of tokens:
var tokens = [...acorn.tokenizer(str)];
```

**tokTypes** holds an object mapping names to the token type objects
that end up in the `type` properties of tokens.

**getLineInfo**`(input, offset)` can be used to get a `{line,
column}` object for a given program string and offset.
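
A tiny sketch (again not from the original README):

```javascript
const acorn = require("acorn");

const code = "let a = 1;\nlet b = 2;";
// Offset of the identifier `b`, which sits on the second line.
console.log(acorn.getLineInfo(code, code.indexOf("b"))); // -> line 2, column 4
```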

### The `Parser` class

Instances of the **`Parser`** class contain all the state and logic
that drives a parse. It has static methods `parse`,
`parseExpressionAt`, and `tokenizer` that match the top-level
functions by the same name.

When extending the parser with plugins, you need to call these methods
on the extended version of the class. To extend a parser with plugins,
you can use its static `extend` method.

```javascript
var acorn = require("acorn");
var jsx = require("acorn-jsx");
var JSXParser = acorn.Parser.extend(jsx());
JSXParser.parse("foo(<bar/>)", {ecmaVersion: 2020});
```

The `extend` method takes any number of plugin values, and returns a
new `Parser` class that includes the extra parser logic provided by
the plugins.

## Command line interface

The `bin/acorn` utility can be used to parse a file from the command
line. It accepts as arguments its input file and the following
options:

- `--ecma3|--ecma5|--ecma6|--ecma7|--ecma8|--ecma9|--ecma10`: Sets the ECMAScript version
  to parse. Default is version 9.

- `--module`: Sets the parsing mode to `"module"`. Is set to `"script"` otherwise.

- `--locations`: Attaches a "loc" object to each node with "start" and
  "end" subobjects, each of which contains the one-based line and
  zero-based column numbers in `{line, column}` form.

- `--allow-hash-bang`: If the code starts with the characters #! (as
  in a shellscript), the first line will be treated as a comment.

- `--allow-await-outside-function`: Allows top-level `await` expressions.
  See the `allowAwaitOutsideFunction` option for more information.

- `--compact`: No whitespace is used in the AST output.

- `--silent`: Do not output the AST, just return the exit status.

- `--help`: Print the usage information and quit.

The utility spits out the syntax tree as JSON data.

## Existing plugins

- [`acorn-jsx`](https://github.com/RReverser/acorn-jsx): Parse [Facebook JSX syntax extensions](https://github.com/facebook/jsx)

4  node_modules/acorn/bin/acorn  generated  vendored  Normal file
@@ -0,0 +1,4 @@
#!/usr/bin/env node
"use strict"

require("../dist/bin.js")

857  node_modules/acorn/dist/acorn.d.mts  generated  vendored  Normal file
@@ -0,0 +1,857 @@
|
|||||||
|
export interface Node {
|
||||||
|
start: number
|
||||||
|
end: number
|
||||||
|
type: string
|
||||||
|
range?: [number, number]
|
||||||
|
loc?: SourceLocation | null
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface SourceLocation {
|
||||||
|
source?: string | null
|
||||||
|
start: Position
|
||||||
|
end: Position
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface Position {
|
||||||
|
/** 1-based */
|
||||||
|
line: number
|
||||||
|
/** 0-based */
|
||||||
|
column: number
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface Identifier extends Node {
|
||||||
|
type: "Identifier"
|
||||||
|
name: string
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface Literal extends Node {
|
||||||
|
type: "Literal"
|
||||||
|
value?: string | boolean | null | number | RegExp | bigint
|
||||||
|
raw?: string
|
||||||
|
regex?: {
|
||||||
|
pattern: string
|
||||||
|
flags: string
|
||||||
|
}
|
||||||
|
bigint?: string
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface Program extends Node {
|
||||||
|
type: "Program"
|
||||||
|
body: Array<Statement | ModuleDeclaration>
|
||||||
|
sourceType: "script" | "module"
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface Function extends Node {
|
||||||
|
id?: Identifier | null
|
||||||
|
params: Array<Pattern>
|
||||||
|
body: BlockStatement | Expression
|
||||||
|
generator: boolean
|
||||||
|
expression: boolean
|
||||||
|
async: boolean
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ExpressionStatement extends Node {
|
||||||
|
type: "ExpressionStatement"
|
||||||
|
expression: Expression | Literal
|
||||||
|
directive?: string
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface BlockStatement extends Node {
|
||||||
|
type: "BlockStatement"
|
||||||
|
body: Array<Statement>
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface EmptyStatement extends Node {
|
||||||
|
type: "EmptyStatement"
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface DebuggerStatement extends Node {
|
||||||
|
type: "DebuggerStatement"
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface WithStatement extends Node {
|
||||||
|
type: "WithStatement"
|
||||||
|
object: Expression
|
||||||
|
body: Statement
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ReturnStatement extends Node {
|
||||||
|
type: "ReturnStatement"
|
||||||
|
argument?: Expression | null
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface LabeledStatement extends Node {
|
||||||
|
type: "LabeledStatement"
|
||||||
|
label: Identifier
|
||||||
|
body: Statement
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface BreakStatement extends Node {
|
||||||
|
type: "BreakStatement"
|
||||||
|
label?: Identifier | null
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ContinueStatement extends Node {
|
||||||
|
type: "ContinueStatement"
|
||||||
|
label?: Identifier | null
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IfStatement extends Node {
|
||||||
|
type: "IfStatement"
|
||||||
|
test: Expression
|
||||||
|
consequent: Statement
|
||||||
|
alternate?: Statement | null
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface SwitchStatement extends Node {
|
||||||
|
type: "SwitchStatement"
|
||||||
|
discriminant: Expression
|
||||||
|
cases: Array<SwitchCase>
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface SwitchCase extends Node {
|
||||||
|
type: "SwitchCase"
|
||||||
|
test?: Expression | null
|
||||||
|
consequent: Array<Statement>
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ThrowStatement extends Node {
|
||||||
|
type: "ThrowStatement"
|
||||||
|
argument: Expression
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface TryStatement extends Node {
|
||||||
|
type: "TryStatement"
|
||||||
|
block: BlockStatement
|
||||||
|
handler?: CatchClause | null
|
||||||
|
finalizer?: BlockStatement | null
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface CatchClause extends Node {
|
||||||
|
type: "CatchClause"
|
||||||
|
param?: Pattern | null
|
||||||
|
body: BlockStatement
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface WhileStatement extends Node {
|
||||||
|
type: "WhileStatement"
|
||||||
|
test: Expression
|
||||||
|
body: Statement
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface DoWhileStatement extends Node {
|
||||||
|
type: "DoWhileStatement"
|
||||||
|
body: Statement
|
||||||
|
test: Expression
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ForStatement extends Node {
|
||||||
|
type: "ForStatement"
|
||||||
|
init?: VariableDeclaration | Expression | null
|
||||||
|
test?: Expression | null
|
||||||
|
update?: Expression | null
|
||||||
|
body: Statement
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ForInStatement extends Node {
|
||||||
|
type: "ForInStatement"
|
||||||
|
left: VariableDeclaration | Pattern
|
||||||
|
right: Expression
|
||||||
|
body: Statement
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface FunctionDeclaration extends Function {
|
||||||
|
type: "FunctionDeclaration"
|
||||||
|
id: Identifier
|
||||||
|
body: BlockStatement
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface VariableDeclaration extends Node {
|
||||||
|
type: "VariableDeclaration"
|
||||||
|
declarations: Array<VariableDeclarator>
|
||||||
|
kind: "var" | "let" | "const"
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface VariableDeclarator extends Node {
|
||||||
|
type: "VariableDeclarator"
|
||||||
|
id: Pattern
|
||||||
|
init?: Expression | null
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ThisExpression extends Node {
|
||||||
|
type: "ThisExpression"
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ArrayExpression extends Node {
|
||||||
|
type: "ArrayExpression"
|
||||||
|
elements: Array<Expression | SpreadElement | null>
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ObjectExpression extends Node {
|
||||||
|
type: "ObjectExpression"
|
||||||
|
properties: Array<Property | SpreadElement>
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface Property extends Node {
|
||||||
|
type: "Property"
|
||||||
|
key: Expression
|
||||||
|
value: Expression
|
||||||
|
kind: "init" | "get" | "set"
|
||||||
|
method: boolean
|
||||||
|
shorthand: boolean
|
||||||
|
computed: boolean
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface FunctionExpression extends Function {
|
||||||
|
type: "FunctionExpression"
|
||||||
|
body: BlockStatement
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface UnaryExpression extends Node {
|
||||||
|
type: "UnaryExpression"
|
||||||
|
operator: UnaryOperator
|
||||||
|
prefix: boolean
|
||||||
|
argument: Expression
|
||||||
|
}
|
||||||
|
|
||||||
|
export type UnaryOperator = "-" | "+" | "!" | "~" | "typeof" | "void" | "delete"
|
||||||
|
|
||||||
|
export interface UpdateExpression extends Node {
|
||||||
|
type: "UpdateExpression"
|
||||||
|
operator: UpdateOperator
|
||||||
|
argument: Expression
|
||||||
|
prefix: boolean
|
||||||
|
}
|
||||||
|
|
||||||
|
export type UpdateOperator = "++" | "--"

export interface BinaryExpression extends Node {
  type: "BinaryExpression"
  operator: BinaryOperator
  left: Expression | PrivateIdentifier
  right: Expression
}

export type BinaryOperator = "==" | "!=" | "===" | "!==" | "<" | "<=" | ">" | ">=" | "<<" | ">>" | ">>>" | "+" | "-" | "*" | "/" | "%" | "|" | "^" | "&" | "in" | "instanceof" | "**"

export interface AssignmentExpression extends Node {
  type: "AssignmentExpression"
  operator: AssignmentOperator
  left: Pattern
  right: Expression
}

export type AssignmentOperator = "=" | "+=" | "-=" | "*=" | "/=" | "%=" | "<<=" | ">>=" | ">>>=" | "|=" | "^=" | "&=" | "**=" | "||=" | "&&=" | "??="

export interface LogicalExpression extends Node {
  type: "LogicalExpression"
  operator: LogicalOperator
  left: Expression
  right: Expression
}

export type LogicalOperator = "||" | "&&" | "??"

export interface MemberExpression extends Node {
  type: "MemberExpression"
  object: Expression | Super
  property: Expression | PrivateIdentifier
  computed: boolean
  optional: boolean
}

export interface ConditionalExpression extends Node {
  type: "ConditionalExpression"
  test: Expression
  alternate: Expression
  consequent: Expression
}

export interface CallExpression extends Node {
  type: "CallExpression"
  callee: Expression | Super
  arguments: Array<Expression | SpreadElement>
  optional: boolean
}

export interface NewExpression extends Node {
  type: "NewExpression"
  callee: Expression
  arguments: Array<Expression | SpreadElement>
}

export interface SequenceExpression extends Node {
  type: "SequenceExpression"
  expressions: Array<Expression>
}

export interface ForOfStatement extends Node {
  type: "ForOfStatement"
  left: VariableDeclaration | Pattern
  right: Expression
  body: Statement
  await: boolean
}

export interface Super extends Node {
  type: "Super"
}

export interface SpreadElement extends Node {
  type: "SpreadElement"
  argument: Expression
}

export interface ArrowFunctionExpression extends Function {
  type: "ArrowFunctionExpression"
}

export interface YieldExpression extends Node {
  type: "YieldExpression"
  argument?: Expression | null
  delegate: boolean
}

export interface TemplateLiteral extends Node {
  type: "TemplateLiteral"
  quasis: Array<TemplateElement>
  expressions: Array<Expression>
}

export interface TaggedTemplateExpression extends Node {
  type: "TaggedTemplateExpression"
  tag: Expression
  quasi: TemplateLiteral
}

export interface TemplateElement extends Node {
  type: "TemplateElement"
  tail: boolean
  value: {
    cooked?: string | null
    raw: string
  }
}

export interface AssignmentProperty extends Node {
  type: "Property"
  key: Expression
  value: Pattern
  kind: "init"
  method: false
  shorthand: boolean
  computed: boolean
}

export interface ObjectPattern extends Node {
  type: "ObjectPattern"
  properties: Array<AssignmentProperty | RestElement>
}

export interface ArrayPattern extends Node {
  type: "ArrayPattern"
  elements: Array<Pattern | null>
}

export interface RestElement extends Node {
  type: "RestElement"
  argument: Pattern
}

export interface AssignmentPattern extends Node {
  type: "AssignmentPattern"
  left: Pattern
  right: Expression
}

export interface Class extends Node {
  id?: Identifier | null
  superClass?: Expression | null
  body: ClassBody
}

export interface ClassBody extends Node {
  type: "ClassBody"
  body: Array<MethodDefinition | PropertyDefinition | StaticBlock>
}

export interface MethodDefinition extends Node {
  type: "MethodDefinition"
  key: Expression | PrivateIdentifier
  value: FunctionExpression
  kind: "constructor" | "method" | "get" | "set"
  computed: boolean
  static: boolean
}

export interface ClassDeclaration extends Class {
  type: "ClassDeclaration"
  id: Identifier
}

export interface ClassExpression extends Class {
  type: "ClassExpression"
}

export interface MetaProperty extends Node {
  type: "MetaProperty"
  meta: Identifier
  property: Identifier
}

export interface ImportDeclaration extends Node {
  type: "ImportDeclaration"
  specifiers: Array<ImportSpecifier | ImportDefaultSpecifier | ImportNamespaceSpecifier>
  source: Literal
}

export interface ImportSpecifier extends Node {
  type: "ImportSpecifier"
  imported: Identifier | Literal
  local: Identifier
}

export interface ImportDefaultSpecifier extends Node {
  type: "ImportDefaultSpecifier"
  local: Identifier
}

export interface ImportNamespaceSpecifier extends Node {
  type: "ImportNamespaceSpecifier"
  local: Identifier
}

export interface ExportNamedDeclaration extends Node {
  type: "ExportNamedDeclaration"
  declaration?: Declaration | null
  specifiers: Array<ExportSpecifier>
  source?: Literal | null
}

export interface ExportSpecifier extends Node {
  type: "ExportSpecifier"
  exported: Identifier | Literal
  local: Identifier | Literal
}

export interface AnonymousFunctionDeclaration extends Function {
  type: "FunctionDeclaration"
  id: null
  body: BlockStatement
}

export interface AnonymousClassDeclaration extends Class {
  type: "ClassDeclaration"
  id: null
}

export interface ExportDefaultDeclaration extends Node {
  type: "ExportDefaultDeclaration"
  declaration: AnonymousFunctionDeclaration | FunctionDeclaration | AnonymousClassDeclaration | ClassDeclaration | Expression
}

export interface ExportAllDeclaration extends Node {
  type: "ExportAllDeclaration"
  source: Literal
  exported?: Identifier | Literal | null
}

export interface AwaitExpression extends Node {
  type: "AwaitExpression"
  argument: Expression
}

export interface ChainExpression extends Node {
  type: "ChainExpression"
  expression: MemberExpression | CallExpression
}

export interface ImportExpression extends Node {
  type: "ImportExpression"
  source: Expression
}

export interface ParenthesizedExpression extends Node {
  type: "ParenthesizedExpression"
  expression: Expression
}

export interface PropertyDefinition extends Node {
  type: "PropertyDefinition"
  key: Expression | PrivateIdentifier
  value?: Expression | null
  computed: boolean
  static: boolean
}

export interface PrivateIdentifier extends Node {
  type: "PrivateIdentifier"
  name: string
}

export interface StaticBlock extends Node {
  type: "StaticBlock"
  body: Array<Statement>
}

export type Statement =
  | ExpressionStatement
  | BlockStatement
  | EmptyStatement
  | DebuggerStatement
  | WithStatement
  | ReturnStatement
  | LabeledStatement
  | BreakStatement
  | ContinueStatement
  | IfStatement
  | SwitchStatement
  | ThrowStatement
  | TryStatement
  | WhileStatement
  | DoWhileStatement
  | ForStatement
  | ForInStatement
  | ForOfStatement
  | Declaration

export type Declaration =
  | FunctionDeclaration
  | VariableDeclaration
  | ClassDeclaration

export type Expression =
  | Identifier
  | Literal
  | ThisExpression
  | ArrayExpression
  | ObjectExpression
  | FunctionExpression
  | UnaryExpression
  | UpdateExpression
  | BinaryExpression
  | AssignmentExpression
  | LogicalExpression
  | MemberExpression
  | ConditionalExpression
  | CallExpression
  | NewExpression
  | SequenceExpression
  | ArrowFunctionExpression
  | YieldExpression
  | TemplateLiteral
  | TaggedTemplateExpression
  | ClassExpression
  | MetaProperty
  | AwaitExpression
  | ChainExpression
  | ImportExpression
  | ParenthesizedExpression

export type Pattern =
  | Identifier
  | MemberExpression
  | ObjectPattern
  | ArrayPattern
  | RestElement
  | AssignmentPattern

export type ModuleDeclaration =
  | ImportDeclaration
  | ExportNamedDeclaration
  | ExportDefaultDeclaration
  | ExportAllDeclaration

export type AnyNode = Statement | Expression | Declaration | ModuleDeclaration | Literal | Program | SwitchCase | CatchClause | Property | Super | SpreadElement | TemplateElement | AssignmentProperty | ObjectPattern | ArrayPattern | RestElement | AssignmentPattern | ClassBody | MethodDefinition | MetaProperty | ImportSpecifier | ImportDefaultSpecifier | ImportNamespaceSpecifier | ExportSpecifier | AnonymousFunctionDeclaration | AnonymousClassDeclaration | PropertyDefinition | PrivateIdentifier | StaticBlock

export function parse(input: string, options: Options): Program

export function parseExpressionAt(input: string, pos: number, options: Options): Expression

export function tokenizer(input: string, options: Options): {
  getToken(): Token
  [Symbol.iterator](): Iterator<Token>
}

export type ecmaVersion = 3 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 2015 | 2016 | 2017 | 2018 | 2019 | 2020 | 2021 | 2022 | 2023 | 2024 | "latest"

export interface Options {
  /**
   * `ecmaVersion` indicates the ECMAScript version to parse. Must be
   * either 3, 5, 6 (or 2015), 7 (2016), 8 (2017), 9 (2018), 10
   * (2019), 11 (2020), 12 (2021), 13 (2022), 14 (2023), or `"latest"`
   * (the latest version the library supports). This influences
   * support for strict mode, the set of reserved words, and support
   * for new syntax features.
   */
  ecmaVersion: ecmaVersion

  /**
   * `sourceType` indicates the mode the code should be parsed in.
   * Can be either `"script"` or `"module"`. This influences global
   * strict mode and parsing of `import` and `export` declarations.
   */
  sourceType?: "script" | "module"

  /**
   * a callback that will be called when a semicolon is automatically inserted.
   * @param lastTokEnd the position of the comma as an offset
   * @param lastTokEndLoc location if {@link locations} is enabled
   */
  onInsertedSemicolon?: (lastTokEnd: number, lastTokEndLoc?: Position) => void

  /**
   * similar to `onInsertedSemicolon`, but for trailing commas
   * @param lastTokEnd the position of the comma as an offset
   * @param lastTokEndLoc location if `locations` is enabled
   */
  onTrailingComma?: (lastTokEnd: number, lastTokEndLoc?: Position) => void

  /**
   * By default, reserved words are only enforced if ecmaVersion >= 5.
   * Set `allowReserved` to a boolean value to explicitly turn this on
   * an off. When this option has the value "never", reserved words
   * and keywords can also not be used as property names.
   */
  allowReserved?: boolean | "never"

  /**
   * When enabled, a return at the top level is not considered an error.
   */
  allowReturnOutsideFunction?: boolean

  /**
   * When enabled, import/export statements are not constrained to
   * appearing at the top of the program, and an import.meta expression
   * in a script isn't considered an error.
   */
  allowImportExportEverywhere?: boolean

  /**
   * By default, `await` identifiers are allowed to appear at the top-level scope only if {@link ecmaVersion} >= 2022.
   * When enabled, await identifiers are allowed to appear at the top-level scope,
   * but they are still not allowed in non-async functions.
   */
  allowAwaitOutsideFunction?: boolean

  /**
   * When enabled, super identifiers are not constrained to
   * appearing in methods and do not raise an error when they appear elsewhere.
   */
  allowSuperOutsideMethod?: boolean

  /**
   * When enabled, hashbang directive in the beginning of file is
   * allowed and treated as a line comment. Enabled by default when
   * {@link ecmaVersion} >= 2023.
   */
  allowHashBang?: boolean

  /**
   * By default, the parser will verify that private properties are
   * only used in places where they are valid and have been declared.
   * Set this to false to turn such checks off.
   */
  checkPrivateFields?: boolean

  /**
   * When `locations` is on, `loc` properties holding objects with
   * `start` and `end` properties as {@link Position} objects will be attached to the
   * nodes.
   */
  locations?: boolean

  /**
   * a callback that will cause Acorn to call that function with object in the same
   * format as tokens returned from `tokenizer().getToken()`. Note
   * that you are not allowed to call the parser from the
   * callback—that will corrupt its internal state.
   */
  onToken?: ((token: Token) => void) | Token[]

  /**
   * This takes a function or an array.
   *
   * When a function is passed, Acorn will call that function with `(block, text, start,
   * end)` parameters whenever a comment is skipped. `block` is a
   * boolean indicating whether this is a block (`/* *\/`) comment,
   * `text` is the content of the comment, and `start` and `end` are
   * character offsets that denote the start and end of the comment.
   * When the {@link locations} option is on, two more parameters are
   * passed, the full locations of {@link Position} type of the start and
   * end of the comments.
   *
   * When a array is passed, each found comment of {@link Comment} type is pushed to the array.
   *
   * Note that you are not allowed to call the
   * parser from the callback—that will corrupt its internal state.
   */
  onComment?: ((
    isBlock: boolean, text: string, start: number, end: number, startLoc?: Position,
    endLoc?: Position
  ) => void) | Comment[]

  /**
   * Nodes have their start and end characters offsets recorded in
   * `start` and `end` properties (directly on the node, rather than
   * the `loc` object, which holds line/column data. To also add a
   * [semi-standardized][range] `range` property holding a `[start,
   * end]` array with the same numbers, set the `ranges` option to
   * `true`.
   */
  ranges?: boolean

  /**
   * It is possible to parse multiple files into a single AST by
   * passing the tree produced by parsing the first file as
   * `program` option in subsequent parses. This will add the
   * toplevel forms of the parsed file to the `Program` (top) node
   * of an existing parse tree.
   */
  program?: Node

  /**
   * When {@link locations} is on, you can pass this to record the source
   * file in every node's `loc` object.
   */
  sourceFile?: string

  /**
   * This value, if given, is stored in every node, whether {@link locations} is on or off.
   */
  directSourceFile?: string

  /**
   * When enabled, parenthesized expressions are represented by
   * (non-standard) ParenthesizedExpression nodes
   */
  preserveParens?: boolean
}

export class Parser {
  options: Options
  input: string

  private constructor(options: Options, input: string, startPos?: number)
  parse(): Program

  static parse(input: string, options: Options): Program
  static parseExpressionAt(input: string, pos: number, options: Options): Expression
  static tokenizer(input: string, options: Options): {
    getToken(): Token
    [Symbol.iterator](): Iterator<Token>
  }
  static extend(...plugins: ((BaseParser: typeof Parser) => typeof Parser)[]): typeof Parser
}

export const defaultOptions: Options

export function getLineInfo(input: string, offset: number): Position

export class TokenType {
  label: string
  keyword: string | undefined
}

export const tokTypes: {
  num: TokenType
  regexp: TokenType
  string: TokenType
  name: TokenType
  privateId: TokenType
  eof: TokenType

  bracketL: TokenType
  bracketR: TokenType
  braceL: TokenType
  braceR: TokenType
  parenL: TokenType
  parenR: TokenType
  comma: TokenType
  semi: TokenType
  colon: TokenType
  dot: TokenType
  question: TokenType
  questionDot: TokenType
  arrow: TokenType
  template: TokenType
  invalidTemplate: TokenType
  ellipsis: TokenType
  backQuote: TokenType
  dollarBraceL: TokenType

  eq: TokenType
  assign: TokenType
  incDec: TokenType
  prefix: TokenType
  logicalOR: TokenType
  logicalAND: TokenType
  bitwiseOR: TokenType
  bitwiseXOR: TokenType
  bitwiseAND: TokenType
  equality: TokenType
  relational: TokenType
  bitShift: TokenType
  plusMin: TokenType
  modulo: TokenType
  star: TokenType
  slash: TokenType
  starstar: TokenType
  coalesce: TokenType

  _break: TokenType
  _case: TokenType
  _catch: TokenType
  _continue: TokenType
  _debugger: TokenType
  _default: TokenType
  _do: TokenType
  _else: TokenType
  _finally: TokenType
  _for: TokenType
  _function: TokenType
  _if: TokenType
  _return: TokenType
  _switch: TokenType
  _throw: TokenType
  _try: TokenType
  _var: TokenType
  _const: TokenType
  _while: TokenType
  _with: TokenType
  _new: TokenType
  _this: TokenType
  _super: TokenType
  _class: TokenType
  _extends: TokenType
  _export: TokenType
  _import: TokenType
  _null: TokenType
  _true: TokenType
  _false: TokenType
  _in: TokenType
  _instanceof: TokenType
  _typeof: TokenType
  _void: TokenType
  _delete: TokenType
}

export interface Comment {
  type: "Line" | "Block"
  value: string
  start: number
  end: number
  loc?: SourceLocation
  range?: [number, number]
}

export class Token {
  type: TokenType
  start: number
  end: number
  loc?: SourceLocation
  range?: [number, number]
}

export const version: string
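Editorial note, not part of the vendored file: the declarations above describe acorn's public surface, so a minimal TypeScript sketch of how they fit together may help when reading the rest of this diff. The sample source strings below are made up for illustration; everything else follows the signatures declared above.

```ts
import { parse, tokenizer, type Comment, type Options } from "acorn";

// Passing an array as `onComment` makes acorn push each skipped comment here.
const comments: Comment[] = [];

const options: Options = {
  ecmaVersion: 2022,
  sourceType: "module",
  locations: true, // attach `loc` objects with Position start/end to every node
  onComment: comments,
};

const ast = parse("export const answer = 42; // the answer", options);
console.log(ast.type, ast.sourceType, ast.body.length, comments.length);

// The tokenizer result is iterable per the [Symbol.iterator] declaration above.
for (const token of tokenizer("1 + 2 * 3", { ecmaVersion: "latest" })) {
  console.log(token.type.label, token.start, token.end);
}
```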
6002
node_modules/acorn/dist/acorn.js
generated
vendored
Normal file
File diff suppressed because it is too large
5973
node_modules/acorn/dist/acorn.mjs
generated
vendored
Normal file
File diff suppressed because it is too large
90
node_modules/acorn/dist/bin.js
generated
vendored
Normal file
@ -0,0 +1,90 @@
'use strict';

var path = require('path');
var fs = require('fs');
var acorn = require('./acorn.js');

function _interopNamespaceDefault(e) {
  var n = Object.create(null);
  if (e) {
    Object.keys(e).forEach(function (k) {
      if (k !== 'default') {
        var d = Object.getOwnPropertyDescriptor(e, k);
        Object.defineProperty(n, k, d.get ? d : {
          enumerable: true,
          get: function () { return e[k]; }
        });
      }
    });
  }
  n.default = e;
  return Object.freeze(n);
}

var acorn__namespace = /*#__PURE__*/_interopNamespaceDefault(acorn);

var inputFilePaths = [], forceFileName = false, fileMode = false, silent = false, compact = false, tokenize = false;
var options = {};

function help(status) {
  var print = (status === 0) ? console.log : console.error;
  print("usage: " + path.basename(process.argv[1]) + " [--ecma3|--ecma5|--ecma6|--ecma7|--ecma8|--ecma9|...|--ecma2015|--ecma2016|--ecma2017|--ecma2018|...]");
  print("        [--tokenize] [--locations] [--allow-hash-bang] [--allow-await-outside-function] [--compact] [--silent] [--module] [--help] [--] [<infile>...]");
  process.exit(status);
}

for (var i = 2; i < process.argv.length; ++i) {
  var arg = process.argv[i];
  if (arg[0] !== "-" || arg === "-") { inputFilePaths.push(arg); }
  else if (arg === "--") {
    inputFilePaths.push.apply(inputFilePaths, process.argv.slice(i + 1));
    forceFileName = true;
    break
  } else if (arg === "--locations") { options.locations = true; }
  else if (arg === "--allow-hash-bang") { options.allowHashBang = true; }
  else if (arg === "--allow-await-outside-function") { options.allowAwaitOutsideFunction = true; }
  else if (arg === "--silent") { silent = true; }
  else if (arg === "--compact") { compact = true; }
  else if (arg === "--help") { help(0); }
  else if (arg === "--tokenize") { tokenize = true; }
  else if (arg === "--module") { options.sourceType = "module"; }
  else {
    var match = arg.match(/^--ecma(\d+)$/);
    if (match)
      { options.ecmaVersion = +match[1]; }
    else
      { help(1); }
  }
}

function run(codeList) {
  var result = [], fileIdx = 0;
  try {
    codeList.forEach(function (code, idx) {
      fileIdx = idx;
      if (!tokenize) {
        result = acorn__namespace.parse(code, options);
        options.program = result;
      } else {
        var tokenizer = acorn__namespace.tokenizer(code, options), token;
        do {
          token = tokenizer.getToken();
          result.push(token);
        } while (token.type !== acorn__namespace.tokTypes.eof)
      }
    });
  } catch (e) {
    console.error(fileMode ? e.message.replace(/\(\d+:\d+\)$/, function (m) { return m.slice(0, 1) + inputFilePaths[fileIdx] + " " + m.slice(1); }) : e.message);
    process.exit(1);
  }
  if (!silent) { console.log(JSON.stringify(result, null, compact ? null : 2)); }
}

if (fileMode = inputFilePaths.length && (forceFileName || !inputFilePaths.includes("-") || inputFilePaths.length !== 1)) {
  run(inputFilePaths.map(function (path) { return fs.readFileSync(path, "utf8"); }));
} else {
  var code = "";
  process.stdin.resume();
  process.stdin.on("data", function (chunk) { return code += chunk; });
  process.stdin.on("end", function () { return run([code]); });
}
50
node_modules/acorn/package.json
generated
vendored
Normal file
@ -0,0 +1,50 @@
{
  "name": "acorn",
  "description": "ECMAScript parser",
  "homepage": "https://github.com/acornjs/acorn",
  "main": "dist/acorn.js",
  "types": "dist/acorn.d.ts",
  "module": "dist/acorn.mjs",
  "exports": {
    ".": [
      {
        "import": "./dist/acorn.mjs",
        "require": "./dist/acorn.js",
        "default": "./dist/acorn.js"
      },
      "./dist/acorn.js"
    ],
    "./package.json": "./package.json"
  },
  "version": "8.11.3",
  "engines": {
    "node": ">=0.4.0"
  },
  "maintainers": [
    {
      "name": "Marijn Haverbeke",
      "email": "marijnh@gmail.com",
      "web": "https://marijnhaverbeke.nl"
    },
    {
      "name": "Ingvar Stepanyan",
      "email": "me@rreverser.com",
      "web": "https://rreverser.com/"
    },
    {
      "name": "Adrian Heine",
      "web": "http://adrianheine.de"
    }
  ],
  "repository": {
    "type": "git",
    "url": "https://github.com/acornjs/acorn.git"
  },
  "license": "MIT",
  "scripts": {
    "prepare": "cd ..; npm run build:main"
  },
  "bin": {
    "acorn": "./bin/acorn"
  }
}
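Editorial note, not part of the vendored package: the `exports` map above gives ESM consumers `dist/acorn.mjs` and CommonJS consumers `dist/acorn.js`. A hypothetical check of that dual entry point, assuming an ESM TypeScript setup, might look like this.

```ts
import { createRequire } from "node:module";
import * as acornEsm from "acorn"; // resolved via the "import" condition -> dist/acorn.mjs

const require = createRequire(import.meta.url);
const acornCjs = require("acorn"); // resolved via the "require" condition -> dist/acorn.js

// Both entry points expose the same API surface and version string.
console.log(acornEsm.version, acornCjs.version);
```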
21
node_modules/arg/LICENSE.md
generated
vendored
Normal file
@ -0,0 +1,21 @@
MIT License

Copyright (c) 2017-2019 Zeit, Inc.

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
280
node_modules/arg/README.md
generated
vendored
Normal file
@ -0,0 +1,280 @@
# Arg [![CircleCI](https://circleci.com/gh/zeit/arg.svg?style=svg)](https://circleci.com/gh/zeit/arg)

`arg` is yet another command line option parser.

## Installation

Use Yarn or NPM to install.

```console
$ yarn add arg
```

or

```console
$ npm install arg
```

## Usage

`arg()` takes either 1 or 2 arguments:

1. Command line specification object (see below)
2. Parse options (_Optional_, defaults to `{permissive: false, argv: process.argv.slice(2), stopAtPositional: false}`)

It returns an object with any values present on the command-line (missing options are thus
missing from the resulting object). Arg performs no validation/requirement checking - we
leave that up to the application.

All parameters that aren't consumed by options (commonly referred to as "extra" parameters)
are added to `result._`, which is _always_ an array (even if no extra parameters are passed,
in which case an empty array is returned).

```javascript
const arg = require('arg');

// `options` is an optional parameter
const args = arg(spec, options = {permissive: false, argv: process.argv.slice(2)});
```

For example:

```console
$ node ./hello.js --verbose -vvv --port=1234 -n 'My name' foo bar --tag qux --tag=qix -- --foobar
```

```javascript
// hello.js
const arg = require('arg');

const args = arg({
	// Types
	'--help': Boolean,
	'--version': Boolean,
	'--verbose': arg.COUNT, // Counts the number of times --verbose is passed
	'--port': Number, // --port <number> or --port=<number>
	'--name': String, // --name <string> or --name=<string>
	'--tag': [String], // --tag <string> or --tag=<string>

	// Aliases
	'-v': '--verbose',
	'-n': '--name', // -n <string>; result is stored in --name
	'--label': '--name' // --label <string> or --label=<string>;
	                    // result is stored in --name
});

console.log(args);
/*
{
	_: ["foo", "bar", "--foobar"],
	'--port': 1234,
	'--verbose': 4,
	'--name': "My name",
	'--tag': ["qux", "qix"]
}
*/
```

The value for each key=>value pair is either a type (function or [function]) or a string (indicating an alias).

- In the case of a function, the string value of the argument's value is passed to it,
  and the return value is used as the ultimate value.

- In the case of an array, the only element _must_ be a type function. Array types indicate
  that the argument may be passed multiple times, and as such the resulting value in the returned
  object is an array with all of the values that were passed using the specified flag.

- In the case of a string, an alias is established. If a flag is passed that matches the _key_,
  then the _value_ is substituted in its place.

Type functions are passed three arguments:

1. The parameter value (always a string)
2. The parameter name (e.g. `--label`)
3. The previous value for the destination (useful for reduce-like operations or for supporting `-v` multiple times, etc.)

This means the built-in `String`, `Number`, and `Boolean` type constructors "just work" as type functions.

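For instance, a type function can do its own validation or coercion before returning a value. A minimal sketch (the `parsePort` helper below is illustrative only, not part of `arg` itself):

```javascript
const arg = require('arg');

// Hypothetical custom type function: coerces the raw string and validates it.
function parsePort(value, argName, previousValue) {
	const port = Number(value);
	if (!Number.isInteger(port) || port < 1 || port > 65535) {
		throw new Error(`${argName} must be an integer between 1 and 65535`);
	}
	return port;
}

const args = arg({'--port': parsePort}, {argv: ['--port', '8080']});

console.log(args['--port']); // 8080
```

Because the previous value is passed as the third argument, the same pattern also works for reduce-style accumulation across repeated flags.
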
Note that `Boolean` and `[Boolean]` have special treatment - an option argument is _not_ consumed or passed, but instead `true` is
returned. These options are called "flags".

For custom handlers that wish to behave as flags, you may pass the function through `arg.flag()`:

```javascript
const arg = require('arg');

const argv = ['--foo', 'bar', '-ff', 'baz', '--foo', '--foo', 'qux', '-fff', 'qix'];

function myHandler(value, argName, previousValue) {
	/* `value` is always `true` */
	return 'na ' + (previousValue || 'batman!');
}

const args = arg({
	'--foo': arg.flag(myHandler),
	'-f': '--foo'
}, {
	argv
});

console.log(args);
/*
{
	_: ['bar', 'baz', 'qux', 'qix'],
	'--foo': 'na na na na na na na na batman!'
}
*/
```

As well, `arg` supplies a helper argument handler called `arg.COUNT`, which is equivalent to a `[Boolean]` argument's `.length`
property - effectively counting the number of times the boolean flag, denoted by the key, is passed on the command line.
For example, this is how you could implement `ssh`'s multiple levels of verbosity (`-vvvv` being the most verbose).

```javascript
const arg = require('arg');

const argv = ['-AAAA', '-BBBB'];

const args = arg({
	'-A': arg.COUNT,
	'-B': [Boolean]
}, {
	argv
});

console.log(args);
/*
{
	_: [],
	'-A': 4,
	'-B': [true, true, true, true]
}
*/
```

### Options

If a second parameter is specified and is an object, it specifies parsing options to modify the behavior of `arg()`.

#### `argv`

If you have already sliced or generated a number of raw arguments to be parsed (as opposed to letting `arg`
slice them from `process.argv`) you may specify them in the `argv` option.

For example:

```javascript
const args = arg(
	{
		'--foo': String
	}, {
		argv: ['hello', '--foo', 'world']
	}
);
```

results in:

```javascript
const args = {
	_: ['hello'],
	'--foo': 'world'
};
```

#### `permissive`

When `permissive` is set to `true`, `arg` will push any unknown arguments
onto the "extra" argument array (`result._`) instead of throwing an error about
an unknown flag.

For example:

```javascript
const arg = require('arg');

const argv = ['--foo', 'hello', '--qux', 'qix', '--bar', '12345', 'hello again'];

const args = arg(
	{
		'--foo': String,
		'--bar': Number
	}, {
		argv,
		permissive: true
	}
);
```

results in:

```javascript
const args = {
	_: ['--qux', 'qix', 'hello again'],
	'--foo': 'hello',
	'--bar': 12345
}
```

#### `stopAtPositional`

When `stopAtPositional` is set to `true`, `arg` will halt parsing at the first
positional argument.

For example:

```javascript
const arg = require('arg');

const argv = ['--foo', 'hello', '--bar'];

const args = arg(
	{
		'--foo': Boolean,
		'--bar': Boolean
	}, {
		argv,
		stopAtPositional: true
	}
);
```

results in:

```javascript
const args = {
	_: ['hello', '--bar'],
	'--foo': true
};
```

### Errors

Some errors that `arg` throws provide a `.code` property in order to aid in recovering from user error, or to
differentiate between user error and developer error (bug).

##### ARG_UNKNOWN_OPTION

If an unknown option (not defined in the spec object) is passed, an error with code `ARG_UNKNOWN_OPTION` will be thrown:
```js
// cli.js
try {
	require('arg')({ '--hi': String });
} catch (err) {
	if (err.code === 'ARG_UNKNOWN_OPTION') {
		console.log(err.message);
	} else {
		throw err;
	}
}
```

```shell
node cli.js --extraneous true
Unknown or unexpected option: --extraneous
```

# License

Copyright © 2017-2019 by ZEIT, Inc. Released under the [MIT License](LICENSE.md).
31
node_modules/arg/index.d.ts
generated
vendored
Normal file
31
node_modules/arg/index.d.ts
generated
vendored
Normal file
@ -0,0 +1,31 @@
declare const flagSymbol: unique symbol;

declare function arg<T extends arg.Spec>(spec: T, options?: arg.Options): arg.Result<T>;

declare namespace arg {
	export function flag<T>(fn: T): T & { [flagSymbol]: true };

	export const COUNT: Handler<number> & { [flagSymbol]: true };

	export type Handler <T = any> = (value: string, name: string, previousValue?: T) => T;

	export interface Spec {
		[key: string]: string | Handler | [Handler];
	}

	export type Result<T extends Spec> = { _: string[] } & {
		[K in keyof T]?: T[K] extends Handler
			? ReturnType<T[K]>
			: T[K] extends [Handler]
				? Array<ReturnType<T[K][0]>>
				: never
	};

	export interface Options {
		argv?: string[];
		permissive?: boolean;
		stopAtPositional?: boolean;
	}
}

export = arg;
144
node_modules/arg/index.js
generated
vendored
Normal file
144
node_modules/arg/index.js
generated
vendored
Normal file
@ -0,0 +1,144 @@
const flagSymbol = Symbol('arg flag');

function arg(opts, {argv = process.argv.slice(2), permissive = false, stopAtPositional = false} = {}) {
	if (!opts) {
		throw new Error('Argument specification object is required');
	}

	const result = {_: []};

	const aliases = {};
	const handlers = {};

	for (const key of Object.keys(opts)) {
		if (!key) {
			throw new TypeError('Argument key cannot be an empty string');
		}

		if (key[0] !== '-') {
			throw new TypeError(`Argument key must start with '-' but found: '${key}'`);
		}

		if (key.length === 1) {
			throw new TypeError(`Argument key must have a name; singular '-' keys are not allowed: ${key}`);
		}

		if (typeof opts[key] === 'string') {
			aliases[key] = opts[key];
			continue;
		}

		let type = opts[key];
		let isFlag = false;

		if (Array.isArray(type) && type.length === 1 && typeof type[0] === 'function') {
			const [fn] = type;
			type = (value, name, prev = []) => {
				prev.push(fn(value, name, prev[prev.length - 1]));
				return prev;
			};
			isFlag = fn === Boolean || fn[flagSymbol] === true;
		} else if (typeof type === 'function') {
			isFlag = type === Boolean || type[flagSymbol] === true;
		} else {
			throw new TypeError(`Type missing or not a function or valid array type: ${key}`);
		}

		if (key[1] !== '-' && key.length > 2) {
			throw new TypeError(`Short argument keys (with a single hyphen) must have only one character: ${key}`);
		}

		handlers[key] = [type, isFlag];
	}

	for (let i = 0, len = argv.length; i < len; i++) {
		const wholeArg = argv[i];

		if (stopAtPositional && result._.length > 0) {
			result._ = result._.concat(argv.slice(i));
			break;
		}

		if (wholeArg === '--') {
			result._ = result._.concat(argv.slice(i + 1));
			break;
		}

		if (wholeArg.length > 1 && wholeArg[0] === '-') {
			/* eslint-disable operator-linebreak */
			const separatedArguments = (wholeArg[1] === '-' || wholeArg.length === 2)
				? [wholeArg]
				: wholeArg.slice(1).split('').map(a => `-${a}`);
			/* eslint-enable operator-linebreak */

			for (let j = 0; j < separatedArguments.length; j++) {
				const arg = separatedArguments[j];
				const [originalArgName, argStr] = arg[1] === '-' ? arg.split(/=(.*)/, 2) : [arg, undefined];

				let argName = originalArgName;
				while (argName in aliases) {
					argName = aliases[argName];
				}

				if (!(argName in handlers)) {
					if (permissive) {
						result._.push(arg);
						continue;
					} else {
						const err = new Error(`Unknown or unexpected option: ${originalArgName}`);
						err.code = 'ARG_UNKNOWN_OPTION';
						throw err;
					}
				}

				const [type, isFlag] = handlers[argName];

				if (!isFlag && ((j + 1) < separatedArguments.length)) {
					throw new TypeError(`Option requires argument (but was followed by another short argument): ${originalArgName}`);
				}

				if (isFlag) {
					result[argName] = type(true, argName, result[argName]);
				} else if (argStr === undefined) {
					if (
						argv.length < i + 2 ||
						(
							argv[i + 1].length > 1 &&
							(argv[i + 1][0] === '-') &&
							!(
								argv[i + 1].match(/^-?\d*(\.(?=\d))?\d*$/) &&
								(
									type === Number ||
									// eslint-disable-next-line no-undef
									(typeof BigInt !== 'undefined' && type === BigInt)
								)
							)
						)
					) {
						const extended = originalArgName === argName ? '' : ` (alias for ${argName})`;
						throw new Error(`Option requires argument: ${originalArgName}${extended}`);
					}

					result[argName] = type(argv[i + 1], argName, result[argName]);
					++i;
				} else {
					result[argName] = type(argStr, argName, result[argName]);
				}
			}
		} else {
			result._.push(wholeArg);
		}
	}

	return result;
}

arg.flag = fn => {
	fn[flagSymbol] = true;
	return fn;
};

// Utility types
arg.COUNT = arg.flag((v, name, existingCount) => (existingCount || 0) + 1);

module.exports = arg;
Some files were not shown because too many files have changed in this diff.