courrier
This commit is contained in:
parent 931fe1a1cb
commit 26ec4864cf
84 app/api/sync-users/route.ts Normal file
@@ -0,0 +1,84 @@
import { NextResponse } from 'next/server';
import { PrismaClient } from '@prisma/client';
import bcrypt from 'bcryptjs';

const prisma = new PrismaClient();

export async function GET() {
  try {
    // Fetch users from equipes endpoint
    const response = await fetch(process.env.EQUIPES_API_URL || 'https://equipes-api-url/users', {
      headers: {
        'Authorization': `Bearer ${process.env.EQUIPES_API_TOKEN}`,
      },
    });

    if (!response.ok) {
      throw new Error(`API responded with status: ${response.status}`);
    }

    const equipeUsers = await response.json();

    const results = {
      total: equipeUsers.length,
      created: 0,
      updated: 0,
      failed: 0,
    };

    // Process each user from equipes
    for (const user of equipeUsers) {
      try {
        // Check if the keycloak ID exists in our database
        const existingUser = await prisma.user.findUnique({
          where: { id: user.id },
        });

        if (existingUser) {
          // Update existing user
          await prisma.user.update({
            where: { id: user.id },
            data: {
              email: user.email,
              // Don't update password as it might be locally changed
              updatedAt: new Date(),
            },
          });
          results.updated++;
        } else {
          // Create new user
          // Generate a temporary random password
          const tempPassword = await bcrypt.hash(Math.random().toString(36).slice(-10), 10);

          await prisma.user.create({
            data: {
              id: user.id, // Use the keycloak ID as our primary ID
              email: user.email,
              password: tempPassword,
              createdAt: new Date(),
              updatedAt: new Date(),
            },
          });
          results.created++;
        }
      } catch (error: any) {
        console.error(`Error processing user ${user.id}:`, error);
        results.failed++;
      }
    }

    return NextResponse.json({
      success: true,
      results,
    });
  } catch (error: any) {
    console.error('Error syncing users:', error);
    return NextResponse.json(
      {
        success: false,
        error: error.message
      },
      { status: 500 }
    );
  }
}
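Note: the temporary password above is derived from Math.random(), which is fine as a throwaway placeholder but is not a cryptographically secure source. A minimal alternative sketch using Node's crypto module (an assumption, not part of this commit) could look like:

```ts
import { randomBytes } from 'crypto';
import bcrypt from 'bcryptjs';

// Hypothetical helper: build the temporary password from CSPRNG bytes
// instead of Math.random(), then hash it with the same bcrypt cost factor.
async function makeTempPassword(): Promise<string> {
  const raw = randomBytes(16).toString('hex'); // 32 hex characters of secure randomness
  return bcrypt.hash(raw, 10);
}
```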
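For reference, a minimal sketch of exercising this route from a script or test, assuming the Next.js app is reachable at http://localhost:3000 (the URL and the typed response shape below are assumptions based on the handler above, not part of this commit):

```ts
// Hypothetical caller: trigger the sync endpoint and print the counters it returns.
async function runSync(): Promise<void> {
  const res = await fetch('http://localhost:3000/api/sync-users');
  if (!res.ok) {
    throw new Error(`Sync request failed with status ${res.status}`);
  }
  const body = (await res.json()) as {
    success: boolean;
    results: { total: number; created: number; updated: number; failed: number };
  };
  console.log(`Processed ${body.results.total} users`, body.results);
}

runSync().catch(console.error);
```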
1 node_modules/.bin/acorn generated vendored Symbolic link
@@ -0,0 +1 @@
../acorn/bin/acorn

1 node_modules/.bin/bcrypt generated vendored Symbolic link
@@ -0,0 +1 @@
../bcryptjs/bin/bcrypt

1 node_modules/.bin/ts-node generated vendored Symbolic link
@@ -0,0 +1 @@
../ts-node/dist/bin.js

1 node_modules/.bin/ts-node-cwd generated vendored Symbolic link
@@ -0,0 +1 @@
../ts-node/dist/bin-cwd.js

1 node_modules/.bin/ts-node-esm generated vendored Symbolic link
@@ -0,0 +1 @@
../ts-node/dist/bin-esm.js

1 node_modules/.bin/ts-node-script generated vendored Symbolic link
@@ -0,0 +1 @@
../ts-node/dist/bin-script.js

1 node_modules/.bin/ts-node-transpile-only generated vendored Symbolic link
@@ -0,0 +1 @@
../ts-node/dist/bin-transpile.js

1 node_modules/.bin/ts-script generated vendored Symbolic link
@@ -0,0 +1 @@
../ts-node/dist/bin-script-deprecated.js
185 node_modules/.package-lock.json generated vendored
@@ -954,6 +954,30 @@
|
||||
"node-fetch": "^3.3.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@cspotcode/source-map-support": {
|
||||
"version": "0.8.1",
|
||||
"resolved": "https://registry.npmjs.org/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz",
|
||||
"integrity": "sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==",
|
||||
"devOptional": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@jridgewell/trace-mapping": "0.3.9"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
}
|
||||
},
|
||||
"node_modules/@cspotcode/source-map-support/node_modules/@jridgewell/trace-mapping": {
|
||||
"version": "0.3.9",
|
||||
"resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.9.tgz",
|
||||
"integrity": "sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==",
|
||||
"devOptional": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@jridgewell/resolve-uri": "^3.0.3",
|
||||
"@jridgewell/sourcemap-codec": "^1.4.10"
|
||||
}
|
||||
},
|
||||
"node_modules/@csstools/color-helpers": {
|
||||
"version": "5.0.2",
|
||||
"resolved": "https://registry.npmjs.org/@csstools/color-helpers/-/color-helpers-5.0.2.tgz",
|
||||
@@ -4123,6 +4147,40 @@
|
||||
"tslib": "^2.8.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@tsconfig/node10": {
|
||||
"version": "1.0.11",
|
||||
"resolved": "https://registry.npmjs.org/@tsconfig/node10/-/node10-1.0.11.tgz",
|
||||
"integrity": "sha512-DcRjDCujK/kCk/cUe8Xz8ZSpm8mS3mNNpta+jGCA6USEDfktlNvm1+IuZ9eTcDbNk41BHwpHHeW+N1lKCz4zOw==",
|
||||
"devOptional": true,
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/@tsconfig/node12": {
|
||||
"version": "1.0.11",
|
||||
"resolved": "https://registry.npmjs.org/@tsconfig/node12/-/node12-1.0.11.tgz",
|
||||
"integrity": "sha512-cqefuRsh12pWyGsIoBKJA9luFu3mRxCA+ORZvA4ktLSzIuCUtWVxGIuXigEwO5/ywWFMZ2QEGKWvkZG1zDMTag==",
|
||||
"devOptional": true,
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/@tsconfig/node14": {
|
||||
"version": "1.0.3",
|
||||
"resolved": "https://registry.npmjs.org/@tsconfig/node14/-/node14-1.0.3.tgz",
|
||||
"integrity": "sha512-ysT8mhdixWK6Hw3i1V2AeRqZ5WfXg1G43mqoYlM2nc6388Fq5jcXyr5mRsqViLx/GJYdoL0bfXD8nmF+Zn/Iow==",
|
||||
"devOptional": true,
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/@tsconfig/node16": {
|
||||
"version": "1.0.4",
|
||||
"resolved": "https://registry.npmjs.org/@tsconfig/node16/-/node16-1.0.4.tgz",
|
||||
"integrity": "sha512-vxhUy4J8lyeyinH7Azl1pdd43GJhZH/tP2weN8TntQblOY+A0XbT8DJk1/oCPuOOyg/Ja757rG0CgHcWC8OfMA==",
|
||||
"devOptional": true,
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/@types/bcryptjs": {
|
||||
"version": "2.4.6",
|
||||
"resolved": "https://registry.npmjs.org/@types/bcryptjs/-/bcryptjs-2.4.6.tgz",
|
||||
"integrity": "sha512-9xlo6R2qDs5uixm0bcIqCeMCE6HiQsIyel9KQySStiyqNl2tnj2mP3DX1Nf56MD6KMenNNlBBsy3LJ7gUEQPXQ==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/@types/crypto-js": {
|
||||
"version": "4.2.2",
|
||||
"resolved": "https://registry.npmjs.org/@types/crypto-js/-/crypto-js-4.2.2.tgz",
|
||||
@@ -4353,6 +4411,32 @@
|
||||
"node": ">=14.6"
|
||||
}
|
||||
},
|
||||
"node_modules/acorn": {
|
||||
"version": "8.14.1",
|
||||
"resolved": "https://registry.npmjs.org/acorn/-/acorn-8.14.1.tgz",
|
||||
"integrity": "sha512-OvQ/2pUDKmgfCg++xsTX1wGxfTaszcHVcTctW4UJB4hibJx2HXxxO5UmVgyjMa+ZDsiaf5wWLXYpRWMmBI0QHg==",
|
||||
"devOptional": true,
|
||||
"license": "MIT",
|
||||
"bin": {
|
||||
"acorn": "bin/acorn"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=0.4.0"
|
||||
}
|
||||
},
|
||||
"node_modules/acorn-walk": {
|
||||
"version": "8.3.4",
|
||||
"resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.3.4.tgz",
|
||||
"integrity": "sha512-ueEepnujpqee2o5aIYnvHU6C0A42MNdsIDeqy5BydrkuC5R1ZuUFnm27EeFJGoEHJQgn3uleRvmTXaJgfXbt4g==",
|
||||
"devOptional": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"acorn": "^8.11.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=0.4.0"
|
||||
}
|
||||
},
|
||||
"node_modules/agent-base": {
|
||||
"version": "7.1.3",
|
||||
"resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.3.tgz",
|
||||
@@ -4533,6 +4617,15 @@
|
||||
],
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/bcryptjs": {
|
||||
"version": "3.0.2",
|
||||
"resolved": "https://registry.npmjs.org/bcryptjs/-/bcryptjs-3.0.2.tgz",
|
||||
"integrity": "sha512-k38b3XOZKv60C4E2hVsXTolJWfkGRMbILBIe2IBITXciy5bOsTKot5kDrf3ZfufQtQOUN5mXceUEpU1rTl9Uog==",
|
||||
"license": "BSD-3-Clause",
|
||||
"bin": {
|
||||
"bcrypt": "bin/bcrypt"
|
||||
}
|
||||
},
|
||||
"node_modules/binary-extensions": {
|
||||
"version": "2.3.0",
|
||||
"resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.3.0.tgz",
|
||||
@@ -4928,6 +5021,13 @@
|
||||
"integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/create-require": {
|
||||
"version": "1.1.1",
|
||||
"resolved": "https://registry.npmjs.org/create-require/-/create-require-1.1.1.tgz",
|
||||
"integrity": "sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==",
|
||||
"devOptional": true,
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/cross-spawn": {
|
||||
"version": "7.0.6",
|
||||
"resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz",
|
||||
@@ -5220,6 +5320,16 @@
|
||||
"resolved": "https://registry.npmjs.org/didyoumean/-/didyoumean-1.2.2.tgz",
|
||||
"integrity": "sha512-gxtyfqMg7GKyhQmb056K7M3xszy/myH8w+B4RT+QXBQsvAOdc3XymqDDPHx1BgPgsdAA5SIifona89YtRATDzw=="
|
||||
},
|
||||
"node_modules/diff": {
|
||||
"version": "4.0.2",
|
||||
"resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz",
|
||||
"integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==",
|
||||
"devOptional": true,
|
||||
"license": "BSD-3-Clause",
|
||||
"engines": {
|
||||
"node": ">=0.3.1"
|
||||
}
|
||||
},
|
||||
"node_modules/dlv": {
|
||||
"version": "1.1.3",
|
||||
"resolved": "https://registry.npmjs.org/dlv/-/dlv-1.1.3.tgz",
|
||||
@@ -6572,6 +6682,13 @@
|
||||
"libqp": "2.1.1"
|
||||
}
|
||||
},
|
||||
"node_modules/make-error": {
|
||||
"version": "1.3.6",
|
||||
"resolved": "https://registry.npmjs.org/make-error/-/make-error-1.3.6.tgz",
|
||||
"integrity": "sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==",
|
||||
"devOptional": true,
|
||||
"license": "ISC"
|
||||
},
|
||||
"node_modules/math-intrinsics": {
|
||||
"version": "1.1.0",
|
||||
"resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz",
|
||||
@@ -8699,6 +8816,57 @@
|
||||
"resolved": "https://registry.npmjs.org/ts-interface-checker/-/ts-interface-checker-0.1.13.tgz",
|
||||
"integrity": "sha512-Y/arvbn+rrz3JCKl9C4kVNfTfSm2/mEp5FSz5EsZSANGPSlQrpRI5M4PKF+mJnE52jOO90PnPSc3Ur3bTQw0gA=="
|
||||
},
|
||||
"node_modules/ts-node": {
|
||||
"version": "10.9.2",
|
||||
"resolved": "https://registry.npmjs.org/ts-node/-/ts-node-10.9.2.tgz",
|
||||
"integrity": "sha512-f0FFpIdcHgn8zcPSbf1dRevwt047YMnaiJM3u2w2RewrB+fob/zePZcrOyQoLMMO7aBIddLcQIEK5dYjkLnGrQ==",
|
||||
"devOptional": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@cspotcode/source-map-support": "^0.8.0",
|
||||
"@tsconfig/node10": "^1.0.7",
|
||||
"@tsconfig/node12": "^1.0.7",
|
||||
"@tsconfig/node14": "^1.0.0",
|
||||
"@tsconfig/node16": "^1.0.2",
|
||||
"acorn": "^8.4.1",
|
||||
"acorn-walk": "^8.1.1",
|
||||
"arg": "^4.1.0",
|
||||
"create-require": "^1.1.0",
|
||||
"diff": "^4.0.1",
|
||||
"make-error": "^1.1.1",
|
||||
"v8-compile-cache-lib": "^3.0.1",
|
||||
"yn": "3.1.1"
|
||||
},
|
||||
"bin": {
|
||||
"ts-node": "dist/bin.js",
|
||||
"ts-node-cwd": "dist/bin-cwd.js",
|
||||
"ts-node-esm": "dist/bin-esm.js",
|
||||
"ts-node-script": "dist/bin-script.js",
|
||||
"ts-node-transpile-only": "dist/bin-transpile.js",
|
||||
"ts-script": "dist/bin-script-deprecated.js"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"@swc/core": ">=1.2.50",
|
||||
"@swc/wasm": ">=1.2.50",
|
||||
"@types/node": "*",
|
||||
"typescript": ">=2.7"
|
||||
},
|
||||
"peerDependenciesMeta": {
|
||||
"@swc/core": {
|
||||
"optional": true
|
||||
},
|
||||
"@swc/wasm": {
|
||||
"optional": true
|
||||
}
|
||||
}
|
||||
},
|
||||
"node_modules/ts-node/node_modules/arg": {
|
||||
"version": "4.1.3",
|
||||
"resolved": "https://registry.npmjs.org/arg/-/arg-4.1.3.tgz",
|
||||
"integrity": "sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==",
|
||||
"devOptional": true,
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/tslib": {
|
||||
"version": "2.8.1",
|
||||
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz",
|
||||
@@ -8877,6 +9045,13 @@
|
||||
"uuid": "dist/bin/uuid"
|
||||
}
|
||||
},
|
||||
"node_modules/v8-compile-cache-lib": {
|
||||
"version": "3.0.1",
|
||||
"resolved": "https://registry.npmjs.org/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz",
|
||||
"integrity": "sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==",
|
||||
"devOptional": true,
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/vaul": {
|
||||
"version": "0.9.9",
|
||||
"resolved": "https://registry.npmjs.org/vaul/-/vaul-0.9.9.tgz",
|
||||
@@ -9221,6 +9396,16 @@
|
||||
"node": ">= 14"
|
||||
}
|
||||
},
|
||||
"node_modules/yn": {
|
||||
"version": "3.1.1",
|
||||
"resolved": "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz",
|
||||
"integrity": "sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==",
|
||||
"devOptional": true,
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=6"
|
||||
}
|
||||
},
|
||||
"node_modules/zod": {
|
||||
"version": "3.24.3",
|
||||
"resolved": "https://registry.npmjs.org/zod/-/zod-3.24.3.tgz",
|
||||
|
||||
21 node_modules/@cspotcode/source-map-support/LICENSE.md generated vendored Normal file
@@ -0,0 +1,21 @@
The MIT License (MIT)

Copyright (c) 2014 Evan Wallace

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
289 node_modules/@cspotcode/source-map-support/README.md generated vendored Normal file
@@ -0,0 +1,289 @@
|
||||
# Source Map Support
|
||||
|
||||
[](https://npmjs.org/package/@cspotcode/source-map-support)
|
||||
[](https://npmjs.org/package/@cspotcode/source-map-support)
|
||||
[](https://github.com/cspotcode/node-source-map-support/actions?query=workflow%3A%22Continuous+Integration%22)
|
||||
|
||||
This module provides source map support for stack traces in node via the [V8 stack trace API](https://github.com/v8/v8/wiki/Stack-Trace-API). It uses the [source-map](https://github.com/mozilla/source-map) module to replace the paths and line numbers of source-mapped files with their original paths and line numbers. The output mimics node's stack trace format with the goal of making every compile-to-JS language more of a first-class citizen. Source maps are completely general (not specific to any one language) so you can use source maps with multiple compile-to-JS languages in the same node process.
|
||||
|
||||
## Installation and Usage
|
||||
|
||||
#### Node support
|
||||
|
||||
```
|
||||
$ npm install @cspotcode/source-map-support
|
||||
```
|
||||
|
||||
Source maps can be generated using libraries such as [source-map-index-generator](https://github.com/twolfson/source-map-index-generator). Once you have a valid source map, place a source mapping comment somewhere in the file (usually done automatically or with an option by your transpiler):
|
||||
|
||||
```
|
||||
//# sourceMappingURL=path/to/source.map
|
||||
```
|
||||
|
||||
If multiple sourceMappingURL comments exist in one file, the last sourceMappingURL comment will be
|
||||
respected (e.g. if a file mentions the comment in code, or went through multiple transpilers).
|
||||
The path should either be absolute or relative to the compiled file.
|
||||
|
||||
From here you have two options.
|
||||
|
||||
##### CLI Usage
|
||||
|
||||
```bash
|
||||
node -r @cspotcode/source-map-support/register compiled.js
|
||||
# Or to enable hookRequire
|
||||
node -r @cspotcode/source-map-support/register-hook-require compiled.js
|
||||
```
|
||||
|
||||
##### Programmatic Usage
|
||||
|
||||
Put the following line at the top of the compiled file.
|
||||
|
||||
```js
|
||||
require('@cspotcode/source-map-support').install();
|
||||
```
|
||||
|
||||
It is also possible to install the source map support directly by
|
||||
requiring the `register` module which can be handy with ES6:
|
||||
|
||||
```js
|
||||
import '@cspotcode/source-map-support/register'
|
||||
|
||||
// Instead of:
|
||||
import sourceMapSupport from '@cspotcode/source-map-support'
|
||||
sourceMapSupport.install()
|
||||
```
|
||||
Note: if you're using babel-register, it includes source-map-support already.
|
||||
|
||||
It is also very useful with Mocha:
|
||||
|
||||
```
|
||||
$ mocha --require @cspotcode/source-map-support/register tests/
|
||||
```
|
||||
|
||||
#### Browser support
|
||||
|
||||
This library also works in Chrome. While the DevTools console already supports source maps, the V8 engine doesn't and `Error.prototype.stack` will be incorrect without this library. Everything will just work if you deploy your source files using [browserify](http://browserify.org/). Just make sure to pass the `--debug` flag to the browserify command so your source maps are included in the bundled code.
|
||||
|
||||
This library also works if you use another build process or just include the source files directly. In this case, include the file `browser-source-map-support.js` in your page and call `sourceMapSupport.install()`. It contains the whole library already bundled for the browser using browserify.
|
||||
|
||||
```html
|
||||
<script src="browser-source-map-support.js"></script>
|
||||
<script>sourceMapSupport.install();</script>
|
||||
```
|
||||
|
||||
This library also works if you use AMD (Asynchronous Module Definition), which is used in tools like [RequireJS](http://requirejs.org/). Just list `browser-source-map-support` as a dependency:
|
||||
|
||||
```html
|
||||
<script>
|
||||
define(['browser-source-map-support'], function(sourceMapSupport) {
|
||||
sourceMapSupport.install();
|
||||
});
|
||||
</script>
|
||||
```
|
||||
|
||||
## Options
|
||||
|
||||
This module installs two things: a change to the `stack` property on `Error` objects and a handler for uncaught exceptions that mimics node's default exception handler (the handler can be seen in the demos below). You may want to disable the handler if you have your own uncaught exception handler. This can be done by passing an argument to the installer:
|
||||
|
||||
```js
|
||||
require('@cspotcode/source-map-support').install({
|
||||
handleUncaughtExceptions: false
|
||||
});
|
||||
```
|
||||
|
||||
This module loads source maps from the filesystem by default. You can provide alternate loading behavior through a callback as shown below. For example, [Meteor](https://github.com/meteor) keeps all source maps cached in memory to avoid disk access.
|
||||
|
||||
```js
|
||||
require('@cspotcode/source-map-support').install({
|
||||
retrieveSourceMap: function(source) {
|
||||
if (source === 'compiled.js') {
|
||||
return {
|
||||
url: 'original.js',
|
||||
map: fs.readFileSync('compiled.js.map', 'utf8')
|
||||
};
|
||||
}
|
||||
return null;
|
||||
}
|
||||
});
|
||||
```
|
||||
|
||||
The module will by default assume a browser environment if XMLHttpRequest and window are defined. If either of these do not exist it will instead assume a node environment.
|
||||
In some rare cases, e.g. when running a browser emulation where both variables are also set, you can explicitly specify the environment to be either 'browser' or 'node'.
|
||||
|
||||
```js
|
||||
require('@cspotcode/source-map-support').install({
|
||||
environment: 'node'
|
||||
});
|
||||
```
|
||||
|
||||
To support files with inline source maps, the `hookRequire` option can be specified, which will monitor all source files for inline source maps.
|
||||
|
||||
|
||||
```js
|
||||
require('@cspotcode/source-map-support').install({
|
||||
hookRequire: true
|
||||
});
|
||||
```
|
||||
|
||||
This monkey patches the `require` module loading chain, so it is not enabled by default and is not recommended for any sort of production usage.
|
||||
|
||||
## Demos
|
||||
|
||||
#### Basic Demo
|
||||
|
||||
original.js:
|
||||
|
||||
```js
|
||||
throw new Error('test'); // This is the original code
|
||||
```
|
||||
|
||||
compiled.js:
|
||||
|
||||
```js
|
||||
require('@cspotcode/source-map-support').install();
|
||||
|
||||
throw new Error('test'); // This is the compiled code
|
||||
// The next line defines the sourceMapping.
|
||||
//# sourceMappingURL=compiled.js.map
|
||||
```
|
||||
|
||||
compiled.js.map:
|
||||
|
||||
```json
|
||||
{
|
||||
"version": 3,
|
||||
"file": "compiled.js",
|
||||
"sources": ["original.js"],
|
||||
"names": [],
|
||||
"mappings": ";;AAAA,MAAM,IAAI"
|
||||
}
|
||||
```
|
||||
|
||||
Run compiled.js using node (notice how the stack trace uses original.js instead of compiled.js):
|
||||
|
||||
```
|
||||
$ node compiled.js
|
||||
|
||||
original.js:1
|
||||
throw new Error('test'); // This is the original code
|
||||
^
|
||||
Error: test
|
||||
at Object.<anonymous> (original.js:1:7)
|
||||
at Module._compile (module.js:456:26)
|
||||
at Object.Module._extensions..js (module.js:474:10)
|
||||
at Module.load (module.js:356:32)
|
||||
at Function.Module._load (module.js:312:12)
|
||||
at Function.Module.runMain (module.js:497:10)
|
||||
at startup (node.js:119:16)
|
||||
at node.js:901:3
|
||||
```
|
||||
|
||||
#### TypeScript Demo
|
||||
|
||||
demo.ts:
|
||||
|
||||
```typescript
|
||||
declare function require(name: string);
|
||||
require('@cspotcode/source-map-support').install();
|
||||
class Foo {
|
||||
constructor() { this.bar(); }
|
||||
bar() { throw new Error('this is a demo'); }
|
||||
}
|
||||
new Foo();
|
||||
```
|
||||
|
||||
Compile and run the file using the TypeScript compiler from the terminal:
|
||||
|
||||
```
|
||||
$ npm install source-map-support typescript
|
||||
$ node_modules/typescript/bin/tsc -sourcemap demo.ts
|
||||
$ node demo.js
|
||||
|
||||
demo.ts:5
|
||||
bar() { throw new Error('this is a demo'); }
|
||||
^
|
||||
Error: this is a demo
|
||||
at Foo.bar (demo.ts:5:17)
|
||||
at new Foo (demo.ts:4:24)
|
||||
at Object.<anonymous> (demo.ts:7:1)
|
||||
at Module._compile (module.js:456:26)
|
||||
at Object.Module._extensions..js (module.js:474:10)
|
||||
at Module.load (module.js:356:32)
|
||||
at Function.Module._load (module.js:312:12)
|
||||
at Function.Module.runMain (module.js:497:10)
|
||||
at startup (node.js:119:16)
|
||||
at node.js:901:3
|
||||
```
|
||||
|
||||
There is also the option to use `-r source-map-support/register` with typescript, without the need to add `require('@cspotcode/source-map-support').install()` in the code base:
|
||||
|
||||
```
|
||||
$ npm install source-map-support typescript
|
||||
$ node_modules/typescript/bin/tsc -sourcemap demo.ts
|
||||
$ node -r source-map-support/register demo.js
|
||||
|
||||
demo.ts:5
|
||||
bar() { throw new Error('this is a demo'); }
|
||||
^
|
||||
Error: this is a demo
|
||||
at Foo.bar (demo.ts:5:17)
|
||||
at new Foo (demo.ts:4:24)
|
||||
at Object.<anonymous> (demo.ts:7:1)
|
||||
at Module._compile (module.js:456:26)
|
||||
at Object.Module._extensions..js (module.js:474:10)
|
||||
at Module.load (module.js:356:32)
|
||||
at Function.Module._load (module.js:312:12)
|
||||
at Function.Module.runMain (module.js:497:10)
|
||||
at startup (node.js:119:16)
|
||||
at node.js:901:3
|
||||
```
|
||||
|
||||
#### CoffeeScript Demo
|
||||
|
||||
demo.coffee:
|
||||
|
||||
```coffee
|
||||
require('@cspotcode/source-map-support').install()
|
||||
foo = ->
|
||||
bar = -> throw new Error 'this is a demo'
|
||||
bar()
|
||||
foo()
|
||||
```
|
||||
|
||||
Compile and run the file using the CoffeeScript compiler from the terminal:
|
||||
|
||||
```sh
|
||||
$ npm install @cspotcode/source-map-support coffeescript
|
||||
$ node_modules/.bin/coffee --map --compile demo.coffee
|
||||
$ node demo.js
|
||||
|
||||
demo.coffee:3
|
||||
bar = -> throw new Error 'this is a demo'
|
||||
^
|
||||
Error: this is a demo
|
||||
at bar (demo.coffee:3:22)
|
||||
at foo (demo.coffee:4:3)
|
||||
at Object.<anonymous> (demo.coffee:5:1)
|
||||
at Object.<anonymous> (demo.coffee:1:1)
|
||||
at Module._compile (module.js:456:26)
|
||||
at Object.Module._extensions..js (module.js:474:10)
|
||||
at Module.load (module.js:356:32)
|
||||
at Function.Module._load (module.js:312:12)
|
||||
at Function.Module.runMain (module.js:497:10)
|
||||
at startup (node.js:119:16)
|
||||
```
|
||||
|
||||
## Tests
|
||||
|
||||
This repo contains both automated tests for node and manual tests for the browser. The automated tests can be run using mocha (type `mocha` in the root directory). To run the manual tests:
|
||||
|
||||
* Build the tests using `build.js`
|
||||
* Launch the HTTP server (`npm run serve-tests`) and visit
|
||||
* http://127.0.0.1:1336/amd-test
|
||||
* http://127.0.0.1:1336/browser-test
|
||||
* http://127.0.0.1:1336/browserify-test - **Currently not working** due to a bug with browserify (see [pull request #66](https://github.com/evanw/node-source-map-support/pull/66) for details).
|
||||
* For `header-test`, run `server.js` inside that directory and visit http://127.0.0.1:1337/
|
||||
|
||||
## License
|
||||
|
||||
This code is available under the [MIT license](http://opensource.org/licenses/MIT).
|
||||
114 node_modules/@cspotcode/source-map-support/browser-source-map-support.js generated vendored Normal file
@@ -0,0 +1,114 @@
|
||||
/*
|
||||
* Support for source maps in V8 stack traces
|
||||
* https://github.com/evanw/node-source-map-support
|
||||
*/
|
||||
/*
|
||||
The buffer module from node.js, for the browser.
|
||||
|
||||
@author Feross Aboukhadijeh <feross@feross.org> <http://feross.org>
|
||||
license MIT
|
||||
*/
|
||||
(this.define||function(R,U){this.sourceMapSupport=U()})("browser-source-map-support",function(R){(function e(C,J,A){function p(f,c){if(!J[f]){if(!C[f]){var l="function"==typeof require&&require;if(!c&&l)return l(f,!0);if(t)return t(f,!0);throw Error("Cannot find module '"+f+"'");}l=J[f]={exports:{}};C[f][0].call(l.exports,function(q){var r=C[f][1][q];return p(r?r:q)},l,l.exports,e,C,J,A)}return J[f].exports}for(var t="function"==typeof require&&require,m=0;m<A.length;m++)p(A[m]);return p})({1:[function(C,
|
||||
J,A){R=C("./source-map-support")},{"./source-map-support":21}],2:[function(C,J,A){(function(e){function p(m){m=m.charCodeAt(0);if(43===m)return 62;if(47===m)return 63;if(48>m)return-1;if(58>m)return m-48+52;if(91>m)return m-65;if(123>m)return m-97+26}var t="undefined"!==typeof Uint8Array?Uint8Array:Array;e.toByteArray=function(m){function f(d){q[k++]=d}if(0<m.length%4)throw Error("Invalid string. Length must be a multiple of 4");var c=m.length;var l="="===m.charAt(c-2)?2:"="===m.charAt(c-1)?1:0;var q=
|
||||
new t(3*m.length/4-l);var r=0<l?m.length-4:m.length;var k=0;for(c=0;c<r;c+=4){var u=p(m.charAt(c))<<18|p(m.charAt(c+1))<<12|p(m.charAt(c+2))<<6|p(m.charAt(c+3));f((u&16711680)>>16);f((u&65280)>>8);f(u&255)}2===l?(u=p(m.charAt(c))<<2|p(m.charAt(c+1))>>4,f(u&255)):1===l&&(u=p(m.charAt(c))<<10|p(m.charAt(c+1))<<4|p(m.charAt(c+2))>>2,f(u>>8&255),f(u&255));return q};e.fromByteArray=function(m){var f=m.length%3,c="",l;var q=0;for(l=m.length-f;q<l;q+=3){var r=(m[q]<<16)+(m[q+1]<<8)+m[q+2];r="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".charAt(r>>
|
||||
18&63)+"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".charAt(r>>12&63)+"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".charAt(r>>6&63)+"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".charAt(r&63);c+=r}switch(f){case 1:r=m[m.length-1];c+="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".charAt(r>>2);c+="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".charAt(r<<4&63);c+="==";break;case 2:r=(m[m.length-2]<<8)+
|
||||
m[m.length-1],c+="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".charAt(r>>10),c+="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".charAt(r>>4&63),c+="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".charAt(r<<2&63),c+="="}return c}})("undefined"===typeof A?this.base64js={}:A)},{}],3:[function(C,J,A){},{}],4:[function(C,J,A){(function(e){var p=Object.prototype.toString,t="function"===typeof e.alloc&&"function"===typeof e.allocUnsafe&&"function"===
|
||||
typeof e.from;J.exports=function(m,f,c){if("number"===typeof m)throw new TypeError('"value" argument must not be a number');if("ArrayBuffer"===p.call(m).slice(8,-1)){f>>>=0;var l=m.byteLength-f;if(0>l)throw new RangeError("'offset' is out of bounds");if(void 0===c)c=l;else if(c>>>=0,c>l)throw new RangeError("'length' is out of bounds");return t?e.from(m.slice(f,f+c)):new e(new Uint8Array(m.slice(f,f+c)))}if("string"===typeof m){c=f;if("string"!==typeof c||""===c)c="utf8";if(!e.isEncoding(c))throw new TypeError('"encoding" must be a valid string encoding');
|
||||
return t?e.from(m,c):new e(m,c)}return t?e.from(m):new e(m)}}).call(this,C("buffer").Buffer)},{buffer:5}],5:[function(C,J,A){function e(a,b,h){if(!(this instanceof e))return new e(a,b,h);var w=typeof a;if("number"===w)var y=0<a?a>>>0:0;else if("string"===w){if("base64"===b)for(a=(a.trim?a.trim():a.replace(/^\s+|\s+$/g,"")).replace(L,"");0!==a.length%4;)a+="=";y=e.byteLength(a,b)}else if("object"===w&&null!==a)"Buffer"===a.type&&z(a.data)&&(a=a.data),y=0<+a.length?Math.floor(+a.length):0;else throw new TypeError("must start with number, buffer, array or string");
|
||||
if(this.length>G)throw new RangeError("Attempt to allocate Buffer larger than maximum size: 0x"+G.toString(16)+" bytes");if(e.TYPED_ARRAY_SUPPORT)var I=e._augment(new Uint8Array(y));else I=this,I.length=y,I._isBuffer=!0;if(e.TYPED_ARRAY_SUPPORT&&"number"===typeof a.byteLength)I._set(a);else{var K=a;if(z(K)||e.isBuffer(K)||K&&"object"===typeof K&&"number"===typeof K.length)if(e.isBuffer(a))for(b=0;b<y;b++)I[b]=a.readUInt8(b);else for(b=0;b<y;b++)I[b]=(a[b]%256+256)%256;else if("string"===w)I.write(a,
|
||||
0,b);else if("number"===w&&!e.TYPED_ARRAY_SUPPORT&&!h)for(b=0;b<y;b++)I[b]=0}return I}function p(a,b,h){var w="";for(h=Math.min(a.length,h);b<h;b++)w+=String.fromCharCode(a[b]);return w}function t(a,b,h){if(0!==a%1||0>a)throw new RangeError("offset is not uint");if(a+b>h)throw new RangeError("Trying to access beyond buffer length");}function m(a,b,h,w,y,I){if(!e.isBuffer(a))throw new TypeError("buffer must be a Buffer instance");if(b>y||b<I)throw new TypeError("value is out of bounds");if(h+w>a.length)throw new TypeError("index out of range");
|
||||
}function f(a,b,h,w){0>b&&(b=65535+b+1);for(var y=0,I=Math.min(a.length-h,2);y<I;y++)a[h+y]=(b&255<<8*(w?y:1-y))>>>8*(w?y:1-y)}function c(a,b,h,w){0>b&&(b=4294967295+b+1);for(var y=0,I=Math.min(a.length-h,4);y<I;y++)a[h+y]=b>>>8*(w?y:3-y)&255}function l(a,b,h,w,y,I){if(b>y||b<I)throw new TypeError("value is out of bounds");if(h+w>a.length)throw new TypeError("index out of range");}function q(a,b,h,w,y){y||l(a,b,h,4,3.4028234663852886E38,-3.4028234663852886E38);v.write(a,b,h,w,23,4);return h+4}function r(a,
|
||||
b,h,w,y){y||l(a,b,h,8,1.7976931348623157E308,-1.7976931348623157E308);v.write(a,b,h,w,52,8);return h+8}function k(a){for(var b=[],h=0;h<a.length;h++){var w=a.charCodeAt(h);if(127>=w)b.push(w);else{var y=h;55296<=w&&57343>=w&&h++;w=encodeURIComponent(a.slice(y,h+1)).substr(1).split("%");for(y=0;y<w.length;y++)b.push(parseInt(w[y],16))}}return b}function u(a){for(var b=[],h=0;h<a.length;h++)b.push(a.charCodeAt(h)&255);return b}function d(a,b,h,w,y){y&&(w-=w%y);for(y=0;y<w&&!(y+h>=b.length||y>=a.length);y++)b[y+
|
||||
h]=a[y];return y}function g(a){try{return decodeURIComponent(a)}catch(b){return String.fromCharCode(65533)}}var n=C("base64-js"),v=C("ieee754"),z=C("is-array");A.Buffer=e;A.SlowBuffer=e;A.INSPECT_MAX_BYTES=50;e.poolSize=8192;var G=1073741823;e.TYPED_ARRAY_SUPPORT=function(){try{var a=new ArrayBuffer(0),b=new Uint8Array(a);b.foo=function(){return 42};return 42===b.foo()&&"function"===typeof b.subarray&&0===(new Uint8Array(1)).subarray(1,1).byteLength}catch(h){return!1}}();e.isBuffer=function(a){return!(null==
|
||||
a||!a._isBuffer)};e.compare=function(a,b){if(!e.isBuffer(a)||!e.isBuffer(b))throw new TypeError("Arguments must be Buffers");for(var h=a.length,w=b.length,y=0,I=Math.min(h,w);y<I&&a[y]===b[y];y++);y!==I&&(h=a[y],w=b[y]);return h<w?-1:w<h?1:0};e.isEncoding=function(a){switch(String(a).toLowerCase()){case "hex":case "utf8":case "utf-8":case "ascii":case "binary":case "base64":case "raw":case "ucs2":case "ucs-2":case "utf16le":case "utf-16le":return!0;default:return!1}};e.concat=function(a,b){if(!z(a))throw new TypeError("Usage: Buffer.concat(list[, length])");
|
||||
if(0===a.length)return new e(0);if(1===a.length)return a[0];var h;if(void 0===b)for(h=b=0;h<a.length;h++)b+=a[h].length;var w=new e(b),y=0;for(h=0;h<a.length;h++){var I=a[h];I.copy(w,y);y+=I.length}return w};e.byteLength=function(a,b){a+="";switch(b||"utf8"){case "ascii":case "binary":case "raw":var h=a.length;break;case "ucs2":case "ucs-2":case "utf16le":case "utf-16le":h=2*a.length;break;case "hex":h=a.length>>>1;break;case "utf8":case "utf-8":h=k(a).length;break;case "base64":h=n.toByteArray(a).length;
|
||||
break;default:h=a.length}return h};e.prototype.length=void 0;e.prototype.parent=void 0;e.prototype.toString=function(a,b,h){var w=!1;b>>>=0;h=void 0===h||Infinity===h?this.length:h>>>0;a||(a="utf8");0>b&&(b=0);h>this.length&&(h=this.length);if(h<=b)return"";for(;;)switch(a){case "hex":a=b;b=h;h=this.length;if(!a||0>a)a=0;if(!b||0>b||b>h)b=h;w="";for(h=a;h<b;h++)a=w,w=this[h],w=16>w?"0"+w.toString(16):w.toString(16),w=a+w;return w;case "utf8":case "utf-8":w=a="";for(h=Math.min(this.length,h);b<h;b++)127>=
|
||||
this[b]?(a+=g(w)+String.fromCharCode(this[b]),w=""):w+="%"+this[b].toString(16);return a+g(w);case "ascii":return p(this,b,h);case "binary":return p(this,b,h);case "base64":return b=0===b&&h===this.length?n.fromByteArray(this):n.fromByteArray(this.slice(b,h)),b;case "ucs2":case "ucs-2":case "utf16le":case "utf-16le":b=this.slice(b,h);h="";for(a=0;a<b.length;a+=2)h+=String.fromCharCode(b[a]+256*b[a+1]);return h;default:if(w)throw new TypeError("Unknown encoding: "+a);a=(a+"").toLowerCase();w=!0}};
|
||||
e.prototype.equals=function(a){if(!e.isBuffer(a))throw new TypeError("Argument must be a Buffer");return 0===e.compare(this,a)};e.prototype.inspect=function(){var a="",b=A.INSPECT_MAX_BYTES;0<this.length&&(a=this.toString("hex",0,b).match(/.{2}/g).join(" "),this.length>b&&(a+=" ... "));return"<Buffer "+a+">"};e.prototype.compare=function(a){if(!e.isBuffer(a))throw new TypeError("Argument must be a Buffer");return e.compare(this,a)};e.prototype.get=function(a){console.log(".get() is deprecated. Access using array indexes instead.");
|
||||
return this.readUInt8(a)};e.prototype.set=function(a,b){console.log(".set() is deprecated. Access using array indexes instead.");return this.writeUInt8(a,b)};e.prototype.write=function(a,b,h,w){if(isFinite(b))isFinite(h)||(w=h,h=void 0);else{var y=w;w=b;b=h;h=y}b=Number(b)||0;y=this.length-b;h?(h=Number(h),h>y&&(h=y)):h=y;w=String(w||"utf8").toLowerCase();switch(w){case "hex":b=Number(b)||0;w=this.length-b;h?(h=Number(h),h>w&&(h=w)):h=w;w=a.length;if(0!==w%2)throw Error("Invalid hex string");h>w/
|
||||
2&&(h=w/2);for(w=0;w<h;w++){y=parseInt(a.substr(2*w,2),16);if(isNaN(y))throw Error("Invalid hex string");this[b+w]=y}a=w;break;case "utf8":case "utf-8":a=d(k(a),this,b,h);break;case "ascii":a=d(u(a),this,b,h);break;case "binary":a=d(u(a),this,b,h);break;case "base64":a=d(n.toByteArray(a),this,b,h);break;case "ucs2":case "ucs-2":case "utf16le":case "utf-16le":y=[];for(var I=0;I<a.length;I++){var K=a.charCodeAt(I);w=K>>8;K%=256;y.push(K);y.push(w)}a=d(y,this,b,h,2);break;default:throw new TypeError("Unknown encoding: "+
|
||||
w);}return a};e.prototype.toJSON=function(){return{type:"Buffer",data:Array.prototype.slice.call(this._arr||this,0)}};e.prototype.slice=function(a,b){var h=this.length;a=~~a;b=void 0===b?h:~~b;0>a?(a+=h,0>a&&(a=0)):a>h&&(a=h);0>b?(b+=h,0>b&&(b=0)):b>h&&(b=h);b<a&&(b=a);if(e.TYPED_ARRAY_SUPPORT)return e._augment(this.subarray(a,b));h=b-a;for(var w=new e(h,void 0,!0),y=0;y<h;y++)w[y]=this[y+a];return w};e.prototype.readUInt8=function(a,b){b||t(a,1,this.length);return this[a]};e.prototype.readUInt16LE=
|
||||
function(a,b){b||t(a,2,this.length);return this[a]|this[a+1]<<8};e.prototype.readUInt16BE=function(a,b){b||t(a,2,this.length);return this[a]<<8|this[a+1]};e.prototype.readUInt32LE=function(a,b){b||t(a,4,this.length);return(this[a]|this[a+1]<<8|this[a+2]<<16)+16777216*this[a+3]};e.prototype.readUInt32BE=function(a,b){b||t(a,4,this.length);return 16777216*this[a]+(this[a+1]<<16|this[a+2]<<8|this[a+3])};e.prototype.readInt8=function(a,b){b||t(a,1,this.length);return this[a]&128?-1*(255-this[a]+1):this[a]};
|
||||
e.prototype.readInt16LE=function(a,b){b||t(a,2,this.length);var h=this[a]|this[a+1]<<8;return h&32768?h|4294901760:h};e.prototype.readInt16BE=function(a,b){b||t(a,2,this.length);var h=this[a+1]|this[a]<<8;return h&32768?h|4294901760:h};e.prototype.readInt32LE=function(a,b){b||t(a,4,this.length);return this[a]|this[a+1]<<8|this[a+2]<<16|this[a+3]<<24};e.prototype.readInt32BE=function(a,b){b||t(a,4,this.length);return this[a]<<24|this[a+1]<<16|this[a+2]<<8|this[a+3]};e.prototype.readFloatLE=function(a,
|
||||
b){b||t(a,4,this.length);return v.read(this,a,!0,23,4)};e.prototype.readFloatBE=function(a,b){b||t(a,4,this.length);return v.read(this,a,!1,23,4)};e.prototype.readDoubleLE=function(a,b){b||t(a,8,this.length);return v.read(this,a,!0,52,8)};e.prototype.readDoubleBE=function(a,b){b||t(a,8,this.length);return v.read(this,a,!1,52,8)};e.prototype.writeUInt8=function(a,b,h){a=+a;b>>>=0;h||m(this,a,b,1,255,0);e.TYPED_ARRAY_SUPPORT||(a=Math.floor(a));this[b]=a;return b+1};e.prototype.writeUInt16LE=function(a,
|
||||
b,h){a=+a;b>>>=0;h||m(this,a,b,2,65535,0);e.TYPED_ARRAY_SUPPORT?(this[b]=a,this[b+1]=a>>>8):f(this,a,b,!0);return b+2};e.prototype.writeUInt16BE=function(a,b,h){a=+a;b>>>=0;h||m(this,a,b,2,65535,0);e.TYPED_ARRAY_SUPPORT?(this[b]=a>>>8,this[b+1]=a):f(this,a,b,!1);return b+2};e.prototype.writeUInt32LE=function(a,b,h){a=+a;b>>>=0;h||m(this,a,b,4,4294967295,0);e.TYPED_ARRAY_SUPPORT?(this[b+3]=a>>>24,this[b+2]=a>>>16,this[b+1]=a>>>8,this[b]=a):c(this,a,b,!0);return b+4};e.prototype.writeUInt32BE=function(a,
|
||||
b,h){a=+a;b>>>=0;h||m(this,a,b,4,4294967295,0);e.TYPED_ARRAY_SUPPORT?(this[b]=a>>>24,this[b+1]=a>>>16,this[b+2]=a>>>8,this[b+3]=a):c(this,a,b,!1);return b+4};e.prototype.writeInt8=function(a,b,h){a=+a;b>>>=0;h||m(this,a,b,1,127,-128);e.TYPED_ARRAY_SUPPORT||(a=Math.floor(a));0>a&&(a=255+a+1);this[b]=a;return b+1};e.prototype.writeInt16LE=function(a,b,h){a=+a;b>>>=0;h||m(this,a,b,2,32767,-32768);e.TYPED_ARRAY_SUPPORT?(this[b]=a,this[b+1]=a>>>8):f(this,a,b,!0);return b+2};e.prototype.writeInt16BE=function(a,
|
||||
b,h){a=+a;b>>>=0;h||m(this,a,b,2,32767,-32768);e.TYPED_ARRAY_SUPPORT?(this[b]=a>>>8,this[b+1]=a):f(this,a,b,!1);return b+2};e.prototype.writeInt32LE=function(a,b,h){a=+a;b>>>=0;h||m(this,a,b,4,2147483647,-2147483648);e.TYPED_ARRAY_SUPPORT?(this[b]=a,this[b+1]=a>>>8,this[b+2]=a>>>16,this[b+3]=a>>>24):c(this,a,b,!0);return b+4};e.prototype.writeInt32BE=function(a,b,h){a=+a;b>>>=0;h||m(this,a,b,4,2147483647,-2147483648);0>a&&(a=4294967295+a+1);e.TYPED_ARRAY_SUPPORT?(this[b]=a>>>24,this[b+1]=a>>>16,this[b+
|
||||
2]=a>>>8,this[b+3]=a):c(this,a,b,!1);return b+4};e.prototype.writeFloatLE=function(a,b,h){return q(this,a,b,!0,h)};e.prototype.writeFloatBE=function(a,b,h){return q(this,a,b,!1,h)};e.prototype.writeDoubleLE=function(a,b,h){return r(this,a,b,!0,h)};e.prototype.writeDoubleBE=function(a,b,h){return r(this,a,b,!1,h)};e.prototype.copy=function(a,b,h,w){h||(h=0);w||0===w||(w=this.length);b||(b=0);if(w!==h&&0!==a.length&&0!==this.length){if(w<h)throw new TypeError("sourceEnd < sourceStart");if(0>b||b>=a.length)throw new TypeError("targetStart out of bounds");
|
||||
if(0>h||h>=this.length)throw new TypeError("sourceStart out of bounds");if(0>w||w>this.length)throw new TypeError("sourceEnd out of bounds");w>this.length&&(w=this.length);a.length-b<w-h&&(w=a.length-b+h);w-=h;if(1E3>w||!e.TYPED_ARRAY_SUPPORT)for(var y=0;y<w;y++)a[y+b]=this[y+h];else a._set(this.subarray(h,h+w),b)}};e.prototype.fill=function(a,b,h){a||(a=0);b||(b=0);h||(h=this.length);if(h<b)throw new TypeError("end < start");if(h!==b&&0!==this.length){if(0>b||b>=this.length)throw new TypeError("start out of bounds");
|
||||
if(0>h||h>this.length)throw new TypeError("end out of bounds");if("number"===typeof a)for(;b<h;b++)this[b]=a;else{a=k(a.toString());for(var w=a.length;b<h;b++)this[b]=a[b%w]}return this}};e.prototype.toArrayBuffer=function(){if("undefined"!==typeof Uint8Array){if(e.TYPED_ARRAY_SUPPORT)return(new e(this)).buffer;for(var a=new Uint8Array(this.length),b=0,h=a.length;b<h;b+=1)a[b]=this[b];return a.buffer}throw new TypeError("Buffer.toArrayBuffer not supported in this browser");};var D=e.prototype;e._augment=
|
||||
function(a){a.constructor=e;a._isBuffer=!0;a._get=a.get;a._set=a.set;a.get=D.get;a.set=D.set;a.write=D.write;a.toString=D.toString;a.toLocaleString=D.toString;a.toJSON=D.toJSON;a.equals=D.equals;a.compare=D.compare;a.copy=D.copy;a.slice=D.slice;a.readUInt8=D.readUInt8;a.readUInt16LE=D.readUInt16LE;a.readUInt16BE=D.readUInt16BE;a.readUInt32LE=D.readUInt32LE;a.readUInt32BE=D.readUInt32BE;a.readInt8=D.readInt8;a.readInt16LE=D.readInt16LE;a.readInt16BE=D.readInt16BE;a.readInt32LE=D.readInt32LE;a.readInt32BE=
|
||||
D.readInt32BE;a.readFloatLE=D.readFloatLE;a.readFloatBE=D.readFloatBE;a.readDoubleLE=D.readDoubleLE;a.readDoubleBE=D.readDoubleBE;a.writeUInt8=D.writeUInt8;a.writeUInt16LE=D.writeUInt16LE;a.writeUInt16BE=D.writeUInt16BE;a.writeUInt32LE=D.writeUInt32LE;a.writeUInt32BE=D.writeUInt32BE;a.writeInt8=D.writeInt8;a.writeInt16LE=D.writeInt16LE;a.writeInt16BE=D.writeInt16BE;a.writeInt32LE=D.writeInt32LE;a.writeInt32BE=D.writeInt32BE;a.writeFloatLE=D.writeFloatLE;a.writeFloatBE=D.writeFloatBE;a.writeDoubleLE=
|
||||
D.writeDoubleLE;a.writeDoubleBE=D.writeDoubleBE;a.fill=D.fill;a.inspect=D.inspect;a.toArrayBuffer=D.toArrayBuffer;return a};var L=/[^+\/0-9A-z]/g},{"base64-js":2,ieee754:6,"is-array":7}],6:[function(C,J,A){A.read=function(e,p,t,m,f){var c=8*f-m-1;var l=(1<<c)-1,q=l>>1,r=-7;f=t?f-1:0;var k=t?-1:1,u=e[p+f];f+=k;t=u&(1<<-r)-1;u>>=-r;for(r+=c;0<r;t=256*t+e[p+f],f+=k,r-=8);c=t&(1<<-r)-1;t>>=-r;for(r+=m;0<r;c=256*c+e[p+f],f+=k,r-=8);if(0===t)t=1-q;else{if(t===l)return c?NaN:Infinity*(u?-1:1);c+=Math.pow(2,
|
||||
m);t-=q}return(u?-1:1)*c*Math.pow(2,t-m)};A.write=function(e,p,t,m,f,c){var l,q=8*c-f-1,r=(1<<q)-1,k=r>>1,u=23===f?Math.pow(2,-24)-Math.pow(2,-77):0;c=m?0:c-1;var d=m?1:-1,g=0>p||0===p&&0>1/p?1:0;p=Math.abs(p);isNaN(p)||Infinity===p?(p=isNaN(p)?1:0,m=r):(m=Math.floor(Math.log(p)/Math.LN2),1>p*(l=Math.pow(2,-m))&&(m--,l*=2),p=1<=m+k?p+u/l:p+u*Math.pow(2,1-k),2<=p*l&&(m++,l/=2),m+k>=r?(p=0,m=r):1<=m+k?(p=(p*l-1)*Math.pow(2,f),m+=k):(p=p*Math.pow(2,k-1)*Math.pow(2,f),m=0));for(;8<=f;e[t+c]=p&255,c+=
|
||||
d,p/=256,f-=8);m=m<<f|p;for(q+=f;0<q;e[t+c]=m&255,c+=d,m/=256,q-=8);e[t+c-d]|=128*g}},{}],7:[function(C,J,A){var e=Object.prototype.toString;J.exports=Array.isArray||function(p){return!!p&&"[object Array]"==e.call(p)}},{}],8:[function(C,J,A){(function(e){function p(c,l){for(var q=0,r=c.length-1;0<=r;r--){var k=c[r];"."===k?c.splice(r,1):".."===k?(c.splice(r,1),q++):q&&(c.splice(r,1),q--)}if(l)for(;q--;q)c.unshift("..");return c}function t(c,l){if(c.filter)return c.filter(l);for(var q=[],r=0;r<c.length;r++)l(c[r],
|
||||
r,c)&&q.push(c[r]);return q}var m=/^(\/?|)([\s\S]*?)((?:\.{1,2}|[^\/]+?|)(\.[^.\/]*|))(?:[\/]*)$/;A.resolve=function(){for(var c="",l=!1,q=arguments.length-1;-1<=q&&!l;q--){var r=0<=q?arguments[q]:e.cwd();if("string"!==typeof r)throw new TypeError("Arguments to path.resolve must be strings");r&&(c=r+"/"+c,l="/"===r.charAt(0))}c=p(t(c.split("/"),function(k){return!!k}),!l).join("/");return(l?"/":"")+c||"."};A.normalize=function(c){var l=A.isAbsolute(c),q="/"===f(c,-1);(c=p(t(c.split("/"),function(r){return!!r}),
|
||||
!l).join("/"))||l||(c=".");c&&q&&(c+="/");return(l?"/":"")+c};A.isAbsolute=function(c){return"/"===c.charAt(0)};A.join=function(){var c=Array.prototype.slice.call(arguments,0);return A.normalize(t(c,function(l,q){if("string"!==typeof l)throw new TypeError("Arguments to path.join must be strings");return l}).join("/"))};A.relative=function(c,l){function q(n){for(var v=0;v<n.length&&""===n[v];v++);for(var z=n.length-1;0<=z&&""===n[z];z--);return v>z?[]:n.slice(v,z-v+1)}c=A.resolve(c).substr(1);l=A.resolve(l).substr(1);
|
||||
for(var r=q(c.split("/")),k=q(l.split("/")),u=Math.min(r.length,k.length),d=u,g=0;g<u;g++)if(r[g]!==k[g]){d=g;break}u=[];for(g=d;g<r.length;g++)u.push("..");u=u.concat(k.slice(d));return u.join("/")};A.sep="/";A.delimiter=":";A.dirname=function(c){var l=m.exec(c).slice(1);c=l[0];l=l[1];if(!c&&!l)return".";l&&(l=l.substr(0,l.length-1));return c+l};A.basename=function(c,l){var q=m.exec(c).slice(1)[2];l&&q.substr(-1*l.length)===l&&(q=q.substr(0,q.length-l.length));return q};A.extname=function(c){return m.exec(c).slice(1)[3]};
|
||||
var f="b"==="ab".substr(-1)?function(c,l,q){return c.substr(l,q)}:function(c,l,q){0>l&&(l=c.length+l);return c.substr(l,q)}}).call(this,C("g5I+bs"))},{"g5I+bs":9}],9:[function(C,J,A){function e(){}C=J.exports={};C.nextTick=function(){if("undefined"!==typeof window&&window.setImmediate)return function(t){return window.setImmediate(t)};if("undefined"!==typeof window&&window.postMessage&&window.addEventListener){var p=[];window.addEventListener("message",function(t){var m=t.source;m!==window&&null!==
|
||||
m||"process-tick"!==t.data||(t.stopPropagation(),0<p.length&&p.shift()())},!0);return function(t){p.push(t);window.postMessage("process-tick","*")}}return function(t){setTimeout(t,0)}}();C.title="browser";C.browser=!0;C.env={};C.argv=[];C.on=e;C.addListener=e;C.once=e;C.off=e;C.removeListener=e;C.removeAllListeners=e;C.emit=e;C.binding=function(p){throw Error("process.binding is not supported");};C.cwd=function(){return"/"};C.chdir=function(p){throw Error("process.chdir is not supported");}},{}],
|
||||
10:[function(C,J,A){function e(){this._array=[];this._set=m?new Map:Object.create(null)}var p=C("./util"),t=Object.prototype.hasOwnProperty,m="undefined"!==typeof Map;e.fromArray=function(f,c){for(var l=new e,q=0,r=f.length;q<r;q++)l.add(f[q],c);return l};e.prototype.size=function(){return m?this._set.size:Object.getOwnPropertyNames(this._set).length};e.prototype.add=function(f,c){var l=m?f:p.toSetString(f),q=m?this.has(f):t.call(this._set,l),r=this._array.length;q&&!c||this._array.push(f);q||(m?
|
||||
this._set.set(f,r):this._set[l]=r)};e.prototype.has=function(f){if(m)return this._set.has(f);f=p.toSetString(f);return t.call(this._set,f)};e.prototype.indexOf=function(f){if(m){var c=this._set.get(f);if(0<=c)return c}else if(c=p.toSetString(f),t.call(this._set,c))return this._set[c];throw Error('"'+f+'" is not in the set.');};e.prototype.at=function(f){if(0<=f&&f<this._array.length)return this._array[f];throw Error("No element indexed by "+f);};e.prototype.toArray=function(){return this._array.slice()};
|
||||
A.ArraySet=e},{"./util":19}],11:[function(C,J,A){var e=C("./base64");A.encode=function(p){var t="",m=0>p?(-p<<1)+1:p<<1;do p=m&31,m>>>=5,0<m&&(p|=32),t+=e.encode(p);while(0<m);return t};A.decode=function(p,t,m){var f=p.length,c=0,l=0;do{if(t>=f)throw Error("Expected more digits in base 64 VLQ value.");var q=e.decode(p.charCodeAt(t++));if(-1===q)throw Error("Invalid base64 digit: "+p.charAt(t-1));var r=!!(q&32);q&=31;c+=q<<l;l+=5}while(r);p=c>>1;m.value=1===(c&1)?-p:p;m.rest=t}},{"./base64":12}],12:[function(C,
|
||||
J,A){var e="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".split("");A.encode=function(p){if(0<=p&&p<e.length)return e[p];throw new TypeError("Must be between 0 and 63: "+p);};A.decode=function(p){return 65<=p&&90>=p?p-65:97<=p&&122>=p?p-97+26:48<=p&&57>=p?p-48+52:43==p?62:47==p?63:-1}},{}],13:[function(C,J,A){function e(p,t,m,f,c,l){var q=Math.floor((t-p)/2)+p,r=c(m,f[q],!0);return 0===r?q:0<r?1<t-q?e(q,t,m,f,c,l):l==A.LEAST_UPPER_BOUND?t<f.length?t:-1:q:1<q-p?e(p,q,m,f,c,l):l==
|
||||
A.LEAST_UPPER_BOUND?q:0>p?-1:p}A.GREATEST_LOWER_BOUND=1;A.LEAST_UPPER_BOUND=2;A.search=function(p,t,m,f){if(0===t.length)return-1;p=e(-1,t.length,p,t,m,f||A.GREATEST_LOWER_BOUND);if(0>p)return-1;for(;0<=p-1&&0===m(t[p],t[p-1],!0);)--p;return p}},{}],14:[function(C,J,A){function e(){this._array=[];this._sorted=!0;this._last={generatedLine:-1,generatedColumn:0}}var p=C("./util");e.prototype.unsortedForEach=function(t,m){this._array.forEach(t,m)};e.prototype.add=function(t){var m=this._last,f=m.generatedLine,
|
||||
c=t.generatedLine,l=m.generatedColumn,q=t.generatedColumn;c>f||c==f&&q>=l||0>=p.compareByGeneratedPositionsInflated(m,t)?this._last=t:this._sorted=!1;this._array.push(t)};e.prototype.toArray=function(){this._sorted||(this._array.sort(p.compareByGeneratedPositionsInflated),this._sorted=!0);return this._array};A.MappingList=e},{"./util":19}],15:[function(C,J,A){function e(t,m,f){var c=t[m];t[m]=t[f];t[f]=c}function p(t,m,f,c){if(f<c){var l=f-1;e(t,Math.round(f+Math.random()*(c-f)),c);for(var q=t[c],
|
||||
r=f;r<c;r++)0>=m(t[r],q)&&(l+=1,e(t,l,r));e(t,l+1,r);l+=1;p(t,m,f,l-1);p(t,m,l+1,c)}}A.quickSort=function(t,m){p(t,m,0,t.length-1)}},{}],16:[function(C,J,A){function e(k,u){var d=k;"string"===typeof k&&(d=f.parseSourceMapInput(k));return null!=d.sections?new m(d,u):new p(d,u)}function p(k,u){var d=k;"string"===typeof k&&(d=f.parseSourceMapInput(k));var g=f.getArg(d,"version"),n=f.getArg(d,"sources"),v=f.getArg(d,"names",[]),z=f.getArg(d,"sourceRoot",null),G=f.getArg(d,"sourcesContent",null),D=f.getArg(d,
|
||||
"mappings");d=f.getArg(d,"file",null);if(g!=this._version)throw Error("Unsupported version: "+g);z&&(z=f.normalize(z));n=n.map(String).map(f.normalize).map(function(L){return z&&f.isAbsolute(z)&&f.isAbsolute(L)?f.relative(z,L):L});this._names=l.fromArray(v.map(String),!0);this._sources=l.fromArray(n,!0);this.sourceRoot=z;this.sourcesContent=G;this._mappings=D;this._sourceMapURL=u;this.file=d}function t(){this.generatedColumn=this.generatedLine=0;this.name=this.originalColumn=this.originalLine=this.source=
|
||||
null}function m(k,u){var d=k;"string"===typeof k&&(d=f.parseSourceMapInput(k));var g=f.getArg(d,"version");d=f.getArg(d,"sections");if(g!=this._version)throw Error("Unsupported version: "+g);this._sources=new l;this._names=new l;var n={line:-1,column:0};this._sections=d.map(function(v){if(v.url)throw Error("Support for url field in sections not implemented.");var z=f.getArg(v,"offset"),G=f.getArg(z,"line"),D=f.getArg(z,"column");if(G<n.line||G===n.line&&D<n.column)throw Error("Section offsets must be ordered and non-overlapping.");
|
||||
n=z;return{generatedOffset:{generatedLine:G+1,generatedColumn:D+1},consumer:new e(f.getArg(v,"map"),u)}})}var f=C("./util"),c=C("./binary-search"),l=C("./array-set").ArraySet,q=C("./base64-vlq"),r=C("./quick-sort").quickSort;e.fromSourceMap=function(k){return p.fromSourceMap(k)};e.prototype._version=3;e.prototype.__generatedMappings=null;Object.defineProperty(e.prototype,"_generatedMappings",{configurable:!0,enumerable:!0,get:function(){this.__generatedMappings||this._parseMappings(this._mappings,
|
||||
this.sourceRoot);return this.__generatedMappings}});e.prototype.__originalMappings=null;Object.defineProperty(e.prototype,"_originalMappings",{configurable:!0,enumerable:!0,get:function(){this.__originalMappings||this._parseMappings(this._mappings,this.sourceRoot);return this.__originalMappings}});e.prototype._charIsMappingSeparator=function(k,u){var d=k.charAt(u);return";"===d||","===d};e.prototype._parseMappings=function(k,u){throw Error("Subclasses must implement _parseMappings");};e.GENERATED_ORDER=
|
||||
1;e.ORIGINAL_ORDER=2;e.GREATEST_LOWER_BOUND=1;e.LEAST_UPPER_BOUND=2;e.prototype.eachMapping=function(k,u,d){u=u||null;switch(d||e.GENERATED_ORDER){case e.GENERATED_ORDER:d=this._generatedMappings;break;case e.ORIGINAL_ORDER:d=this._originalMappings;break;default:throw Error("Unknown order of iteration.");}var g=this.sourceRoot;d.map(function(n){var v=null===n.source?null:this._sources.at(n.source);v=f.computeSourceURL(g,v,this._sourceMapURL);return{source:v,generatedLine:n.generatedLine,generatedColumn:n.generatedColumn,
|
||||
originalLine:n.originalLine,originalColumn:n.originalColumn,name:null===n.name?null:this._names.at(n.name)}},this).forEach(k,u)};e.prototype.allGeneratedPositionsFor=function(k){var u=f.getArg(k,"line"),d={source:f.getArg(k,"source"),originalLine:u,originalColumn:f.getArg(k,"column",0)};null!=this.sourceRoot&&(d.source=f.relative(this.sourceRoot,d.source));if(!this._sources.has(d.source))return[];d.source=this._sources.indexOf(d.source);var g=[];d=this._findMapping(d,this._originalMappings,"originalLine",
|
||||
"originalColumn",f.compareByOriginalPositions,c.LEAST_UPPER_BOUND);if(0<=d){var n=this._originalMappings[d];if(void 0===k.column)for(u=n.originalLine;n&&n.originalLine===u;)g.push({line:f.getArg(n,"generatedLine",null),column:f.getArg(n,"generatedColumn",null),lastColumn:f.getArg(n,"lastGeneratedColumn",null)}),n=this._originalMappings[++d];else for(k=n.originalColumn;n&&n.originalLine===u&&n.originalColumn==k;)g.push({line:f.getArg(n,"generatedLine",null),column:f.getArg(n,"generatedColumn",null),
|
||||
lastColumn:f.getArg(n,"lastGeneratedColumn",null)}),n=this._originalMappings[++d]}return g};A.SourceMapConsumer=e;p.prototype=Object.create(e.prototype);p.prototype.consumer=e;p.fromSourceMap=function(k,u){var d=Object.create(p.prototype),g=d._names=l.fromArray(k._names.toArray(),!0),n=d._sources=l.fromArray(k._sources.toArray(),!0);d.sourceRoot=k._sourceRoot;d.sourcesContent=k._generateSourcesContent(d._sources.toArray(),d.sourceRoot);d.file=k._file;d._sourceMapURL=u;for(var v=k._mappings.toArray().slice(),
|
||||
z=d.__generatedMappings=[],G=d.__originalMappings=[],D=0,L=v.length;D<L;D++){var a=v[D],b=new t;b.generatedLine=a.generatedLine;b.generatedColumn=a.generatedColumn;a.source&&(b.source=n.indexOf(a.source),b.originalLine=a.originalLine,b.originalColumn=a.originalColumn,a.name&&(b.name=g.indexOf(a.name)),G.push(b));z.push(b)}r(d.__originalMappings,f.compareByOriginalPositions);return d};p.prototype._version=3;Object.defineProperty(p.prototype,"sources",{get:function(){return this._sources.toArray().map(function(k){return f.computeSourceURL(this.sourceRoot,
|
||||
k,this._sourceMapURL)},this)}});p.prototype._parseMappings=function(k,u){for(var d=1,g=0,n=0,v=0,z=0,G=0,D=k.length,L=0,a={},b={},h=[],w=[],y,I,K,N,P;L<D;)if(";"===k.charAt(L))d++,L++,g=0;else if(","===k.charAt(L))L++;else{y=new t;y.generatedLine=d;for(N=L;N<D&&!this._charIsMappingSeparator(k,N);N++);I=k.slice(L,N);if(K=a[I])L+=I.length;else{for(K=[];L<N;)q.decode(k,L,b),P=b.value,L=b.rest,K.push(P);if(2===K.length)throw Error("Found a source, but no line and column");if(3===K.length)throw Error("Found a source and line, but no column");
|
||||
a[I]=K}y.generatedColumn=g+K[0];g=y.generatedColumn;1<K.length&&(y.source=z+K[1],z+=K[1],y.originalLine=n+K[2],n=y.originalLine,y.originalLine+=1,y.originalColumn=v+K[3],v=y.originalColumn,4<K.length&&(y.name=G+K[4],G+=K[4]));w.push(y);"number"===typeof y.originalLine&&h.push(y)}r(w,f.compareByGeneratedPositionsDeflated);this.__generatedMappings=w;r(h,f.compareByOriginalPositions);this.__originalMappings=h};p.prototype._findMapping=function(k,u,d,g,n,v){if(0>=k[d])throw new TypeError("Line must be greater than or equal to 1, got "+
|
||||
k[d]);if(0>k[g])throw new TypeError("Column must be greater than or equal to 0, got "+k[g]);return c.search(k,u,n,v)};p.prototype.computeColumnSpans=function(){for(var k=0;k<this._generatedMappings.length;++k){var u=this._generatedMappings[k];if(k+1<this._generatedMappings.length){var d=this._generatedMappings[k+1];if(u.generatedLine===d.generatedLine){u.lastGeneratedColumn=d.generatedColumn-1;continue}}u.lastGeneratedColumn=Infinity}};p.prototype.originalPositionFor=function(k){var u={generatedLine:f.getArg(k,
|
||||
"line"),generatedColumn:f.getArg(k,"column")};k=this._findMapping(u,this._generatedMappings,"generatedLine","generatedColumn",f.compareByGeneratedPositionsDeflated,f.getArg(k,"bias",e.GREATEST_LOWER_BOUND));if(0<=k&&(k=this._generatedMappings[k],k.generatedLine===u.generatedLine)){u=f.getArg(k,"source",null);null!==u&&(u=this._sources.at(u),u=f.computeSourceURL(this.sourceRoot,u,this._sourceMapURL));var d=f.getArg(k,"name",null);null!==d&&(d=this._names.at(d));return{source:u,line:f.getArg(k,"originalLine",
|
||||
null),column:f.getArg(k,"originalColumn",null),name:d}}return{source:null,line:null,column:null,name:null}};p.prototype.hasContentsOfAllSources=function(){return this.sourcesContent?this.sourcesContent.length>=this._sources.size()&&!this.sourcesContent.some(function(k){return null==k}):!1};p.prototype.sourceContentFor=function(k,u){if(!this.sourcesContent)return null;var d=k;null!=this.sourceRoot&&(d=f.relative(this.sourceRoot,d));if(this._sources.has(d))return this.sourcesContent[this._sources.indexOf(d)];
|
||||
var g=this.sources,n;for(n=0;n<g.length;++n)if(g[n]==k)return this.sourcesContent[n];var v;if(null!=this.sourceRoot&&(v=f.urlParse(this.sourceRoot))){g=d.replace(/^file:\/\//,"");if("file"==v.scheme&&this._sources.has(g))return this.sourcesContent[this._sources.indexOf(g)];if((!v.path||"/"==v.path)&&this._sources.has("/"+d))return this.sourcesContent[this._sources.indexOf("/"+d)]}if(u)return null;throw Error('"'+d+'" is not in the SourceMap.');};p.prototype.generatedPositionFor=function(k){var u=
|
||||
f.getArg(k,"source");null!=this.sourceRoot&&(u=f.relative(this.sourceRoot,u));if(!this._sources.has(u))return{line:null,column:null,lastColumn:null};u=this._sources.indexOf(u);u={source:u,originalLine:f.getArg(k,"line"),originalColumn:f.getArg(k,"column")};k=this._findMapping(u,this._originalMappings,"originalLine","originalColumn",f.compareByOriginalPositions,f.getArg(k,"bias",e.GREATEST_LOWER_BOUND));return 0<=k&&(k=this._originalMappings[k],k.source===u.source)?{line:f.getArg(k,"generatedLine",
|
||||
null),column:f.getArg(k,"generatedColumn",null),lastColumn:f.getArg(k,"lastGeneratedColumn",null)}:{line:null,column:null,lastColumn:null}};A.BasicSourceMapConsumer=p;m.prototype=Object.create(e.prototype);m.prototype.constructor=e;m.prototype._version=3;Object.defineProperty(m.prototype,"sources",{get:function(){for(var k=[],u=0;u<this._sections.length;u++)for(var d=0;d<this._sections[u].consumer.sources.length;d++)k.push(this._sections[u].consumer.sources[d]);return k}});m.prototype.originalPositionFor=
|
||||
function(k){var u={generatedLine:f.getArg(k,"line"),generatedColumn:f.getArg(k,"column")},d=c.search(u,this._sections,function(g,n){var v=g.generatedLine-n.generatedOffset.generatedLine;return v?v:g.generatedColumn-n.generatedOffset.generatedColumn});return(d=this._sections[d])?d.consumer.originalPositionFor({line:u.generatedLine-(d.generatedOffset.generatedLine-1),column:u.generatedColumn-(d.generatedOffset.generatedLine===u.generatedLine?d.generatedOffset.generatedColumn-1:0),bias:k.bias}):{source:null,
|
||||
line:null,column:null,name:null}};m.prototype.hasContentsOfAllSources=function(){return this._sections.every(function(k){return k.consumer.hasContentsOfAllSources()})};m.prototype.sourceContentFor=function(k,u){for(var d=0;d<this._sections.length;d++){var g=this._sections[d].consumer.sourceContentFor(k,!0);if(g)return g}if(u)return null;throw Error('"'+k+'" is not in the SourceMap.');};m.prototype.generatedPositionFor=function(k){for(var u=0;u<this._sections.length;u++){var d=this._sections[u];if(-1!==
|
||||
d.consumer.sources.indexOf(f.getArg(k,"source"))){var g=d.consumer.generatedPositionFor(k);if(g)return{line:g.line+(d.generatedOffset.generatedLine-1),column:g.column+(d.generatedOffset.generatedLine===g.line?d.generatedOffset.generatedColumn-1:0)}}}return{line:null,column:null}};m.prototype._parseMappings=function(k,u){this.__generatedMappings=[];this.__originalMappings=[];for(var d=0;d<this._sections.length;d++)for(var g=this._sections[d],n=g.consumer._generatedMappings,v=0;v<n.length;v++){var z=
|
||||
n[v],G=g.consumer._sources.at(z.source);G=f.computeSourceURL(g.consumer.sourceRoot,G,this._sourceMapURL);this._sources.add(G);G=this._sources.indexOf(G);var D=null;z.name&&(D=g.consumer._names.at(z.name),this._names.add(D),D=this._names.indexOf(D));z={source:G,generatedLine:z.generatedLine+(g.generatedOffset.generatedLine-1),generatedColumn:z.generatedColumn+(g.generatedOffset.generatedLine===z.generatedLine?g.generatedOffset.generatedColumn-1:0),originalLine:z.originalLine,originalColumn:z.originalColumn,
|
||||
name:D};this.__generatedMappings.push(z);"number"===typeof z.originalLine&&this.__originalMappings.push(z)}r(this.__generatedMappings,f.compareByGeneratedPositionsDeflated);r(this.__originalMappings,f.compareByOriginalPositions)};A.IndexedSourceMapConsumer=m},{"./array-set":10,"./base64-vlq":11,"./binary-search":13,"./quick-sort":15,"./util":19}],17:[function(C,J,A){function e(c){c||(c={});this._file=t.getArg(c,"file",null);this._sourceRoot=t.getArg(c,"sourceRoot",null);this._skipValidation=t.getArg(c,
|
||||
"skipValidation",!1);this._sources=new m;this._names=new m;this._mappings=new f;this._sourcesContents=null}var p=C("./base64-vlq"),t=C("./util"),m=C("./array-set").ArraySet,f=C("./mapping-list").MappingList;e.prototype._version=3;e.fromSourceMap=function(c){var l=c.sourceRoot,q=new e({file:c.file,sourceRoot:l});c.eachMapping(function(r){var k={generated:{line:r.generatedLine,column:r.generatedColumn}};null!=r.source&&(k.source=r.source,null!=l&&(k.source=t.relative(l,k.source)),k.original={line:r.originalLine,
|
||||
column:r.originalColumn},null!=r.name&&(k.name=r.name));q.addMapping(k)});c.sources.forEach(function(r){var k=r;null!==l&&(k=t.relative(l,r));q._sources.has(k)||q._sources.add(k);k=c.sourceContentFor(r);null!=k&&q.setSourceContent(r,k)});return q};e.prototype.addMapping=function(c){var l=t.getArg(c,"generated"),q=t.getArg(c,"original",null),r=t.getArg(c,"source",null);c=t.getArg(c,"name",null);this._skipValidation||this._validateMapping(l,q,r,c);null!=r&&(r=String(r),this._sources.has(r)||this._sources.add(r));
|
||||
null!=c&&(c=String(c),this._names.has(c)||this._names.add(c));this._mappings.add({generatedLine:l.line,generatedColumn:l.column,originalLine:null!=q&&q.line,originalColumn:null!=q&&q.column,source:r,name:c})};e.prototype.setSourceContent=function(c,l){var q=c;null!=this._sourceRoot&&(q=t.relative(this._sourceRoot,q));null!=l?(this._sourcesContents||(this._sourcesContents=Object.create(null)),this._sourcesContents[t.toSetString(q)]=l):this._sourcesContents&&(delete this._sourcesContents[t.toSetString(q)],
|
||||
0===Object.keys(this._sourcesContents).length&&(this._sourcesContents=null))};e.prototype.applySourceMap=function(c,l,q){var r=l;if(null==l){if(null==c.file)throw Error('SourceMapGenerator.prototype.applySourceMap requires either an explicit source file, or the source map\'s "file" property. Both were omitted.');r=c.file}var k=this._sourceRoot;null!=k&&(r=t.relative(k,r));var u=new m,d=new m;this._mappings.unsortedForEach(function(g){if(g.source===r&&null!=g.originalLine){var n=c.originalPositionFor({line:g.originalLine,
|
||||
column:g.originalColumn});null!=n.source&&(g.source=n.source,null!=q&&(g.source=t.join(q,g.source)),null!=k&&(g.source=t.relative(k,g.source)),g.originalLine=n.line,g.originalColumn=n.column,null!=n.name&&(g.name=n.name))}n=g.source;null==n||u.has(n)||u.add(n);g=g.name;null==g||d.has(g)||d.add(g)},this);this._sources=u;this._names=d;c.sources.forEach(function(g){var n=c.sourceContentFor(g);null!=n&&(null!=q&&(g=t.join(q,g)),null!=k&&(g=t.relative(k,g)),this.setSourceContent(g,n))},this)};e.prototype._validateMapping=
|
||||
function(c,l,q,r){if(l&&"number"!==typeof l.line&&"number"!==typeof l.column)throw Error("original.line and original.column are not numbers -- you probably meant to omit the original mapping entirely and only map the generated position. If so, pass null for the original mapping instead of an object with empty or null values.");if(!(c&&"line"in c&&"column"in c&&0<c.line&&0<=c.column&&!l&&!q&&!r||c&&"line"in c&&"column"in c&&l&&"line"in l&&"column"in l&&0<c.line&&0<=c.column&&0<l.line&&0<=l.column&&
|
||||
q))throw Error("Invalid mapping: "+JSON.stringify({generated:c,source:q,original:l,name:r}));};e.prototype._serializeMappings=function(){for(var c=0,l=1,q=0,r=0,k=0,u=0,d="",g,n,v,z=this._mappings.toArray(),G=0,D=z.length;G<D;G++){n=z[G];g="";if(n.generatedLine!==l)for(c=0;n.generatedLine!==l;)g+=";",l++;else if(0<G){if(!t.compareByGeneratedPositionsInflated(n,z[G-1]))continue;g+=","}g+=p.encode(n.generatedColumn-c);c=n.generatedColumn;null!=n.source&&(v=this._sources.indexOf(n.source),g+=p.encode(v-
|
||||
u),u=v,g+=p.encode(n.originalLine-1-r),r=n.originalLine-1,g+=p.encode(n.originalColumn-q),q=n.originalColumn,null!=n.name&&(n=this._names.indexOf(n.name),g+=p.encode(n-k),k=n));d+=g}return d};e.prototype._generateSourcesContent=function(c,l){return c.map(function(q){if(!this._sourcesContents)return null;null!=l&&(q=t.relative(l,q));q=t.toSetString(q);return Object.prototype.hasOwnProperty.call(this._sourcesContents,q)?this._sourcesContents[q]:null},this)};e.prototype.toJSON=function(){var c={version:this._version,
|
||||
sources:this._sources.toArray(),names:this._names.toArray(),mappings:this._serializeMappings()};null!=this._file&&(c.file=this._file);null!=this._sourceRoot&&(c.sourceRoot=this._sourceRoot);this._sourcesContents&&(c.sourcesContent=this._generateSourcesContent(c.sources,c.sourceRoot));return c};e.prototype.toString=function(){return JSON.stringify(this.toJSON())};A.SourceMapGenerator=e},{"./array-set":10,"./base64-vlq":11,"./mapping-list":14,"./util":19}],18:[function(C,J,A){function e(f,c,l,q,r){this.children=
|
||||
[];this.sourceContents={};this.line=null==f?null:f;this.column=null==c?null:c;this.source=null==l?null:l;this.name=null==r?null:r;this.$$$isSourceNode$$$=!0;null!=q&&this.add(q)}var p=C("./source-map-generator").SourceMapGenerator,t=C("./util"),m=/(\r?\n)/;e.fromStringWithSourceMap=function(f,c,l){function q(z,G){if(null===z||void 0===z.source)r.add(G);else{var D=l?t.join(l,z.source):z.source;r.add(new e(z.originalLine,z.originalColumn,D,G,z.name))}}var r=new e,k=f.split(m),u=0,d=function(){var z=
|
||||
u<k.length?k[u++]:void 0,G=(u<k.length?k[u++]:void 0)||"";return z+G},g=1,n=0,v=null;c.eachMapping(function(z){if(null!==v)if(g<z.generatedLine)q(v,d()),g++,n=0;else{var G=k[u]||"",D=G.substr(0,z.generatedColumn-n);k[u]=G.substr(z.generatedColumn-n);n=z.generatedColumn;q(v,D);v=z;return}for(;g<z.generatedLine;)r.add(d()),g++;n<z.generatedColumn&&(G=k[u]||"",r.add(G.substr(0,z.generatedColumn)),k[u]=G.substr(z.generatedColumn),n=z.generatedColumn);v=z},this);u<k.length&&(v&&q(v,d()),r.add(k.splice(u).join("")));
|
||||
c.sources.forEach(function(z){var G=c.sourceContentFor(z);null!=G&&(null!=l&&(z=t.join(l,z)),r.setSourceContent(z,G))});return r};e.prototype.add=function(f){if(Array.isArray(f))f.forEach(function(c){this.add(c)},this);else if(f.$$$isSourceNode$$$||"string"===typeof f)f&&this.children.push(f);else throw new TypeError("Expected a SourceNode, string, or an array of SourceNodes and strings. Got "+f);return this};e.prototype.prepend=function(f){if(Array.isArray(f))for(var c=f.length-1;0<=c;c--)this.prepend(f[c]);
|
||||
else if(f.$$$isSourceNode$$$||"string"===typeof f)this.children.unshift(f);else throw new TypeError("Expected a SourceNode, string, or an array of SourceNodes and strings. Got "+f);return this};e.prototype.walk=function(f){for(var c,l=0,q=this.children.length;l<q;l++)c=this.children[l],c.$$$isSourceNode$$$?c.walk(f):""!==c&&f(c,{source:this.source,line:this.line,column:this.column,name:this.name})};e.prototype.join=function(f){var c,l=this.children.length;if(0<l){var q=[];for(c=0;c<l-1;c++)q.push(this.children[c]),
|
||||
q.push(f);q.push(this.children[c]);this.children=q}return this};e.prototype.replaceRight=function(f,c){var l=this.children[this.children.length-1];l.$$$isSourceNode$$$?l.replaceRight(f,c):"string"===typeof l?this.children[this.children.length-1]=l.replace(f,c):this.children.push("".replace(f,c));return this};e.prototype.setSourceContent=function(f,c){this.sourceContents[t.toSetString(f)]=c};e.prototype.walkSourceContents=function(f){for(var c=0,l=this.children.length;c<l;c++)this.children[c].$$$isSourceNode$$$&&
|
||||
this.children[c].walkSourceContents(f);var q=Object.keys(this.sourceContents);c=0;for(l=q.length;c<l;c++)f(t.fromSetString(q[c]),this.sourceContents[q[c]])};e.prototype.toString=function(){var f="";this.walk(function(c){f+=c});return f};e.prototype.toStringWithSourceMap=function(f){var c="",l=1,q=0,r=new p(f),k=!1,u=null,d=null,g=null,n=null;this.walk(function(v,z){c+=v;null!==z.source&&null!==z.line&&null!==z.column?(u===z.source&&d===z.line&&g===z.column&&n===z.name||r.addMapping({source:z.source,
|
||||
original:{line:z.line,column:z.column},generated:{line:l,column:q},name:z.name}),u=z.source,d=z.line,g=z.column,n=z.name,k=!0):k&&(r.addMapping({generated:{line:l,column:q}}),u=null,k=!1);for(var G=0,D=v.length;G<D;G++)10===v.charCodeAt(G)?(l++,q=0,G+1===D?(u=null,k=!1):k&&r.addMapping({source:z.source,original:{line:z.line,column:z.column},generated:{line:l,column:q},name:z.name})):q++});this.walkSourceContents(function(v,z){r.setSourceContent(v,z)});return{code:c,map:r}};A.SourceNode=e},{"./source-map-generator":17,
|
||||
"./util":19}],19:[function(C,J,A){function e(d){return(d=d.match(k))?{scheme:d[1],auth:d[2],host:d[3],port:d[4],path:d[5]}:null}function p(d){var g="";d.scheme&&(g+=d.scheme+":");g+="//";d.auth&&(g+=d.auth+"@");d.host&&(g+=d.host);d.port&&(g+=":"+d.port);d.path&&(g+=d.path);return g}function t(d){var g=d,n=e(d);if(n){if(!n.path)return d;g=n.path}d=A.isAbsolute(g);g=g.split(/\/+/);for(var v,z=0,G=g.length-1;0<=G;G--)v=g[G],"."===v?g.splice(G,1):".."===v?z++:0<z&&(""===v?(g.splice(G+1,z),z=0):(g.splice(G,
|
||||
2),z--));g=g.join("/");""===g&&(g=d?"/":".");return n?(n.path=g,p(n)):g}function m(d,g){""===d&&(d=".");""===g&&(g=".");var n=e(g),v=e(d);v&&(d=v.path||"/");if(n&&!n.scheme)return v&&(n.scheme=v.scheme),p(n);if(n||g.match(u))return g;if(v&&!v.host&&!v.path)return v.host=g,p(v);n="/"===g.charAt(0)?g:t(d.replace(/\/+$/,"")+"/"+g);return v?(v.path=n,p(v)):n}function f(d){return d}function c(d){return q(d)?"$"+d:d}function l(d){return q(d)?d.slice(1):d}function q(d){if(!d)return!1;var g=d.length;if(9>
|
||||
g||95!==d.charCodeAt(g-1)||95!==d.charCodeAt(g-2)||111!==d.charCodeAt(g-3)||116!==d.charCodeAt(g-4)||111!==d.charCodeAt(g-5)||114!==d.charCodeAt(g-6)||112!==d.charCodeAt(g-7)||95!==d.charCodeAt(g-8)||95!==d.charCodeAt(g-9))return!1;for(g-=10;0<=g;g--)if(36!==d.charCodeAt(g))return!1;return!0}function r(d,g){return d===g?0:null===d?1:null===g?-1:d>g?1:-1}A.getArg=function(d,g,n){if(g in d)return d[g];if(3===arguments.length)return n;throw Error('"'+g+'" is a required argument.');};var k=/^(?:([\w+\-.]+):)?\/\/(?:(\w+:\w+)@)?([\w.-]*)(?::(\d+))?(.*)$/,
|
||||
u=/^data:.+,.+$/;A.urlParse=e;A.urlGenerate=p;A.normalize=t;A.join=m;A.isAbsolute=function(d){return"/"===d.charAt(0)||k.test(d)};A.relative=function(d,g){""===d&&(d=".");d=d.replace(/\/$/,"");for(var n=0;0!==g.indexOf(d+"/");){var v=d.lastIndexOf("/");if(0>v)return g;d=d.slice(0,v);if(d.match(/^([^\/]+:\/)?\/*$/))return g;++n}return Array(n+1).join("../")+g.substr(d.length+1)};C=!("__proto__"in Object.create(null));A.toSetString=C?f:c;A.fromSetString=C?f:l;A.compareByOriginalPositions=function(d,
|
||||
g,n){var v=r(d.source,g.source);if(0!==v)return v;v=d.originalLine-g.originalLine;if(0!==v)return v;v=d.originalColumn-g.originalColumn;if(0!==v||n)return v;v=d.generatedColumn-g.generatedColumn;if(0!==v)return v;v=d.generatedLine-g.generatedLine;return 0!==v?v:r(d.name,g.name)};A.compareByGeneratedPositionsDeflated=function(d,g,n){var v=d.generatedLine-g.generatedLine;if(0!==v)return v;v=d.generatedColumn-g.generatedColumn;if(0!==v||n)return v;v=r(d.source,g.source);if(0!==v)return v;v=d.originalLine-
|
||||
g.originalLine;if(0!==v)return v;v=d.originalColumn-g.originalColumn;return 0!==v?v:r(d.name,g.name)};A.compareByGeneratedPositionsInflated=function(d,g){var n=d.generatedLine-g.generatedLine;if(0!==n)return n;n=d.generatedColumn-g.generatedColumn;if(0!==n)return n;n=r(d.source,g.source);if(0!==n)return n;n=d.originalLine-g.originalLine;if(0!==n)return n;n=d.originalColumn-g.originalColumn;return 0!==n?n:r(d.name,g.name)};A.parseSourceMapInput=function(d){return JSON.parse(d.replace(/^\)]}'[^\n]*\n/,
|
||||
""))};A.computeSourceURL=function(d,g,n){g=g||"";d&&("/"!==d[d.length-1]&&"/"!==g[0]&&(d+="/"),g=d+g);if(n){d=e(n);if(!d)throw Error("sourceMapURL could not be parsed");d.path&&(n=d.path.lastIndexOf("/"),0<=n&&(d.path=d.path.substring(0,n+1)));g=m(p(d),g)}return t(g)}},{}],20:[function(C,J,A){A.SourceMapGenerator=C("./lib/source-map-generator").SourceMapGenerator;A.SourceMapConsumer=C("./lib/source-map-consumer").SourceMapConsumer;A.SourceNode=C("./lib/source-node").SourceNode},{"./lib/source-map-consumer":16,
|
||||
"./lib/source-map-generator":17,"./lib/source-node":18}],21:[function(C,J,A){(function(e){function p(){return"browser"===a?!0:"node"===a?!1:"undefined"!==typeof window&&"function"===typeof XMLHttpRequest&&!(window.require&&window.module&&window.process&&"renderer"===window.process.type)}function t(x){return function(B){for(var F=0;F<x.length;F++){var E=x[F](B);if(E)return E}return null}}function m(x,B){if(!x)return B;var F=n.dirname(x),E=/^\w+:\/\/[^\/]*/.exec(F);E=E?E[0]:"";var H=F.slice(E.length);
|
||||
return E&&/^\/\w:/.test(H)?(E+="/",E+n.resolve(F.slice(E.length),B).replace(/\\/g,"/")):E+n.resolve(F.slice(E.length),B)}function f(x){var B=h[x.source];if(!B){var F=N(x.source);F?(B=h[x.source]={url:F.url,map:new g(F.map)},B.map.sourcesContent&&B.map.sources.forEach(function(E,H){var M=B.map.sourcesContent[H];if(M){var S=m(B.url,E);b[S]=M}})):B=h[x.source]={url:null,map:null}}return B&&B.map&&"function"===typeof B.map.originalPositionFor&&(F=B.map.originalPositionFor(x),null!==F.source)?(F.source=
|
||||
m(B.url,F.source),F):x}function c(x){var B=/^eval at ([^(]+) \((.+):(\d+):(\d+)\)$/.exec(x);return B?(x=f({source:B[2],line:+B[3],column:B[4]-1}),"eval at "+B[1]+" ("+x.source+":"+x.line+":"+(x.column+1)+")"):(B=/^eval at ([^(]+) \((.+)\)$/.exec(x))?"eval at "+B[1]+" ("+c(B[2])+")":x}function l(){var x="";if(this.isNative())x="native";else{var B=this.getScriptNameOrSourceURL();!B&&this.isEval()&&(x=this.getEvalOrigin(),x+=", ");x=B?x+B:x+"<anonymous>";B=this.getLineNumber();null!=B&&(x+=":"+B,(B=
|
||||
this.getColumnNumber())&&(x+=":"+B))}B="";var F=this.getFunctionName(),E=!0,H=this.isConstructor();if(this.isToplevel()||H)H?B+="new "+(F||"<anonymous>"):F?B+=F:(B+=x,E=!1);else{H=this.getTypeName();"[object Object]"===H&&(H="null");var M=this.getMethodName();F?(H&&0!=F.indexOf(H)&&(B+=H+"."),B+=F,M&&F.indexOf("."+M)!=F.length-M.length-1&&(B+=" [as "+M+"]")):B+=H+"."+(M||"<anonymous>")}E&&(B+=" ("+x+")");return B}function q(x){var B={};Object.getOwnPropertyNames(Object.getPrototypeOf(x)).forEach(function(F){B[F]=
|
||||
/^(?:is|get)/.test(F)?function(){return x[F].call(x)}:x[F]});B.toString=l;return B}function r(x,B){void 0===B&&(B={nextPosition:null,curPosition:null});if(x.isNative())return B.curPosition=null,x;var F=x.getFileName()||x.getScriptNameOrSourceURL();if(F){var E=x.getLineNumber(),H=x.getColumnNumber()-1,M=/^v(10\.1[6-9]|10\.[2-9][0-9]|10\.[0-9]{3,}|1[2-9]\d*|[2-9]\d|\d{3,}|11\.11)/,S=M.test;var V="object"===typeof e&&null!==e?e.version:"";M=S.call(M,V)?0:62;1===E&&H>M&&!p()&&!x.isEval()&&(H-=M);var O=
|
||||
f({source:F,line:E,column:H});B.curPosition=O;x=q(x);var T=x.getFunctionName;x.getFunctionName=function(){return null==B.nextPosition?T():B.nextPosition.name||T()};x.getFileName=function(){return O.source};x.getLineNumber=function(){return O.line};x.getColumnNumber=function(){return O.column+1};x.getScriptNameOrSourceURL=function(){return O.source};return x}var Q=x.isEval()&&x.getEvalOrigin();Q&&(Q=c(Q),x=q(x),x.getEvalOrigin=function(){return Q});return x}function k(x,B){L&&(b={},h={});for(var F=
|
||||
(x.name||"Error")+": "+(x.message||""),E={nextPosition:null,curPosition:null},H=[],M=B.length-1;0<=M;M--)H.push("\n at "+r(B[M],E)),E.nextPosition=E.curPosition;E.curPosition=E.nextPosition=null;return F+H.reverse().join("")}function u(x){var B=/\n at [^(]+ \((.*):(\d+):(\d+)\)/.exec(x.stack);if(B){x=B[1];var F=+B[2];B=+B[3];var E=b[x];if(!E&&v&&v.existsSync(x))try{E=v.readFileSync(x,"utf8")}catch(H){E=""}if(E&&(E=E.split(/(?:\r\n|\r|\n)/)[F-1]))return x+":"+F+"\n"+E+"\n"+Array(B).join(" ")+
|
||||
"^"}return null}function d(){var x=e.emit;e.emit=function(B){if("uncaughtException"===B){var F=arguments[1]&&arguments[1].stack,E=0<this.listeners(B).length;if(F&&!E){F=arguments[1];E=u(F);var H="object"===typeof e&&null!==e?e.stderr:void 0;H&&H._handle&&H._handle.setBlocking&&H._handle.setBlocking(!0);E&&(console.error(),console.error(E));console.error(F.stack);"object"===typeof e&&null!==e&&"function"===typeof e.exit&&e.exit(1);return}}return x.apply(this,arguments)}}var g=C("source-map").SourceMapConsumer,
|
||||
n=C("path");try{var v=C("fs");v.existsSync&&v.readFileSync||(v=null)}catch(x){}var z=C("buffer-from"),G=!1,D=!1,L=!1,a="auto",b={},h={},w=/^data:application\/json[^,]+base64,/,y=[],I=[],K=t(y);y.push(function(x){x=x.trim();/^file:/.test(x)&&(x=x.replace(/file:\/\/\/(\w:)?/,function(E,H){return H?"":"/"}));if(x in b)return b[x];var B="";try{if(v)v.existsSync(x)&&(B=v.readFileSync(x,"utf8"));else{var F=new XMLHttpRequest;F.open("GET",x,!1);F.send(null);4===F.readyState&&200===F.status&&(B=F.responseText)}}catch(E){}return b[x]=
|
||||
B});var N=t(I);I.push(function(x){a:{if(p())try{var B=new XMLHttpRequest;B.open("GET",x,!1);B.send(null);var F=B.getResponseHeader("SourceMap")||B.getResponseHeader("X-SourceMap");if(F){var E=F;break a}}catch(M){}E=K(x);B=/(?:\/\/[@#][\s]*sourceMappingURL=([^\s'"]+)[\s]*$)|(?:\/\*[@#][\s]*sourceMappingURL=([^\s*'"]+)[\s]*(?:\*\/)[\s]*$)/mg;for(var H;F=B.exec(E);)H=F;E=H?H[1]:null}if(!E)return null;w.test(E)?(H=E.slice(E.indexOf(",")+1),H=z(H,"base64").toString(),E=x):(E=m(x,E),H=K(E));return H?{url:E,
|
||||
map:H}:null});var P=y.slice(0),W=I.slice(0);A.wrapCallSite=r;A.getErrorSource=u;A.mapSourcePosition=f;A.retrieveSourceMap=N;A.install=function(x){x=x||{};if(x.environment&&(a=x.environment,-1===["node","browser","auto"].indexOf(a)))throw Error("environment "+a+" was unknown. Available options are {auto, browser, node}");x.retrieveFile&&(x.overrideRetrieveFile&&(y.length=0),y.unshift(x.retrieveFile));x.retrieveSourceMap&&(x.overrideRetrieveSourceMap&&(I.length=0),I.unshift(x.retrieveSourceMap));if(x.hookRequire&&
|
||||
!p()){var B=J.require("module"),F=B.prototype._compile;F.__sourceMapSupport||(B.prototype._compile=function(E,H){b[H]=E;h[H]=void 0;return F.call(this,E,H)},B.prototype._compile.__sourceMapSupport=!0)}L||(L="emptyCacheBetweenOperations"in x?x.emptyCacheBetweenOperations:!1);G||(G=!0,Error.prepareStackTrace=k);if(!D){x="handleUncaughtExceptions"in x?x.handleUncaughtExceptions:!0;try{!1===J.require("worker_threads").isMainThread&&(x=!1)}catch(E){}x&&"object"===typeof e&&null!==e&&"function"===typeof e.on&&
|
||||
(D=!0,d())}};A.resetRetrieveHandlers=function(){y.length=0;I.length=0;y=P.slice(0);I=W.slice(0);N=t(I);K=t(y)}}).call(this,C("g5I+bs"))},{"buffer-from":4,fs:3,"g5I+bs":9,path:8,"source-map":20}]},{},[1]);return R});
|
||||
19
node_modules/@cspotcode/source-map-support/node_modules/@jridgewell/trace-mapping/LICENSE
generated
vendored
Normal file
19
node_modules/@cspotcode/source-map-support/node_modules/@jridgewell/trace-mapping/LICENSE
generated
vendored
Normal file
@ -0,0 +1,19 @@
|
||||
Copyright 2022 Justin Ridgewell <justin@ridgewell.name>
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
193
node_modules/@cspotcode/source-map-support/node_modules/@jridgewell/trace-mapping/README.md
generated
vendored
Normal file
193
node_modules/@cspotcode/source-map-support/node_modules/@jridgewell/trace-mapping/README.md
generated
vendored
Normal file
@ -0,0 +1,193 @@
|
||||
# @jridgewell/trace-mapping
|
||||
|
||||
> Trace the original position through a source map
|
||||
|
||||
`trace-mapping` allows you to take the line and column of an output file and trace it to the
|
||||
original location in the source file through a source map.
|
||||
|
||||
You may already be familiar with the [`source-map`][source-map] package's `SourceMapConsumer`. This
|
||||
provides the same `originalPositionFor` and `generatedPositionFor` API, without requiring WASM.
|
||||
|
||||
## Installation
|
||||
|
||||
```sh
|
||||
npm install @jridgewell/trace-mapping
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
```typescript
|
||||
import { TraceMap, originalPositionFor, generatedPositionFor } from '@jridgewell/trace-mapping';
|
||||
|
||||
const tracer = new TraceMap({
|
||||
version: 3,
|
||||
sources: ['input.js'],
|
||||
names: ['foo'],
|
||||
mappings: 'KAyCIA',
|
||||
});
|
||||
|
||||
// Lines start at line 1, columns at column 0.
|
||||
const traced = originalPositionFor(tracer, { line: 1, column: 5 });
|
||||
assert.deepEqual(traced, {
|
||||
source: 'input.js',
|
||||
line: 42,
|
||||
column: 4,
|
||||
name: 'foo',
|
||||
});
|
||||
|
||||
const generated = generatedPositionFor(tracer, {
|
||||
source: 'input.js',
|
||||
line: 42,
|
||||
column: 4,
|
||||
});
|
||||
assert.deepEqual(generated, {
|
||||
line: 1,
|
||||
column: 5,
|
||||
});
|
||||
```
|
||||
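For comparison, the same lookup with the `source-map` package's `SourceMapConsumer` looks roughly like this. This is only a sketch, assuming `source-map` 0.6.x's synchronous constructor (0.7.x makes construction async and loads a WASM module):

```typescript
import { SourceMapConsumer } from 'source-map';

// Same raw map as above. Cast loosely to keep the sketch short; see the
// package's own typings for the exact RawSourceMap shape it expects.
const consumer = new SourceMapConsumer({
  version: 3,
  sources: ['input.js'],
  names: ['foo'],
  mappings: 'KAyCIA',
} as any);

const pos = consumer.originalPositionFor({ line: 1, column: 5 });
// Expected to agree with the trace-mapping result above:
// { source: 'input.js', line: 42, column: 4, name: 'foo' }
```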
|
||||
We also provide a lower-level API to get the actual segment that matches our line and column. Unlike
|
||||
`originalPositionFor`, `traceSegment` uses a 0-based `line`:
|
||||
|
||||
```typescript
|
||||
import { traceSegment } from '@jridgewell/trace-mapping';
|
||||
|
||||
// line is 0-base.
|
||||
const traced = traceSegment(tracer, /* line */ 0, /* column */ 5);
|
||||
|
||||
// Segments are [outputColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex]
|
||||
// Again, line is 0-based and so is sourceLine
|
||||
assert.deepEqual(traced, [5, 0, 41, 4, 0]);
|
||||
```
|
||||
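If you need the object shape that `originalPositionFor` returns, a raw segment can be expanded by indexing into the tracer's own tables. This is only an illustrative sketch (the package already does this internally for `originalPositionFor`), using the public `names` and `resolvedSources` fields of `TraceMap`:

```typescript
import { TraceMap, traceSegment } from '@jridgewell/trace-mapping';

// Sketch: resolve a 0-based generated line/column to an original position by hand.
function describeSegment(tracer: TraceMap, line: number, column: number) {
  const seg = traceSegment(tracer, line, column);
  // A missing segment, or a 1-length segment, has no original position.
  if (seg == null || seg.length === 1) return null;
  return {
    source: tracer.resolvedSources[seg[1]],
    line: seg[2] + 1, // sourceLine is 0-based; +1 matches originalPositionFor
    column: seg[3],
    name: seg.length === 5 ? tracer.names[seg[4]] : null,
  };
}
```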
|
||||
### SectionedSourceMaps
|
||||
|
||||
The sourcemap spec defines a special `sections` field that's designed to handle concatenation of
|
||||
output code with associated sourcemaps. This type of sourcemap is rarely used (no major build tool
|
||||
produces it), but if you are hand-coding a concatenation you may need it. We provide an `AnyMap`
|
||||
helper that can receive either a regular sourcemap or a `SectionedSourceMap` and return a
|
||||
`TraceMap` instance:
|
||||
|
||||
```typescript
|
||||
import { AnyMap } from '@jridgewell/trace-mapping';
|
||||
const fooOutput = 'foo';
|
||||
const barOutput = 'bar';
|
||||
const output = [fooOutput, barOutput].join('\n');
|
||||
|
||||
const sectioned = new AnyMap({
|
||||
version: 3,
|
||||
sections: [
|
||||
{
|
||||
// 0-base line and column
|
||||
offset: { line: 0, column: 0 },
|
||||
// fooOutput's sourcemap
|
||||
map: {
|
||||
version: 3,
|
||||
sources: ['foo.js'],
|
||||
names: ['foo'],
|
||||
mappings: 'AAAAA',
|
||||
},
|
||||
},
|
||||
{
|
||||
// barOutput's sourcemap will not affect the first line, only the second
|
||||
offset: { line: 1, column: 0 },
|
||||
map: {
|
||||
version: 3,
|
||||
sources: ['bar.js'],
|
||||
names: ['bar'],
|
||||
mappings: 'AAAAA',
|
||||
},
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
const traced = originalPositionFor(sectioned, {
|
||||
line: 2,
|
||||
column: 0,
|
||||
});
|
||||
|
||||
assert.deepEqual(traced, {
|
||||
source: 'bar.js',
|
||||
line: 1,
|
||||
column: 0,
|
||||
name: 'bar',
|
||||
});
|
||||
```
|
||||
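Note that `AnyMap` is not limited to sectioned maps: a map without a `sections` field is handed straight to `TraceMap`, so it can be used unconditionally when you don't know the shape of the incoming sourcemap ahead of time. A minimal sketch with a plain map:

```typescript
import { AnyMap, originalPositionFor } from '@jridgewell/trace-mapping';

// A plain (non-sectioned) map simply becomes a TraceMap.
const tracer = new AnyMap({
  version: 3,
  sources: ['input.js'],
  names: [],
  mappings: 'AAAA',
});

const pos = originalPositionFor(tracer, { line: 1, column: 0 });
// { source: 'input.js', line: 1, column: 0, name: null }
```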
|
||||
## Benchmarks
|
||||
|
||||
```
|
||||
node v18.0.0
|
||||
|
||||
amp.js.map
|
||||
trace-mapping: decoded JSON input x 183 ops/sec ±0.41% (87 runs sampled)
|
||||
trace-mapping: encoded JSON input x 384 ops/sec ±0.89% (89 runs sampled)
|
||||
trace-mapping: decoded Object input x 3,085 ops/sec ±0.24% (100 runs sampled)
|
||||
trace-mapping: encoded Object input x 452 ops/sec ±0.80% (84 runs sampled)
|
||||
source-map-js: encoded Object input x 88.82 ops/sec ±0.45% (77 runs sampled)
|
||||
source-map-0.6.1: encoded Object input x 38.39 ops/sec ±1.88% (52 runs sampled)
|
||||
Fastest is trace-mapping: decoded Object input
|
||||
|
||||
trace-mapping: decoded originalPositionFor x 4,025,347 ops/sec ±0.15% (97 runs sampled)
|
||||
trace-mapping: encoded originalPositionFor x 3,333,136 ops/sec ±1.26% (90 runs sampled)
|
||||
source-map-js: encoded originalPositionFor x 824,978 ops/sec ±1.06% (94 runs sampled)
|
||||
source-map-0.6.1: encoded originalPositionFor x 741,300 ops/sec ±0.93% (92 runs sampled)
|
||||
source-map-0.8.0: encoded originalPositionFor x 2,587,603 ops/sec ±0.75% (97 runs sampled)
|
||||
Fastest is trace-mapping: decoded originalPositionFor
|
||||
|
||||
***
|
||||
|
||||
babel.min.js.map
|
||||
trace-mapping: decoded JSON input x 17.43 ops/sec ±8.81% (33 runs sampled)
|
||||
trace-mapping: encoded JSON input x 34.18 ops/sec ±4.67% (50 runs sampled)
|
||||
trace-mapping: decoded Object input x 1,010 ops/sec ±0.41% (98 runs sampled)
|
||||
trace-mapping: encoded Object input x 39.45 ops/sec ±4.01% (52 runs sampled)
|
||||
source-map-js: encoded Object input x 6.57 ops/sec ±3.04% (21 runs sampled)
|
||||
source-map-0.6.1: encoded Object input x 4.23 ops/sec ±2.93% (15 runs sampled)
|
||||
Fastest is trace-mapping: decoded Object input
|
||||
|
||||
trace-mapping: decoded originalPositionFor x 7,576,265 ops/sec ±0.74% (96 runs sampled)
|
||||
trace-mapping: encoded originalPositionFor x 5,019,743 ops/sec ±0.74% (94 runs sampled)
|
||||
source-map-js: encoded originalPositionFor x 3,396,137 ops/sec ±42.32% (95 runs sampled)
|
||||
source-map-0.6.1: encoded originalPositionFor x 3,753,176 ops/sec ±0.72% (95 runs sampled)
|
||||
source-map-0.8.0: encoded originalPositionFor x 6,423,633 ops/sec ±0.74% (95 runs sampled)
|
||||
Fastest is trace-mapping: decoded originalPositionFor
|
||||
|
||||
***
|
||||
|
||||
preact.js.map
|
||||
trace-mapping: decoded JSON input x 3,499 ops/sec ±0.18% (98 runs sampled)
|
||||
trace-mapping: encoded JSON input x 6,078 ops/sec ±0.25% (99 runs sampled)
|
||||
trace-mapping: decoded Object input x 254,788 ops/sec ±0.13% (100 runs sampled)
|
||||
trace-mapping: encoded Object input x 14,063 ops/sec ±0.27% (94 runs sampled)
|
||||
source-map-js: encoded Object input x 2,465 ops/sec ±0.25% (98 runs sampled)
|
||||
source-map-0.6.1: encoded Object input x 1,174 ops/sec ±1.90% (95 runs sampled)
|
||||
Fastest is trace-mapping: decoded Object input
|
||||
|
||||
trace-mapping: decoded originalPositionFor x 7,720,171 ops/sec ±0.14% (97 runs sampled)
|
||||
trace-mapping: encoded originalPositionFor x 6,864,485 ops/sec ±0.16% (101 runs sampled)
|
||||
source-map-js: encoded originalPositionFor x 2,387,219 ops/sec ±0.28% (98 runs sampled)
|
||||
source-map-0.6.1: encoded originalPositionFor x 1,565,339 ops/sec ±0.32% (101 runs sampled)
|
||||
source-map-0.8.0: encoded originalPositionFor x 3,819,732 ops/sec ±0.38% (98 runs sampled)
|
||||
Fastest is trace-mapping: decoded originalPositionFor
|
||||
|
||||
***
|
||||
|
||||
react.js.map
|
||||
trace-mapping: decoded JSON input x 1,719 ops/sec ±0.19% (99 runs sampled)
|
||||
trace-mapping: encoded JSON input x 4,284 ops/sec ±0.51% (99 runs sampled)
|
||||
trace-mapping: decoded Object input x 94,668 ops/sec ±0.08% (99 runs sampled)
|
||||
trace-mapping: encoded Object input x 5,287 ops/sec ±0.24% (99 runs sampled)
|
||||
source-map-js: encoded Object input x 814 ops/sec ±0.20% (98 runs sampled)
|
||||
source-map-0.6.1: encoded Object input x 429 ops/sec ±0.24% (94 runs sampled)
|
||||
Fastest is trace-mapping: decoded Object input
|
||||
|
||||
trace-mapping: decoded originalPositionFor x 28,927,989 ops/sec ±0.61% (94 runs sampled)
|
||||
trace-mapping: encoded originalPositionFor x 27,394,475 ops/sec ±0.55% (97 runs sampled)
|
||||
source-map-js: encoded originalPositionFor x 16,856,730 ops/sec ±0.45% (96 runs sampled)
|
||||
source-map-0.6.1: encoded originalPositionFor x 12,258,950 ops/sec ±0.41% (97 runs sampled)
|
||||
source-map-0.8.0: encoded originalPositionFor x 22,272,990 ops/sec ±0.58% (95 runs sampled)
|
||||
Fastest is trace-mapping: decoded originalPositionFor
|
||||
```
|
||||
|
||||
[source-map]: https://www.npmjs.com/package/source-map
|
||||
514
node_modules/@cspotcode/source-map-support/node_modules/@jridgewell/trace-mapping/dist/trace-mapping.mjs
generated
vendored
Normal file
514
node_modules/@cspotcode/source-map-support/node_modules/@jridgewell/trace-mapping/dist/trace-mapping.mjs
generated
vendored
Normal file
@ -0,0 +1,514 @@
|
||||
import { encode, decode } from '@jridgewell/sourcemap-codec';
|
||||
import resolveUri from '@jridgewell/resolve-uri';
|
||||
|
||||
function resolve(input, base) {
|
||||
// The base is always treated as a directory, if it's not empty.
|
||||
// https://github.com/mozilla/source-map/blob/8cb3ee57/lib/util.js#L327
|
||||
// https://github.com/chromium/chromium/blob/da4adbb3/third_party/blink/renderer/devtools/front_end/sdk/SourceMap.js#L400-L401
|
||||
if (base && !base.endsWith('/'))
|
||||
base += '/';
|
||||
return resolveUri(input, base);
|
||||
}
|
||||
|
||||
/**
|
||||
* Removes everything after the last "/", but leaves the slash.
|
||||
*/
|
||||
function stripFilename(path) {
|
||||
if (!path)
|
||||
return '';
|
||||
const index = path.lastIndexOf('/');
|
||||
return path.slice(0, index + 1);
|
||||
}
|
||||
|
||||
const COLUMN = 0;
|
||||
const SOURCES_INDEX = 1;
|
||||
const SOURCE_LINE = 2;
|
||||
const SOURCE_COLUMN = 3;
|
||||
const NAMES_INDEX = 4;
|
||||
const REV_GENERATED_LINE = 1;
|
||||
const REV_GENERATED_COLUMN = 2;
|
||||
|
||||
function maybeSort(mappings, owned) {
|
||||
const unsortedIndex = nextUnsortedSegmentLine(mappings, 0);
|
||||
if (unsortedIndex === mappings.length)
|
||||
return mappings;
|
||||
// If we own the array (meaning we parsed it from JSON), then we're free to directly mutate it. If
|
||||
// not, we do not want to modify the consumer's input array.
|
||||
if (!owned)
|
||||
mappings = mappings.slice();
|
||||
for (let i = unsortedIndex; i < mappings.length; i = nextUnsortedSegmentLine(mappings, i + 1)) {
|
||||
mappings[i] = sortSegments(mappings[i], owned);
|
||||
}
|
||||
return mappings;
|
||||
}
|
||||
function nextUnsortedSegmentLine(mappings, start) {
|
||||
for (let i = start; i < mappings.length; i++) {
|
||||
if (!isSorted(mappings[i]))
|
||||
return i;
|
||||
}
|
||||
return mappings.length;
|
||||
}
|
||||
function isSorted(line) {
|
||||
for (let j = 1; j < line.length; j++) {
|
||||
if (line[j][COLUMN] < line[j - 1][COLUMN]) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
function sortSegments(line, owned) {
|
||||
if (!owned)
|
||||
line = line.slice();
|
||||
return line.sort(sortComparator);
|
||||
}
|
||||
function sortComparator(a, b) {
|
||||
return a[COLUMN] - b[COLUMN];
|
||||
}
|
||||
|
||||
let found = false;
|
||||
/**
|
||||
* A binary search implementation that returns the index if a match is found.
|
||||
* If no match is found, then the left-index (the index associated with the item that comes just
|
||||
* before the desired index) is returned. To maintain proper sort order, a splice would happen at
|
||||
* the next index:
|
||||
*
|
||||
* ```js
|
||||
* const array = [1, 3];
|
||||
* const needle = 2;
|
||||
* const index = binarySearch(array, needle, (item, needle) => item - needle);
|
||||
*
|
||||
* assert.equal(index, 0);
|
||||
* array.splice(index + 1, 0, needle);
|
||||
* assert.deepEqual(array, [1, 2, 3]);
|
||||
* ```
|
||||
*/
|
||||
function binarySearch(haystack, needle, low, high) {
|
||||
while (low <= high) {
|
||||
const mid = low + ((high - low) >> 1);
|
||||
const cmp = haystack[mid][COLUMN] - needle;
|
||||
if (cmp === 0) {
|
||||
found = true;
|
||||
return mid;
|
||||
}
|
||||
if (cmp < 0) {
|
||||
low = mid + 1;
|
||||
}
|
||||
else {
|
||||
high = mid - 1;
|
||||
}
|
||||
}
|
||||
found = false;
|
||||
return low - 1;
|
||||
}
|
||||
function upperBound(haystack, needle, index) {
|
||||
for (let i = index + 1; i < haystack.length; i++, index++) {
|
||||
if (haystack[i][COLUMN] !== needle)
|
||||
break;
|
||||
}
|
||||
return index;
|
||||
}
|
||||
function lowerBound(haystack, needle, index) {
|
||||
for (let i = index - 1; i >= 0; i--, index--) {
|
||||
if (haystack[i][COLUMN] !== needle)
|
||||
break;
|
||||
}
|
||||
return index;
|
||||
}
|
||||
function memoizedState() {
|
||||
return {
|
||||
lastKey: -1,
|
||||
lastNeedle: -1,
|
||||
lastIndex: -1,
|
||||
};
|
||||
}
|
||||
/**
|
||||
* This overly complicated beast is just to record the last tested line/column and the resulting
|
||||
* index, allowing us to skip a few tests if mappings are monotonically increasing.
|
||||
*/
|
||||
function memoizedBinarySearch(haystack, needle, state, key) {
|
||||
const { lastKey, lastNeedle, lastIndex } = state;
|
||||
let low = 0;
|
||||
let high = haystack.length - 1;
|
||||
if (key === lastKey) {
|
||||
if (needle === lastNeedle) {
|
||||
found = lastIndex !== -1 && haystack[lastIndex][COLUMN] === needle;
|
||||
return lastIndex;
|
||||
}
|
||||
if (needle >= lastNeedle) {
|
||||
// lastIndex may be -1 if the previous needle was not found.
|
||||
low = lastIndex === -1 ? 0 : lastIndex;
|
||||
}
|
||||
else {
|
||||
high = lastIndex;
|
||||
}
|
||||
}
|
||||
state.lastKey = key;
|
||||
state.lastNeedle = needle;
|
||||
return (state.lastIndex = binarySearch(haystack, needle, low, high));
|
||||
}
|
||||
|
||||
// Rebuilds the original source files, with mappings that are ordered by source line/column instead
|
||||
// of generated line/column.
|
||||
function buildBySources(decoded, memos) {
|
||||
const sources = memos.map(buildNullArray);
|
||||
for (let i = 0; i < decoded.length; i++) {
|
||||
const line = decoded[i];
|
||||
for (let j = 0; j < line.length; j++) {
|
||||
const seg = line[j];
|
||||
if (seg.length === 1)
|
||||
continue;
|
||||
const sourceIndex = seg[SOURCES_INDEX];
|
||||
const sourceLine = seg[SOURCE_LINE];
|
||||
const sourceColumn = seg[SOURCE_COLUMN];
|
||||
const originalSource = sources[sourceIndex];
|
||||
const originalLine = (originalSource[sourceLine] || (originalSource[sourceLine] = []));
|
||||
const memo = memos[sourceIndex];
|
||||
// The binary search either found a match, or it found the left-index just before where the
|
||||
// segment should go. Either way, we want to insert after that. And there may be multiple
|
||||
// generated segments associated with an original location, so we may need to move past several
|
||||
// indexes before we find where we need to insert.
|
||||
const index = upperBound(originalLine, sourceColumn, memoizedBinarySearch(originalLine, sourceColumn, memo, sourceLine));
|
||||
insert(originalLine, (memo.lastIndex = index + 1), [sourceColumn, i, seg[COLUMN]]);
|
||||
}
|
||||
}
|
||||
return sources;
|
||||
}
|
||||
function insert(array, index, value) {
|
||||
for (let i = array.length; i > index; i--) {
|
||||
array[i] = array[i - 1];
|
||||
}
|
||||
array[index] = value;
|
||||
}
|
||||
// Null arrays allow us to use ordered index keys without actually allocating contiguous memory like
|
||||
// a real array. We use a null-prototype object to avoid prototype pollution and deoptimizations.
|
||||
// Numeric properties on objects are magically sorted in ascending order by the engine regardless of
|
||||
// the insertion order. So, by setting any numeric keys, even out of order, we'll get ascending
|
||||
// order when iterating with for-in.
|
||||
function buildNullArray() {
|
||||
return { __proto__: null };
|
||||
}
|
||||
|
||||
const AnyMap = function (map, mapUrl) {
|
||||
const parsed = typeof map === 'string' ? JSON.parse(map) : map;
|
||||
if (!('sections' in parsed))
|
||||
return new TraceMap(parsed, mapUrl);
|
||||
const mappings = [];
|
||||
const sources = [];
|
||||
const sourcesContent = [];
|
||||
const names = [];
|
||||
const { sections } = parsed;
|
||||
let i = 0;
|
||||
for (; i < sections.length - 1; i++) {
|
||||
const no = sections[i + 1].offset;
|
||||
addSection(sections[i], mapUrl, mappings, sources, sourcesContent, names, no.line, no.column);
|
||||
}
|
||||
if (sections.length > 0) {
|
||||
addSection(sections[i], mapUrl, mappings, sources, sourcesContent, names, Infinity, Infinity);
|
||||
}
|
||||
const joined = {
|
||||
version: 3,
|
||||
file: parsed.file,
|
||||
names,
|
||||
sources,
|
||||
sourcesContent,
|
||||
mappings,
|
||||
};
|
||||
return presortedDecodedMap(joined);
|
||||
};
|
||||
function addSection(section, mapUrl, mappings, sources, sourcesContent, names, stopLine, stopColumn) {
|
||||
const map = AnyMap(section.map, mapUrl);
|
||||
const { line: lineOffset, column: columnOffset } = section.offset;
|
||||
const sourcesOffset = sources.length;
|
||||
const namesOffset = names.length;
|
||||
const decoded = decodedMappings(map);
|
||||
const { resolvedSources } = map;
|
||||
append(sources, resolvedSources);
|
||||
append(sourcesContent, map.sourcesContent || fillSourcesContent(resolvedSources.length));
|
||||
append(names, map.names);
|
||||
// If this section jumps forward several lines, we need to add lines to the output mappings to catch up.
|
||||
for (let i = mappings.length; i <= lineOffset; i++)
|
||||
mappings.push([]);
|
||||
// We can only add so many lines before we step into the range that the next section's map
|
||||
// controls. When we get to the last line, then we'll start checking the segments to see if
|
||||
// they've crossed into the column range.
|
||||
const stopI = stopLine - lineOffset;
|
||||
const len = Math.min(decoded.length, stopI + 1);
|
||||
for (let i = 0; i < len; i++) {
|
||||
const line = decoded[i];
|
||||
// On the 0th loop, the line will already exist due to a previous section, or the line catch-up
|
||||
// loop above.
|
||||
const out = i === 0 ? mappings[lineOffset] : (mappings[lineOffset + i] = []);
|
||||
// On the 0th loop, the section's column offset shifts us forward. On all other lines (since the
|
||||
// map can be multiple lines), it doesn't.
|
||||
const cOffset = i === 0 ? columnOffset : 0;
|
||||
for (let j = 0; j < line.length; j++) {
|
||||
const seg = line[j];
|
||||
const column = cOffset + seg[COLUMN];
|
||||
// If this segment steps into the column range that the next section's map controls, we need
|
||||
// to stop early.
|
||||
if (i === stopI && column >= stopColumn)
|
||||
break;
|
||||
if (seg.length === 1) {
|
||||
out.push([column]);
|
||||
continue;
|
||||
}
|
||||
const sourcesIndex = sourcesOffset + seg[SOURCES_INDEX];
|
||||
const sourceLine = seg[SOURCE_LINE];
|
||||
const sourceColumn = seg[SOURCE_COLUMN];
|
||||
if (seg.length === 4) {
|
||||
out.push([column, sourcesIndex, sourceLine, sourceColumn]);
|
||||
continue;
|
||||
}
|
||||
out.push([column, sourcesIndex, sourceLine, sourceColumn, namesOffset + seg[NAMES_INDEX]]);
|
||||
}
|
||||
}
|
||||
}
|
||||
function append(arr, other) {
|
||||
for (let i = 0; i < other.length; i++)
|
||||
arr.push(other[i]);
|
||||
}
|
||||
// Sourcemaps don't need to have sourcesContent, and if they don't, we need to create an array of
|
||||
// equal length to the sources. This is because the sources and sourcesContent are paired arrays,
|
||||
// where `sourcesContent[i]` is the content of the `sources[i]` file. If we didn't, then the joined
|
||||
// sourcemap would desynchronize the sources/contents.
|
||||
function fillSourcesContent(len) {
|
||||
const sourcesContent = [];
|
||||
for (let i = 0; i < len; i++)
|
||||
sourcesContent[i] = null;
|
||||
return sourcesContent;
|
||||
}
|
||||
|
||||
const INVALID_ORIGINAL_MAPPING = Object.freeze({
|
||||
source: null,
|
||||
line: null,
|
||||
column: null,
|
||||
name: null,
|
||||
});
|
||||
const INVALID_GENERATED_MAPPING = Object.freeze({
|
||||
line: null,
|
||||
column: null,
|
||||
});
|
||||
const LINE_GTR_ZERO = '`line` must be greater than 0 (lines start at line 1)';
|
||||
const COL_GTR_EQ_ZERO = '`column` must be greater than or equal to 0 (columns start at column 0)';
|
||||
const LEAST_UPPER_BOUND = -1;
|
||||
const GREATEST_LOWER_BOUND = 1;
|
||||
/**
|
||||
* Returns the encoded (VLQ string) form of the SourceMap's mappings field.
|
||||
*/
|
||||
let encodedMappings;
|
||||
/**
|
||||
* Returns the decoded (array of lines of segments) form of the SourceMap's mappings field.
|
||||
*/
|
||||
let decodedMappings;
|
||||
/**
|
||||
* A low-level API to find the segment associated with a generated line/column (think, from a
|
||||
* stack trace). Line and column here are 0-based, unlike `originalPositionFor`.
|
||||
*/
|
||||
let traceSegment;
|
||||
/**
|
||||
* A higher-level API to find the source/line/column associated with a generated line/column
|
||||
* (think, from a stack trace). Line is 1-based, but column is 0-based, due to legacy behavior in
|
||||
* `source-map` library.
|
||||
*/
|
||||
let originalPositionFor;
|
||||
/**
|
||||
 * A higher-level API to find the generated line/column position for a given original source,
|
||||
 * line, and column (the inverse of `originalPositionFor`). Line is 1-based and column is
|
||||
 * 0-based, matching `originalPositionFor`.
|
||||
*/
|
||||
let generatedPositionFor;
|
||||
/**
|
||||
* Iterates each mapping in generated position order.
|
||||
*/
|
||||
let eachMapping;
|
||||
/**
|
||||
* A helper that skips sorting of the input map's mappings array, which can be expensive for larger
|
||||
* maps.
|
||||
*/
|
||||
let presortedDecodedMap;
|
||||
/**
|
||||
* Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
|
||||
* a sourcemap, or to JSON.stringify.
|
||||
*/
|
||||
let decodedMap;
|
||||
/**
|
||||
* Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
|
||||
* a sourcemap, or to JSON.stringify.
|
||||
*/
|
||||
let encodedMap;
|
||||
class TraceMap {
|
||||
constructor(map, mapUrl) {
|
||||
this._decodedMemo = memoizedState();
|
||||
this._bySources = undefined;
|
||||
this._bySourceMemos = undefined;
|
||||
const isString = typeof map === 'string';
|
||||
if (!isString && map.constructor === TraceMap)
|
||||
return map;
|
||||
const parsed = (isString ? JSON.parse(map) : map);
|
||||
const { version, file, names, sourceRoot, sources, sourcesContent } = parsed;
|
||||
this.version = version;
|
||||
this.file = file;
|
||||
this.names = names;
|
||||
this.sourceRoot = sourceRoot;
|
||||
this.sources = sources;
|
||||
this.sourcesContent = sourcesContent;
|
||||
if (sourceRoot || mapUrl) {
|
||||
const from = resolve(sourceRoot || '', stripFilename(mapUrl));
|
||||
this.resolvedSources = sources.map((s) => resolve(s || '', from));
|
||||
}
|
||||
else {
|
||||
this.resolvedSources = sources.map((s) => s || '');
|
||||
}
|
||||
const { mappings } = parsed;
|
||||
if (typeof mappings === 'string') {
|
||||
this._encoded = mappings;
|
||||
this._decoded = undefined;
|
||||
}
|
||||
else {
|
||||
this._encoded = undefined;
|
||||
this._decoded = maybeSort(mappings, isString);
|
||||
}
|
||||
}
|
||||
}
|
||||
(() => {
|
||||
encodedMappings = (map) => {
|
||||
var _a;
|
||||
return ((_a = map._encoded) !== null && _a !== void 0 ? _a : (map._encoded = encode(map._decoded)));
|
||||
};
|
||||
decodedMappings = (map) => {
|
||||
return (map._decoded || (map._decoded = decode(map._encoded)));
|
||||
};
|
||||
traceSegment = (map, line, column) => {
|
||||
const decoded = decodedMappings(map);
|
||||
// It's common for parent source maps to have pointers to lines that have no
|
||||
// mapping (like a "//# sourceMappingURL=") at the end of the child file.
|
||||
if (line >= decoded.length)
|
||||
return null;
|
||||
return traceSegmentInternal(decoded[line], map._decodedMemo, line, column, GREATEST_LOWER_BOUND);
|
||||
};
|
||||
originalPositionFor = (map, { line, column, bias }) => {
|
||||
line--;
|
||||
if (line < 0)
|
||||
throw new Error(LINE_GTR_ZERO);
|
||||
if (column < 0)
|
||||
throw new Error(COL_GTR_EQ_ZERO);
|
||||
const decoded = decodedMappings(map);
|
||||
// It's common for parent source maps to have pointers to lines that have no
|
||||
// mapping (like a "//# sourceMappingURL=") at the end of the child file.
|
||||
if (line >= decoded.length)
|
||||
return INVALID_ORIGINAL_MAPPING;
|
||||
const segment = traceSegmentInternal(decoded[line], map._decodedMemo, line, column, bias || GREATEST_LOWER_BOUND);
|
||||
if (segment == null)
|
||||
return INVALID_ORIGINAL_MAPPING;
|
||||
if (segment.length == 1)
|
||||
return INVALID_ORIGINAL_MAPPING;
|
||||
const { names, resolvedSources } = map;
|
||||
return {
|
||||
source: resolvedSources[segment[SOURCES_INDEX]],
|
||||
line: segment[SOURCE_LINE] + 1,
|
||||
column: segment[SOURCE_COLUMN],
|
||||
name: segment.length === 5 ? names[segment[NAMES_INDEX]] : null,
|
||||
};
|
||||
};
|
||||
generatedPositionFor = (map, { source, line, column, bias }) => {
|
||||
line--;
|
||||
if (line < 0)
|
||||
throw new Error(LINE_GTR_ZERO);
|
||||
if (column < 0)
|
||||
throw new Error(COL_GTR_EQ_ZERO);
|
||||
const { sources, resolvedSources } = map;
|
||||
let sourceIndex = sources.indexOf(source);
|
||||
if (sourceIndex === -1)
|
||||
sourceIndex = resolvedSources.indexOf(source);
|
||||
if (sourceIndex === -1)
|
||||
return INVALID_GENERATED_MAPPING;
|
||||
const generated = (map._bySources || (map._bySources = buildBySources(decodedMappings(map), (map._bySourceMemos = sources.map(memoizedState)))));
|
||||
const memos = map._bySourceMemos;
|
||||
const segments = generated[sourceIndex][line];
|
||||
if (segments == null)
|
||||
return INVALID_GENERATED_MAPPING;
|
||||
const segment = traceSegmentInternal(segments, memos[sourceIndex], line, column, bias || GREATEST_LOWER_BOUND);
|
||||
if (segment == null)
|
||||
return INVALID_GENERATED_MAPPING;
|
||||
return {
|
||||
line: segment[REV_GENERATED_LINE] + 1,
|
||||
column: segment[REV_GENERATED_COLUMN],
|
||||
};
|
||||
};
|
||||
eachMapping = (map, cb) => {
|
||||
const decoded = decodedMappings(map);
|
||||
const { names, resolvedSources } = map;
|
||||
for (let i = 0; i < decoded.length; i++) {
|
||||
const line = decoded[i];
|
||||
for (let j = 0; j < line.length; j++) {
|
||||
const seg = line[j];
|
||||
const generatedLine = i + 1;
|
||||
const generatedColumn = seg[0];
|
||||
let source = null;
|
||||
let originalLine = null;
|
||||
let originalColumn = null;
|
||||
let name = null;
|
||||
if (seg.length !== 1) {
|
||||
source = resolvedSources[seg[1]];
|
||||
originalLine = seg[2] + 1;
|
||||
originalColumn = seg[3];
|
||||
}
|
||||
if (seg.length === 5)
|
||||
name = names[seg[4]];
|
||||
cb({
|
||||
generatedLine,
|
||||
generatedColumn,
|
||||
source,
|
||||
originalLine,
|
||||
originalColumn,
|
||||
name,
|
||||
});
|
||||
}
|
||||
}
|
||||
};
|
||||
presortedDecodedMap = (map, mapUrl) => {
|
||||
const clone = Object.assign({}, map);
|
||||
clone.mappings = [];
|
||||
const tracer = new TraceMap(clone, mapUrl);
|
||||
tracer._decoded = map.mappings;
|
||||
return tracer;
|
||||
};
|
||||
decodedMap = (map) => {
|
||||
return {
|
||||
version: 3,
|
||||
file: map.file,
|
||||
names: map.names,
|
||||
sourceRoot: map.sourceRoot,
|
||||
sources: map.sources,
|
||||
sourcesContent: map.sourcesContent,
|
||||
mappings: decodedMappings(map),
|
||||
};
|
||||
};
|
||||
encodedMap = (map) => {
|
||||
return {
|
||||
version: 3,
|
||||
file: map.file,
|
||||
names: map.names,
|
||||
sourceRoot: map.sourceRoot,
|
||||
sources: map.sources,
|
||||
sourcesContent: map.sourcesContent,
|
||||
mappings: encodedMappings(map),
|
||||
};
|
||||
};
|
||||
})();
|
||||
function traceSegmentInternal(segments, memo, line, column, bias) {
|
||||
let index = memoizedBinarySearch(segments, column, memo, line);
|
||||
if (found) {
|
||||
index = (bias === LEAST_UPPER_BOUND ? upperBound : lowerBound)(segments, column, index);
|
||||
}
|
||||
else if (bias === LEAST_UPPER_BOUND)
|
||||
index++;
|
||||
if (index === -1 || index === segments.length)
|
||||
return null;
|
||||
return segments[index];
|
||||
}
|
||||
|
||||
export { AnyMap, GREATEST_LOWER_BOUND, LEAST_UPPER_BOUND, TraceMap, decodedMap, decodedMappings, eachMapping, encodedMap, encodedMappings, generatedPositionFor, originalPositionFor, presortedDecodedMap, traceSegment };
|
||||
//# sourceMappingURL=trace-mapping.mjs.map
|
||||
1
node_modules/@cspotcode/source-map-support/node_modules/@jridgewell/trace-mapping/dist/trace-mapping.mjs.map
generated
vendored
Normal file
1
node_modules/@cspotcode/source-map-support/node_modules/@jridgewell/trace-mapping/dist/trace-mapping.mjs.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
528
node_modules/@cspotcode/source-map-support/node_modules/@jridgewell/trace-mapping/dist/trace-mapping.umd.js
generated
vendored
Normal file
528
node_modules/@cspotcode/source-map-support/node_modules/@jridgewell/trace-mapping/dist/trace-mapping.umd.js
generated
vendored
Normal file
@ -0,0 +1,528 @@
|
||||
(function (global, factory) {
|
||||
typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports, require('@jridgewell/sourcemap-codec'), require('@jridgewell/resolve-uri')) :
|
||||
typeof define === 'function' && define.amd ? define(['exports', '@jridgewell/sourcemap-codec', '@jridgewell/resolve-uri'], factory) :
|
||||
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.traceMapping = {}, global.sourcemapCodec, global.resolveURI));
|
||||
})(this, (function (exports, sourcemapCodec, resolveUri) { 'use strict';
|
||||
|
||||
function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }
|
||||
|
||||
var resolveUri__default = /*#__PURE__*/_interopDefaultLegacy(resolveUri);
|
||||
|
||||
function resolve(input, base) {
|
||||
// The base is always treated as a directory, if it's not empty.
|
||||
// https://github.com/mozilla/source-map/blob/8cb3ee57/lib/util.js#L327
|
||||
// https://github.com/chromium/chromium/blob/da4adbb3/third_party/blink/renderer/devtools/front_end/sdk/SourceMap.js#L400-L401
|
||||
if (base && !base.endsWith('/'))
|
||||
base += '/';
|
||||
return resolveUri__default["default"](input, base);
|
||||
}
|
||||
|
||||
/**
|
||||
* Removes everything after the last "/", but leaves the slash.
|
||||
*/
|
||||
function stripFilename(path) {
|
||||
if (!path)
|
||||
return '';
|
||||
const index = path.lastIndexOf('/');
|
||||
return path.slice(0, index + 1);
|
||||
}
|
||||
|
||||
const COLUMN = 0;
|
||||
const SOURCES_INDEX = 1;
|
||||
const SOURCE_LINE = 2;
|
||||
const SOURCE_COLUMN = 3;
|
||||
const NAMES_INDEX = 4;
|
||||
const REV_GENERATED_LINE = 1;
|
||||
const REV_GENERATED_COLUMN = 2;
|
||||
|
||||
function maybeSort(mappings, owned) {
|
||||
const unsortedIndex = nextUnsortedSegmentLine(mappings, 0);
|
||||
if (unsortedIndex === mappings.length)
|
||||
return mappings;
|
||||
// If we own the array (meaning we parsed it from JSON), then we're free to directly mutate it. If
|
||||
// not, we do not want to modify the consumer's input array.
|
||||
if (!owned)
|
||||
mappings = mappings.slice();
|
||||
for (let i = unsortedIndex; i < mappings.length; i = nextUnsortedSegmentLine(mappings, i + 1)) {
|
||||
mappings[i] = sortSegments(mappings[i], owned);
|
||||
}
|
||||
return mappings;
|
||||
}
|
||||
function nextUnsortedSegmentLine(mappings, start) {
|
||||
for (let i = start; i < mappings.length; i++) {
|
||||
if (!isSorted(mappings[i]))
|
||||
return i;
|
||||
}
|
||||
return mappings.length;
|
||||
}
|
||||
function isSorted(line) {
|
||||
for (let j = 1; j < line.length; j++) {
|
||||
if (line[j][COLUMN] < line[j - 1][COLUMN]) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
function sortSegments(line, owned) {
|
||||
if (!owned)
|
||||
line = line.slice();
|
||||
return line.sort(sortComparator);
|
||||
}
|
||||
function sortComparator(a, b) {
|
||||
return a[COLUMN] - b[COLUMN];
|
||||
}
|
||||
|
||||
let found = false;
|
||||
/**
|
||||
* A binary search implementation that returns the index if a match is found.
|
||||
* If no match is found, then the left-index (the index associated with the item that comes just
|
||||
* before the desired index) is returned. To maintain proper sort order, a splice would happen at
|
||||
* the next index:
|
||||
*
|
||||
* ```js
|
||||
* const array = [1, 3];
|
||||
* const needle = 2;
|
||||
* const index = binarySearch(array, needle, (item, needle) => item - needle);
|
||||
*
|
||||
* assert.equal(index, 0);
|
||||
* array.splice(index + 1, 0, needle);
|
||||
* assert.deepEqual(array, [1, 2, 3]);
|
||||
* ```
|
||||
*/
|
||||
function binarySearch(haystack, needle, low, high) {
|
||||
while (low <= high) {
|
||||
const mid = low + ((high - low) >> 1);
|
||||
const cmp = haystack[mid][COLUMN] - needle;
|
||||
if (cmp === 0) {
|
||||
found = true;
|
||||
return mid;
|
||||
}
|
||||
if (cmp < 0) {
|
||||
low = mid + 1;
|
||||
}
|
||||
else {
|
||||
high = mid - 1;
|
||||
}
|
||||
}
|
||||
found = false;
|
||||
return low - 1;
|
||||
}
|
||||
function upperBound(haystack, needle, index) {
|
||||
for (let i = index + 1; i < haystack.length; i++, index++) {
|
||||
if (haystack[i][COLUMN] !== needle)
|
||||
break;
|
||||
}
|
||||
return index;
|
||||
}
|
||||
function lowerBound(haystack, needle, index) {
|
||||
for (let i = index - 1; i >= 0; i--, index--) {
|
||||
if (haystack[i][COLUMN] !== needle)
|
||||
break;
|
||||
}
|
||||
return index;
|
||||
}
|
||||
function memoizedState() {
|
||||
return {
|
||||
lastKey: -1,
|
||||
lastNeedle: -1,
|
||||
lastIndex: -1,
|
||||
};
|
||||
}
|
||||
/**
|
||||
* This overly complicated beast is just to record the last tested line/column and the resulting
|
||||
* index, allowing us to skip a few tests if mappings are monotonically increasing.
|
||||
*/
|
||||
function memoizedBinarySearch(haystack, needle, state, key) {
|
||||
const { lastKey, lastNeedle, lastIndex } = state;
|
||||
let low = 0;
|
||||
let high = haystack.length - 1;
|
||||
if (key === lastKey) {
|
||||
if (needle === lastNeedle) {
|
||||
found = lastIndex !== -1 && haystack[lastIndex][COLUMN] === needle;
|
||||
return lastIndex;
|
||||
}
|
||||
if (needle >= lastNeedle) {
|
||||
// lastIndex may be -1 if the previous needle was not found.
|
||||
low = lastIndex === -1 ? 0 : lastIndex;
|
||||
}
|
||||
else {
|
||||
high = lastIndex;
|
||||
}
|
||||
}
|
||||
state.lastKey = key;
|
||||
state.lastNeedle = needle;
|
||||
return (state.lastIndex = binarySearch(haystack, needle, low, high));
|
||||
}
|
||||
|
||||
// Rebuilds the original source files, with mappings that are ordered by source line/column instead
|
||||
// of generated line/column.
|
||||
function buildBySources(decoded, memos) {
|
||||
const sources = memos.map(buildNullArray);
|
||||
for (let i = 0; i < decoded.length; i++) {
|
||||
const line = decoded[i];
|
||||
for (let j = 0; j < line.length; j++) {
|
||||
const seg = line[j];
|
||||
if (seg.length === 1)
|
||||
continue;
|
||||
const sourceIndex = seg[SOURCES_INDEX];
|
||||
const sourceLine = seg[SOURCE_LINE];
|
||||
const sourceColumn = seg[SOURCE_COLUMN];
|
||||
const originalSource = sources[sourceIndex];
|
||||
const originalLine = (originalSource[sourceLine] || (originalSource[sourceLine] = []));
|
||||
const memo = memos[sourceIndex];
|
||||
// The binary search either found a match, or it found the left-index just before where the
|
||||
// segment should go. Either way, we want to insert after that. And there may be multiple
|
||||
// generated segments associated with an original location, so we may need to move several
|
||||
// indexes before we find where we need to insert.
|
||||
const index = upperBound(originalLine, sourceColumn, memoizedBinarySearch(originalLine, sourceColumn, memo, sourceLine));
|
||||
insert(originalLine, (memo.lastIndex = index + 1), [sourceColumn, i, seg[COLUMN]]);
|
||||
}
|
||||
}
|
||||
return sources;
|
||||
}
|
||||
function insert(array, index, value) {
|
||||
for (let i = array.length; i > index; i--) {
|
||||
array[i] = array[i - 1];
|
||||
}
|
||||
array[index] = value;
|
||||
}
|
||||
// Null arrays allow us to use ordered index keys without actually allocating contiguous memory like
|
||||
// a real array. We use a null-prototype object to avoid prototype pollution and deoptimizations.
|
||||
// Numeric properties on objects are magically sorted in ascending order by the engine regardless of
|
||||
// the insertion order. So, by setting any numeric keys, even out of order, we'll get ascending
|
||||
// order when iterating with for-in.
|
||||
function buildNullArray() {
|
||||
return { __proto__: null };
|
||||
}
|
||||
|
||||
const AnyMap = function (map, mapUrl) {
|
||||
const parsed = typeof map === 'string' ? JSON.parse(map) : map;
|
||||
if (!('sections' in parsed))
|
||||
return new TraceMap(parsed, mapUrl);
|
||||
const mappings = [];
|
||||
const sources = [];
|
||||
const sourcesContent = [];
|
||||
const names = [];
|
||||
const { sections } = parsed;
|
||||
let i = 0;
|
||||
for (; i < sections.length - 1; i++) {
|
||||
const no = sections[i + 1].offset;
|
||||
addSection(sections[i], mapUrl, mappings, sources, sourcesContent, names, no.line, no.column);
|
||||
}
|
||||
if (sections.length > 0) {
|
||||
addSection(sections[i], mapUrl, mappings, sources, sourcesContent, names, Infinity, Infinity);
|
||||
}
|
||||
const joined = {
|
||||
version: 3,
|
||||
file: parsed.file,
|
||||
names,
|
||||
sources,
|
||||
sourcesContent,
|
||||
mappings,
|
||||
};
|
||||
return exports.presortedDecodedMap(joined);
|
||||
};
|
||||
function addSection(section, mapUrl, mappings, sources, sourcesContent, names, stopLine, stopColumn) {
|
||||
const map = AnyMap(section.map, mapUrl);
|
||||
const { line: lineOffset, column: columnOffset } = section.offset;
|
||||
const sourcesOffset = sources.length;
|
||||
const namesOffset = names.length;
|
||||
const decoded = exports.decodedMappings(map);
|
||||
const { resolvedSources } = map;
|
||||
append(sources, resolvedSources);
|
||||
append(sourcesContent, map.sourcesContent || fillSourcesContent(resolvedSources.length));
|
||||
append(names, map.names);
|
||||
// If this section jumps forwards several lines, we need to add lines to the output mappings to catch up.
|
||||
for (let i = mappings.length; i <= lineOffset; i++)
|
||||
mappings.push([]);
|
||||
// We can only add so many lines before we step into the range that the next section's map
|
||||
// controls. When we get to the last line, then we'll start checking the segments to see if
|
||||
// they've crossed into the column range.
|
||||
const stopI = stopLine - lineOffset;
|
||||
const len = Math.min(decoded.length, stopI + 1);
|
||||
for (let i = 0; i < len; i++) {
|
||||
const line = decoded[i];
|
||||
// On the 0th loop, the line will already exist due to a previous section, or the line catch up
|
||||
// loop above.
|
||||
const out = i === 0 ? mappings[lineOffset] : (mappings[lineOffset + i] = []);
|
||||
// On the 0th loop, the section's column offset shifts us forward. On all other lines (since the
|
||||
// map can be multiple lines), it doesn't.
|
||||
const cOffset = i === 0 ? columnOffset : 0;
|
||||
for (let j = 0; j < line.length; j++) {
|
||||
const seg = line[j];
|
||||
const column = cOffset + seg[COLUMN];
|
||||
// If this segment steps into the column range that the next section's map controls, we need
|
||||
// to stop early.
|
||||
if (i === stopI && column >= stopColumn)
|
||||
break;
|
||||
if (seg.length === 1) {
|
||||
out.push([column]);
|
||||
continue;
|
||||
}
|
||||
const sourcesIndex = sourcesOffset + seg[SOURCES_INDEX];
|
||||
const sourceLine = seg[SOURCE_LINE];
|
||||
const sourceColumn = seg[SOURCE_COLUMN];
|
||||
if (seg.length === 4) {
|
||||
out.push([column, sourcesIndex, sourceLine, sourceColumn]);
|
||||
continue;
|
||||
}
|
||||
out.push([column, sourcesIndex, sourceLine, sourceColumn, namesOffset + seg[NAMES_INDEX]]);
|
||||
}
|
||||
}
|
||||
}
|
||||
function append(arr, other) {
|
||||
for (let i = 0; i < other.length; i++)
|
||||
arr.push(other[i]);
|
||||
}
|
||||
// Sourcemaps don't need to have sourcesContent, and if they don't, we need to create an array of
|
||||
// equal length to the sources. This is because the sources and sourcesContent are paired arrays,
|
||||
// where `sourcesContent[i]` is the content of the `sources[i]` file. If we didn't, then joined
|
||||
// sourcemap would desynchronize the sources/contents.
|
||||
function fillSourcesContent(len) {
|
||||
const sourcesContent = [];
|
||||
for (let i = 0; i < len; i++)
|
||||
sourcesContent[i] = null;
|
||||
return sourcesContent;
|
||||
}
|
||||
|
||||
const INVALID_ORIGINAL_MAPPING = Object.freeze({
|
||||
source: null,
|
||||
line: null,
|
||||
column: null,
|
||||
name: null,
|
||||
});
|
||||
const INVALID_GENERATED_MAPPING = Object.freeze({
|
||||
line: null,
|
||||
column: null,
|
||||
});
|
||||
const LINE_GTR_ZERO = '`line` must be greater than 0 (lines start at line 1)';
|
||||
const COL_GTR_EQ_ZERO = '`column` must be greater than or equal to 0 (columns start at column 0)';
|
||||
const LEAST_UPPER_BOUND = -1;
|
||||
const GREATEST_LOWER_BOUND = 1;
|
||||
/**
|
||||
* Returns the encoded (VLQ string) form of the SourceMap's mappings field.
|
||||
*/
|
||||
exports.encodedMappings = void 0;
|
||||
/**
|
||||
* Returns the decoded (array of lines of segments) form of the SourceMap's mappings field.
|
||||
*/
|
||||
exports.decodedMappings = void 0;
|
||||
/**
|
||||
* A low-level API to find the segment associated with a generated line/column (think, from a
|
||||
* stack trace). Line and column here are 0-based, unlike `originalPositionFor`.
|
||||
*/
|
||||
exports.traceSegment = void 0;
|
||||
/**
|
||||
* A higher-level API to find the source/line/column associated with a generated line/column
|
||||
* (think, from a stack trace). Line is 1-based, but column is 0-based, due to legacy behavior in
|
||||
* `source-map` library.
|
||||
*/
|
||||
exports.originalPositionFor = void 0;
|
||||
/**
|
||||
* Finds the source/line/column directly after the mapping returned by originalPositionFor, provided
|
||||
* the found mapping is from the same source and line as the originalPositionFor mapping.
|
||||
*
|
||||
* Eg, in the code `let id = 1`, `originalPositionAfter` could find the mapping associated with `1`
|
||||
* using the same needle that would return `id` when calling `originalPositionFor`.
|
||||
*/
|
||||
exports.generatedPositionFor = void 0;
|
||||
/**
|
||||
* Iterates each mapping in generated position order.
|
||||
*/
|
||||
exports.eachMapping = void 0;
|
||||
/**
|
||||
* A helper that skips sorting of the input map's mappings array, which can be expensive for larger
|
||||
* maps.
|
||||
*/
|
||||
exports.presortedDecodedMap = void 0;
|
||||
/**
|
||||
* Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
|
||||
* a sourcemap, or to JSON.stringify.
|
||||
*/
|
||||
exports.decodedMap = void 0;
|
||||
/**
|
||||
* Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
|
||||
* a sourcemap, or to JSON.stringify.
|
||||
*/
|
||||
exports.encodedMap = void 0;
|
||||
class TraceMap {
|
||||
constructor(map, mapUrl) {
|
||||
this._decodedMemo = memoizedState();
|
||||
this._bySources = undefined;
|
||||
this._bySourceMemos = undefined;
|
||||
const isString = typeof map === 'string';
|
||||
if (!isString && map.constructor === TraceMap)
|
||||
return map;
|
||||
const parsed = (isString ? JSON.parse(map) : map);
|
||||
const { version, file, names, sourceRoot, sources, sourcesContent } = parsed;
|
||||
this.version = version;
|
||||
this.file = file;
|
||||
this.names = names;
|
||||
this.sourceRoot = sourceRoot;
|
||||
this.sources = sources;
|
||||
this.sourcesContent = sourcesContent;
|
||||
if (sourceRoot || mapUrl) {
|
||||
const from = resolve(sourceRoot || '', stripFilename(mapUrl));
|
||||
this.resolvedSources = sources.map((s) => resolve(s || '', from));
|
||||
}
|
||||
else {
|
||||
this.resolvedSources = sources.map((s) => s || '');
|
||||
}
|
||||
const { mappings } = parsed;
|
||||
if (typeof mappings === 'string') {
|
||||
this._encoded = mappings;
|
||||
this._decoded = undefined;
|
||||
}
|
||||
else {
|
||||
this._encoded = undefined;
|
||||
this._decoded = maybeSort(mappings, isString);
|
||||
}
|
||||
}
|
||||
}
|
||||
(() => {
|
||||
exports.encodedMappings = (map) => {
|
||||
var _a;
|
||||
return ((_a = map._encoded) !== null && _a !== void 0 ? _a : (map._encoded = sourcemapCodec.encode(map._decoded)));
|
||||
};
|
||||
exports.decodedMappings = (map) => {
|
||||
return (map._decoded || (map._decoded = sourcemapCodec.decode(map._encoded)));
|
||||
};
|
||||
exports.traceSegment = (map, line, column) => {
|
||||
const decoded = exports.decodedMappings(map);
|
||||
// It's common for parent source maps to have pointers to lines that have no
|
||||
// mapping (like a "//# sourceMappingURL=") at the end of the child file.
|
||||
if (line >= decoded.length)
|
||||
return null;
|
||||
return traceSegmentInternal(decoded[line], map._decodedMemo, line, column, GREATEST_LOWER_BOUND);
|
||||
};
|
||||
exports.originalPositionFor = (map, { line, column, bias }) => {
|
||||
line--;
|
||||
if (line < 0)
|
||||
throw new Error(LINE_GTR_ZERO);
|
||||
if (column < 0)
|
||||
throw new Error(COL_GTR_EQ_ZERO);
|
||||
const decoded = exports.decodedMappings(map);
|
||||
// It's common for parent source maps to have pointers to lines that have no
|
||||
// mapping (like a "//# sourceMappingURL=") at the end of the child file.
|
||||
if (line >= decoded.length)
|
||||
return INVALID_ORIGINAL_MAPPING;
|
||||
const segment = traceSegmentInternal(decoded[line], map._decodedMemo, line, column, bias || GREATEST_LOWER_BOUND);
|
||||
if (segment == null)
|
||||
return INVALID_ORIGINAL_MAPPING;
|
||||
if (segment.length == 1)
|
||||
return INVALID_ORIGINAL_MAPPING;
|
||||
const { names, resolvedSources } = map;
|
||||
return {
|
||||
source: resolvedSources[segment[SOURCES_INDEX]],
|
||||
line: segment[SOURCE_LINE] + 1,
|
||||
column: segment[SOURCE_COLUMN],
|
||||
name: segment.length === 5 ? names[segment[NAMES_INDEX]] : null,
|
||||
};
|
||||
};
|
||||
exports.generatedPositionFor = (map, { source, line, column, bias }) => {
|
||||
line--;
|
||||
if (line < 0)
|
||||
throw new Error(LINE_GTR_ZERO);
|
||||
if (column < 0)
|
||||
throw new Error(COL_GTR_EQ_ZERO);
|
||||
const { sources, resolvedSources } = map;
|
||||
let sourceIndex = sources.indexOf(source);
|
||||
if (sourceIndex === -1)
|
||||
sourceIndex = resolvedSources.indexOf(source);
|
||||
if (sourceIndex === -1)
|
||||
return INVALID_GENERATED_MAPPING;
|
||||
const generated = (map._bySources || (map._bySources = buildBySources(exports.decodedMappings(map), (map._bySourceMemos = sources.map(memoizedState)))));
|
||||
const memos = map._bySourceMemos;
|
||||
const segments = generated[sourceIndex][line];
|
||||
if (segments == null)
|
||||
return INVALID_GENERATED_MAPPING;
|
||||
const segment = traceSegmentInternal(segments, memos[sourceIndex], line, column, bias || GREATEST_LOWER_BOUND);
|
||||
if (segment == null)
|
||||
return INVALID_GENERATED_MAPPING;
|
||||
return {
|
||||
line: segment[REV_GENERATED_LINE] + 1,
|
||||
column: segment[REV_GENERATED_COLUMN],
|
||||
};
|
||||
};
|
||||
exports.eachMapping = (map, cb) => {
|
||||
const decoded = exports.decodedMappings(map);
|
||||
const { names, resolvedSources } = map;
|
||||
for (let i = 0; i < decoded.length; i++) {
|
||||
const line = decoded[i];
|
||||
for (let j = 0; j < line.length; j++) {
|
||||
const seg = line[j];
|
||||
const generatedLine = i + 1;
|
||||
const generatedColumn = seg[0];
|
||||
let source = null;
|
||||
let originalLine = null;
|
||||
let originalColumn = null;
|
||||
let name = null;
|
||||
if (seg.length !== 1) {
|
||||
source = resolvedSources[seg[1]];
|
||||
originalLine = seg[2] + 1;
|
||||
originalColumn = seg[3];
|
||||
}
|
||||
if (seg.length === 5)
|
||||
name = names[seg[4]];
|
||||
cb({
|
||||
generatedLine,
|
||||
generatedColumn,
|
||||
source,
|
||||
originalLine,
|
||||
originalColumn,
|
||||
name,
|
||||
});
|
||||
}
|
||||
}
|
||||
};
|
||||
exports.presortedDecodedMap = (map, mapUrl) => {
|
||||
const clone = Object.assign({}, map);
|
||||
clone.mappings = [];
|
||||
const tracer = new TraceMap(clone, mapUrl);
|
||||
tracer._decoded = map.mappings;
|
||||
return tracer;
|
||||
};
|
||||
exports.decodedMap = (map) => {
|
||||
return {
|
||||
version: 3,
|
||||
file: map.file,
|
||||
names: map.names,
|
||||
sourceRoot: map.sourceRoot,
|
||||
sources: map.sources,
|
||||
sourcesContent: map.sourcesContent,
|
||||
mappings: exports.decodedMappings(map),
|
||||
};
|
||||
};
|
||||
exports.encodedMap = (map) => {
|
||||
return {
|
||||
version: 3,
|
||||
file: map.file,
|
||||
names: map.names,
|
||||
sourceRoot: map.sourceRoot,
|
||||
sources: map.sources,
|
||||
sourcesContent: map.sourcesContent,
|
||||
mappings: exports.encodedMappings(map),
|
||||
};
|
||||
};
|
||||
})();
|
||||
function traceSegmentInternal(segments, memo, line, column, bias) {
|
||||
let index = memoizedBinarySearch(segments, column, memo, line);
|
||||
if (found) {
|
||||
index = (bias === LEAST_UPPER_BOUND ? upperBound : lowerBound)(segments, column, index);
|
||||
}
|
||||
else if (bias === LEAST_UPPER_BOUND)
|
||||
index++;
|
||||
if (index === -1 || index === segments.length)
|
||||
return null;
|
||||
return segments[index];
|
||||
}
|
||||
|
||||
exports.AnyMap = AnyMap;
|
||||
exports.GREATEST_LOWER_BOUND = GREATEST_LOWER_BOUND;
|
||||
exports.LEAST_UPPER_BOUND = LEAST_UPPER_BOUND;
|
||||
exports.TraceMap = TraceMap;
|
||||
|
||||
Object.defineProperty(exports, '__esModule', { value: true });
|
||||
|
||||
}));
|
||||
//# sourceMappingURL=trace-mapping.umd.js.map
|
||||
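The UMD build above also carries the `sections` handling (AnyMap / addSection). As a hedged illustration of how an index map with multiple offset sections is flattened, here is a sketch; the bundle names, offsets, and mappings are invented for the example.

```js
const { AnyMap, originalPositionFor } = require('@jridgewell/trace-mapping');

// Two hypothetical child maps, one per concatenated bundle.
const mapForBundleA = { version: 3, sources: ['a.ts'], names: [], mappings: 'AAAA' };
const mapForBundleB = { version: 3, sources: ['b.ts'], names: [], mappings: 'AAAA' };

// An "index map": the second section takes over at generated line 10, column 0.
// AnyMap flattens the sections into a single presorted TraceMap (see addSection above).
const combined = AnyMap({
  version: 3,
  sections: [
    { offset: { line: 0, column: 0 }, map: mapForBundleA },
    { offset: { line: 10, column: 0 }, map: mapForBundleB },
  ],
}, 'file:///build/combined.js.map');

// A position past the second offset resolves through bundle B's mappings.
originalPositionFor(combined, { line: 11, column: 0 }); // -> roughly { source: 'file:///build/b.ts', line: 1, column: 0, name: null }
```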
1
node_modules/@cspotcode/source-map-support/node_modules/@jridgewell/trace-mapping/dist/trace-mapping.umd.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
8
node_modules/@cspotcode/source-map-support/node_modules/@jridgewell/trace-mapping/dist/types/any-map.d.ts
generated
vendored
Normal file
@ -0,0 +1,8 @@
|
||||
import { TraceMap } from './trace-mapping';
|
||||
import type { SectionedSourceMapInput } from './types';
|
||||
declare type AnyMap = {
|
||||
new (map: SectionedSourceMapInput, mapUrl?: string | null): TraceMap;
|
||||
(map: SectionedSourceMapInput, mapUrl?: string | null): TraceMap;
|
||||
};
|
||||
export declare const AnyMap: AnyMap;
|
||||
export {};
|
||||
32
node_modules/@cspotcode/source-map-support/node_modules/@jridgewell/trace-mapping/dist/types/binary-search.d.ts
generated
vendored
Normal file
@ -0,0 +1,32 @@
|
||||
import type { SourceMapSegment, ReverseSegment } from './sourcemap-segment';
|
||||
export declare type MemoState = {
|
||||
lastKey: number;
|
||||
lastNeedle: number;
|
||||
lastIndex: number;
|
||||
};
|
||||
export declare let found: boolean;
|
||||
/**
|
||||
* A binary search implementation that returns the index if a match is found.
|
||||
* If no match is found, then the left-index (the index associated with the item that comes just
|
||||
* before the desired index) is returned. To maintain proper sort order, a splice would happen at
|
||||
* the next index:
|
||||
*
|
||||
* ```js
|
||||
* const array = [1, 3];
|
||||
* const needle = 2;
|
||||
* const index = binarySearch(array, needle, (item, needle) => item - needle);
|
||||
*
|
||||
* assert.equal(index, 0);
|
||||
* array.splice(index + 1, 0, needle);
|
||||
* assert.deepEqual(array, [1, 2, 3]);
|
||||
* ```
|
||||
*/
|
||||
export declare function binarySearch(haystack: SourceMapSegment[] | ReverseSegment[], needle: number, low: number, high: number): number;
|
||||
export declare function upperBound(haystack: SourceMapSegment[] | ReverseSegment[], needle: number, index: number): number;
|
||||
export declare function lowerBound(haystack: SourceMapSegment[] | ReverseSegment[], needle: number, index: number): number;
|
||||
export declare function memoizedState(): MemoState;
|
||||
/**
|
||||
* This overly complicated beast is just to record the last tested line/column and the resulting
|
||||
* index, allowing us to skip a few tests if mappings are monotonically increasing.
|
||||
*/
|
||||
export declare function memoizedBinarySearch(haystack: SourceMapSegment[] | ReverseSegment[], needle: number, state: MemoState, key: number): number;
|
||||
7
node_modules/@cspotcode/source-map-support/node_modules/@jridgewell/trace-mapping/dist/types/by-source.d.ts
generated
vendored
Normal file
@ -0,0 +1,7 @@
|
||||
import type { ReverseSegment, SourceMapSegment } from './sourcemap-segment';
|
||||
import type { MemoState } from './binary-search';
|
||||
export declare type Source = {
|
||||
__proto__: null;
|
||||
[line: number]: Exclude<ReverseSegment, [number]>[];
|
||||
};
|
||||
export default function buildBySources(decoded: readonly SourceMapSegment[][], memos: MemoState[]): Source[];
|
||||
1
node_modules/@cspotcode/source-map-support/node_modules/@jridgewell/trace-mapping/dist/types/resolve.d.ts
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
export default function resolve(input: string, base: string | undefined): string;
|
||||
2
node_modules/@cspotcode/source-map-support/node_modules/@jridgewell/trace-mapping/dist/types/sort.d.ts
generated
vendored
Normal file
@ -0,0 +1,2 @@
|
||||
import type { SourceMapSegment } from './sourcemap-segment';
|
||||
export default function maybeSort(mappings: SourceMapSegment[][], owned: boolean): SourceMapSegment[][];
|
||||
16
node_modules/@cspotcode/source-map-support/node_modules/@jridgewell/trace-mapping/dist/types/sourcemap-segment.d.ts
generated
vendored
Normal file
@ -0,0 +1,16 @@
|
||||
declare type GeneratedColumn = number;
|
||||
declare type SourcesIndex = number;
|
||||
declare type SourceLine = number;
|
||||
declare type SourceColumn = number;
|
||||
declare type NamesIndex = number;
|
||||
declare type GeneratedLine = number;
|
||||
export declare type SourceMapSegment = [GeneratedColumn] | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn] | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn, NamesIndex];
|
||||
export declare type ReverseSegment = [SourceColumn, GeneratedLine, GeneratedColumn];
|
||||
export declare const COLUMN = 0;
|
||||
export declare const SOURCES_INDEX = 1;
|
||||
export declare const SOURCE_LINE = 2;
|
||||
export declare const SOURCE_COLUMN = 3;
|
||||
export declare const NAMES_INDEX = 4;
|
||||
export declare const REV_GENERATED_LINE = 1;
|
||||
export declare const REV_GENERATED_COLUMN = 2;
|
||||
export {};
|
||||
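The tuple shapes declared above are easy to misread, so here is a small, non-normative illustration of the three `SourceMapSegment` forms as plain arrays, using the exported index constants as a legend.

```js
// Indexes per the constants above: COLUMN=0, SOURCES_INDEX=1, SOURCE_LINE=2, SOURCE_COLUMN=3, NAMES_INDEX=4.
const bare   = [12];             // generated column only -- no original position
const mapped = [12, 0, 3, 7];    // generated column 12 came from sources[0], line 3, column 7 (0-based)
const named  = [12, 0, 3, 7, 2]; // same, plus names[2] as the original identifier
```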
4
node_modules/@cspotcode/source-map-support/node_modules/@jridgewell/trace-mapping/dist/types/strip-filename.d.ts
generated
vendored
Normal file
@ -0,0 +1,4 @@
|
||||
/**
|
||||
* Removes everything after the last "/", but leaves the slash.
|
||||
*/
|
||||
export default function stripFilename(path: string | undefined | null): string;
|
||||
70
node_modules/@cspotcode/source-map-support/node_modules/@jridgewell/trace-mapping/dist/types/trace-mapping.d.ts
generated
vendored
Normal file
@ -0,0 +1,70 @@
|
||||
import type { SourceMapSegment } from './sourcemap-segment';
|
||||
import type { SourceMapV3, DecodedSourceMap, EncodedSourceMap, InvalidOriginalMapping, OriginalMapping, InvalidGeneratedMapping, GeneratedMapping, SourceMapInput, Needle, SourceNeedle, SourceMap, EachMapping } from './types';
|
||||
export type { SourceMapSegment } from './sourcemap-segment';
|
||||
export type { SourceMapInput, SectionedSourceMapInput, DecodedSourceMap, EncodedSourceMap, SectionedSourceMap, InvalidOriginalMapping, OriginalMapping as Mapping, OriginalMapping, InvalidGeneratedMapping, GeneratedMapping, EachMapping, } from './types';
|
||||
export declare const LEAST_UPPER_BOUND = -1;
|
||||
export declare const GREATEST_LOWER_BOUND = 1;
|
||||
/**
|
||||
* Returns the encoded (VLQ string) form of the SourceMap's mappings field.
|
||||
*/
|
||||
export declare let encodedMappings: (map: TraceMap) => EncodedSourceMap['mappings'];
|
||||
/**
|
||||
* Returns the decoded (array of lines of segments) form of the SourceMap's mappings field.
|
||||
*/
|
||||
export declare let decodedMappings: (map: TraceMap) => Readonly<DecodedSourceMap['mappings']>;
|
||||
/**
|
||||
* A low-level API to find the segment associated with a generated line/column (think, from a
|
||||
* stack trace). Line and column here are 0-based, unlike `originalPositionFor`.
|
||||
*/
|
||||
export declare let traceSegment: (map: TraceMap, line: number, column: number) => Readonly<SourceMapSegment> | null;
|
||||
/**
|
||||
* A higher-level API to find the source/line/column associated with a generated line/column
|
||||
* (think, from a stack trace). Line is 1-based, but column is 0-based, due to legacy behavior in
|
||||
* `source-map` library.
|
||||
*/
|
||||
export declare let originalPositionFor: (map: TraceMap, needle: Needle) => OriginalMapping | InvalidOriginalMapping;
|
||||
/**
|
||||
* Finds the source/line/column directly after the mapping returned by originalPositionFor, provided
|
||||
* the found mapping is from the same source and line as the originalPositionFor mapping.
|
||||
*
|
||||
* Eg, in the code `let id = 1`, `originalPositionAfter` could find the mapping associated with `1`
|
||||
* using the same needle that would return `id` when calling `originalPositionFor`.
|
||||
*/
|
||||
export declare let generatedPositionFor: (map: TraceMap, needle: SourceNeedle) => GeneratedMapping | InvalidGeneratedMapping;
|
||||
/**
|
||||
* Iterates each mapping in generated position order.
|
||||
*/
|
||||
export declare let eachMapping: (map: TraceMap, cb: (mapping: EachMapping) => void) => void;
|
||||
/**
|
||||
* A helper that skips sorting of the input map's mappings array, which can be expensive for larger
|
||||
* maps.
|
||||
*/
|
||||
export declare let presortedDecodedMap: (map: DecodedSourceMap, mapUrl?: string) => TraceMap;
|
||||
/**
|
||||
* Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
|
||||
* a sourcemap, or to JSON.stringify.
|
||||
*/
|
||||
export declare let decodedMap: (map: TraceMap) => Omit<DecodedSourceMap, 'mappings'> & {
|
||||
mappings: readonly SourceMapSegment[][];
|
||||
};
|
||||
/**
|
||||
* Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
|
||||
* a sourcemap, or to JSON.stringify.
|
||||
*/
|
||||
export declare let encodedMap: (map: TraceMap) => EncodedSourceMap;
|
||||
export { AnyMap } from './any-map';
|
||||
export declare class TraceMap implements SourceMap {
|
||||
version: SourceMapV3['version'];
|
||||
file: SourceMapV3['file'];
|
||||
names: SourceMapV3['names'];
|
||||
sourceRoot: SourceMapV3['sourceRoot'];
|
||||
sources: SourceMapV3['sources'];
|
||||
sourcesContent: SourceMapV3['sourcesContent'];
|
||||
resolvedSources: string[];
|
||||
private _encoded;
|
||||
private _decoded;
|
||||
private _decodedMemo;
|
||||
private _bySources;
|
||||
private _bySourceMemos;
|
||||
constructor(map: SourceMapInput, mapUrl?: string | null);
|
||||
}
|
||||
85
node_modules/@cspotcode/source-map-support/node_modules/@jridgewell/trace-mapping/dist/types/types.d.ts
generated
vendored
Normal file
@ -0,0 +1,85 @@
|
||||
import type { SourceMapSegment } from './sourcemap-segment';
|
||||
import type { TraceMap } from './trace-mapping';
|
||||
export interface SourceMapV3 {
|
||||
file?: string | null;
|
||||
names: string[];
|
||||
sourceRoot?: string;
|
||||
sources: (string | null)[];
|
||||
sourcesContent?: (string | null)[];
|
||||
version: 3;
|
||||
}
|
||||
export interface EncodedSourceMap extends SourceMapV3 {
|
||||
mappings: string;
|
||||
}
|
||||
export interface DecodedSourceMap extends SourceMapV3 {
|
||||
mappings: SourceMapSegment[][];
|
||||
}
|
||||
export interface Section {
|
||||
offset: {
|
||||
line: number;
|
||||
column: number;
|
||||
};
|
||||
map: EncodedSourceMap | DecodedSourceMap | SectionedSourceMap;
|
||||
}
|
||||
export interface SectionedSourceMap {
|
||||
file?: string | null;
|
||||
sections: Section[];
|
||||
version: 3;
|
||||
}
|
||||
export declare type OriginalMapping = {
|
||||
source: string | null;
|
||||
line: number;
|
||||
column: number;
|
||||
name: string | null;
|
||||
};
|
||||
export declare type InvalidOriginalMapping = {
|
||||
source: null;
|
||||
line: null;
|
||||
column: null;
|
||||
name: null;
|
||||
};
|
||||
export declare type GeneratedMapping = {
|
||||
line: number;
|
||||
column: number;
|
||||
};
|
||||
export declare type InvalidGeneratedMapping = {
|
||||
line: null;
|
||||
column: null;
|
||||
};
|
||||
export declare type SourceMapInput = string | EncodedSourceMap | DecodedSourceMap | TraceMap;
|
||||
export declare type SectionedSourceMapInput = SourceMapInput | SectionedSourceMap;
|
||||
export declare type Needle = {
|
||||
line: number;
|
||||
column: number;
|
||||
bias?: 1 | -1;
|
||||
};
|
||||
export declare type SourceNeedle = {
|
||||
source: string;
|
||||
line: number;
|
||||
column: number;
|
||||
bias?: 1 | -1;
|
||||
};
|
||||
export declare type EachMapping = {
|
||||
generatedLine: number;
|
||||
generatedColumn: number;
|
||||
source: null;
|
||||
originalLine: null;
|
||||
originalColumn: null;
|
||||
name: null;
|
||||
} | {
|
||||
generatedLine: number;
|
||||
generatedColumn: number;
|
||||
source: string | null;
|
||||
originalLine: number;
|
||||
originalColumn: number;
|
||||
name: string | null;
|
||||
};
|
||||
export declare abstract class SourceMap {
|
||||
version: SourceMapV3['version'];
|
||||
file: SourceMapV3['file'];
|
||||
names: SourceMapV3['names'];
|
||||
sourceRoot: SourceMapV3['sourceRoot'];
|
||||
sources: SourceMapV3['sources'];
|
||||
sourcesContent: SourceMapV3['sourcesContent'];
|
||||
resolvedSources: SourceMapV3['sources'];
|
||||
}
|
||||
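`EachMapping` above is the shape handed to the `eachMapping` callback. A hedged sketch of iterating a hypothetical map with it:

```js
const { TraceMap, eachMapping } = require('@jridgewell/trace-mapping');

// Invented one-segment map: generated 1:0 maps to src/app.ts 1:0 with name "main".
const tracer = new TraceMap({
  version: 3,
  sources: ['src/app.ts'],
  names: ['main'],
  mappings: 'AAAAA',
});

// The callback receives the EachMapping union declared above; generatedLine and
// originalLine are 1-based, the columns are 0-based.
eachMapping(tracer, (m) => {
  console.log(`${m.generatedLine}:${m.generatedColumn} <- ${m.source}:${m.originalLine}:${m.originalColumn} (${m.name})`);
});
```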
70
node_modules/@cspotcode/source-map-support/node_modules/@jridgewell/trace-mapping/package.json
generated
vendored
Normal file
@ -0,0 +1,70 @@
|
||||
{
|
||||
"name": "@jridgewell/trace-mapping",
|
||||
"version": "0.3.9",
|
||||
"description": "Trace the original position through a source map",
|
||||
"keywords": [
|
||||
"source",
|
||||
"map"
|
||||
],
|
||||
"main": "dist/trace-mapping.umd.js",
|
||||
"module": "dist/trace-mapping.mjs",
|
||||
"typings": "dist/types/trace-mapping.d.ts",
|
||||
"files": [
|
||||
"dist"
|
||||
],
|
||||
"exports": {
|
||||
".": {
|
||||
"browser": "./dist/trace-mapping.umd.js",
|
||||
"require": "./dist/trace-mapping.umd.js",
|
||||
"import": "./dist/trace-mapping.mjs"
|
||||
},
|
||||
"./package.json": "./package.json"
|
||||
},
|
||||
"author": "Justin Ridgewell <justin@ridgewell.name>",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/jridgewell/trace-mapping.git"
|
||||
},
|
||||
"license": "MIT",
|
||||
"scripts": {
|
||||
"benchmark": "run-s build:rollup benchmark:*",
|
||||
"benchmark:install": "cd benchmark && npm install",
|
||||
"benchmark:only": "node benchmark/index.mjs",
|
||||
"build": "run-s -n build:*",
|
||||
"build:rollup": "rollup -c rollup.config.js",
|
||||
"build:ts": "tsc --project tsconfig.build.json",
|
||||
"lint": "run-s -n lint:*",
|
||||
"lint:prettier": "npm run test:lint:prettier -- --write",
|
||||
"lint:ts": "npm run test:lint:ts -- --fix",
|
||||
"prebuild": "rm -rf dist",
|
||||
"prepublishOnly": "npm run preversion",
|
||||
"preversion": "run-s test build",
|
||||
"test": "run-s -n test:lint test:only",
|
||||
"test:debug": "ava debug",
|
||||
"test:lint": "run-s -n test:lint:*",
|
||||
"test:lint:prettier": "prettier --check '{src,test}/**/*.ts' '**/*.md'",
|
||||
"test:lint:ts": "eslint '{src,test}/**/*.ts'",
|
||||
"test:only": "c8 ava",
|
||||
"test:watch": "ava --watch"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@rollup/plugin-typescript": "8.3.0",
|
||||
"@typescript-eslint/eslint-plugin": "5.10.0",
|
||||
"@typescript-eslint/parser": "5.10.0",
|
||||
"ava": "4.0.1",
|
||||
"benchmark": "2.1.4",
|
||||
"c8": "7.11.0",
|
||||
"esbuild": "0.14.14",
|
||||
"esbuild-node-loader": "0.6.4",
|
||||
"eslint": "8.7.0",
|
||||
"eslint-config-prettier": "8.3.0",
|
||||
"npm-run-all": "4.1.5",
|
||||
"prettier": "2.5.1",
|
||||
"rollup": "2.64.0",
|
||||
"typescript": "4.5.4"
|
||||
},
|
||||
"dependencies": {
|
||||
"@jridgewell/resolve-uri": "^3.0.3",
|
||||
"@jridgewell/sourcemap-codec": "^1.4.10"
|
||||
}
|
||||
}
|
||||
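The `exports` map above gives the package dual entry points: CommonJS consumers get the UMD build, ESM consumers get the `.mjs` build. A small sketch (nothing here is specific to this repository):

```js
// CommonJS: Node resolves the "require" condition to dist/trace-mapping.umd.js.
const { TraceMap } = require('@jridgewell/trace-mapping');

// ESM: the "import" condition resolves to dist/trace-mapping.mjs instead.
// import { TraceMap } from '@jridgewell/trace-mapping';
```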
50
node_modules/@cspotcode/source-map-support/package.json
generated
vendored
Normal file
@ -0,0 +1,50 @@
|
||||
{
|
||||
"name": "@cspotcode/source-map-support",
|
||||
"description": "Fixes stack traces for files with source maps",
|
||||
"version": "0.8.1",
|
||||
"main": "./source-map-support.js",
|
||||
"types": "./source-map-support.d.ts",
|
||||
"scripts": {
|
||||
"build": "node build.js",
|
||||
"serve-tests": "http-server -p 1336",
|
||||
"test": "mocha"
|
||||
},
|
||||
"files": [
|
||||
"/register.d.ts",
|
||||
"/register.js",
|
||||
"/register-hook-require.d.ts",
|
||||
"/register-hook-require.js",
|
||||
"/source-map-support.d.ts",
|
||||
"/source-map-support.js",
|
||||
"/browser-source-map-support.js"
|
||||
],
|
||||
"dependencies": {
|
||||
"@jridgewell/trace-mapping": "0.3.9"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/lodash": "^4.14.182",
|
||||
"browserify": "^4.2.3",
|
||||
"coffeescript": "^1.12.7",
|
||||
"http-server": "^0.11.1",
|
||||
"lodash": "^4.17.21",
|
||||
"mocha": "^3.5.3",
|
||||
"semver": "^7.3.7",
|
||||
"source-map": "0.6.1",
|
||||
"webpack": "^1.15.0"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/cspotcode/node-source-map-support"
|
||||
},
|
||||
"bugs": {
|
||||
"url": "https://github.com/cspotcode/node-source-map-support/issues"
|
||||
},
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
},
|
||||
"volta": {
|
||||
"node": "16.11.0",
|
||||
"npm": "7.24.2"
|
||||
}
|
||||
}
|
||||
7
node_modules/@cspotcode/source-map-support/register-hook-require.d.ts
generated
vendored
Executable file
@ -0,0 +1,7 @@
|
||||
// tslint:disable:no-useless-files
|
||||
|
||||
// For following usage:
|
||||
// import '@cspotcode/source-map-support/register-hook-require'
|
||||
// Instead of:
|
||||
// import sourceMapSupport from '@cspotcode/source-map-support'
|
||||
// sourceMapSupport.install({hookRequire: true})
|
||||
3
node_modules/@cspotcode/source-map-support/register-hook-require.js
generated
vendored
Normal file
@ -0,0 +1,3 @@
|
||||
require('./').install({
|
||||
hookRequire: true
|
||||
});
|
||||
7
node_modules/@cspotcode/source-map-support/register.d.ts
generated
vendored
Executable file
@ -0,0 +1,7 @@
|
||||
// tslint:disable:no-useless-files
|
||||
|
||||
// For following usage:
|
||||
// import '@cspotcode/source-map-support/register'
|
||||
// Instead of:
|
||||
// import sourceMapSupport from '@cspotcode/source-map-support'
|
||||
// sourceMapSupport.install()
|
||||
1
node_modules/@cspotcode/source-map-support/register.js
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
require('./').install();
|
||||
76
node_modules/@cspotcode/source-map-support/source-map-support.d.ts
generated
vendored
Executable file
@ -0,0 +1,76 @@
|
||||
// Type definitions for source-map-support 0.5
|
||||
// Project: https://github.com/evanw/node-source-map-support
|
||||
// Definitions by: Bart van der Schoor <https://github.com/Bartvds>
|
||||
// Jason Cheatham <https://github.com/jason0x43>
|
||||
// Alcedo Nathaniel De Guzman Jr <https://github.com/natealcedo>
|
||||
// Griffin Yourick <https://github.com/tough-griff>
|
||||
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
|
||||
|
||||
export interface RawSourceMap {
|
||||
version: 3;
|
||||
sources: string[];
|
||||
names: string[];
|
||||
sourceRoot?: string;
|
||||
sourcesContent?: string[];
|
||||
mappings: string;
|
||||
file: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Output of retrieveSourceMap().
|
||||
* From source-map-support:
|
||||
* The map field may be either a string or the parsed JSON object (i.e.,
|
||||
* it must be a valid argument to the SourceMapConsumer constructor).
|
||||
*/
|
||||
export interface UrlAndMap {
|
||||
url: string;
|
||||
map: string | RawSourceMap;
|
||||
}
|
||||
|
||||
/**
|
||||
* Options to install().
|
||||
*/
|
||||
export interface Options {
|
||||
handleUncaughtExceptions?: boolean | undefined;
|
||||
hookRequire?: boolean | undefined;
|
||||
emptyCacheBetweenOperations?: boolean | undefined;
|
||||
environment?: 'auto' | 'browser' | 'node' | undefined;
|
||||
overrideRetrieveFile?: boolean | undefined;
|
||||
overrideRetrieveSourceMap?: boolean | undefined;
|
||||
retrieveFile?(path: string): string;
|
||||
retrieveSourceMap?(source: string): UrlAndMap | null;
|
||||
/**
|
||||
* Set false to disable redirection of require / import `source-map-support` to `@cspotcode/source-map-support`
|
||||
*/
|
||||
redirectConflictingLibrary?: boolean;
|
||||
/**
|
||||
* Callback will be called every time we redirect due to `redirectConflictingLibrary`
|
||||
* This allows consumers to log helpful warnings if they choose.
|
||||
* @param parent NodeJS.Module which made the require() or require.resolve() call
|
||||
* @param options options object internally passed to node's `_resolveFilename` hook
|
||||
*/
|
||||
onConflictingLibraryRedirect?: (request: string, parent: any, isMain: boolean, options: any, redirectedRequest: string) => void;
|
||||
}
|
||||
|
||||
export interface Position {
|
||||
source: string;
|
||||
line: number;
|
||||
column: number;
|
||||
}
|
||||
|
||||
export function wrapCallSite(frame: any /* StackFrame */): any /* StackFrame */;
|
||||
export function getErrorSource(error: Error): string | null;
|
||||
export function mapSourcePosition(position: Position): Position;
|
||||
export function retrieveSourceMap(source: string): UrlAndMap | null;
|
||||
export function resetRetrieveHandlers(): void;
|
||||
|
||||
/**
|
||||
* Install SourceMap support.
|
||||
* @param options Can be used to e.g. disable uncaughtException handler.
|
||||
*/
|
||||
export function install(options?: Options): void;
|
||||
|
||||
/**
|
||||
* Uninstall SourceMap support.
|
||||
*/
|
||||
export function uninstall(): void;
|
||||
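To show how the `Options` above fit together, here is a hedged sketch of installing the hook with a custom `retrieveSourceMap` handler; the in-memory map store is hypothetical.

```js
const sourceMapSupport = require('@cspotcode/source-map-support');

const inMemoryMaps = new Map(); // e.g. populated by an in-process transpiler (assumption)

sourceMapSupport.install({
  environment: 'node',
  handleUncaughtExceptions: false,
  // Serve maps for files transpiled in memory; returning null falls back to the
  // library's built-in lookup (sourceMappingURL comments, data: URLs, files on disk).
  retrieveSourceMap(source) {
    const map = inMemoryMaps.get(source);
    return map ? { url: source, map } : null;
  },
});
```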
938
node_modules/@cspotcode/source-map-support/source-map-support.js
generated
vendored
Normal file
@ -0,0 +1,938 @@
|
||||
const { TraceMap, originalPositionFor, AnyMap } = require('@jridgewell/trace-mapping');
|
||||
var path = require('path');
|
||||
const { fileURLToPath, pathToFileURL } = require('url');
|
||||
var util = require('util');
|
||||
|
||||
var fs;
|
||||
try {
|
||||
fs = require('fs');
|
||||
if (!fs.existsSync || !fs.readFileSync) {
|
||||
// fs doesn't have all methods we need
|
||||
fs = null;
|
||||
}
|
||||
} catch (err) {
|
||||
/* nop */
|
||||
}
|
||||
|
||||
/**
|
||||
* Requires a module which is protected against bundler minification.
|
||||
*
|
||||
* @param {NodeModule} mod
|
||||
* @param {string} request
|
||||
*/
|
||||
function dynamicRequire(mod, request) {
|
||||
return mod.require(request);
|
||||
}
|
||||
|
||||
/**
|
||||
* @typedef {{
|
||||
* enabled: boolean;
|
||||
* originalValue: any;
|
||||
* installedValue: any;
|
||||
* }} HookState
|
||||
* Used for installing and uninstalling hooks
|
||||
*/
|
||||
|
||||
// Increment this if the format of sharedData changes in a breaking way.
|
||||
var sharedDataVersion = 1;
|
||||
|
||||
/**
|
||||
* @template T
|
||||
* @param {T} defaults
|
||||
* @returns {T}
|
||||
*/
|
||||
function initializeSharedData(defaults) {
|
||||
var sharedDataKey = 'source-map-support/sharedData';
|
||||
if (typeof Symbol !== 'undefined') {
|
||||
sharedDataKey = Symbol.for(sharedDataKey);
|
||||
}
|
||||
var sharedData = this[sharedDataKey];
|
||||
if (!sharedData) {
|
||||
sharedData = { version: sharedDataVersion };
|
||||
if (Object.defineProperty) {
|
||||
Object.defineProperty(this, sharedDataKey, { value: sharedData });
|
||||
} else {
|
||||
this[sharedDataKey] = sharedData;
|
||||
}
|
||||
}
|
||||
if (sharedDataVersion !== sharedData.version) {
|
||||
throw new Error("Multiple incompatible instances of source-map-support were loaded");
|
||||
}
|
||||
for (var key in defaults) {
|
||||
if (!(key in sharedData)) {
|
||||
sharedData[key] = defaults[key];
|
||||
}
|
||||
}
|
||||
return sharedData;
|
||||
}
|
||||
|
||||
// If multiple instances of source-map-support are loaded into the same
|
||||
// context, they shouldn't overwrite each other. By storing handlers, caches,
|
||||
// and other state on a shared object, different instances of
|
||||
// source-map-support can work together in a limited way. This does require
|
||||
// that future versions of source-map-support continue to support the fields on
|
||||
// this object. If this internal contract ever needs to be broken, increment
|
||||
// sharedDataVersion. (This version number is not the same as any of the
|
||||
// package's version numbers, which should reflect the *external* API of
|
||||
// source-map-support.)
|
||||
var sharedData = initializeSharedData({
|
||||
|
||||
// Only install once if called multiple times
|
||||
// Remember how the environment looked before installation so we can restore if able
|
||||
/** @type {HookState} */
|
||||
errorPrepareStackTraceHook: undefined,
|
||||
/** @type {HookState} */
|
||||
processEmitHook: undefined,
|
||||
/** @type {HookState} */
|
||||
moduleResolveFilenameHook: undefined,
|
||||
|
||||
/** @type {Array<(request: string, parent: any, isMain: boolean, options: any, redirectedRequest: string) => void>} */
|
||||
onConflictingLibraryRedirectArr: [],
|
||||
|
||||
// If true, the caches are reset before a stack trace formatting operation
|
||||
emptyCacheBetweenOperations: false,
|
||||
|
||||
// Maps a file path to a string containing the file contents
|
||||
fileContentsCache: Object.create(null),
|
||||
|
||||
// Maps a file path to a source map for that file
|
||||
/** @type {Record<string, {url: string, map: TraceMap}>} */
|
||||
sourceMapCache: Object.create(null),
|
||||
|
||||
// Priority list of retrieve handlers
|
||||
retrieveFileHandlers: [],
|
||||
retrieveMapHandlers: [],
|
||||
|
||||
// Priority list of internally-implemented handlers.
|
||||
// When resetting state, we must keep these.
|
||||
internalRetrieveFileHandlers: [],
|
||||
internalRetrieveMapHandlers: [],
|
||||
|
||||
});
|
||||
|
||||
// Supports {browser, node, auto}
|
||||
var environment = "auto";
|
||||
|
||||
// Regex for detecting source maps
|
||||
var reSourceMap = /^data:application\/json[^,]+base64,/;
|
||||
|
||||
function isInBrowser() {
|
||||
if (environment === "browser")
|
||||
return true;
|
||||
if (environment === "node")
|
||||
return false;
|
||||
return ((typeof window !== 'undefined') && (typeof XMLHttpRequest === 'function') && !(window.require && window.module && window.process && window.process.type === "renderer"));
|
||||
}
|
||||
|
||||
function hasGlobalProcessEventEmitter() {
|
||||
return ((typeof process === 'object') && (process !== null) && (typeof process.on === 'function'));
|
||||
}
|
||||
|
||||
function tryFileURLToPath(v) {
|
||||
if(isFileUrl(v)) {
|
||||
return fileURLToPath(v);
|
||||
}
|
||||
return v;
|
||||
}
|
||||
|
||||
// TODO un-copy these from resolve-uri; see if they can be exported from that lib
|
||||
function isFileUrl(input) {
|
||||
return input.startsWith('file:');
|
||||
}
|
||||
function isAbsoluteUrl(input) {
|
||||
return schemeRegex.test(input);
|
||||
}
|
||||
// Matches the scheme of a URL, eg "http://"
|
||||
const schemeRegex = /^[\w+.-]+:\/\//;
|
||||
function isSchemeRelativeUrl(input) {
|
||||
return input.startsWith('//');
|
||||
}
|
||||
|
||||
// #region Caches
|
||||
/** @param {string} pathOrFileUrl */
|
||||
function getCacheKey(pathOrFileUrl) {
|
||||
if(pathOrFileUrl.startsWith('node:')) return pathOrFileUrl;
|
||||
if(isFileUrl(pathOrFileUrl)) {
|
||||
// Must normalize spaces to %20, stuff like that
|
||||
return new URL(pathOrFileUrl).toString();
|
||||
} else {
|
||||
try {
|
||||
return pathToFileURL(pathOrFileUrl).toString();
|
||||
} catch {
|
||||
return pathOrFileUrl;
|
||||
}
|
||||
}
|
||||
}
|
||||
function getFileContentsCache(key) {
|
||||
return sharedData.fileContentsCache[getCacheKey(key)];
|
||||
}
|
||||
function hasFileContentsCacheFromKey(key) {
|
||||
return Object.prototype.hasOwnProperty.call(sharedData.fileContentsCache, key);
|
||||
}
|
||||
function getFileContentsCacheFromKey(key) {
|
||||
return sharedData.fileContentsCache[key];
|
||||
}
|
||||
function setFileContentsCache(key, value) {
|
||||
return sharedData.fileContentsCache[getCacheKey(key)] = value;
|
||||
}
|
||||
function getSourceMapCache(key) {
|
||||
return sharedData.sourceMapCache[getCacheKey(key)];
|
||||
}
|
||||
function setSourceMapCache(key, value) {
|
||||
return sharedData.sourceMapCache[getCacheKey(key)] = value;
|
||||
}
|
||||
function clearCaches() {
|
||||
sharedData.fileContentsCache = Object.create(null);
|
||||
sharedData.sourceMapCache = Object.create(null);
|
||||
}
|
||||
// #endregion Caches
|
||||
|
||||
function handlerExec(list, internalList) {
|
||||
return function(arg) {
|
||||
for (var i = 0; i < list.length; i++) {
|
||||
var ret = list[i](arg);
|
||||
if (ret) {
|
||||
return ret;
|
||||
}
|
||||
}
|
||||
for (var i = 0; i < internalList.length; i++) {
|
||||
var ret = internalList[i](arg);
|
||||
if (ret) {
|
||||
return ret;
|
||||
}
|
||||
}
|
||||
return null;
|
||||
};
|
||||
}
|
||||
|
||||
var retrieveFile = handlerExec(sharedData.retrieveFileHandlers, sharedData.internalRetrieveFileHandlers);
|
||||
|
||||
sharedData.internalRetrieveFileHandlers.push(function(path) {
|
||||
// Trim the path to make sure there is no extra whitespace.
|
||||
path = path.trim();
|
||||
if (/^file:/.test(path)) {
|
||||
// existsSync/readFileSync can't handle file protocol, but once stripped, it works
|
||||
path = path.replace(/file:\/\/\/(\w:)?/, function(protocol, drive) {
|
||||
return drive ?
|
||||
'' : // file:///C:/dir/file -> C:/dir/file
|
||||
'/'; // file:///root-dir/file -> /root-dir/file
|
||||
});
|
||||
}
|
||||
const key = getCacheKey(path);
|
||||
if(hasFileContentsCacheFromKey(key)) {
|
||||
return getFileContentsCacheFromKey(key);
|
||||
}
|
||||
|
||||
var contents = '';
|
||||
try {
|
||||
if (!fs) {
|
||||
// Use SJAX if we are in the browser
|
||||
var xhr = new XMLHttpRequest();
|
||||
xhr.open('GET', path, /** async */ false);
|
||||
xhr.send(null);
|
||||
if (xhr.readyState === 4 && xhr.status === 200) {
|
||||
contents = xhr.responseText;
|
||||
}
|
||||
} else if (fs.existsSync(path)) {
|
||||
// Otherwise, use the filesystem
|
||||
contents = fs.readFileSync(path, 'utf8');
|
||||
}
|
||||
} catch (er) {
|
||||
/* ignore any errors */
|
||||
}
|
||||
|
||||
return setFileContentsCache(path, contents);
|
||||
});
|
||||
|
||||
// Support URLs relative to a directory, but be careful about a protocol prefix
|
||||
// in case we are in the browser (i.e. directories may start with "http://" or "file:///")
|
||||
function supportRelativeURL(file, url) {
|
||||
if(!file) return url;
|
||||
// given that this happens within error formatting codepath, probably best to
|
||||
// fallback instead of throwing if anything goes wrong
|
||||
try {
|
||||
// if should output a URL
|
||||
if(isAbsoluteUrl(file) || isSchemeRelativeUrl(file)) {
|
||||
if(isAbsoluteUrl(url) || isSchemeRelativeUrl(url)) {
|
||||
return new URL(url, file).toString();
|
||||
}
|
||||
if(path.isAbsolute(url)) {
|
||||
return new URL(pathToFileURL(url), file).toString();
|
||||
}
|
||||
// url is relative path or URL
|
||||
return new URL(url.replace(/\\/g, '/'), file).toString();
|
||||
}
|
||||
// if should output a path (unless URL is something like https://)
|
||||
if(path.isAbsolute(file)) {
|
||||
if(isFileUrl(url)) {
|
||||
return fileURLToPath(url);
|
||||
}
|
||||
if(isSchemeRelativeUrl(url)) {
|
||||
return fileURLToPath(new URL(url, 'file://'));
|
||||
}
|
||||
if(isAbsoluteUrl(url)) {
|
||||
// url is a non-file URL
|
||||
// Go with the URL
|
||||
return url;
|
||||
}
|
||||
if(path.isAbsolute(url)) {
|
||||
// Normalize at all? decodeURI or normalize slashes?
|
||||
return path.normalize(url);
|
||||
}
|
||||
// url is relative path or URL
|
||||
return path.join(file, '..', decodeURI(url));
|
||||
}
|
||||
// If we get here, file is relative.
|
||||
// Shouldn't happen since node identifies modules with absolute paths or URLs.
|
||||
// But we can take a stab at returning something meaningful anyway.
|
||||
if(isAbsoluteUrl(url) || isSchemeRelativeUrl(url)) {
|
||||
return url;
|
||||
}
|
||||
return path.join(file, '..', url);
|
||||
} catch(e) {
|
||||
return url;
|
||||
}
|
||||
}
|
||||
|
||||
// Return pathOrUrl in the same style as matchStyleOf: either a file URL or a native path
|
||||
function matchStyleOfPathOrUrl(matchStyleOf, pathOrUrl) {
|
||||
try {
|
||||
if(isAbsoluteUrl(matchStyleOf) || isSchemeRelativeUrl(matchStyleOf)) {
|
||||
if(isAbsoluteUrl(pathOrUrl) || isSchemeRelativeUrl(pathOrUrl)) return pathOrUrl;
|
||||
if(path.isAbsolute(pathOrUrl)) return pathToFileURL(pathOrUrl).toString();
|
||||
} else if(path.isAbsolute(matchStyleOf)) {
|
||||
if(isAbsoluteUrl(pathOrUrl) || isSchemeRelativeUrl(pathOrUrl)) {
|
||||
return fileURLToPath(new URL(pathOrUrl, 'file://'));
|
||||
}
|
||||
}
|
||||
return pathOrUrl;
|
||||
} catch(e) {
|
||||
return pathOrUrl;
|
||||
}
|
||||
}
|
||||
|
||||
function retrieveSourceMapURL(source) {
|
||||
var fileData;
|
||||
|
||||
if (isInBrowser()) {
|
||||
try {
|
||||
var xhr = new XMLHttpRequest();
|
||||
xhr.open('GET', source, false);
|
||||
xhr.send(null);
|
||||
fileData = xhr.readyState === 4 ? xhr.responseText : null;
|
||||
|
||||
// Support providing a sourceMappingURL via the SourceMap header
|
||||
var sourceMapHeader = xhr.getResponseHeader("SourceMap") ||
|
||||
xhr.getResponseHeader("X-SourceMap");
|
||||
if (sourceMapHeader) {
|
||||
return sourceMapHeader;
|
||||
}
|
||||
} catch (e) {
|
||||
}
|
||||
}
|
||||
|
||||
// Get the URL of the source map
|
||||
fileData = retrieveFile(tryFileURLToPath(source));
|
||||
var re = /(?:\/\/[@#][\s]*sourceMappingURL=([^\s'"]+)[\s]*$)|(?:\/\*[@#][\s]*sourceMappingURL=([^\s*'"]+)[\s]*(?:\*\/)[\s]*$)/mg;
|
||||
// Keep executing the search to find the *last* sourceMappingURL to avoid
|
||||
// picking up sourceMappingURLs from comments, strings, etc.
|
||||
var lastMatch, match;
|
||||
while (match = re.exec(fileData)) lastMatch = match;
|
||||
if (!lastMatch) return null;
|
||||
return lastMatch[1];
|
||||
};
|
||||
|
||||
// Can be overridden by the retrieveSourceMap option to install. Takes a
|
||||
// generated source filename; returns a {map, optional url} object, or null if
|
||||
// there is no source map. The map field may be either a string or the parsed
|
||||
// JSON object (ie, it must be a valid argument to the SourceMapConsumer
|
||||
// constructor).
|
||||
/** @type {(source: string) => import('./source-map-support').UrlAndMap | null} */
|
||||
var retrieveSourceMap = handlerExec(sharedData.retrieveMapHandlers, sharedData.internalRetrieveMapHandlers);
|
||||
sharedData.internalRetrieveMapHandlers.push(function(source) {
|
||||
var sourceMappingURL = retrieveSourceMapURL(source);
|
||||
if (!sourceMappingURL) return null;
|
||||
|
||||
// Read the contents of the source map
|
||||
var sourceMapData;
|
||||
if (reSourceMap.test(sourceMappingURL)) {
|
||||
// Support source map URL as a data url
|
||||
var rawData = sourceMappingURL.slice(sourceMappingURL.indexOf(',') + 1);
|
||||
sourceMapData = Buffer.from(rawData, "base64").toString();
|
||||
sourceMappingURL = source;
|
||||
} else {
|
||||
// Support source map URLs relative to the source URL
|
||||
sourceMappingURL = supportRelativeURL(source, sourceMappingURL);
|
||||
sourceMapData = retrieveFile(tryFileURLToPath(sourceMappingURL));
|
||||
}
|
||||
|
||||
if (!sourceMapData) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return {
|
||||
url: sourceMappingURL,
|
||||
map: sourceMapData
|
||||
};
|
||||
});
|
||||
|
||||
function mapSourcePosition(position) {
  var sourceMap = getSourceMapCache(position.source);
  if (!sourceMap) {
    // Call the (overrideable) retrieveSourceMap function to get the source map.
    var urlAndMap = retrieveSourceMap(position.source);
    if (urlAndMap) {
      sourceMap = setSourceMapCache(position.source, {
        url: urlAndMap.url,
        map: new AnyMap(urlAndMap.map, urlAndMap.url)
      });

      // Overwrite trace-mapping's resolutions, because they do not handle
      // Windows paths the way we want.
      // TODO Remove now that windows path support was added to resolve-uri and thus trace-mapping?
      sourceMap.map.resolvedSources = sourceMap.map.sources.map(s => supportRelativeURL(sourceMap.url, s));

      // Load all sources stored inline with the source map into the file cache
      // to pretend like they are already loaded. They may not exist on disk.
      if (sourceMap.map.sourcesContent) {
        sourceMap.map.resolvedSources.forEach(function(resolvedSource, i) {
          var contents = sourceMap.map.sourcesContent[i];
          if (contents) {
            setFileContentsCache(resolvedSource, contents);
          }
        });
      }
    } else {
      sourceMap = setSourceMapCache(position.source, {
        url: null,
        map: null
      });
    }
  }

  // Resolve the source URL relative to the URL of the source map
  if (sourceMap && sourceMap.map) {
    var originalPosition = originalPositionFor(sourceMap.map, position);

    // Only return the original position if a matching line was found. If no
    // matching line is found then we return position instead, which will cause
    // the stack trace to print the path and line for the compiled file. It is
    // better to give a precise location in the compiled file than a vague
    // location in the original file.
    if (originalPosition.source !== null) {
      // originalPosition.source has *already* been resolved against sourceMap.url
      // so is *already* as absolute as possible.
      // However, we want to ensure we output in same format as input: URL or native path
      originalPosition.source = matchStyleOfPathOrUrl(
        position.source, originalPosition.source);
      return originalPosition;
    }
  }

  return position;
}

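`mapSourcePosition` is also exported further down, so it can be used for one-off lookups outside of stack traces. A small sketch, assuming a generated file that has a resolvable source map; the path is hypothetical, and the column is zero-based here, as in the callers below:

```js
const { mapSourcePosition } = require('source-map-support');

// Hypothetical generated file; line/column point into the compiled output.
const original = mapSourcePosition({
  source: '/srv/app/dist/server.js',
  line: 10,
  column: 4
});

// Falls back to the input position when no source map is found.
console.log(original.source, original.line, original.column);
```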
// Parses code generated by FormatEvalOrigin(), a function inside V8:
// https://code.google.com/p/v8/source/browse/trunk/src/messages.js
function mapEvalOrigin(origin) {
  // Most eval() calls are in this format
  var match = /^eval at ([^(]+) \((.+):(\d+):(\d+)\)$/.exec(origin);
  if (match) {
    var position = mapSourcePosition({
      source: match[2],
      line: +match[3],
      column: match[4] - 1
    });
    return 'eval at ' + match[1] + ' (' + position.source + ':' +
      position.line + ':' + (position.column + 1) + ')';
  }

  // Parse nested eval() calls using recursion
  match = /^eval at ([^(]+) \((.+)\)$/.exec(origin);
  if (match) {
    return 'eval at ' + match[1] + ' (' + mapEvalOrigin(match[2]) + ')';
  }

  // Make sure we still return useful information if we didn't find anything
  return origin;
}

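For context, the two shapes handled above are `"eval at <fn> (<file>:<line>:<col>)"` and the nested `"eval at <outer> (eval at <inner> (...))"` form. A tiny illustration of the first regex against a made-up origin string:

```js
// File name and position are illustrative.
var origin = 'eval at handler (/srv/app/dist/server.js:10:5)';
var m = /^eval at ([^(]+) \((.+):(\d+):(\d+)\)$/.exec(origin);
console.log(m[1], m[2], +m[3], +m[4]); // "handler" "/srv/app/dist/server.js" 10 5
```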
// This is copied almost verbatim from the V8 source code at
|
||||
// https://code.google.com/p/v8/source/browse/trunk/src/messages.js
|
||||
// Update 2022-04-29:
|
||||
// https://github.com/v8/v8/blob/98f6f100c5ab8e390e51422747c4ef644d5ac6f2/src/builtins/builtins-callsite.cc#L175-L179
|
||||
// https://github.com/v8/v8/blob/98f6f100c5ab8e390e51422747c4ef644d5ac6f2/src/objects/call-site-info.cc#L795-L804
|
||||
// https://github.com/v8/v8/blob/98f6f100c5ab8e390e51422747c4ef644d5ac6f2/src/objects/call-site-info.cc#L717-L750
|
||||
// The implementation of wrapCallSite() used to just forward to the actual source
|
||||
// code of CallSite.prototype.toString but unfortunately a new release of V8
|
||||
// did something to the prototype chain and broke the shim. The only fix I
|
||||
// could find was copy/paste.
|
||||
function CallSiteToString() {
|
||||
var fileName;
|
||||
var fileLocation = "";
|
||||
if (this.isNative()) {
|
||||
fileLocation = "native";
|
||||
} else {
|
||||
fileName = this.getScriptNameOrSourceURL();
|
||||
if (!fileName && this.isEval()) {
|
||||
fileLocation = this.getEvalOrigin();
|
||||
fileLocation += ", "; // Expecting source position to follow.
|
||||
}
|
||||
|
||||
if (fileName) {
|
||||
fileLocation += fileName;
|
||||
} else {
|
||||
// Source code does not originate from a file and is not native, but we
|
||||
// can still get the source position inside the source string, e.g. in
|
||||
// an eval string.
|
||||
fileLocation += "<anonymous>";
|
||||
}
|
||||
var lineNumber = this.getLineNumber();
|
||||
if (lineNumber != null) {
|
||||
fileLocation += ":" + lineNumber;
|
||||
var columnNumber = this.getColumnNumber();
|
||||
if (columnNumber) {
|
||||
fileLocation += ":" + columnNumber;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var line = "";
|
||||
var isAsync = this.isAsync ? this.isAsync() : false;
|
||||
if(isAsync) {
|
||||
line += 'async ';
|
||||
var isPromiseAll = this.isPromiseAll ? this.isPromiseAll() : false;
|
||||
var isPromiseAny = this.isPromiseAny ? this.isPromiseAny() : false;
|
||||
if(isPromiseAny || isPromiseAll) {
|
||||
line += isPromiseAll ? 'Promise.all (index ' : 'Promise.any (index ';
|
||||
var promiseIndex = this.getPromiseIndex();
|
||||
line += promiseIndex + ')';
|
||||
}
|
||||
}
|
||||
var functionName = this.getFunctionName();
|
||||
var addSuffix = true;
|
||||
var isConstructor = this.isConstructor();
|
||||
var isMethodCall = !(this.isToplevel() || isConstructor);
|
||||
if (isMethodCall) {
|
||||
var typeName = this.getTypeName();
|
||||
// Fixes shim to be backward compatible with Node v0 to v4
|
||||
if (typeName === "[object Object]") {
|
||||
typeName = "null";
|
||||
}
|
||||
var methodName = this.getMethodName();
|
||||
if (functionName) {
|
||||
if (typeName && functionName.indexOf(typeName) != 0) {
|
||||
line += typeName + ".";
|
||||
}
|
||||
line += functionName;
|
||||
if (methodName && functionName.indexOf("." + methodName) != functionName.length - methodName.length - 1) {
|
||||
line += " [as " + methodName + "]";
|
||||
}
|
||||
} else {
|
||||
line += typeName + "." + (methodName || "<anonymous>");
|
||||
}
|
||||
} else if (isConstructor) {
|
||||
line += "new " + (functionName || "<anonymous>");
|
||||
} else if (functionName) {
|
||||
line += functionName;
|
||||
} else {
|
||||
line += fileLocation;
|
||||
addSuffix = false;
|
||||
}
|
||||
if (addSuffix) {
|
||||
line += " (" + fileLocation + ")";
|
||||
}
|
||||
return line;
|
||||
}
|
||||
|
||||
function cloneCallSite(frame) {
|
||||
var object = {};
|
||||
Object.getOwnPropertyNames(Object.getPrototypeOf(frame)).forEach(function(name) {
|
||||
object[name] = /^(?:is|get)/.test(name) ? function() { return frame[name].call(frame); } : frame[name];
|
||||
});
|
||||
object.toString = CallSiteToString;
|
||||
return object;
|
||||
}
|
||||
|
||||
function wrapCallSite(frame, state) {
|
||||
// provides interface backward compatibility
|
||||
if (state === undefined) {
|
||||
state = { nextPosition: null, curPosition: null }
|
||||
}
|
||||
if(frame.isNative()) {
|
||||
state.curPosition = null;
|
||||
return frame;
|
||||
}
|
||||
|
||||
// Most call sites will return the source file from getFileName(), but code
|
||||
// passed to eval() ending in "//# sourceURL=..." will return the source file
|
||||
// from getScriptNameOrSourceURL() instead
|
||||
var source = frame.getFileName() || frame.getScriptNameOrSourceURL();
|
||||
if (source) {
|
||||
// v8 does not expose its internal isWasm, etc methods, so we do this instead.
|
||||
if(source.startsWith('wasm://')) {
|
||||
state.curPosition = null;
|
||||
return frame;
|
||||
}
|
||||
|
||||
var line = frame.getLineNumber();
|
||||
var column = frame.getColumnNumber() - 1;
|
||||
|
||||
// Fix position in Node where some (internal) code is prepended.
|
||||
// See https://github.com/evanw/node-source-map-support/issues/36
|
||||
// Header removed in node at ^10.16 || >=11.11.0
|
||||
// v11 is not an LTS candidate, we can just test the one version with it.
|
||||
// Test node versions for: 10.16-19, 10.20+, 12-19, 20-99, 100+, or 11.11
|
||||
var noHeader = /^v(10\.1[6-9]|10\.[2-9][0-9]|10\.[0-9]{3,}|1[2-9]\d*|[2-9]\d|\d{3,}|11\.11)/;
|
||||
var headerLength = noHeader.test(process.version) ? 0 : 62;
|
||||
if (line === 1 && column > headerLength && !isInBrowser() && !frame.isEval()) {
|
||||
column -= headerLength;
|
||||
}
|
||||
|
||||
var position = mapSourcePosition({
|
||||
source: source,
|
||||
line: line,
|
||||
column: column
|
||||
});
|
||||
state.curPosition = position;
|
||||
frame = cloneCallSite(frame);
|
||||
var originalFunctionName = frame.getFunctionName;
|
||||
frame.getFunctionName = function() {
|
||||
if (state.nextPosition == null) {
|
||||
return originalFunctionName();
|
||||
}
|
||||
return state.nextPosition.name || originalFunctionName();
|
||||
};
|
||||
frame.getFileName = function() { return position.source; };
|
||||
frame.getLineNumber = function() { return position.line; };
|
||||
frame.getColumnNumber = function() { return position.column + 1; };
|
||||
frame.getScriptNameOrSourceURL = function() { return position.source; };
|
||||
return frame;
|
||||
}
|
||||
|
||||
// Code called using eval() needs special handling
|
||||
var origin = frame.isEval() && frame.getEvalOrigin();
|
||||
if (origin) {
|
||||
origin = mapEvalOrigin(origin);
|
||||
frame = cloneCallSite(frame);
|
||||
frame.getEvalOrigin = function() { return origin; };
|
||||
return frame;
|
||||
}
|
||||
|
||||
// If we get here then we were unable to change the source position
|
||||
return frame;
|
||||
}
|
||||
|
||||
var kIsNodeError = undefined;
try {
  // Get a deliberate ERR_INVALID_ARG_TYPE
  // TODO is there a better way to reliably get an instance of NodeError?
  path.resolve(123);
} catch(e) {
  const symbols = Object.getOwnPropertySymbols(e);
  const symbol = symbols.find(function (s) {return s.toString().indexOf('kIsNodeError') >= 0});
  if(symbol) kIsNodeError = symbol;
}

const ErrorPrototypeToString = (err) => Error.prototype.toString.call(err);

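The probe above exists because Node tags its own errors with a private `kIsNodeError` symbol and formats them with the error code in brackets; `prepareStackTrace` below mimics that header. A quick illustration of the two header styles (the printed messages are abbreviated examples, not exact output):

```js
const path = require('path');

try {
  path.resolve(123); // deliberately trigger ERR_INVALID_ARG_TYPE
} catch (e) {
  console.log(Error.prototype.toString.call(e));
  // e.g. TypeError: The "paths[0]" argument must be of type string. Received type number (123)
  console.log(`${e.name} [${e.code}]: ${e.message}`);
  // e.g. TypeError [ERR_INVALID_ARG_TYPE]: The "paths[0]" argument must be of type string. ...
}
```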
/** @param {HookState} hookState */
|
||||
function createPrepareStackTrace(hookState) {
|
||||
return prepareStackTrace;
|
||||
|
||||
// This function is part of the V8 stack trace API, for more info see:
|
||||
// https://v8.dev/docs/stack-trace-api
|
||||
function prepareStackTrace(error, stack) {
|
||||
if(!hookState.enabled) return hookState.originalValue.apply(this, arguments);
|
||||
|
||||
if (sharedData.emptyCacheBetweenOperations) {
|
||||
clearCaches();
|
||||
}
|
||||
|
||||
// node gives its own errors special treatment. Mimic that behavior
|
||||
// https://github.com/nodejs/node/blob/3cbaabc4622df1b4009b9d026a1a970bdbae6e89/lib/internal/errors.js#L118-L128
|
||||
// https://github.com/nodejs/node/pull/39182
|
||||
var errorString;
|
||||
if (kIsNodeError) {
|
||||
if(kIsNodeError in error) {
|
||||
errorString = `${error.name} [${error.code}]: ${error.message}`;
|
||||
} else {
|
||||
errorString = ErrorPrototypeToString(error);
|
||||
}
|
||||
} else {
|
||||
var name = error.name || 'Error';
|
||||
var message = error.message || '';
|
||||
errorString = message ? name + ": " + message : name;
|
||||
}
|
||||
|
||||
var state = { nextPosition: null, curPosition: null };
|
||||
var processedStack = [];
|
||||
for (var i = stack.length - 1; i >= 0; i--) {
|
||||
processedStack.push('\n    at ' + wrapCallSite(stack[i], state));
|
||||
state.nextPosition = state.curPosition;
|
||||
}
|
||||
state.curPosition = state.nextPosition = null;
|
||||
return errorString + processedStack.reverse().join('');
|
||||
}
|
||||
}
|
||||
|
||||
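For context, `createPrepareStackTrace` above plugs into V8's stack trace API: whatever function sits on `Error.prepareStackTrace` receives the error and an array of CallSite objects and returns the final stack string. A stripped-down sketch of that contract, independent of this file:

```js
// Minimal illustration of the V8 hook this file installs a richer version of.
Error.prepareStackTrace = function (error, callSites) {
  const header = error.name + ': ' + error.message;
  const frames = callSites.map(function (cs) {
    return '\n    at ' + (cs.getFunctionName() || '<anonymous>') +
      ' (' + cs.getFileName() + ':' + cs.getLineNumber() + ':' + cs.getColumnNumber() + ')';
  });
  return header + frames.join('');
};

console.log(new Error('boom').stack);
Error.prepareStackTrace = undefined; // restore default formatting
```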
// Generate position and snippet of original source with pointer
function getErrorSource(error) {
  var match = /\n    at [^(]+ \((.*):(\d+):(\d+)\)/.exec(error.stack);
  if (match) {
    var source = match[1];
    var line = +match[2];
    var column = +match[3];

    // Support the inline sourceContents inside the source map
    var contents = getFileContentsCache(source);

    const sourceAsPath = tryFileURLToPath(source);

    // Support files on disk
    if (!contents && fs && fs.existsSync(sourceAsPath)) {
      try {
        contents = fs.readFileSync(sourceAsPath, 'utf8');
      } catch (er) {
        contents = '';
      }
    }

    // Format the line from the original source code like node does
    if (contents) {
      var code = contents.split(/(?:\r\n|\r|\n)/)[line - 1];
      if (code) {
        return source + ':' + line + '\n' + code + '\n' +
          new Array(column).join(' ') + '^';
      }
    }
  }
  return null;
}

function printFatalErrorUponExit (error) {
  var source = getErrorSource(error);

  // Ensure error is printed synchronously and not truncated
  if (process.stderr._handle && process.stderr._handle.setBlocking) {
    process.stderr._handle.setBlocking(true);
  }

  if (source) {
    console.error(source);
  }

  // Matches node's behavior for colorized output
  console.error(
    util.inspect(error, {
      customInspect: false,
      colors: process.stderr.isTTY
    })
  );
}

function shimEmitUncaughtException () {
  const originalValue = process.emit;
  var hook = sharedData.processEmitHook = {
    enabled: true,
    originalValue,
    installedValue: undefined
  };
  var isTerminatingDueToFatalException = false;
  var fatalException;

  process.emit = sharedData.processEmitHook.installedValue = function (type) {
    const hadListeners = originalValue.apply(this, arguments);
    if(hook.enabled) {
      if (type === 'uncaughtException' && !hadListeners) {
        isTerminatingDueToFatalException = true;
        fatalException = arguments[1];
        process.exit(1);
      }
      if (type === 'exit' && isTerminatingDueToFatalException) {
        printFatalErrorUponExit(fatalException);
      }
    }
    return hadListeners;
  };
}

var originalRetrieveFileHandlers = sharedData.retrieveFileHandlers.slice(0);
var originalRetrieveMapHandlers = sharedData.retrieveMapHandlers.slice(0);

exports.wrapCallSite = wrapCallSite;
exports.getErrorSource = getErrorSource;
exports.mapSourcePosition = mapSourcePosition;
exports.retrieveSourceMap = retrieveSourceMap;

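These exports mirror the public surface of the standalone `source-map-support` package, so the helpers can be reused in a custom formatter rather than going through `install()`. A hedged sketch combining `wrapCallSite` and `getErrorSource`; the formatting and the exit handling are illustrative, not what `install()` itself produces:

```js
const { wrapCallSite, getErrorSource } = require('source-map-support');

// Bare-bones prepareStackTrace built on wrapCallSite, walking frames
// from the bottom up so each frame can see the next mapped position.
Error.prepareStackTrace = function (error, callSites) {
  const state = { nextPosition: null, curPosition: null };
  const frames = [];
  for (let i = callSites.length - 1; i >= 0; i--) {
    frames.push('\n    at ' + wrapCallSite(callSites[i], state));
    state.nextPosition = state.curPosition;
  }
  return String(error) + frames.reverse().join('');
};

process.on('uncaughtException', function (error) {
  const snippet = getErrorSource(error); // "file:line", the source line and a ^ pointer, or null
  if (snippet) console.error(snippet);
  console.error(error.stack);
  process.exit(1);
});
```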
exports.install = function(options) {
|
||||
options = options || {};
|
||||
|
||||
if (options.environment) {
|
||||
environment = options.environment;
|
||||
if (["node", "browser", "auto"].indexOf(environment) === -1) {
|
||||
throw new Error("environment " + environment + " was unknown. Available options are {auto, browser, node}")
|
||||
}
|
||||
}
|
||||
|
||||
// Use dynamicRequire to avoid including in browser bundles
|
||||
var Module = dynamicRequire(module, 'module');
|
||||
|
||||
// Redirect subsequent imports of "source-map-support"
|
||||
// to this package
|
||||
const {redirectConflictingLibrary = true, onConflictingLibraryRedirect} = options;
|
||||
if(redirectConflictingLibrary) {
|
||||
if (!sharedData.moduleResolveFilenameHook) {
|
||||
const originalValue = Module._resolveFilename;
|
||||
const moduleResolveFilenameHook = sharedData.moduleResolveFilenameHook = {
|
||||
enabled: true,
|
||||
originalValue,
|
||||
installedValue: undefined,
|
||||
}
|
||||
Module._resolveFilename = sharedData.moduleResolveFilenameHook.installedValue = function (request, parent, isMain, options) {
|
||||
if (moduleResolveFilenameHook.enabled) {
|
||||
// Match all source-map-support entrypoints: source-map-support, source-map-support/register
|
||||
let requestRedirect;
|
||||
if (request === 'source-map-support') {
|
||||
requestRedirect = './';
|
||||
} else if (request === 'source-map-support/register') {
|
||||
requestRedirect = './register';
|
||||
}
|
||||
|
||||
if (requestRedirect !== undefined) {
|
||||
const newRequest = require.resolve(requestRedirect);
|
||||
for (const cb of sharedData.onConflictingLibraryRedirectArr) {
|
||||
cb(request, parent, isMain, options, newRequest);
|
||||
}
|
||||
request = newRequest;
|
||||
}
|
||||
}
|
||||
|
||||
return originalValue.call(this, request, parent, isMain, options);
|
||||
}
|
||||
}
|
||||
if (onConflictingLibraryRedirect) {
|
||||
sharedData.onConflictingLibraryRedirectArr.push(onConflictingLibraryRedirect);
|
||||
}
|
||||
}
|
||||
|
||||
// Allow sources to be found by methods other than reading the files
|
||||
// directly from disk.
|
||||
if (options.retrieveFile) {
|
||||
if (options.overrideRetrieveFile) {
|
||||
sharedData.retrieveFileHandlers.length = 0;
|
||||
}
|
||||
|
||||
sharedData.retrieveFileHandlers.unshift(options.retrieveFile);
|
||||
}
|
||||
|
||||
// Allow source maps to be found by methods other than reading the files
|
||||
// directly from disk.
|
||||
if (options.retrieveSourceMap) {
|
||||
if (options.overrideRetrieveSourceMap) {
|
||||
sharedData.retrieveMapHandlers.length = 0;
|
||||
}
|
||||
|
||||
sharedData.retrieveMapHandlers.unshift(options.retrieveSourceMap);
|
||||
}
|
||||
|
||||
// Support runtime transpilers that include inline source maps
|
||||
if (options.hookRequire && !isInBrowser()) {
|
||||
var $compile = Module.prototype._compile;
|
||||
|
||||
if (!$compile.__sourceMapSupport) {
|
||||
Module.prototype._compile = function(content, filename) {
|
||||
setFileContentsCache(filename, content);
|
||||
setSourceMapCache(filename, undefined);
|
||||
return $compile.call(this, content, filename);
|
||||
};
|
||||
|
||||
Module.prototype._compile.__sourceMapSupport = true;
|
||||
}
|
||||
}
|
||||
|
||||
// Configure options
|
||||
if (!sharedData.emptyCacheBetweenOperations) {
|
||||
sharedData.emptyCacheBetweenOperations = 'emptyCacheBetweenOperations' in options ?
|
||||
options.emptyCacheBetweenOperations : false;
|
||||
}
|
||||
|
||||
|
||||
// Install the error reformatter
|
||||
if (!sharedData.errorPrepareStackTraceHook) {
|
||||
const originalValue = Error.prepareStackTrace;
|
||||
sharedData.errorPrepareStackTraceHook = {
|
||||
enabled: true,
|
||||
originalValue,
|
||||
installedValue: undefined
|
||||
};
|
||||
Error.prepareStackTrace = sharedData.errorPrepareStackTraceHook.installedValue = createPrepareStackTrace(sharedData.errorPrepareStackTraceHook);
|
||||
}
|
||||
|
||||
if (!sharedData.processEmitHook) {
|
||||
var installHandler = 'handleUncaughtExceptions' in options ?
|
||||
options.handleUncaughtExceptions : true;
|
||||
|
||||
// Do not override 'uncaughtException' with our own handler in Node.js
|
||||
// Worker threads. Workers pass the error to the main thread as an event,
|
||||
// rather than printing something to stderr and exiting.
|
||||
try {
|
||||
// We need to use `dynamicRequire` because `require` on its own will be optimized by WebPack/Browserify.
|
||||
var worker_threads = dynamicRequire(module, 'worker_threads');
|
||||
if (worker_threads.isMainThread === false) {
|
||||
installHandler = false;
|
||||
}
|
||||
} catch(e) {}
|
||||
|
||||
// Provide the option to not install the uncaught exception handler. This is
|
||||
// to support other uncaught exception handlers (in test frameworks, for
|
||||
// example). If this handler is not installed and there are no other uncaught
|
||||
// exception handlers, uncaught exceptions will be caught by node's built-in
|
||||
// exception handler and the process will still be terminated. However, the
|
||||
// generated JavaScript code will be shown above the stack trace instead of
|
||||
// the original source code.
|
||||
if (installHandler && hasGlobalProcessEventEmitter()) {
|
||||
shimEmitUncaughtException();
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
exports.uninstall = function() {
|
||||
if(sharedData.processEmitHook) {
|
||||
// Disable behavior
|
||||
sharedData.processEmitHook.enabled = false;
|
||||
// If possible, remove our hook function. May not be possible if subsequent third-party hooks have wrapped around us.
|
||||
if(process.emit === sharedData.processEmitHook.installedValue) {
|
||||
process.emit = sharedData.processEmitHook.originalValue;
|
||||
}
|
||||
sharedData.processEmitHook = undefined;
|
||||
}
|
||||
if(sharedData.errorPrepareStackTraceHook) {
|
||||
// Disable behavior
|
||||
sharedData.errorPrepareStackTraceHook.enabled = false;
|
||||
// If possible or necessary, remove our hook function.
|
||||
// In vanilla environments, prepareStackTrace is `undefined`.
|
||||
// We cannot delegate to `undefined` the way we can to a function w/`.apply()`; our only option is to remove the function.
|
||||
// If we are the *first* hook installed, and another was installed on top of us, we have no choice but to remove both.
|
||||
if(Error.prepareStackTrace === sharedData.errorPrepareStackTraceHook.installedValue || typeof sharedData.errorPrepareStackTraceHook.originalValue !== 'function') {
|
||||
Error.prepareStackTrace = sharedData.errorPrepareStackTraceHook.originalValue;
|
||||
}
|
||||
sharedData.errorPrepareStackTraceHook = undefined;
|
||||
}
|
||||
if (sharedData.moduleResolveFilenameHook) {
|
||||
// Disable behavior
|
||||
sharedData.moduleResolveFilenameHook.enabled = false;
|
||||
// If possible, remove our hook function. May not be possible if subsequent third-party hooks have wrapped around us.
|
||||
var Module = dynamicRequire(module, 'module');
|
||||
if(Module._resolveFilename === sharedData.moduleResolveFilenameHook.installedValue) {
|
||||
Module._resolveFilename = sharedData.moduleResolveFilenameHook.originalValue;
|
||||
}
|
||||
sharedData.moduleResolveFilenameHook = undefined;
|
||||
}
|
||||
sharedData.onConflictingLibraryRedirectArr.length = 0;
|
||||
}
|
||||
|
||||
exports.resetRetrieveHandlers = function() {
|
||||
sharedData.retrieveFileHandlers.length = 0;
|
||||
sharedData.retrieveMapHandlers.length = 0;
|
||||
}
|
||||
21
node_modules/@tsconfig/node10/LICENSE
generated
vendored
Normal file
21
node_modules/@tsconfig/node10/LICENSE
generated
vendored
Normal file
@ -0,0 +1,21 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) Microsoft Corporation.
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE
|
||||
38
node_modules/@tsconfig/node10/README.md
generated
vendored
Normal file
38
node_modules/@tsconfig/node10/README.md
generated
vendored
Normal file
@ -0,0 +1,38 @@
|
||||
### A base TSConfig for working with Node 10.
|
||||
|
||||
Add the package to your `"devDependencies"`:
|
||||
|
||||
```sh
|
||||
npm install --save-dev @tsconfig/node10
|
||||
yarn add --dev @tsconfig/node10
|
||||
```
|
||||
|
||||
Add to your `tsconfig.json`:
|
||||
|
||||
```json
|
||||
"extends": "@tsconfig/node10/tsconfig.json"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
The `tsconfig.json`:
|
||||
|
||||
```jsonc
|
||||
{
|
||||
"$schema": "https://json.schemastore.org/tsconfig",
|
||||
|
||||
"compilerOptions": {
|
||||
"lib": ["es2018"],
|
||||
"module": "commonjs",
|
||||
"target": "es2018",
|
||||
|
||||
"strict": true,
|
||||
"esModuleInterop": true,
|
||||
"skipLibCheck": true,
|
||||
"moduleResolution": "node"
|
||||
}
|
||||
}
|
||||
|
||||
```
|
||||
|
||||
You can find the [code here](https://github.com/tsconfig/bases/blob/master/bases/node10.json).
|
||||
15
node_modules/@tsconfig/node10/package.json
generated
vendored
Normal file
15
node_modules/@tsconfig/node10/package.json
generated
vendored
Normal file
@ -0,0 +1,15 @@
|
||||
{
|
||||
"name": "@tsconfig/node10",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/tsconfig/bases.git",
|
||||
"directory": "bases"
|
||||
},
|
||||
"license": "MIT",
|
||||
"description": "A base TSConfig for working with Node 10.",
|
||||
"keywords": [
|
||||
"tsconfig",
|
||||
"node10"
|
||||
],
|
||||
"version": "1.0.11"
|
||||
}
|
||||
14
node_modules/@tsconfig/node10/tsconfig.json
generated
vendored
Normal file
14
node_modules/@tsconfig/node10/tsconfig.json
generated
vendored
Normal file
@ -0,0 +1,14 @@
|
||||
{
|
||||
"$schema": "https://json.schemastore.org/tsconfig",
|
||||
|
||||
"compilerOptions": {
|
||||
"lib": ["es2018"],
|
||||
"module": "commonjs",
|
||||
"target": "es2018",
|
||||
|
||||
"strict": true,
|
||||
"esModuleInterop": true,
|
||||
"skipLibCheck": true,
|
||||
"moduleResolution": "node"
|
||||
}
|
||||
}
|
||||
21
node_modules/@tsconfig/node12/LICENSE
generated
vendored
Normal file
21
node_modules/@tsconfig/node12/LICENSE
generated
vendored
Normal file
@ -0,0 +1,21 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) Microsoft Corporation.
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE
|
||||
40
node_modules/@tsconfig/node12/README.md
generated
vendored
Normal file
40
node_modules/@tsconfig/node12/README.md
generated
vendored
Normal file
@ -0,0 +1,40 @@
|
||||
### A base TSConfig for working with Node 12.
|
||||
|
||||
Add the package to your `"devDependencies"`:
|
||||
|
||||
```sh
|
||||
npm install --save-dev @tsconfig/node12
|
||||
yarn add --dev @tsconfig/node12
|
||||
```
|
||||
|
||||
Add to your `tsconfig.json`:
|
||||
|
||||
```json
|
||||
"extends": "@tsconfig/node12/tsconfig.json"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
The `tsconfig.json`:
|
||||
|
||||
```jsonc
|
||||
{
|
||||
"$schema": "https://json.schemastore.org/tsconfig",
|
||||
"display": "Node 12",
|
||||
|
||||
"compilerOptions": {
|
||||
"lib": ["es2019", "es2020.promise", "es2020.bigint", "es2020.string"],
|
||||
"module": "commonjs",
|
||||
"target": "es2019",
|
||||
|
||||
"strict": true,
|
||||
"esModuleInterop": true,
|
||||
"skipLibCheck": true,
|
||||
"forceConsistentCasingInFileNames": true,
|
||||
"moduleResolution": "node"
|
||||
}
|
||||
}
|
||||
|
||||
```
|
||||
|
||||
You can find the [code here](https://github.com/tsconfig/bases/blob/master/bases/node12.json).
|
||||
1
node_modules/@tsconfig/node12/package.json
generated
vendored
Normal file
1
node_modules/@tsconfig/node12/package.json
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
{"name":"@tsconfig/node12","repository":{"type":"git","url":"https://github.com/tsconfig/bases.git","directory":"bases"},"license":"MIT","description":"A base TSConfig for working with Node 12.","keywords":["tsconfig","node12"],"version":"1.0.11"}
|
||||
16
node_modules/@tsconfig/node12/tsconfig.json
generated
vendored
Normal file
16
node_modules/@tsconfig/node12/tsconfig.json
generated
vendored
Normal file
@ -0,0 +1,16 @@
|
||||
{
|
||||
"$schema": "https://json.schemastore.org/tsconfig",
|
||||
"display": "Node 12",
|
||||
|
||||
"compilerOptions": {
|
||||
"lib": ["es2019", "es2020.promise", "es2020.bigint", "es2020.string"],
|
||||
"module": "commonjs",
|
||||
"target": "es2019",
|
||||
|
||||
"strict": true,
|
||||
"esModuleInterop": true,
|
||||
"skipLibCheck": true,
|
||||
"forceConsistentCasingInFileNames": true,
|
||||
"moduleResolution": "node"
|
||||
}
|
||||
}
|
||||
21
node_modules/@tsconfig/node14/LICENSE
generated
vendored
Normal file
21
node_modules/@tsconfig/node14/LICENSE
generated
vendored
Normal file
@ -0,0 +1,21 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) Microsoft Corporation.
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE
|
||||
40
node_modules/@tsconfig/node14/README.md
generated
vendored
Normal file
40
node_modules/@tsconfig/node14/README.md
generated
vendored
Normal file
@ -0,0 +1,40 @@
|
||||
### A base TSConfig for working with Node 14.
|
||||
|
||||
Add the package to your `"devDependencies"`:
|
||||
|
||||
```sh
|
||||
npm install --save-dev @tsconfig/node14
|
||||
yarn add --dev @tsconfig/node14
|
||||
```
|
||||
|
||||
Add to your `tsconfig.json`:
|
||||
|
||||
```json
|
||||
"extends": "@tsconfig/node14/tsconfig.json"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
The `tsconfig.json`:
|
||||
|
||||
```jsonc
|
||||
{
|
||||
"$schema": "https://json.schemastore.org/tsconfig",
|
||||
"display": "Node 14",
|
||||
|
||||
"compilerOptions": {
|
||||
"lib": ["es2020"],
|
||||
"module": "commonjs",
|
||||
"target": "es2020",
|
||||
|
||||
"strict": true,
|
||||
"esModuleInterop": true,
|
||||
"skipLibCheck": true,
|
||||
"forceConsistentCasingInFileNames": true,
|
||||
"moduleResolution": "node"
|
||||
}
|
||||
}
|
||||
|
||||
```
|
||||
|
||||
You can find the [code here](https://github.com/tsconfig/bases/blob/master/bases/node14.json).
|
||||
1
node_modules/@tsconfig/node14/package.json
generated
vendored
Normal file
1
node_modules/@tsconfig/node14/package.json
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
{"name":"@tsconfig/node14","repository":{"type":"git","url":"https://github.com/tsconfig/bases.git","directory":"bases"},"license":"MIT","description":"A base TSConfig for working with Node 14.","keywords":["tsconfig","node14"],"version":"1.0.3"}
|
||||
16
node_modules/@tsconfig/node14/tsconfig.json
generated
vendored
Normal file
16
node_modules/@tsconfig/node14/tsconfig.json
generated
vendored
Normal file
@ -0,0 +1,16 @@
|
||||
{
|
||||
"$schema": "https://json.schemastore.org/tsconfig",
|
||||
"display": "Node 14",
|
||||
|
||||
"compilerOptions": {
|
||||
"lib": ["es2020"],
|
||||
"module": "commonjs",
|
||||
"target": "es2020",
|
||||
|
||||
"strict": true,
|
||||
"esModuleInterop": true,
|
||||
"skipLibCheck": true,
|
||||
"forceConsistentCasingInFileNames": true,
|
||||
"moduleResolution": "node"
|
||||
}
|
||||
}
|
||||
21
node_modules/@tsconfig/node16/LICENSE
generated
vendored
Normal file
21
node_modules/@tsconfig/node16/LICENSE
generated
vendored
Normal file
@ -0,0 +1,21 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) Microsoft Corporation.
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE
|
||||
40
node_modules/@tsconfig/node16/README.md
generated
vendored
Normal file
40
node_modules/@tsconfig/node16/README.md
generated
vendored
Normal file
@ -0,0 +1,40 @@
|
||||
### A base TSConfig for working with Node 16.
|
||||
|
||||
Add the package to your `"devDependencies"`:
|
||||
|
||||
```sh
|
||||
npm install --save-dev @tsconfig/node16
|
||||
yarn add --dev @tsconfig/node16
|
||||
```
|
||||
|
||||
Add to your `tsconfig.json`:
|
||||
|
||||
```json
|
||||
"extends": "@tsconfig/node16/tsconfig.json"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
The `tsconfig.json`:
|
||||
|
||||
```jsonc
|
||||
{
|
||||
"$schema": "https://json.schemastore.org/tsconfig",
|
||||
"display": "Node 16",
|
||||
|
||||
"compilerOptions": {
|
||||
"lib": ["es2021"],
|
||||
"module": "Node16",
|
||||
"target": "es2021",
|
||||
|
||||
"strict": true,
|
||||
"esModuleInterop": true,
|
||||
"skipLibCheck": true,
|
||||
"forceConsistentCasingInFileNames": true,
|
||||
"moduleResolution": "node"
|
||||
}
|
||||
}
|
||||
|
||||
```
|
||||
|
||||
You can find the [code here](https://github.com/tsconfig/bases/blob/master/bases/node16.json).
|
||||
15
node_modules/@tsconfig/node16/package.json
generated
vendored
Normal file
15
node_modules/@tsconfig/node16/package.json
generated
vendored
Normal file
@ -0,0 +1,15 @@
|
||||
{
|
||||
"name": "@tsconfig/node16",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/tsconfig/bases.git",
|
||||
"directory": "bases"
|
||||
},
|
||||
"license": "MIT",
|
||||
"description": "A base TSConfig for working with Node 16.",
|
||||
"keywords": [
|
||||
"tsconfig",
|
||||
"node16"
|
||||
],
|
||||
"version": "1.0.4"
|
||||
}
|
||||
16
node_modules/@tsconfig/node16/tsconfig.json
generated
vendored
Normal file
16
node_modules/@tsconfig/node16/tsconfig.json
generated
vendored
Normal file
@ -0,0 +1,16 @@
|
||||
{
|
||||
"$schema": "https://json.schemastore.org/tsconfig",
|
||||
"display": "Node 16",
|
||||
|
||||
"compilerOptions": {
|
||||
"lib": ["es2021"],
|
||||
"module": "Node16",
|
||||
"target": "es2021",
|
||||
|
||||
"strict": true,
|
||||
"esModuleInterop": true,
|
||||
"skipLibCheck": true,
|
||||
"forceConsistentCasingInFileNames": true,
|
||||
"moduleResolution": "node"
|
||||
}
|
||||
}
|
||||
21
node_modules/@types/bcryptjs/LICENSE
generated
vendored
Normal file
21
node_modules/@types/bcryptjs/LICENSE
generated
vendored
Normal file
@ -0,0 +1,21 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) Microsoft Corporation.
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE
|
||||
15
node_modules/@types/bcryptjs/README.md
generated
vendored
Normal file
15
node_modules/@types/bcryptjs/README.md
generated
vendored
Normal file
@ -0,0 +1,15 @@
|
||||
# Installation
|
||||
> `npm install --save @types/bcryptjs`
|
||||
|
||||
# Summary
|
||||
This package contains type definitions for bcryptjs (https://github.com/dcodeIO/bcrypt.js).
|
||||
|
||||
# Details
|
||||
Files were exported from https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/bcryptjs.
|
||||
|
||||
### Additional Details
|
||||
* Last updated: Mon, 06 Nov 2023 22:41:04 GMT
|
||||
* Dependencies: none
|
||||
|
||||
# Credits
|
||||
These definitions were written by [Joshua Filby](https://github.com/Joshua-F), [Rafael Kraut](https://github.com/RafaelKr), and [Branislav Holý](https://github.com/branoholy).
|
||||
124
node_modules/@types/bcryptjs/index.d.ts
generated
vendored
Normal file
124
node_modules/@types/bcryptjs/index.d.ts
generated
vendored
Normal file
@ -0,0 +1,124 @@
|
||||
/**
|
||||
* Sets the pseudo random number generator to use as a fallback if neither node's crypto module nor the Web Crypto API is available.
|
||||
* Please note: It is highly important that the PRNG used is cryptographically secure and that it is seeded properly!
|
||||
* @param random Function taking the number of bytes to generate as its sole argument, returning the corresponding array of cryptographically secure random byte values.
|
||||
*/
|
||||
export declare function setRandomFallback(random: (random: number) => number[]): void;
|
||||
|
||||
/**
|
||||
* Synchronously generates a salt.
|
||||
* @param rounds Number of rounds to use, defaults to 10 if omitted
|
||||
* @return Resulting salt
|
||||
* @throws If a random fallback is required but not set
|
||||
*/
|
||||
export declare function genSaltSync(rounds?: number): string;
|
||||
|
||||
/**
|
||||
* Asynchronously generates a salt.
|
||||
* @param rounds Number of rounds to use, defaults to 10 if omitted
|
||||
* @return Promise with resulting salt, if callback has been omitted
|
||||
*/
|
||||
export declare function genSalt(rounds?: number): Promise<string>;
|
||||
|
||||
/**
|
||||
* Asynchronously generates a salt.
|
||||
* @param callback Callback receiving the error, if any, and the resulting salt
|
||||
*/
|
||||
export declare function genSalt(callback: (err: Error | null, salt: string) => void): void;
|
||||
|
||||
/**
|
||||
* Asynchronously generates a salt.
|
||||
* @param rounds Number of rounds to use, defaults to 10 if omitted
|
||||
* @param callback Callback receiving the error, if any, and the resulting salt
|
||||
*/
|
||||
export declare function genSalt(rounds: number, callback: (err: Error | null, salt: string) => void): void;
|
||||
|
||||
/**
|
||||
* Synchronously generates a hash for the given string.
|
||||
* @param s String to hash
|
||||
* @param salt Salt length to generate or salt to use, default to 10
|
||||
* @return Resulting hash
|
||||
*/
|
||||
export declare function hashSync(s: string, salt?: number | string): string;
|
||||
|
||||
/**
|
||||
* Asynchronously generates a hash for the given string.
|
||||
* @param s String to hash
|
||||
* @param salt Salt length to generate or salt to use
|
||||
* @return Promise with resulting hash, if callback has been omitted
|
||||
*/
|
||||
export declare function hash(s: string, salt: number | string): Promise<string>;
|
||||
|
||||
/**
|
||||
* Asynchronously generates a hash for the given string.
|
||||
* @param s String to hash
|
||||
* @param salt Salt length to generate or salt to use
|
||||
* @param callback Callback receiving the error, if any, and the resulting hash
|
||||
* @param progressCallback Callback successively called with the percentage of rounds completed (0.0 - 1.0), maximally once per MAX_EXECUTION_TIME = 100 ms.
|
||||
*/
|
||||
export declare function hash(
|
||||
s: string,
|
||||
salt: number | string,
|
||||
callback?: (err: Error | null, hash: string) => void,
|
||||
progressCallback?: (percent: number) => void,
|
||||
): void;
|
||||
|
||||
/**
|
||||
* Synchronously tests a string against a hash.
|
||||
* @param s String to compare
|
||||
* @param hash Hash to test against
|
||||
* @return true if matching, otherwise false
|
||||
*/
|
||||
export declare function compareSync(s: string, hash: string): boolean;
|
||||
|
||||
/**
|
||||
* Asynchronously compares the given data against the given hash.
|
||||
* @param s Data to compare
|
||||
* @param hash Data to be compared to
|
||||
* @return Promise, if callback has been omitted
|
||||
*/
|
||||
export declare function compare(s: string, hash: string): Promise<boolean>;
|
||||
|
||||
/**
|
||||
* Asynchronously compares the given data against the given hash.
|
||||
* @param s Data to compare
|
||||
* @param hash Data to be compared to
|
||||
* @param callback Callback receiving the error, if any, otherwise the result
|
||||
* @param progressCallback Callback successively called with the percentage of rounds completed (0.0 - 1.0), maximally once per MAX_EXECUTION_TIME = 100 ms.
|
||||
*/
|
||||
export declare function compare(
|
||||
s: string,
|
||||
hash: string,
|
||||
callback?: (err: Error | null, success: boolean) => void,
|
||||
progressCallback?: (percent: number) => void,
|
||||
): void;
|
||||
|
||||
/**
|
||||
* Gets the number of rounds used to encrypt the specified hash.
|
||||
* @param hash Hash to extract the used number of rounds from
|
||||
* @return Number of rounds used
|
||||
*/
|
||||
export declare function getRounds(hash: string): number;
|
||||
|
||||
/**
|
||||
* Gets the salt portion from a hash. Does not validate the hash.
|
||||
* @param hash Hash to extract the salt from
|
||||
* @return Extracted salt part
|
||||
*/
|
||||
export declare function getSalt(hash: string): string;
|
||||
|
||||
/**
|
||||
* Encodes a byte array to base64 with up to len bytes of input, using the custom bcrypt alphabet.
|
||||
* @function
|
||||
* @param b Byte array
|
||||
* @param len Maximum input length
|
||||
*/
|
||||
export declare function encodeBase64(b: Readonly<ArrayLike<number>>, len: number): string;
|
||||
|
||||
/**
|
||||
* Decodes a base64 encoded string to up to len bytes of output, using the custom bcrypt alphabet.
|
||||
* @function
|
||||
* @param s String to decode
|
||||
* @param len Maximum output length
|
||||
*/
|
||||
export declare function decodeBase64(s: string, len: number): number[];
|
||||
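These declarations cover both the promise and callback forms of the bcryptjs API. A short sketch of the common promise-based flow; the plaintext and the rounds value are illustrative:

```js
const bcrypt = require('bcryptjs');

async function demo() {
  const hash = await bcrypt.hash('s3cret-password', 10); // 10 salt rounds
  const ok = await bcrypt.compare('s3cret-password', hash);
  console.log(ok, bcrypt.getRounds(hash)); // true 10
}

demo();
```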
35
node_modules/@types/bcryptjs/package.json
generated
vendored
Normal file
35
node_modules/@types/bcryptjs/package.json
generated
vendored
Normal file
@ -0,0 +1,35 @@
|
||||
{
|
||||
"name": "@types/bcryptjs",
|
||||
"version": "2.4.6",
|
||||
"description": "TypeScript definitions for bcryptjs",
|
||||
"homepage": "https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/bcryptjs",
|
||||
"license": "MIT",
|
||||
"contributors": [
|
||||
{
|
||||
"name": "Joshua Filby",
|
||||
"githubUsername": "Joshua-F",
|
||||
"url": "https://github.com/Joshua-F"
|
||||
},
|
||||
{
|
||||
"name": "Rafael Kraut",
|
||||
"githubUsername": "RafaelKr",
|
||||
"url": "https://github.com/RafaelKr"
|
||||
},
|
||||
{
|
||||
"name": "Branislav Holý",
|
||||
"githubUsername": "branoholy",
|
||||
"url": "https://github.com/branoholy"
|
||||
}
|
||||
],
|
||||
"main": "",
|
||||
"types": "index.d.ts",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/DefinitelyTyped/DefinitelyTyped.git",
|
||||
"directory": "types/bcryptjs"
|
||||
},
|
||||
"scripts": {},
|
||||
"dependencies": {},
|
||||
"typesPublisherContentHash": "b39980d6e9840f900b6f594a24d8f24dcce0864b2e9b631ed39ce6ef5b690daa",
|
||||
"typeScriptVersion": "4.5"
|
||||
}
|
||||
199
node_modules/acorn-walk/CHANGELOG.md
generated
vendored
Normal file
199
node_modules/acorn-walk/CHANGELOG.md
generated
vendored
Normal file
@ -0,0 +1,199 @@
|
||||
## 8.3.4 (2024-09-09)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Walk SwitchCase nodes as separate nodes.
|
||||
|
||||
## 8.3.3 (2024-01-11)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Make acorn a dependency because acorn-walk uses the types from that package.
|
||||
|
||||
## 8.3.2 (2024-01-11)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Add missing type for `findNodeBefore`.
|
||||
|
||||
## 8.3.1 (2023-12-06)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Add `Function` and `Class` to the `AggregateType` type, so that they can be used in walkers without raising a type error.
|
||||
|
||||
Visitor functions are now called in such a way that their `this` refers to the object they are part of.
|
||||
|
||||
## 8.3.0 (2023-10-26)
|
||||
|
||||
### New features
|
||||
|
||||
Use a set of new, much more precise, TypeScript types.
|
||||
|
||||
## 8.2.0 (2021-09-06)
|
||||
|
||||
### New features
|
||||
|
||||
Add support for walking ES2022 class static blocks.
|
||||
|
||||
## 8.1.1 (2021-06-29)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Include `base` in the type declarations.
|
||||
|
||||
## 8.1.0 (2021-04-24)
|
||||
|
||||
### New features
|
||||
|
||||
Support node types for class fields and private methods.
|
||||
|
||||
## 8.0.2 (2021-01-25)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Adjust package.json to work with Node 12.16.0 and 13.0-13.6.
|
||||
|
||||
## 8.0.0 (2021-01-05)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix a bug where `full` and `fullAncestor` would skip nodes with overridden types.
|
||||
|
||||
## 8.0.0 (2020-08-12)
|
||||
|
||||
### New features
|
||||
|
||||
The package can now be loaded directly as an ECMAScript module in node 13+.
|
||||
|
||||
## 7.2.0 (2020-06-17)
|
||||
|
||||
### New features
|
||||
|
||||
Support optional chaining and nullish coalescing.
|
||||
|
||||
Support `import.meta`.
|
||||
|
||||
Add support for `export * as ns from "source"`.
|
||||
|
||||
## 7.1.1 (2020-02-13)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Clean up the type definitions to actually work well with the main parser.
|
||||
|
||||
## 7.1.0 (2020-02-11)
|
||||
|
||||
### New features
|
||||
|
||||
Add a TypeScript definition file for the library.
|
||||
|
||||
## 7.0.0 (2019-08-12)
|
||||
|
||||
### New features
|
||||
|
||||
Support walking `ImportExpression` nodes.
|
||||
|
||||
## 6.2.0 (2019-07-04)
|
||||
|
||||
### New features
|
||||
|
||||
Add support for `Import` nodes.
|
||||
|
||||
## 6.1.0 (2018-09-28)
|
||||
|
||||
### New features
|
||||
|
||||
The walker now walks `TemplateElement` nodes.
|
||||
|
||||
## 6.0.1 (2018-09-14)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix bad "main" field in package.json.
|
||||
|
||||
## 6.0.0 (2018-09-14)
|
||||
|
||||
### Breaking changes
|
||||
|
||||
This is now a separate package, `acorn-walk`, rather than part of the main `acorn` package.
|
||||
|
||||
The `ScopeBody` and `ScopeExpression` meta-node-types are no longer supported.
|
||||
|
||||
## 5.7.1 (2018-06-15)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Make sure the walker and bin files are rebuilt on release (the previous release didn't get the up-to-date versions).
|
||||
|
||||
## 5.7.0 (2018-06-15)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix crash in walker when walking a binding-less catch node.
|
||||
|
||||
## 5.6.2 (2018-06-05)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
In the walker, go back to allowing the `baseVisitor` argument to be null to default to the default base everywhere.
|
||||
|
||||
## 5.6.1 (2018-06-01)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix regression when passing `null` as fourth argument to `walk.recursive`.
|
||||
|
||||
## 5.6.0 (2018-05-31)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix a bug in the walker that caused a crash when walking an object pattern spread.
|
||||
|
||||
## 5.5.1 (2018-03-06)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix regression in walker causing property values in object patterns to be walked as expressions.
|
||||
|
||||
## 5.5.0 (2018-02-27)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Support object spread in the AST walker.
|
||||
|
||||
## 5.4.1 (2018-02-02)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
5.4.0 somehow accidentally included an old version of walk.js.
|
||||
|
||||
## 5.2.0 (2017-10-30)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
The `full` and `fullAncestor` walkers no longer visit nodes multiple times.
|
||||
|
||||
## 5.1.0 (2017-07-05)
|
||||
|
||||
### New features
|
||||
|
||||
New walker functions `full` and `fullAncestor`.
|
||||
|
||||
## 3.2.0 (2016-06-07)
|
||||
|
||||
### New features
|
||||
|
||||
Make it possible to use `visit.ancestor` with a walk state.
|
||||
|
||||
## 3.1.0 (2016-04-18)
|
||||
|
||||
### New features
|
||||
|
||||
The walker now allows defining handlers for `CatchClause` nodes.
|
||||
|
||||
## 2.5.2 (2015-10-27)
|
||||
|
||||
### Fixes
|
||||
|
||||
Fix bug where the walker walked an exported `let` statement as an expression.
|
||||
21
node_modules/acorn-walk/LICENSE
generated
vendored
Normal file
21
node_modules/acorn-walk/LICENSE
generated
vendored
Normal file
@ -0,0 +1,21 @@
|
||||
MIT License
|
||||
|
||||
Copyright (C) 2012-2020 by various contributors (see AUTHORS)
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
||||
124
node_modules/acorn-walk/README.md
generated
vendored
Normal file
124
node_modules/acorn-walk/README.md
generated
vendored
Normal file
@ -0,0 +1,124 @@
|
||||
# Acorn AST walker
|
||||
|
||||
An abstract syntax tree walker for the
|
||||
[ESTree](https://github.com/estree/estree) format.
|
||||
|
||||
## Community
|
||||
|
||||
Acorn is open source software released under an
|
||||
[MIT license](https://github.com/acornjs/acorn/blob/master/acorn-walk/LICENSE).
|
||||
|
||||
You are welcome to
|
||||
[report bugs](https://github.com/acornjs/acorn/issues) or create pull
|
||||
requests on [github](https://github.com/acornjs/acorn).
|
||||
|
||||
## Installation
|
||||
|
||||
The easiest way to install acorn is from [`npm`](https://www.npmjs.com/):
|
||||
|
||||
```sh
|
||||
npm install acorn-walk
|
||||
```
|
||||
|
||||
Alternately, you can download the source and build acorn yourself:
|
||||
|
||||
```sh
|
||||
git clone https://github.com/acornjs/acorn.git
|
||||
cd acorn
|
||||
npm install
|
||||
```
|
||||
|
||||
## Interface
|
||||
|
||||
An algorithm for recursing through a syntax tree is stored as an
|
||||
object, with a property for each tree node type holding a function
|
||||
that will recurse through such a node. There are several ways to run
|
||||
such a walker.
|
||||
|
||||
**simple**`(node, visitors, base, state)` does a 'simple' walk over a
|
||||
tree. `node` should be the AST node to walk, and `visitors` an object
|
||||
with properties whose names correspond to node types in the [ESTree
|
||||
spec](https://github.com/estree/estree). The properties should contain
|
||||
functions that will be called with the node object and, if applicable
|
||||
the state at that point. The last two arguments are optional. `base`
|
||||
is a walker algorithm, and `state` is a start state. The default
|
||||
walker will simply visit all statements and expressions and not
|
||||
produce a meaningful state. (An example of a use of state is to track
|
||||
scope at each point in the tree.)
|
||||
|
||||
```js
|
||||
const acorn = require("acorn")
|
||||
const walk = require("acorn-walk")
|
||||
|
||||
walk.simple(acorn.parse("let x = 10"), {
|
||||
Literal(node) {
|
||||
console.log(`Found a literal: ${node.value}`)
|
||||
}
|
||||
})
|
||||
```
|
||||
|
||||
**ancestor**`(node, visitors, base, state)` does a 'simple' walk over
|
||||
a tree, building up an array of ancestor nodes (including the current node)
|
||||
and passing the array to the callbacks as a third parameter.
|
||||
|
||||
```js
|
||||
const acorn = require("acorn")
|
||||
const walk = require("acorn-walk")
|
||||
|
||||
walk.ancestor(acorn.parse("foo('hi')"), {
|
||||
Literal(_node, _state, ancestors) {
|
||||
console.log("This literal's ancestors are:", ancestors.map(n => n.type))
|
||||
}
|
||||
})
|
||||
```
|
||||
|
||||
**recursive**`(node, state, functions, base)` does a 'recursive'
|
||||
walk, where the walker functions are responsible for continuing the
|
||||
walk on the child nodes of their target node. `state` is the start
|
||||
state, and `functions` should contain an object that maps node types
|
||||
to walker functions. Such functions are called with `(node, state, c)`
|
||||
arguments, and can cause the walk to continue on a sub-node by calling
|
||||
the `c` argument on it with `(node, state)` arguments. The optional
|
||||
`base` argument provides the fallback walker functions for node types
|
||||
that aren't handled in the `functions` object. If not given, the
|
||||
default walkers will be used.
|
||||
|
||||
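A small sketch of the `recursive` walker just described; the sample program string is made up, and the handlers stop the walk at nested function bodies:

```js
const acorn = require("acorn")
const walk = require("acorn-walk")

// Collect top-level binding names without descending into function bodies.
const names = []
walk.recursive(acorn.parse("var a = 1; function f() { var b = 2 }", {ecmaVersion: 2020}), names, {
  FunctionDeclaration(node, state, c) {
    state.push(node.id.name) // record "f" but do not continue into its body
  },
  VariableDeclarator(node, state, c) {
    state.push(node.id.name)
    if (node.init) c(node.init, state)
  }
})
console.log(names) // ["a", "f"]
```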
**make**`(functions, base)` builds a new walker object by using the
|
||||
walker functions in `functions` and filling in the missing ones by
|
||||
taking defaults from `base`.
|
||||
|
||||
**full**`(node, callback, base, state)` does a 'full' walk over a
|
||||
tree, calling the callback with the arguments (node, state, type) for
|
||||
each node
|
||||
|
||||
**fullAncestor**`(node, callback, base, state)` does a 'full' walk
|
||||
over a tree, building up an array of ancestor nodes (including the
|
||||
current node) and passing the array to the callbacks as a third
|
||||
parameter.
|
||||
|
||||
```js
|
||||
const acorn = require("acorn")
|
||||
const walk = require("acorn-walk")
|
||||
|
||||
walk.full(acorn.parse("1 + 1"), node => {
|
||||
console.log(`There's a ${node.type} node at ${node.ch}`)
|
||||
})
|
||||
```
|
||||
|
||||
**findNodeAt**`(node, start, end, test, base, state)` tries to locate
|
||||
a node in a tree at the given start and/or end offsets, which
|
||||
satisfies the predicate `test`. `start` and `end` can be either `null`
|
||||
(as wildcard) or a number. `test` may be a string (indicating a node
|
||||
type) or a function that takes `(nodeType, node)` arguments and
|
||||
returns a boolean indicating whether this node is interesting. `base`
|
||||
and `state` are optional, and can be used to specify a custom walker.
|
||||
Nodes are tested from inner to outer, so if two nodes match the
|
||||
boundaries, the inner one will be preferred.
|
||||
|
||||
**findNodeAround**`(node, pos, test, base, state)` is a lot like
`findNodeAt`, but will match any node that exists 'around' (spanning)
the given position.

**findNodeAfter**`(node, pos, test, base, state)` is similar to
`findNodeAround`, but will match all nodes *after* the given position
(testing outer nodes before inner nodes).

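For example (a sketch, not part of the original README), both can be used to look around a given offset:

```js
const acorn = require("acorn")
const walk = require("acorn-walk")

const ast = acorn.parse("foo(1 + 2)")
// Innermost BinaryExpression spanning offset 5.
const around = walk.findNodeAround(ast, 5, "BinaryExpression")
if (around) console.log(around.node.type) // BinaryExpression
// First Literal starting at or after offset 5.
const after = walk.findNodeAfter(ast, 5, "Literal")
if (after) console.log(after.node.value) // 2
```
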
177
node_modules/acorn-walk/dist/walk.d.mts
generated
vendored
Normal file
@ -0,0 +1,177 @@
|
||||
import * as acorn from "acorn"
|
||||
|
||||
export type FullWalkerCallback<TState> = (
|
||||
node: acorn.Node,
|
||||
state: TState,
|
||||
type: string
|
||||
) => void
|
||||
|
||||
export type FullAncestorWalkerCallback<TState> = (
|
||||
node: acorn.Node,
|
||||
state: TState,
|
||||
ancestors: acorn.Node[],
|
||||
type: string
|
||||
) => void
|
||||
|
||||
type AggregateType = {
|
||||
Expression: acorn.Expression,
|
||||
Statement: acorn.Statement,
|
||||
Function: acorn.Function,
|
||||
Class: acorn.Class,
|
||||
Pattern: acorn.Pattern,
|
||||
ForInit: acorn.VariableDeclaration | acorn.Expression
|
||||
}
|
||||
|
||||
export type SimpleVisitors<TState> = {
|
||||
[type in acorn.AnyNode["type"]]?: (node: Extract<acorn.AnyNode, { type: type }>, state: TState) => void
|
||||
} & {
|
||||
[type in keyof AggregateType]?: (node: AggregateType[type], state: TState) => void
|
||||
}
|
||||
|
||||
export type AncestorVisitors<TState> = {
|
||||
[type in acorn.AnyNode["type"]]?: ( node: Extract<acorn.AnyNode, { type: type }>, state: TState, ancestors: acorn.Node[]
|
||||
) => void
|
||||
} & {
|
||||
[type in keyof AggregateType]?: (node: AggregateType[type], state: TState, ancestors: acorn.Node[]) => void
|
||||
}
|
||||
|
||||
export type WalkerCallback<TState> = (node: acorn.Node, state: TState) => void
|
||||
|
||||
export type RecursiveVisitors<TState> = {
|
||||
[type in acorn.AnyNode["type"]]?: ( node: Extract<acorn.AnyNode, { type: type }>, state: TState, callback: WalkerCallback<TState>) => void
|
||||
} & {
|
||||
[type in keyof AggregateType]?: (node: AggregateType[type], state: TState, callback: WalkerCallback<TState>) => void
|
||||
}
|
||||
|
||||
export type FindPredicate = (type: string, node: acorn.Node) => boolean
|
||||
|
||||
export interface Found<TState> {
|
||||
node: acorn.Node,
|
||||
state: TState
|
||||
}
|
||||
|
||||
/**
|
||||
* does a 'simple' walk over a tree
|
||||
* @param node the AST node to walk
|
||||
* @param visitors an object with properties whose names correspond to node types in the {@link https://github.com/estree/estree | ESTree spec}. The properties should contain functions that will be called with the node object and, if applicable the state at that point.
|
||||
* @param base a walker algorithm
|
||||
* @param state a start state. The default walker will simply visit all statements and expressions and not produce a meaningful state. (An example of a use of state is to track scope at each point in the tree.)
|
||||
*/
|
||||
export function simple<TState>(
|
||||
node: acorn.Node,
|
||||
visitors: SimpleVisitors<TState>,
|
||||
base?: RecursiveVisitors<TState>,
|
||||
state?: TState
|
||||
): void
|
||||
|
||||
/**
|
||||
* does a 'simple' walk over a tree, building up an array of ancestor nodes (including the current node) and passing the array to the callbacks as a third parameter.
|
||||
* @param node
|
||||
* @param visitors
|
||||
* @param base
|
||||
* @param state
|
||||
*/
|
||||
export function ancestor<TState>(
|
||||
node: acorn.Node,
|
||||
visitors: AncestorVisitors<TState>,
|
||||
base?: RecursiveVisitors<TState>,
|
||||
state?: TState
|
||||
): void
|
||||
|
||||
/**
|
||||
* does a 'recursive' walk, where the walker functions are responsible for continuing the walk on the child nodes of their target node.
|
||||
* @param node
|
||||
* @param state the start state
|
||||
* @param functions contain an object that maps node types to walker functions
|
||||
* @param base provides the fallback walker functions for node types that aren't handled in the {@link functions} object. If not given, the default walkers will be used.
|
||||
*/
|
||||
export function recursive<TState>(
|
||||
node: acorn.Node,
|
||||
state: TState,
|
||||
functions: RecursiveVisitors<TState>,
|
||||
base?: RecursiveVisitors<TState>
|
||||
): void
|
||||
|
||||
/**
|
||||
* does a 'full' walk over a tree, calling the {@link callback} with the arguments (node, state, type) for each node
|
||||
* @param node
|
||||
* @param callback
|
||||
* @param base
|
||||
* @param state
|
||||
*/
|
||||
export function full<TState>(
|
||||
node: acorn.Node,
|
||||
callback: FullWalkerCallback<TState>,
|
||||
base?: RecursiveVisitors<TState>,
|
||||
state?: TState
|
||||
): void
|
||||
|
||||
/**
|
||||
* does a 'full' walk over a tree, building up an array of ancestor nodes (including the current node) and passing the array to the callbacks as a third parameter.
|
||||
* @param node
|
||||
* @param callback
|
||||
* @param base
|
||||
* @param state
|
||||
*/
|
||||
export function fullAncestor<TState>(
|
||||
node: acorn.Node,
|
||||
callback: FullAncestorWalkerCallback<TState>,
|
||||
base?: RecursiveVisitors<TState>,
|
||||
state?: TState
|
||||
): void
|
||||
|
||||
/**
|
||||
* builds a new walker object by using the walker functions in {@link functions} and filling in the missing ones by taking defaults from {@link base}.
|
||||
* @param functions
|
||||
* @param base
|
||||
*/
|
||||
export function make<TState>(
|
||||
functions: RecursiveVisitors<TState>,
|
||||
base?: RecursiveVisitors<TState>
|
||||
): RecursiveVisitors<TState>
|
||||
|
||||
/**
|
||||
* tries to locate a node in a tree at the given start and/or end offsets, which satisfies the predicate test. {@link start} and {@link end} can be either `null` (as wildcard) or a `number`. {@link test} may be a string (indicating a node type) or a function that takes (nodeType, node) arguments and returns a boolean indicating whether this node is interesting. {@link base} and {@link state} are optional, and can be used to specify a custom walker. Nodes are tested from inner to outer, so if two nodes match the boundaries, the inner one will be preferred.
|
||||
* @param node
|
||||
* @param start
|
||||
* @param end
|
||||
* @param type
|
||||
* @param base
|
||||
* @param state
|
||||
*/
|
||||
export function findNodeAt<TState>(
|
||||
node: acorn.Node,
|
||||
start: number | undefined,
|
||||
end?: number | undefined,
|
||||
type?: FindPredicate | string,
|
||||
base?: RecursiveVisitors<TState>,
|
||||
state?: TState
|
||||
): Found<TState> | undefined
|
||||
|
||||
/**
|
||||
* like {@link findNodeAt}, but will match any node that exists 'around' (spanning) the given position.
|
||||
* @param node
|
||||
* @param start
|
||||
* @param type
|
||||
* @param base
|
||||
* @param state
|
||||
*/
|
||||
export function findNodeAround<TState>(
|
||||
node: acorn.Node,
|
||||
start: number | undefined,
|
||||
type?: FindPredicate | string,
|
||||
base?: RecursiveVisitors<TState>,
|
||||
state?: TState
|
||||
): Found<TState> | undefined
|
||||
|
||||
/**
|
||||
* Find the outermost matching node after a given position.
|
||||
*/
|
||||
export const findNodeAfter: typeof findNodeAround
|
||||
|
||||
/**
|
||||
* Find the outermost matching node before a given position.
|
||||
*/
|
||||
export const findNodeBefore: typeof findNodeAround
|
||||
|
||||
export const base: RecursiveVisitors<any>
|
||||
177
node_modules/acorn-walk/dist/walk.d.ts
generated
vendored
Normal file
@ -0,0 +1,177 @@
|
||||
import * as acorn from "acorn"
|
||||
|
||||
export type FullWalkerCallback<TState> = (
|
||||
node: acorn.Node,
|
||||
state: TState,
|
||||
type: string
|
||||
) => void
|
||||
|
||||
export type FullAncestorWalkerCallback<TState> = (
|
||||
node: acorn.Node,
|
||||
state: TState,
|
||||
ancestors: acorn.Node[],
|
||||
type: string
|
||||
) => void
|
||||
|
||||
type AggregateType = {
|
||||
Expression: acorn.Expression,
|
||||
Statement: acorn.Statement,
|
||||
Function: acorn.Function,
|
||||
Class: acorn.Class,
|
||||
Pattern: acorn.Pattern,
|
||||
ForInit: acorn.VariableDeclaration | acorn.Expression
|
||||
}
|
||||
|
||||
export type SimpleVisitors<TState> = {
|
||||
[type in acorn.AnyNode["type"]]?: (node: Extract<acorn.AnyNode, { type: type }>, state: TState) => void
|
||||
} & {
|
||||
[type in keyof AggregateType]?: (node: AggregateType[type], state: TState) => void
|
||||
}
|
||||
|
||||
export type AncestorVisitors<TState> = {
|
||||
[type in acorn.AnyNode["type"]]?: ( node: Extract<acorn.AnyNode, { type: type }>, state: TState, ancestors: acorn.Node[]
|
||||
) => void
|
||||
} & {
|
||||
[type in keyof AggregateType]?: (node: AggregateType[type], state: TState, ancestors: acorn.Node[]) => void
|
||||
}
|
||||
|
||||
export type WalkerCallback<TState> = (node: acorn.Node, state: TState) => void
|
||||
|
||||
export type RecursiveVisitors<TState> = {
|
||||
[type in acorn.AnyNode["type"]]?: ( node: Extract<acorn.AnyNode, { type: type }>, state: TState, callback: WalkerCallback<TState>) => void
|
||||
} & {
|
||||
[type in keyof AggregateType]?: (node: AggregateType[type], state: TState, callback: WalkerCallback<TState>) => void
|
||||
}
|
||||
|
||||
export type FindPredicate = (type: string, node: acorn.Node) => boolean
|
||||
|
||||
export interface Found<TState> {
|
||||
node: acorn.Node,
|
||||
state: TState
|
||||
}
|
||||
|
||||
/**
|
||||
* does a 'simple' walk over a tree
|
||||
* @param node the AST node to walk
|
||||
* @param visitors an object with properties whose names correspond to node types in the {@link https://github.com/estree/estree | ESTree spec}. The properties should contain functions that will be called with the node object and, if applicable the state at that point.
|
||||
* @param base a walker algorithm
|
||||
* @param state a start state. The default walker will simply visit all statements and expressions and not produce a meaningful state. (An example of a use of state is to track scope at each point in the tree.)
|
||||
*/
|
||||
export function simple<TState>(
|
||||
node: acorn.Node,
|
||||
visitors: SimpleVisitors<TState>,
|
||||
base?: RecursiveVisitors<TState>,
|
||||
state?: TState
|
||||
): void
|
||||
|
||||
/**
|
||||
* does a 'simple' walk over a tree, building up an array of ancestor nodes (including the current node) and passing the array to the callbacks as a third parameter.
|
||||
* @param node
|
||||
* @param visitors
|
||||
* @param base
|
||||
* @param state
|
||||
*/
|
||||
export function ancestor<TState>(
|
||||
node: acorn.Node,
|
||||
visitors: AncestorVisitors<TState>,
|
||||
base?: RecursiveVisitors<TState>,
|
||||
state?: TState
|
||||
): void
|
||||
|
||||
/**
|
||||
* does a 'recursive' walk, where the walker functions are responsible for continuing the walk on the child nodes of their target node.
|
||||
* @param node
|
||||
* @param state the start state
|
||||
* @param functions contain an object that maps node types to walker functions
|
||||
* @param base provides the fallback walker functions for node types that aren't handled in the {@link functions} object. If not given, the default walkers will be used.
|
||||
*/
|
||||
export function recursive<TState>(
|
||||
node: acorn.Node,
|
||||
state: TState,
|
||||
functions: RecursiveVisitors<TState>,
|
||||
base?: RecursiveVisitors<TState>
|
||||
): void
|
||||
|
||||
/**
|
||||
* does a 'full' walk over a tree, calling the {@link callback} with the arguments (node, state, type) for each node
|
||||
* @param node
|
||||
* @param callback
|
||||
* @param base
|
||||
* @param state
|
||||
*/
|
||||
export function full<TState>(
|
||||
node: acorn.Node,
|
||||
callback: FullWalkerCallback<TState>,
|
||||
base?: RecursiveVisitors<TState>,
|
||||
state?: TState
|
||||
): void
|
||||
|
||||
/**
|
||||
* does a 'full' walk over a tree, building up an array of ancestor nodes (including the current node) and passing the array to the callbacks as a third parameter.
|
||||
* @param node
|
||||
* @param callback
|
||||
* @param base
|
||||
* @param state
|
||||
*/
|
||||
export function fullAncestor<TState>(
|
||||
node: acorn.Node,
|
||||
callback: FullAncestorWalkerCallback<TState>,
|
||||
base?: RecursiveVisitors<TState>,
|
||||
state?: TState
|
||||
): void
|
||||
|
||||
/**
|
||||
* builds a new walker object by using the walker functions in {@link functions} and filling in the missing ones by taking defaults from {@link base}.
|
||||
* @param functions
|
||||
* @param base
|
||||
*/
|
||||
export function make<TState>(
|
||||
functions: RecursiveVisitors<TState>,
|
||||
base?: RecursiveVisitors<TState>
|
||||
): RecursiveVisitors<TState>
|
||||
|
||||
/**
|
||||
* tries to locate a node in a tree at the given start and/or end offsets, which satisfies the predicate test. {@link start} and {@link end} can be either `null` (as wildcard) or a `number`. {@link test} may be a string (indicating a node type) or a function that takes (nodeType, node) arguments and returns a boolean indicating whether this node is interesting. {@link base} and {@link state} are optional, and can be used to specify a custom walker. Nodes are tested from inner to outer, so if two nodes match the boundaries, the inner one will be preferred.
|
||||
* @param node
|
||||
* @param start
|
||||
* @param end
|
||||
* @param type
|
||||
* @param base
|
||||
* @param state
|
||||
*/
|
||||
export function findNodeAt<TState>(
|
||||
node: acorn.Node,
|
||||
start: number | undefined,
|
||||
end?: number | undefined,
|
||||
type?: FindPredicate | string,
|
||||
base?: RecursiveVisitors<TState>,
|
||||
state?: TState
|
||||
): Found<TState> | undefined
|
||||
|
||||
/**
|
||||
* like {@link findNodeAt}, but will match any node that exists 'around' (spanning) the given position.
|
||||
* @param node
|
||||
* @param start
|
||||
* @param type
|
||||
* @param base
|
||||
* @param state
|
||||
*/
|
||||
export function findNodeAround<TState>(
|
||||
node: acorn.Node,
|
||||
start: number | undefined,
|
||||
type?: FindPredicate | string,
|
||||
base?: RecursiveVisitors<TState>,
|
||||
state?: TState
|
||||
): Found<TState> | undefined
|
||||
|
||||
/**
|
||||
* Find the outermost matching node after a given position.
|
||||
*/
|
||||
export const findNodeAfter: typeof findNodeAround
|
||||
|
||||
/**
|
||||
* Find the outermost matching node before a given position.
|
||||
*/
|
||||
export const findNodeBefore: typeof findNodeAround
|
||||
|
||||
export const base: RecursiveVisitors<any>
|
||||
455
node_modules/acorn-walk/dist/walk.js
generated
vendored
Normal file
@ -0,0 +1,455 @@
|
||||
(function (global, factory) {
|
||||
typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports) :
|
||||
typeof define === 'function' && define.amd ? define(['exports'], factory) :
|
||||
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory((global.acorn = global.acorn || {}, global.acorn.walk = {})));
|
||||
})(this, (function (exports) { 'use strict';
|
||||
|
||||
// AST walker module for ESTree compatible trees
|
||||
|
||||
// A simple walk is one where you simply specify callbacks to be
|
||||
// called on specific nodes. The last two arguments are optional. A
|
||||
// simple use would be
|
||||
//
|
||||
// walk.simple(myTree, {
|
||||
// Expression: function(node) { ... }
|
||||
// });
|
||||
//
|
||||
// to do something with all expressions. All ESTree node types
|
||||
// can be used to identify node types, as well as Expression and
|
||||
// Statement, which denote categories of nodes.
|
||||
//
|
||||
// The base argument can be used to pass a custom (recursive)
|
||||
// walker, and state can be used to give this walk an initial
|
||||
// state.
|
||||
|
||||
function simple(node, visitors, baseVisitor, state, override) {
|
||||
if (!baseVisitor) { baseVisitor = base
|
||||
; }(function c(node, st, override) {
|
||||
var type = override || node.type;
|
||||
baseVisitor[type](node, st, c);
|
||||
if (visitors[type]) { visitors[type](node, st); }
|
||||
})(node, state, override);
|
||||
}
|
||||
|
||||
// An ancestor walk keeps an array of ancestor nodes (including the
|
||||
// current node) and passes them to the callback as third parameter
|
||||
// (and also as state parameter when no other state is present).
|
||||
function ancestor(node, visitors, baseVisitor, state, override) {
|
||||
var ancestors = [];
|
||||
if (!baseVisitor) { baseVisitor = base
|
||||
; }(function c(node, st, override) {
|
||||
var type = override || node.type;
|
||||
var isNew = node !== ancestors[ancestors.length - 1];
|
||||
if (isNew) { ancestors.push(node); }
|
||||
baseVisitor[type](node, st, c);
|
||||
if (visitors[type]) { visitors[type](node, st || ancestors, ancestors); }
|
||||
if (isNew) { ancestors.pop(); }
|
||||
})(node, state, override);
|
||||
}
|
||||
|
||||
// A recursive walk is one where your functions override the default
|
||||
// walkers. They can modify and replace the state parameter that's
|
||||
// threaded through the walk, and can opt how and whether to walk
|
||||
// their child nodes (by calling their third argument on these
|
||||
// nodes).
|
||||
function recursive(node, state, funcs, baseVisitor, override) {
|
||||
var visitor = funcs ? make(funcs, baseVisitor || undefined) : baseVisitor
|
||||
;(function c(node, st, override) {
|
||||
visitor[override || node.type](node, st, c);
|
||||
})(node, state, override);
|
||||
}
|
||||
|
||||
function makeTest(test) {
|
||||
if (typeof test === "string")
|
||||
{ return function (type) { return type === test; } }
|
||||
else if (!test)
|
||||
{ return function () { return true; } }
|
||||
else
|
||||
{ return test }
|
||||
}
|
||||
|
||||
var Found = function Found(node, state) { this.node = node; this.state = state; };
|
||||
|
||||
// A full walk triggers the callback on each node
|
||||
function full(node, callback, baseVisitor, state, override) {
|
||||
if (!baseVisitor) { baseVisitor = base; }
|
||||
var last
|
||||
;(function c(node, st, override) {
|
||||
var type = override || node.type;
|
||||
baseVisitor[type](node, st, c);
|
||||
if (last !== node) {
|
||||
callback(node, st, type);
|
||||
last = node;
|
||||
}
|
||||
})(node, state, override);
|
||||
}
|
||||
|
||||
// A fullAncestor walk is like an ancestor walk, but triggers
|
||||
// the callback on each node
|
||||
function fullAncestor(node, callback, baseVisitor, state) {
|
||||
if (!baseVisitor) { baseVisitor = base; }
|
||||
var ancestors = [], last
|
||||
;(function c(node, st, override) {
|
||||
var type = override || node.type;
|
||||
var isNew = node !== ancestors[ancestors.length - 1];
|
||||
if (isNew) { ancestors.push(node); }
|
||||
baseVisitor[type](node, st, c);
|
||||
if (last !== node) {
|
||||
callback(node, st || ancestors, ancestors, type);
|
||||
last = node;
|
||||
}
|
||||
if (isNew) { ancestors.pop(); }
|
||||
})(node, state);
|
||||
}
|
||||
|
||||
// Find a node with a given start, end, and type (all are optional,
|
||||
// null can be used as wildcard). Returns a {node, state} object, or
|
||||
// undefined when it doesn't find a matching node.
|
||||
function findNodeAt(node, start, end, test, baseVisitor, state) {
|
||||
if (!baseVisitor) { baseVisitor = base; }
|
||||
test = makeTest(test);
|
||||
try {
|
||||
(function c(node, st, override) {
|
||||
var type = override || node.type;
|
||||
if ((start == null || node.start <= start) &&
|
||||
(end == null || node.end >= end))
|
||||
{ baseVisitor[type](node, st, c); }
|
||||
if ((start == null || node.start === start) &&
|
||||
(end == null || node.end === end) &&
|
||||
test(type, node))
|
||||
{ throw new Found(node, st) }
|
||||
})(node, state);
|
||||
} catch (e) {
|
||||
if (e instanceof Found) { return e }
|
||||
throw e
|
||||
}
|
||||
}
|
||||
|
||||
// Find the innermost node of a given type that contains the given
|
||||
// position. Interface similar to findNodeAt.
|
||||
function findNodeAround(node, pos, test, baseVisitor, state) {
|
||||
test = makeTest(test);
|
||||
if (!baseVisitor) { baseVisitor = base; }
|
||||
try {
|
||||
(function c(node, st, override) {
|
||||
var type = override || node.type;
|
||||
if (node.start > pos || node.end < pos) { return }
|
||||
baseVisitor[type](node, st, c);
|
||||
if (test(type, node)) { throw new Found(node, st) }
|
||||
})(node, state);
|
||||
} catch (e) {
|
||||
if (e instanceof Found) { return e }
|
||||
throw e
|
||||
}
|
||||
}
|
||||
|
||||
// Find the outermost matching node after a given position.
|
||||
function findNodeAfter(node, pos, test, baseVisitor, state) {
|
||||
test = makeTest(test);
|
||||
if (!baseVisitor) { baseVisitor = base; }
|
||||
try {
|
||||
(function c(node, st, override) {
|
||||
if (node.end < pos) { return }
|
||||
var type = override || node.type;
|
||||
if (node.start >= pos && test(type, node)) { throw new Found(node, st) }
|
||||
baseVisitor[type](node, st, c);
|
||||
})(node, state);
|
||||
} catch (e) {
|
||||
if (e instanceof Found) { return e }
|
||||
throw e
|
||||
}
|
||||
}
|
||||
|
||||
// Find the outermost matching node before a given position.
|
||||
function findNodeBefore(node, pos, test, baseVisitor, state) {
|
||||
test = makeTest(test);
|
||||
if (!baseVisitor) { baseVisitor = base; }
|
||||
var max
|
||||
;(function c(node, st, override) {
|
||||
if (node.start > pos) { return }
|
||||
var type = override || node.type;
|
||||
if (node.end <= pos && (!max || max.node.end < node.end) && test(type, node))
|
||||
{ max = new Found(node, st); }
|
||||
baseVisitor[type](node, st, c);
|
||||
})(node, state);
|
||||
return max
|
||||
}
|
||||
|
||||
// Used to create a custom walker. Will fill in all missing node
|
||||
// type properties with the defaults.
|
||||
function make(funcs, baseVisitor) {
|
||||
var visitor = Object.create(baseVisitor || base);
|
||||
for (var type in funcs) { visitor[type] = funcs[type]; }
|
||||
return visitor
|
||||
}
|
||||
|
||||
function skipThrough(node, st, c) { c(node, st); }
|
||||
function ignore(_node, _st, _c) {}
|
||||
|
||||
// Node walkers.
|
||||
|
||||
var base = {};
|
||||
|
||||
base.Program = base.BlockStatement = base.StaticBlock = function (node, st, c) {
|
||||
for (var i = 0, list = node.body; i < list.length; i += 1)
|
||||
{
|
||||
var stmt = list[i];
|
||||
|
||||
c(stmt, st, "Statement");
|
||||
}
|
||||
};
|
||||
base.Statement = skipThrough;
|
||||
base.EmptyStatement = ignore;
|
||||
base.ExpressionStatement = base.ParenthesizedExpression = base.ChainExpression =
|
||||
function (node, st, c) { return c(node.expression, st, "Expression"); };
|
||||
base.IfStatement = function (node, st, c) {
|
||||
c(node.test, st, "Expression");
|
||||
c(node.consequent, st, "Statement");
|
||||
if (node.alternate) { c(node.alternate, st, "Statement"); }
|
||||
};
|
||||
base.LabeledStatement = function (node, st, c) { return c(node.body, st, "Statement"); };
|
||||
base.BreakStatement = base.ContinueStatement = ignore;
|
||||
base.WithStatement = function (node, st, c) {
|
||||
c(node.object, st, "Expression");
|
||||
c(node.body, st, "Statement");
|
||||
};
|
||||
base.SwitchStatement = function (node, st, c) {
|
||||
c(node.discriminant, st, "Expression");
|
||||
for (var i = 0, list = node.cases; i < list.length; i += 1) {
|
||||
var cs = list[i];
|
||||
|
||||
c(cs, st);
|
||||
}
|
||||
};
|
||||
base.SwitchCase = function (node, st, c) {
|
||||
if (node.test) { c(node.test, st, "Expression"); }
|
||||
for (var i = 0, list = node.consequent; i < list.length; i += 1)
|
||||
{
|
||||
var cons = list[i];
|
||||
|
||||
c(cons, st, "Statement");
|
||||
}
|
||||
};
|
||||
base.ReturnStatement = base.YieldExpression = base.AwaitExpression = function (node, st, c) {
|
||||
if (node.argument) { c(node.argument, st, "Expression"); }
|
||||
};
|
||||
base.ThrowStatement = base.SpreadElement =
|
||||
function (node, st, c) { return c(node.argument, st, "Expression"); };
|
||||
base.TryStatement = function (node, st, c) {
|
||||
c(node.block, st, "Statement");
|
||||
if (node.handler) { c(node.handler, st); }
|
||||
if (node.finalizer) { c(node.finalizer, st, "Statement"); }
|
||||
};
|
||||
base.CatchClause = function (node, st, c) {
|
||||
if (node.param) { c(node.param, st, "Pattern"); }
|
||||
c(node.body, st, "Statement");
|
||||
};
|
||||
base.WhileStatement = base.DoWhileStatement = function (node, st, c) {
|
||||
c(node.test, st, "Expression");
|
||||
c(node.body, st, "Statement");
|
||||
};
|
||||
base.ForStatement = function (node, st, c) {
|
||||
if (node.init) { c(node.init, st, "ForInit"); }
|
||||
if (node.test) { c(node.test, st, "Expression"); }
|
||||
if (node.update) { c(node.update, st, "Expression"); }
|
||||
c(node.body, st, "Statement");
|
||||
};
|
||||
base.ForInStatement = base.ForOfStatement = function (node, st, c) {
|
||||
c(node.left, st, "ForInit");
|
||||
c(node.right, st, "Expression");
|
||||
c(node.body, st, "Statement");
|
||||
};
|
||||
base.ForInit = function (node, st, c) {
|
||||
if (node.type === "VariableDeclaration") { c(node, st); }
|
||||
else { c(node, st, "Expression"); }
|
||||
};
|
||||
base.DebuggerStatement = ignore;
|
||||
|
||||
base.FunctionDeclaration = function (node, st, c) { return c(node, st, "Function"); };
|
||||
base.VariableDeclaration = function (node, st, c) {
|
||||
for (var i = 0, list = node.declarations; i < list.length; i += 1)
|
||||
{
|
||||
var decl = list[i];
|
||||
|
||||
c(decl, st);
|
||||
}
|
||||
};
|
||||
base.VariableDeclarator = function (node, st, c) {
|
||||
c(node.id, st, "Pattern");
|
||||
if (node.init) { c(node.init, st, "Expression"); }
|
||||
};
|
||||
|
||||
base.Function = function (node, st, c) {
|
||||
if (node.id) { c(node.id, st, "Pattern"); }
|
||||
for (var i = 0, list = node.params; i < list.length; i += 1)
|
||||
{
|
||||
var param = list[i];
|
||||
|
||||
c(param, st, "Pattern");
|
||||
}
|
||||
c(node.body, st, node.expression ? "Expression" : "Statement");
|
||||
};
|
||||
|
||||
base.Pattern = function (node, st, c) {
|
||||
if (node.type === "Identifier")
|
||||
{ c(node, st, "VariablePattern"); }
|
||||
else if (node.type === "MemberExpression")
|
||||
{ c(node, st, "MemberPattern"); }
|
||||
else
|
||||
{ c(node, st); }
|
||||
};
|
||||
base.VariablePattern = ignore;
|
||||
base.MemberPattern = skipThrough;
|
||||
base.RestElement = function (node, st, c) { return c(node.argument, st, "Pattern"); };
|
||||
base.ArrayPattern = function (node, st, c) {
|
||||
for (var i = 0, list = node.elements; i < list.length; i += 1) {
|
||||
var elt = list[i];
|
||||
|
||||
if (elt) { c(elt, st, "Pattern"); }
|
||||
}
|
||||
};
|
||||
base.ObjectPattern = function (node, st, c) {
|
||||
for (var i = 0, list = node.properties; i < list.length; i += 1) {
|
||||
var prop = list[i];
|
||||
|
||||
if (prop.type === "Property") {
|
||||
if (prop.computed) { c(prop.key, st, "Expression"); }
|
||||
c(prop.value, st, "Pattern");
|
||||
} else if (prop.type === "RestElement") {
|
||||
c(prop.argument, st, "Pattern");
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
base.Expression = skipThrough;
|
||||
base.ThisExpression = base.Super = base.MetaProperty = ignore;
|
||||
base.ArrayExpression = function (node, st, c) {
|
||||
for (var i = 0, list = node.elements; i < list.length; i += 1) {
|
||||
var elt = list[i];
|
||||
|
||||
if (elt) { c(elt, st, "Expression"); }
|
||||
}
|
||||
};
|
||||
base.ObjectExpression = function (node, st, c) {
|
||||
for (var i = 0, list = node.properties; i < list.length; i += 1)
|
||||
{
|
||||
var prop = list[i];
|
||||
|
||||
c(prop, st);
|
||||
}
|
||||
};
|
||||
base.FunctionExpression = base.ArrowFunctionExpression = base.FunctionDeclaration;
|
||||
base.SequenceExpression = function (node, st, c) {
|
||||
for (var i = 0, list = node.expressions; i < list.length; i += 1)
|
||||
{
|
||||
var expr = list[i];
|
||||
|
||||
c(expr, st, "Expression");
|
||||
}
|
||||
};
|
||||
base.TemplateLiteral = function (node, st, c) {
|
||||
for (var i = 0, list = node.quasis; i < list.length; i += 1)
|
||||
{
|
||||
var quasi = list[i];
|
||||
|
||||
c(quasi, st);
|
||||
}
|
||||
|
||||
for (var i$1 = 0, list$1 = node.expressions; i$1 < list$1.length; i$1 += 1)
|
||||
{
|
||||
var expr = list$1[i$1];
|
||||
|
||||
c(expr, st, "Expression");
|
||||
}
|
||||
};
|
||||
base.TemplateElement = ignore;
|
||||
base.UnaryExpression = base.UpdateExpression = function (node, st, c) {
|
||||
c(node.argument, st, "Expression");
|
||||
};
|
||||
base.BinaryExpression = base.LogicalExpression = function (node, st, c) {
|
||||
c(node.left, st, "Expression");
|
||||
c(node.right, st, "Expression");
|
||||
};
|
||||
base.AssignmentExpression = base.AssignmentPattern = function (node, st, c) {
|
||||
c(node.left, st, "Pattern");
|
||||
c(node.right, st, "Expression");
|
||||
};
|
||||
base.ConditionalExpression = function (node, st, c) {
|
||||
c(node.test, st, "Expression");
|
||||
c(node.consequent, st, "Expression");
|
||||
c(node.alternate, st, "Expression");
|
||||
};
|
||||
base.NewExpression = base.CallExpression = function (node, st, c) {
|
||||
c(node.callee, st, "Expression");
|
||||
if (node.arguments)
|
||||
{ for (var i = 0, list = node.arguments; i < list.length; i += 1)
|
||||
{
|
||||
var arg = list[i];
|
||||
|
||||
c(arg, st, "Expression");
|
||||
} }
|
||||
};
|
||||
base.MemberExpression = function (node, st, c) {
|
||||
c(node.object, st, "Expression");
|
||||
if (node.computed) { c(node.property, st, "Expression"); }
|
||||
};
|
||||
base.ExportNamedDeclaration = base.ExportDefaultDeclaration = function (node, st, c) {
|
||||
if (node.declaration)
|
||||
{ c(node.declaration, st, node.type === "ExportNamedDeclaration" || node.declaration.id ? "Statement" : "Expression"); }
|
||||
if (node.source) { c(node.source, st, "Expression"); }
|
||||
};
|
||||
base.ExportAllDeclaration = function (node, st, c) {
|
||||
if (node.exported)
|
||||
{ c(node.exported, st); }
|
||||
c(node.source, st, "Expression");
|
||||
};
|
||||
base.ImportDeclaration = function (node, st, c) {
|
||||
for (var i = 0, list = node.specifiers; i < list.length; i += 1)
|
||||
{
|
||||
var spec = list[i];
|
||||
|
||||
c(spec, st);
|
||||
}
|
||||
c(node.source, st, "Expression");
|
||||
};
|
||||
base.ImportExpression = function (node, st, c) {
|
||||
c(node.source, st, "Expression");
|
||||
};
|
||||
base.ImportSpecifier = base.ImportDefaultSpecifier = base.ImportNamespaceSpecifier = base.Identifier = base.PrivateIdentifier = base.Literal = ignore;
|
||||
|
||||
base.TaggedTemplateExpression = function (node, st, c) {
|
||||
c(node.tag, st, "Expression");
|
||||
c(node.quasi, st, "Expression");
|
||||
};
|
||||
base.ClassDeclaration = base.ClassExpression = function (node, st, c) { return c(node, st, "Class"); };
|
||||
base.Class = function (node, st, c) {
|
||||
if (node.id) { c(node.id, st, "Pattern"); }
|
||||
if (node.superClass) { c(node.superClass, st, "Expression"); }
|
||||
c(node.body, st);
|
||||
};
|
||||
base.ClassBody = function (node, st, c) {
|
||||
for (var i = 0, list = node.body; i < list.length; i += 1)
|
||||
{
|
||||
var elt = list[i];
|
||||
|
||||
c(elt, st);
|
||||
}
|
||||
};
|
||||
base.MethodDefinition = base.PropertyDefinition = base.Property = function (node, st, c) {
|
||||
if (node.computed) { c(node.key, st, "Expression"); }
|
||||
if (node.value) { c(node.value, st, "Expression"); }
|
||||
};
|
||||
|
||||
exports.ancestor = ancestor;
|
||||
exports.base = base;
|
||||
exports.findNodeAfter = findNodeAfter;
|
||||
exports.findNodeAround = findNodeAround;
|
||||
exports.findNodeAt = findNodeAt;
|
||||
exports.findNodeBefore = findNodeBefore;
|
||||
exports.full = full;
|
||||
exports.fullAncestor = fullAncestor;
|
||||
exports.make = make;
|
||||
exports.recursive = recursive;
|
||||
exports.simple = simple;
|
||||
|
||||
}));
|
||||
437
node_modules/acorn-walk/dist/walk.mjs
generated
vendored
Normal file
@ -0,0 +1,437 @@
|
||||
// AST walker module for ESTree compatible trees
|
||||
|
||||
// A simple walk is one where you simply specify callbacks to be
|
||||
// called on specific nodes. The last two arguments are optional. A
|
||||
// simple use would be
|
||||
//
|
||||
// walk.simple(myTree, {
|
||||
// Expression: function(node) { ... }
|
||||
// });
|
||||
//
|
||||
// to do something with all expressions. All ESTree node types
|
||||
// can be used to identify node types, as well as Expression and
|
||||
// Statement, which denote categories of nodes.
|
||||
//
|
||||
// The base argument can be used to pass a custom (recursive)
|
||||
// walker, and state can be used to give this walk an initial
|
||||
// state.
|
||||
|
||||
function simple(node, visitors, baseVisitor, state, override) {
|
||||
if (!baseVisitor) { baseVisitor = base
|
||||
; }(function c(node, st, override) {
|
||||
var type = override || node.type;
|
||||
baseVisitor[type](node, st, c);
|
||||
if (visitors[type]) { visitors[type](node, st); }
|
||||
})(node, state, override);
|
||||
}
|
||||
|
||||
// An ancestor walk keeps an array of ancestor nodes (including the
|
||||
// current node) and passes them to the callback as third parameter
|
||||
// (and also as state parameter when no other state is present).
|
||||
function ancestor(node, visitors, baseVisitor, state, override) {
|
||||
var ancestors = [];
|
||||
if (!baseVisitor) { baseVisitor = base
|
||||
; }(function c(node, st, override) {
|
||||
var type = override || node.type;
|
||||
var isNew = node !== ancestors[ancestors.length - 1];
|
||||
if (isNew) { ancestors.push(node); }
|
||||
baseVisitor[type](node, st, c);
|
||||
if (visitors[type]) { visitors[type](node, st || ancestors, ancestors); }
|
||||
if (isNew) { ancestors.pop(); }
|
||||
})(node, state, override);
|
||||
}
|
||||
|
||||
// A recursive walk is one where your functions override the default
|
||||
// walkers. They can modify and replace the state parameter that's
|
||||
// threaded through the walk, and can opt how and whether to walk
|
||||
// their child nodes (by calling their third argument on these
|
||||
// nodes).
|
||||
function recursive(node, state, funcs, baseVisitor, override) {
|
||||
var visitor = funcs ? make(funcs, baseVisitor || undefined) : baseVisitor
|
||||
;(function c(node, st, override) {
|
||||
visitor[override || node.type](node, st, c);
|
||||
})(node, state, override);
|
||||
}
|
||||
|
||||
function makeTest(test) {
|
||||
if (typeof test === "string")
|
||||
{ return function (type) { return type === test; } }
|
||||
else if (!test)
|
||||
{ return function () { return true; } }
|
||||
else
|
||||
{ return test }
|
||||
}
|
||||
|
||||
var Found = function Found(node, state) { this.node = node; this.state = state; };
|
||||
|
||||
// A full walk triggers the callback on each node
|
||||
function full(node, callback, baseVisitor, state, override) {
|
||||
if (!baseVisitor) { baseVisitor = base; }
|
||||
var last
|
||||
;(function c(node, st, override) {
|
||||
var type = override || node.type;
|
||||
baseVisitor[type](node, st, c);
|
||||
if (last !== node) {
|
||||
callback(node, st, type);
|
||||
last = node;
|
||||
}
|
||||
})(node, state, override);
|
||||
}
|
||||
|
||||
// A fullAncestor walk is like an ancestor walk, but triggers
|
||||
// the callback on each node
|
||||
function fullAncestor(node, callback, baseVisitor, state) {
|
||||
if (!baseVisitor) { baseVisitor = base; }
|
||||
var ancestors = [], last
|
||||
;(function c(node, st, override) {
|
||||
var type = override || node.type;
|
||||
var isNew = node !== ancestors[ancestors.length - 1];
|
||||
if (isNew) { ancestors.push(node); }
|
||||
baseVisitor[type](node, st, c);
|
||||
if (last !== node) {
|
||||
callback(node, st || ancestors, ancestors, type);
|
||||
last = node;
|
||||
}
|
||||
if (isNew) { ancestors.pop(); }
|
||||
})(node, state);
|
||||
}
|
||||
|
||||
// Find a node with a given start, end, and type (all are optional,
|
||||
// null can be used as wildcard). Returns a {node, state} object, or
|
||||
// undefined when it doesn't find a matching node.
|
||||
function findNodeAt(node, start, end, test, baseVisitor, state) {
|
||||
if (!baseVisitor) { baseVisitor = base; }
|
||||
test = makeTest(test);
|
||||
try {
|
||||
(function c(node, st, override) {
|
||||
var type = override || node.type;
|
||||
if ((start == null || node.start <= start) &&
|
||||
(end == null || node.end >= end))
|
||||
{ baseVisitor[type](node, st, c); }
|
||||
if ((start == null || node.start === start) &&
|
||||
(end == null || node.end === end) &&
|
||||
test(type, node))
|
||||
{ throw new Found(node, st) }
|
||||
})(node, state);
|
||||
} catch (e) {
|
||||
if (e instanceof Found) { return e }
|
||||
throw e
|
||||
}
|
||||
}
|
||||
|
||||
// Find the innermost node of a given type that contains the given
|
||||
// position. Interface similar to findNodeAt.
|
||||
function findNodeAround(node, pos, test, baseVisitor, state) {
|
||||
test = makeTest(test);
|
||||
if (!baseVisitor) { baseVisitor = base; }
|
||||
try {
|
||||
(function c(node, st, override) {
|
||||
var type = override || node.type;
|
||||
if (node.start > pos || node.end < pos) { return }
|
||||
baseVisitor[type](node, st, c);
|
||||
if (test(type, node)) { throw new Found(node, st) }
|
||||
})(node, state);
|
||||
} catch (e) {
|
||||
if (e instanceof Found) { return e }
|
||||
throw e
|
||||
}
|
||||
}
|
||||
|
||||
// Find the outermost matching node after a given position.
|
||||
function findNodeAfter(node, pos, test, baseVisitor, state) {
|
||||
test = makeTest(test);
|
||||
if (!baseVisitor) { baseVisitor = base; }
|
||||
try {
|
||||
(function c(node, st, override) {
|
||||
if (node.end < pos) { return }
|
||||
var type = override || node.type;
|
||||
if (node.start >= pos && test(type, node)) { throw new Found(node, st) }
|
||||
baseVisitor[type](node, st, c);
|
||||
})(node, state);
|
||||
} catch (e) {
|
||||
if (e instanceof Found) { return e }
|
||||
throw e
|
||||
}
|
||||
}
|
||||
|
||||
// Find the outermost matching node before a given position.
|
||||
function findNodeBefore(node, pos, test, baseVisitor, state) {
|
||||
test = makeTest(test);
|
||||
if (!baseVisitor) { baseVisitor = base; }
|
||||
var max
|
||||
;(function c(node, st, override) {
|
||||
if (node.start > pos) { return }
|
||||
var type = override || node.type;
|
||||
if (node.end <= pos && (!max || max.node.end < node.end) && test(type, node))
|
||||
{ max = new Found(node, st); }
|
||||
baseVisitor[type](node, st, c);
|
||||
})(node, state);
|
||||
return max
|
||||
}
|
||||
|
||||
// Used to create a custom walker. Will fill in all missing node
|
||||
// type properties with the defaults.
|
||||
function make(funcs, baseVisitor) {
|
||||
var visitor = Object.create(baseVisitor || base);
|
||||
for (var type in funcs) { visitor[type] = funcs[type]; }
|
||||
return visitor
|
||||
}
|
||||
|
||||
function skipThrough(node, st, c) { c(node, st); }
|
||||
function ignore(_node, _st, _c) {}
|
||||
|
||||
// Node walkers.
|
||||
|
||||
var base = {};
|
||||
|
||||
base.Program = base.BlockStatement = base.StaticBlock = function (node, st, c) {
|
||||
for (var i = 0, list = node.body; i < list.length; i += 1)
|
||||
{
|
||||
var stmt = list[i];
|
||||
|
||||
c(stmt, st, "Statement");
|
||||
}
|
||||
};
|
||||
base.Statement = skipThrough;
|
||||
base.EmptyStatement = ignore;
|
||||
base.ExpressionStatement = base.ParenthesizedExpression = base.ChainExpression =
|
||||
function (node, st, c) { return c(node.expression, st, "Expression"); };
|
||||
base.IfStatement = function (node, st, c) {
|
||||
c(node.test, st, "Expression");
|
||||
c(node.consequent, st, "Statement");
|
||||
if (node.alternate) { c(node.alternate, st, "Statement"); }
|
||||
};
|
||||
base.LabeledStatement = function (node, st, c) { return c(node.body, st, "Statement"); };
|
||||
base.BreakStatement = base.ContinueStatement = ignore;
|
||||
base.WithStatement = function (node, st, c) {
|
||||
c(node.object, st, "Expression");
|
||||
c(node.body, st, "Statement");
|
||||
};
|
||||
base.SwitchStatement = function (node, st, c) {
|
||||
c(node.discriminant, st, "Expression");
|
||||
for (var i = 0, list = node.cases; i < list.length; i += 1) {
|
||||
var cs = list[i];
|
||||
|
||||
c(cs, st);
|
||||
}
|
||||
};
|
||||
base.SwitchCase = function (node, st, c) {
|
||||
if (node.test) { c(node.test, st, "Expression"); }
|
||||
for (var i = 0, list = node.consequent; i < list.length; i += 1)
|
||||
{
|
||||
var cons = list[i];
|
||||
|
||||
c(cons, st, "Statement");
|
||||
}
|
||||
};
|
||||
base.ReturnStatement = base.YieldExpression = base.AwaitExpression = function (node, st, c) {
|
||||
if (node.argument) { c(node.argument, st, "Expression"); }
|
||||
};
|
||||
base.ThrowStatement = base.SpreadElement =
|
||||
function (node, st, c) { return c(node.argument, st, "Expression"); };
|
||||
base.TryStatement = function (node, st, c) {
|
||||
c(node.block, st, "Statement");
|
||||
if (node.handler) { c(node.handler, st); }
|
||||
if (node.finalizer) { c(node.finalizer, st, "Statement"); }
|
||||
};
|
||||
base.CatchClause = function (node, st, c) {
|
||||
if (node.param) { c(node.param, st, "Pattern"); }
|
||||
c(node.body, st, "Statement");
|
||||
};
|
||||
base.WhileStatement = base.DoWhileStatement = function (node, st, c) {
|
||||
c(node.test, st, "Expression");
|
||||
c(node.body, st, "Statement");
|
||||
};
|
||||
base.ForStatement = function (node, st, c) {
|
||||
if (node.init) { c(node.init, st, "ForInit"); }
|
||||
if (node.test) { c(node.test, st, "Expression"); }
|
||||
if (node.update) { c(node.update, st, "Expression"); }
|
||||
c(node.body, st, "Statement");
|
||||
};
|
||||
base.ForInStatement = base.ForOfStatement = function (node, st, c) {
|
||||
c(node.left, st, "ForInit");
|
||||
c(node.right, st, "Expression");
|
||||
c(node.body, st, "Statement");
|
||||
};
|
||||
base.ForInit = function (node, st, c) {
|
||||
if (node.type === "VariableDeclaration") { c(node, st); }
|
||||
else { c(node, st, "Expression"); }
|
||||
};
|
||||
base.DebuggerStatement = ignore;
|
||||
|
||||
base.FunctionDeclaration = function (node, st, c) { return c(node, st, "Function"); };
|
||||
base.VariableDeclaration = function (node, st, c) {
|
||||
for (var i = 0, list = node.declarations; i < list.length; i += 1)
|
||||
{
|
||||
var decl = list[i];
|
||||
|
||||
c(decl, st);
|
||||
}
|
||||
};
|
||||
base.VariableDeclarator = function (node, st, c) {
|
||||
c(node.id, st, "Pattern");
|
||||
if (node.init) { c(node.init, st, "Expression"); }
|
||||
};
|
||||
|
||||
base.Function = function (node, st, c) {
|
||||
if (node.id) { c(node.id, st, "Pattern"); }
|
||||
for (var i = 0, list = node.params; i < list.length; i += 1)
|
||||
{
|
||||
var param = list[i];
|
||||
|
||||
c(param, st, "Pattern");
|
||||
}
|
||||
c(node.body, st, node.expression ? "Expression" : "Statement");
|
||||
};
|
||||
|
||||
base.Pattern = function (node, st, c) {
|
||||
if (node.type === "Identifier")
|
||||
{ c(node, st, "VariablePattern"); }
|
||||
else if (node.type === "MemberExpression")
|
||||
{ c(node, st, "MemberPattern"); }
|
||||
else
|
||||
{ c(node, st); }
|
||||
};
|
||||
base.VariablePattern = ignore;
|
||||
base.MemberPattern = skipThrough;
|
||||
base.RestElement = function (node, st, c) { return c(node.argument, st, "Pattern"); };
|
||||
base.ArrayPattern = function (node, st, c) {
|
||||
for (var i = 0, list = node.elements; i < list.length; i += 1) {
|
||||
var elt = list[i];
|
||||
|
||||
if (elt) { c(elt, st, "Pattern"); }
|
||||
}
|
||||
};
|
||||
base.ObjectPattern = function (node, st, c) {
|
||||
for (var i = 0, list = node.properties; i < list.length; i += 1) {
|
||||
var prop = list[i];
|
||||
|
||||
if (prop.type === "Property") {
|
||||
if (prop.computed) { c(prop.key, st, "Expression"); }
|
||||
c(prop.value, st, "Pattern");
|
||||
} else if (prop.type === "RestElement") {
|
||||
c(prop.argument, st, "Pattern");
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
base.Expression = skipThrough;
|
||||
base.ThisExpression = base.Super = base.MetaProperty = ignore;
|
||||
base.ArrayExpression = function (node, st, c) {
|
||||
for (var i = 0, list = node.elements; i < list.length; i += 1) {
|
||||
var elt = list[i];
|
||||
|
||||
if (elt) { c(elt, st, "Expression"); }
|
||||
}
|
||||
};
|
||||
base.ObjectExpression = function (node, st, c) {
|
||||
for (var i = 0, list = node.properties; i < list.length; i += 1)
|
||||
{
|
||||
var prop = list[i];
|
||||
|
||||
c(prop, st);
|
||||
}
|
||||
};
|
||||
base.FunctionExpression = base.ArrowFunctionExpression = base.FunctionDeclaration;
|
||||
base.SequenceExpression = function (node, st, c) {
|
||||
for (var i = 0, list = node.expressions; i < list.length; i += 1)
|
||||
{
|
||||
var expr = list[i];
|
||||
|
||||
c(expr, st, "Expression");
|
||||
}
|
||||
};
|
||||
base.TemplateLiteral = function (node, st, c) {
|
||||
for (var i = 0, list = node.quasis; i < list.length; i += 1)
|
||||
{
|
||||
var quasi = list[i];
|
||||
|
||||
c(quasi, st);
|
||||
}
|
||||
|
||||
for (var i$1 = 0, list$1 = node.expressions; i$1 < list$1.length; i$1 += 1)
|
||||
{
|
||||
var expr = list$1[i$1];
|
||||
|
||||
c(expr, st, "Expression");
|
||||
}
|
||||
};
|
||||
base.TemplateElement = ignore;
|
||||
base.UnaryExpression = base.UpdateExpression = function (node, st, c) {
|
||||
c(node.argument, st, "Expression");
|
||||
};
|
||||
base.BinaryExpression = base.LogicalExpression = function (node, st, c) {
|
||||
c(node.left, st, "Expression");
|
||||
c(node.right, st, "Expression");
|
||||
};
|
||||
base.AssignmentExpression = base.AssignmentPattern = function (node, st, c) {
|
||||
c(node.left, st, "Pattern");
|
||||
c(node.right, st, "Expression");
|
||||
};
|
||||
base.ConditionalExpression = function (node, st, c) {
|
||||
c(node.test, st, "Expression");
|
||||
c(node.consequent, st, "Expression");
|
||||
c(node.alternate, st, "Expression");
|
||||
};
|
||||
base.NewExpression = base.CallExpression = function (node, st, c) {
|
||||
c(node.callee, st, "Expression");
|
||||
if (node.arguments)
|
||||
{ for (var i = 0, list = node.arguments; i < list.length; i += 1)
|
||||
{
|
||||
var arg = list[i];
|
||||
|
||||
c(arg, st, "Expression");
|
||||
} }
|
||||
};
|
||||
base.MemberExpression = function (node, st, c) {
|
||||
c(node.object, st, "Expression");
|
||||
if (node.computed) { c(node.property, st, "Expression"); }
|
||||
};
|
||||
base.ExportNamedDeclaration = base.ExportDefaultDeclaration = function (node, st, c) {
|
||||
if (node.declaration)
|
||||
{ c(node.declaration, st, node.type === "ExportNamedDeclaration" || node.declaration.id ? "Statement" : "Expression"); }
|
||||
if (node.source) { c(node.source, st, "Expression"); }
|
||||
};
|
||||
base.ExportAllDeclaration = function (node, st, c) {
|
||||
if (node.exported)
|
||||
{ c(node.exported, st); }
|
||||
c(node.source, st, "Expression");
|
||||
};
|
||||
base.ImportDeclaration = function (node, st, c) {
|
||||
for (var i = 0, list = node.specifiers; i < list.length; i += 1)
|
||||
{
|
||||
var spec = list[i];
|
||||
|
||||
c(spec, st);
|
||||
}
|
||||
c(node.source, st, "Expression");
|
||||
};
|
||||
base.ImportExpression = function (node, st, c) {
|
||||
c(node.source, st, "Expression");
|
||||
};
|
||||
base.ImportSpecifier = base.ImportDefaultSpecifier = base.ImportNamespaceSpecifier = base.Identifier = base.PrivateIdentifier = base.Literal = ignore;
|
||||
|
||||
base.TaggedTemplateExpression = function (node, st, c) {
|
||||
c(node.tag, st, "Expression");
|
||||
c(node.quasi, st, "Expression");
|
||||
};
|
||||
base.ClassDeclaration = base.ClassExpression = function (node, st, c) { return c(node, st, "Class"); };
|
||||
base.Class = function (node, st, c) {
|
||||
if (node.id) { c(node.id, st, "Pattern"); }
|
||||
if (node.superClass) { c(node.superClass, st, "Expression"); }
|
||||
c(node.body, st);
|
||||
};
|
||||
base.ClassBody = function (node, st, c) {
|
||||
for (var i = 0, list = node.body; i < list.length; i += 1)
|
||||
{
|
||||
var elt = list[i];
|
||||
|
||||
c(elt, st);
|
||||
}
|
||||
};
|
||||
base.MethodDefinition = base.PropertyDefinition = base.Property = function (node, st, c) {
|
||||
if (node.computed) { c(node.key, st, "Expression"); }
|
||||
if (node.value) { c(node.value, st, "Expression"); }
|
||||
};
|
||||
|
||||
export { ancestor, base, findNodeAfter, findNodeAround, findNodeAt, findNodeBefore, full, fullAncestor, make, recursive, simple };
|
||||
50
node_modules/acorn-walk/package.json
generated
vendored
Normal file
@ -0,0 +1,50 @@
|
||||
{
|
||||
"name": "acorn-walk",
|
||||
"description": "ECMAScript (ESTree) AST walker",
|
||||
"homepage": "https://github.com/acornjs/acorn",
|
||||
"main": "dist/walk.js",
|
||||
"types": "dist/walk.d.ts",
|
||||
"module": "dist/walk.mjs",
|
||||
"exports": {
|
||||
".": [
|
||||
{
|
||||
"import": "./dist/walk.mjs",
|
||||
"require": "./dist/walk.js",
|
||||
"default": "./dist/walk.js"
|
||||
},
|
||||
"./dist/walk.js"
|
||||
],
|
||||
"./package.json": "./package.json"
|
||||
},
|
||||
"version": "8.3.4",
|
||||
"engines": {
|
||||
"node": ">=0.4.0"
|
||||
},
|
||||
"dependencies": {
|
||||
"acorn": "^8.11.0"
|
||||
},
|
||||
"maintainers": [
|
||||
{
|
||||
"name": "Marijn Haverbeke",
|
||||
"email": "marijnh@gmail.com",
|
||||
"web": "https://marijnhaverbeke.nl"
|
||||
},
|
||||
{
|
||||
"name": "Ingvar Stepanyan",
|
||||
"email": "me@rreverser.com",
|
||||
"web": "https://rreverser.com/"
|
||||
},
|
||||
{
|
||||
"name": "Adrian Heine",
|
||||
"web": "http://adrianheine.de"
|
||||
}
|
||||
],
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/acornjs/acorn.git"
|
||||
},
|
||||
"scripts": {
|
||||
"prepare": "cd ..; npm run build:walk"
|
||||
},
|
||||
"license": "MIT"
|
||||
}
|
||||
940
node_modules/acorn/CHANGELOG.md
generated
vendored
Normal file
@ -0,0 +1,940 @@
|
||||
## 8.14.1 (2025-03-05)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix an issue where `await` expressions in class field initializers were inappropriately allowed.
|
||||
|
||||
Properly allow await inside an async arrow function inside a class field initializer.
|
||||
|
||||
Mention the source file name in syntax error messages when given.
|
||||
|
||||
Properly add an empty `attributes` property to every form of `ExportNamedDeclaration`.
|
||||
|
||||
## 8.14.0 (2024-10-27)
|
||||
|
||||
### New features
|
||||
|
||||
Support ES2025 import attributes.
|
||||
|
||||
Support ES2025 RegExp modifiers.
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Support some missing Unicode properties.
|
||||
|
||||
## 8.13.0 (2024-10-16)
|
||||
|
||||
### New features
|
||||
|
||||
Upgrade to Unicode 16.0.
|
||||
|
||||
## 8.12.1 (2024-07-03)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix a regression that caused Acorn to no longer run on Node versions <8.10.
|
||||
|
||||
## 8.12.0 (2024-06-14)
|
||||
|
||||
### New features
|
||||
|
||||
Support ES2025 duplicate capture group names in regular expressions.
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Include `VariableDeclarator` in the `AnyNode` type so that walker objects can refer to it without getting a type error.
|
||||
|
||||
Properly raise a parse error for invalid `for`/`of` statements using `async` as binding name.
|
||||
|
||||
Properly recognize "use strict" when preceded by a string with an escaped newline.
|
||||
|
||||
Mark the `Parser` constructor as protected, not private, so plugins can extend it without type errors.
|
||||
|
||||
Fix a bug where some invalid `delete` expressions were let through when the operand was parenthesized and `preserveParens` was enabled.
|
||||
|
||||
Properly normalize line endings in raw strings of invalid template tokens.
|
||||
|
||||
Properly track line numbers for escaped newlines in strings.
|
||||
|
||||
Fix a bug that broke line number accounting after a template literal with invalid escape sequences.
|
||||
|
||||
## 8.11.3 (2023-12-29)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Add `Function` and `Class` to the `AggregateType` type, so that they can be used in walkers without raising a type error.
|
||||
|
||||
Make sure `onToken` get an `import` keyword token when parsing `import.meta`.
|
||||
|
||||
Fix a bug where `.loc.start` could be undefined for `new.target` `meta` nodes.
|
||||
|
||||
## 8.11.2 (2023-10-27)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix a bug that caused regular expressions after colon tokens to not be properly tokenized in some circumstances.
|
||||
|
||||
## 8.11.1 (2023-10-26)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix a regression where `onToken` would receive 'name' tokens for 'new' keyword tokens.
|
||||
|
||||
## 8.11.0 (2023-10-26)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix an issue where tokenizing (without parsing) an object literal with a property named `class` or `function` could, in some circumstance, put the tokenizer into an invalid state.
|
||||
|
||||
Fix an issue where a slash after a call to a property named the same as some keywords would be tokenized as a regular expression.
|
||||
|
||||
### New features
|
||||
|
||||
Upgrade to Unicode 15.1.
|
||||
|
||||
Use a set of new, much more precise, TypeScript types.
|
||||
|
||||
## 8.10.0 (2023-07-05)
|
||||
|
||||
### New features
|
||||
|
||||
Add a `checkPrivateFields` option that disables strict checking of private property use.
|
||||
|
||||
## 8.9.0 (2023-06-16)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Forbid dynamic import after `new`, even when part of a member expression.
|
||||
|
||||
### New features
|
||||
|
||||
Add Unicode properties for ES2023.
|
||||
|
||||
Add support for the `v` flag to regular expressions.
|
||||
|
||||
## 8.8.2 (2023-01-23)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix a bug that caused `allowHashBang` to be set to false when not provided, even with `ecmaVersion >= 14`.
|
||||
|
||||
Fix an exception when passing no option object to `parse` or `new Parser`.
|
||||
|
||||
Fix incorrect parse error on `if (0) let\n[astral identifier char]`.
|
||||
|
||||
## 8.8.1 (2022-10-24)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Make type for `Comment` compatible with estree types.
|
||||
|
||||
## 8.8.0 (2022-07-21)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Allow parentheses around spread args in destructuring object assignment.
|
||||
|
||||
Fix an issue where the tree contained `directive` properties when parsing with a language version that doesn't support them.
|
||||
|
||||
### New features
|
||||
|
||||
Support hashbang comments by default in ECMAScript 2023 and later.
|
||||
|
||||
## 8.7.1 (2022-04-26)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Stop handling `"use strict"` directives in ECMAScript versions before 5.
|
||||
|
||||
Fix an issue where duplicate quoted export names in `export *` syntax were incorrectly checked.
|
||||
|
||||
Add missing type for `tokTypes`.
|
||||
|
||||
## 8.7.0 (2021-12-27)
|
||||
|
||||
### New features
|
||||
|
||||
Support quoted export names.
|
||||
|
||||
Upgrade to Unicode 14.
|
||||
|
||||
Add support for Unicode 13 properties in regular expressions.
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Use a loop to find line breaks, because the existing regexp search would overrun the end of the searched range and waste a lot of time in minified code.
|
||||
|
||||
## 8.6.0 (2021-11-18)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix a bug where an object literal with multiple `__proto__` properties would incorrectly be accepted if a later property value held an assignment.
|
||||
|
||||
### New features
|
||||
|
||||
Support class private fields with the `in` operator.
|
||||
|
||||
## 8.5.0 (2021-09-06)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Improve context-dependent tokenization in a number of corner cases.
|
||||
|
||||
Fix location tracking after a 0x2028 or 0x2029 character in a string literal (which before did not increase the line number).
|
||||
|
||||
Fix an issue where arrow function bodies in for loop context would inappropriately consume `in` operators.
|
||||
|
||||
Fix wrong end locations stored on SequenceExpression nodes.
|
||||
|
||||
Implement restriction that `for`/`of` loop LHS can't start with `let`.
|
||||
|
||||
### New features
|
||||
|
||||
Add support for ES2022 class static blocks.
|
||||
|
||||
Allow multiple input files to be passed to the CLI tool.
|
||||
|
||||
## 8.4.1 (2021-06-24)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix a bug where `allowAwaitOutsideFunction` would allow `await` in class field initializers, and setting `ecmaVersion` to 13 or higher would allow top-level await in non-module sources.
|
||||
|
||||
## 8.4.0 (2021-06-11)
|
||||
|
||||
### New features
|
||||
|
||||
A new option, `allowSuperOutsideMethod`, can be used to suppress the error when `super` is used in the wrong context.
|
||||
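
A hedged sketch of what suppressing the check looks like (the source string is illustrative, not taken from the release notes):

```javascript
const acorn = require("acorn");

// `super` outside a method normally raises an error; the option accepts it.
acorn.parse("function f() { return super.foo; }", {
  ecmaVersion: 2022,
  allowSuperOutsideMethod: true,
});
```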
|
||||
## 8.3.0 (2021-05-31)
|
||||
|
||||
### New features
|
||||
|
||||
Default `allowAwaitOutsideFunction` to true for ECMAScript 2022 and higher.
|
||||
|
||||
Add support for the `d` ([indices](https://github.com/tc39/proposal-regexp-match-indices)) regexp flag.
|
||||
|
||||
## 8.2.4 (2021-05-04)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix spec conformity in corner case 'for await (async of ...)'.
|
||||
|
||||
## 8.2.3 (2021-05-04)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix an issue where the library couldn't parse 'for (async of ...)'.
|
||||
|
||||
Fix a bug in UTF-16 decoding that would read characters incorrectly in some circumstances.
|
||||
|
||||
## 8.2.2 (2021-04-29)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix a bug where a class field initialized to an async arrow function wouldn't allow await inside it. Same issue existed for generator arrow functions with yield.
|
||||
|
||||
## 8.2.1 (2021-04-24)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix a regression introduced in 8.2.0 where static or async class methods with keyword names failed to parse.
|
||||
|
||||
## 8.2.0 (2021-04-24)
|
||||
|
||||
### New features
|
||||
|
||||
Add support for ES2022 class fields and private methods.
|
||||
|
||||
## 8.1.1 (2021-04-12)
|
||||
|
||||
### Various
|
||||
|
||||
Stop shipping source maps in the NPM package.
|
||||
|
||||
## 8.1.0 (2021-03-09)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix a spurious error in nested destructuring arrays.
|
||||
|
||||
### New features
|
||||
|
||||
Expose `allowAwaitOutsideFunction` in CLI interface.
|
||||
|
||||
Make `allowImportExportAnywhere` also apply to `import.meta`.
|
||||
|
||||
## 8.0.5 (2021-01-25)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Adjust package.json to work with Node 12.16.0 and 13.0-13.6.
|
||||
|
||||
## 8.0.4 (2020-10-05)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Make `await x ** y` an error, following the spec.
|
||||
|
||||
Fix potentially exponential regular expression.
|
||||
|
||||
## 8.0.3 (2020-10-02)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix a wasteful loop during `Parser` creation when setting `ecmaVersion` to `"latest"`.
|
||||
|
||||
## 8.0.2 (2020-09-30)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Make the TypeScript types reflect the current allowed values for `ecmaVersion`.
|
||||
|
||||
Fix another regexp/division tokenizer issue.
|
||||
|
||||
## 8.0.1 (2020-08-12)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Provide the correct value in the `version` export.
|
||||
|
||||
## 8.0.0 (2020-08-12)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Disallow expressions like `(a = b) = c`.
|
||||
|
||||
Make non-octal escape sequences a syntax error in strict mode.
|
||||
|
||||
### New features
|
||||
|
||||
The package can now be loaded directly as an ECMAScript module in node 13+.
|
||||
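
For illustration, loading the package as a native module might look like this (a minimal sketch, assuming Node 13+ with ESM enabled):

```javascript
// ESM usage: the named exports mirror the CommonJS interface.
import { parse } from "acorn";

console.log(parse("1 + 1", { ecmaVersion: 2020 }).body[0].type); // "ExpressionStatement"
```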
|
||||
Update to the set of Unicode properties from ES2021.
|
||||
|
||||
### Breaking changes
|
||||
|
||||
The `ecmaVersion` option is now required. For the moment, omitting it will still work with a warning, but that will change in a future release.
|
||||
|
||||
Some changes to method signatures that may be used by plugins.
|
||||
|
||||
## 7.4.0 (2020-08-03)
|
||||
|
||||
### New features
|
||||
|
||||
Add support for logical assignment operators.
|
||||
|
||||
Add support for numeric separators.
|
||||
|
||||
## 7.3.1 (2020-06-11)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Make the string in the `version` export match the actual library version.
|
||||
|
||||
## 7.3.0 (2020-06-11)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix a bug that caused parsing of object patterns with a property named `set` that had a default value to fail.
|
||||
|
||||
### New features
|
||||
|
||||
Add support for optional chaining (`?.`).
|
||||
|
||||
## 7.2.0 (2020-05-09)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix precedence issue in parsing of async arrow functions.
|
||||
|
||||
### New features
|
||||
|
||||
Add support for nullish coalescing.
|
||||
|
||||
Add support for `import.meta`.
|
||||
|
||||
Support `export * as ...` syntax.
|
||||
|
||||
Upgrade to Unicode 13.
|
||||
|
||||
## 6.4.1 (2020-03-09)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
More carefully check for valid UTF16 surrogate pairs in regexp validator.
|
||||
|
||||
## 7.1.1 (2020-03-01)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Treat `\8` and `\9` as invalid escapes in template strings.
|
||||
|
||||
Allow unicode escapes in property names that are keywords.
|
||||
|
||||
Don't error on an exponential operator expression as argument to `await`.
|
||||
|
||||
More carefully check for valid UTF16 surrogate pairs in regexp validator.
|
||||
|
||||
## 7.1.0 (2019-09-24)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Disallow trailing object literal commas when ecmaVersion is less than 5.
|
||||
|
||||
### New features
|
||||
|
||||
Add a static `acorn` property to the `Parser` class that contains the entire module interface, to allow plugins to access the instance of the library that they are acting on.
|
||||
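
A small sketch of how a plugin author can reach that property (the logged values are only illustrative):

```javascript
const { Parser } = require("acorn");

// The static `acorn` property exposes the module interface the Parser belongs to.
console.log(Parser.acorn.version);
console.log(typeof Parser.acorn.parse); // "function"
```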
|
||||
## 7.0.0 (2019-08-13)
|
||||
|
||||
### Breaking changes
|
||||
|
||||
Changes the node format for dynamic imports to use the `ImportExpression` node type, as defined in [ESTree](https://github.com/estree/estree/blob/master/es2020.md#importexpression).
|
||||
|
||||
Makes 10 (ES2019) the default value for the `ecmaVersion` option.
|
||||
|
||||
## 6.3.0 (2019-08-12)
|
||||
|
||||
### New features
|
||||
|
||||
`sourceType: "module"` can now be used even when `ecmaVersion` is less than 6, to parse module-style code that otherwise conforms to an older standard.
|
||||
|
||||
## 6.2.1 (2019-07-21)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix bug causing Acorn to treat some characters as identifier characters that shouldn't be treated as such.
|
||||
|
||||
Fix issue where setting the `allowReserved` option to `"never"` allowed reserved words in some circumstances.
|
||||
|
||||
## 6.2.0 (2019-07-04)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Improve valid assignment checking in `for`/`in` and `for`/`of` loops.
|
||||
|
||||
Disallow binding `let` in patterns.
|
||||
|
||||
### New features
|
||||
|
||||
Support bigint syntax with `ecmaVersion` >= 11.
|
||||
|
||||
Support dynamic `import` syntax with `ecmaVersion` >= 11.
|
||||
|
||||
Upgrade to Unicode version 12.
|
||||
|
||||
## 6.1.1 (2019-02-27)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix a bug that caused parsing of default exports with names to fail.
|
||||
|
||||
## 6.1.0 (2019-02-08)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix scope checking when redefining a `var` as a lexical binding.
|
||||
|
||||
### New features
|
||||
|
||||
Split up `parseSubscripts` to use an internal `parseSubscript` method to make it easier to extend with plugins.
|
||||
|
||||
## 6.0.7 (2019-02-04)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Check that exported bindings are defined.
|
||||
|
||||
Don't treat `\u180e` as a whitespace character.
|
||||
|
||||
Check for duplicate parameter names in methods.
|
||||
|
||||
Don't allow shorthand properties when they are generators or async methods.
|
||||
|
||||
Forbid binding `await` in async arrow function's parameter list.
|
||||
|
||||
## 6.0.6 (2019-01-30)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
The content of class declarations and expressions is now always parsed in strict mode.
|
||||
|
||||
Don't allow `let` or `const` to bind the variable name `let`.
|
||||
|
||||
Treat class declarations as lexical.
|
||||
|
||||
Don't allow a generator function declaration as the sole body of an `if` or `else`.
|
||||
|
||||
Ignore `"use strict"` when after an empty statement.
|
||||
|
||||
Allow string line continuations with special line terminator characters.
|
||||
|
||||
Treat `for` bodies as part of the `for` scope when checking for conflicting bindings.
|
||||
|
||||
Fix bug with parsing `yield` in a `for` loop initializer.
|
||||
|
||||
Implement special cases around scope checking for functions.
|
||||
|
||||
## 6.0.5 (2019-01-02)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix TypeScript type for `Parser.extend` and add `allowAwaitOutsideFunction` to options type.
|
||||
|
||||
Don't treat `let` as a keyword when the next token is `{` on the next line.
|
||||
|
||||
Fix bug that broke checking for parentheses around an object pattern in a destructuring assignment when `preserveParens` was on.
|
||||
|
||||
## 6.0.4 (2018-11-05)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Further improvements to tokenizing regular expressions in corner cases.
|
||||
|
||||
## 6.0.3 (2018-11-04)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix bug in tokenizing an expression-less return followed by a function followed by a regular expression.
|
||||
|
||||
Remove stray symlink in the package tarball.
|
||||
|
||||
## 6.0.2 (2018-09-26)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix bug where default expressions could fail to parse inside an object destructuring assignment expression.
|
||||
|
||||
## 6.0.1 (2018-09-14)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix wrong value in `version` export.
|
||||
|
||||
## 6.0.0 (2018-09-14)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Better handle variable-redefinition checks for catch bindings and functions directly under if statements.
|
||||
|
||||
Forbid `new.target` in top-level arrow functions.
|
||||
|
||||
Fix issue with parsing a regexp after `yield` in some contexts.
|
||||
|
||||
### New features
|
||||
|
||||
The package now comes with TypeScript definitions.
|
||||
|
||||
### Breaking changes
|
||||
|
||||
The default value of the `ecmaVersion` option is now 9 (2018).
|
||||
|
||||
Plugins work differently, and will have to be rewritten to work with this version.
|
||||
|
||||
The loose parser and walker have been moved into separate packages (`acorn-loose` and `acorn-walk`).
|
||||
|
||||
## 5.7.3 (2018-09-10)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix failure to tokenize regexps after expressions like `x.of`.
|
||||
|
||||
Better error message for unterminated template literals.
|
||||
|
||||
## 5.7.2 (2018-08-24)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Properly handle `allowAwaitOutsideFunction` in for statements.
|
||||
|
||||
Treat function declarations at the top level of modules like let bindings.
|
||||
|
||||
Don't allow async function declarations as the only statement under a label.
|
||||
|
||||
## 5.7.0 (2018-06-15)
|
||||
|
||||
### New features
|
||||
|
||||
Upgraded to Unicode 11.
|
||||
|
||||
## 5.6.0 (2018-05-31)
|
||||
|
||||
### New features
|
||||
|
||||
Allow U+2028 and U+2029 in strings when `ecmaVersion` >= 10.
|
||||
|
||||
Allow binding-less catch statements when `ecmaVersion` >= 10.
|
||||
|
||||
Add `allowAwaitOutsideFunction` option for parsing top-level `await`.
|
||||
|
||||
## 5.5.3 (2018-03-08)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
A _second_ republish of the code in 5.5.1, this time with yarn, to hopefully get valid timestamps.
|
||||
|
||||
## 5.5.2 (2018-03-08)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
A republish of the code in 5.5.1 in an attempt to solve an issue with the file timestamps in the npm package being 0.
|
||||
|
||||
## 5.5.1 (2018-03-06)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix misleading error message for octal escapes in template strings.
|
||||
|
||||
## 5.5.0 (2018-02-27)
|
||||
|
||||
### New features
|
||||
|
||||
The identifier character categorization is now based on Unicode version 10.
|
||||
|
||||
Acorn will now validate the content of regular expressions, including new ES9 features.
|
||||
|
||||
## 5.4.0 (2018-02-01)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Disallow duplicate or escaped flags on regular expressions.
|
||||
|
||||
Disallow octal escapes in strings in strict mode.
|
||||
|
||||
### New features
|
||||
|
||||
Add support for async iteration.
|
||||
|
||||
Add support for object spread and rest.
|
||||
|
||||
## 5.3.0 (2017-12-28)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix parsing of floating point literals with leading zeroes in loose mode.
|
||||
|
||||
Allow duplicate property names in object patterns.
|
||||
|
||||
Don't allow static class methods named `prototype`.
|
||||
|
||||
Disallow async functions directly under `if` or `else`.
|
||||
|
||||
Parse right-hand-side of `for`/`of` as an assignment expression.
|
||||
|
||||
Stricter parsing of `for`/`in`.
|
||||
|
||||
Don't allow unicode escapes in contextual keywords.
|
||||
|
||||
### New features
|
||||
|
||||
Parsing class members was factored into smaller methods to allow plugins to hook into it.
|
||||
|
||||
## 5.2.1 (2017-10-30)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix a token context corruption bug.
|
||||
|
||||
## 5.2.0 (2017-10-30)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix token context tracking for `class` and `function` in property-name position.
|
||||
|
||||
Make sure `%*` isn't parsed as a valid operator.
|
||||
|
||||
Allow shorthand properties `get` and `set` to be followed by default values.
|
||||
|
||||
Disallow `super` when not in callee or object position.
|
||||
|
||||
### New features
|
||||
|
||||
Support [`directive` property](https://github.com/estree/estree/compare/b3de58c9997504d6fba04b72f76e6dd1619ee4eb...1da8e603237144f44710360f8feb7a9977e905e0) on directive expression statements.
|
||||
|
||||
## 5.1.2 (2017-09-04)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Disable parsing of legacy HTML-style comments in modules.
|
||||
|
||||
Fix parsing of async methods whose names are keywords.
|
||||
|
||||
## 5.1.1 (2017-07-06)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix problem with disambiguating regexp and division after a class.
|
||||
|
||||
## 5.1.0 (2017-07-05)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix tokenizing of regexps in an object-destructuring `for`/`of` loop and after `yield`.
|
||||
|
||||
Parse zero-prefixed numbers with non-octal digits as decimal.
|
||||
|
||||
Allow object/array patterns in rest parameters.
|
||||
|
||||
Don't error when `yield` is used as a property name.
|
||||
|
||||
Allow `async` as a shorthand object property.
|
||||
|
||||
### New features
|
||||
|
||||
Implement the [template literal revision proposal](https://github.com/tc39/proposal-template-literal-revision) for ES9.
|
||||
|
||||
## 5.0.3 (2017-04-01)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix spurious duplicate variable definition errors for named functions.
|
||||
|
||||
## 5.0.2 (2017-03-30)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
A binary operator after a parenthesized arrow expression is no longer incorrectly treated as an error.
|
||||
|
||||
## 5.0.0 (2017-03-28)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Raise an error for duplicated lexical bindings.
|
||||
|
||||
Fix spurious error when an assignment expression occurred after a spread expression.
|
||||
|
||||
Accept regular expressions after `of` (in `for`/`of`), `yield` (in a generator), and braced arrow functions.
|
||||
|
||||
Allow labels in front of `var` declarations, even in strict mode.
|
||||
|
||||
### Breaking changes
|
||||
|
||||
Parse declarations following `export default` as declaration nodes, not expressions. This means that class and function declarations nodes can now have `null` as their `id`.
|
||||
|
||||
## 4.0.11 (2017-02-07)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Allow all forms of member expressions to be parenthesized as lvalue.
|
||||
|
||||
## 4.0.10 (2017-02-07)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Don't expect semicolons after default-exported functions or classes, even when they are expressions.
|
||||
|
||||
Check for use of `'use strict'` directives in non-simple parameter functions, even when already in strict mode.
|
||||
|
||||
## 4.0.9 (2017-02-06)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix incorrect error raised for parenthesized simple assignment targets, so that `(x) = 1` parses again.
|
||||
|
||||
## 4.0.8 (2017-02-03)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Solve spurious parenthesized pattern errors by temporarily erring on the side of accepting programs that our delayed errors don't handle correctly yet.
|
||||
|
||||
## 4.0.7 (2017-02-02)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Accept invalidly rejected code like `(x).y = 2` again.
|
||||
|
||||
Don't raise an error when a function _inside_ strict code has a non-simple parameter list.
|
||||
|
||||
## 4.0.6 (2017-02-02)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix exponential behavior (manifesting itself as a complete hang for even relatively small source files) introduced by the new 'use strict' check.
|
||||
|
||||
## 4.0.5 (2017-02-02)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Disallow parenthesized pattern expressions.
|
||||
|
||||
Allow keywords as export names.
|
||||
|
||||
Don't allow the `async` keyword to be parenthesized.
|
||||
|
||||
Properly raise an error when a keyword contains a character escape.
|
||||
|
||||
Allow `"use strict"` to appear after other string literal expressions.
|
||||
|
||||
Disallow labeled declarations.
|
||||
|
||||
## 4.0.4 (2016-12-19)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix crash when `export` was followed by a keyword that can't be
|
||||
exported.
|
||||
|
||||
## 4.0.3 (2016-08-16)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Allow regular function declarations inside single-statement `if` branches in loose mode. Forbid them entirely in strict mode.
|
||||
|
||||
Properly parse properties named `async` in ES2017 mode.
|
||||
|
||||
Fix bug where reserved words were broken in ES2017 mode.
|
||||
|
||||
## 4.0.2 (2016-08-11)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Don't ignore period or 'e' characters after octal numbers.
|
||||
|
||||
Fix broken parsing for call expressions in default parameter values of arrow functions.
|
||||
|
||||
## 4.0.1 (2016-08-08)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix false positives in duplicated export name errors.
|
||||
|
||||
## 4.0.0 (2016-08-07)
|
||||
|
||||
### Breaking changes
|
||||
|
||||
The default `ecmaVersion` option value is now 7.
|
||||
|
||||
A number of internal method signatures changed, so plugins might need to be updated.
|
||||
|
||||
### Bug fixes
|
||||
|
||||
The parser now raises errors on duplicated export names.
|
||||
|
||||
`arguments` and `eval` can now be used in shorthand properties.
|
||||
|
||||
Duplicate parameter names in non-simple argument lists now always produce an error.
|
||||
|
||||
### New features
|
||||
|
||||
The `ecmaVersion` option now also accepts year-style version numbers
|
||||
(2015, etc).
|
||||
|
||||
Support for `async`/`await` syntax when `ecmaVersion` is >= 8.
|
||||
|
||||
Support for trailing commas in call expressions when `ecmaVersion` is >= 8.
|
||||
|
||||
## 3.3.0 (2016-07-25)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Fix bug in tokenizing of regexp operator after a function declaration.
|
||||
|
||||
Fix parser crash when parsing an array pattern with a hole.
|
||||
|
||||
### New features
|
||||
|
||||
Implement check against complex argument lists in functions that enable strict mode in ES7.
|
||||
|
||||
## 3.2.0 (2016-06-07)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Improve handling of lack of unicode regexp support in host
|
||||
environment.
|
||||
|
||||
Properly reject shorthand properties whose name is a keyword.
|
||||
|
||||
### New features
|
||||
|
||||
Visitors created with `visit.make` now have their base as _prototype_, rather than copying properties into a fresh object.
|
||||
|
||||
## 3.1.0 (2016-04-18)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
Properly tokenize the division operator directly after a function expression.
|
||||
|
||||
Allow trailing comma in destructuring arrays.
|
||||
|
||||
## 3.0.4 (2016-02-25)
|
||||
|
||||
### Fixes
|
||||
|
||||
Allow update expressions as left-hand-side of the ES7 exponential operator.
|
||||
|
||||
## 3.0.2 (2016-02-10)
|
||||
|
||||
### Fixes
|
||||
|
||||
Fix bug that accidentally made `undefined` a reserved word when parsing ES7.
|
||||
|
||||
## 3.0.0 (2016-02-10)
|
||||
|
||||
### Breaking changes
|
||||
|
||||
The default value of the `ecmaVersion` option is now 6 (used to be 5).
|
||||
|
||||
Support for comprehension syntax (which was dropped from the draft spec) has been removed.
|
||||
|
||||
### Fixes
|
||||
|
||||
`let` and `yield` are now “contextual keywords”, meaning you can mostly use them as identifiers in ES5 non-strict code.
|
||||
|
||||
A parenthesized class or function expression after `export default` is now parsed correctly.
|
||||
|
||||
### New features
|
||||
|
||||
When `ecmaVersion` is set to 7, Acorn will parse the exponentiation operator (`**`).
|
||||
|
||||
The identifier character ranges are now based on Unicode 8.0.0.
|
||||
|
||||
Plugins can now override the `raiseRecoverable` method to override the way non-critical errors are handled.
|
||||
|
||||
## 2.7.0 (2016-01-04)
|
||||
|
||||
### Fixes
|
||||
|
||||
Stop allowing rest parameters in setters.
|
||||
|
||||
Disallow the `y` regexp flag in ES5.
|
||||
|
||||
Disallow `\00` and `\000` escapes in strict mode.
|
||||
|
||||
Raise an error when an import name is a reserved word.
|
||||
|
||||
## 2.6.2 (2015-11-10)
|
||||
|
||||
### Fixes
|
||||
|
||||
Don't crash when no options object is passed.
|
||||
|
||||
## 2.6.0 (2015-11-09)
|
||||
|
||||
### Fixes
|
||||
|
||||
Add `await` as a reserved word in module sources.
|
||||
|
||||
Disallow `yield` in a parameter default value for a generator.
|
||||
|
||||
Forbid using a comma after a rest pattern in an array destructuring.
|
||||
|
||||
### New features
|
||||
|
||||
Support parsing stdin in command-line tool.
|
||||
|
||||
## 2.5.0 (2015-10-27)
|
||||
|
||||
### Fixes
|
||||
|
||||
Fix tokenizer support in the command-line tool.
|
||||
|
||||
Stop allowing `new.target` outside of functions.
|
||||
|
||||
Remove legacy `guard` and `guardedHandler` properties from try nodes.
|
||||
|
||||
Stop allowing multiple `__proto__` properties on an object literal in strict mode.
|
||||
|
||||
Don't allow rest parameters to be non-identifier patterns.
|
||||
|
||||
Check for duplicate parameter names in arrow functions.
|
||||
21
node_modules/acorn/LICENSE
generated
vendored
Normal file
@ -0,0 +1,21 @@
|
||||
MIT License
|
||||
|
||||
Copyright (C) 2012-2022 by various contributors (see AUTHORS)
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
||||
282
node_modules/acorn/README.md
generated
vendored
Normal file
@ -0,0 +1,282 @@
|
||||
# Acorn
|
||||
|
||||
A tiny, fast JavaScript parser written in JavaScript.
|
||||
|
||||
## Community
|
||||
|
||||
Acorn is open source software released under an
|
||||
[MIT license](https://github.com/acornjs/acorn/blob/master/acorn/LICENSE).
|
||||
|
||||
You are welcome to
|
||||
[report bugs](https://github.com/acornjs/acorn/issues) or create pull
|
||||
requests on [github](https://github.com/acornjs/acorn).
|
||||
|
||||
## Installation
|
||||
|
||||
The easiest way to install acorn is from [`npm`](https://www.npmjs.com/):
|
||||
|
||||
```sh
|
||||
npm install acorn
|
||||
```
|
||||
|
||||
Alternately, you can download the source and build acorn yourself:
|
||||
|
||||
```sh
|
||||
git clone https://github.com/acornjs/acorn.git
|
||||
cd acorn
|
||||
npm install
|
||||
```
|
||||
|
||||
## Interface
|
||||
|
||||
**parse**`(input, options)` is the main interface to the library. The
|
||||
`input` parameter is a string, `options` must be an object setting
|
||||
some of the options listed below. The return value will be an abstract
|
||||
syntax tree object as specified by the [ESTree
|
||||
spec](https://github.com/estree/estree).
|
||||
|
||||
```javascript
|
||||
let acorn = require("acorn");
|
||||
console.log(acorn.parse("1 + 1", {ecmaVersion: 2020}));
|
||||
```
|
||||
|
||||
When encountering a syntax error, the parser will raise a
|
||||
`SyntaxError` object with a meaningful message. The error object will
|
||||
have a `pos` property that indicates the string offset at which the
|
||||
error occurred, and a `loc` object that contains a `{line, column}`
|
||||
object referring to that same position.
|
||||
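
For example (a minimal sketch; the broken input is arbitrary):

```javascript
const acorn = require("acorn");

try {
  acorn.parse("if (", {ecmaVersion: 2020});
} catch (e) {
  // `pos` is the character offset of the error, `loc` a {line, column} object.
  console.log(e.message, e.pos, e.loc);
}
```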
|
||||
Options are provided in a second argument, which should be an
|
||||
object containing any of these fields (only `ecmaVersion` is
|
||||
required):
|
||||
|
||||
- **ecmaVersion**: Indicates the ECMAScript version to parse. Can be a
|
||||
number, either in year (`2022`) or plain version number (`6`) form,
|
||||
or `"latest"` (the latest the library supports). This influences
|
||||
support for strict mode, the set of reserved words, and support for
|
||||
new syntax features.
|
||||
|
||||
**NOTE**: Only 'stage 4' (finalized) ECMAScript features are being
|
||||
implemented by Acorn. Other proposed new features must be
|
||||
implemented through plugins.
|
||||
|
||||
- **sourceType**: Indicate the mode the code should be parsed in. Can be
|
||||
either `"script"` or `"module"`. This influences global strict mode
|
||||
and parsing of `import` and `export` declarations.
|
||||
|
||||
**NOTE**: If set to `"module"`, then static `import` / `export` syntax
|
||||
will be valid, even if `ecmaVersion` is less than 6.
|
||||
|
||||
- **onInsertedSemicolon**: If given a callback, that callback will be
|
||||
called whenever a missing semicolon is inserted by the parser. The
|
||||
callback will be given the character offset of the point where the
|
||||
semicolon is inserted as argument, and if `locations` is on, also a
|
||||
`{line, column}` object representing this position.
|
||||
|
||||
- **onTrailingComma**: Like `onInsertedSemicolon`, but for trailing
|
||||
commas.
|
||||
|
||||
- **allowReserved**: If `false`, using a reserved word will generate
|
||||
an error. Defaults to `true` for `ecmaVersion` 3, `false` for higher
|
||||
versions. When given the value `"never"`, reserved words and
|
||||
keywords can also not be used as property names (as in Internet
|
||||
Explorer's old parser).
|
||||
|
||||
- **allowReturnOutsideFunction**: By default, a return statement at
|
||||
the top level raises an error. Set this to `true` to accept such
|
||||
code.
|
||||
|
||||
- **allowImportExportEverywhere**: By default, `import` and `export`
|
||||
declarations can only appear at a program's top level. Setting this
|
||||
option to `true` allows them anywhere where a statement is allowed,
|
||||
and also allows `import.meta` expressions to appear in scripts
|
||||
(when `sourceType` is not `"module"`).
|
||||
|
||||
- **allowAwaitOutsideFunction**: If `false`, `await` expressions can
|
||||
only appear inside `async` functions. Defaults to `true` in modules
|
||||
for `ecmaVersion` 2022 and later, `false` for lower versions.
|
||||
Setting this option to `true` allows top-level `await`
|
||||
expressions. They are still not allowed in non-`async` functions,
|
||||
though.
|
||||
|
||||
- **allowSuperOutsideMethod**: By default, `super` outside a method
|
||||
raises an error. Set this to `true` to accept such code.
|
||||
|
||||
- **allowHashBang**: When this is enabled, if the code starts with the
|
||||
characters `#!` (as in a shellscript), the first line will be
|
||||
treated as a comment. Defaults to true when `ecmaVersion` >= 2023.
|
||||
|
||||
- **checkPrivateFields**: By default, the parser will verify that
|
||||
private properties are only used in places where they are valid and
|
||||
have been declared. Set this to false to turn such checks off.
|
||||
|
||||
- **locations**: When `true`, each node has a `loc` object attached
|
||||
with `start` and `end` subobjects, each of which contains the
|
||||
one-based line and zero-based column numbers in `{line, column}`
|
||||
form. Default is `false`.
|
||||
|
||||
- **onToken**: If a function is passed for this option, each found
|
||||
token will be passed in the same format as tokens returned from
|
||||
`tokenizer().getToken()`.
|
||||
|
||||
If an array is passed, each found token is pushed to it (see the combined example after this options list).
|
||||
|
||||
Note that you are not allowed to call the parser from the
|
||||
callback—that will corrupt its internal state.
|
||||
|
||||
- **onComment**: If a function is passed for this option, whenever a
|
||||
comment is encountered the function will be called with the
|
||||
following parameters:
|
||||
|
||||
- `block`: `true` if the comment is a block comment, false if it
|
||||
is a line comment.
|
||||
- `text`: The content of the comment.
|
||||
- `start`: Character offset of the start of the comment.
|
||||
- `end`: Character offset of the end of the comment.
|
||||
|
||||
When the `locations` option is on, the `{line, column}` locations
|
||||
of the comment’s start and end are passed as two additional
|
||||
parameters.
|
||||
|
||||
If an array is passed for this option, each found comment is pushed
|
||||
to it as an object in Esprima format:
|
||||
|
||||
```javascript
|
||||
{
|
||||
"type": "Line" | "Block",
|
||||
"value": "comment text",
|
||||
"start": Number,
|
||||
"end": Number,
|
||||
// If `locations` option is on:
|
||||
"loc": {
|
||||
"start": {line: Number, column: Number}
|
||||
"end": {line: Number, column: Number}
|
||||
},
|
||||
// If `ranges` option is on:
|
||||
"range": [Number, Number]
|
||||
}
|
||||
```
|
||||
|
||||
Note that you are not allowed to call the parser from the
|
||||
callback—that will corrupt its internal state.
|
||||
|
||||
- **ranges**: Nodes have their start and end character offsets
|
||||
recorded in `start` and `end` properties (directly on the node,
|
||||
rather than the `loc` object, which holds line/column data). To also
|
||||
add a
|
||||
[semi-standardized](https://bugzilla.mozilla.org/show_bug.cgi?id=745678)
|
||||
`range` property holding a `[start, end]` array with the same
|
||||
numbers, set the `ranges` option to `true`.
|
||||
|
||||
- **program**: It is possible to parse multiple files into a single
|
||||
AST by passing the tree produced by parsing the first file as the
|
||||
`program` option in subsequent parses. This will add the toplevel
|
||||
forms of the parsed file to the "Program" (top) node of an existing
|
||||
parse tree.
|
||||
|
||||
- **sourceFile**: When the `locations` option is `true`, you can pass
|
||||
this option to add a `source` attribute in every node’s `loc`
|
||||
object. Note that the contents of this option are not examined or
|
||||
processed in any way; you are free to use whatever format you
|
||||
choose.
|
||||
|
||||
- **directSourceFile**: Like `sourceFile`, but a `sourceFile` property
|
||||
will be added (regardless of the `locations` option) directly to the
|
||||
nodes, rather than the `loc` object.
|
||||
|
||||
- **preserveParens**: If this option is `true`, parenthesized expressions
|
||||
are represented by (non-standard) `ParenthesizedExpression` nodes
|
||||
that have a single `expression` property containing the expression
|
||||
inside parentheses.
|
||||
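
The following sketch combines several of the options above; the input string and option values are only illustrative:

```javascript
const acorn = require("acorn");

// Comments and tokens are collected into arrays, as described for `onComment`
// and `onToken`; `locations` and `ranges` add `loc` and `range` data to nodes.
const comments = [], tokens = [];
const ast = acorn.parse("// a comment\nlet x = 1;", {
  ecmaVersion: 2020,
  locations: true,
  ranges: true,
  onComment: comments,
  onToken: tokens,
});
console.log(comments[0].value, tokens.length, ast.range);
```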
|
||||
**parseExpressionAt**`(input, offset, options)` will parse a single
|
||||
expression in a string, and return its AST. It will not complain if
|
||||
there is more of the string left after the expression.
|
||||
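
For example (the offset and input are illustrative):

```javascript
const acorn = require("acorn");

// Offset 8 points at `b`, so only the expression `b + 1` is parsed;
// the rest of the string is ignored.
const node = acorn.parseExpressionAt("let a = b + 1; done()", 8, {ecmaVersion: 2020});
console.log(node.type, node.end); // "BinaryExpression" 13
```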
|
||||
**tokenizer**`(input, options)` returns an object with a `getToken`
|
||||
method that can be called repeatedly to get the next token, a `{start,
|
||||
end, type, value}` object (with added `loc` property when the
|
||||
`locations` option is enabled and `range` property when the `ranges`
|
||||
option is enabled). When the token's type is `tokTypes.eof`, you
|
||||
should stop calling the method, since it will keep returning that same
|
||||
token forever.
|
||||
|
||||
Note that tokenizing JavaScript without parsing it is, in modern
|
||||
versions of the language, not really possible due to the way syntax is
|
||||
overloaded in ways that can only be disambiguated by the parse
|
||||
context. This package applies a bunch of heuristics to try and do a
|
||||
reasonable job, but you are advised to use `parse` with the `onToken`
|
||||
option instead of this.
|
||||
|
||||
In an ES6 environment, the returned result can be used as any other
|
||||
protocol-compliant iterable:
|
||||
|
||||
```javascript
|
||||
for (let token of acorn.tokenizer(str)) {
|
||||
// iterate over the tokens
|
||||
}
|
||||
|
||||
// transform code to array of tokens:
|
||||
var tokens = [...acorn.tokenizer(str)];
|
||||
```
|
||||
|
||||
**tokTypes** holds an object mapping names to the token type objects
|
||||
that end up in the `type` properties of tokens.
|
||||
|
||||
**getLineInfo**`(input, offset)` can be used to get a `{line,
|
||||
column}` object for a given program string and offset.
|
||||
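
For example (a small sketch):

```javascript
const acorn = require("acorn");

// Offset 12 falls on the second line of this two-line program.
console.log(acorn.getLineInfo("let x = 1;\nlet y = 2;", 12)); // line 2, column 1
```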
|
||||
### The `Parser` class
|
||||
|
||||
Instances of the **`Parser`** class contain all the state and logic
|
||||
that drives a parse. It has static methods `parse`,
|
||||
`parseExpressionAt`, and `tokenizer` that match the top-level
|
||||
functions by the same name.
|
||||
|
||||
When extending the parser with plugins, you need to call these methods
|
||||
on the extended version of the class. To extend a parser with plugins,
|
||||
you can use its static `extend` method.
|
||||
|
||||
```javascript
|
||||
var acorn = require("acorn");
|
||||
var jsx = require("acorn-jsx");
|
||||
var JSXParser = acorn.Parser.extend(jsx());
|
||||
JSXParser.parse("foo(<bar/>)", {ecmaVersion: 2020});
|
||||
```
|
||||
|
||||
The `extend` method takes any number of plugin values, and returns a
|
||||
new `Parser` class that includes the extra parser logic provided by
|
||||
the plugins.
|
||||
|
||||
## Command line interface
|
||||
|
||||
The `bin/acorn` utility can be used to parse a file from the command
|
||||
line. It accepts as arguments its input file and the following
|
||||
options:
|
||||
|
||||
- `--ecma3|--ecma5|--ecma6|--ecma7|--ecma8|--ecma9|--ecma10`: Sets the ECMAScript version
|
||||
to parse. Default is version 9.
|
||||
|
||||
- `--module`: Sets the parsing mode to `"module"`. Is set to `"script"` otherwise.
|
||||
|
||||
- `--locations`: Attaches a "loc" object to each node with "start" and
|
||||
"end" subobjects, each of which contains the one-based line and
|
||||
zero-based column numbers in `{line, column}` form.
|
||||
|
||||
- `--allow-hash-bang`: If the code starts with the characters #! (as
|
||||
in a shellscript), the first line will be treated as a comment.
|
||||
|
||||
- `--allow-await-outside-function`: Allows top-level `await` expressions.
|
||||
See the `allowAwaitOutsideFunction` option for more information.
|
||||
|
||||
- `--compact`: No whitespace is used in the AST output.
|
||||
|
||||
- `--silent`: Do not output the AST, just return the exit status.
|
||||
|
||||
- `--help`: Print the usage information and quit.
|
||||
|
||||
The utility spits out the syntax tree as JSON data.
|
||||
|
||||
## Existing plugins
|
||||
|
||||
- [`acorn-jsx`](https://github.com/RReverser/acorn-jsx): Parse [Facebook JSX syntax extensions](https://github.com/facebook/jsx)
|
||||
4
node_modules/acorn/bin/acorn
generated
vendored
Executable file
@ -0,0 +1,4 @@
|
||||
#!/usr/bin/env node
|
||||
"use strict"
|
||||
|
||||
require("../dist/bin.js")
|
||||
866
node_modules/acorn/dist/acorn.d.mts
generated
vendored
Normal file
@ -0,0 +1,866 @@
|
||||
export interface Node {
|
||||
start: number
|
||||
end: number
|
||||
type: string
|
||||
range?: [number, number]
|
||||
loc?: SourceLocation | null
|
||||
}
|
||||
|
||||
export interface SourceLocation {
|
||||
source?: string | null
|
||||
start: Position
|
||||
end: Position
|
||||
}
|
||||
|
||||
export interface Position {
|
||||
/** 1-based */
|
||||
line: number
|
||||
/** 0-based */
|
||||
column: number
|
||||
}
|
||||
|
||||
export interface Identifier extends Node {
|
||||
type: "Identifier"
|
||||
name: string
|
||||
}
|
||||
|
||||
export interface Literal extends Node {
|
||||
type: "Literal"
|
||||
value?: string | boolean | null | number | RegExp | bigint
|
||||
raw?: string
|
||||
regex?: {
|
||||
pattern: string
|
||||
flags: string
|
||||
}
|
||||
bigint?: string
|
||||
}
|
||||
|
||||
export interface Program extends Node {
|
||||
type: "Program"
|
||||
body: Array<Statement | ModuleDeclaration>
|
||||
sourceType: "script" | "module"
|
||||
}
|
||||
|
||||
export interface Function extends Node {
|
||||
id?: Identifier | null
|
||||
params: Array<Pattern>
|
||||
body: BlockStatement | Expression
|
||||
generator: boolean
|
||||
expression: boolean
|
||||
async: boolean
|
||||
}
|
||||
|
||||
export interface ExpressionStatement extends Node {
|
||||
type: "ExpressionStatement"
|
||||
expression: Expression | Literal
|
||||
directive?: string
|
||||
}
|
||||
|
||||
export interface BlockStatement extends Node {
|
||||
type: "BlockStatement"
|
||||
body: Array<Statement>
|
||||
}
|
||||
|
||||
export interface EmptyStatement extends Node {
|
||||
type: "EmptyStatement"
|
||||
}
|
||||
|
||||
export interface DebuggerStatement extends Node {
|
||||
type: "DebuggerStatement"
|
||||
}
|
||||
|
||||
export interface WithStatement extends Node {
|
||||
type: "WithStatement"
|
||||
object: Expression
|
||||
body: Statement
|
||||
}
|
||||
|
||||
export interface ReturnStatement extends Node {
|
||||
type: "ReturnStatement"
|
||||
argument?: Expression | null
|
||||
}
|
||||
|
||||
export interface LabeledStatement extends Node {
|
||||
type: "LabeledStatement"
|
||||
label: Identifier
|
||||
body: Statement
|
||||
}
|
||||
|
||||
export interface BreakStatement extends Node {
|
||||
type: "BreakStatement"
|
||||
label?: Identifier | null
|
||||
}
|
||||
|
||||
export interface ContinueStatement extends Node {
|
||||
type: "ContinueStatement"
|
||||
label?: Identifier | null
|
||||
}
|
||||
|
||||
export interface IfStatement extends Node {
|
||||
type: "IfStatement"
|
||||
test: Expression
|
||||
consequent: Statement
|
||||
alternate?: Statement | null
|
||||
}
|
||||
|
||||
export interface SwitchStatement extends Node {
|
||||
type: "SwitchStatement"
|
||||
discriminant: Expression
|
||||
cases: Array<SwitchCase>
|
||||
}
|
||||
|
||||
export interface SwitchCase extends Node {
|
||||
type: "SwitchCase"
|
||||
test?: Expression | null
|
||||
consequent: Array<Statement>
|
||||
}
|
||||
|
||||
export interface ThrowStatement extends Node {
|
||||
type: "ThrowStatement"
|
||||
argument: Expression
|
||||
}
|
||||
|
||||
export interface TryStatement extends Node {
|
||||
type: "TryStatement"
|
||||
block: BlockStatement
|
||||
handler?: CatchClause | null
|
||||
finalizer?: BlockStatement | null
|
||||
}
|
||||
|
||||
export interface CatchClause extends Node {
|
||||
type: "CatchClause"
|
||||
param?: Pattern | null
|
||||
body: BlockStatement
|
||||
}
|
||||
|
||||
export interface WhileStatement extends Node {
|
||||
type: "WhileStatement"
|
||||
test: Expression
|
||||
body: Statement
|
||||
}
|
||||
|
||||
export interface DoWhileStatement extends Node {
|
||||
type: "DoWhileStatement"
|
||||
body: Statement
|
||||
test: Expression
|
||||
}
|
||||
|
||||
export interface ForStatement extends Node {
|
||||
type: "ForStatement"
|
||||
init?: VariableDeclaration | Expression | null
|
||||
test?: Expression | null
|
||||
update?: Expression | null
|
||||
body: Statement
|
||||
}
|
||||
|
||||
export interface ForInStatement extends Node {
|
||||
type: "ForInStatement"
|
||||
left: VariableDeclaration | Pattern
|
||||
right: Expression
|
||||
body: Statement
|
||||
}
|
||||
|
||||
export interface FunctionDeclaration extends Function {
|
||||
type: "FunctionDeclaration"
|
||||
id: Identifier
|
||||
body: BlockStatement
|
||||
}
|
||||
|
||||
export interface VariableDeclaration extends Node {
|
||||
type: "VariableDeclaration"
|
||||
declarations: Array<VariableDeclarator>
|
||||
kind: "var" | "let" | "const"
|
||||
}
|
||||
|
||||
export interface VariableDeclarator extends Node {
|
||||
type: "VariableDeclarator"
|
||||
id: Pattern
|
||||
init?: Expression | null
|
||||
}
|
||||
|
||||
export interface ThisExpression extends Node {
|
||||
type: "ThisExpression"
|
||||
}
|
||||
|
||||
export interface ArrayExpression extends Node {
|
||||
type: "ArrayExpression"
|
||||
elements: Array<Expression | SpreadElement | null>
|
||||
}
|
||||
|
||||
export interface ObjectExpression extends Node {
|
||||
type: "ObjectExpression"
|
||||
properties: Array<Property | SpreadElement>
|
||||
}
|
||||
|
||||
export interface Property extends Node {
|
||||
type: "Property"
|
||||
key: Expression
|
||||
value: Expression
|
||||
kind: "init" | "get" | "set"
|
||||
method: boolean
|
||||
shorthand: boolean
|
||||
computed: boolean
|
||||
}
|
||||
|
||||
export interface FunctionExpression extends Function {
|
||||
type: "FunctionExpression"
|
||||
body: BlockStatement
|
||||
}
|
||||
|
||||
export interface UnaryExpression extends Node {
|
||||
type: "UnaryExpression"
|
||||
operator: UnaryOperator
|
||||
prefix: boolean
|
||||
argument: Expression
|
||||
}
|
||||
|
||||
export type UnaryOperator = "-" | "+" | "!" | "~" | "typeof" | "void" | "delete"
|
||||
|
||||
export interface UpdateExpression extends Node {
|
||||
type: "UpdateExpression"
|
||||
operator: UpdateOperator
|
||||
argument: Expression
|
||||
prefix: boolean
|
||||
}
|
||||
|
||||
export type UpdateOperator = "++" | "--"
|
||||
|
||||
export interface BinaryExpression extends Node {
|
||||
type: "BinaryExpression"
|
||||
operator: BinaryOperator
|
||||
left: Expression | PrivateIdentifier
|
||||
right: Expression
|
||||
}
|
||||
|
||||
export type BinaryOperator = "==" | "!=" | "===" | "!==" | "<" | "<=" | ">" | ">=" | "<<" | ">>" | ">>>" | "+" | "-" | "*" | "/" | "%" | "|" | "^" | "&" | "in" | "instanceof" | "**"
|
||||
|
||||
export interface AssignmentExpression extends Node {
|
||||
type: "AssignmentExpression"
|
||||
operator: AssignmentOperator
|
||||
left: Pattern
|
||||
right: Expression
|
||||
}
|
||||
|
||||
export type AssignmentOperator = "=" | "+=" | "-=" | "*=" | "/=" | "%=" | "<<=" | ">>=" | ">>>=" | "|=" | "^=" | "&=" | "**=" | "||=" | "&&=" | "??="
|
||||
|
||||
export interface LogicalExpression extends Node {
|
||||
type: "LogicalExpression"
|
||||
operator: LogicalOperator
|
||||
left: Expression
|
||||
right: Expression
|
||||
}
|
||||
|
||||
export type LogicalOperator = "||" | "&&" | "??"
|
||||
|
||||
export interface MemberExpression extends Node {
|
||||
type: "MemberExpression"
|
||||
object: Expression | Super
|
||||
property: Expression | PrivateIdentifier
|
||||
computed: boolean
|
||||
optional: boolean
|
||||
}
|
||||
|
||||
export interface ConditionalExpression extends Node {
|
||||
type: "ConditionalExpression"
|
||||
test: Expression
|
||||
alternate: Expression
|
||||
consequent: Expression
|
||||
}
|
||||
|
||||
export interface CallExpression extends Node {
|
||||
type: "CallExpression"
|
||||
callee: Expression | Super
|
||||
arguments: Array<Expression | SpreadElement>
|
||||
optional: boolean
|
||||
}
|
||||
|
||||
export interface NewExpression extends Node {
|
||||
type: "NewExpression"
|
||||
callee: Expression
|
||||
arguments: Array<Expression | SpreadElement>
|
||||
}
|
||||
|
||||
export interface SequenceExpression extends Node {
|
||||
type: "SequenceExpression"
|
||||
expressions: Array<Expression>
|
||||
}
|
||||
|
||||
export interface ForOfStatement extends Node {
|
||||
type: "ForOfStatement"
|
||||
left: VariableDeclaration | Pattern
|
||||
right: Expression
|
||||
body: Statement
|
||||
await: boolean
|
||||
}
|
||||
|
||||
export interface Super extends Node {
|
||||
type: "Super"
|
||||
}
|
||||
|
||||
export interface SpreadElement extends Node {
|
||||
type: "SpreadElement"
|
||||
argument: Expression
|
||||
}
|
||||
|
||||
export interface ArrowFunctionExpression extends Function {
|
||||
type: "ArrowFunctionExpression"
|
||||
}
|
||||
|
||||
export interface YieldExpression extends Node {
|
||||
type: "YieldExpression"
|
||||
argument?: Expression | null
|
||||
delegate: boolean
|
||||
}
|
||||
|
||||
export interface TemplateLiteral extends Node {
|
||||
type: "TemplateLiteral"
|
||||
quasis: Array<TemplateElement>
|
||||
expressions: Array<Expression>
|
||||
}
|
||||
|
||||
export interface TaggedTemplateExpression extends Node {
|
||||
type: "TaggedTemplateExpression"
|
||||
tag: Expression
|
||||
quasi: TemplateLiteral
|
||||
}
|
||||
|
||||
export interface TemplateElement extends Node {
|
||||
type: "TemplateElement"
|
||||
tail: boolean
|
||||
value: {
|
||||
cooked?: string | null
|
||||
raw: string
|
||||
}
|
||||
}
|
||||
|
||||
export interface AssignmentProperty extends Node {
|
||||
type: "Property"
|
||||
key: Expression
|
||||
value: Pattern
|
||||
kind: "init"
|
||||
method: false
|
||||
shorthand: boolean
|
||||
computed: boolean
|
||||
}
|
||||
|
||||
export interface ObjectPattern extends Node {
|
||||
type: "ObjectPattern"
|
||||
properties: Array<AssignmentProperty | RestElement>
|
||||
}
|
||||
|
||||
export interface ArrayPattern extends Node {
|
||||
type: "ArrayPattern"
|
||||
elements: Array<Pattern | null>
|
||||
}
|
||||
|
||||
export interface RestElement extends Node {
|
||||
type: "RestElement"
|
||||
argument: Pattern
|
||||
}
|
||||
|
||||
export interface AssignmentPattern extends Node {
|
||||
type: "AssignmentPattern"
|
||||
left: Pattern
|
||||
right: Expression
|
||||
}
|
||||
|
||||
export interface Class extends Node {
|
||||
id?: Identifier | null
|
||||
superClass?: Expression | null
|
||||
body: ClassBody
|
||||
}
|
||||
|
||||
export interface ClassBody extends Node {
|
||||
type: "ClassBody"
|
||||
body: Array<MethodDefinition | PropertyDefinition | StaticBlock>
|
||||
}
|
||||
|
||||
export interface MethodDefinition extends Node {
|
||||
type: "MethodDefinition"
|
||||
key: Expression | PrivateIdentifier
|
||||
value: FunctionExpression
|
||||
kind: "constructor" | "method" | "get" | "set"
|
||||
computed: boolean
|
||||
static: boolean
|
||||
}
|
||||
|
||||
export interface ClassDeclaration extends Class {
|
||||
type: "ClassDeclaration"
|
||||
id: Identifier
|
||||
}
|
||||
|
||||
export interface ClassExpression extends Class {
|
||||
type: "ClassExpression"
|
||||
}
|
||||
|
||||
export interface MetaProperty extends Node {
|
||||
type: "MetaProperty"
|
||||
meta: Identifier
|
||||
property: Identifier
|
||||
}
|
||||
|
||||
export interface ImportDeclaration extends Node {
|
||||
type: "ImportDeclaration"
|
||||
specifiers: Array<ImportSpecifier | ImportDefaultSpecifier | ImportNamespaceSpecifier>
|
||||
source: Literal
|
||||
attributes: Array<ImportAttribute>
|
||||
}
|
||||
|
||||
export interface ImportSpecifier extends Node {
|
||||
type: "ImportSpecifier"
|
||||
imported: Identifier | Literal
|
||||
local: Identifier
|
||||
}
|
||||
|
||||
export interface ImportDefaultSpecifier extends Node {
|
||||
type: "ImportDefaultSpecifier"
|
||||
local: Identifier
|
||||
}
|
||||
|
||||
export interface ImportNamespaceSpecifier extends Node {
|
||||
type: "ImportNamespaceSpecifier"
|
||||
local: Identifier
|
||||
}
|
||||
|
||||
export interface ImportAttribute extends Node {
|
||||
type: "ImportAttribute"
|
||||
key: Identifier | Literal
|
||||
value: Literal
|
||||
}
|
||||
|
||||
export interface ExportNamedDeclaration extends Node {
|
||||
type: "ExportNamedDeclaration"
|
||||
declaration?: Declaration | null
|
||||
specifiers: Array<ExportSpecifier>
|
||||
source?: Literal | null
|
||||
attributes: Array<ImportAttribute>
|
||||
}
|
||||
|
||||
export interface ExportSpecifier extends Node {
|
||||
type: "ExportSpecifier"
|
||||
exported: Identifier | Literal
|
||||
local: Identifier | Literal
|
||||
}
|
||||
|
||||
export interface AnonymousFunctionDeclaration extends Function {
|
||||
type: "FunctionDeclaration"
|
||||
id: null
|
||||
body: BlockStatement
|
||||
}
|
||||
|
||||
export interface AnonymousClassDeclaration extends Class {
|
||||
type: "ClassDeclaration"
|
||||
id: null
|
||||
}
|
||||
|
||||
export interface ExportDefaultDeclaration extends Node {
|
||||
type: "ExportDefaultDeclaration"
|
||||
declaration: AnonymousFunctionDeclaration | FunctionDeclaration | AnonymousClassDeclaration | ClassDeclaration | Expression
|
||||
}
|
||||
|
||||
export interface ExportAllDeclaration extends Node {
|
||||
type: "ExportAllDeclaration"
|
||||
source: Literal
|
||||
exported?: Identifier | Literal | null
|
||||
attributes: Array<ImportAttribute>
|
||||
}
|
||||
|
||||
export interface AwaitExpression extends Node {
|
||||
type: "AwaitExpression"
|
||||
argument: Expression
|
||||
}
|
||||
|
||||
export interface ChainExpression extends Node {
|
||||
type: "ChainExpression"
|
||||
expression: MemberExpression | CallExpression
|
||||
}
|
||||
|
||||
export interface ImportExpression extends Node {
|
||||
type: "ImportExpression"
|
||||
source: Expression
|
||||
options: Expression | null
|
||||
}
|
||||
|
||||
export interface ParenthesizedExpression extends Node {
|
||||
type: "ParenthesizedExpression"
|
||||
expression: Expression
|
||||
}
|
||||
|
||||
export interface PropertyDefinition extends Node {
|
||||
type: "PropertyDefinition"
|
||||
key: Expression | PrivateIdentifier
|
||||
value?: Expression | null
|
||||
computed: boolean
|
||||
static: boolean
|
||||
}
|
||||
|
||||
export interface PrivateIdentifier extends Node {
|
||||
type: "PrivateIdentifier"
|
||||
name: string
|
||||
}
|
||||
|
||||
export interface StaticBlock extends Node {
|
||||
type: "StaticBlock"
|
||||
body: Array<Statement>
|
||||
}
|
||||
|
||||
export type Statement =
|
||||
| ExpressionStatement
|
||||
| BlockStatement
|
||||
| EmptyStatement
|
||||
| DebuggerStatement
|
||||
| WithStatement
|
||||
| ReturnStatement
|
||||
| LabeledStatement
|
||||
| BreakStatement
|
||||
| ContinueStatement
|
||||
| IfStatement
|
||||
| SwitchStatement
|
||||
| ThrowStatement
|
||||
| TryStatement
|
||||
| WhileStatement
|
||||
| DoWhileStatement
|
||||
| ForStatement
|
||||
| ForInStatement
|
||||
| ForOfStatement
|
||||
| Declaration
|
||||
|
||||
export type Declaration =
|
||||
| FunctionDeclaration
|
||||
| VariableDeclaration
|
||||
| ClassDeclaration
|
||||
|
||||
export type Expression =
|
||||
| Identifier
|
||||
| Literal
|
||||
| ThisExpression
|
||||
| ArrayExpression
|
||||
| ObjectExpression
|
||||
| FunctionExpression
|
||||
| UnaryExpression
|
||||
| UpdateExpression
|
||||
| BinaryExpression
|
||||
| AssignmentExpression
|
||||
| LogicalExpression
|
||||
| MemberExpression
|
||||
| ConditionalExpression
|
||||
| CallExpression
|
||||
| NewExpression
|
||||
| SequenceExpression
|
||||
| ArrowFunctionExpression
|
||||
| YieldExpression
|
||||
| TemplateLiteral
|
||||
| TaggedTemplateExpression
|
||||
| ClassExpression
|
||||
| MetaProperty
|
||||
| AwaitExpression
|
||||
| ChainExpression
|
||||
| ImportExpression
|
||||
| ParenthesizedExpression
|
||||
|
||||
export type Pattern =
|
||||
| Identifier
|
||||
| MemberExpression
|
||||
| ObjectPattern
|
||||
| ArrayPattern
|
||||
| RestElement
|
||||
| AssignmentPattern
|
||||
|
||||
export type ModuleDeclaration =
|
||||
| ImportDeclaration
|
||||
| ExportNamedDeclaration
|
||||
| ExportDefaultDeclaration
|
||||
| ExportAllDeclaration
|
||||
|
||||
export type AnyNode = Statement | Expression | Declaration | ModuleDeclaration | Literal | Program | SwitchCase | CatchClause | Property | Super | SpreadElement | TemplateElement | AssignmentProperty | ObjectPattern | ArrayPattern | RestElement | AssignmentPattern | ClassBody | MethodDefinition | MetaProperty | ImportAttribute | ImportSpecifier | ImportDefaultSpecifier | ImportNamespaceSpecifier | ExportSpecifier | AnonymousFunctionDeclaration | AnonymousClassDeclaration | PropertyDefinition | PrivateIdentifier | StaticBlock | VariableDeclarator
|
||||
|
||||
export function parse(input: string, options: Options): Program
|
||||
|
||||
export function parseExpressionAt(input: string, pos: number, options: Options): Expression
|
||||
|
||||
export function tokenizer(input: string, options: Options): {
|
||||
getToken(): Token
|
||||
[Symbol.iterator](): Iterator<Token>
|
||||
}
|
||||
|
||||
export type ecmaVersion = 3 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 2015 | 2016 | 2017 | 2018 | 2019 | 2020 | 2021 | 2022 | 2023 | 2024 | 2025 | "latest"
|
||||
|
||||
export interface Options {
|
||||
/**
|
||||
* `ecmaVersion` indicates the ECMAScript version to parse. Can be a
|
||||
* number, either in year (`2022`) or plain version number (`6`) form,
|
||||
* or `"latest"` (the latest the library supports). This influences
|
||||
* support for strict mode, the set of reserved words, and support for
|
||||
* new syntax features.
|
||||
*/
|
||||
ecmaVersion: ecmaVersion
|
||||
|
||||
/**
|
||||
* `sourceType` indicates the mode the code should be parsed in.
|
||||
* Can be either `"script"` or `"module"`. This influences global
|
||||
* strict mode and parsing of `import` and `export` declarations.
|
||||
*/
|
||||
sourceType?: "script" | "module"
|
||||
|
||||
/**
|
||||
* a callback that will be called when a semicolon is automatically inserted.
|
||||
* @param lastTokEnd the position of the inserted semicolon as an offset
|
||||
* @param lastTokEndLoc location if {@link locations} is enabled
|
||||
*/
|
||||
onInsertedSemicolon?: (lastTokEnd: number, lastTokEndLoc?: Position) => void
|
||||
|
||||
/**
|
||||
* similar to `onInsertedSemicolon`, but for trailing commas
|
||||
* @param lastTokEnd the position of the comma as an offset
|
||||
* @param lastTokEndLoc location if `locations` is enabled
|
||||
*/
|
||||
onTrailingComma?: (lastTokEnd: number, lastTokEndLoc?: Position) => void
|
||||
|
||||
/**
|
||||
* By default, reserved words are only enforced if ecmaVersion >= 5.
|
||||
* Set `allowReserved` to a boolean value to explicitly turn this on
|
||||
* and off. When this option has the value "never", reserved words
|
||||
* and keywords can also not be used as property names.
|
||||
*/
|
||||
allowReserved?: boolean | "never"
|
||||
|
||||
/**
|
||||
* When enabled, a return at the top level is not considered an error.
|
||||
*/
|
||||
allowReturnOutsideFunction?: boolean
|
||||
|
||||
/**
|
||||
* When enabled, import/export statements are not constrained to
|
||||
* appearing at the top of the program, and an import.meta expression
|
||||
* in a script isn't considered an error.
|
||||
*/
|
||||
allowImportExportEverywhere?: boolean
|
||||
|
||||
/**
|
||||
* By default, `await` identifiers are allowed to appear at the top-level scope only if {@link ecmaVersion} >= 2022.
|
||||
* When enabled, await identifiers are allowed to appear at the top-level scope,
|
||||
* but they are still not allowed in non-async functions.
|
||||
*/
|
||||
allowAwaitOutsideFunction?: boolean
|
||||
|
||||
/**
|
||||
* When enabled, super identifiers are not constrained to
|
||||
* appearing in methods and do not raise an error when they appear elsewhere.
|
||||
*/
|
||||
allowSuperOutsideMethod?: boolean
|
||||
|
||||
/**
|
||||
* When enabled, hashbang directive in the beginning of file is
|
||||
* allowed and treated as a line comment. Enabled by default when
|
||||
* {@link ecmaVersion} >= 2023.
|
||||
*/
|
||||
allowHashBang?: boolean
|
||||
|
||||
/**
|
||||
* By default, the parser will verify that private properties are
|
||||
* only used in places where they are valid and have been declared.
|
||||
* Set this to false to turn such checks off.
|
||||
*/
|
||||
checkPrivateFields?: boolean
|
||||
|
||||
/**
|
||||
* When `locations` is on, `loc` properties holding objects with
|
||||
* `start` and `end` properties as {@link Position} objects will be attached to the
|
||||
* nodes.
|
||||
*/
|
||||
locations?: boolean
|
||||
|
||||
/**
|
||||
* a callback that will cause Acorn to call that function with object in the same
|
||||
* format as tokens returned from `tokenizer().getToken()`. Note
|
||||
* that you are not allowed to call the parser from the
|
||||
* callback—that will corrupt its internal state.
|
||||
*/
|
||||
onToken?: ((token: Token) => void) | Token[]
|
||||
|
||||
|
||||
/**
|
||||
* This takes a function or an array.
|
||||
*
|
||||
* When a function is passed, Acorn will call that function with `(block, text, start,
|
||||
* end)` parameters whenever a comment is skipped. `block` is a
|
||||
* boolean indicating whether this is a block (`/* *\/`) comment,
|
||||
* `text` is the content of the comment, and `start` and `end` are
|
||||
* character offsets that denote the start and end of the comment.
|
||||
* When the {@link locations} option is on, two more parameters are
|
||||
* passed, the full locations of {@link Position} type of the start and
|
||||
* end of the comments.
|
||||
*
|
||||
* When an array is passed, each found comment of {@link Comment} type is pushed to the array.
|
||||
*
|
||||
* Note that you are not allowed to call the
|
||||
* parser from the callback—that will corrupt its internal state.
|
||||
*/
|
||||
onComment?: ((
|
||||
isBlock: boolean, text: string, start: number, end: number, startLoc?: Position,
|
||||
endLoc?: Position
|
||||
) => void) | Comment[]
|
||||
|
||||
/**
|
||||
* Nodes have their start and end characters offsets recorded in
|
||||
* `start` and `end` properties (directly on the node, rather than
|
||||
* the `loc` object, which holds line/column data). To also add a
|
||||
* [semi-standardized][range] `range` property holding a `[start,
|
||||
* end]` array with the same numbers, set the `ranges` option to
|
||||
* `true`.
|
||||
*/
|
||||
ranges?: boolean
|
||||
|
||||
/**
|
||||
* It is possible to parse multiple files into a single AST by
|
||||
* passing the tree produced by parsing the first file as
|
||||
* `program` option in subsequent parses. This will add the
|
||||
* toplevel forms of the parsed file to the `Program` (top) node
|
||||
* of an existing parse tree.
|
||||
*/
|
||||
program?: Node
|
||||
|
||||
/**
|
||||
* When {@link locations} is on, you can pass this to record the source
|
||||
* file in every node's `loc` object.
|
||||
*/
|
||||
sourceFile?: string
|
||||
|
||||
/**
|
||||
* This value, if given, is stored in every node, whether {@link locations} is on or off.
|
||||
*/
|
||||
directSourceFile?: string
|
||||
|
||||
/**
|
||||
* When enabled, parenthesized expressions are represented by
|
||||
* (non-standard) ParenthesizedExpression nodes
|
||||
*/
|
||||
preserveParens?: boolean
|
||||
}
|
||||
|
||||
export class Parser {
|
||||
options: Options
|
||||
input: string
|
||||
|
||||
protected constructor(options: Options, input: string, startPos?: number)
|
||||
parse(): Program
|
||||
|
||||
static parse(input: string, options: Options): Program
|
||||
static parseExpressionAt(input: string, pos: number, options: Options): Expression
|
||||
static tokenizer(input: string, options: Options): {
|
||||
getToken(): Token
|
||||
[Symbol.iterator](): Iterator<Token>
|
||||
}
|
||||
static extend(...plugins: ((BaseParser: typeof Parser) => typeof Parser)[]): typeof Parser
|
||||
}
|
||||
|
||||
export const defaultOptions: Options
|
||||
|
||||
export function getLineInfo(input: string, offset: number): Position
|
||||
|
||||
export class TokenType {
|
||||
label: string
|
||||
keyword: string | undefined
|
||||
}
|
||||
|
||||
export const tokTypes: {
|
||||
num: TokenType
|
||||
regexp: TokenType
|
||||
string: TokenType
|
||||
name: TokenType
|
||||
privateId: TokenType
|
||||
eof: TokenType
|
||||
|
||||
bracketL: TokenType
|
||||
bracketR: TokenType
|
||||
braceL: TokenType
|
||||
braceR: TokenType
|
||||
parenL: TokenType
|
||||
parenR: TokenType
|
||||
comma: TokenType
|
||||
semi: TokenType
|
||||
colon: TokenType
|
||||
dot: TokenType
|
||||
question: TokenType
|
||||
questionDot: TokenType
|
||||
arrow: TokenType
|
||||
template: TokenType
|
||||
invalidTemplate: TokenType
|
||||
ellipsis: TokenType
|
||||
backQuote: TokenType
|
||||
dollarBraceL: TokenType
|
||||
|
||||
eq: TokenType
|
||||
assign: TokenType
|
||||
incDec: TokenType
|
||||
prefix: TokenType
|
||||
logicalOR: TokenType
|
||||
logicalAND: TokenType
|
||||
bitwiseOR: TokenType
|
||||
bitwiseXOR: TokenType
|
||||
bitwiseAND: TokenType
|
||||
equality: TokenType
|
||||
relational: TokenType
|
||||
bitShift: TokenType
|
||||
plusMin: TokenType
|
||||
modulo: TokenType
|
||||
star: TokenType
|
||||
slash: TokenType
|
||||
starstar: TokenType
|
||||
coalesce: TokenType
|
||||
|
||||
_break: TokenType
|
||||
_case: TokenType
|
||||
_catch: TokenType
|
||||
_continue: TokenType
|
||||
_debugger: TokenType
|
||||
_default: TokenType
|
||||
_do: TokenType
|
||||
_else: TokenType
|
||||
_finally: TokenType
|
||||
_for: TokenType
|
||||
_function: TokenType
|
||||
_if: TokenType
|
||||
_return: TokenType
|
||||
_switch: TokenType
|
||||
_throw: TokenType
|
||||
_try: TokenType
|
||||
_var: TokenType
|
||||
_const: TokenType
|
||||
_while: TokenType
|
||||
_with: TokenType
|
||||
_new: TokenType
|
||||
_this: TokenType
|
||||
_super: TokenType
|
||||
_class: TokenType
|
||||
_extends: TokenType
|
||||
_export: TokenType
|
||||
_import: TokenType
|
||||
_null: TokenType
|
||||
_true: TokenType
|
||||
_false: TokenType
|
||||
_in: TokenType
|
||||
_instanceof: TokenType
|
||||
_typeof: TokenType
|
||||
_void: TokenType
|
||||
_delete: TokenType
|
||||
}
|
||||
|
||||
export interface Comment {
|
||||
type: "Line" | "Block"
|
||||
value: string
|
||||
start: number
|
||||
end: number
|
||||
loc?: SourceLocation
|
||||
range?: [number, number]
|
||||
}
|
||||
|
||||
export class Token {
|
||||
type: TokenType
|
||||
start: number
|
||||
end: number
|
||||
loc?: SourceLocation
|
||||
range?: [number, number]
|
||||
}
|
||||
|
||||
export const version: string
|
||||
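The declarations above describe acorn's tokenizer API. The following is an illustrative sketch (an editor's note, not part of the vendored file) showing how the iterable returned by `tokenizer()` can be consumed; the sample source string is made up.

```ts
import * as acorn from "acorn";

// Collect every token of a tiny snippet; iteration stops at end-of-input.
const tokens: acorn.Token[] = [];
for (const token of acorn.tokenizer("const answer = 42", { ecmaVersion: 2022 })) {
  tokens.push(token); // each entry matches the Token class declared above
}
console.log(tokens.map((t) => t.type.label));
```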
866
node_modules/acorn/dist/acorn.d.ts
generated
vendored
Normal file
@ -0,0 +1,866 @@
|
||||
export interface Node {
|
||||
start: number
|
||||
end: number
|
||||
type: string
|
||||
range?: [number, number]
|
||||
loc?: SourceLocation | null
|
||||
}
|
||||
|
||||
export interface SourceLocation {
|
||||
source?: string | null
|
||||
start: Position
|
||||
end: Position
|
||||
}
|
||||
|
||||
export interface Position {
|
||||
/** 1-based */
|
||||
line: number
|
||||
/** 0-based */
|
||||
column: number
|
||||
}
|
||||
|
||||
export interface Identifier extends Node {
|
||||
type: "Identifier"
|
||||
name: string
|
||||
}
|
||||
|
||||
export interface Literal extends Node {
|
||||
type: "Literal"
|
||||
value?: string | boolean | null | number | RegExp | bigint
|
||||
raw?: string
|
||||
regex?: {
|
||||
pattern: string
|
||||
flags: string
|
||||
}
|
||||
bigint?: string
|
||||
}
|
||||
|
||||
export interface Program extends Node {
|
||||
type: "Program"
|
||||
body: Array<Statement | ModuleDeclaration>
|
||||
sourceType: "script" | "module"
|
||||
}
|
||||
|
||||
export interface Function extends Node {
|
||||
id?: Identifier | null
|
||||
params: Array<Pattern>
|
||||
body: BlockStatement | Expression
|
||||
generator: boolean
|
||||
expression: boolean
|
||||
async: boolean
|
||||
}
|
||||
|
||||
export interface ExpressionStatement extends Node {
|
||||
type: "ExpressionStatement"
|
||||
expression: Expression | Literal
|
||||
directive?: string
|
||||
}
|
||||
|
||||
export interface BlockStatement extends Node {
|
||||
type: "BlockStatement"
|
||||
body: Array<Statement>
|
||||
}
|
||||
|
||||
export interface EmptyStatement extends Node {
|
||||
type: "EmptyStatement"
|
||||
}
|
||||
|
||||
export interface DebuggerStatement extends Node {
|
||||
type: "DebuggerStatement"
|
||||
}
|
||||
|
||||
export interface WithStatement extends Node {
|
||||
type: "WithStatement"
|
||||
object: Expression
|
||||
body: Statement
|
||||
}
|
||||
|
||||
export interface ReturnStatement extends Node {
|
||||
type: "ReturnStatement"
|
||||
argument?: Expression | null
|
||||
}
|
||||
|
||||
export interface LabeledStatement extends Node {
|
||||
type: "LabeledStatement"
|
||||
label: Identifier
|
||||
body: Statement
|
||||
}
|
||||
|
||||
export interface BreakStatement extends Node {
|
||||
type: "BreakStatement"
|
||||
label?: Identifier | null
|
||||
}
|
||||
|
||||
export interface ContinueStatement extends Node {
|
||||
type: "ContinueStatement"
|
||||
label?: Identifier | null
|
||||
}
|
||||
|
||||
export interface IfStatement extends Node {
|
||||
type: "IfStatement"
|
||||
test: Expression
|
||||
consequent: Statement
|
||||
alternate?: Statement | null
|
||||
}
|
||||
|
||||
export interface SwitchStatement extends Node {
|
||||
type: "SwitchStatement"
|
||||
discriminant: Expression
|
||||
cases: Array<SwitchCase>
|
||||
}
|
||||
|
||||
export interface SwitchCase extends Node {
|
||||
type: "SwitchCase"
|
||||
test?: Expression | null
|
||||
consequent: Array<Statement>
|
||||
}
|
||||
|
||||
export interface ThrowStatement extends Node {
|
||||
type: "ThrowStatement"
|
||||
argument: Expression
|
||||
}
|
||||
|
||||
export interface TryStatement extends Node {
|
||||
type: "TryStatement"
|
||||
block: BlockStatement
|
||||
handler?: CatchClause | null
|
||||
finalizer?: BlockStatement | null
|
||||
}
|
||||
|
||||
export interface CatchClause extends Node {
|
||||
type: "CatchClause"
|
||||
param?: Pattern | null
|
||||
body: BlockStatement
|
||||
}
|
||||
|
||||
export interface WhileStatement extends Node {
|
||||
type: "WhileStatement"
|
||||
test: Expression
|
||||
body: Statement
|
||||
}
|
||||
|
||||
export interface DoWhileStatement extends Node {
|
||||
type: "DoWhileStatement"
|
||||
body: Statement
|
||||
test: Expression
|
||||
}
|
||||
|
||||
export interface ForStatement extends Node {
|
||||
type: "ForStatement"
|
||||
init?: VariableDeclaration | Expression | null
|
||||
test?: Expression | null
|
||||
update?: Expression | null
|
||||
body: Statement
|
||||
}
|
||||
|
||||
export interface ForInStatement extends Node {
|
||||
type: "ForInStatement"
|
||||
left: VariableDeclaration | Pattern
|
||||
right: Expression
|
||||
body: Statement
|
||||
}
|
||||
|
||||
export interface FunctionDeclaration extends Function {
|
||||
type: "FunctionDeclaration"
|
||||
id: Identifier
|
||||
body: BlockStatement
|
||||
}
|
||||
|
||||
export interface VariableDeclaration extends Node {
|
||||
type: "VariableDeclaration"
|
||||
declarations: Array<VariableDeclarator>
|
||||
kind: "var" | "let" | "const"
|
||||
}
|
||||
|
||||
export interface VariableDeclarator extends Node {
|
||||
type: "VariableDeclarator"
|
||||
id: Pattern
|
||||
init?: Expression | null
|
||||
}
|
||||
|
||||
export interface ThisExpression extends Node {
|
||||
type: "ThisExpression"
|
||||
}
|
||||
|
||||
export interface ArrayExpression extends Node {
|
||||
type: "ArrayExpression"
|
||||
elements: Array<Expression | SpreadElement | null>
|
||||
}
|
||||
|
||||
export interface ObjectExpression extends Node {
|
||||
type: "ObjectExpression"
|
||||
properties: Array<Property | SpreadElement>
|
||||
}
|
||||
|
||||
export interface Property extends Node {
|
||||
type: "Property"
|
||||
key: Expression
|
||||
value: Expression
|
||||
kind: "init" | "get" | "set"
|
||||
method: boolean
|
||||
shorthand: boolean
|
||||
computed: boolean
|
||||
}
|
||||
|
||||
export interface FunctionExpression extends Function {
|
||||
type: "FunctionExpression"
|
||||
body: BlockStatement
|
||||
}
|
||||
|
||||
export interface UnaryExpression extends Node {
|
||||
type: "UnaryExpression"
|
||||
operator: UnaryOperator
|
||||
prefix: boolean
|
||||
argument: Expression
|
||||
}
|
||||
|
||||
export type UnaryOperator = "-" | "+" | "!" | "~" | "typeof" | "void" | "delete"
|
||||
|
||||
export interface UpdateExpression extends Node {
|
||||
type: "UpdateExpression"
|
||||
operator: UpdateOperator
|
||||
argument: Expression
|
||||
prefix: boolean
|
||||
}
|
||||
|
||||
export type UpdateOperator = "++" | "--"
|
||||
|
||||
export interface BinaryExpression extends Node {
|
||||
type: "BinaryExpression"
|
||||
operator: BinaryOperator
|
||||
left: Expression | PrivateIdentifier
|
||||
right: Expression
|
||||
}
|
||||
|
||||
export type BinaryOperator = "==" | "!=" | "===" | "!==" | "<" | "<=" | ">" | ">=" | "<<" | ">>" | ">>>" | "+" | "-" | "*" | "/" | "%" | "|" | "^" | "&" | "in" | "instanceof" | "**"
|
||||
|
||||
export interface AssignmentExpression extends Node {
|
||||
type: "AssignmentExpression"
|
||||
operator: AssignmentOperator
|
||||
left: Pattern
|
||||
right: Expression
|
||||
}
|
||||
|
||||
export type AssignmentOperator = "=" | "+=" | "-=" | "*=" | "/=" | "%=" | "<<=" | ">>=" | ">>>=" | "|=" | "^=" | "&=" | "**=" | "||=" | "&&=" | "??="
|
||||
|
||||
export interface LogicalExpression extends Node {
|
||||
type: "LogicalExpression"
|
||||
operator: LogicalOperator
|
||||
left: Expression
|
||||
right: Expression
|
||||
}
|
||||
|
||||
export type LogicalOperator = "||" | "&&" | "??"
|
||||
|
||||
export interface MemberExpression extends Node {
|
||||
type: "MemberExpression"
|
||||
object: Expression | Super
|
||||
property: Expression | PrivateIdentifier
|
||||
computed: boolean
|
||||
optional: boolean
|
||||
}
|
||||
|
||||
export interface ConditionalExpression extends Node {
|
||||
type: "ConditionalExpression"
|
||||
test: Expression
|
||||
alternate: Expression
|
||||
consequent: Expression
|
||||
}
|
||||
|
||||
export interface CallExpression extends Node {
|
||||
type: "CallExpression"
|
||||
callee: Expression | Super
|
||||
arguments: Array<Expression | SpreadElement>
|
||||
optional: boolean
|
||||
}
|
||||
|
||||
export interface NewExpression extends Node {
|
||||
type: "NewExpression"
|
||||
callee: Expression
|
||||
arguments: Array<Expression | SpreadElement>
|
||||
}
|
||||
|
||||
export interface SequenceExpression extends Node {
|
||||
type: "SequenceExpression"
|
||||
expressions: Array<Expression>
|
||||
}
|
||||
|
||||
export interface ForOfStatement extends Node {
|
||||
type: "ForOfStatement"
|
||||
left: VariableDeclaration | Pattern
|
||||
right: Expression
|
||||
body: Statement
|
||||
await: boolean
|
||||
}
|
||||
|
||||
export interface Super extends Node {
|
||||
type: "Super"
|
||||
}
|
||||
|
||||
export interface SpreadElement extends Node {
|
||||
type: "SpreadElement"
|
||||
argument: Expression
|
||||
}
|
||||
|
||||
export interface ArrowFunctionExpression extends Function {
|
||||
type: "ArrowFunctionExpression"
|
||||
}
|
||||
|
||||
export interface YieldExpression extends Node {
|
||||
type: "YieldExpression"
|
||||
argument?: Expression | null
|
||||
delegate: boolean
|
||||
}
|
||||
|
||||
export interface TemplateLiteral extends Node {
|
||||
type: "TemplateLiteral"
|
||||
quasis: Array<TemplateElement>
|
||||
expressions: Array<Expression>
|
||||
}
|
||||
|
||||
export interface TaggedTemplateExpression extends Node {
|
||||
type: "TaggedTemplateExpression"
|
||||
tag: Expression
|
||||
quasi: TemplateLiteral
|
||||
}
|
||||
|
||||
export interface TemplateElement extends Node {
|
||||
type: "TemplateElement"
|
||||
tail: boolean
|
||||
value: {
|
||||
cooked?: string | null
|
||||
raw: string
|
||||
}
|
||||
}
|
||||
|
||||
export interface AssignmentProperty extends Node {
|
||||
type: "Property"
|
||||
key: Expression
|
||||
value: Pattern
|
||||
kind: "init"
|
||||
method: false
|
||||
shorthand: boolean
|
||||
computed: boolean
|
||||
}
|
||||
|
||||
export interface ObjectPattern extends Node {
|
||||
type: "ObjectPattern"
|
||||
properties: Array<AssignmentProperty | RestElement>
|
||||
}
|
||||
|
||||
export interface ArrayPattern extends Node {
|
||||
type: "ArrayPattern"
|
||||
elements: Array<Pattern | null>
|
||||
}
|
||||
|
||||
export interface RestElement extends Node {
|
||||
type: "RestElement"
|
||||
argument: Pattern
|
||||
}
|
||||
|
||||
export interface AssignmentPattern extends Node {
|
||||
type: "AssignmentPattern"
|
||||
left: Pattern
|
||||
right: Expression
|
||||
}
|
||||
|
||||
export interface Class extends Node {
|
||||
id?: Identifier | null
|
||||
superClass?: Expression | null
|
||||
body: ClassBody
|
||||
}
|
||||
|
||||
export interface ClassBody extends Node {
|
||||
type: "ClassBody"
|
||||
body: Array<MethodDefinition | PropertyDefinition | StaticBlock>
|
||||
}
|
||||
|
||||
export interface MethodDefinition extends Node {
|
||||
type: "MethodDefinition"
|
||||
key: Expression | PrivateIdentifier
|
||||
value: FunctionExpression
|
||||
kind: "constructor" | "method" | "get" | "set"
|
||||
computed: boolean
|
||||
static: boolean
|
||||
}
|
||||
|
||||
export interface ClassDeclaration extends Class {
|
||||
type: "ClassDeclaration"
|
||||
id: Identifier
|
||||
}
|
||||
|
||||
export interface ClassExpression extends Class {
|
||||
type: "ClassExpression"
|
||||
}
|
||||
|
||||
export interface MetaProperty extends Node {
|
||||
type: "MetaProperty"
|
||||
meta: Identifier
|
||||
property: Identifier
|
||||
}
|
||||
|
||||
export interface ImportDeclaration extends Node {
|
||||
type: "ImportDeclaration"
|
||||
specifiers: Array<ImportSpecifier | ImportDefaultSpecifier | ImportNamespaceSpecifier>
|
||||
source: Literal
|
||||
attributes: Array<ImportAttribute>
|
||||
}
|
||||
|
||||
export interface ImportSpecifier extends Node {
|
||||
type: "ImportSpecifier"
|
||||
imported: Identifier | Literal
|
||||
local: Identifier
|
||||
}
|
||||
|
||||
export interface ImportDefaultSpecifier extends Node {
|
||||
type: "ImportDefaultSpecifier"
|
||||
local: Identifier
|
||||
}
|
||||
|
||||
export interface ImportNamespaceSpecifier extends Node {
|
||||
type: "ImportNamespaceSpecifier"
|
||||
local: Identifier
|
||||
}
|
||||
|
||||
export interface ImportAttribute extends Node {
|
||||
type: "ImportAttribute"
|
||||
key: Identifier | Literal
|
||||
value: Literal
|
||||
}
|
||||
|
||||
export interface ExportNamedDeclaration extends Node {
|
||||
type: "ExportNamedDeclaration"
|
||||
declaration?: Declaration | null
|
||||
specifiers: Array<ExportSpecifier>
|
||||
source?: Literal | null
|
||||
attributes: Array<ImportAttribute>
|
||||
}
|
||||
|
||||
export interface ExportSpecifier extends Node {
|
||||
type: "ExportSpecifier"
|
||||
exported: Identifier | Literal
|
||||
local: Identifier | Literal
|
||||
}
|
||||
|
||||
export interface AnonymousFunctionDeclaration extends Function {
|
||||
type: "FunctionDeclaration"
|
||||
id: null
|
||||
body: BlockStatement
|
||||
}
|
||||
|
||||
export interface AnonymousClassDeclaration extends Class {
|
||||
type: "ClassDeclaration"
|
||||
id: null
|
||||
}
|
||||
|
||||
export interface ExportDefaultDeclaration extends Node {
|
||||
type: "ExportDefaultDeclaration"
|
||||
declaration: AnonymousFunctionDeclaration | FunctionDeclaration | AnonymousClassDeclaration | ClassDeclaration | Expression
|
||||
}
|
||||
|
||||
export interface ExportAllDeclaration extends Node {
|
||||
type: "ExportAllDeclaration"
|
||||
source: Literal
|
||||
exported?: Identifier | Literal | null
|
||||
attributes: Array<ImportAttribute>
|
||||
}
|
||||
|
||||
export interface AwaitExpression extends Node {
|
||||
type: "AwaitExpression"
|
||||
argument: Expression
|
||||
}
|
||||
|
||||
export interface ChainExpression extends Node {
|
||||
type: "ChainExpression"
|
||||
expression: MemberExpression | CallExpression
|
||||
}
|
||||
|
||||
export interface ImportExpression extends Node {
|
||||
type: "ImportExpression"
|
||||
source: Expression
|
||||
options: Expression | null
|
||||
}
|
||||
|
||||
export interface ParenthesizedExpression extends Node {
|
||||
type: "ParenthesizedExpression"
|
||||
expression: Expression
|
||||
}
|
||||
|
||||
export interface PropertyDefinition extends Node {
|
||||
type: "PropertyDefinition"
|
||||
key: Expression | PrivateIdentifier
|
||||
value?: Expression | null
|
||||
computed: boolean
|
||||
static: boolean
|
||||
}
|
||||
|
||||
export interface PrivateIdentifier extends Node {
|
||||
type: "PrivateIdentifier"
|
||||
name: string
|
||||
}
|
||||
|
||||
export interface StaticBlock extends Node {
|
||||
type: "StaticBlock"
|
||||
body: Array<Statement>
|
||||
}
|
||||
|
||||
export type Statement =
|
||||
| ExpressionStatement
|
||||
| BlockStatement
|
||||
| EmptyStatement
|
||||
| DebuggerStatement
|
||||
| WithStatement
|
||||
| ReturnStatement
|
||||
| LabeledStatement
|
||||
| BreakStatement
|
||||
| ContinueStatement
|
||||
| IfStatement
|
||||
| SwitchStatement
|
||||
| ThrowStatement
|
||||
| TryStatement
|
||||
| WhileStatement
|
||||
| DoWhileStatement
|
||||
| ForStatement
|
||||
| ForInStatement
|
||||
| ForOfStatement
|
||||
| Declaration
|
||||
|
||||
export type Declaration =
|
||||
| FunctionDeclaration
|
||||
| VariableDeclaration
|
||||
| ClassDeclaration
|
||||
|
||||
export type Expression =
|
||||
| Identifier
|
||||
| Literal
|
||||
| ThisExpression
|
||||
| ArrayExpression
|
||||
| ObjectExpression
|
||||
| FunctionExpression
|
||||
| UnaryExpression
|
||||
| UpdateExpression
|
||||
| BinaryExpression
|
||||
| AssignmentExpression
|
||||
| LogicalExpression
|
||||
| MemberExpression
|
||||
| ConditionalExpression
|
||||
| CallExpression
|
||||
| NewExpression
|
||||
| SequenceExpression
|
||||
| ArrowFunctionExpression
|
||||
| YieldExpression
|
||||
| TemplateLiteral
|
||||
| TaggedTemplateExpression
|
||||
| ClassExpression
|
||||
| MetaProperty
|
||||
| AwaitExpression
|
||||
| ChainExpression
|
||||
| ImportExpression
|
||||
| ParenthesizedExpression
|
||||
|
||||
export type Pattern =
|
||||
| Identifier
|
||||
| MemberExpression
|
||||
| ObjectPattern
|
||||
| ArrayPattern
|
||||
| RestElement
|
||||
| AssignmentPattern
|
||||
|
||||
export type ModuleDeclaration =
|
||||
| ImportDeclaration
|
||||
| ExportNamedDeclaration
|
||||
| ExportDefaultDeclaration
|
||||
| ExportAllDeclaration
|
||||
|
||||
export type AnyNode = Statement | Expression | Declaration | ModuleDeclaration | Literal | Program | SwitchCase | CatchClause | Property | Super | SpreadElement | TemplateElement | AssignmentProperty | ObjectPattern | ArrayPattern | RestElement | AssignmentPattern | ClassBody | MethodDefinition | MetaProperty | ImportAttribute | ImportSpecifier | ImportDefaultSpecifier | ImportNamespaceSpecifier | ExportSpecifier | AnonymousFunctionDeclaration | AnonymousClassDeclaration | PropertyDefinition | PrivateIdentifier | StaticBlock | VariableDeclarator
|
||||
|
||||
export function parse(input: string, options: Options): Program
|
||||
|
||||
export function parseExpressionAt(input: string, pos: number, options: Options): Expression
|
||||
|
||||
export function tokenizer(input: string, options: Options): {
|
||||
getToken(): Token
|
||||
[Symbol.iterator](): Iterator<Token>
|
||||
}
|
||||
|
||||
export type ecmaVersion = 3 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 2015 | 2016 | 2017 | 2018 | 2019 | 2020 | 2021 | 2022 | 2023 | 2024 | 2025 | "latest"
|
||||
|
||||
export interface Options {
|
||||
/**
|
||||
* `ecmaVersion` indicates the ECMAScript version to parse. Can be a
|
||||
* number, either in year (`2022`) or plain version number (`6`) form,
|
||||
* or `"latest"` (the latest the library supports). This influences
|
||||
* support for strict mode, the set of reserved words, and support for
|
||||
* new syntax features.
|
||||
*/
|
||||
ecmaVersion: ecmaVersion
|
||||
|
||||
/**
|
||||
* `sourceType` indicates the mode the code should be parsed in.
|
||||
* Can be either `"script"` or `"module"`. This influences global
|
||||
* strict mode and parsing of `import` and `export` declarations.
|
||||
*/
|
||||
sourceType?: "script" | "module"
|
||||
|
||||
/**
|
||||
* a callback that will be called when a semicolon is automatically inserted.
|
||||
* @param lastTokEnd the position of the inserted semicolon as an offset
|
||||
* @param lastTokEndLoc location if {@link locations} is enabled
|
||||
*/
|
||||
onInsertedSemicolon?: (lastTokEnd: number, lastTokEndLoc?: Position) => void
|
||||
|
||||
/**
|
||||
* similar to `onInsertedSemicolon`, but for trailing commas
|
||||
* @param lastTokEnd the position of the comma as an offset
|
||||
* @param lastTokEndLoc location if `locations` is enabled
|
||||
*/
|
||||
onTrailingComma?: (lastTokEnd: number, lastTokEndLoc?: Position) => void
|
||||
|
||||
/**
|
||||
* By default, reserved words are only enforced if ecmaVersion >= 5.
|
||||
* Set `allowReserved` to a boolean value to explicitly turn this on
|
||||
* and off. When this option has the value "never", reserved words
|
||||
* and keywords can also not be used as property names.
|
||||
*/
|
||||
allowReserved?: boolean | "never"
|
||||
|
||||
/**
|
||||
* When enabled, a return at the top level is not considered an error.
|
||||
*/
|
||||
allowReturnOutsideFunction?: boolean
|
||||
|
||||
/**
|
||||
* When enabled, import/export statements are not constrained to
|
||||
* appearing at the top of the program, and an import.meta expression
|
||||
* in a script isn't considered an error.
|
||||
*/
|
||||
allowImportExportEverywhere?: boolean
|
||||
|
||||
/**
|
||||
* By default, `await` identifiers are allowed to appear at the top-level scope only if {@link ecmaVersion} >= 2022.
|
||||
* When enabled, await identifiers are allowed to appear at the top-level scope,
|
||||
* but they are still not allowed in non-async functions.
|
||||
*/
|
||||
allowAwaitOutsideFunction?: boolean
|
||||
|
||||
/**
|
||||
* When enabled, super identifiers are not constrained to
|
||||
* appearing in methods and do not raise an error when they appear elsewhere.
|
||||
*/
|
||||
allowSuperOutsideMethod?: boolean
|
||||
|
||||
/**
|
||||
* When enabled, hashbang directive in the beginning of file is
|
||||
* allowed and treated as a line comment. Enabled by default when
|
||||
* {@link ecmaVersion} >= 2023.
|
||||
*/
|
||||
allowHashBang?: boolean
|
||||
|
||||
/**
|
||||
* By default, the parser will verify that private properties are
|
||||
* only used in places where they are valid and have been declared.
|
||||
* Set this to false to turn such checks off.
|
||||
*/
|
||||
checkPrivateFields?: boolean
|
||||
|
||||
/**
|
||||
* When `locations` is on, `loc` properties holding objects with
|
||||
* `start` and `end` properties as {@link Position} objects will be attached to the
|
||||
* nodes.
|
||||
*/
|
||||
locations?: boolean
|
||||
|
||||
/**
|
||||
* a callback that will cause Acorn to call that function with object in the same
|
||||
* format as tokens returned from `tokenizer().getToken()`. Note
|
||||
* that you are not allowed to call the parser from the
|
||||
* callback—that will corrupt its internal state.
|
||||
*/
|
||||
onToken?: ((token: Token) => void) | Token[]
|
||||
|
||||
|
||||
/**
|
||||
* This takes a function or an array.
|
||||
*
|
||||
* When a function is passed, Acorn will call that function with `(block, text, start,
|
||||
* end)` parameters whenever a comment is skipped. `block` is a
|
||||
* boolean indicating whether this is a block (`/* *\/`) comment,
|
||||
* `text` is the content of the comment, and `start` and `end` are
|
||||
* character offsets that denote the start and end of the comment.
|
||||
* When the {@link locations} option is on, two more parameters are
|
||||
* passed, the full locations of {@link Position} type of the start and
|
||||
* end of the comments.
|
||||
*
|
||||
* When an array is passed, each found comment of {@link Comment} type is pushed to the array.
|
||||
*
|
||||
* Note that you are not allowed to call the
|
||||
* parser from the callback—that will corrupt its internal state.
|
||||
*/
|
||||
onComment?: ((
|
||||
isBlock: boolean, text: string, start: number, end: number, startLoc?: Position,
|
||||
endLoc?: Position
|
||||
) => void) | Comment[]
|
||||
|
||||
/**
|
||||
* Nodes have their start and end characters offsets recorded in
|
||||
* `start` and `end` properties (directly on the node, rather than
|
||||
* the `loc` object, which holds line/column data). To also add a
|
||||
* [semi-standardized][range] `range` property holding a `[start,
|
||||
* end]` array with the same numbers, set the `ranges` option to
|
||||
* `true`.
|
||||
*/
|
||||
ranges?: boolean
|
||||
|
||||
/**
|
||||
* It is possible to parse multiple files into a single AST by
|
||||
* passing the tree produced by parsing the first file as
|
||||
* `program` option in subsequent parses. This will add the
|
||||
* toplevel forms of the parsed file to the `Program` (top) node
|
||||
* of an existing parse tree.
|
||||
*/
|
||||
program?: Node
|
||||
|
||||
/**
|
||||
* When {@link locations} is on, you can pass this to record the source
|
||||
* file in every node's `loc` object.
|
||||
*/
|
||||
sourceFile?: string
|
||||
|
||||
/**
|
||||
* This value, if given, is stored in every node, whether {@link locations} is on or off.
|
||||
*/
|
||||
directSourceFile?: string
|
||||
|
||||
/**
|
||||
* When enabled, parenthesized expressions are represented by
|
||||
* (non-standard) ParenthesizedExpression nodes
|
||||
*/
|
||||
preserveParens?: boolean
|
||||
}
|
||||
|
||||
export class Parser {
|
||||
options: Options
|
||||
input: string
|
||||
|
||||
protected constructor(options: Options, input: string, startPos?: number)
|
||||
parse(): Program
|
||||
|
||||
static parse(input: string, options: Options): Program
|
||||
static parseExpressionAt(input: string, pos: number, options: Options): Expression
|
||||
static tokenizer(input: string, options: Options): {
|
||||
getToken(): Token
|
||||
[Symbol.iterator](): Iterator<Token>
|
||||
}
|
||||
static extend(...plugins: ((BaseParser: typeof Parser) => typeof Parser)[]): typeof Parser
|
||||
}
|
||||
|
||||
export const defaultOptions: Options
|
||||
|
||||
export function getLineInfo(input: string, offset: number): Position
|
||||
|
||||
export class TokenType {
|
||||
label: string
|
||||
keyword: string | undefined
|
||||
}
|
||||
|
||||
export const tokTypes: {
|
||||
num: TokenType
|
||||
regexp: TokenType
|
||||
string: TokenType
|
||||
name: TokenType
|
||||
privateId: TokenType
|
||||
eof: TokenType
|
||||
|
||||
bracketL: TokenType
|
||||
bracketR: TokenType
|
||||
braceL: TokenType
|
||||
braceR: TokenType
|
||||
parenL: TokenType
|
||||
parenR: TokenType
|
||||
comma: TokenType
|
||||
semi: TokenType
|
||||
colon: TokenType
|
||||
dot: TokenType
|
||||
question: TokenType
|
||||
questionDot: TokenType
|
||||
arrow: TokenType
|
||||
template: TokenType
|
||||
invalidTemplate: TokenType
|
||||
ellipsis: TokenType
|
||||
backQuote: TokenType
|
||||
dollarBraceL: TokenType
|
||||
|
||||
eq: TokenType
|
||||
assign: TokenType
|
||||
incDec: TokenType
|
||||
prefix: TokenType
|
||||
logicalOR: TokenType
|
||||
logicalAND: TokenType
|
||||
bitwiseOR: TokenType
|
||||
bitwiseXOR: TokenType
|
||||
bitwiseAND: TokenType
|
||||
equality: TokenType
|
||||
relational: TokenType
|
||||
bitShift: TokenType
|
||||
plusMin: TokenType
|
||||
modulo: TokenType
|
||||
star: TokenType
|
||||
slash: TokenType
|
||||
starstar: TokenType
|
||||
coalesce: TokenType
|
||||
|
||||
_break: TokenType
|
||||
_case: TokenType
|
||||
_catch: TokenType
|
||||
_continue: TokenType
|
||||
_debugger: TokenType
|
||||
_default: TokenType
|
||||
_do: TokenType
|
||||
_else: TokenType
|
||||
_finally: TokenType
|
||||
_for: TokenType
|
||||
_function: TokenType
|
||||
_if: TokenType
|
||||
_return: TokenType
|
||||
_switch: TokenType
|
||||
_throw: TokenType
|
||||
_try: TokenType
|
||||
_var: TokenType
|
||||
_const: TokenType
|
||||
_while: TokenType
|
||||
_with: TokenType
|
||||
_new: TokenType
|
||||
_this: TokenType
|
||||
_super: TokenType
|
||||
_class: TokenType
|
||||
_extends: TokenType
|
||||
_export: TokenType
|
||||
_import: TokenType
|
||||
_null: TokenType
|
||||
_true: TokenType
|
||||
_false: TokenType
|
||||
_in: TokenType
|
||||
_instanceof: TokenType
|
||||
_typeof: TokenType
|
||||
_void: TokenType
|
||||
_delete: TokenType
|
||||
}
|
||||
|
||||
export interface Comment {
|
||||
type: "Line" | "Block"
|
||||
value: string
|
||||
start: number
|
||||
end: number
|
||||
loc?: SourceLocation
|
||||
range?: [number, number]
|
||||
}
|
||||
|
||||
export class Token {
|
||||
type: TokenType
|
||||
start: number
|
||||
end: number
|
||||
loc?: SourceLocation
|
||||
range?: [number, number]
|
||||
}
|
||||
|
||||
export const version: string
|
||||
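As a quick orientation for reviewers, here is a hedged sketch (an editor's illustration, not part of node_modules/acorn/dist/acorn.d.ts) exercising `parse()` with a few of the `Options` declared above; the source string and option values are illustrative only.

```ts
import * as acorn from "acorn";

// Parse a small module, recording positions and collecting skipped comments.
const comments: acorn.Comment[] = [];
const program = acorn.parse("// greet\nexport const hi = () => 'hi'", {
  ecmaVersion: "latest",
  sourceType: "module",
  locations: true,
  onComment: comments, // when an array is passed, comments are pushed into it
});
console.log(program.body.length, comments[0]?.value);
```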
6183
node_modules/acorn/dist/acorn.js
generated
vendored
Normal file
File diff suppressed because it is too large
6154
node_modules/acorn/dist/acorn.mjs
generated
vendored
Normal file
File diff suppressed because it is too large
90
node_modules/acorn/dist/bin.js
generated
vendored
Normal file
@ -0,0 +1,90 @@
|
||||
'use strict';
|
||||
|
||||
var path = require('path');
|
||||
var fs = require('fs');
|
||||
var acorn = require('./acorn.js');
|
||||
|
||||
function _interopNamespaceDefault(e) {
|
||||
var n = Object.create(null);
|
||||
if (e) {
|
||||
Object.keys(e).forEach(function (k) {
|
||||
if (k !== 'default') {
|
||||
var d = Object.getOwnPropertyDescriptor(e, k);
|
||||
Object.defineProperty(n, k, d.get ? d : {
|
||||
enumerable: true,
|
||||
get: function () { return e[k]; }
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
n.default = e;
|
||||
return Object.freeze(n);
|
||||
}
|
||||
|
||||
var acorn__namespace = /*#__PURE__*/_interopNamespaceDefault(acorn);
|
||||
|
||||
var inputFilePaths = [], forceFileName = false, fileMode = false, silent = false, compact = false, tokenize = false;
|
||||
var options = {};
|
||||
|
||||
function help(status) {
|
||||
var print = (status === 0) ? console.log : console.error;
|
||||
print("usage: " + path.basename(process.argv[1]) + " [--ecma3|--ecma5|--ecma6|--ecma7|--ecma8|--ecma9|...|--ecma2015|--ecma2016|--ecma2017|--ecma2018|...]");
|
||||
print(" [--tokenize] [--locations] [--allow-hash-bang] [--allow-await-outside-function] [--compact] [--silent] [--module] [--help] [--] [<infile>...]");
|
||||
process.exit(status);
|
||||
}
|
||||
|
||||
for (var i = 2; i < process.argv.length; ++i) {
|
||||
var arg = process.argv[i];
|
||||
if (arg[0] !== "-" || arg === "-") { inputFilePaths.push(arg); }
|
||||
else if (arg === "--") {
|
||||
inputFilePaths.push.apply(inputFilePaths, process.argv.slice(i + 1));
|
||||
forceFileName = true;
|
||||
break
|
||||
} else if (arg === "--locations") { options.locations = true; }
|
||||
else if (arg === "--allow-hash-bang") { options.allowHashBang = true; }
|
||||
else if (arg === "--allow-await-outside-function") { options.allowAwaitOutsideFunction = true; }
|
||||
else if (arg === "--silent") { silent = true; }
|
||||
else if (arg === "--compact") { compact = true; }
|
||||
else if (arg === "--help") { help(0); }
|
||||
else if (arg === "--tokenize") { tokenize = true; }
|
||||
else if (arg === "--module") { options.sourceType = "module"; }
|
||||
else {
|
||||
var match = arg.match(/^--ecma(\d+)$/);
|
||||
if (match)
|
||||
{ options.ecmaVersion = +match[1]; }
|
||||
else
|
||||
{ help(1); }
|
||||
}
|
||||
}
|
||||
|
||||
function run(codeList) {
|
||||
var result = [], fileIdx = 0;
|
||||
try {
|
||||
codeList.forEach(function (code, idx) {
|
||||
fileIdx = idx;
|
||||
if (!tokenize) {
|
||||
result = acorn__namespace.parse(code, options);
|
||||
options.program = result;
|
||||
} else {
|
||||
var tokenizer = acorn__namespace.tokenizer(code, options), token;
|
||||
do {
|
||||
token = tokenizer.getToken();
|
||||
result.push(token);
|
||||
} while (token.type !== acorn__namespace.tokTypes.eof)
|
||||
}
|
||||
});
|
||||
} catch (e) {
|
||||
console.error(fileMode ? e.message.replace(/\(\d+:\d+\)$/, function (m) { return m.slice(0, 1) + inputFilePaths[fileIdx] + " " + m.slice(1); }) : e.message);
|
||||
process.exit(1);
|
||||
}
|
||||
if (!silent) { console.log(JSON.stringify(result, null, compact ? null : 2)); }
|
||||
}
|
||||
|
||||
if (fileMode = inputFilePaths.length && (forceFileName || !inputFilePaths.includes("-") || inputFilePaths.length !== 1)) {
|
||||
run(inputFilePaths.map(function (path) { return fs.readFileSync(path, "utf8"); }));
|
||||
} else {
|
||||
var code = "";
|
||||
process.stdin.resume();
|
||||
process.stdin.on("data", function (chunk) { return code += chunk; });
|
||||
process.stdin.on("end", function () { return run([code]); });
|
||||
}
|
||||
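For reference while reviewing the CLI above: the flags it accepts map directly to the argument loop (`--ecma<N>`, `--module`, `--locations`, `--tokenize`, `--compact`, `--silent`, `--allow-hash-bang`, `--allow-await-outside-function`), and source is read from stdin when no file is given. A couple of illustrative invocations, with a made-up file name, in the same plain-fence style the package READMEs use:

```
$> acorn --ecma2020 --module --locations app.js
$> echo "40 + 2" | acorn --tokenize --compact
```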
50
node_modules/acorn/package.json
generated
vendored
Normal file
@ -0,0 +1,50 @@
|
||||
{
|
||||
"name": "acorn",
|
||||
"description": "ECMAScript parser",
|
||||
"homepage": "https://github.com/acornjs/acorn",
|
||||
"main": "dist/acorn.js",
|
||||
"types": "dist/acorn.d.ts",
|
||||
"module": "dist/acorn.mjs",
|
||||
"exports": {
|
||||
".": [
|
||||
{
|
||||
"import": "./dist/acorn.mjs",
|
||||
"require": "./dist/acorn.js",
|
||||
"default": "./dist/acorn.js"
|
||||
},
|
||||
"./dist/acorn.js"
|
||||
],
|
||||
"./package.json": "./package.json"
|
||||
},
|
||||
"version": "8.14.1",
|
||||
"engines": {
|
||||
"node": ">=0.4.0"
|
||||
},
|
||||
"maintainers": [
|
||||
{
|
||||
"name": "Marijn Haverbeke",
|
||||
"email": "marijnh@gmail.com",
|
||||
"web": "https://marijnhaverbeke.nl"
|
||||
},
|
||||
{
|
||||
"name": "Ingvar Stepanyan",
|
||||
"email": "me@rreverser.com",
|
||||
"web": "https://rreverser.com/"
|
||||
},
|
||||
{
|
||||
"name": "Adrian Heine",
|
||||
"web": "http://adrianheine.de"
|
||||
}
|
||||
],
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/acornjs/acorn.git"
|
||||
},
|
||||
"license": "MIT",
|
||||
"scripts": {
|
||||
"prepare": "cd ..; npm run build:main"
|
||||
},
|
||||
"bin": {
|
||||
"acorn": "bin/acorn"
|
||||
}
|
||||
}
|
||||
27
node_modules/bcryptjs/LICENSE
generated
vendored
Normal file
@ -0,0 +1,27 @@
|
||||
bcrypt.js
|
||||
---------
|
||||
Copyright (c) 2012 Nevins Bartolomeo <nevins.bartolomeo@gmail.com>
|
||||
Copyright (c) 2012 Shane Girish <shaneGirish@gmail.com>
|
||||
Copyright (c) 2025 Daniel Wirtz <dcode@dcode.io>
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions
|
||||
are met:
|
||||
1. Redistributions of source code must retain the above copyright
|
||||
notice, this list of conditions and the following disclaimer.
|
||||
2. Redistributions in binary form must reproduce the above copyright
|
||||
notice, this list of conditions and the following disclaimer in the
|
||||
documentation and/or other materials provided with the distribution.
|
||||
3. The name of the author may not be used to endorse or promote products
|
||||
derived from this software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
|
||||
IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
|
||||
OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
|
||||
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
|
||||
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
|
||||
NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
|
||||
THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
201
node_modules/bcryptjs/README.md
generated
vendored
Normal file
@ -0,0 +1,201 @@
|
||||
# bcrypt.js
|
||||
|
||||
Optimized bcrypt in JavaScript with zero dependencies, with TypeScript support. Compatible to the C++
|
||||
[bcrypt](https://npmjs.org/package/bcrypt) binding on Node.js and also working in the browser.
|
||||
|
||||
[](https://github.com/dcodeIO/bcrypt.js/actions/workflows/test.yml) [](https://github.com/dcodeIO/bcrypt.js/actions/workflows/publish.yml) [](https://www.npmjs.com/package/bcryptjs)
|
||||
|
||||
## Security considerations
|
||||
|
||||
Besides incorporating a salt to protect against rainbow table attacks, bcrypt is an adaptive function: over time, the
|
||||
iteration count can be increased to make it slower, so it remains resistant to brute-force search attacks even with
|
||||
increasing computation power. ([see](http://en.wikipedia.org/wiki/Bcrypt))
|
||||
|
||||
While bcrypt.js is compatible to the C++ bcrypt binding, it is written in pure JavaScript and thus slower ([about 30%](https://github.com/dcodeIO/bcrypt.js/wiki/Benchmark)), effectively reducing the number of iterations that can be
|
||||
processed in an equal time span.
|
||||
|
||||
The maximum input length is 72 bytes (note that UTF-8 encoded characters use up to 4 bytes) and the length of generated
|
||||
hashes is 60 characters. Note that maximum input length is not implicitly checked by the library for compatibility with
|
||||
the C++ binding on Node.js, but should be checked with `bcrypt.truncates(password)` where necessary.
|
||||
|
||||
## Usage
|
||||
|
||||
The package exports an ECMAScript module with an UMD fallback.
|
||||
|
||||
```
|
||||
$> npm install bcryptjs
|
||||
```
|
||||
|
||||
```ts
|
||||
import bcrypt from "bcryptjs";
|
||||
```
|
||||
|
||||
### Usage with a CDN
|
||||
|
||||
- From GitHub via [jsDelivr](https://www.jsdelivr.com):<br />
|
||||
`https://cdn.jsdelivr.net/gh/dcodeIO/bcrypt.js@TAG/index.js` (ESM)
|
||||
- From npm via [jsDelivr](https://www.jsdelivr.com):<br />
|
||||
`https://cdn.jsdelivr.net/npm/bcryptjs@VERSION/index.js` (ESM)<br />
|
||||
`https://cdn.jsdelivr.net/npm/bcryptjs@VERSION/umd/index.js` (UMD)
|
||||
- From npm via [unpkg](https://unpkg.com):<br />
|
||||
`https://unpkg.com/bcryptjs@VERSION/index.js` (ESM)<br />
|
||||
`https://unpkg.com/bcryptjs@VERSION/umd/index.js` (UMD)
|
||||
|
||||
Replace `TAG` respectively `VERSION` with a [specific version](https://github.com/dcodeIO/bcrypt.js/releases) or omit it (not recommended in production) to use latest.
|
||||
|
||||
When using the ESM variant in a browser, the `crypto` import needs to be stubbed out, for example using an [import map](https://developer.mozilla.org/en-US/docs/Web/HTML/Element/script/type/importmap). Bundlers should omit it automatically.
|
||||
|
||||
### Usage - Sync
|
||||
|
||||
To hash a password:
|
||||
|
||||
```ts
|
||||
const salt = bcrypt.genSaltSync(10);
|
||||
const hash = bcrypt.hashSync("B4c0/\/", salt);
|
||||
// Store hash in your password DB
|
||||
```
|
||||
|
||||
To check a password:
|
||||
|
||||
```ts
|
||||
// Load hash from your password DB
|
||||
bcrypt.compareSync("B4c0/\/", hash); // true
|
||||
bcrypt.compareSync("not_bacon", hash); // false
|
||||
```
|
||||
|
||||
Auto-gen a salt and hash:
|
||||
|
||||
```ts
|
||||
const hash = bcrypt.hashSync("bacon", 10);
|
||||
```
|
||||
|
||||
### Usage - Async
|
||||
|
||||
To hash a password:
|
||||
|
||||
```ts
|
||||
const salt = await bcrypt.genSalt(10);
|
||||
const hash = await bcrypt.hash("B4c0/\/", salt);
|
||||
// Store hash in your password DB
|
||||
```
|
||||
|
||||
```ts
|
||||
bcrypt.genSalt(10, (err, salt) => {
|
||||
bcrypt.hash("B4c0/\/", salt, function (err, hash) {
|
||||
// Store hash in your password DB
|
||||
});
|
||||
});
|
||||
```
|
||||
|
||||
To check a password:
|
||||
|
||||
```ts
|
||||
// Load hash from your password DB
|
||||
await bcrypt.compare("B4c0/\/", hash); // true
|
||||
await bcrypt.compare("not_bacon", hash); // false
|
||||
```
|
||||
|
||||
```ts
|
||||
// Load hash from your password DB
|
||||
bcrypt.compare("B4c0/\/", hash, (err, res) => {
|
||||
// res === true
|
||||
});
|
||||
bcrypt.compare("not_bacon", hash, (err, res) => {
|
||||
// res === false
|
||||
});
|
||||
```
|
||||
|
||||
Auto-gen a salt and hash:
|
||||
|
||||
```ts
|
||||
await bcrypt.hash("B4c0/\/", 10);
|
||||
// Store hash in your password DB
|
||||
```
|
||||
|
||||
```ts
|
||||
bcrypt.hash("B4c0/\/", 10, (err, hash) => {
|
||||
// Store hash in your password DB
|
||||
});
|
||||
```
|
||||
|
||||
**Note:** Under the hood, asynchronous APIs split an operation into small chunks. After the completion of a chunk, the execution of the next chunk is placed on the back of the [JS event queue](https://developer.mozilla.org/en/docs/Web/JavaScript/EventLoop), efficiently yielding for other computation to execute.
|
||||
|
||||
### Usage - Command Line
|
||||
|
||||
```
|
||||
Usage: bcrypt <input> [rounds|salt]
|
||||
```
|
||||
|
||||
## API
|
||||
|
||||
### Callback types
|
||||
|
||||
- **Callback<`T`>**: `(err: Error | null, result?: T) => void`<br />
|
||||
Called with an error on failure or a value of type `T` upon success.
|
||||
|
||||
- **ProgressCallback**: `(percentage: number) => void`<br />
|
||||
Called with the percentage of rounds completed (0.0 - 1.0), maximally once per `MAX_EXECUTION_TIME = 100` ms.
|
||||
|
||||
- **RandomFallback**: `(length: number) => number[]`<br />
|
||||
Called to obtain random bytes when both [Web Crypto API](http://www.w3.org/TR/WebCryptoAPI/) and Node.js
|
||||
[crypto](http://nodejs.org/api/crypto.html) are not available.
|
||||
|
||||
### Functions
|
||||
|
||||
- bcrypt.**genSaltSync**(rounds?: `number`): `string`<br />
|
||||
Synchronously generates a salt. Number of rounds defaults to 10 when omitted.
|
||||
|
||||
- bcrypt.**genSalt**(rounds?: `number`): `Promise<string>`<br />
|
||||
Asynchronously generates a salt. Number of rounds defaults to 10 when omitted.
|
||||
|
||||
- bcrypt.**genSalt**([rounds: `number`, ]callback: `Callback<string>`): `void`<br />
|
||||
Asynchronously generates a salt. Number of rounds defaults to 10 when omitted.
|
||||
|
||||
- bcrypt.**truncates**(password: `string`): `boolean`<br />
|
||||
Tests if a password will be truncated when hashed, that is its length is greater than 72 bytes when converted to UTF-8.
|
||||
|
||||
- bcrypt.**hashSync**(password: `string`, salt?: `number | string`): `string`
|
||||
Synchronously generates a hash for the given password. Number of rounds defaults to 10 when omitted.
|
||||
|
||||
- bcrypt.**hash**(password: `string`, salt: `number | string`): `Promise<string>`<br />
|
||||
Asynchronously generates a hash for the given password.
|
||||
|
||||
- bcrypt.**hash**(password: `string`, salt: `number | string`, callback: `Callback<string>`, progressCallback?: `ProgressCallback`): `void`<br />
|
||||
Asynchronously generates a hash for the given password.
|
||||
|
||||
- bcrypt.**compareSync**(password: `string`, hash: `string`): `boolean`<br />
|
||||
Synchronously tests a password against a hash.
|
||||
|
||||
- bcrypt.**compare**(password: `string`, hash: `string`): `Promise<boolean>`<br />
|
||||
Asynchronously compares a password against a hash.
|
||||
|
||||
- bcrypt.**compare**(password: `string`, hash: `string`, callback: `Callback<boolean>`, progressCallback?: `ProgressCallback`)<br />
|
||||
Asynchronously compares a password against a hash.
|
||||
|
||||
- bcrypt.**getRounds**(hash: `string`): `number`<br />
|
||||
Gets the number of rounds used to encrypt the specified hash.
|
||||
|
||||
- bcrypt.**getSalt**(hash: `string`): `string`<br />
|
||||
Gets the salt portion from a hash. Does not validate the hash.
|
||||
|
||||
- bcrypt.**setRandomFallback**(random: `RandomFallback`): `void`<br />
|
||||
Sets the pseudo random number generator to use as a fallback if neither [Web Crypto API](http://www.w3.org/TR/WebCryptoAPI/) nor Node.js [crypto](http://nodejs.org/api/crypto.html) are available. Please note: It is highly important that the PRNG used is cryptographically secure and that it is seeded properly!
|
||||
|
||||
## Building
|
||||
|
||||
Building the UMD fallback:
|
||||
|
||||
```
|
||||
$> npm run build
|
||||
```
|
||||
|
||||
Running the [tests](./tests):
|
||||
|
||||
```
|
||||
$> npm test
|
||||
```
|
||||
|
||||
## Credits
|
||||
|
||||
Based on work started by Shane Girish at [bcrypt-nodejs](https://github.com/shaneGirish/bcrypt-nodejs), which is itself
|
||||
based on [javascript-bcrypt](http://code.google.com/p/javascript-bcrypt/) (New BSD-licensed).
|
||||
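The README above documents the pieces individually; this hedged sketch (an editor's illustration, not part of the vendored package) combines the Promise-based API with the `truncates()` check for the 72-byte limit noted above. Function names come from the README; all values are illustrative.

```ts
import bcrypt from "bcryptjs";

// Hash a password with a cost factor of 12, refusing inputs that would be truncated.
async function hashPassword(password: string): Promise<string> {
  if (bcrypt.truncates(password)) {
    throw new Error("password exceeds 72 UTF-8 bytes and would be truncated");
  }
  const salt = await bcrypt.genSalt(12);
  return bcrypt.hash(password, salt);
}

// Verify a password against a previously stored hash.
async function verifyPassword(password: string, storedHash: string): Promise<boolean> {
  return bcrypt.compare(password, storedHash);
}
```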
23
node_modules/bcryptjs/bin/bcrypt
generated
vendored
Executable file
@ -0,0 +1,23 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
import path from "node:path";
|
||||
import bcrypt from "../index.js";
|
||||
|
||||
if (process.argv.length < 3) {
|
||||
console.log(
|
||||
"Usage: " + path.basename(process.argv[1]) + " <input> [rounds|salt]",
|
||||
);
|
||||
process.exit(1);
|
||||
} else {
|
||||
var salt;
|
||||
if (process.argv.length > 3) {
|
||||
salt = process.argv[3];
|
||||
var rounds = parseInt(salt, 10);
|
||||
if (rounds == salt) {
|
||||
salt = bcrypt.genSaltSync(rounds);
|
||||
}
|
||||
} else {
|
||||
salt = bcrypt.genSaltSync();
|
||||
}
|
||||
console.log(bcrypt.hashSync(process.argv[2], salt));
|
||||
}
|
||||
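The script above implements the `bcrypt <input> [rounds|salt]` usage shown in the README: a numeric second argument generates a salt with that many rounds, otherwise it is used as the salt directly, and the resulting hash is printed. An illustrative invocation (the password is made up):

```
$> bcrypt "correct horse battery staple" 12
```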
3
node_modules/bcryptjs/index.d.ts
generated
vendored
Normal file
@ -0,0 +1,3 @@
|
||||
import * as bcrypt from "./types.js";
|
||||
export * from "./types.js";
|
||||
export default bcrypt;
|
||||
1161
node_modules/bcryptjs/index.js
generated
vendored
Normal file
File diff suppressed because it is too large
76
node_modules/bcryptjs/package.json
generated
vendored
Normal file
@ -0,0 +1,76 @@
|
||||
{
|
||||
"name": "bcryptjs",
|
||||
"description": "Optimized bcrypt in plain JavaScript with zero dependencies, with TypeScript support. Compatible to 'bcrypt'.",
|
||||
"version": "3.0.2",
|
||||
"author": "Daniel Wirtz <dcode@dcode.io>",
|
||||
"contributors": [
|
||||
"Shane Girish <shaneGirish@gmail.com> (https://github.com/shaneGirish)",
|
||||
"Alex Murray <> (https://github.com/alexmurray)",
|
||||
"Nicolas Pelletier <> (https://github.com/NicolasPelletier)",
|
||||
"Josh Rogers <> (https://github.com/geekymole)",
|
||||
"Noah Isaacson <noah@nisaacson.com> (https://github.com/nisaacson)"
|
||||
],
|
||||
"repository": {
|
||||
"type": "url",
|
||||
"url": "https://github.com/dcodeIO/bcrypt.js.git"
|
||||
},
|
||||
"bugs": {
|
||||
"url": "https://github.com/dcodeIO/bcrypt.js/issues"
|
||||
},
|
||||
"keywords": [
|
||||
"bcrypt",
|
||||
"password",
|
||||
"auth",
|
||||
"authentication",
|
||||
"encryption",
|
||||
"crypt",
|
||||
"crypto"
|
||||
],
|
||||
"type": "module",
|
||||
"main": "umd/index.js",
|
||||
"types": "umd/index.d.ts",
|
||||
"exports": {
|
||||
".": {
|
||||
"import": {
|
||||
"types": "./index.d.ts",
|
||||
"default": "./index.js"
|
||||
},
|
||||
"require": {
|
||||
"types": "./umd/index.d.ts",
|
||||
"default": "./umd/index.js"
|
||||
}
|
||||
}
|
||||
},
|
||||
"bin": {
|
||||
"bcrypt": "bin/bcrypt"
|
||||
},
|
||||
"license": "BSD-3-Clause",
|
||||
"scripts": {
|
||||
"build": "node scripts/build.js",
|
||||
"lint": "prettier --check .",
|
||||
"format": "prettier --write .",
|
||||
"test": "npm run test:unit && npm run test:typescript",
|
||||
"test:unit": "node tests",
|
||||
"test:typescript": "tsc --project tests/typescript/tsconfig.esnext.json && tsc --project tests/typescript/tsconfig.nodenext.json && tsc --project tests/typescript/tsconfig.commonjs.json && tsc --project tests/typescript/tsconfig.global.json"
|
||||
},
|
||||
"files": [
|
||||
"index.js",
|
||||
"index.d.ts",
|
||||
"types.d.ts",
|
||||
"umd/index.js",
|
||||
"umd/index.d.ts",
|
||||
"umd/types.d.ts",
|
||||
"umd/package.json",
|
||||
"LICENSE",
|
||||
"README.md"
|
||||
],
|
||||
"browser": {
|
||||
"crypto": false
|
||||
},
|
||||
"devDependencies": {
|
||||
"bcrypt": "^5.1.1",
|
||||
"esm2umd": "^0.3.1",
|
||||
"prettier": "^3.5.0",
|
||||
"typescript": "^5.7.3"
|
||||
}
|
||||
}
|
||||
157
node_modules/bcryptjs/types.d.ts
generated
vendored
Normal file
@ -0,0 +1,157 @@
|
||||
// Originally imported from https://github.com/DefinitelyTyped/DefinitelyTyped/blob/8b36dbdf95b624b8a7cd7f8416f06c15d274f9e6/types/bcryptjs/index.d.ts
|
||||
// MIT license.
|
||||
|
||||
/** Called with an error on failure or a value of type `T` upon success. */
|
||||
type Callback<T> = (err: Error | null, result?: T) => void;
|
||||
/** Called with the percentage of rounds completed (0.0 - 1.0), maximally once per `MAX_EXECUTION_TIME = 100` ms. */
|
||||
type ProgressCallback = (percentage: number) => void;
|
||||
/** Called to obtain random bytes when both Web Crypto API and Node.js crypto are not available. */
|
||||
type RandomFallback = (length: number) => number[];
|
||||
|
||||
/**
|
||||
* Sets the pseudo random number generator to use as a fallback if neither node's crypto module nor the Web Crypto API is available.
|
||||
* Please note: It is highly important that the PRNG used is cryptographically secure and that it is seeded properly!
|
||||
* @param random Function taking the number of bytes to generate as its sole argument, returning the corresponding array of cryptographically secure random byte values.
|
||||
*/
|
||||
export declare function setRandomFallback(random: RandomFallback): void;
|
||||
|
||||
/**
|
||||
* Synchronously generates a salt.
|
||||
* @param rounds Number of rounds to use, defaults to 10 if omitted
|
||||
* @return Resulting salt
|
||||
* @throws If a random fallback is required but not set
|
||||
*/
|
||||
export declare function genSaltSync(rounds?: number): string;
|
||||
|
||||
/**
|
||||
* Asynchronously generates a salt.
|
||||
* @param rounds Number of rounds to use, defaults to 10 if omitted
|
||||
* @return Promise with resulting salt, if callback has been omitted
|
||||
*/
|
||||
export declare function genSalt(rounds?: number): Promise<string>;
|
||||
|
||||
/**
|
||||
* Asynchronously generates a salt.
|
||||
* @param callback Callback receiving the error, if any, and the resulting salt
|
||||
*/
|
||||
export declare function genSalt(callback: Callback<string>): void;
|
||||
|
||||
/**
|
||||
* Asynchronously generates a salt.
|
||||
* @param rounds Number of rounds to use, defaults to 10 if omitted
|
||||
* @param callback Callback receiving the error, if any, and the resulting salt
|
||||
*/
|
||||
export declare function genSalt(
|
||||
rounds: number,
|
||||
callback: Callback<string>,
|
||||
): void;
|
||||
|
||||
/**
|
||||
* Synchronously generates a hash for the given password.
|
||||
* @param password Password to hash
|
||||
* @param salt Salt length to generate or salt to use, default to 10
|
||||
* @return Resulting hash
|
||||
*/
|
||||
export declare function hashSync(
|
||||
password: string,
|
||||
salt?: number | string,
|
||||
): string;
|
||||
|
||||
/**
|
||||
* Asynchronously generates a hash for the given password.
|
||||
* @param password Password to hash
|
||||
* @param salt Salt length to generate or salt to use
|
||||
* @return Promise with resulting hash, if callback has been omitted
|
||||
*/
|
||||
export declare function hash(
|
||||
password: string,
|
||||
salt: number | string,
|
||||
): Promise<string>;
|
||||
|
||||
/**
|
||||
* Asynchronously generates a hash for the given password.
|
||||
* @param password Password to hash
|
||||
* @param salt Salt length to generate or salt to use
|
||||
* @param callback Callback receiving the error, if any, and the resulting hash
|
||||
* @param progressCallback Callback successively called with the percentage of rounds completed (0.0 - 1.0), maximally once per MAX_EXECUTION_TIME = 100 ms.
|
||||
*/
|
||||
export declare function hash(
|
||||
password: string,
|
||||
salt: number | string,
|
||||
callback?: Callback<string>,
|
||||
progressCallback?: ProgressCallback,
|
||||
): void;
|
||||
|
||||
/**
|
||||
* Synchronously tests a password against a hash.
|
||||
* @param password Password to test
|
||||
* @param hash Hash to test against
|
||||
* @return true if matching, otherwise false
|
||||
*/
|
||||
export declare function compareSync(password: string, hash: string): boolean;
|
||||
|
||||
/**
|
||||
* Asynchronously tests a password against a hash.
|
||||
* @param password Password to test
|
||||
* @param hash Hash to test against
|
||||
* @return Promise, if callback has been omitted
|
||||
*/
|
||||
export declare function compare(
|
||||
password: string,
|
||||
hash: string,
|
||||
): Promise<boolean>;
|
||||
|
||||
/**
|
||||
* Asynchronously tests a password against a hash.
|
||||
* @param password Password to test
|
||||
* @param hash Hash to test against
|
||||
* @param callback Callback receiving the error, if any, otherwise the result
|
||||
* @param progressCallback Callback successively called with the percentage of rounds completed (0.0 - 1.0), maximally once per MAX_EXECUTION_TIME = 100 ms.
|
||||
*/
|
||||
export declare function compare(
|
||||
password: string,
|
||||
hash: string,
|
||||
callback?: Callback<boolean>,
|
||||
progressCallback?: ProgressCallback,
|
||||
): void;
|
||||
|
||||
/**
|
||||
* Gets the number of rounds used to encrypt the specified hash.
|
||||
* @param hash Hash to extract the used number of rounds from
|
||||
* @return Number of rounds used
|
||||
*/
|
||||
export declare function getRounds(hash: string): number;
|
||||
|
||||
/**
|
||||
* Gets the salt portion from a hash. Does not validate the hash.
|
||||
* @param hash Hash to extract the salt from
|
||||
* @return Extracted salt part
|
||||
*/
|
||||
export declare function getSalt(hash: string): string;
|
||||
|
||||
/**
|
||||
* Tests if a password will be truncated when hashed, that is its length is
|
||||
* greater than 72 bytes when converted to UTF-8.
|
||||
* @param password The password to test
|
||||
* @returns `true` if truncated, otherwise `false`
|
||||
*/
|
||||
export declare function truncates(password: string): boolean;
|
||||
|
||||
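Since bcrypt only considers the first 72 UTF-8 bytes, a caller might use `truncates` to guard against silent truncation before hashing. A small illustrative check (the function name and error message are made up):

```ts
import { truncates, hashSync } from "bcryptjs";

function hashPassword(password: string): string {
  // Reject passwords that bcrypt would silently truncate at 72 UTF-8 bytes.
  if (truncates(password)) {
    throw new Error("Password exceeds 72 bytes and would be truncated");
  }
  return hashSync(password, 10);
}
```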
/**
|
||||
* Encodes a byte array to base64 with up to len bytes of input, using the custom bcrypt alphabet.
|
||||
* @function
|
||||
* @param b Byte array
|
||||
* @param len Maximum input length
|
||||
*/
|
||||
export declare function encodeBase64(
|
||||
b: Readonly<ArrayLike<number>>,
|
||||
len: number,
|
||||
): string;
|
||||
|
||||
/**
|
||||
* Decodes a base64 encoded string to up to len bytes of output, using the custom bcrypt alphabet.
|
||||
* @function
|
||||
* @param s String to decode
|
||||
* @param len Maximum output length
|
||||
*/
|
||||
export declare function decodeBase64(s: string, len: number): number[];
|
||||
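Taken together, the promise-based declarations above support a typical register/login flow. A hedged sketch of that flow (names are illustrative; the import style assumes the same interop settings the app already uses):

```ts
import * as bcrypt from "bcryptjs";

async function register(plain: string): Promise<string> {
  const salt = await bcrypt.genSalt(10); // 10 rounds, the documented default
  return bcrypt.hash(plain, salt);       // resolves with the bcrypt digest
}

async function login(plain: string, storedHash: string): Promise<boolean> {
  return bcrypt.compare(plain, storedHash); // resolves with true on a match
}

// getRounds(storedHash) can recover the cost factor from an existing digest,
// e.g. to decide whether it should be re-hashed with more rounds.
```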
3
node_modules/bcryptjs/umd/index.d.ts
generated
vendored
Normal file
3
node_modules/bcryptjs/umd/index.d.ts
generated
vendored
Normal file
@ -0,0 +1,3 @@
|
||||
import * as bcrypt from "./types.js";
|
||||
export = bcrypt;
|
||||
export as namespace bcrypt;
|
||||
1221
node_modules/bcryptjs/umd/index.js
generated
vendored
Normal file
1221
node_modules/bcryptjs/umd/index.js
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
3
node_modules/bcryptjs/umd/package.json
generated
vendored
Normal file
3
node_modules/bcryptjs/umd/package.json
generated
vendored
Normal file
@ -0,0 +1,3 @@
|
||||
{
|
||||
"type": "commonjs"
|
||||
}
|
||||
157
node_modules/bcryptjs/umd/types.d.ts
generated
vendored
Normal file
157
node_modules/bcryptjs/umd/types.d.ts
generated
vendored
Normal file
@ -0,0 +1,157 @@
|
||||
// Originally imported from https://github.com/DefinitelyTyped/DefinitelyTyped/blob/8b36dbdf95b624b8a7cd7f8416f06c15d274f9e6/types/bcryptjs/index.d.ts
|
||||
// MIT license.
|
||||
|
||||
/** Called with an error on failure or a value of type `T` upon success. */
|
||||
type Callback<T> = (err: Error | null, result?: T) => void;
|
||||
/** Called with the percentage of rounds completed (0.0 - 1.0), maximally once per `MAX_EXECUTION_TIME = 100` ms. */
|
||||
type ProgressCallback = (percentage: number) => void;
|
||||
/** Called to obtain random bytes when neither the Web Crypto API nor Node.js crypto is available. */
|
||||
type RandomFallback = (length: number) => number[];
|
||||
|
||||
/**
|
||||
* Sets the pseudo random number generator to use as a fallback if neither node's crypto module nor the Web Crypto API is available.
|
||||
* Please note: It is highly important that the PRNG used is cryptographically secure and that it is seeded properly!
|
||||
* @param random Function taking the number of bytes to generate as its sole argument, returning the corresponding array of cryptographically secure random byte values.
|
||||
*/
|
||||
export declare function setRandomFallback(random: RandomFallback): void;
|
||||
|
||||
/**
|
||||
* Synchronously generates a salt.
|
||||
* @param rounds Number of rounds to use, defaults to 10 if omitted
|
||||
* @return Resulting salt
|
||||
* @throws If a random fallback is required but not set
|
||||
*/
|
||||
export declare function genSaltSync(rounds?: number): string;
|
||||
|
||||
/**
|
||||
* Asynchronously generates a salt.
|
||||
* @param rounds Number of rounds to use, defaults to 10 if omitted
|
||||
* @return Promise with resulting salt, if callback has been omitted
|
||||
*/
|
||||
export declare function genSalt(rounds?: number): Promise<string>;
|
||||
|
||||
/**
|
||||
* Asynchronously generates a salt.
|
||||
* @param callback Callback receiving the error, if any, and the resulting salt
|
||||
*/
|
||||
export declare function genSalt(callback: Callback<string>): void;
|
||||
|
||||
/**
|
||||
* Asynchronously generates a salt.
|
||||
* @param rounds Number of rounds to use, defaults to 10 if omitted
|
||||
* @param callback Callback receiving the error, if any, and the resulting salt
|
||||
*/
|
||||
export declare function genSalt(
|
||||
rounds: number,
|
||||
callback: Callback<string>,
|
||||
): void;
|
||||
|
||||
/**
|
||||
* Synchronously generates a hash for the given password.
|
||||
* @param password Password to hash
|
||||
 * @param salt Salt length to generate or salt to use, defaults to 10
|
||||
* @return Resulting hash
|
||||
*/
|
||||
export declare function hashSync(
|
||||
password: string,
|
||||
salt?: number | string,
|
||||
): string;
|
||||
|
||||
/**
|
||||
* Asynchronously generates a hash for the given password.
|
||||
* @param password Password to hash
|
||||
* @param salt Salt length to generate or salt to use
|
||||
* @return Promise with resulting hash, if callback has been omitted
|
||||
*/
|
||||
export declare function hash(
|
||||
password: string,
|
||||
salt: number | string,
|
||||
): Promise<string>;
|
||||
|
||||
/**
|
||||
* Asynchronously generates a hash for the given password.
|
||||
* @param password Password to hash
|
||||
* @param salt Salt length to generate or salt to use
|
||||
* @param callback Callback receiving the error, if any, and the resulting hash
|
||||
* @param progressCallback Callback successively called with the percentage of rounds completed (0.0 - 1.0), maximally once per MAX_EXECUTION_TIME = 100 ms.
|
||||
*/
|
||||
export declare function hash(
|
||||
password: string,
|
||||
salt: number | string,
|
||||
callback?: Callback<string>,
|
||||
progressCallback?: ProgressCallback,
|
||||
): void;
|
||||
|
||||
/**
|
||||
* Synchronously tests a password against a hash.
|
||||
* @param password Password to test
|
||||
* @param hash Hash to test against
|
||||
* @return true if matching, otherwise false
|
||||
*/
|
||||
export declare function compareSync(password: string, hash: string): boolean;
|
||||
|
||||
/**
|
||||
* Asynchronously tests a password against a hash.
|
||||
* @param password Password to test
|
||||
* @param hash Hash to test against
|
||||
* @return Promise, if callback has been omitted
|
||||
*/
|
||||
export declare function compare(
|
||||
password: string,
|
||||
hash: string,
|
||||
): Promise<boolean>;
|
||||
|
||||
/**
|
||||
* Asynchronously tests a password against a hash.
|
||||
* @param password Password to test
|
||||
* @param hash Hash to test against
|
||||
* @param callback Callback receiving the error, if any, otherwise the result
|
||||
* @param progressCallback Callback successively called with the percentage of rounds completed (0.0 - 1.0), maximally once per MAX_EXECUTION_TIME = 100 ms.
|
||||
*/
|
||||
export declare function compare(
|
||||
password: string,
|
||||
hash: string,
|
||||
callback?: Callback<boolean>,
|
||||
progressCallback?: ProgressCallback,
|
||||
): void;
|
||||
|
||||
/**
|
||||
* Gets the number of rounds used to encrypt the specified hash.
|
||||
* @param hash Hash to extract the used number of rounds from
|
||||
* @return Number of rounds used
|
||||
*/
|
||||
export declare function getRounds(hash: string): number;
|
||||
|
||||
/**
|
||||
* Gets the salt portion from a hash. Does not validate the hash.
|
||||
* @param hash Hash to extract the salt from
|
||||
* @return Extracted salt part
|
||||
*/
|
||||
export declare function getSalt(hash: string): string;
|
||||
|
||||
/**
|
||||
* Tests if a password will be truncated when hashed, that is its length is
|
||||
* greater than 72 bytes when converted to UTF-8.
|
||||
* @param password The password to test
|
||||
* @returns `true` if truncated, otherwise `false`
|
||||
*/
|
||||
export declare function truncates(password: string): boolean;
|
||||
|
||||
/**
|
||||
* Encodes a byte array to base64 with up to len bytes of input, using the custom bcrypt alphabet.
|
||||
* @function
|
||||
* @param b Byte array
|
||||
* @param len Maximum input length
|
||||
*/
|
||||
export declare function encodeBase64(
|
||||
b: Readonly<ArrayLike<number>>,
|
||||
len: number,
|
||||
): string;
|
||||
|
||||
/**
|
||||
* Decodes a base64 encoded string to up to len bytes of output, using the custom bcrypt alphabet.
|
||||
* @function
|
||||
* @param s String to decode
|
||||
* @param len Maximum output length
|
||||
*/
|
||||
export declare function decodeBase64(s: string, len: number): number[];
|
||||
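These two helpers use bcrypt's own base64 alphabet rather than the standard one, so output from `encodeBase64` is only meant to be read back with `decodeBase64`. A small round-trip sketch (the byte values are arbitrary):

```ts
import { encodeBase64, decodeBase64 } from "bcryptjs";

const bytes = [0x10, 0x20, 0x30, 0x40];
const encoded = encodeBase64(bytes, bytes.length);   // bcrypt-alphabet string
const decoded = decodeBase64(encoded, bytes.length); // back to [16, 32, 48, 64]
```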
33
node_modules/create-require/CHANGELOG.md
generated
vendored
Normal file
33
node_modules/create-require/CHANGELOG.md
generated
vendored
Normal file
@ -0,0 +1,33 @@
|
||||
# Changelog
|
||||
|
||||
All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
|
||||
|
||||
### [1.1.1](https://github.com/nuxt-contrib/create-require/compare/v1.1.0...v1.1.1) (2020-11-26)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* **types:** explicitly import the URL type ([#3](https://github.com/nuxt-contrib/create-require/issues/3)) ([66a98cc](https://github.com/nuxt-contrib/create-require/commit/66a98cc60a430c3689f11ad111c5fbf4574a37b6))
|
||||
|
||||
## [1.1.0](https://github.com/nuxt-contrib/create-require/compare/v1.0.2...v1.1.0) (2020-11-21)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* fallback to process.cwd() if no filename provided either ([ae5e0d6](https://github.com/nuxt-contrib/create-require/commit/ae5e0d665945b980b82ae6e998146c32295a6734))
|
||||
|
||||
### [1.0.2](https://github.com/nuxt-contrib/create-require/compare/v1.0.1...v1.0.2) (2020-06-12)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* use fake path if filename is directory ([c8e0983](https://github.com/nuxt-contrib/create-require/commit/c8e09834e322d8a106ac8018011f799e2fed03f2))
|
||||
|
||||
### [1.0.1](https://github.com/nuxt-contrib/create-require/compare/v1.0.0...v1.0.1) (2020-06-06)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* **types:** specify types field ([2c06464](https://github.com/nuxt-contrib/create-require/commit/2c0646407704c1c534babdfed39a48f51fc4f616))
|
||||
|
||||
### 0.0.1 (2020-05-04)
|
||||
25
node_modules/create-require/LICENSE
generated
vendored
Normal file
25
node_modules/create-require/LICENSE
generated
vendored
Normal file
@ -0,0 +1,25 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) 2020
|
||||
|
||||
Maël Nison <nison.mael@gmail.com>
|
||||
Paul Soporan <paul.soporan@gmail.com>
|
||||
Pooya Parsa <pyapar@gmail.com>
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
46
node_modules/create-require/README.md
generated
vendored
Normal file
46
node_modules/create-require/README.md
generated
vendored
Normal file
@ -0,0 +1,46 @@
|
||||
# `create-require`
|
||||
|
||||
[![npm version][npm-version-src]][npm-version-href]
|
||||
[![npm downloads][npm-downloads-src]][npm-downloads-href]
|
||||
[![Github Actions][github-actions-src]][github-actions-href]
|
||||
[![Codecov][codecov-src]][codecov-href]
|
||||
|
||||
Polyfill for Node.js [`module.createRequire`](https://nodejs.org/api/modules.html#modules_module_createrequire_filename) (<= v12.2.0)
|
||||
|
||||
## Install
|
||||
|
||||
```sh
|
||||
yarn add create-require
|
||||
|
||||
npm install create-require
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
```ts
|
||||
function createRequire (filename: string | URL): NodeRequire;
|
||||
```
|
||||
|
||||
```js
|
||||
const createRequire = require('create-require')
|
||||
|
||||
const myRequire = createRequire('path/to/test.js')
|
||||
const myModule = myRequire('./test-sibling-module')
|
||||
```
|
||||
|
||||
## License
|
||||
|
||||
[MIT](./LICENSE)
|
||||
|
||||
<!-- Badges -->
|
||||
[npm-version-src]: https://img.shields.io/npm/v/create-require?style=flat-square
|
||||
[npm-version-href]: https://npmjs.com/package/create-require
|
||||
|
||||
[npm-downloads-src]: https://img.shields.io/npm/dm/create-require?style=flat-square
|
||||
[npm-downloads-href]: https://npmjs.com/package/create-require
|
||||
|
||||
[github-actions-src]: https://img.shields.io/github/workflow/status/nuxt-contrib/create-require/test/master?style=flat-square
|
||||
[github-actions-href]: https://github.com/nuxt-contrib/create-require/actions?query=workflow%3Atest
|
||||
|
||||
[codecov-src]: https://img.shields.io/codecov/c/gh/nuxt-contrib/create-require/master?style=flat-square
|
||||
[codecov-href]: https://codecov.io/gh/nuxt-contrib/create-require
|
||||
3
node_modules/create-require/create-require.d.ts
generated
vendored
Normal file
3
node_modules/create-require/create-require.d.ts
generated
vendored
Normal file
@ -0,0 +1,3 @@
|
||||
import { URL } from 'url';
|
||||
|
||||
export default function createRequire (filename: string | URL): NodeRequire;
|
||||
49
node_modules/create-require/create-require.js
generated
vendored
Normal file
49
node_modules/create-require/create-require.js
generated
vendored
Normal file
@ -0,0 +1,49 @@
|
||||
const nativeModule = require('module')
|
||||
const path = require('path')
|
||||
const fs = require('fs')
|
||||
|
||||
function createRequire (filename) {
|
||||
// Fallback to process.cwd() if no filename passed
|
||||
if (!filename) {
|
||||
filename = process.cwd()
|
||||
}
|
||||
|
||||
// If filename is a directory, createRequire resolves against its parent directory, so we need a fake file path inside it
|
||||
if (isDir(filename)) {
|
||||
filename = path.join(filename, 'index.js')
|
||||
}
|
||||
|
||||
// Added in Node v12.2.0
|
||||
if (nativeModule.createRequire) {
|
||||
return nativeModule.createRequire(filename)
|
||||
}
|
||||
|
||||
// Added in Node v10.12.0 and deprecated since Node v12.2.0
|
||||
if (nativeModule.createRequireFromPath) {
|
||||
return nativeModule.createRequireFromPath(filename)
|
||||
}
|
||||
|
||||
// Polyfill
|
||||
return _createRequire(filename)
|
||||
}
|
||||
|
||||
// Polyfill
|
||||
function _createRequire (filename) {
|
||||
const mod = new nativeModule.Module(filename, null)
|
||||
mod.filename = filename
|
||||
mod.paths = nativeModule.Module._nodeModulePaths(path.dirname(filename))
|
||||
mod._compile('module.exports = require;', filename)
|
||||
return mod.exports
|
||||
}
|
||||
|
||||
function isDir (path) {
|
||||
try {
|
||||
const stat = fs.lstatSync(path)
|
||||
return stat.isDirectory()
|
||||
} catch (e) {
|
||||
// lstatSync throws an error if path doesn't exist
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = createRequire
|
||||
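As the directory check above suggests, passing a directory works because a fake `index.js` path is appended before resolution. A short usage sketch (the paths are made up):

```ts
import createRequire from "create-require";

// A directory is fine: the polyfill appends a fake "index.js" before resolving.
const requireFromProject = createRequire("/srv/my-project/");

// Resolution then behaves as if called from /srv/my-project/index.js.
const pkg = requireFromProject("./package.json");
```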
39
node_modules/create-require/package.json
generated
vendored
Normal file
39
node_modules/create-require/package.json
generated
vendored
Normal file
@ -0,0 +1,39 @@
|
||||
{
|
||||
"name": "create-require",
|
||||
"version": "1.1.1",
|
||||
"description": "Polyfill for Node.js module.createRequire (<= v12.2.0)",
|
||||
"repository": "nuxt-contrib/create-require",
|
||||
"license": "MIT",
|
||||
"contributors": [
|
||||
{
|
||||
"name": "Maël Nison",
|
||||
"email": "nison.mael@gmail.com"
|
||||
},
|
||||
{
|
||||
"name": "Paul Soporan",
|
||||
"email": "paul.soporan@gmail.com"
|
||||
},
|
||||
{
|
||||
"name": "Pooya Parsa",
|
||||
"email": "pyapar@gmail.com"
|
||||
}
|
||||
],
|
||||
"main": "./create-require.js",
|
||||
"types": "./create-require.d.ts",
|
||||
"files": [
|
||||
"create-require.js",
|
||||
"create-require.d.ts"
|
||||
],
|
||||
"scripts": {
|
||||
"lint": "eslint .",
|
||||
"release": "yarn test && standard-version && git push --follow-tags && npm publish",
|
||||
"test:matrix": "tap --no-esm --no-timeout ./test/matrix.js",
|
||||
"test": "yarn lint && yarn test:matrix"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@nuxtjs/eslint-config": "latest",
|
||||
"eslint": "latest",
|
||||
"tap": "latest",
|
||||
"standard-version": "latest"
|
||||
}
|
||||
}
|
||||
39
node_modules/diff/CONTRIBUTING.md
generated
vendored
Normal file
39
node_modules/diff/CONTRIBUTING.md
generated
vendored
Normal file
@ -0,0 +1,39 @@
|
||||
# How to Contribute
|
||||
|
||||
## Pull Requests
|
||||
|
||||
We also accept [pull requests][pull-request]!
|
||||
|
||||
Generally we like to see pull requests that
|
||||
|
||||
- Maintain the existing code style
|
||||
- Are focused on a single change (i.e. avoid large refactoring or style adjustments in untouched code if not the primary goal of the pull request)
|
||||
- Have [good commit messages](http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html)
|
||||
- Have tests
|
||||
- Don't decrease the current code coverage (see coverage/lcov-report/index.html)
|
||||
|
||||
## Building
|
||||
|
||||
```
|
||||
npm install
|
||||
npm test
|
||||
```
|
||||
|
||||
Running `npm test -- dev` watches for test changes within Node, and `karma start` may be used for manual testing in browsers.
|
||||
|
||||
If you notice any problems, please report them to the GitHub issue tracker at
|
||||
[http://github.com/kpdecker/jsdiff/issues](http://github.com/kpdecker/jsdiff/issues).
|
||||
|
||||
## Releasing
|
||||
|
||||
JsDiff utilizes the [release yeoman generator][generator-release] to perform most release tasks.
|
||||
|
||||
A full release may be completed with the following:
|
||||
|
||||
```
|
||||
yo release
|
||||
npm publish
|
||||
```
|
||||
|
||||
[generator-release]: https://github.com/walmartlabs/generator-release
|
||||
[pull-request]: https://github.com/kpdecker/jsdiff/pull/new/master
|
||||
31
node_modules/diff/LICENSE
generated
vendored
Normal file
31
node_modules/diff/LICENSE
generated
vendored
Normal file
@ -0,0 +1,31 @@
|
||||
Software License Agreement (BSD License)
|
||||
|
||||
Copyright (c) 2009-2015, Kevin Decker <kpdecker@gmail.com>
|
||||
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use of this software in source and binary forms, with or without modification,
|
||||
are permitted provided that the following conditions are met:
|
||||
|
||||
* Redistributions of source code must retain the above
|
||||
copyright notice, this list of conditions and the
|
||||
following disclaimer.
|
||||
|
||||
* Redistributions in binary form must reproduce the above
|
||||
copyright notice, this list of conditions and the
|
||||
following disclaimer in the documentation and/or other
|
||||
materials provided with the distribution.
|
||||
|
||||
* Neither the name of Kevin Decker nor the names of its
|
||||
contributors may be used to endorse or promote products
|
||||
derived from this software without specific prior
|
||||
written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR
|
||||
IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
|
||||
FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
|
||||
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
|
||||
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER
|
||||
IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
|
||||
OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
207
node_modules/diff/README.md
generated
vendored
Normal file
207
node_modules/diff/README.md
generated
vendored
Normal file
@ -0,0 +1,207 @@
|
||||
# jsdiff
|
||||
|
||||
[Build status](http://travis-ci.org/kpdecker/jsdiff)
|
||||
[Browser test matrix](https://saucelabs.com/u/jsdiff)
|
||||
|
||||
A javascript text differencing implementation.
|
||||
|
||||
Based on the algorithm proposed in
|
||||
["An O(ND) Difference Algorithm and its Variations" (Myers, 1986)](http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.4.6927).
|
||||
|
||||
## Installation
|
||||
```bash
|
||||
npm install diff --save
|
||||
```
|
||||
|
||||
## API
|
||||
|
||||
* `JsDiff.diffChars(oldStr, newStr[, options])` - diffs two blocks of text, comparing character by character.
|
||||
|
||||
Returns a list of change objects (See below).
|
||||
|
||||
Options
|
||||
* `ignoreCase`: `true` to ignore casing difference. Defaults to `false`.
|
||||
|
||||
* `JsDiff.diffWords(oldStr, newStr[, options])` - diffs two blocks of text, comparing word by word, ignoring whitespace.
|
||||
|
||||
Returns a list of change objects (See below).
|
||||
|
||||
Options
|
||||
* `ignoreCase`: Same as in `diffChars`.
|
||||
|
||||
* `JsDiff.diffWordsWithSpace(oldStr, newStr[, options])` - diffs two blocks of text, comparing word by word, treating whitespace as significant.
|
||||
|
||||
Returns a list of change objects (See below).
|
||||
|
||||
* `JsDiff.diffLines(oldStr, newStr[, options])` - diffs two blocks of text, comparing line by line.
|
||||
|
||||
Options
|
||||
* `ignoreWhitespace`: `true` to ignore leading and trailing whitespace. This is the same as `diffTrimmedLines`
|
||||
* `newlineIsToken`: `true` to treat newline characters as separate tokens. This allows for changes to the newline structure to occur independently of the line content and to be treated as such. In general this is the more human friendly form of `diffLines` and `diffLines` is better suited for patches and other computer friendly output.
|
||||
|
||||
Returns a list of change objects (See below).
|
||||
|
||||
* `JsDiff.diffTrimmedLines(oldStr, newStr[, options])` - diffs two blocks of text, comparing line by line, ignoring leading and trailing whitespace.
|
||||
|
||||
Returns a list of change objects (See below).
|
||||
|
||||
* `JsDiff.diffSentences(oldStr, newStr[, options])` - diffs two blocks of text, comparing sentence by sentence.
|
||||
|
||||
Returns a list of change objects (See below).
|
||||
|
||||
* `JsDiff.diffCss(oldStr, newStr[, options])` - diffs two blocks of text, comparing CSS tokens.
|
||||
|
||||
Returns a list of change objects (See below).
|
||||
|
||||
* `JsDiff.diffJson(oldObj, newObj[, options])` - diffs two JSON objects, comparing the fields defined on each. The order of fields, etc does not matter in this comparison.
|
||||
|
||||
Returns a list of change objects (See below).
|
||||
|
||||
* `JsDiff.diffArrays(oldArr, newArr[, options])` - diffs two arrays, comparing each item for strict equality (===).
|
||||
|
||||
Options
|
||||
* `comparator`: `function(left, right)` for custom equality checks
|
||||
|
||||
Returns a list of change objects (See below).
|
||||
|
||||
* `JsDiff.createTwoFilesPatch(oldFileName, newFileName, oldStr, newStr, oldHeader, newHeader)` - creates a unified diff patch.
|
||||
|
||||
Parameters:
|
||||
* `oldFileName` : String to be output in the filename section of the patch for the removals
|
||||
* `newFileName` : String to be output in the filename section of the patch for the additions
|
||||
* `oldStr` : Original string value
|
||||
* `newStr` : New string value
|
||||
* `oldHeader` : Additional information to include in the old file header
|
||||
* `newHeader` : Additional information to include in the new file header
|
||||
* `options` : An object with options. Currently, only `context` is supported and describes how many lines of context should be included.
|
||||
|
||||
* `JsDiff.createPatch(fileName, oldStr, newStr, oldHeader, newHeader)` - creates a unified diff patch.
|
||||
|
||||
Just like JsDiff.createTwoFilesPatch, but with oldFileName being equal to newFileName.
|
||||
|
||||
|
||||
* `JsDiff.structuredPatch(oldFileName, newFileName, oldStr, newStr, oldHeader, newHeader, options)` - returns an object with an array of hunk objects.
|
||||
|
||||
This method is similar to createTwoFilesPatch, but returns a data structure
|
||||
suitable for further processing. Parameters are the same as createTwoFilesPatch. The data structure returned may look like this:
|
||||
|
||||
```js
|
||||
{
|
||||
oldFileName: 'oldfile', newFileName: 'newfile',
|
||||
oldHeader: 'header1', newHeader: 'header2',
|
||||
hunks: [{
|
||||
oldStart: 1, oldLines: 3, newStart: 1, newLines: 3,
|
||||
lines: [' line2', ' line3', '-line4', '+line5', '\\ No newline at end of file'],
|
||||
}]
|
||||
}
|
||||
```
|
||||
|
||||
* `JsDiff.applyPatch(source, patch[, options])` - applies a unified diff patch.
|
||||
|
||||
Returns a string containing the new version of the provided data. `patch` may be a string diff or the output from the `parsePatch` or `structuredPatch` methods.
|
||||
|
||||
The optional `options` object may have the following keys:
|
||||
|
||||
- `fuzzFactor`: Number of lines that are allowed to differ before rejecting a patch. Defaults to 0.
|
||||
- `compareLine(lineNumber, line, operation, patchContent)`: Callback used to compare two given lines to determine if they should be considered equal when patching. Defaults to strict equality but may be overridden to provide fuzzier comparison. Should return false if the lines should be rejected.
|
||||
|
||||
* `JsDiff.applyPatches(patch, options)` - applies one or more patches (see the sketch after this list).
|
||||
|
||||
This method will iterate over the contents of the patch and apply to data provided through callbacks. The general flow for each patch index is:
|
||||
|
||||
- `options.loadFile(index, callback)` is called. The caller should then load the contents of the file and then pass that to the `callback(err, data)` callback. Passing an `err` will terminate further patch execution.
|
||||
- `options.patched(index, content, callback)` is called once the patch has been applied. `content` will be the return value from `applyPatch`. When it's ready, the caller should call `callback(err)` callback. Passing an `err` will terminate further patch execution.
|
||||
|
||||
Once all patches have been applied or an error occurs, the `options.complete(err)` callback is made.
|
||||
|
||||
* `JsDiff.parsePatch(diffStr)` - Parses a patch into structured data
|
||||
|
||||
Returns a JSON object representation of a patch, suitable for use with the `applyPatch` method. This parses to the same structure returned by `JsDiff.structuredPatch`.
|
||||
|
||||
* `convertChangesToXML(changes)` - converts a list of changes to a serialized XML format
|
||||
|
||||
|
||||
All methods above which accept the optional `callback` method will run in sync mode when that parameter is omitted and in async mode when supplied. This allows for larger diffs without blocking the event loop. This may be passed either directly as the final parameter or as the `callback` field in the `options` object.
|
||||
|
||||
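To make the `applyPatches` callback flow described above concrete, here is a hedged sketch. The `oldFileName`/`newFileName` fields follow the structured patch shape shown earlier; `patchText` and the fs handling are illustrative rather than part of the library.

```ts
import { applyPatches } from "diff";
import * as fs from "fs";

declare const patchText: string; // a unified diff, e.g. produced by createPatch

applyPatches(patchText, {
  loadFile(index, callback) {
    // `index` describes one file in the patch; hand back its current contents.
    fs.readFile(index.oldFileName, "utf8", callback);
  },
  patched(index, content, callback) {
    // `content` is the applyPatch result; false signals the hunks did not apply.
    if (content === false) {
      return callback(new Error(`could not patch ${index.newFileName}`));
    }
    fs.writeFile(index.newFileName, content, callback);
  },
  complete(err) {
    if (err) console.error("patching aborted:", err);
  },
});
```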
### Change Objects
|
||||
Many of the methods above return change objects. These objects consist of the following fields:
|
||||
|
||||
* `value`: Text content
|
||||
* `added`: True if the value was inserted into the new string
|
||||
* `removed`: True if the value was removed from the old string
|
||||
|
||||
Note that some cases may omit a particular flag field. Comparison on the flag fields should always be done in a truthy or falsy manner.
|
||||
|
||||
## Examples
|
||||
|
||||
Basic example in Node
|
||||
|
||||
```js
|
||||
require('colors');
|
||||
var jsdiff = require('diff');
|
||||
|
||||
var one = 'beep boop';
|
||||
var other = 'beep boob blah';
|
||||
|
||||
var diff = jsdiff.diffChars(one, other);
|
||||
|
||||
diff.forEach(function(part){
|
||||
// green for additions, red for deletions
|
||||
// grey for common parts
|
||||
var color = part.added ? 'green' :
|
||||
part.removed ? 'red' : 'grey';
|
||||
process.stderr.write(part.value[color]);
|
||||
});
|
||||
|
||||
console.log();
|
||||
```
|
||||
Running the above program should yield
|
||||
|
||||
<img src="images/node_example.png" alt="Node Example">
|
||||
|
||||
Basic example in a web page
|
||||
|
||||
```html
|
||||
<pre id="display"></pre>
|
||||
<script src="diff.js"></script>
|
||||
<script>
|
||||
var one = 'beep boop',
|
||||
other = 'beep boob blah',
|
||||
color = '',
|
||||
span = null;
|
||||
|
||||
var diff = JsDiff.diffChars(one, other),
|
||||
display = document.getElementById('display'),
|
||||
fragment = document.createDocumentFragment();
|
||||
|
||||
diff.forEach(function(part){
|
||||
// green for additions, red for deletions
|
||||
// grey for common parts
|
||||
color = part.added ? 'green' :
|
||||
part.removed ? 'red' : 'grey';
|
||||
span = document.createElement('span');
|
||||
span.style.color = color;
|
||||
span.appendChild(document
|
||||
.createTextNode(part.value));
|
||||
fragment.appendChild(span);
|
||||
});
|
||||
|
||||
display.appendChild(fragment);
|
||||
</script>
|
||||
```
|
||||
|
||||
Open the above .html file in a browser and you should see
|
||||
|
||||
<img src="images/web_example.png" alt="Node Example">
|
||||
|
||||
**[Full online demo](http://kpdecker.github.com/jsdiff)**
|
||||
|
||||
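The examples above only exercise `diffChars`; the patch helpers from the API section compose in the same way. A hedged round-trip sketch (the file name and sample strings are made up):

```ts
import { createPatch, applyPatch } from "diff";

const oldText = "beep boop\n";
const newText = "beep boob blah\n";

// Produce a unified diff, then re-apply it to recover the new text.
const patch = createPatch("greeting.txt", oldText, newText, "", "");
const restored = applyPatch(oldText, patch);

console.log(restored === newText); // true (applyPatch returns false if it cannot apply)
```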
## Compatibility
|
||||
|
||||
[Browser test matrix](https://saucelabs.com/u/jsdiff)
|
||||
|
||||
jsdiff supports all ES3 environments with some known issues on IE8 and below. Under these browsers some diff algorithms such as word diff and others may fail due to lack of support for capturing groups in the `split` operation.
|
||||
|
||||
## License
|
||||
|
||||
See [LICENSE](https://github.com/kpdecker/jsdiff/blob/master/LICENSE).
|
||||
1585
node_modules/diff/dist/diff.js
generated
vendored
Normal file
1585
node_modules/diff/dist/diff.js
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
38
node_modules/diff/dist/diff.min.js
generated
vendored
Normal file
38
node_modules/diff/dist/diff.min.js
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
32
node_modules/diff/lib/convert/dmp.js
generated
vendored
Normal file
32
node_modules/diff/lib/convert/dmp.js
generated
vendored
Normal file
@ -0,0 +1,32 @@
|
||||
/*istanbul ignore start*/
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.convertChangesToDMP = convertChangesToDMP;
|
||||
|
||||
/*istanbul ignore end*/
|
||||
// See: http://code.google.com/p/google-diff-match-patch/wiki/API
|
||||
function convertChangesToDMP(changes) {
|
||||
var ret = [],
|
||||
change,
|
||||
operation;
|
||||
|
||||
for (var i = 0; i < changes.length; i++) {
|
||||
change = changes[i];
|
||||
|
||||
if (change.added) {
|
||||
operation = 1;
|
||||
} else if (change.removed) {
|
||||
operation = -1;
|
||||
} else {
|
||||
operation = 0;
|
||||
}
|
||||
|
||||
ret.push([operation, change.value]);
|
||||
}
|
||||
|
||||
return ret;
|
||||
}
|
||||
//# sourceMappingURL=data:application/json;charset=utf-8;base64,eyJ2ZXJzaW9uIjozLCJzb3VyY2VzIjpbIi4uLy4uL3NyYy9jb252ZXJ0L2RtcC5qcyJdLCJuYW1lcyI6WyJjb252ZXJ0Q2hhbmdlc1RvRE1QIiwiY2hhbmdlcyIsInJldCIsImNoYW5nZSIsIm9wZXJhdGlvbiIsImkiLCJsZW5ndGgiLCJhZGRlZCIsInJlbW92ZWQiLCJwdXNoIiwidmFsdWUiXSwibWFwcGluZ3MiOiI7Ozs7Ozs7OztBQUFBO0FBQ08sU0FBU0EsbUJBQVQsQ0FBNkJDLE9BQTdCLEVBQXNDO0FBQzNDLE1BQUlDLEdBQUcsR0FBRyxFQUFWO0FBQUEsTUFDSUMsTUFESjtBQUFBLE1BRUlDLFNBRko7O0FBR0EsT0FBSyxJQUFJQyxDQUFDLEdBQUcsQ0FBYixFQUFnQkEsQ0FBQyxHQUFHSixPQUFPLENBQUNLLE1BQTVCLEVBQW9DRCxDQUFDLEVBQXJDLEVBQXlDO0FBQ3ZDRixJQUFBQSxNQUFNLEdBQUdGLE9BQU8sQ0FBQ0ksQ0FBRCxDQUFoQjs7QUFDQSxRQUFJRixNQUFNLENBQUNJLEtBQVgsRUFBa0I7QUFDaEJILE1BQUFBLFNBQVMsR0FBRyxDQUFaO0FBQ0QsS0FGRCxNQUVPLElBQUlELE1BQU0sQ0FBQ0ssT0FBWCxFQUFvQjtBQUN6QkosTUFBQUEsU0FBUyxHQUFHLENBQUMsQ0FBYjtBQUNELEtBRk0sTUFFQTtBQUNMQSxNQUFBQSxTQUFTLEdBQUcsQ0FBWjtBQUNEOztBQUVERixJQUFBQSxHQUFHLENBQUNPLElBQUosQ0FBUyxDQUFDTCxTQUFELEVBQVlELE1BQU0sQ0FBQ08sS0FBbkIsQ0FBVDtBQUNEOztBQUNELFNBQU9SLEdBQVA7QUFDRCIsInNvdXJjZXNDb250ZW50IjpbIi8vIFNlZTogaHR0cDovL2NvZGUuZ29vZ2xlLmNvbS9wL2dvb2dsZS1kaWZmLW1hdGNoLXBhdGNoL3dpa2kvQVBJXG5leHBvcnQgZnVuY3Rpb24gY29udmVydENoYW5nZXNUb0RNUChjaGFuZ2VzKSB7XG4gIGxldCByZXQgPSBbXSxcbiAgICAgIGNoYW5nZSxcbiAgICAgIG9wZXJhdGlvbjtcbiAgZm9yIChsZXQgaSA9IDA7IGkgPCBjaGFuZ2VzLmxlbmd0aDsgaSsrKSB7XG4gICAgY2hhbmdlID0gY2hhbmdlc1tpXTtcbiAgICBpZiAoY2hhbmdlLmFkZGVkKSB7XG4gICAgICBvcGVyYXRpb24gPSAxO1xuICAgIH0gZWxzZSBpZiAoY2hhbmdlLnJlbW92ZWQpIHtcbiAgICAgIG9wZXJhdGlvbiA9IC0xO1xuICAgIH0gZWxzZSB7XG4gICAgICBvcGVyYXRpb24gPSAwO1xuICAgIH1cblxuICAgIHJldC5wdXNoKFtvcGVyYXRpb24sIGNoYW5nZS52YWx1ZV0pO1xuICB9XG4gIHJldHVybiByZXQ7XG59XG4iXX0=
|
||||
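The helper above maps jsdiff change objects onto diff-match-patch style tuples. A short sketch of feeding it output from `diffChars` (the sample strings are borrowed from the package README):

```ts
import { diffChars, convertChangesToDMP } from "diff";

const changes = diffChars("beep boop", "beep boob blah");

// Each entry becomes [operation, text] with 1 = added, -1 = removed, 0 = unchanged.
console.log(convertChangesToDMP(changes));
```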
42
node_modules/diff/lib/convert/xml.js
generated
vendored
Normal file
42
node_modules/diff/lib/convert/xml.js
generated
vendored
Normal file
@ -0,0 +1,42 @@
|
||||
/*istanbul ignore start*/
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.convertChangesToXML = convertChangesToXML;
|
||||
|
||||
/*istanbul ignore end*/
|
||||
function convertChangesToXML(changes) {
|
||||
var ret = [];
|
||||
|
||||
for (var i = 0; i < changes.length; i++) {
|
||||
var change = changes[i];
|
||||
|
||||
if (change.added) {
|
||||
ret.push('<ins>');
|
||||
} else if (change.removed) {
|
||||
ret.push('<del>');
|
||||
}
|
||||
|
||||
ret.push(escapeHTML(change.value));
|
||||
|
||||
if (change.added) {
|
||||
ret.push('</ins>');
|
||||
} else if (change.removed) {
|
||||
ret.push('</del>');
|
||||
}
|
||||
}
|
||||
|
||||
return ret.join('');
|
||||
}
|
||||
|
||||
function escapeHTML(s) {
|
||||
var n = s;
|
||||
n = n.replace(/&/g, '&');
|
||||
n = n.replace(/</g, '<');
|
||||
n = n.replace(/>/g, '>');
|
||||
n = n.replace(/"/g, '"');
|
||||
return n;
|
||||
}
|
||||
//# sourceMappingURL=data:application/json;charset=utf-8;base64,eyJ2ZXJzaW9uIjozLCJzb3VyY2VzIjpbIi4uLy4uL3NyYy9jb252ZXJ0L3htbC5qcyJdLCJuYW1lcyI6WyJjb252ZXJ0Q2hhbmdlc1RvWE1MIiwiY2hhbmdlcyIsInJldCIsImkiLCJsZW5ndGgiLCJjaGFuZ2UiLCJhZGRlZCIsInB1c2giLCJyZW1vdmVkIiwiZXNjYXBlSFRNTCIsInZhbHVlIiwiam9pbiIsInMiLCJuIiwicmVwbGFjZSJdLCJtYXBwaW5ncyI6Ijs7Ozs7Ozs7O0FBQU8sU0FBU0EsbUJBQVQsQ0FBNkJDLE9BQTdCLEVBQXNDO0FBQzNDLE1BQUlDLEdBQUcsR0FBRyxFQUFWOztBQUNBLE9BQUssSUFBSUMsQ0FBQyxHQUFHLENBQWIsRUFBZ0JBLENBQUMsR0FBR0YsT0FBTyxDQUFDRyxNQUE1QixFQUFvQ0QsQ0FBQyxFQUFyQyxFQUF5QztBQUN2QyxRQUFJRSxNQUFNLEdBQUdKLE9BQU8sQ0FBQ0UsQ0FBRCxDQUFwQjs7QUFDQSxRQUFJRSxNQUFNLENBQUNDLEtBQVgsRUFBa0I7QUFDaEJKLE1BQUFBLEdBQUcsQ0FBQ0ssSUFBSixDQUFTLE9BQVQ7QUFDRCxLQUZELE1BRU8sSUFBSUYsTUFBTSxDQUFDRyxPQUFYLEVBQW9CO0FBQ3pCTixNQUFBQSxHQUFHLENBQUNLLElBQUosQ0FBUyxPQUFUO0FBQ0Q7O0FBRURMLElBQUFBLEdBQUcsQ0FBQ0ssSUFBSixDQUFTRSxVQUFVLENBQUNKLE1BQU0sQ0FBQ0ssS0FBUixDQUFuQjs7QUFFQSxRQUFJTCxNQUFNLENBQUNDLEtBQVgsRUFBa0I7QUFDaEJKLE1BQUFBLEdBQUcsQ0FBQ0ssSUFBSixDQUFTLFFBQVQ7QUFDRCxLQUZELE1BRU8sSUFBSUYsTUFBTSxDQUFDRyxPQUFYLEVBQW9CO0FBQ3pCTixNQUFBQSxHQUFHLENBQUNLLElBQUosQ0FBUyxRQUFUO0FBQ0Q7QUFDRjs7QUFDRCxTQUFPTCxHQUFHLENBQUNTLElBQUosQ0FBUyxFQUFULENBQVA7QUFDRDs7QUFFRCxTQUFTRixVQUFULENBQW9CRyxDQUFwQixFQUF1QjtBQUNyQixNQUFJQyxDQUFDLEdBQUdELENBQVI7QUFDQUMsRUFBQUEsQ0FBQyxHQUFHQSxDQUFDLENBQUNDLE9BQUYsQ0FBVSxJQUFWLEVBQWdCLE9BQWhCLENBQUo7QUFDQUQsRUFBQUEsQ0FBQyxHQUFHQSxDQUFDLENBQUNDLE9BQUYsQ0FBVSxJQUFWLEVBQWdCLE1BQWhCLENBQUo7QUFDQUQsRUFBQUEsQ0FBQyxHQUFHQSxDQUFDLENBQUNDLE9BQUYsQ0FBVSxJQUFWLEVBQWdCLE1BQWhCLENBQUo7QUFDQUQsRUFBQUEsQ0FBQyxHQUFHQSxDQUFDLENBQUNDLE9BQUYsQ0FBVSxJQUFWLEVBQWdCLFFBQWhCLENBQUo7QUFFQSxTQUFPRCxDQUFQO0FBQ0QiLCJzb3VyY2VzQ29udGVudCI6WyJleHBvcnQgZnVuY3Rpb24gY29udmVydENoYW5nZXNUb1hNTChjaGFuZ2VzKSB7XG4gIGxldCByZXQgPSBbXTtcbiAgZm9yIChsZXQgaSA9IDA7IGkgPCBjaGFuZ2VzLmxlbmd0aDsgaSsrKSB7XG4gICAgbGV0IGNoYW5nZSA9IGNoYW5nZXNbaV07XG4gICAgaWYgKGNoYW5nZS5hZGRlZCkge1xuICAgICAgcmV0LnB1c2goJzxpbnM+Jyk7XG4gICAgfSBlbHNlIGlmIChjaGFuZ2UucmVtb3ZlZCkge1xuICAgICAgcmV0LnB1c2goJzxkZWw+Jyk7XG4gICAgfVxuXG4gICAgcmV0LnB1c2goZXNjYXBlSFRNTChjaGFuZ2UudmFsdWUpKTtcblxuICAgIGlmIChjaGFuZ2UuYWRkZWQpIHtcbiAgICAgIHJldC5wdXNoKCc8L2lucz4nKTtcbiAgICB9IGVsc2UgaWYgKGNoYW5nZS5yZW1vdmVkKSB7XG4gICAgICByZXQucHVzaCgnPC9kZWw+Jyk7XG4gICAgfVxuICB9XG4gIHJldHVybiByZXQuam9pbignJyk7XG59XG5cbmZ1bmN0aW9uIGVzY2FwZUhUTUwocykge1xuICBsZXQgbiA9IHM7XG4gIG4gPSBuLnJlcGxhY2UoLyYvZywgJyZhbXA7Jyk7XG4gIG4gPSBuLnJlcGxhY2UoLzwvZywgJyZsdDsnKTtcbiAgbiA9IG4ucmVwbGFjZSgvPi9nLCAnJmd0OycpO1xuICBuID0gbi5yZXBsYWNlKC9cIi9nLCAnJnF1b3Q7Jyk7XG5cbiAgcmV0dXJuIG47XG59XG4iXX0=
|
||||
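`convertChangesToXML` wraps added parts in `<ins>`, removed parts in `<del>`, and HTML-escapes the values, which makes it handy for quick HTML rendering. An illustrative sketch:

```ts
import { diffWords, convertChangesToXML } from "diff";

// Added parts are wrapped in <ins>, removed parts in <del>, and values are HTML-escaped.
const markup = convertChangesToXML(diffWords("beep boop", "beep boob blah"));

console.log(markup);
```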
45
node_modules/diff/lib/diff/array.js
generated
vendored
Normal file
45
node_modules/diff/lib/diff/array.js
generated
vendored
Normal file
@ -0,0 +1,45 @@
|
||||
/*istanbul ignore start*/
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.diffArrays = diffArrays;
|
||||
exports.arrayDiff = void 0;
|
||||
|
||||
/*istanbul ignore end*/
|
||||
var
|
||||
/*istanbul ignore start*/
|
||||
_base = _interopRequireDefault(require("./base"))
|
||||
/*istanbul ignore end*/
|
||||
;
|
||||
|
||||
/*istanbul ignore start*/ function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||||
|
||||
/*istanbul ignore end*/
|
||||
var arrayDiff = new
|
||||
/*istanbul ignore start*/
|
||||
_base
|
||||
/*istanbul ignore end*/
|
||||
.
|
||||
/*istanbul ignore start*/
|
||||
default
|
||||
/*istanbul ignore end*/
|
||||
();
|
||||
|
||||
/*istanbul ignore start*/
|
||||
exports.arrayDiff = arrayDiff;
|
||||
|
||||
/*istanbul ignore end*/
|
||||
arrayDiff.tokenize = function (value) {
|
||||
return value.slice();
|
||||
};
|
||||
|
||||
arrayDiff.join = arrayDiff.removeEmpty = function (value) {
|
||||
return value;
|
||||
};
|
||||
|
||||
function diffArrays(oldArr, newArr, callback) {
|
||||
return arrayDiff.diff(oldArr, newArr, callback);
|
||||
}
|
||||
//# sourceMappingURL=data:application/json;charset=utf-8;base64,eyJ2ZXJzaW9uIjozLCJzb3VyY2VzIjpbIi4uLy4uL3NyYy9kaWZmL2FycmF5LmpzIl0sIm5hbWVzIjpbImFycmF5RGlmZiIsIkRpZmYiLCJ0b2tlbml6ZSIsInZhbHVlIiwic2xpY2UiLCJqb2luIiwicmVtb3ZlRW1wdHkiLCJkaWZmQXJyYXlzIiwib2xkQXJyIiwibmV3QXJyIiwiY2FsbGJhY2siLCJkaWZmIl0sIm1hcHBpbmdzIjoiOzs7Ozs7Ozs7O0FBQUE7QUFBQTtBQUFBO0FBQUE7QUFBQTs7Ozs7QUFFTyxJQUFNQSxTQUFTLEdBQUc7QUFBSUM7QUFBQUE7QUFBQUE7QUFBQUE7QUFBQUE7QUFBQUE7QUFBSjtBQUFBLEVBQWxCOzs7Ozs7QUFDUEQsU0FBUyxDQUFDRSxRQUFWLEdBQXFCLFVBQVNDLEtBQVQsRUFBZ0I7QUFDbkMsU0FBT0EsS0FBSyxDQUFDQyxLQUFOLEVBQVA7QUFDRCxDQUZEOztBQUdBSixTQUFTLENBQUNLLElBQVYsR0FBaUJMLFNBQVMsQ0FBQ00sV0FBVixHQUF3QixVQUFTSCxLQUFULEVBQWdCO0FBQ3ZELFNBQU9BLEtBQVA7QUFDRCxDQUZEOztBQUlPLFNBQVNJLFVBQVQsQ0FBb0JDLE1BQXBCLEVBQTRCQyxNQUE1QixFQUFvQ0MsUUFBcEMsRUFBOEM7QUFBRSxTQUFPVixTQUFTLENBQUNXLElBQVYsQ0FBZUgsTUFBZixFQUF1QkMsTUFBdkIsRUFBK0JDLFFBQS9CLENBQVA7QUFBa0QiLCJzb3VyY2VzQ29udGVudCI6WyJpbXBvcnQgRGlmZiBmcm9tICcuL2Jhc2UnO1xuXG5leHBvcnQgY29uc3QgYXJyYXlEaWZmID0gbmV3IERpZmYoKTtcbmFycmF5RGlmZi50b2tlbml6ZSA9IGZ1bmN0aW9uKHZhbHVlKSB7XG4gIHJldHVybiB2YWx1ZS5zbGljZSgpO1xufTtcbmFycmF5RGlmZi5qb2luID0gYXJyYXlEaWZmLnJlbW92ZUVtcHR5ID0gZnVuY3Rpb24odmFsdWUpIHtcbiAgcmV0dXJuIHZhbHVlO1xufTtcblxuZXhwb3J0IGZ1bmN0aW9uIGRpZmZBcnJheXMob2xkQXJyLCBuZXdBcnIsIGNhbGxiYWNrKSB7IHJldHVybiBhcnJheURpZmYuZGlmZihvbGRBcnIsIG5ld0FyciwgY2FsbGJhY2spOyB9XG4iXX0=
|
||||
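`diffArrays` compares items with strict equality unless a `comparator` option is supplied. A small sketch using plain strings (the exact grouping of change objects may vary slightly between versions):

```ts
import { diffArrays } from "diff";

const result = diffArrays(["a", "b", "c"], ["a", "c", "d"]);

// Yields change objects whose `value` is an array slice, e.g.
// [{ value: ["a"] }, { value: ["b"], removed: true }, { value: ["c"] }, { value: ["d"], added: true }]
result.forEach((part) => console.log(part.added, part.removed, part.value));
```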
304
node_modules/diff/lib/diff/base.js
generated
vendored
Normal file
304
node_modules/diff/lib/diff/base.js
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
Some files were not shown because too many files have changed in this diff.