unit tests part 3
This commit is contained in:
parent bd96958df5
commit 323cea883d
4661 changed files with 410946 additions and 5 deletions
@@ -1,6 +1,6 @@
import { html } from "./broker";
//import { test } from 'jest';
import { jest } from "@jest/globals";
import { test, beforeAll, afterAll } from "@jest/globals";

beforeAll(() => {
  jest.spyOn(console, "log").mockImplementation(() => {});
@@ -9,20 +9,17 @@ beforeAll(() => {
test("Generate HTML for hexagram", () => {
  expect(html("Hexagram No. 45 ䷬\nGathering Together [Massing]\n萃 (cuì)"))
    .toBe("<br><p><div style=\"border: 1px dotted gray; border-radius: 10px; padding-left: 10px; padding-right: 10px; padding-top: 10px; padding-bottom: 10px; width: auto; display: inline-block; box-shadow: 4px 4px 10px rgba(0, 0, 0, 0.2);\"><h1>Hexagram No. 45 ䷬</h1><div style=\"border: 1px solid gray; border-radius: 25px; padding-left: 30px; padding-right: 30px; padding-top: 20px; width: auto; display: inline-block; text-align: center; box-shadow: 4px 4px 10px rgba(0, 0, 0, 0.2);\"><h2>Gathering Together [Massing] - 萃 (cuì)<br></h2></div></p>");

});

test("Generate HTML for judgement", () => {
  expect(html("Judgement:\nGathering Together. Success.\nThe king approaches his temple.\nIt furthers one to see the great man.\nThis brings success. Perseverance furthers.\nTo bring great offerings creates good fortune.\nIt furthers one to undertake something."))
    .toBe("<p><h3>Judgement:</h3>Gathering Together. Success.<br>The king approaches his temple.<br>It furthers one to see the great man.<br>This brings success. Perseverance furthers.<br>To bring great offerings creates good fortune.<br>It furthers one to undertake something.<br></p>");

});

test("Generate HTML for images", () => {
  expect(html("Images:\nOver the earth, the lake:\nThe image of Gathering Together.\nThus the superior man renews his weapons\nIn order to meet the unforseen."))
    .toBe("<p><h3>Images:</h3>Over the earth, the lake:<br>The image of Gathering Together.<br>Thus the superior man renews his weapons<br>In order to meet the unforseen.<br></p></div>");

});
});

afterAll(() => {
  (console.log as jest.Mock).mockRestore();
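
For reference, a minimal self-contained sketch (not part of the commit) of the Jest console-suppression pattern the test file uses; only the `jest.Mock` cast mirrors the diff, everything else is standard `@jest/globals` API:

```ts
import { jest, test, expect, beforeAll, afterAll } from "@jest/globals";

beforeAll(() => {
  // Replace console.log with a no-op mock so test output stays clean.
  jest.spyOn(console, "log").mockImplementation(() => {});
});

afterAll(() => {
  // Undo the spy so later test files get the real console.log back.
  (console.log as jest.Mock).mockRestore();
});

test("console.log is silenced but still observable", () => {
  console.log("hidden");
  expect(console.log).toHaveBeenCalledWith("hidden");
});
```
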
1  node_modules/.bin/browserslist  generated vendored Symbolic link
@@ -0,0 +1 @@
../browserslist/cli.js

1  node_modules/.bin/create-jest  generated vendored Symbolic link
@@ -0,0 +1 @@
../create-jest/bin/create-jest.js

1  node_modules/.bin/ejs  generated vendored Symbolic link
@@ -0,0 +1 @@
../ejs/bin/cli.js

1  node_modules/.bin/esparse  generated vendored Symbolic link
@@ -0,0 +1 @@
../esprima/bin/esparse.js

1  node_modules/.bin/esvalidate  generated vendored Symbolic link
@@ -0,0 +1 @@
../esprima/bin/esvalidate.js

1  node_modules/.bin/import-local-fixture  generated vendored Symbolic link
@@ -0,0 +1 @@
../import-local/fixtures/cli.js

1  node_modules/.bin/jake  generated vendored Symbolic link
@@ -0,0 +1 @@
../jake/bin/cli.js

1  node_modules/.bin/jest  generated vendored Symbolic link
@@ -0,0 +1 @@
../jest/bin/jest.js

1  node_modules/.bin/js-yaml  generated vendored Symbolic link
@@ -0,0 +1 @@
../js-yaml/bin/js-yaml.js

1  node_modules/.bin/jsesc  generated vendored Symbolic link
@@ -0,0 +1 @@
../jsesc/bin/jsesc

1  node_modules/.bin/json5  generated vendored Symbolic link
@@ -0,0 +1 @@
../json5/lib/cli.js

1  node_modules/.bin/node-which  generated vendored Symbolic link
@@ -0,0 +1 @@
../which/bin/node-which

1  node_modules/.bin/parser  generated vendored Symbolic link
@@ -0,0 +1 @@
../@babel/parser/bin/babel-parser.js

1  node_modules/.bin/resolve  generated vendored Symbolic link
@@ -0,0 +1 @@
../resolve/bin/resolve

1  node_modules/.bin/semver  generated vendored Symbolic link
@@ -0,0 +1 @@
../semver/bin/semver.js

1  node_modules/.bin/ts-jest  generated vendored Symbolic link
@@ -0,0 +1 @@
../ts-jest/cli.js

1  node_modules/.bin/update-browserslist-db  generated vendored Symbolic link
@@ -0,0 +1 @@
../update-browserslist-db/cli.js
3838  node_modules/.package-lock.json  generated vendored
File diff suppressed because it is too large
202  node_modules/@ampproject/remapping/LICENSE  generated vendored Normal file
@@ -0,0 +1,202 @@

                                 Apache License
                           Version 2.0, January 2004
                        http://www.apache.org/licenses/

   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

   1. Definitions.

      "License" shall mean the terms and conditions for use, reproduction,
      and distribution as defined by Sections 1 through 9 of this document.

      "Licensor" shall mean the copyright owner or entity authorized by
      the copyright owner that is granting the License.

      "Legal Entity" shall mean the union of the acting entity and all
      other entities that control, are controlled by, or are under common
      control with that entity. For the purposes of this definition,
      "control" means (i) the power, direct or indirect, to cause the
      direction or management of such entity, whether by contract or
      otherwise, or (ii) ownership of fifty percent (50%) or more of the
      outstanding shares, or (iii) beneficial ownership of such entity.

      "You" (or "Your") shall mean an individual or Legal Entity
      exercising permissions granted by this License.

      "Source" form shall mean the preferred form for making modifications,
      including but not limited to software source code, documentation
      source, and configuration files.

      "Object" form shall mean any form resulting from mechanical
      transformation or translation of a Source form, including but
      not limited to compiled object code, generated documentation,
      and conversions to other media types.

      "Work" shall mean the work of authorship, whether in Source or
      Object form, made available under the License, as indicated by a
      copyright notice that is included in or attached to the work
      (an example is provided in the Appendix below).

      "Derivative Works" shall mean any work, whether in Source or Object
      form, that is based on (or derived from) the Work and for which the
      editorial revisions, annotations, elaborations, or other modifications
      represent, as a whole, an original work of authorship. For the purposes
      of this License, Derivative Works shall not include works that remain
      separable from, or merely link (or bind by name) to the interfaces of,
      the Work and Derivative Works thereof.

      "Contribution" shall mean any work of authorship, including
      the original version of the Work and any modifications or additions
      to that Work or Derivative Works thereof, that is intentionally
      submitted to Licensor for inclusion in the Work by the copyright owner
      or by an individual or Legal Entity authorized to submit on behalf of
      the copyright owner. For the purposes of this definition, "submitted"
      means any form of electronic, verbal, or written communication sent
      to the Licensor or its representatives, including but not limited to
      communication on electronic mailing lists, source code control systems,
      and issue tracking systems that are managed by, or on behalf of, the
      Licensor for the purpose of discussing and improving the Work, but
      excluding communication that is conspicuously marked or otherwise
      designated in writing by the copyright owner as "Not a Contribution."

      "Contributor" shall mean Licensor and any individual or Legal Entity
      on behalf of whom a Contribution has been received by Licensor and
      subsequently incorporated within the Work.

   2. Grant of Copyright License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      copyright license to reproduce, prepare Derivative Works of,
      publicly display, publicly perform, sublicense, and distribute the
      Work and such Derivative Works in Source or Object form.

   3. Grant of Patent License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      (except as stated in this section) patent license to make, have made,
      use, offer to sell, sell, import, and otherwise transfer the Work,
      where such license applies only to those patent claims licensable
      by such Contributor that are necessarily infringed by their
      Contribution(s) alone or by combination of their Contribution(s)
      with the Work to which such Contribution(s) was submitted. If You
      institute patent litigation against any entity (including a
      cross-claim or counterclaim in a lawsuit) alleging that the Work
      or a Contribution incorporated within the Work constitutes direct
      or contributory patent infringement, then any patent licenses
      granted to You under this License for that Work shall terminate
      as of the date such litigation is filed.

   4. Redistribution. You may reproduce and distribute copies of the
      Work or Derivative Works thereof in any medium, with or without
      modifications, and in Source or Object form, provided that You
      meet the following conditions:

      (a) You must give any other recipients of the Work or
          Derivative Works a copy of this License; and

      (b) You must cause any modified files to carry prominent notices
          stating that You changed the files; and

      (c) You must retain, in the Source form of any Derivative Works
          that You distribute, all copyright, patent, trademark, and
          attribution notices from the Source form of the Work,
          excluding those notices that do not pertain to any part of
          the Derivative Works; and

      (d) If the Work includes a "NOTICE" text file as part of its
          distribution, then any Derivative Works that You distribute must
          include a readable copy of the attribution notices contained
          within such NOTICE file, excluding those notices that do not
          pertain to any part of the Derivative Works, in at least one
          of the following places: within a NOTICE text file distributed
          as part of the Derivative Works; within the Source form or
          documentation, if provided along with the Derivative Works; or,
          within a display generated by the Derivative Works, if and
          wherever such third-party notices normally appear. The contents
          of the NOTICE file are for informational purposes only and
          do not modify the License. You may add Your own attribution
          notices within Derivative Works that You distribute, alongside
          or as an addendum to the NOTICE text from the Work, provided
          that such additional attribution notices cannot be construed
          as modifying the License.

      You may add Your own copyright statement to Your modifications and
      may provide additional or different license terms and conditions
      for use, reproduction, or distribution of Your modifications, or
      for any such Derivative Works as a whole, provided Your use,
      reproduction, and distribution of the Work otherwise complies with
      the conditions stated in this License.

   5. Submission of Contributions. Unless You explicitly state otherwise,
      any Contribution intentionally submitted for inclusion in the Work
      by You to the Licensor shall be under the terms and conditions of
      this License, without any additional terms or conditions.
      Notwithstanding the above, nothing herein shall supersede or modify
      the terms of any separate license agreement you may have executed
      with Licensor regarding such Contributions.

   6. Trademarks. This License does not grant permission to use the trade
      names, trademarks, service marks, or product names of the Licensor,
      except as required for reasonable and customary use in describing the
      origin of the Work and reproducing the content of the NOTICE file.

   7. Disclaimer of Warranty. Unless required by applicable law or
      agreed to in writing, Licensor provides the Work (and each
      Contributor provides its Contributions) on an "AS IS" BASIS,
      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
      implied, including, without limitation, any warranties or conditions
      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
      PARTICULAR PURPOSE. You are solely responsible for determining the
      appropriateness of using or redistributing the Work and assume any
      risks associated with Your exercise of permissions under this License.

   8. Limitation of Liability. In no event and under no legal theory,
      whether in tort (including negligence), contract, or otherwise,
      unless required by applicable law (such as deliberate and grossly
      negligent acts) or agreed to in writing, shall any Contributor be
      liable to You for damages, including any direct, indirect, special,
      incidental, or consequential damages of any character arising as a
      result of this License or out of the use or inability to use the
      Work (including but not limited to damages for loss of goodwill,
      work stoppage, computer failure or malfunction, or any and all
      other commercial damages or losses), even if such Contributor
      has been advised of the possibility of such damages.

   9. Accepting Warranty or Additional Liability. While redistributing
      the Work or Derivative Works thereof, You may choose to offer,
      and charge a fee for, acceptance of support, warranty, indemnity,
      or other liability obligations and/or rights consistent with this
      License. However, in accepting such obligations, You may act only
      on Your own behalf and on Your sole responsibility, not on behalf
      of any other Contributor, and only if You agree to indemnify,
      defend, and hold each Contributor harmless for any liability
      incurred by, or claims asserted against, such Contributor by reason
      of your accepting any such warranty or additional liability.

   END OF TERMS AND CONDITIONS

   APPENDIX: How to apply the Apache License to your work.

      To apply the Apache License to your work, attach the following
      boilerplate notice, with the fields enclosed by brackets "[]"
      replaced with your own identifying information. (Don't include
      the brackets!) The text should be enclosed in the appropriate
      comment syntax for the file format. We also recommend that a
      file or class name and description of purpose be included on the
      same "printed page" as the copyright notice for easier
      identification within third-party archives.

   Copyright [yyyy] [name of copyright owner]

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
218  node_modules/@ampproject/remapping/README.md  generated vendored Normal file
@@ -0,0 +1,218 @@
# @ampproject/remapping

> Remap sequential sourcemaps through transformations to point at the original source code

Remapping allows you to take the sourcemaps generated through transforming your code and "remap"
them to the original source locations. Think "my minified code, transformed with babel and bundled
with webpack", all pointing to the correct location in your original source code.

With remapping, none of your source code transformations need to be aware of the input's sourcemap;
they only need to generate an output sourcemap. This greatly simplifies building custom
transformations (think a find-and-replace).

## Installation

```sh
npm install @ampproject/remapping
```

## Usage

```typescript
function remapping(
  map: SourceMap | SourceMap[],
  loader: (file: string, ctx: LoaderContext) => (SourceMap | null | undefined),
  options?: { excludeContent: boolean, decodedMappings: boolean }
): SourceMap;

// LoaderContext gives the loader the importing sourcemap, tree depth, the ability to override the
// "source" location (where child sources are resolved relative to, or the location of original
// source), and the ability to override the "content" of an original source for inclusion in the
// output sourcemap.
type LoaderContext = {
  readonly importer: string;
  readonly depth: number;
  source: string;
  content: string | null | undefined;
}
```

`remapping` takes the final output sourcemap, and a `loader` function. For every source file pointer
in the sourcemap, the `loader` will be called with the resolved path. If the path itself represents
a transformed file (it has a sourcemap associated with it), then the `loader` should return that
sourcemap. If not, the path will be treated as original, untransformed source code.

```js
// Babel transformed "helloworld.js" into "transformed.js"
const transformedMap = JSON.stringify({
  file: 'transformed.js',
  // 1st column of 2nd line of output file translates into the 1st source
  // file, line 3, column 2
  mappings: ';CAEE',
  sources: ['helloworld.js'],
  version: 3,
});

// Uglify minified "transformed.js" into "transformed.min.js"
const minifiedTransformedMap = JSON.stringify({
  file: 'transformed.min.js',
  // 0th column of 1st line of output file translates into the 1st source
  // file, line 2, column 1.
  mappings: 'AACC',
  names: [],
  sources: ['transformed.js'],
  version: 3,
});

const remapped = remapping(
  minifiedTransformedMap,
  (file, ctx) => {

    // The "transformed.js" file is a transformed file.
    if (file === 'transformed.js') {
      // The root importer is empty.
      console.assert(ctx.importer === '');
      // The depth in the sourcemap tree we're currently loading.
      // The root `minifiedTransformedMap` is depth 0, and its source children are depth 1, etc.
      console.assert(ctx.depth === 1);

      return transformedMap;
    }

    // Loader will be called to load transformedMap's source file pointers as well.
    console.assert(file === 'helloworld.js');
    // `transformed.js`'s sourcemap points into `helloworld.js`.
    console.assert(ctx.importer === 'transformed.js');
    // This is a source child of `transformed`, which is a source child of `minifiedTransformedMap`.
    console.assert(ctx.depth === 2);
    return null;
  }
);

console.log(remapped);
// {
//   file: 'transformed.min.js',
//   mappings: 'AAEE',
//   sources: ['helloworld.js'],
//   version: 3,
// };
```

In this example, `loader` will be called twice:

1. `"transformed.js"`, the first source file pointer in the `minifiedTransformedMap`. We return the
   associated sourcemap for it (it's a transformed file, after all) so that sourcemap locations can
   be traced through it into the source files it represents.
2. `"helloworld.js"`, our original, unmodified source code. This file does not have a sourcemap, so
   we return `null`.

The `remapped` sourcemap now points from `transformed.min.js` into locations in `helloworld.js`. If
you were to read the `mappings`, it says "0th column of the first output line points to the 1st
column of the 2nd line of the file `helloworld.js`".

### Multiple transformations of a file

As a convenience, if you have multiple single-source transformations of a file, you may pass an
array of sourcemap files in the order of most-recent transformation sourcemap first. Note that this
changes the `importer` and `depth` of each call to our loader. So our above example could have been
written as:

```js
const remapped = remapping(
  [minifiedTransformedMap, transformedMap],
  () => null
);

console.log(remapped);
// {
//   file: 'transformed.min.js',
//   mappings: 'AAEE',
//   sources: ['helloworld.js'],
//   version: 3,
// };
```

### Advanced control of the loading graph

#### `source`

The `source` property can be overridden to any value to change the location of the current load. Eg,
for an original source file, it allows us to change the location to the original source regardless
of what the sourcemap source entry says. And for transformed files, it allows us to change the
relative resolving location for child sources of the loaded sourcemap.

```js
const remapped = remapping(
  minifiedTransformedMap,
  (file, ctx) => {

    if (file === 'transformed.js') {
      // We pretend the transformed.js file actually exists in the 'src/' directory. When the nested
      // source files are loaded, they will now be relative to `src/`.
      ctx.source = 'src/transformed.js';
      return transformedMap;
    }

    console.assert(file === 'src/helloworld.js');
    // We could further change the source of this original file, eg, to be inside a nested directory
    // itself. This will be reflected in the remapped sourcemap.
    ctx.source = 'src/nested/helloworld.js';
    return null;
  }
);

console.log(remapped);
// {
//   …,
//   sources: ['src/nested/helloworld.js'],
// };
```

#### `content`

The `content` property can be overridden when we encounter an original source file. Eg, this allows
you to manually provide the source content of the original file regardless of whether the
`sourcesContent` field is present in the parent sourcemap. It can also be set to `null` to remove
the source content.

```js
const remapped = remapping(
  minifiedTransformedMap,
  (file, ctx) => {

    if (file === 'transformed.js') {
      // transformedMap does not include a `sourcesContent` field, so usually the remapped sourcemap
      // would not include any `sourcesContent` values.
      return transformedMap;
    }

    console.assert(file === 'helloworld.js');
    // We can read the file to provide the source content.
    ctx.content = fs.readFileSync(file, 'utf8');
    return null;
  }
);

console.log(remapped);
// {
//   …,
//   sourcesContent: [
//     'console.log("Hello world!")',
//   ],
// };
```

### Options

#### excludeContent

By default, `excludeContent` is `false`. Passing `{ excludeContent: true }` will exclude the
`sourcesContent` field from the returned sourcemap. This is mainly useful when you want to reduce
the size of the sourcemap.

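A short sketch (not in the original README) showing the option applied to the `minifiedTransformedMap` example from above:

```ts
const slim = remapping(minifiedTransformedMap, () => null, {
  excludeContent: true,
});

// The returned map carries no sourcesContent field at all.
console.log("sourcesContent" in slim); // false
```
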
#### decodedMappings

By default, `decodedMappings` is `false`. Passing `{ decodedMappings: true }` will leave the
`mappings` field in a [decoded state](https://github.com/rich-harris/sourcemap-codec) instead of
encoding into a VLQ string.

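And a matching sketch (again not from the README) for `decodedMappings`, reusing the same inputs:

```ts
const decoded = remapping(minifiedTransformedMap, () => null, {
  decodedMappings: true,
});

// mappings stays an array of decoded segments instead of a VLQ string.
console.log(Array.isArray(decoded.mappings)); // true
```
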
197  node_modules/@ampproject/remapping/dist/remapping.mjs  generated vendored Normal file
@@ -0,0 +1,197 @@
import { decodedMappings, traceSegment, TraceMap } from '@jridgewell/trace-mapping';
import { GenMapping, maybeAddSegment, setSourceContent, setIgnore, toDecodedMap, toEncodedMap } from '@jridgewell/gen-mapping';

const SOURCELESS_MAPPING = /* #__PURE__ */ SegmentObject('', -1, -1, '', null, false);
const EMPTY_SOURCES = [];
function SegmentObject(source, line, column, name, content, ignore) {
    return { source, line, column, name, content, ignore };
}
function Source(map, sources, source, content, ignore) {
    return {
        map,
        sources,
        source,
        content,
        ignore,
    };
}
/**
 * MapSource represents a single sourcemap, with the ability to trace mappings into its child nodes
 * (which may themselves be SourceMapTrees).
 */
function MapSource(map, sources) {
    return Source(map, sources, '', null, false);
}
/**
 * A "leaf" node in the sourcemap tree, representing an original, unmodified source file. Recursive
 * segment tracing ends at the `OriginalSource`.
 */
function OriginalSource(source, content, ignore) {
    return Source(null, EMPTY_SOURCES, source, content, ignore);
}
/**
 * traceMappings is only called on the root level SourceMapTree, and begins the process of
 * resolving each mapping in terms of the original source files.
 */
function traceMappings(tree) {
    // TODO: Eventually support sourceRoot, which has to be removed because the sources are already
    // fully resolved. We'll need to make sources relative to the sourceRoot before adding them.
    const gen = new GenMapping({ file: tree.map.file });
    const { sources: rootSources, map } = tree;
    const rootNames = map.names;
    const rootMappings = decodedMappings(map);
    for (let i = 0; i < rootMappings.length; i++) {
        const segments = rootMappings[i];
        for (let j = 0; j < segments.length; j++) {
            const segment = segments[j];
            const genCol = segment[0];
            let traced = SOURCELESS_MAPPING;
            // 1-length segments only move the current generated column, there's no source information
            // to gather from it.
            if (segment.length !== 1) {
                const source = rootSources[segment[1]];
                traced = originalPositionFor(source, segment[2], segment[3], segment.length === 5 ? rootNames[segment[4]] : '');
                // If the trace is invalid, then the trace ran into a sourcemap that doesn't contain a
                // respective segment into an original source.
                if (traced == null)
                    continue;
            }
            const { column, line, name, content, source, ignore } = traced;
            maybeAddSegment(gen, i, genCol, source, line, column, name);
            if (source && content != null)
                setSourceContent(gen, source, content);
            if (ignore)
                setIgnore(gen, source, true);
        }
    }
    return gen;
}
/**
 * originalPositionFor is only called on children SourceMapTrees. It recurses down into its own
 * child SourceMapTrees, until we find the original source map.
 */
function originalPositionFor(source, line, column, name) {
    if (!source.map) {
        return SegmentObject(source.source, line, column, name, source.content, source.ignore);
    }
    const segment = traceSegment(source.map, line, column);
    // If we couldn't find a segment, then this doesn't exist in the sourcemap.
    if (segment == null)
        return null;
    // 1-length segments only move the current generated column, there's no source information
    // to gather from it.
    if (segment.length === 1)
        return SOURCELESS_MAPPING;
    return originalPositionFor(source.sources[segment[1]], segment[2], segment[3], segment.length === 5 ? source.map.names[segment[4]] : name);
}

function asArray(value) {
    if (Array.isArray(value))
        return value;
    return [value];
}
/**
 * Recursively builds a tree structure out of sourcemap files, with each node
 * being either an `OriginalSource` "leaf" or a `SourceMapTree` composed of
 * `OriginalSource`s and `SourceMapTree`s.
 *
 * Every sourcemap is composed of a collection of source files and mappings
 * into locations of those source files. When we generate a `SourceMapTree` for
 * the sourcemap, we attempt to load each source file's own sourcemap. If it
 * does not have an associated sourcemap, it is considered an original,
 * unmodified source file.
 */
function buildSourceMapTree(input, loader) {
    const maps = asArray(input).map((m) => new TraceMap(m, ''));
    const map = maps.pop();
    for (let i = 0; i < maps.length; i++) {
        if (maps[i].sources.length > 1) {
            throw new Error(`Transformation map ${i} must have exactly one source file.\n` +
                'Did you specify these with the most recent transformation maps first?');
        }
    }
    let tree = build(map, loader, '', 0);
    for (let i = maps.length - 1; i >= 0; i--) {
        tree = MapSource(maps[i], [tree]);
    }
    return tree;
}
function build(map, loader, importer, importerDepth) {
    const { resolvedSources, sourcesContent, ignoreList } = map;
    const depth = importerDepth + 1;
    const children = resolvedSources.map((sourceFile, i) => {
        // The loading context gives the loader more information about why this file is being loaded
        // (eg, from which importer). It also allows the loader to override the location of the loaded
        // sourcemap/original source, or to override the content in the sourcesContent field if it's
        // an unmodified source file.
        const ctx = {
            importer,
            depth,
            source: sourceFile || '',
            content: undefined,
            ignore: undefined,
        };
        // Use the provided loader callback to retrieve the file's sourcemap.
        // TODO: We should eventually support async loading of sourcemap files.
        const sourceMap = loader(ctx.source, ctx);
        const { source, content, ignore } = ctx;
        // If there is a sourcemap, then we need to recurse into it to load its source files.
        if (sourceMap)
            return build(new TraceMap(sourceMap, source), loader, source, depth);
        // Else, it's an unmodified source file.
        // The contents of this unmodified source file can be overridden via the loader context,
        // allowing it to be explicitly null or a string. If it remains undefined, we fall back to
        // the importing sourcemap's `sourcesContent` field.
        const sourceContent = content !== undefined ? content : sourcesContent ? sourcesContent[i] : null;
        const ignored = ignore !== undefined ? ignore : ignoreList ? ignoreList.includes(i) : false;
        return OriginalSource(source, sourceContent, ignored);
    });
    return MapSource(map, children);
}

/**
 * A SourceMap v3 compatible sourcemap, which only includes fields that were
 * provided to it.
 */
class SourceMap {
    constructor(map, options) {
        const out = options.decodedMappings ? toDecodedMap(map) : toEncodedMap(map);
        this.version = out.version; // SourceMap spec says this should be first.
        this.file = out.file;
        this.mappings = out.mappings;
        this.names = out.names;
        this.ignoreList = out.ignoreList;
        this.sourceRoot = out.sourceRoot;
        this.sources = out.sources;
        if (!options.excludeContent) {
            this.sourcesContent = out.sourcesContent;
        }
    }
    toString() {
        return JSON.stringify(this);
    }
}

/**
 * Traces through all the mappings in the root sourcemap, through the sources
 * (and their sourcemaps), all the way back to the original source location.
 *
 * `loader` will be called every time we encounter a source file. If it returns
 * a sourcemap, we will recurse into that sourcemap to continue the trace. If
 * it returns a falsey value, that source file is treated as an original,
 * unmodified source file.
 *
 * Pass `excludeContent` to exclude any self-containing source file content
 * from the output sourcemap.
 *
 * Pass `decodedMappings` to receive a SourceMap with decoded (instead of
 * VLQ encoded) mappings.
 */
function remapping(input, loader, options) {
    const opts = typeof options === 'object' ? options : { excludeContent: !!options, decodedMappings: false };
    const tree = buildSourceMapTree(input, loader);
    return new SourceMap(traceMappings(tree), opts);
}

export { remapping as default };
//# sourceMappingURL=remapping.mjs.map
1  node_modules/@ampproject/remapping/dist/remapping.mjs.map  generated vendored Normal file
File diff suppressed because one or more lines are too long
202  node_modules/@ampproject/remapping/dist/remapping.umd.js  generated vendored Normal file
@@ -0,0 +1,202 @@
(function (global, factory) {
    typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory(require('@jridgewell/trace-mapping'), require('@jridgewell/gen-mapping')) :
    typeof define === 'function' && define.amd ? define(['@jridgewell/trace-mapping', '@jridgewell/gen-mapping'], factory) :
    (global = typeof globalThis !== 'undefined' ? globalThis : global || self, global.remapping = factory(global.traceMapping, global.genMapping));
})(this, (function (traceMapping, genMapping) { 'use strict';

    const SOURCELESS_MAPPING = /* #__PURE__ */ SegmentObject('', -1, -1, '', null, false);
    const EMPTY_SOURCES = [];
    function SegmentObject(source, line, column, name, content, ignore) {
        return { source, line, column, name, content, ignore };
    }
    function Source(map, sources, source, content, ignore) {
        return {
            map,
            sources,
            source,
            content,
            ignore,
        };
    }
    /**
     * MapSource represents a single sourcemap, with the ability to trace mappings into its child nodes
     * (which may themselves be SourceMapTrees).
     */
    function MapSource(map, sources) {
        return Source(map, sources, '', null, false);
    }
    /**
     * A "leaf" node in the sourcemap tree, representing an original, unmodified source file. Recursive
     * segment tracing ends at the `OriginalSource`.
     */
    function OriginalSource(source, content, ignore) {
        return Source(null, EMPTY_SOURCES, source, content, ignore);
    }
    /**
     * traceMappings is only called on the root level SourceMapTree, and begins the process of
     * resolving each mapping in terms of the original source files.
     */
    function traceMappings(tree) {
        // TODO: Eventually support sourceRoot, which has to be removed because the sources are already
        // fully resolved. We'll need to make sources relative to the sourceRoot before adding them.
        const gen = new genMapping.GenMapping({ file: tree.map.file });
        const { sources: rootSources, map } = tree;
        const rootNames = map.names;
        const rootMappings = traceMapping.decodedMappings(map);
        for (let i = 0; i < rootMappings.length; i++) {
            const segments = rootMappings[i];
            for (let j = 0; j < segments.length; j++) {
                const segment = segments[j];
                const genCol = segment[0];
                let traced = SOURCELESS_MAPPING;
                // 1-length segments only move the current generated column, there's no source information
                // to gather from it.
                if (segment.length !== 1) {
                    const source = rootSources[segment[1]];
                    traced = originalPositionFor(source, segment[2], segment[3], segment.length === 5 ? rootNames[segment[4]] : '');
                    // If the trace is invalid, then the trace ran into a sourcemap that doesn't contain a
                    // respective segment into an original source.
                    if (traced == null)
                        continue;
                }
                const { column, line, name, content, source, ignore } = traced;
                genMapping.maybeAddSegment(gen, i, genCol, source, line, column, name);
                if (source && content != null)
                    genMapping.setSourceContent(gen, source, content);
                if (ignore)
                    genMapping.setIgnore(gen, source, true);
            }
        }
        return gen;
    }
    /**
     * originalPositionFor is only called on children SourceMapTrees. It recurses down into its own
     * child SourceMapTrees, until we find the original source map.
     */
    function originalPositionFor(source, line, column, name) {
        if (!source.map) {
            return SegmentObject(source.source, line, column, name, source.content, source.ignore);
        }
        const segment = traceMapping.traceSegment(source.map, line, column);
        // If we couldn't find a segment, then this doesn't exist in the sourcemap.
        if (segment == null)
            return null;
        // 1-length segments only move the current generated column, there's no source information
        // to gather from it.
        if (segment.length === 1)
            return SOURCELESS_MAPPING;
        return originalPositionFor(source.sources[segment[1]], segment[2], segment[3], segment.length === 5 ? source.map.names[segment[4]] : name);
    }

    function asArray(value) {
        if (Array.isArray(value))
            return value;
        return [value];
    }
    /**
     * Recursively builds a tree structure out of sourcemap files, with each node
     * being either an `OriginalSource` "leaf" or a `SourceMapTree` composed of
     * `OriginalSource`s and `SourceMapTree`s.
     *
     * Every sourcemap is composed of a collection of source files and mappings
     * into locations of those source files. When we generate a `SourceMapTree` for
     * the sourcemap, we attempt to load each source file's own sourcemap. If it
     * does not have an associated sourcemap, it is considered an original,
     * unmodified source file.
     */
    function buildSourceMapTree(input, loader) {
        const maps = asArray(input).map((m) => new traceMapping.TraceMap(m, ''));
        const map = maps.pop();
        for (let i = 0; i < maps.length; i++) {
            if (maps[i].sources.length > 1) {
                throw new Error(`Transformation map ${i} must have exactly one source file.\n` +
                    'Did you specify these with the most recent transformation maps first?');
            }
        }
        let tree = build(map, loader, '', 0);
        for (let i = maps.length - 1; i >= 0; i--) {
            tree = MapSource(maps[i], [tree]);
        }
        return tree;
    }
    function build(map, loader, importer, importerDepth) {
        const { resolvedSources, sourcesContent, ignoreList } = map;
        const depth = importerDepth + 1;
        const children = resolvedSources.map((sourceFile, i) => {
            // The loading context gives the loader more information about why this file is being loaded
            // (eg, from which importer). It also allows the loader to override the location of the loaded
            // sourcemap/original source, or to override the content in the sourcesContent field if it's
            // an unmodified source file.
            const ctx = {
                importer,
                depth,
                source: sourceFile || '',
                content: undefined,
                ignore: undefined,
            };
            // Use the provided loader callback to retrieve the file's sourcemap.
            // TODO: We should eventually support async loading of sourcemap files.
            const sourceMap = loader(ctx.source, ctx);
            const { source, content, ignore } = ctx;
            // If there is a sourcemap, then we need to recurse into it to load its source files.
            if (sourceMap)
                return build(new traceMapping.TraceMap(sourceMap, source), loader, source, depth);
            // Else, it's an unmodified source file.
            // The contents of this unmodified source file can be overridden via the loader context,
            // allowing it to be explicitly null or a string. If it remains undefined, we fall back to
            // the importing sourcemap's `sourcesContent` field.
            const sourceContent = content !== undefined ? content : sourcesContent ? sourcesContent[i] : null;
            const ignored = ignore !== undefined ? ignore : ignoreList ? ignoreList.includes(i) : false;
            return OriginalSource(source, sourceContent, ignored);
        });
        return MapSource(map, children);
    }

    /**
     * A SourceMap v3 compatible sourcemap, which only includes fields that were
     * provided to it.
     */
    class SourceMap {
        constructor(map, options) {
            const out = options.decodedMappings ? genMapping.toDecodedMap(map) : genMapping.toEncodedMap(map);
            this.version = out.version; // SourceMap spec says this should be first.
            this.file = out.file;
            this.mappings = out.mappings;
            this.names = out.names;
            this.ignoreList = out.ignoreList;
            this.sourceRoot = out.sourceRoot;
            this.sources = out.sources;
            if (!options.excludeContent) {
                this.sourcesContent = out.sourcesContent;
            }
        }
        toString() {
            return JSON.stringify(this);
        }
    }

    /**
     * Traces through all the mappings in the root sourcemap, through the sources
     * (and their sourcemaps), all the way back to the original source location.
     *
     * `loader` will be called every time we encounter a source file. If it returns
     * a sourcemap, we will recurse into that sourcemap to continue the trace. If
     * it returns a falsey value, that source file is treated as an original,
     * unmodified source file.
     *
     * Pass `excludeContent` to exclude any self-containing source file content
     * from the output sourcemap.
     *
     * Pass `decodedMappings` to receive a SourceMap with decoded (instead of
     * VLQ encoded) mappings.
     */
    function remapping(input, loader, options) {
        const opts = typeof options === 'object' ? options : { excludeContent: !!options, decodedMappings: false };
        const tree = buildSourceMapTree(input, loader);
        return new SourceMap(traceMappings(tree), opts);
    }

    return remapping;

}));
//# sourceMappingURL=remapping.umd.js.map
1  node_modules/@ampproject/remapping/dist/remapping.umd.js.map  generated vendored Normal file
File diff suppressed because one or more lines are too long
14  node_modules/@ampproject/remapping/dist/types/build-source-map-tree.d.ts  generated vendored Normal file
@@ -0,0 +1,14 @@
import type { MapSource as MapSourceType } from './source-map-tree';
import type { SourceMapInput, SourceMapLoader } from './types';
/**
 * Recursively builds a tree structure out of sourcemap files, with each node
 * being either an `OriginalSource` "leaf" or a `SourceMapTree` composed of
 * `OriginalSource`s and `SourceMapTree`s.
 *
 * Every sourcemap is composed of a collection of source files and mappings
 * into locations of those source files. When we generate a `SourceMapTree` for
 * the sourcemap, we attempt to load each source file's own sourcemap. If it
 * does not have an associated sourcemap, it is considered an original,
 * unmodified source file.
 */
export default function buildSourceMapTree(input: SourceMapInput | SourceMapInput[], loader: SourceMapLoader): MapSourceType;
20  node_modules/@ampproject/remapping/dist/types/remapping.d.ts  generated vendored Normal file
@@ -0,0 +1,20 @@
import SourceMap from './source-map';
import type { SourceMapInput, SourceMapLoader, Options } from './types';
export type { SourceMapSegment, EncodedSourceMap, EncodedSourceMap as RawSourceMap, DecodedSourceMap, SourceMapInput, SourceMapLoader, LoaderContext, Options, } from './types';
export type { SourceMap };
/**
 * Traces through all the mappings in the root sourcemap, through the sources
 * (and their sourcemaps), all the way back to the original source location.
 *
 * `loader` will be called every time we encounter a source file. If it returns
 * a sourcemap, we will recurse into that sourcemap to continue the trace. If
 * it returns a falsey value, that source file is treated as an original,
 * unmodified source file.
 *
 * Pass `excludeContent` to exclude any self-containing source file content
 * from the output sourcemap.
 *
 * Pass `decodedMappings` to receive a SourceMap with decoded (instead of
 * VLQ encoded) mappings.
 */
export default function remapping(input: SourceMapInput | SourceMapInput[], loader: SourceMapLoader, options?: boolean | Options): SourceMap;
45  node_modules/@ampproject/remapping/dist/types/source-map-tree.d.ts  generated vendored Normal file
@@ -0,0 +1,45 @@
import { GenMapping } from '@jridgewell/gen-mapping';
import type { TraceMap } from '@jridgewell/trace-mapping';
export declare type SourceMapSegmentObject = {
    column: number;
    line: number;
    name: string;
    source: string;
    content: string | null;
    ignore: boolean;
};
export declare type OriginalSource = {
    map: null;
    sources: Sources[];
    source: string;
    content: string | null;
    ignore: boolean;
};
export declare type MapSource = {
    map: TraceMap;
    sources: Sources[];
    source: string;
    content: null;
    ignore: false;
};
export declare type Sources = OriginalSource | MapSource;
/**
 * MapSource represents a single sourcemap, with the ability to trace mappings into its child nodes
 * (which may themselves be SourceMapTrees).
 */
export declare function MapSource(map: TraceMap, sources: Sources[]): MapSource;
/**
 * A "leaf" node in the sourcemap tree, representing an original, unmodified source file. Recursive
 * segment tracing ends at the `OriginalSource`.
 */
export declare function OriginalSource(source: string, content: string | null, ignore: boolean): OriginalSource;
/**
 * traceMappings is only called on the root level SourceMapTree, and begins the process of
 * resolving each mapping in terms of the original source files.
 */
export declare function traceMappings(tree: MapSource): GenMapping;
/**
 * originalPositionFor is only called on children SourceMapTrees. It recurses down into its own
 * child SourceMapTrees, until we find the original source map.
 */
export declare function originalPositionFor(source: Sources, line: number, column: number, name: string): SourceMapSegmentObject | null;
18  node_modules/@ampproject/remapping/dist/types/source-map.d.ts  generated vendored Normal file
@@ -0,0 +1,18 @@
import type { GenMapping } from '@jridgewell/gen-mapping';
import type { DecodedSourceMap, EncodedSourceMap, Options } from './types';
/**
 * A SourceMap v3 compatible sourcemap, which only includes fields that were
 * provided to it.
 */
export default class SourceMap {
    file?: string | null;
    mappings: EncodedSourceMap['mappings'] | DecodedSourceMap['mappings'];
    sourceRoot?: string;
    names: string[];
    sources: (string | null)[];
    sourcesContent?: (string | null)[];
    version: 3;
    ignoreList: number[] | undefined;
    constructor(map: GenMapping, options: Options);
    toString(): string;
}
15  node_modules/@ampproject/remapping/dist/types/types.d.ts  generated vendored Normal file
@@ -0,0 +1,15 @@
import type { SourceMapInput } from '@jridgewell/trace-mapping';
export type { SourceMapSegment, DecodedSourceMap, EncodedSourceMap, } from '@jridgewell/trace-mapping';
export type { SourceMapInput };
export declare type LoaderContext = {
    readonly importer: string;
    readonly depth: number;
    source: string;
    content: string | null | undefined;
    ignore: boolean | undefined;
};
export declare type SourceMapLoader = (file: string, ctx: LoaderContext) => SourceMapInput | null | undefined | void;
export declare type Options = {
    excludeContent?: boolean;
    decodedMappings?: boolean;
};
19  node_modules/@ampproject/remapping/node_modules/@jridgewell/trace-mapping/LICENSE  generated vendored Normal file
@@ -0,0 +1,19 @@
Copyright 2022 Justin Ridgewell <justin@ridgewell.name>

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
257
node_modules/@ampproject/remapping/node_modules/@jridgewell/trace-mapping/README.md
generated
vendored
Normal file
257
node_modules/@ampproject/remapping/node_modules/@jridgewell/trace-mapping/README.md
generated
vendored
Normal file
|
@ -0,0 +1,257 @@
|
|||
# @jridgewell/trace-mapping
|
||||
|
||||
> Trace the original position through a source map
|
||||
|
||||
`trace-mapping` allows you to take the line and column of an output file and trace it to the
|
||||
original location in the source file through a source map.
|
||||
|
||||
You may already be familiar with the [`source-map`][source-map] package's `SourceMapConsumer`. This
|
||||
provides the same `originalPositionFor` and `generatedPositionFor` API, without requiring WASM.
|
||||
|
||||
## Installation
|
||||
|
||||
```sh
|
||||
npm install @jridgewell/trace-mapping
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
```typescript
|
||||
import {
|
||||
TraceMap,
|
||||
originalPositionFor,
|
||||
generatedPositionFor,
|
||||
sourceContentFor,
|
||||
isIgnored,
|
||||
} from '@jridgewell/trace-mapping';
|
||||
|
||||
const tracer = new TraceMap({
|
||||
version: 3,
|
||||
sources: ['input.js'],
|
||||
sourcesContent: ['content of input.js'],
|
||||
names: ['foo'],
|
||||
mappings: 'KAyCIA',
|
||||
ignoreList: [],
|
||||
});
|
||||
|
||||
// Lines start at line 1, columns at column 0.
|
||||
const traced = originalPositionFor(tracer, { line: 1, column: 5 });
|
||||
assert.deepEqual(traced, {
|
||||
source: 'input.js',
|
||||
line: 42,
|
||||
column: 4,
|
||||
name: 'foo',
|
||||
});
|
||||
|
||||
const content = sourceContentFor(tracer, traced.source);
|
||||
assert.strictEqual(content, 'content for input.js');
|
||||
|
||||
const generated = generatedPositionFor(tracer, {
|
||||
source: 'input.js',
|
||||
line: 42,
|
||||
column: 4,
|
||||
});
|
||||
assert.deepEqual(generated, {
|
||||
line: 1,
|
||||
column: 5,
|
||||
});
|
||||
|
||||
const ignored = isIgnored(tracer, 'input.js');
|
||||
assert.equal(ignored, false);
|
||||
```
|
||||
|
||||
We also provide a lower level API to get the actual segment that matches our line and column. Unlike
|
||||
`originalPositionFor`, `traceSegment` uses a 0-base for `line`:
|
||||
|
||||
```typescript
|
||||
import { traceSegment } from '@jridgewell/trace-mapping';
|
||||
|
||||
// line is 0-base.
|
||||
const traced = traceSegment(tracer, /* line */ 0, /* column */ 5);
|
||||
|
||||
// Segments are [outputColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex]
|
||||
// Again, line is 0-base and so is sourceLine
|
||||
assert.deepEqual(traced, [5, 0, 41, 4, 0]);
|
||||
```

### SectionedSourceMaps

The sourcemap spec defines a special `sections` field that's designed to handle concatenation of
output code with associated sourcemaps. This type of sourcemap is rarely used (no major build tool
produces it), but if you are hand coding a concatenation you may need it. We provide an `AnyMap`
helper that can receive either a regular sourcemap or a `SectionedSourceMap` and returns a
`TraceMap` instance:

```typescript
import { AnyMap } from '@jridgewell/trace-mapping';
const fooOutput = 'foo';
const barOutput = 'bar';
const output = [fooOutput, barOutput].join('\n');

const sectioned = new AnyMap({
  version: 3,
  sections: [
    {
      // 0-base line and column
      offset: { line: 0, column: 0 },
      // fooOutput's sourcemap
      map: {
        version: 3,
        sources: ['foo.js'],
        names: ['foo'],
        mappings: 'AAAAA',
      },
    },
    {
      // barOutput's sourcemap will not affect the first line, only the second
      offset: { line: 1, column: 0 },
      map: {
        version: 3,
        sources: ['bar.js'],
        names: ['bar'],
        mappings: 'AAAAA',
      },
    },
  ],
});

const traced = originalPositionFor(sectioned, {
  line: 2,
  column: 0,
});

assert.deepEqual(traced, {
  source: 'bar.js',
  line: 1,
  column: 0,
  name: 'bar',
});
```
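
A `TraceMap` (including one built by `AnyMap`) can be turned back into a plain sourcemap object
with the exported `decodedMap` and `encodedMap` helpers, which the source below describes as
suitable for passing to a library that expects a sourcemap, or to `JSON.stringify`. A short
sketch using the `sectioned` map above:

```typescript
import { encodedMap, decodedMap } from '@jridgewell/trace-mapping';

// encodedMap clones the map with a VLQ-encoded `mappings` string;
// decodedMap clones it with `mappings` as arrays of segments.
const plain = encodedMap(sectioned);
const json = JSON.stringify(plain);
```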

## Benchmarks

```
node v18.0.0

amp.js.map - 45120 segments

Memory Usage:
trace-mapping decoded       562400 bytes
trace-mapping encoded      5706544 bytes
source-map-js             10717664 bytes
source-map-0.6.1          17446384 bytes
source-map-0.8.0           9701757 bytes
Smallest memory usage is trace-mapping decoded

Init speed:
trace-mapping:    decoded JSON input x 180 ops/sec ±0.34% (85 runs sampled)
trace-mapping:    encoded JSON input x 364 ops/sec ±1.77% (89 runs sampled)
trace-mapping:    decoded Object input x 3,116 ops/sec ±0.50% (96 runs sampled)
trace-mapping:    encoded Object input x 410 ops/sec ±2.62% (85 runs sampled)
source-map-js:    encoded Object input x 84.23 ops/sec ±0.91% (73 runs sampled)
source-map-0.6.1: encoded Object input x 37.21 ops/sec ±2.08% (51 runs sampled)
Fastest is trace-mapping: decoded Object input

Trace speed:
trace-mapping:    decoded originalPositionFor x 3,952,212 ops/sec ±0.17% (98 runs sampled)
trace-mapping:    encoded originalPositionFor x 3,487,468 ops/sec ±1.58% (90 runs sampled)
source-map-js:    encoded originalPositionFor x 827,730 ops/sec ±0.78% (97 runs sampled)
source-map-0.6.1: encoded originalPositionFor x 748,991 ops/sec ±0.53% (94 runs sampled)
source-map-0.8.0: encoded originalPositionFor x 2,532,894 ops/sec ±0.57% (95 runs sampled)
Fastest is trace-mapping: decoded originalPositionFor


***


babel.min.js.map - 347793 segments

Memory Usage:
trace-mapping decoded        89832 bytes
trace-mapping encoded     35474640 bytes
source-map-js             51257176 bytes
source-map-0.6.1          63515664 bytes
source-map-0.8.0          42933752 bytes
Smallest memory usage is trace-mapping decoded

Init speed:
trace-mapping:    decoded JSON input x 15.41 ops/sec ±8.65% (34 runs sampled)
trace-mapping:    encoded JSON input x 28.20 ops/sec ±12.87% (42 runs sampled)
trace-mapping:    decoded Object input x 964 ops/sec ±0.36% (99 runs sampled)
trace-mapping:    encoded Object input x 31.77 ops/sec ±13.79% (45 runs sampled)
source-map-js:    encoded Object input x 6.45 ops/sec ±5.16% (21 runs sampled)
source-map-0.6.1: encoded Object input x 4.07 ops/sec ±5.24% (15 runs sampled)
Fastest is trace-mapping: decoded Object input

Trace speed:
trace-mapping:    decoded originalPositionFor x 7,183,038 ops/sec ±0.58% (95 runs sampled)
trace-mapping:    encoded originalPositionFor x 5,192,185 ops/sec ±0.41% (100 runs sampled)
source-map-js:    encoded originalPositionFor x 4,259,489 ops/sec ±0.79% (94 runs sampled)
source-map-0.6.1: encoded originalPositionFor x 3,742,629 ops/sec ±0.71% (95 runs sampled)
source-map-0.8.0: encoded originalPositionFor x 6,270,211 ops/sec ±0.64% (94 runs sampled)
Fastest is trace-mapping: decoded originalPositionFor


***


preact.js.map - 1992 segments

Memory Usage:
trace-mapping decoded        37128 bytes
trace-mapping encoded       247280 bytes
source-map-js              1143536 bytes
source-map-0.6.1           1290992 bytes
source-map-0.8.0             96544 bytes
Smallest memory usage is trace-mapping decoded

Init speed:
trace-mapping:    decoded JSON input x 3,483 ops/sec ±0.30% (98 runs sampled)
trace-mapping:    encoded JSON input x 6,092 ops/sec ±0.18% (97 runs sampled)
trace-mapping:    decoded Object input x 249,076 ops/sec ±0.24% (98 runs sampled)
trace-mapping:    encoded Object input x 14,555 ops/sec ±0.48% (100 runs sampled)
source-map-js:    encoded Object input x 2,447 ops/sec ±0.36% (99 runs sampled)
source-map-0.6.1: encoded Object input x 1,201 ops/sec ±0.57% (96 runs sampled)
Fastest is trace-mapping: decoded Object input

Trace speed:
trace-mapping:    decoded originalPositionFor x 7,620,192 ops/sec ±0.09% (99 runs sampled)
trace-mapping:    encoded originalPositionFor x 6,872,554 ops/sec ±0.30% (97 runs sampled)
source-map-js:    encoded originalPositionFor x 2,489,570 ops/sec ±0.35% (94 runs sampled)
source-map-0.6.1: encoded originalPositionFor x 1,698,633 ops/sec ±0.28% (98 runs sampled)
source-map-0.8.0: encoded originalPositionFor x 4,015,644 ops/sec ±0.22% (98 runs sampled)
Fastest is trace-mapping: decoded originalPositionFor


***


react.js.map - 5726 segments

Memory Usage:
trace-mapping decoded        16176 bytes
trace-mapping encoded       681552 bytes
source-map-js              2418352 bytes
source-map-0.6.1           2443672 bytes
source-map-0.8.0            111768 bytes
Smallest memory usage is trace-mapping decoded

Init speed:
trace-mapping:    decoded JSON input x 1,720 ops/sec ±0.34% (98 runs sampled)
trace-mapping:    encoded JSON input x 4,406 ops/sec ±0.35% (100 runs sampled)
trace-mapping:    decoded Object input x 92,122 ops/sec ±0.10% (99 runs sampled)
trace-mapping:    encoded Object input x 5,385 ops/sec ±0.37% (99 runs sampled)
source-map-js:    encoded Object input x 794 ops/sec ±0.40% (98 runs sampled)
source-map-0.6.1: encoded Object input x 416 ops/sec ±0.54% (91 runs sampled)
Fastest is trace-mapping: decoded Object input

Trace speed:
trace-mapping:    decoded originalPositionFor x 32,759,519 ops/sec ±0.33% (100 runs sampled)
trace-mapping:    encoded originalPositionFor x 31,116,306 ops/sec ±0.33% (97 runs sampled)
source-map-js:    encoded originalPositionFor x 17,458,435 ops/sec ±0.44% (97 runs sampled)
source-map-0.6.1: encoded originalPositionFor x 12,687,097 ops/sec ±0.43% (95 runs sampled)
source-map-0.8.0: encoded originalPositionFor x 23,538,275 ops/sec ±0.38% (95 runs sampled)
Fastest is trace-mapping: decoded originalPositionFor
```

[source-map]: https://www.npmjs.com/package/source-map
580
node_modules/@ampproject/remapping/node_modules/@jridgewell/trace-mapping/dist/trace-mapping.mjs
generated
vendored
Normal file
@@ -0,0 +1,580 @@
import { encode, decode } from '@jridgewell/sourcemap-codec';
import resolveUri from '@jridgewell/resolve-uri';

function resolve(input, base) {
    // The base is always treated as a directory, if it's not empty.
    // https://github.com/mozilla/source-map/blob/8cb3ee57/lib/util.js#L327
    // https://github.com/chromium/chromium/blob/da4adbb3/third_party/blink/renderer/devtools/front_end/sdk/SourceMap.js#L400-L401
    if (base && !base.endsWith('/'))
        base += '/';
    return resolveUri(input, base);
}

/**
 * Removes everything after the last "/", but leaves the slash.
 */
function stripFilename(path) {
    if (!path)
        return '';
    const index = path.lastIndexOf('/');
    return path.slice(0, index + 1);
}

const COLUMN = 0;
const SOURCES_INDEX = 1;
const SOURCE_LINE = 2;
const SOURCE_COLUMN = 3;
const NAMES_INDEX = 4;
const REV_GENERATED_LINE = 1;
const REV_GENERATED_COLUMN = 2;

function maybeSort(mappings, owned) {
    const unsortedIndex = nextUnsortedSegmentLine(mappings, 0);
    if (unsortedIndex === mappings.length)
        return mappings;
    // If we own the array (meaning we parsed it from JSON), then we're free to directly mutate it. If
    // not, we do not want to modify the consumer's input array.
    if (!owned)
        mappings = mappings.slice();
    for (let i = unsortedIndex; i < mappings.length; i = nextUnsortedSegmentLine(mappings, i + 1)) {
        mappings[i] = sortSegments(mappings[i], owned);
    }
    return mappings;
}
function nextUnsortedSegmentLine(mappings, start) {
    for (let i = start; i < mappings.length; i++) {
        if (!isSorted(mappings[i]))
            return i;
    }
    return mappings.length;
}
function isSorted(line) {
    for (let j = 1; j < line.length; j++) {
        if (line[j][COLUMN] < line[j - 1][COLUMN]) {
            return false;
        }
    }
    return true;
}
function sortSegments(line, owned) {
    if (!owned)
        line = line.slice();
    return line.sort(sortComparator);
}
function sortComparator(a, b) {
    return a[COLUMN] - b[COLUMN];
}

let found = false;
/**
 * A binary search implementation that returns the index if a match is found.
 * If no match is found, then the left-index (the index associated with the item that comes just
 * before the desired index) is returned. To maintain proper sort order, a splice would happen at
 * the next index:
 *
 * ```js
 * const array = [1, 3];
 * const needle = 2;
 * const index = binarySearch(array, needle, (item, needle) => item - needle);
 *
 * assert.equal(index, 0);
 * array.splice(index + 1, 0, needle);
 * assert.deepEqual(array, [1, 2, 3]);
 * ```
 */
function binarySearch(haystack, needle, low, high) {
    while (low <= high) {
        const mid = low + ((high - low) >> 1);
        const cmp = haystack[mid][COLUMN] - needle;
        if (cmp === 0) {
            found = true;
            return mid;
        }
        if (cmp < 0) {
            low = mid + 1;
        }
        else {
            high = mid - 1;
        }
    }
    found = false;
    return low - 1;
}
function upperBound(haystack, needle, index) {
    for (let i = index + 1; i < haystack.length; index = i++) {
        if (haystack[i][COLUMN] !== needle)
            break;
    }
    return index;
}
function lowerBound(haystack, needle, index) {
    for (let i = index - 1; i >= 0; index = i--) {
        if (haystack[i][COLUMN] !== needle)
            break;
    }
    return index;
}
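// For example, with segment columns [0, 4, 4, 7], binarySearch for needle 4 may land on
// either matching index; upperBound(haystack, 4, index) scans forward to the last 4, and
// lowerBound(haystack, 4, index) scans backward to the first 4.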
function memoizedState() {
    return {
        lastKey: -1,
        lastNeedle: -1,
        lastIndex: -1,
    };
}
/**
 * This overly complicated beast is just to record the last tested line/column and the resulting
 * index, allowing us to skip a few tests if mappings are monotonically increasing.
 */
function memoizedBinarySearch(haystack, needle, state, key) {
    const { lastKey, lastNeedle, lastIndex } = state;
    let low = 0;
    let high = haystack.length - 1;
    if (key === lastKey) {
        if (needle === lastNeedle) {
            found = lastIndex !== -1 && haystack[lastIndex][COLUMN] === needle;
            return lastIndex;
        }
        if (needle >= lastNeedle) {
            // lastIndex may be -1 if the previous needle was not found.
            low = lastIndex === -1 ? 0 : lastIndex;
        }
        else {
            high = lastIndex;
        }
    }
    state.lastKey = key;
    state.lastNeedle = needle;
    return (state.lastIndex = binarySearch(haystack, needle, low, high));
}

// Rebuilds the original source files, with mappings that are ordered by source line/column instead
// of generated line/column.
function buildBySources(decoded, memos) {
    const sources = memos.map(buildNullArray);
    for (let i = 0; i < decoded.length; i++) {
        const line = decoded[i];
        for (let j = 0; j < line.length; j++) {
            const seg = line[j];
            if (seg.length === 1)
                continue;
            const sourceIndex = seg[SOURCES_INDEX];
            const sourceLine = seg[SOURCE_LINE];
            const sourceColumn = seg[SOURCE_COLUMN];
            const originalSource = sources[sourceIndex];
            const originalLine = (originalSource[sourceLine] || (originalSource[sourceLine] = []));
            const memo = memos[sourceIndex];
            // The binary search either found a match, or it found the left-index just before where the
            // segment should go. Either way, we want to insert after that. And there may be multiple
            // generated segments associated with an original location, so we may need to move several
            // indexes before we find where we need to insert.
            let index = upperBound(originalLine, sourceColumn, memoizedBinarySearch(originalLine, sourceColumn, memo, sourceLine));
            memo.lastIndex = ++index;
            insert(originalLine, index, [sourceColumn, i, seg[COLUMN]]);
        }
    }
    return sources;
}
function insert(array, index, value) {
    for (let i = array.length; i > index; i--) {
        array[i] = array[i - 1];
    }
    array[index] = value;
}
// Null arrays allow us to use ordered index keys without actually allocating contiguous memory like
// a real array. We use a null-prototype object to avoid prototype pollution and deoptimizations.
// Numeric properties on objects are magically sorted in ascending order by the engine regardless of
// the insertion order. So, by setting any numeric keys, even out of order, we'll get ascending
// order when iterating with for-in.
function buildNullArray() {
    return { __proto__: null };
}
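// For example (per the comment above), numeric keys enumerate in ascending order:
//   const arr = buildNullArray(); arr[9] = 'b'; arr[2] = 'a';
//   for (const line in arr) visits '2' first, then '9', despite insertion order.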

const AnyMap = function (map, mapUrl) {
    const parsed = parse(map);
    if (!('sections' in parsed)) {
        return new TraceMap(parsed, mapUrl);
    }
    const mappings = [];
    const sources = [];
    const sourcesContent = [];
    const names = [];
    const ignoreList = [];
    recurse(parsed, mapUrl, mappings, sources, sourcesContent, names, ignoreList, 0, 0, Infinity, Infinity);
    const joined = {
        version: 3,
        file: parsed.file,
        names,
        sources,
        sourcesContent,
        mappings,
        ignoreList,
    };
    return presortedDecodedMap(joined);
};
function parse(map) {
    return typeof map === 'string' ? JSON.parse(map) : map;
}
function recurse(input, mapUrl, mappings, sources, sourcesContent, names, ignoreList, lineOffset, columnOffset, stopLine, stopColumn) {
    const { sections } = input;
    for (let i = 0; i < sections.length; i++) {
        const { map, offset } = sections[i];
        let sl = stopLine;
        let sc = stopColumn;
        if (i + 1 < sections.length) {
            const nextOffset = sections[i + 1].offset;
            sl = Math.min(stopLine, lineOffset + nextOffset.line);
            if (sl === stopLine) {
                sc = Math.min(stopColumn, columnOffset + nextOffset.column);
            }
            else if (sl < stopLine) {
                sc = columnOffset + nextOffset.column;
            }
        }
        addSection(map, mapUrl, mappings, sources, sourcesContent, names, ignoreList, lineOffset + offset.line, columnOffset + offset.column, sl, sc);
    }
}
function addSection(input, mapUrl, mappings, sources, sourcesContent, names, ignoreList, lineOffset, columnOffset, stopLine, stopColumn) {
    const parsed = parse(input);
    if ('sections' in parsed)
        return recurse(...arguments);
    const map = new TraceMap(parsed, mapUrl);
    const sourcesOffset = sources.length;
    const namesOffset = names.length;
    const decoded = decodedMappings(map);
    const { resolvedSources, sourcesContent: contents, ignoreList: ignores } = map;
    append(sources, resolvedSources);
    append(names, map.names);
    if (contents)
        append(sourcesContent, contents);
    else
        for (let i = 0; i < resolvedSources.length; i++)
            sourcesContent.push(null);
    if (ignores)
        for (let i = 0; i < ignores.length; i++)
            ignoreList.push(ignores[i] + sourcesOffset);
    for (let i = 0; i < decoded.length; i++) {
        const lineI = lineOffset + i;
        // We can only add so many lines before we step into the range that the next section's map
        // controls. When we get to the last line, then we'll start checking the segments to see if
        // they've crossed into the column range. But it may not have any columns that overstep, so we
        // still need to check that we don't overstep lines, too.
        if (lineI > stopLine)
            return;
        // The out line may already exist in mappings (if we're continuing the line started by a
        // previous section). Or, we may have jumped ahead several lines to start this section.
        const out = getLine(mappings, lineI);
        // On the 0th loop, the section's column offset shifts us forward. On all other lines (since the
        // map can be multiple lines), it doesn't.
        const cOffset = i === 0 ? columnOffset : 0;
        const line = decoded[i];
        for (let j = 0; j < line.length; j++) {
            const seg = line[j];
            const column = cOffset + seg[COLUMN];
            // If this segment steps into the column range that the next section's map controls, we need
            // to stop early.
            if (lineI === stopLine && column >= stopColumn)
                return;
            if (seg.length === 1) {
                out.push([column]);
                continue;
            }
            const sourcesIndex = sourcesOffset + seg[SOURCES_INDEX];
            const sourceLine = seg[SOURCE_LINE];
            const sourceColumn = seg[SOURCE_COLUMN];
            out.push(seg.length === 4
                ? [column, sourcesIndex, sourceLine, sourceColumn]
                : [column, sourcesIndex, sourceLine, sourceColumn, namesOffset + seg[NAMES_INDEX]]);
        }
    }
}
function append(arr, other) {
    for (let i = 0; i < other.length; i++)
        arr.push(other[i]);
}
function getLine(arr, index) {
    for (let i = arr.length; i <= index; i++)
        arr[i] = [];
    return arr[index];
}

const LINE_GTR_ZERO = '`line` must be greater than 0 (lines start at line 1)';
const COL_GTR_EQ_ZERO = '`column` must be greater than or equal to 0 (columns start at column 0)';
const LEAST_UPPER_BOUND = -1;
const GREATEST_LOWER_BOUND = 1;
class TraceMap {
    constructor(map, mapUrl) {
        const isString = typeof map === 'string';
        if (!isString && map._decodedMemo)
            return map;
        const parsed = (isString ? JSON.parse(map) : map);
        const { version, file, names, sourceRoot, sources, sourcesContent } = parsed;
        this.version = version;
        this.file = file;
        this.names = names || [];
        this.sourceRoot = sourceRoot;
        this.sources = sources;
        this.sourcesContent = sourcesContent;
        this.ignoreList = parsed.ignoreList || parsed.x_google_ignoreList || undefined;
        const from = resolve(sourceRoot || '', stripFilename(mapUrl));
        this.resolvedSources = sources.map((s) => resolve(s || '', from));
        const { mappings } = parsed;
        if (typeof mappings === 'string') {
            this._encoded = mappings;
            this._decoded = undefined;
        }
        else {
            this._encoded = undefined;
            this._decoded = maybeSort(mappings, isString);
        }
        this._decodedMemo = memoizedState();
        this._bySources = undefined;
        this._bySourceMemos = undefined;
    }
}
/**
 * TypeScript doesn't allow friend access to private fields, so this just casts the map into a type
 * with public access modifiers.
 */
function cast(map) {
    return map;
}
/**
 * Returns the encoded (VLQ string) form of the SourceMap's mappings field.
 */
function encodedMappings(map) {
    var _a;
    var _b;
    return ((_a = (_b = cast(map))._encoded) !== null && _a !== void 0 ? _a : (_b._encoded = encode(cast(map)._decoded)));
}
/**
 * Returns the decoded (array of lines of segments) form of the SourceMap's mappings field.
 */
function decodedMappings(map) {
    var _a;
    return ((_a = cast(map))._decoded || (_a._decoded = decode(cast(map)._encoded)));
}
/**
 * A low-level API to find the segment associated with a generated line/column (think, from a
 * stack trace). Line and column here are 0-based, unlike `originalPositionFor`.
 */
function traceSegment(map, line, column) {
    const decoded = decodedMappings(map);
    // It's common for parent source maps to have pointers to lines that have no
    // mapping (like a "//# sourceMappingURL=") at the end of the child file.
    if (line >= decoded.length)
        return null;
    const segments = decoded[line];
    const index = traceSegmentInternal(segments, cast(map)._decodedMemo, line, column, GREATEST_LOWER_BOUND);
    return index === -1 ? null : segments[index];
}
/**
 * A higher-level API to find the source/line/column associated with a generated line/column
 * (think, from a stack trace). Line is 1-based, but column is 0-based, due to legacy behavior in
 * `source-map` library.
 */
function originalPositionFor(map, needle) {
    let { line, column, bias } = needle;
    line--;
    if (line < 0)
        throw new Error(LINE_GTR_ZERO);
    if (column < 0)
        throw new Error(COL_GTR_EQ_ZERO);
    const decoded = decodedMappings(map);
    // It's common for parent source maps to have pointers to lines that have no
    // mapping (like a "//# sourceMappingURL=") at the end of the child file.
    if (line >= decoded.length)
        return OMapping(null, null, null, null);
    const segments = decoded[line];
    const index = traceSegmentInternal(segments, cast(map)._decodedMemo, line, column, bias || GREATEST_LOWER_BOUND);
    if (index === -1)
        return OMapping(null, null, null, null);
    const segment = segments[index];
    if (segment.length === 1)
        return OMapping(null, null, null, null);
    const { names, resolvedSources } = map;
    return OMapping(resolvedSources[segment[SOURCES_INDEX]], segment[SOURCE_LINE] + 1, segment[SOURCE_COLUMN], segment.length === 5 ? names[segment[NAMES_INDEX]] : null);
}
/**
 * Finds the generated line/column position of the provided source/line/column source position.
 */
function generatedPositionFor(map, needle) {
    const { source, line, column, bias } = needle;
    return generatedPosition(map, source, line, column, bias || GREATEST_LOWER_BOUND, false);
}
/**
 * Finds all generated line/column positions of the provided source/line/column source position.
 */
function allGeneratedPositionsFor(map, needle) {
    const { source, line, column, bias } = needle;
    // SourceMapConsumer uses LEAST_UPPER_BOUND for some reason, so we follow suit.
    return generatedPosition(map, source, line, column, bias || LEAST_UPPER_BOUND, true);
}
/**
 * Iterates each mapping in generated position order.
 */
function eachMapping(map, cb) {
    const decoded = decodedMappings(map);
    const { names, resolvedSources } = map;
    for (let i = 0; i < decoded.length; i++) {
        const line = decoded[i];
        for (let j = 0; j < line.length; j++) {
            const seg = line[j];
            const generatedLine = i + 1;
            const generatedColumn = seg[0];
            let source = null;
            let originalLine = null;
            let originalColumn = null;
            let name = null;
            if (seg.length !== 1) {
                source = resolvedSources[seg[1]];
                originalLine = seg[2] + 1;
                originalColumn = seg[3];
            }
            if (seg.length === 5)
                name = names[seg[4]];
            cb({
                generatedLine,
                generatedColumn,
                source,
                originalLine,
                originalColumn,
                name,
            });
        }
    }
}
function sourceIndex(map, source) {
    const { sources, resolvedSources } = map;
    let index = sources.indexOf(source);
    if (index === -1)
        index = resolvedSources.indexOf(source);
    return index;
}
/**
 * Retrieves the source content for a particular source, if it's found. Returns null if not.
 */
function sourceContentFor(map, source) {
    const { sourcesContent } = map;
    if (sourcesContent == null)
        return null;
    const index = sourceIndex(map, source);
    return index === -1 ? null : sourcesContent[index];
}
/**
 * Determines if the source is marked as ignored by the source map.
 */
function isIgnored(map, source) {
    const { ignoreList } = map;
    if (ignoreList == null)
        return false;
    const index = sourceIndex(map, source);
    return index === -1 ? false : ignoreList.includes(index);
}
/**
 * A helper that skips sorting of the input map's mappings array, which can be expensive for larger
 * maps.
 */
function presortedDecodedMap(map, mapUrl) {
    const tracer = new TraceMap(clone(map, []), mapUrl);
    cast(tracer)._decoded = map.mappings;
    return tracer;
}
/**
 * Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
 * a sourcemap, or to JSON.stringify.
 */
function decodedMap(map) {
    return clone(map, decodedMappings(map));
}
/**
 * Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
 * a sourcemap, or to JSON.stringify.
 */
function encodedMap(map) {
    return clone(map, encodedMappings(map));
}
function clone(map, mappings) {
    return {
        version: map.version,
        file: map.file,
        names: map.names,
        sourceRoot: map.sourceRoot,
        sources: map.sources,
        sourcesContent: map.sourcesContent,
        mappings,
        ignoreList: map.ignoreList || map.x_google_ignoreList,
    };
}
function OMapping(source, line, column, name) {
    return { source, line, column, name };
}
function GMapping(line, column) {
    return { line, column };
}
function traceSegmentInternal(segments, memo, line, column, bias) {
    let index = memoizedBinarySearch(segments, column, memo, line);
    if (found) {
        index = (bias === LEAST_UPPER_BOUND ? upperBound : lowerBound)(segments, column, index);
    }
    else if (bias === LEAST_UPPER_BOUND)
        index++;
    if (index === -1 || index === segments.length)
        return -1;
    return index;
}
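// For example, if a line's segment columns are [0, 4, 4, 7] and the needle column is 5
// (no exact match), GREATEST_LOWER_BOUND resolves to the last 4 (index 2), while
// LEAST_UPPER_BOUND increments past it to 7 (index 3).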
function sliceGeneratedPositions(segments, memo, line, column, bias) {
    let min = traceSegmentInternal(segments, memo, line, column, GREATEST_LOWER_BOUND);
    // We ignored the bias when tracing the segment so that we're guaranteed to find the first (in
    // insertion order) segment that matched. Even if we did respect the bias when tracing, we would
    // still need to call `lowerBound()` to find the first segment, which is slower than just looking
    // for the GREATEST_LOWER_BOUND to begin with. The only difference that matters for us is when the
    // binary search didn't match, in which case GREATEST_LOWER_BOUND just needs to increment to
    // match LEAST_UPPER_BOUND.
    if (!found && bias === LEAST_UPPER_BOUND)
        min++;
    if (min === -1 || min === segments.length)
        return [];
    // We may have found the segment that started at an earlier column. If this is the case, then we
    // need to slice all generated segments that match _that_ column, because all such segments span
    // to our desired column.
    const matchedColumn = found ? column : segments[min][COLUMN];
    // The binary search is not guaranteed to find the lower bound when a match wasn't found.
    if (!found)
        min = lowerBound(segments, matchedColumn, min);
    const max = upperBound(segments, matchedColumn, min);
    const result = [];
    for (; min <= max; min++) {
        const segment = segments[min];
        result.push(GMapping(segment[REV_GENERATED_LINE] + 1, segment[REV_GENERATED_COLUMN]));
    }
    return result;
}
function generatedPosition(map, source, line, column, bias, all) {
    var _a;
    line--;
    if (line < 0)
        throw new Error(LINE_GTR_ZERO);
    if (column < 0)
        throw new Error(COL_GTR_EQ_ZERO);
    const { sources, resolvedSources } = map;
    let sourceIndex = sources.indexOf(source);
    if (sourceIndex === -1)
        sourceIndex = resolvedSources.indexOf(source);
    if (sourceIndex === -1)
        return all ? [] : GMapping(null, null);
    const generated = ((_a = cast(map))._bySources || (_a._bySources = buildBySources(decodedMappings(map), (cast(map)._bySourceMemos = sources.map(memoizedState)))));
    const segments = generated[sourceIndex][line];
    if (segments == null)
        return all ? [] : GMapping(null, null);
    const memo = cast(map)._bySourceMemos[sourceIndex];
    if (all)
        return sliceGeneratedPositions(segments, memo, line, column, bias);
    const index = traceSegmentInternal(segments, memo, line, column, bias);
    if (index === -1)
        return GMapping(null, null);
    const segment = segments[index];
    return GMapping(segment[REV_GENERATED_LINE] + 1, segment[REV_GENERATED_COLUMN]);
}

export { AnyMap, GREATEST_LOWER_BOUND, LEAST_UPPER_BOUND, TraceMap, allGeneratedPositionsFor, decodedMap, decodedMappings, eachMapping, encodedMap, encodedMappings, generatedPositionFor, isIgnored, originalPositionFor, presortedDecodedMap, sourceContentFor, traceSegment };
//# sourceMappingURL=trace-mapping.mjs.map
1
node_modules/@ampproject/remapping/node_modules/@jridgewell/trace-mapping/dist/trace-mapping.mjs.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
600
node_modules/@ampproject/remapping/node_modules/@jridgewell/trace-mapping/dist/trace-mapping.umd.js
generated
vendored
Normal file
@@ -0,0 +1,600 @@
(function (global, factory) {
    typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports, require('@jridgewell/sourcemap-codec'), require('@jridgewell/resolve-uri')) :
    typeof define === 'function' && define.amd ? define(['exports', '@jridgewell/sourcemap-codec', '@jridgewell/resolve-uri'], factory) :
    (global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.traceMapping = {}, global.sourcemapCodec, global.resolveURI));
})(this, (function (exports, sourcemapCodec, resolveUri) { 'use strict';

    function resolve(input, base) {
        // The base is always treated as a directory, if it's not empty.
        // https://github.com/mozilla/source-map/blob/8cb3ee57/lib/util.js#L327
        // https://github.com/chromium/chromium/blob/da4adbb3/third_party/blink/renderer/devtools/front_end/sdk/SourceMap.js#L400-L401
        if (base && !base.endsWith('/'))
            base += '/';
        return resolveUri(input, base);
    }

    /**
     * Removes everything after the last "/", but leaves the slash.
     */
    function stripFilename(path) {
        if (!path)
            return '';
        const index = path.lastIndexOf('/');
        return path.slice(0, index + 1);
    }

    const COLUMN = 0;
    const SOURCES_INDEX = 1;
    const SOURCE_LINE = 2;
    const SOURCE_COLUMN = 3;
    const NAMES_INDEX = 4;
    const REV_GENERATED_LINE = 1;
    const REV_GENERATED_COLUMN = 2;

    function maybeSort(mappings, owned) {
        const unsortedIndex = nextUnsortedSegmentLine(mappings, 0);
        if (unsortedIndex === mappings.length)
            return mappings;
        // If we own the array (meaning we parsed it from JSON), then we're free to directly mutate it. If
        // not, we do not want to modify the consumer's input array.
        if (!owned)
            mappings = mappings.slice();
        for (let i = unsortedIndex; i < mappings.length; i = nextUnsortedSegmentLine(mappings, i + 1)) {
            mappings[i] = sortSegments(mappings[i], owned);
        }
        return mappings;
    }
    function nextUnsortedSegmentLine(mappings, start) {
        for (let i = start; i < mappings.length; i++) {
            if (!isSorted(mappings[i]))
                return i;
        }
        return mappings.length;
    }
    function isSorted(line) {
        for (let j = 1; j < line.length; j++) {
            if (line[j][COLUMN] < line[j - 1][COLUMN]) {
                return false;
            }
        }
        return true;
    }
    function sortSegments(line, owned) {
        if (!owned)
            line = line.slice();
        return line.sort(sortComparator);
    }
    function sortComparator(a, b) {
        return a[COLUMN] - b[COLUMN];
    }

    let found = false;
    /**
     * A binary search implementation that returns the index if a match is found.
     * If no match is found, then the left-index (the index associated with the item that comes just
     * before the desired index) is returned. To maintain proper sort order, a splice would happen at
     * the next index:
     *
     * ```js
     * const array = [1, 3];
     * const needle = 2;
     * const index = binarySearch(array, needle, (item, needle) => item - needle);
     *
     * assert.equal(index, 0);
     * array.splice(index + 1, 0, needle);
     * assert.deepEqual(array, [1, 2, 3]);
     * ```
     */
    function binarySearch(haystack, needle, low, high) {
        while (low <= high) {
            const mid = low + ((high - low) >> 1);
            const cmp = haystack[mid][COLUMN] - needle;
            if (cmp === 0) {
                found = true;
                return mid;
            }
            if (cmp < 0) {
                low = mid + 1;
            }
            else {
                high = mid - 1;
            }
        }
        found = false;
        return low - 1;
    }
    function upperBound(haystack, needle, index) {
        for (let i = index + 1; i < haystack.length; index = i++) {
            if (haystack[i][COLUMN] !== needle)
                break;
        }
        return index;
    }
    function lowerBound(haystack, needle, index) {
        for (let i = index - 1; i >= 0; index = i--) {
            if (haystack[i][COLUMN] !== needle)
                break;
        }
        return index;
    }
    function memoizedState() {
        return {
            lastKey: -1,
            lastNeedle: -1,
            lastIndex: -1,
        };
    }
    /**
     * This overly complicated beast is just to record the last tested line/column and the resulting
     * index, allowing us to skip a few tests if mappings are monotonically increasing.
     */
    function memoizedBinarySearch(haystack, needle, state, key) {
        const { lastKey, lastNeedle, lastIndex } = state;
        let low = 0;
        let high = haystack.length - 1;
        if (key === lastKey) {
            if (needle === lastNeedle) {
                found = lastIndex !== -1 && haystack[lastIndex][COLUMN] === needle;
                return lastIndex;
            }
            if (needle >= lastNeedle) {
                // lastIndex may be -1 if the previous needle was not found.
                low = lastIndex === -1 ? 0 : lastIndex;
            }
            else {
                high = lastIndex;
            }
        }
        state.lastKey = key;
        state.lastNeedle = needle;
        return (state.lastIndex = binarySearch(haystack, needle, low, high));
    }

    // Rebuilds the original source files, with mappings that are ordered by source line/column instead
    // of generated line/column.
    function buildBySources(decoded, memos) {
        const sources = memos.map(buildNullArray);
        for (let i = 0; i < decoded.length; i++) {
            const line = decoded[i];
            for (let j = 0; j < line.length; j++) {
                const seg = line[j];
                if (seg.length === 1)
                    continue;
                const sourceIndex = seg[SOURCES_INDEX];
                const sourceLine = seg[SOURCE_LINE];
                const sourceColumn = seg[SOURCE_COLUMN];
                const originalSource = sources[sourceIndex];
                const originalLine = (originalSource[sourceLine] || (originalSource[sourceLine] = []));
                const memo = memos[sourceIndex];
                // The binary search either found a match, or it found the left-index just before where the
                // segment should go. Either way, we want to insert after that. And there may be multiple
                // generated segments associated with an original location, so we may need to move several
                // indexes before we find where we need to insert.
                let index = upperBound(originalLine, sourceColumn, memoizedBinarySearch(originalLine, sourceColumn, memo, sourceLine));
                memo.lastIndex = ++index;
                insert(originalLine, index, [sourceColumn, i, seg[COLUMN]]);
            }
        }
        return sources;
    }
    function insert(array, index, value) {
        for (let i = array.length; i > index; i--) {
            array[i] = array[i - 1];
        }
        array[index] = value;
    }
    // Null arrays allow us to use ordered index keys without actually allocating contiguous memory like
    // a real array. We use a null-prototype object to avoid prototype pollution and deoptimizations.
    // Numeric properties on objects are magically sorted in ascending order by the engine regardless of
    // the insertion order. So, by setting any numeric keys, even out of order, we'll get ascending
    // order when iterating with for-in.
    function buildNullArray() {
        return { __proto__: null };
    }

    const AnyMap = function (map, mapUrl) {
        const parsed = parse(map);
        if (!('sections' in parsed)) {
            return new TraceMap(parsed, mapUrl);
        }
        const mappings = [];
        const sources = [];
        const sourcesContent = [];
        const names = [];
        const ignoreList = [];
        recurse(parsed, mapUrl, mappings, sources, sourcesContent, names, ignoreList, 0, 0, Infinity, Infinity);
        const joined = {
            version: 3,
            file: parsed.file,
            names,
            sources,
            sourcesContent,
            mappings,
            ignoreList,
        };
        return presortedDecodedMap(joined);
    };
    function parse(map) {
        return typeof map === 'string' ? JSON.parse(map) : map;
    }
    function recurse(input, mapUrl, mappings, sources, sourcesContent, names, ignoreList, lineOffset, columnOffset, stopLine, stopColumn) {
        const { sections } = input;
        for (let i = 0; i < sections.length; i++) {
            const { map, offset } = sections[i];
            let sl = stopLine;
            let sc = stopColumn;
            if (i + 1 < sections.length) {
                const nextOffset = sections[i + 1].offset;
                sl = Math.min(stopLine, lineOffset + nextOffset.line);
                if (sl === stopLine) {
                    sc = Math.min(stopColumn, columnOffset + nextOffset.column);
                }
                else if (sl < stopLine) {
                    sc = columnOffset + nextOffset.column;
                }
            }
            addSection(map, mapUrl, mappings, sources, sourcesContent, names, ignoreList, lineOffset + offset.line, columnOffset + offset.column, sl, sc);
        }
    }
    function addSection(input, mapUrl, mappings, sources, sourcesContent, names, ignoreList, lineOffset, columnOffset, stopLine, stopColumn) {
        const parsed = parse(input);
        if ('sections' in parsed)
            return recurse(...arguments);
        const map = new TraceMap(parsed, mapUrl);
        const sourcesOffset = sources.length;
        const namesOffset = names.length;
        const decoded = decodedMappings(map);
        const { resolvedSources, sourcesContent: contents, ignoreList: ignores } = map;
        append(sources, resolvedSources);
        append(names, map.names);
        if (contents)
            append(sourcesContent, contents);
        else
            for (let i = 0; i < resolvedSources.length; i++)
                sourcesContent.push(null);
        if (ignores)
            for (let i = 0; i < ignores.length; i++)
                ignoreList.push(ignores[i] + sourcesOffset);
        for (let i = 0; i < decoded.length; i++) {
            const lineI = lineOffset + i;
            // We can only add so many lines before we step into the range that the next section's map
            // controls. When we get to the last line, then we'll start checking the segments to see if
            // they've crossed into the column range. But it may not have any columns that overstep, so we
            // still need to check that we don't overstep lines, too.
            if (lineI > stopLine)
                return;
            // The out line may already exist in mappings (if we're continuing the line started by a
            // previous section). Or, we may have jumped ahead several lines to start this section.
            const out = getLine(mappings, lineI);
            // On the 0th loop, the section's column offset shifts us forward. On all other lines (since the
            // map can be multiple lines), it doesn't.
            const cOffset = i === 0 ? columnOffset : 0;
            const line = decoded[i];
            for (let j = 0; j < line.length; j++) {
                const seg = line[j];
                const column = cOffset + seg[COLUMN];
                // If this segment steps into the column range that the next section's map controls, we need
                // to stop early.
                if (lineI === stopLine && column >= stopColumn)
                    return;
                if (seg.length === 1) {
                    out.push([column]);
                    continue;
                }
                const sourcesIndex = sourcesOffset + seg[SOURCES_INDEX];
                const sourceLine = seg[SOURCE_LINE];
                const sourceColumn = seg[SOURCE_COLUMN];
                out.push(seg.length === 4
                    ? [column, sourcesIndex, sourceLine, sourceColumn]
                    : [column, sourcesIndex, sourceLine, sourceColumn, namesOffset + seg[NAMES_INDEX]]);
            }
        }
    }
    function append(arr, other) {
        for (let i = 0; i < other.length; i++)
            arr.push(other[i]);
    }
    function getLine(arr, index) {
        for (let i = arr.length; i <= index; i++)
            arr[i] = [];
        return arr[index];
    }

    const LINE_GTR_ZERO = '`line` must be greater than 0 (lines start at line 1)';
    const COL_GTR_EQ_ZERO = '`column` must be greater than or equal to 0 (columns start at column 0)';
    const LEAST_UPPER_BOUND = -1;
    const GREATEST_LOWER_BOUND = 1;
    class TraceMap {
        constructor(map, mapUrl) {
            const isString = typeof map === 'string';
            if (!isString && map._decodedMemo)
                return map;
            const parsed = (isString ? JSON.parse(map) : map);
            const { version, file, names, sourceRoot, sources, sourcesContent } = parsed;
            this.version = version;
            this.file = file;
            this.names = names || [];
            this.sourceRoot = sourceRoot;
            this.sources = sources;
            this.sourcesContent = sourcesContent;
            this.ignoreList = parsed.ignoreList || parsed.x_google_ignoreList || undefined;
            const from = resolve(sourceRoot || '', stripFilename(mapUrl));
            this.resolvedSources = sources.map((s) => resolve(s || '', from));
            const { mappings } = parsed;
            if (typeof mappings === 'string') {
                this._encoded = mappings;
                this._decoded = undefined;
            }
            else {
                this._encoded = undefined;
                this._decoded = maybeSort(mappings, isString);
            }
            this._decodedMemo = memoizedState();
            this._bySources = undefined;
            this._bySourceMemos = undefined;
        }
    }
    /**
     * TypeScript doesn't allow friend access to private fields, so this just casts the map into a type
     * with public access modifiers.
     */
    function cast(map) {
        return map;
    }
    /**
     * Returns the encoded (VLQ string) form of the SourceMap's mappings field.
     */
    function encodedMappings(map) {
        var _a;
        var _b;
        return ((_a = (_b = cast(map))._encoded) !== null && _a !== void 0 ? _a : (_b._encoded = sourcemapCodec.encode(cast(map)._decoded)));
    }
    /**
     * Returns the decoded (array of lines of segments) form of the SourceMap's mappings field.
     */
    function decodedMappings(map) {
        var _a;
        return ((_a = cast(map))._decoded || (_a._decoded = sourcemapCodec.decode(cast(map)._encoded)));
    }
    /**
     * A low-level API to find the segment associated with a generated line/column (think, from a
     * stack trace). Line and column here are 0-based, unlike `originalPositionFor`.
     */
    function traceSegment(map, line, column) {
        const decoded = decodedMappings(map);
        // It's common for parent source maps to have pointers to lines that have no
        // mapping (like a "//# sourceMappingURL=") at the end of the child file.
        if (line >= decoded.length)
            return null;
        const segments = decoded[line];
        const index = traceSegmentInternal(segments, cast(map)._decodedMemo, line, column, GREATEST_LOWER_BOUND);
        return index === -1 ? null : segments[index];
    }
    /**
     * A higher-level API to find the source/line/column associated with a generated line/column
     * (think, from a stack trace). Line is 1-based, but column is 0-based, due to legacy behavior in
     * `source-map` library.
     */
    function originalPositionFor(map, needle) {
        let { line, column, bias } = needle;
        line--;
        if (line < 0)
            throw new Error(LINE_GTR_ZERO);
        if (column < 0)
            throw new Error(COL_GTR_EQ_ZERO);
        const decoded = decodedMappings(map);
        // It's common for parent source maps to have pointers to lines that have no
        // mapping (like a "//# sourceMappingURL=") at the end of the child file.
        if (line >= decoded.length)
            return OMapping(null, null, null, null);
        const segments = decoded[line];
        const index = traceSegmentInternal(segments, cast(map)._decodedMemo, line, column, bias || GREATEST_LOWER_BOUND);
        if (index === -1)
            return OMapping(null, null, null, null);
        const segment = segments[index];
        if (segment.length === 1)
            return OMapping(null, null, null, null);
        const { names, resolvedSources } = map;
        return OMapping(resolvedSources[segment[SOURCES_INDEX]], segment[SOURCE_LINE] + 1, segment[SOURCE_COLUMN], segment.length === 5 ? names[segment[NAMES_INDEX]] : null);
    }
    /**
     * Finds the generated line/column position of the provided source/line/column source position.
     */
    function generatedPositionFor(map, needle) {
        const { source, line, column, bias } = needle;
        return generatedPosition(map, source, line, column, bias || GREATEST_LOWER_BOUND, false);
    }
    /**
     * Finds all generated line/column positions of the provided source/line/column source position.
     */
    function allGeneratedPositionsFor(map, needle) {
        const { source, line, column, bias } = needle;
        // SourceMapConsumer uses LEAST_UPPER_BOUND for some reason, so we follow suit.
        return generatedPosition(map, source, line, column, bias || LEAST_UPPER_BOUND, true);
    }
    /**
     * Iterates each mapping in generated position order.
     */
    function eachMapping(map, cb) {
        const decoded = decodedMappings(map);
        const { names, resolvedSources } = map;
        for (let i = 0; i < decoded.length; i++) {
            const line = decoded[i];
            for (let j = 0; j < line.length; j++) {
                const seg = line[j];
                const generatedLine = i + 1;
                const generatedColumn = seg[0];
                let source = null;
                let originalLine = null;
                let originalColumn = null;
                let name = null;
                if (seg.length !== 1) {
                    source = resolvedSources[seg[1]];
                    originalLine = seg[2] + 1;
                    originalColumn = seg[3];
                }
                if (seg.length === 5)
                    name = names[seg[4]];
                cb({
                    generatedLine,
                    generatedColumn,
                    source,
                    originalLine,
                    originalColumn,
                    name,
                });
            }
        }
    }
    function sourceIndex(map, source) {
        const { sources, resolvedSources } = map;
        let index = sources.indexOf(source);
        if (index === -1)
            index = resolvedSources.indexOf(source);
        return index;
    }
    /**
     * Retrieves the source content for a particular source, if it's found. Returns null if not.
     */
    function sourceContentFor(map, source) {
        const { sourcesContent } = map;
        if (sourcesContent == null)
            return null;
        const index = sourceIndex(map, source);
        return index === -1 ? null : sourcesContent[index];
    }
    /**
     * Determines if the source is marked as ignored by the source map.
     */
    function isIgnored(map, source) {
        const { ignoreList } = map;
        if (ignoreList == null)
            return false;
        const index = sourceIndex(map, source);
        return index === -1 ? false : ignoreList.includes(index);
    }
    /**
     * A helper that skips sorting of the input map's mappings array, which can be expensive for larger
     * maps.
     */
    function presortedDecodedMap(map, mapUrl) {
        const tracer = new TraceMap(clone(map, []), mapUrl);
        cast(tracer)._decoded = map.mappings;
        return tracer;
    }
    /**
     * Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
     * a sourcemap, or to JSON.stringify.
     */
    function decodedMap(map) {
        return clone(map, decodedMappings(map));
    }
    /**
     * Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
     * a sourcemap, or to JSON.stringify.
     */
    function encodedMap(map) {
        return clone(map, encodedMappings(map));
    }
    function clone(map, mappings) {
        return {
            version: map.version,
            file: map.file,
            names: map.names,
            sourceRoot: map.sourceRoot,
            sources: map.sources,
            sourcesContent: map.sourcesContent,
            mappings,
            ignoreList: map.ignoreList || map.x_google_ignoreList,
        };
    }
    function OMapping(source, line, column, name) {
        return { source, line, column, name };
    }
    function GMapping(line, column) {
        return { line, column };
    }
    function traceSegmentInternal(segments, memo, line, column, bias) {
        let index = memoizedBinarySearch(segments, column, memo, line);
        if (found) {
            index = (bias === LEAST_UPPER_BOUND ? upperBound : lowerBound)(segments, column, index);
        }
        else if (bias === LEAST_UPPER_BOUND)
            index++;
        if (index === -1 || index === segments.length)
            return -1;
        return index;
    }
    function sliceGeneratedPositions(segments, memo, line, column, bias) {
        let min = traceSegmentInternal(segments, memo, line, column, GREATEST_LOWER_BOUND);
        // We ignored the bias when tracing the segment so that we're guaranteed to find the first (in
        // insertion order) segment that matched. Even if we did respect the bias when tracing, we would
        // still need to call `lowerBound()` to find the first segment, which is slower than just looking
        // for the GREATEST_LOWER_BOUND to begin with. The only difference that matters for us is when the
        // binary search didn't match, in which case GREATEST_LOWER_BOUND just needs to increment to
        // match LEAST_UPPER_BOUND.
        if (!found && bias === LEAST_UPPER_BOUND)
            min++;
        if (min === -1 || min === segments.length)
            return [];
        // We may have found the segment that started at an earlier column. If this is the case, then we
        // need to slice all generated segments that match _that_ column, because all such segments span
        // to our desired column.
        const matchedColumn = found ? column : segments[min][COLUMN];
        // The binary search is not guaranteed to find the lower bound when a match wasn't found.
        if (!found)
            min = lowerBound(segments, matchedColumn, min);
        const max = upperBound(segments, matchedColumn, min);
        const result = [];
        for (; min <= max; min++) {
            const segment = segments[min];
            result.push(GMapping(segment[REV_GENERATED_LINE] + 1, segment[REV_GENERATED_COLUMN]));
        }
        return result;
    }
    function generatedPosition(map, source, line, column, bias, all) {
        var _a;
        line--;
        if (line < 0)
            throw new Error(LINE_GTR_ZERO);
        if (column < 0)
            throw new Error(COL_GTR_EQ_ZERO);
        const { sources, resolvedSources } = map;
        let sourceIndex = sources.indexOf(source);
        if (sourceIndex === -1)
            sourceIndex = resolvedSources.indexOf(source);
        if (sourceIndex === -1)
            return all ? [] : GMapping(null, null);
        const generated = ((_a = cast(map))._bySources || (_a._bySources = buildBySources(decodedMappings(map), (cast(map)._bySourceMemos = sources.map(memoizedState)))));
        const segments = generated[sourceIndex][line];
        if (segments == null)
            return all ? [] : GMapping(null, null);
        const memo = cast(map)._bySourceMemos[sourceIndex];
        if (all)
            return sliceGeneratedPositions(segments, memo, line, column, bias);
        const index = traceSegmentInternal(segments, memo, line, column, bias);
        if (index === -1)
            return GMapping(null, null);
        const segment = segments[index];
        return GMapping(segment[REV_GENERATED_LINE] + 1, segment[REV_GENERATED_COLUMN]);
    }

    exports.AnyMap = AnyMap;
    exports.GREATEST_LOWER_BOUND = GREATEST_LOWER_BOUND;
    exports.LEAST_UPPER_BOUND = LEAST_UPPER_BOUND;
    exports.TraceMap = TraceMap;
    exports.allGeneratedPositionsFor = allGeneratedPositionsFor;
    exports.decodedMap = decodedMap;
    exports.decodedMappings = decodedMappings;
    exports.eachMapping = eachMapping;
    exports.encodedMap = encodedMap;
    exports.encodedMappings = encodedMappings;
    exports.generatedPositionFor = generatedPositionFor;
    exports.isIgnored = isIgnored;
    exports.originalPositionFor = originalPositionFor;
    exports.presortedDecodedMap = presortedDecodedMap;
    exports.sourceContentFor = sourceContentFor;
    exports.traceSegment = traceSegment;

}));
//# sourceMappingURL=trace-mapping.umd.js.map
1
node_modules/@ampproject/remapping/node_modules/@jridgewell/trace-mapping/dist/trace-mapping.umd.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
8
node_modules/@ampproject/remapping/node_modules/@jridgewell/trace-mapping/dist/types/any-map.d.ts
generated
vendored
Normal file
@@ -0,0 +1,8 @@
import { TraceMap } from './trace-mapping';
import type { SectionedSourceMapInput } from './types';
type AnyMap = {
    new (map: SectionedSourceMapInput, mapUrl?: string | null): TraceMap;
    (map: SectionedSourceMapInput, mapUrl?: string | null): TraceMap;
};
export declare const AnyMap: AnyMap;
export {};
32
node_modules/@ampproject/remapping/node_modules/@jridgewell/trace-mapping/dist/types/binary-search.d.ts
generated
vendored
Normal file
@@ -0,0 +1,32 @@
import type { SourceMapSegment, ReverseSegment } from './sourcemap-segment';
|
||||
export type MemoState = {
|
||||
lastKey: number;
|
||||
lastNeedle: number;
|
||||
lastIndex: number;
|
||||
};
|
||||
export declare let found: boolean;
|
||||
/**
|
||||
* A binary search implementation that returns the index if a match is found.
|
||||
* If no match is found, then the left-index (the index associated with the item that comes just
|
||||
* before the desired index) is returned. To maintain proper sort order, a splice would happen at
|
||||
* the next index:
|
||||
*
|
||||
* ```js
|
||||
* const array = [1, 3];
|
||||
* const needle = 2;
|
||||
* const index = binarySearch(array, needle, (item, needle) => item - needle);
|
||||
*
|
||||
* assert.equal(index, 0);
|
||||
* array.splice(index + 1, 0, needle);
|
||||
* assert.deepEqual(array, [1, 2, 3]);
|
||||
* ```
|
||||
*/
|
||||
export declare function binarySearch(haystack: SourceMapSegment[] | ReverseSegment[], needle: number, low: number, high: number): number;
|
||||
export declare function upperBound(haystack: SourceMapSegment[] | ReverseSegment[], needle: number, index: number): number;
|
||||
export declare function lowerBound(haystack: SourceMapSegment[] | ReverseSegment[], needle: number, index: number): number;
|
||||
export declare function memoizedState(): MemoState;
|
||||
/**
|
||||
* This overly complicated beast is just to record the last tested line/column and the resulting
|
||||
* index, allowing us to skip a few tests if mappings are monotonically increasing.
|
||||
*/
|
||||
export declare function memoizedBinarySearch(haystack: SourceMapSegment[] | ReverseSegment[], needle: number, state: MemoState, key: number): number;
|
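The doc comment above pins down an easy-to-misread contract (return the matching index, otherwise the left-index). Below is a minimal, self-contained TypeScript sketch of that contract over a plain number array; the function name and numeric comparator are illustrative, not the library's internals.

```ts
// Sketch of the documented contract: return the index of `needle` if present,
// otherwise the index of the item just before where it would be inserted
// (-1 when the needle sorts before every element). Illustrative only.
function leftBiasedSearch(haystack: number[], needle: number): number {
  let low = 0;
  let high = haystack.length - 1;
  while (low <= high) {
    const mid = (low + high) >> 1;
    const cmp = haystack[mid] - needle;
    if (cmp === 0) return mid;  // exact match
    if (cmp < 0) low = mid + 1; // needle is to the right
    else high = mid - 1;        // needle is to the left
  }
  return low - 1; // left-index: the item that comes just before the needle
}

// Mirrors the example in the doc comment:
const array = [1, 3];
const index = leftBiasedSearch(array, 2); // 0
array.splice(index + 1, 0, 2);            // array is now [1, 2, 3]
```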
7 node_modules/@ampproject/remapping/node_modules/@jridgewell/trace-mapping/dist/types/by-source.d.ts generated vendored Normal file
@@ -0,0 +1,7 @@
import type { ReverseSegment, SourceMapSegment } from './sourcemap-segment';
import type { MemoState } from './binary-search';
export type Source = {
    __proto__: null;
    [line: number]: Exclude<ReverseSegment, [number]>[];
};
export default function buildBySources(decoded: readonly SourceMapSegment[][], memos: MemoState[]): Source[];
1 node_modules/@ampproject/remapping/node_modules/@jridgewell/trace-mapping/dist/types/resolve.d.ts generated vendored Normal file
@@ -0,0 +1 @@
export default function resolve(input: string, base: string | undefined): string;
2 node_modules/@ampproject/remapping/node_modules/@jridgewell/trace-mapping/dist/types/sort.d.ts generated vendored Normal file
@@ -0,0 +1,2 @@
import type { SourceMapSegment } from './sourcemap-segment';
export default function maybeSort(mappings: SourceMapSegment[][], owned: boolean): SourceMapSegment[][];
16 node_modules/@ampproject/remapping/node_modules/@jridgewell/trace-mapping/dist/types/sourcemap-segment.d.ts generated vendored Normal file
@@ -0,0 +1,16 @@
type GeneratedColumn = number;
type SourcesIndex = number;
type SourceLine = number;
type SourceColumn = number;
type NamesIndex = number;
type GeneratedLine = number;
export type SourceMapSegment = [GeneratedColumn] | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn] | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn, NamesIndex];
export type ReverseSegment = [SourceColumn, GeneratedLine, GeneratedColumn];
export declare const COLUMN = 0;
export declare const SOURCES_INDEX = 1;
export declare const SOURCE_LINE = 2;
export declare const SOURCE_COLUMN = 3;
export declare const NAMES_INDEX = 4;
export declare const REV_GENERATED_LINE = 1;
export declare const REV_GENERATED_COLUMN = 2;
export {};
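For orientation, a decoded segment is just a tuple indexed by the constants above. A tiny TypeScript sketch with invented values:

```ts
// Hypothetical decoded segment (values invented): generated column 4 maps to
// sources[0], original line 1, original column 2. Indices mirror the exported
// constants (COLUMN = 0, SOURCES_INDEX = 1, SOURCE_LINE = 2, SOURCE_COLUMN = 3).
const seg: [number, number, number, number] = [4, 0, 1, 2];
const COLUMN = 0, SOURCE_LINE = 2;
console.log(seg[COLUMN], seg[SOURCE_LINE]); // 4 1
```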
4 node_modules/@ampproject/remapping/node_modules/@jridgewell/trace-mapping/dist/types/strip-filename.d.ts generated vendored Normal file
@@ -0,0 +1,4 @@
/**
 * Removes everything after the last "/", but leaves the slash.
 */
export default function stripFilename(path: string | undefined | null): string;
79 node_modules/@ampproject/remapping/node_modules/@jridgewell/trace-mapping/dist/types/trace-mapping.d.ts generated vendored Normal file
@@ -0,0 +1,79 @@
import type { SourceMapSegment } from './sourcemap-segment';
import type { SourceMapV3, DecodedSourceMap, EncodedSourceMap, InvalidOriginalMapping, OriginalMapping, InvalidGeneratedMapping, GeneratedMapping, SourceMapInput, Needle, SourceNeedle, SourceMap, EachMapping } from './types';
export type { SourceMapSegment } from './sourcemap-segment';
export type { SourceMap, DecodedSourceMap, EncodedSourceMap, Section, SectionedSourceMap, SourceMapV3, Bias, EachMapping, GeneratedMapping, InvalidGeneratedMapping, InvalidOriginalMapping, Needle, OriginalMapping, OriginalMapping as Mapping, SectionedSourceMapInput, SourceMapInput, SourceNeedle, XInput, EncodedSourceMapXInput, DecodedSourceMapXInput, SectionedSourceMapXInput, SectionXInput, } from './types';
export declare const LEAST_UPPER_BOUND = -1;
export declare const GREATEST_LOWER_BOUND = 1;
export { AnyMap } from './any-map';
export declare class TraceMap implements SourceMap {
    version: SourceMapV3['version'];
    file: SourceMapV3['file'];
    names: SourceMapV3['names'];
    sourceRoot: SourceMapV3['sourceRoot'];
    sources: SourceMapV3['sources'];
    sourcesContent: SourceMapV3['sourcesContent'];
    ignoreList: SourceMapV3['ignoreList'];
    resolvedSources: string[];
    private _encoded;
    private _decoded;
    private _decodedMemo;
    private _bySources;
    private _bySourceMemos;
    constructor(map: SourceMapInput, mapUrl?: string | null);
}
/**
 * Returns the encoded (VLQ string) form of the SourceMap's mappings field.
 */
export declare function encodedMappings(map: TraceMap): EncodedSourceMap['mappings'];
/**
 * Returns the decoded (array of lines of segments) form of the SourceMap's mappings field.
 */
export declare function decodedMappings(map: TraceMap): Readonly<DecodedSourceMap['mappings']>;
/**
 * A low-level API to find the segment associated with a generated line/column (think, from a
 * stack trace). Line and column here are 0-based, unlike `originalPositionFor`.
 */
export declare function traceSegment(map: TraceMap, line: number, column: number): Readonly<SourceMapSegment> | null;
/**
 * A higher-level API to find the source/line/column associated with a generated line/column
 * (think, from a stack trace). Line is 1-based, but column is 0-based, due to legacy behavior in
 * `source-map` library.
 */
export declare function originalPositionFor(map: TraceMap, needle: Needle): OriginalMapping | InvalidOriginalMapping;
/**
 * Finds the generated line/column position of the provided source/line/column source position.
 */
export declare function generatedPositionFor(map: TraceMap, needle: SourceNeedle): GeneratedMapping | InvalidGeneratedMapping;
/**
 * Finds all generated line/column positions of the provided source/line/column source position.
 */
export declare function allGeneratedPositionsFor(map: TraceMap, needle: SourceNeedle): GeneratedMapping[];
/**
 * Iterates each mapping in generated position order.
 */
export declare function eachMapping(map: TraceMap, cb: (mapping: EachMapping) => void): void;
/**
 * Retrieves the source content for a particular source, if its found. Returns null if not.
 */
export declare function sourceContentFor(map: TraceMap, source: string): string | null;
/**
 * Determines if the source is marked to ignore by the source map.
 */
export declare function isIgnored(map: TraceMap, source: string): boolean;
/**
 * A helper that skips sorting of the input map's mappings array, which can be expensive for larger
 * maps.
 */
export declare function presortedDecodedMap(map: DecodedSourceMap, mapUrl?: string): TraceMap;
/**
 * Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
 * a sourcemap, or to JSON.stringify.
 */
export declare function decodedMap(map: TraceMap): Omit<DecodedSourceMap, 'mappings'> & {
    mappings: readonly SourceMapSegment[][];
};
/**
 * Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
 * a sourcemap, or to JSON.stringify.
 */
export declare function encodedMap(map: TraceMap): EncodedSourceMap;
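The declarations above are the whole public surface of trace-mapping. A short usage sketch of that surface follows; the inline source map is invented for illustration, and the 1-based-line / 0-based-column convention comes from the doc comments above.

```ts
import {
  TraceMap,
  originalPositionFor,
  generatedPositionFor,
} from "@jridgewell/trace-mapping";

// A tiny hand-written source map, purely illustrative ("AAAA" maps
// generated line 1, column 0 to sources[0] line 1, column 0).
const tracer = new TraceMap({
  version: 3,
  sources: ["input.js"],
  names: [],
  mappings: "AAAA",
});

// Line is 1-based, column 0-based, per the doc comments above.
const original = originalPositionFor(tracer, { line: 1, column: 0 });
// -> { source: 'input.js', line: 1, column: 0, name: null }

const generated = generatedPositionFor(tracer, {
  source: "input.js",
  line: 1,
  column: 0,
});
// -> { line: 1, column: 0 }
```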
99 node_modules/@ampproject/remapping/node_modules/@jridgewell/trace-mapping/dist/types/types.d.ts generated vendored Normal file
@@ -0,0 +1,99 @@
import type { SourceMapSegment } from './sourcemap-segment';
import type { GREATEST_LOWER_BOUND, LEAST_UPPER_BOUND, TraceMap } from './trace-mapping';
export interface SourceMapV3 {
    file?: string | null;
    names: string[];
    sourceRoot?: string;
    sources: (string | null)[];
    sourcesContent?: (string | null)[];
    version: 3;
    ignoreList?: number[];
}
export interface EncodedSourceMap extends SourceMapV3 {
    mappings: string;
}
export interface DecodedSourceMap extends SourceMapV3 {
    mappings: SourceMapSegment[][];
}
export interface Section {
    offset: {
        line: number;
        column: number;
    };
    map: EncodedSourceMap | DecodedSourceMap | SectionedSourceMap;
}
export interface SectionedSourceMap {
    file?: string | null;
    sections: Section[];
    version: 3;
}
export type OriginalMapping = {
    source: string | null;
    line: number;
    column: number;
    name: string | null;
};
export type InvalidOriginalMapping = {
    source: null;
    line: null;
    column: null;
    name: null;
};
export type GeneratedMapping = {
    line: number;
    column: number;
};
export type InvalidGeneratedMapping = {
    line: null;
    column: null;
};
export type Bias = typeof GREATEST_LOWER_BOUND | typeof LEAST_UPPER_BOUND;
export type XInput = {
    x_google_ignoreList?: SourceMapV3['ignoreList'];
};
export type EncodedSourceMapXInput = EncodedSourceMap & XInput;
export type DecodedSourceMapXInput = DecodedSourceMap & XInput;
export type SectionedSourceMapXInput = Omit<SectionedSourceMap, 'sections'> & {
    sections: SectionXInput[];
};
export type SectionXInput = Omit<Section, 'map'> & {
    map: SectionedSourceMapInput;
};
export type SourceMapInput = string | EncodedSourceMapXInput | DecodedSourceMapXInput | TraceMap;
export type SectionedSourceMapInput = SourceMapInput | SectionedSourceMapXInput;
export type Needle = {
    line: number;
    column: number;
    bias?: Bias;
};
export type SourceNeedle = {
    source: string;
    line: number;
    column: number;
    bias?: Bias;
};
export type EachMapping = {
    generatedLine: number;
    generatedColumn: number;
    source: null;
    originalLine: null;
    originalColumn: null;
    name: null;
} | {
    generatedLine: number;
    generatedColumn: number;
    source: string | null;
    originalLine: number;
    originalColumn: number;
    name: string | null;
};
export declare abstract class SourceMap {
    version: SourceMapV3['version'];
    file: SourceMapV3['file'];
    names: SourceMapV3['names'];
    sourceRoot: SourceMapV3['sourceRoot'];
    sources: SourceMapV3['sources'];
    sourcesContent: SourceMapV3['sourcesContent'];
    resolvedSources: SourceMapV3['sources'];
    ignoreList: SourceMapV3['ignoreList'];
}
77 node_modules/@ampproject/remapping/node_modules/@jridgewell/trace-mapping/package.json generated vendored Normal file
@@ -0,0 +1,77 @@
{
  "name": "@jridgewell/trace-mapping",
  "version": "0.3.25",
  "description": "Trace the original position through a source map",
  "keywords": [
    "source",
    "map"
  ],
  "main": "dist/trace-mapping.umd.js",
  "module": "dist/trace-mapping.mjs",
  "types": "dist/types/trace-mapping.d.ts",
  "files": [
    "dist"
  ],
  "exports": {
    ".": [
      {
        "types": "./dist/types/trace-mapping.d.ts",
        "browser": "./dist/trace-mapping.umd.js",
        "require": "./dist/trace-mapping.umd.js",
        "import": "./dist/trace-mapping.mjs"
      },
      "./dist/trace-mapping.umd.js"
    ],
    "./package.json": "./package.json"
  },
  "author": "Justin Ridgewell <justin@ridgewell.name>",
  "repository": {
    "type": "git",
    "url": "git+https://github.com/jridgewell/trace-mapping.git"
  },
  "license": "MIT",
  "scripts": {
    "benchmark": "run-s build:rollup benchmark:*",
    "benchmark:install": "cd benchmark && npm install",
    "benchmark:only": "node --expose-gc benchmark/index.mjs",
    "build": "run-s -n build:*",
    "build:rollup": "rollup -c rollup.config.mjs",
    "build:ts": "tsc --project tsconfig.build.json",
    "lint": "run-s -n lint:*",
    "lint:prettier": "npm run test:lint:prettier -- --write",
    "lint:ts": "npm run test:lint:ts -- --fix",
    "prebuild": "rm -rf dist",
    "prepublishOnly": "npm run preversion",
    "preversion": "run-s test build",
    "test": "run-s -n test:lint test:only",
    "test:debug": "mocha --inspect-brk",
    "test:lint": "run-s -n test:lint:*",
    "test:lint:prettier": "prettier --check '{src,test}/**/*.ts' '**/*.md'",
    "test:lint:ts": "eslint '{src,test}/**/*.ts'",
    "test:only": "c8 mocha",
    "test:watch": "mocha --watch"
  },
  "devDependencies": {
    "@rollup/plugin-typescript": "11.1.6",
    "@types/mocha": "10.0.6",
    "@types/node": "20.11.20",
    "@typescript-eslint/eslint-plugin": "6.18.1",
    "@typescript-eslint/parser": "6.18.1",
    "benchmark": "2.1.4",
    "c8": "9.0.0",
    "esbuild": "0.19.11",
    "eslint": "8.56.0",
    "eslint-config-prettier": "9.1.0",
    "eslint-plugin-no-only-tests": "3.1.0",
    "mocha": "10.3.0",
    "npm-run-all": "4.1.5",
    "prettier": "3.1.1",
    "rollup": "4.9.4",
    "tsx": "4.7.0",
    "typescript": "5.3.3"
  },
  "dependencies": {
    "@jridgewell/resolve-uri": "^3.1.0",
    "@jridgewell/sourcemap-codec": "^1.4.14"
  }
}
75 node_modules/@ampproject/remapping/package.json generated vendored Normal file
@@ -0,0 +1,75 @@
{
  "name": "@ampproject/remapping",
  "version": "2.3.0",
  "description": "Remap sequential sourcemaps through transformations to point at the original source code",
  "keywords": [
    "source",
    "map",
    "remap"
  ],
  "main": "dist/remapping.umd.js",
  "module": "dist/remapping.mjs",
  "types": "dist/types/remapping.d.ts",
  "exports": {
    ".": [
      {
        "types": "./dist/types/remapping.d.ts",
        "browser": "./dist/remapping.umd.js",
        "require": "./dist/remapping.umd.js",
        "import": "./dist/remapping.mjs"
      },
      "./dist/remapping.umd.js"
    ],
    "./package.json": "./package.json"
  },
  "files": [
    "dist"
  ],
  "author": "Justin Ridgewell <jridgewell@google.com>",
  "repository": {
    "type": "git",
    "url": "git+https://github.com/ampproject/remapping.git"
  },
  "license": "Apache-2.0",
  "engines": {
    "node": ">=6.0.0"
  },
  "scripts": {
    "build": "run-s -n build:*",
    "build:rollup": "rollup -c rollup.config.js",
    "build:ts": "tsc --project tsconfig.build.json",
    "lint": "run-s -n lint:*",
    "lint:prettier": "npm run test:lint:prettier -- --write",
    "lint:ts": "npm run test:lint:ts -- --fix",
    "prebuild": "rm -rf dist",
    "prepublishOnly": "npm run preversion",
    "preversion": "run-s test build",
    "test": "run-s -n test:lint test:only",
    "test:debug": "node --inspect-brk node_modules/.bin/jest --runInBand",
    "test:lint": "run-s -n test:lint:*",
    "test:lint:prettier": "prettier --check '{src,test}/**/*.ts'",
    "test:lint:ts": "eslint '{src,test}/**/*.ts'",
    "test:only": "jest --coverage",
    "test:watch": "jest --coverage --watch"
  },
  "devDependencies": {
    "@rollup/plugin-typescript": "8.3.2",
    "@types/jest": "27.4.1",
    "@typescript-eslint/eslint-plugin": "5.20.0",
    "@typescript-eslint/parser": "5.20.0",
    "eslint": "8.14.0",
    "eslint-config-prettier": "8.5.0",
    "jest": "27.5.1",
    "jest-config": "27.5.1",
    "npm-run-all": "4.1.5",
    "prettier": "2.6.2",
    "rollup": "2.70.2",
    "ts-jest": "27.1.4",
    "tslib": "2.4.0",
    "typescript": "4.6.3"
  },
  "dependencies": {
    "@jridgewell/gen-mapping": "^0.3.5",
    "@jridgewell/trace-mapping": "^0.3.24"
  }
}
22 node_modules/@babel/code-frame/LICENSE generated vendored Normal file
@@ -0,0 +1,22 @@
MIT License

Copyright (c) 2014-present Sebastian McKenzie and other contributors

Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:

The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
19 node_modules/@babel/code-frame/README.md generated vendored Normal file
@@ -0,0 +1,19 @@
# @babel/code-frame

> Generate errors that contain a code frame that point to source locations.

See our website [@babel/code-frame](https://babeljs.io/docs/babel-code-frame) for more information.

## Install

Using npm:

```sh
npm install --save-dev @babel/code-frame
```

or using yarn:

```sh
yarn add @babel/code-frame --dev
```
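The README defers to the website for usage. From the `lib/index.js` shipped in this same diff, the primary entry point is `codeFrameColumns(rawLines, loc, opts)`; a minimal sketch follows (the sample source string is invented here).

```ts
import { codeFrameColumns } from "@babel/code-frame";

// Invented sample input; any raw source string works.
const rawLines = ["class Foo {", "  constructor()", "}"].join("\n");
const location = { start: { line: 2, column: 16 } };

const frame = codeFrameColumns(rawLines, location, {
  highlightCode: false,
  message: "Missing method body", // printed next to the ^ marker
});
// Prints lines 1-3 with a line-number gutter, a ">" on line 2, and a "^"
// marker (followed by the message) under column 16.
console.log(frame);
```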
216 node_modules/@babel/code-frame/lib/index.js generated vendored Normal file
@@ -0,0 +1,216 @@
'use strict';

Object.defineProperty(exports, '__esModule', { value: true });

var picocolors = require('picocolors');
var jsTokens = require('js-tokens');
var helperValidatorIdentifier = require('@babel/helper-validator-identifier');

function isColorSupported() {
  return typeof process === "object" && (process.env.FORCE_COLOR === "0" || process.env.FORCE_COLOR === "false") ? false : picocolors.isColorSupported;
}
const compose = (f, g) => v => f(g(v));
function buildDefs(colors) {
  return {
    keyword: colors.cyan,
    capitalized: colors.yellow,
    jsxIdentifier: colors.yellow,
    punctuator: colors.yellow,
    number: colors.magenta,
    string: colors.green,
    regex: colors.magenta,
    comment: colors.gray,
    invalid: compose(compose(colors.white, colors.bgRed), colors.bold),
    gutter: colors.gray,
    marker: compose(colors.red, colors.bold),
    message: compose(colors.red, colors.bold),
    reset: colors.reset
  };
}
const defsOn = buildDefs(picocolors.createColors(true));
const defsOff = buildDefs(picocolors.createColors(false));
function getDefs(enabled) {
  return enabled ? defsOn : defsOff;
}

const sometimesKeywords = new Set(["as", "async", "from", "get", "of", "set"]);
const NEWLINE$1 = /\r\n|[\n\r\u2028\u2029]/;
const BRACKET = /^[()[\]{}]$/;
let tokenize;
{
  const JSX_TAG = /^[a-z][\w-]*$/i;
  const getTokenType = function (token, offset, text) {
    if (token.type === "name") {
      if (helperValidatorIdentifier.isKeyword(token.value) || helperValidatorIdentifier.isStrictReservedWord(token.value, true) || sometimesKeywords.has(token.value)) {
        return "keyword";
      }
      if (JSX_TAG.test(token.value) && (text[offset - 1] === "<" || text.slice(offset - 2, offset) === "</")) {
        return "jsxIdentifier";
      }
      if (token.value[0] !== token.value[0].toLowerCase()) {
        return "capitalized";
      }
    }
    if (token.type === "punctuator" && BRACKET.test(token.value)) {
      return "bracket";
    }
    if (token.type === "invalid" && (token.value === "@" || token.value === "#")) {
      return "punctuator";
    }
    return token.type;
  };
  tokenize = function* (text) {
    let match;
    while (match = jsTokens.default.exec(text)) {
      const token = jsTokens.matchToToken(match);
      yield {
        type: getTokenType(token, match.index, text),
        value: token.value
      };
    }
  };
}
function highlight(text) {
  if (text === "") return "";
  const defs = getDefs(true);
  let highlighted = "";
  for (const { type, value } of tokenize(text)) {
    if (type in defs) {
      highlighted += value.split(NEWLINE$1).map(str => defs[type](str)).join("\n");
    } else {
      highlighted += value;
    }
  }
  return highlighted;
}

let deprecationWarningShown = false;
const NEWLINE = /\r\n|[\n\r\u2028\u2029]/;
function getMarkerLines(loc, source, opts) {
  const startLoc = Object.assign({
    column: 0,
    line: -1
  }, loc.start);
  const endLoc = Object.assign({}, startLoc, loc.end);
  const { linesAbove = 2, linesBelow = 3 } = opts || {};
  const startLine = startLoc.line;
  const startColumn = startLoc.column;
  const endLine = endLoc.line;
  const endColumn = endLoc.column;
  let start = Math.max(startLine - (linesAbove + 1), 0);
  let end = Math.min(source.length, endLine + linesBelow);
  if (startLine === -1) {
    start = 0;
  }
  if (endLine === -1) {
    end = source.length;
  }
  const lineDiff = endLine - startLine;
  const markerLines = {};
  if (lineDiff) {
    for (let i = 0; i <= lineDiff; i++) {
      const lineNumber = i + startLine;
      if (!startColumn) {
        markerLines[lineNumber] = true;
      } else if (i === 0) {
        const sourceLength = source[lineNumber - 1].length;
        markerLines[lineNumber] = [startColumn, sourceLength - startColumn + 1];
      } else if (i === lineDiff) {
        markerLines[lineNumber] = [0, endColumn];
      } else {
        const sourceLength = source[lineNumber - i].length;
        markerLines[lineNumber] = [0, sourceLength];
      }
    }
  } else {
    if (startColumn === endColumn) {
      if (startColumn) {
        markerLines[startLine] = [startColumn, 0];
      } else {
        markerLines[startLine] = true;
      }
    } else {
      markerLines[startLine] = [startColumn, endColumn - startColumn];
    }
  }
  return { start, end, markerLines };
}
function codeFrameColumns(rawLines, loc, opts = {}) {
  const shouldHighlight = opts.forceColor || isColorSupported() && opts.highlightCode;
  const defs = getDefs(shouldHighlight);
  const lines = rawLines.split(NEWLINE);
  const { start, end, markerLines } = getMarkerLines(loc, lines, opts);
  const hasColumns = loc.start && typeof loc.start.column === "number";
  const numberMaxWidth = String(end).length;
  const highlightedLines = shouldHighlight ? highlight(rawLines) : rawLines;
  let frame = highlightedLines.split(NEWLINE, end).slice(start, end).map((line, index) => {
    const number = start + 1 + index;
    const paddedNumber = ` ${number}`.slice(-numberMaxWidth);
    const gutter = ` ${paddedNumber} |`;
    const hasMarker = markerLines[number];
    const lastMarkerLine = !markerLines[number + 1];
    if (hasMarker) {
      let markerLine = "";
      if (Array.isArray(hasMarker)) {
        const markerSpacing = line.slice(0, Math.max(hasMarker[0] - 1, 0)).replace(/[^\t]/g, " ");
        const numberOfMarkers = hasMarker[1] || 1;
        markerLine = ["\n ", defs.gutter(gutter.replace(/\d/g, " ")), " ", markerSpacing, defs.marker("^").repeat(numberOfMarkers)].join("");
        if (lastMarkerLine && opts.message) {
          markerLine += " " + defs.message(opts.message);
        }
      }
      return [defs.marker(">"), defs.gutter(gutter), line.length > 0 ? ` ${line}` : "", markerLine].join("");
    } else {
      return ` ${defs.gutter(gutter)}${line.length > 0 ? ` ${line}` : ""}`;
    }
  }).join("\n");
  if (opts.message && !hasColumns) {
    frame = `${" ".repeat(numberMaxWidth + 1)}${opts.message}\n${frame}`;
  }
  if (shouldHighlight) {
    return defs.reset(frame);
  } else {
    return frame;
  }
}
function index(rawLines, lineNumber, colNumber, opts = {}) {
  if (!deprecationWarningShown) {
    deprecationWarningShown = true;
    const message = "Passing lineNumber and colNumber is deprecated to @babel/code-frame. Please use `codeFrameColumns`.";
    if (process.emitWarning) {
      process.emitWarning(message, "DeprecationWarning");
    } else {
      const deprecationError = new Error(message);
      deprecationError.name = "DeprecationWarning";
      console.warn(new Error(message));
    }
  }
  colNumber = Math.max(colNumber, 0);
  const location = {
    start: {
      column: colNumber,
      line: lineNumber
    }
  };
  return codeFrameColumns(rawLines, location, opts);
}

exports.codeFrameColumns = codeFrameColumns;
exports.default = index;
exports.highlight = highlight;
//# sourceMappingURL=index.js.map
1 node_modules/@babel/code-frame/lib/index.js.map generated vendored Normal file
File diff suppressed because one or more lines are too long
31 node_modules/@babel/code-frame/package.json generated vendored Normal file
@@ -0,0 +1,31 @@
{
  "name": "@babel/code-frame",
  "version": "7.26.2",
  "description": "Generate errors that contain a code frame that point to source locations.",
  "author": "The Babel Team (https://babel.dev/team)",
  "homepage": "https://babel.dev/docs/en/next/babel-code-frame",
  "bugs": "https://github.com/babel/babel/issues?utf8=%E2%9C%93&q=is%3Aissue+is%3Aopen",
  "license": "MIT",
  "publishConfig": {
    "access": "public"
  },
  "repository": {
    "type": "git",
    "url": "https://github.com/babel/babel.git",
    "directory": "packages/babel-code-frame"
  },
  "main": "./lib/index.js",
  "dependencies": {
    "@babel/helper-validator-identifier": "^7.25.9",
    "js-tokens": "^4.0.0",
    "picocolors": "^1.0.0"
  },
  "devDependencies": {
    "import-meta-resolve": "^4.1.0",
    "strip-ansi": "^4.0.0"
  },
  "engines": {
    "node": ">=6.9.0"
  },
  "type": "commonjs"
}
22 node_modules/@babel/compat-data/LICENSE generated vendored Normal file
@@ -0,0 +1,22 @@
MIT License

Copyright (c) 2014-present Sebastian McKenzie and other contributors

Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:

The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
19 node_modules/@babel/compat-data/README.md generated vendored Normal file
@@ -0,0 +1,19 @@
# @babel/compat-data

> The compat-data to determine required Babel plugins

See our website [@babel/compat-data](https://babeljs.io/docs/babel-compat-data) for more information.

## Install

Using npm:

```sh
npm install --save @babel/compat-data
```

or using yarn:

```sh
yarn add @babel/compat-data
```
2 node_modules/@babel/compat-data/corejs2-built-ins.js generated vendored Normal file
@@ -0,0 +1,2 @@
// Todo (Babel 8): remove this file as Babel 8 drop support of core-js 2
module.exports = require("./data/corejs2-built-ins.json");
2 node_modules/@babel/compat-data/corejs3-shipped-proposals.js generated vendored Normal file
@@ -0,0 +1,2 @@
// Todo (Babel 8): remove this file now that it is included in babel-plugin-polyfill-corejs3
module.exports = require("./data/corejs3-shipped-proposals.json");
2090 node_modules/@babel/compat-data/data/corejs2-built-ins.json generated vendored Normal file
File diff suppressed because it is too large
5 node_modules/@babel/compat-data/data/corejs3-shipped-proposals.json generated vendored Normal file
@@ -0,0 +1,5 @@
[
  "esnext.promise.all-settled",
  "esnext.string.match-all",
  "esnext.global-this"
]
18 node_modules/@babel/compat-data/data/native-modules.json generated vendored Normal file
@@ -0,0 +1,18 @@
{
  "es6.module": {
    "chrome": "61",
    "and_chr": "61",
    "edge": "16",
    "firefox": "60",
    "and_ff": "60",
    "node": "13.2.0",
    "opera": "48",
    "op_mob": "45",
    "safari": "10.1",
    "ios": "10.3",
    "samsung": "8.2",
    "android": "61",
    "electron": "2.0",
    "ios_saf": "10.3"
  }
}
35 node_modules/@babel/compat-data/data/overlapping-plugins.json generated vendored Normal file
@@ -0,0 +1,35 @@
{
  "transform-async-to-generator": ["bugfix/transform-async-arrows-in-class"],
  "transform-parameters": ["bugfix/transform-edge-default-parameters", "bugfix/transform-safari-id-destructuring-collision-in-function-expression"],
  "transform-function-name": ["bugfix/transform-edge-function-name"],
  "transform-block-scoping": ["bugfix/transform-safari-block-shadowing", "bugfix/transform-safari-for-shadowing"],
  "transform-template-literals": ["bugfix/transform-tagged-template-caching"],
  "transform-optional-chaining": ["bugfix/transform-v8-spread-parameters-in-optional-chaining"],
  "proposal-optional-chaining": ["bugfix/transform-v8-spread-parameters-in-optional-chaining"],
  "transform-class-properties": ["bugfix/transform-v8-static-class-fields-redefine-readonly", "bugfix/transform-firefox-class-in-computed-class-key", "bugfix/transform-safari-class-field-initializer-scope"],
  "proposal-class-properties": ["bugfix/transform-v8-static-class-fields-redefine-readonly", "bugfix/transform-firefox-class-in-computed-class-key", "bugfix/transform-safari-class-field-initializer-scope"]
}
213 node_modules/@babel/compat-data/data/plugin-bugfixes.json generated vendored Normal file
@@ -0,0 +1,213 @@
{
  "bugfix/transform-async-arrows-in-class": { "chrome": "55", "opera": "42", "edge": "15", "firefox": "52", "safari": "11", "node": "7.6", "deno": "1", "ios": "11", "samsung": "6", "opera_mobile": "42", "electron": "1.6" },
  "bugfix/transform-edge-default-parameters": { "chrome": "49", "opera": "36", "edge": "18", "firefox": "52", "safari": "10", "node": "6", "deno": "1", "ios": "10", "samsung": "5", "opera_mobile": "36", "electron": "0.37" },
  "bugfix/transform-edge-function-name": { "chrome": "51", "opera": "38", "edge": "79", "firefox": "53", "safari": "10", "node": "6.5", "deno": "1", "ios": "10", "samsung": "5", "opera_mobile": "41", "electron": "1.2" },
  "bugfix/transform-safari-block-shadowing": { "chrome": "49", "opera": "36", "edge": "12", "firefox": "44", "safari": "11", "node": "6", "deno": "1", "ie": "11", "ios": "11", "samsung": "5", "opera_mobile": "36", "electron": "0.37" },
  "bugfix/transform-safari-for-shadowing": { "chrome": "49", "opera": "36", "edge": "12", "firefox": "4", "safari": "11", "node": "6", "deno": "1", "ie": "11", "ios": "11", "samsung": "5", "rhino": "1.7.13", "opera_mobile": "36", "electron": "0.37" },
  "bugfix/transform-safari-id-destructuring-collision-in-function-expression": { "chrome": "49", "opera": "36", "edge": "14", "firefox": "2", "safari": "16.3", "node": "6", "deno": "1", "ios": "16.3", "samsung": "5", "opera_mobile": "36", "electron": "0.37" },
  "bugfix/transform-tagged-template-caching": { "chrome": "41", "opera": "28", "edge": "12", "firefox": "34", "safari": "13", "node": "4", "deno": "1", "ios": "13", "samsung": "3.4", "rhino": "1.7.14", "opera_mobile": "28", "electron": "0.21" },
  "bugfix/transform-v8-spread-parameters-in-optional-chaining": { "chrome": "91", "opera": "77", "edge": "91", "firefox": "74", "safari": "13.1", "node": "16.9", "deno": "1.9", "ios": "13.4", "samsung": "16", "opera_mobile": "64", "electron": "13.0" },
  "bugfix/transform-firefox-class-in-computed-class-key": { "chrome": "74", "opera": "62", "edge": "79", "safari": "16", "node": "12", "deno": "1", "ios": "16", "samsung": "11", "opera_mobile": "53", "electron": "6.0" },
  "transform-optional-chaining": { "chrome": "80", "opera": "67", "edge": "80", "firefox": "74", "safari": "13.1", "node": "14", "deno": "1", "ios": "13.4", "samsung": "13", "opera_mobile": "57", "electron": "8.0" },
  "proposal-optional-chaining": { "chrome": "80", "opera": "67", "edge": "80", "firefox": "74", "safari": "13.1", "node": "14", "deno": "1", "ios": "13.4", "samsung": "13", "opera_mobile": "57", "electron": "8.0" },
  "transform-parameters": { "chrome": "49", "opera": "36", "edge": "15", "firefox": "53", "safari": "10", "node": "6", "deno": "1", "ios": "10", "samsung": "5", "opera_mobile": "36", "electron": "0.37" },
  "transform-async-to-generator": { "chrome": "55", "opera": "42", "edge": "15", "firefox": "52", "safari": "10.1", "node": "7.6", "deno": "1", "ios": "10.3", "samsung": "6", "opera_mobile": "42", "electron": "1.6" },
  "transform-template-literals": { "chrome": "41", "opera": "28", "edge": "13", "firefox": "34", "safari": "9", "node": "4", "deno": "1", "ios": "9", "samsung": "3.4", "opera_mobile": "28", "electron": "0.21" },
  "transform-function-name": { "chrome": "51", "opera": "38", "edge": "14", "firefox": "53", "safari": "10", "node": "6.5", "deno": "1", "ios": "10", "samsung": "5", "opera_mobile": "41", "electron": "1.2" },
  "transform-block-scoping": { "chrome": "50", "opera": "37", "edge": "14", "firefox": "53", "safari": "10", "node": "6", "deno": "1", "ios": "10", "samsung": "5", "opera_mobile": "37", "electron": "1.1" }
}
824
node_modules/@babel/compat-data/data/plugins.json
generated
vendored
Normal file
824
node_modules/@babel/compat-data/data/plugins.json
generated
vendored
Normal file
|
@ -0,0 +1,824 @@
|
|||
{
|
||||
"transform-duplicate-named-capturing-groups-regex": {
|
||||
"chrome": "126",
|
||||
"opera": "112",
|
||||
"edge": "126",
|
||||
"firefox": "129",
|
||||
"safari": "17.4",
|
||||
"node": "23",
|
||||
"ios": "17.4",
|
||||
"electron": "31.0"
|
||||
},
|
||||
"transform-regexp-modifiers": {
|
||||
"chrome": "125",
|
||||
"opera": "111",
|
||||
"edge": "125",
|
||||
"firefox": "132",
|
||||
"node": "23",
|
||||
"electron": "31.0"
|
||||
},
|
||||
"transform-unicode-sets-regex": {
|
||||
"chrome": "112",
|
||||
"opera": "98",
|
||||
"edge": "112",
|
||||
"firefox": "116",
|
||||
"safari": "17",
|
||||
"node": "20",
|
||||
"deno": "1.32",
|
||||
"ios": "17",
|
||||
"opera_mobile": "75",
|
||||
"electron": "24.0"
|
||||
},
|
||||
"bugfix/transform-v8-static-class-fields-redefine-readonly": {
|
||||
"chrome": "98",
|
||||
"opera": "84",
|
||||
"edge": "98",
|
||||
"firefox": "75",
|
||||
"safari": "15",
|
||||
"node": "12",
|
||||
"deno": "1.18",
|
||||
"ios": "15",
|
||||
"samsung": "11",
|
||||
"opera_mobile": "52",
|
||||
"electron": "17.0"
|
||||
},
|
||||
"bugfix/transform-firefox-class-in-computed-class-key": {
|
||||
"chrome": "74",
|
||||
"opera": "62",
|
||||
"edge": "79",
|
||||
"safari": "16",
|
||||
"node": "12",
|
||||
"deno": "1",
|
||||
"ios": "16",
|
||||
"samsung": "11",
|
||||
"opera_mobile": "53",
|
||||
"electron": "6.0"
|
||||
},
|
||||
"bugfix/transform-safari-class-field-initializer-scope": {
|
||||
"chrome": "74",
|
||||
"opera": "62",
|
||||
"edge": "79",
|
||||
"firefox": "69",
|
||||
"safari": "16",
|
||||
"node": "12",
|
||||
"deno": "1",
|
||||
"ios": "16",
|
||||
"samsung": "11",
|
||||
"opera_mobile": "53",
|
||||
"electron": "6.0"
|
||||
},
|
||||
"transform-class-static-block": {
|
||||
"chrome": "94",
|
||||
"opera": "80",
|
||||
"edge": "94",
|
||||
"firefox": "93",
|
||||
"safari": "16.4",
|
||||
"node": "16.11",
|
||||
"deno": "1.14",
|
||||
"ios": "16.4",
|
||||
"samsung": "17",
|
||||
"opera_mobile": "66",
|
||||
"electron": "15.0"
|
||||
},
|
||||
"proposal-class-static-block": {
|
||||
"chrome": "94",
|
||||
"opera": "80",
|
||||
"edge": "94",
|
||||
"firefox": "93",
|
||||
"safari": "16.4",
|
||||
"node": "16.11",
|
||||
"deno": "1.14",
|
||||
"ios": "16.4",
|
||||
"samsung": "17",
|
||||
"opera_mobile": "66",
|
||||
"electron": "15.0"
|
||||
},
|
||||
"transform-private-property-in-object": {
|
||||
"chrome": "91",
|
||||
"opera": "77",
|
||||
"edge": "91",
|
||||
"firefox": "90",
|
||||
"safari": "15",
|
||||
"node": "16.9",
|
||||
"deno": "1.9",
|
||||
"ios": "15",
|
||||
"samsung": "16",
|
||||
"opera_mobile": "64",
|
||||
"electron": "13.0"
|
||||
},
|
||||
"proposal-private-property-in-object": {
|
||||
"chrome": "91",
|
||||
"opera": "77",
|
||||
"edge": "91",
|
||||
"firefox": "90",
|
||||
"safari": "15",
|
||||
"node": "16.9",
|
||||
"deno": "1.9",
|
||||
"ios": "15",
|
||||
"samsung": "16",
|
||||
"opera_mobile": "64",
|
||||
"electron": "13.0"
|
||||
},
|
||||
"transform-class-properties": {
|
||||
"chrome": "74",
|
||||
"opera": "62",
|
||||
"edge": "79",
|
||||
"firefox": "90",
|
||||
"safari": "14.1",
|
||||
"node": "12",
|
||||
"deno": "1",
|
||||
"ios": "14.5",
|
||||
"samsung": "11",
|
||||
"opera_mobile": "53",
|
||||
"electron": "6.0"
|
||||
},
|
||||
"proposal-class-properties": {
|
||||
"chrome": "74",
|
||||
"opera": "62",
|
||||
"edge": "79",
|
||||
"firefox": "90",
|
||||
"safari": "14.1",
|
||||
"node": "12",
|
||||
"deno": "1",
|
||||
"ios": "14.5",
|
||||
"samsung": "11",
|
||||
"opera_mobile": "53",
|
||||
"electron": "6.0"
|
||||
},
|
||||
"transform-private-methods": {
|
||||
"chrome": "84",
|
||||
"opera": "70",
|
||||
"edge": "84",
|
||||
"firefox": "90",
|
||||
"safari": "15",
|
||||
"node": "14.6",
|
||||
"deno": "1",
|
||||
"ios": "15",
|
||||
"samsung": "14",
|
||||
"opera_mobile": "60",
|
||||
"electron": "10.0"
|
||||
},
|
||||
"proposal-private-methods": {
|
||||
"chrome": "84",
|
||||
"opera": "70",
|
||||
"edge": "84",
|
||||
"firefox": "90",
|
||||
"safari": "15",
|
||||
"node": "14.6",
|
||||
"deno": "1",
|
||||
"ios": "15",
|
||||
"samsung": "14",
|
||||
"opera_mobile": "60",
|
||||
"electron": "10.0"
|
||||
},
|
||||
"transform-numeric-separator": {
|
||||
"chrome": "75",
|
||||
"opera": "62",
|
||||
"edge": "79",
|
||||
"firefox": "70",
|
||||
"safari": "13",
|
||||
"node": "12.5",
|
||||
"deno": "1",
|
||||
"ios": "13",
|
||||
"samsung": "11",
|
||||
"rhino": "1.7.14",
|
||||
"opera_mobile": "54",
|
||||
"electron": "6.0"
|
||||
},
|
||||
"proposal-numeric-separator": {
|
||||
"chrome": "75",
|
||||
"opera": "62",
|
||||
"edge": "79",
|
||||
"firefox": "70",
|
||||
"safari": "13",
|
||||
"node": "12.5",
|
||||
"deno": "1",
|
||||
"ios": "13",
|
||||
"samsung": "11",
|
||||
"rhino": "1.7.14",
|
||||
"opera_mobile": "54",
|
||||
"electron": "6.0"
|
||||
},
|
||||
"transform-logical-assignment-operators": {
|
||||
"chrome": "85",
|
||||
"opera": "71",
|
||||
"edge": "85",
|
||||
"firefox": "79",
|
||||
"safari": "14",
|
||||
"node": "15",
|
||||
"deno": "1.2",
|
||||
"ios": "14",
|
||||
"samsung": "14",
|
||||
"opera_mobile": "60",
|
||||
"electron": "10.0"
|
||||
},
|
||||
"proposal-logical-assignment-operators": {
|
||||
"chrome": "85",
|
||||
"opera": "71",
|
||||
"edge": "85",
|
||||
"firefox": "79",
|
||||
"safari": "14",
|
||||
"node": "15",
|
||||
"deno": "1.2",
|
||||
"ios": "14",
|
||||
"samsung": "14",
|
||||
"opera_mobile": "60",
|
||||
"electron": "10.0"
|
||||
},
|
||||
"transform-nullish-coalescing-operator": {
|
||||
"chrome": "80",
|
||||
"opera": "67",
|
||||
"edge": "80",
|
||||
"firefox": "72",
|
||||
"safari": "13.1",
|
||||
"node": "14",
|
||||
"deno": "1",
|
||||
"ios": "13.4",
|
||||
"samsung": "13",
|
||||
"opera_mobile": "57",
|
||||
"electron": "8.0"
|
||||
},
|
||||
"proposal-nullish-coalescing-operator": {
|
||||
"chrome": "80",
|
||||
"opera": "67",
|
||||
"edge": "80",
|
||||
"firefox": "72",
|
||||
"safari": "13.1",
|
||||
"node": "14",
|
||||
"deno": "1",
|
||||
"ios": "13.4",
|
||||
"samsung": "13",
|
||||
"opera_mobile": "57",
|
||||
"electron": "8.0"
|
||||
},
|
||||
"transform-optional-chaining": {
|
||||
"chrome": "91",
|
||||
"opera": "77",
|
||||
"edge": "91",
|
||||
"firefox": "74",
|
||||
"safari": "13.1",
|
||||
"node": "16.9",
|
||||
"deno": "1.9",
|
||||
"ios": "13.4",
|
||||
"samsung": "16",
|
||||
"opera_mobile": "64",
|
||||
"electron": "13.0"
|
||||
},
|
||||
"proposal-optional-chaining": {
|
||||
"chrome": "91",
|
||||
"opera": "77",
|
||||
"edge": "91",
|
||||
"firefox": "74",
|
||||
"safari": "13.1",
|
||||
"node": "16.9",
|
||||
"deno": "1.9",
|
||||
"ios": "13.4",
|
||||
"samsung": "16",
|
||||
"opera_mobile": "64",
|
||||
"electron": "13.0"
|
||||
},
|
||||
"transform-json-strings": {
|
||||
"chrome": "66",
|
||||
"opera": "53",
|
||||
"edge": "79",
|
||||
"firefox": "62",
|
||||
"safari": "12",
|
||||
"node": "10",
|
||||
"deno": "1",
|
||||
"ios": "12",
|
||||
"samsung": "9",
|
||||
"rhino": "1.7.14",
|
||||
"opera_mobile": "47",
|
||||
"electron": "3.0"
|
||||
},
|
||||
"proposal-json-strings": {
|
||||
"chrome": "66",
|
||||
"opera": "53",
|
||||
"edge": "79",
|
||||
"firefox": "62",
|
||||
"safari": "12",
|
||||
"node": "10",
|
||||
"deno": "1",
|
||||
"ios": "12",
|
||||
"samsung": "9",
|
||||
"rhino": "1.7.14",
|
||||
"opera_mobile": "47",
|
||||
"electron": "3.0"
|
||||
},
|
||||
"transform-optional-catch-binding": {
|
||||
"chrome": "66",
|
||||
"opera": "53",
|
||||
"edge": "79",
|
||||
"firefox": "58",
|
||||
"safari": "11.1",
|
||||
"node": "10",
|
||||
"deno": "1",
|
||||
"ios": "11.3",
|
||||
"samsung": "9",
|
||||
"opera_mobile": "47",
|
||||
"electron": "3.0"
|
||||
},
|
||||
"proposal-optional-catch-binding": {
|
||||
"chrome": "66",
|
||||
"opera": "53",
|
||||
"edge": "79",
|
||||
"firefox": "58",
|
||||
"safari": "11.1",
|
||||
"node": "10",
|
||||
"deno": "1",
|
||||
"ios": "11.3",
|
||||
"samsung": "9",
|
||||
"opera_mobile": "47",
|
||||
"electron": "3.0"
|
||||
},
|
||||
"transform-parameters": {
|
||||
"chrome": "49",
|
||||
"opera": "36",
|
||||
"edge": "18",
|
||||
"firefox": "53",
|
||||
"safari": "16.3",
|
||||
"node": "6",
|
||||
"deno": "1",
|
||||
"ios": "16.3",
|
||||
"samsung": "5",
|
||||
"opera_mobile": "36",
|
||||
"electron": "0.37"
|
||||
},
|
||||
"transform-async-generator-functions": {
|
||||
"chrome": "63",
|
||||
"opera": "50",
|
||||
"edge": "79",
|
||||
"firefox": "57",
|
||||
"safari": "12",
|
||||
"node": "10",
|
||||
"deno": "1",
|
||||
"ios": "12",
|
||||
"samsung": "8",
|
||||
"opera_mobile": "46",
|
||||
"electron": "3.0"
|
||||
},
|
||||
"proposal-async-generator-functions": {
|
||||
"chrome": "63",
|
||||
"opera": "50",
|
||||
"edge": "79",
|
||||
"firefox": "57",
|
||||
"safari": "12",
|
||||
"node": "10",
|
||||
"deno": "1",
|
||||
"ios": "12",
|
||||
"samsung": "8",
|
||||
"opera_mobile": "46",
|
||||
"electron": "3.0"
|
||||
},
|
||||
"transform-object-rest-spread": {
|
||||
"chrome": "60",
|
||||
"opera": "47",
|
||||
"edge": "79",
|
||||
"firefox": "55",
|
||||
"safari": "11.1",
|
||||
"node": "8.3",
|
||||
"deno": "1",
|
||||
"ios": "11.3",
|
||||
"samsung": "8",
|
||||
"opera_mobile": "44",
|
||||
"electron": "2.0"
|
||||
},
|
||||
"proposal-object-rest-spread": {
|
||||
"chrome": "60",
|
||||
"opera": "47",
|
||||
"edge": "79",
|
||||
"firefox": "55",
|
||||
"safari": "11.1",
|
||||
"node": "8.3",
|
||||
"deno": "1",
|
||||
"ios": "11.3",
|
||||
"samsung": "8",
|
||||
"opera_mobile": "44",
|
||||
"electron": "2.0"
|
||||
},
|
||||
"transform-dotall-regex": {
|
||||
"chrome": "62",
|
||||
"opera": "49",
|
||||
"edge": "79",
|
||||
"firefox": "78",
|
||||
"safari": "11.1",
|
||||
"node": "8.10",
|
||||
"deno": "1",
|
||||
"ios": "11.3",
|
||||
"samsung": "8",
|
||||
"rhino": "1.7.15",
|
||||
"opera_mobile": "46",
|
||||
"electron": "3.0"
|
||||
},
|
||||
"transform-unicode-property-regex": {
|
||||
"chrome": "64",
|
||||
"opera": "51",
|
||||
"edge": "79",
|
||||
"firefox": "78",
|
||||
"safari": "11.1",
|
||||
"node": "10",
|
||||
"deno": "1",
|
||||
"ios": "11.3",
|
||||
"samsung": "9",
|
||||
"opera_mobile": "47",
|
||||
"electron": "3.0"
|
||||
},
|
||||
"proposal-unicode-property-regex": {
|
||||
"chrome": "64",
|
||||
"opera": "51",
|
||||
"edge": "79",
|
||||
"firefox": "78",
|
||||
"safari": "11.1",
|
||||
"node": "10",
|
||||
"deno": "1",
|
||||
"ios": "11.3",
|
||||
"samsung": "9",
|
||||
"opera_mobile": "47",
|
||||
"electron": "3.0"
|
||||
},
|
||||
"transform-named-capturing-groups-regex": {
|
||||
"chrome": "64",
|
||||
"opera": "51",
|
||||
"edge": "79",
|
||||
"firefox": "78",
|
||||
"safari": "11.1",
|
||||
"node": "10",
|
||||
"deno": "1",
|
||||
"ios": "11.3",
|
||||
"samsung": "9",
|
||||
"opera_mobile": "47",
|
||||
"electron": "3.0"
|
||||
},
|
||||
"transform-async-to-generator": {
|
||||
"chrome": "55",
|
||||
"opera": "42",
|
||||
"edge": "15",
|
||||
"firefox": "52",
|
||||
"safari": "11",
|
||||
"node": "7.6",
|
||||
"deno": "1",
|
||||
"ios": "11",
|
||||
"samsung": "6",
|
||||
"opera_mobile": "42",
|
||||
"electron": "1.6"
|
||||
},
|
||||
"transform-exponentiation-operator": {
|
||||
"chrome": "52",
|
||||
"opera": "39",
|
||||
"edge": "14",
|
||||
"firefox": "52",
|
||||
"safari": "10.1",
|
||||
"node": "7",
|
||||
"deno": "1",
|
||||
"ios": "10.3",
|
||||
"samsung": "6",
|
||||
"rhino": "1.7.14",
|
||||
"opera_mobile": "41",
|
||||
"electron": "1.3"
|
||||
},
|
||||
"transform-template-literals": {
|
||||
"chrome": "41",
|
||||
"opera": "28",
|
||||
"edge": "13",
|
||||
"firefox": "34",
|
||||
"safari": "13",
|
||||
"node": "4",
|
||||
"deno": "1",
|
||||
"ios": "13",
|
||||
"samsung": "3.4",
|
||||
"opera_mobile": "28",
|
||||
"electron": "0.21"
|
||||
},
|
||||
"transform-literals": {
|
||||
"chrome": "44",
|
||||
"opera": "31",
|
||||
"edge": "12",
|
||||
"firefox": "53",
|
||||
"safari": "9",
|
||||
"node": "4",
|
||||
"deno": "1",
|
||||
"ios": "9",
|
||||
"samsung": "4",
|
||||
"rhino": "1.7.15",
|
||||
"opera_mobile": "32",
|
||||
"electron": "0.30"
|
||||
},
|
||||
"transform-function-name": {
|
||||
"chrome": "51",
|
||||
"opera": "38",
|
||||
"edge": "79",
|
||||
"firefox": "53",
|
||||
"safari": "10",
|
||||
"node": "6.5",
|
||||
"deno": "1",
|
||||
"ios": "10",
|
||||
"samsung": "5",
|
||||
"opera_mobile": "41",
|
||||
"electron": "1.2"
|
||||
},
|
||||
"transform-arrow-functions": {
|
||||
"chrome": "47",
|
||||
"opera": "34",
|
||||
"edge": "13",
|
||||
"firefox": "43",
|
||||
"safari": "10",
|
||||
"node": "6",
|
||||
"deno": "1",
|
||||
"ios": "10",
|
||||
"samsung": "5",
|
||||
"rhino": "1.7.13",
|
||||
"opera_mobile": "34",
|
||||
"electron": "0.36"
|
||||
},
|
||||
"transform-block-scoped-functions": {
|
||||
"chrome": "41",
|
||||
"opera": "28",
|
||||
"edge": "12",
|
||||
"firefox": "46",
|
||||
"safari": "10",
|
||||
"node": "4",
|
||||
"deno": "1",
|
||||
"ie": "11",
|
||||
"ios": "10",
|
||||
"samsung": "3.4",
|
||||
"opera_mobile": "28",
|
||||
"electron": "0.21"
|
||||
},
|
||||
"transform-classes": {
|
||||
"chrome": "46",
|
||||
"opera": "33",
|
||||
"edge": "13",
|
||||
"firefox": "45",
|
||||
"safari": "10",
|
||||
"node": "5",
|
||||
"deno": "1",
|
||||
"ios": "10",
|
||||
"samsung": "5",
|
||||
"opera_mobile": "33",
|
||||
"electron": "0.36"
|
||||
},
|
||||
"transform-object-super": {
|
||||
"chrome": "46",
|
||||
"opera": "33",
|
||||
"edge": "13",
|
||||
"firefox": "45",
|
||||
"safari": "10",
|
||||
"node": "5",
|
||||
"deno": "1",
|
||||
"ios": "10",
|
||||
"samsung": "5",
|
||||
"opera_mobile": "33",
|
||||
"electron": "0.36"
|
||||
},
|
||||
"transform-shorthand-properties": {
|
||||
"chrome": "43",
|
||||
"opera": "30",
|
||||
"edge": "12",
|
||||
"firefox": "33",
|
||||
"safari": "9",
|
||||
"node": "4",
|
||||
"deno": "1",
|
||||
"ios": "9",
|
||||
"samsung": "4",
|
||||
"rhino": "1.7.14",
|
||||
"opera_mobile": "30",
|
||||
"electron": "0.27"
|
||||
},
|
||||
"transform-duplicate-keys": {
|
||||
"chrome": "42",
|
||||
"opera": "29",
|
||||
"edge": "12",
|
||||
"firefox": "34",
|
||||
"safari": "9",
|
||||
"node": "4",
|
||||
"deno": "1",
|
||||
"ios": "9",
|
||||
"samsung": "3.4",
|
||||
"opera_mobile": "29",
|
||||
"electron": "0.25"
|
||||
},
|
||||
"transform-computed-properties": {
|
||||
"chrome": "44",
|
||||
"opera": "31",
|
||||
"edge": "12",
|
||||
"firefox": "34",
|
||||
"safari": "7.1",
|
||||
"node": "4",
|
||||
"deno": "1",
|
||||
"ios": "8",
|
||||
"samsung": "4",
|
||||
"opera_mobile": "32",
|
||||
"electron": "0.30"
|
||||
},
|
||||
"transform-for-of": {
|
||||
"chrome": "51",
|
||||
"opera": "38",
|
||||
"edge": "15",
|
||||
"firefox": "53",
|
||||
"safari": "10",
|
||||
"node": "6.5",
|
||||
"deno": "1",
|
||||
"ios": "10",
|
||||
"samsung": "5",
|
||||
"opera_mobile": "41",
|
||||
"electron": "1.2"
|
||||
},
|
||||
"transform-sticky-regex": {
|
||||
"chrome": "49",
|
||||
"opera": "36",
|
||||
"edge": "13",
|
||||
"firefox": "3",
|
||||
"safari": "10",
|
||||
"node": "6",
|
||||
"deno": "1",
|
||||
"ios": "10",
|
||||
"samsung": "5",
|
||||
"rhino": "1.7.15",
|
||||
"opera_mobile": "36",
|
||||
"electron": "0.37"
|
||||
},
|
||||
"transform-unicode-escapes": {
|
||||
"chrome": "44",
|
||||
"opera": "31",
|
||||
"edge": "12",
|
||||
"firefox": "53",
|
||||
"safari": "9",
|
||||
"node": "4",
|
||||
"deno": "1",
|
||||
"ios": "9",
|
||||
"samsung": "4",
|
||||
"rhino": "1.7.15",
|
||||
"opera_mobile": "32",
|
||||
"electron": "0.30"
|
||||
},
|
||||
"transform-unicode-regex": {
|
||||
"chrome": "50",
|
||||
"opera": "37",
|
||||
"edge": "13",
|
||||
"firefox": "46",
|
||||
"safari": "12",
|
||||
"node": "6",
|
||||
"deno": "1",
|
||||
"ios": "12",
|
||||
"samsung": "5",
|
||||
"opera_mobile": "37",
|
||||
"electron": "1.1"
|
||||
},
|
||||
"transform-spread": {
|
||||
"chrome": "46",
|
||||
"opera": "33",
|
||||
"edge": "13",
|
||||
"firefox": "45",
|
||||
"safari": "10",
|
||||
"node": "5",
|
||||
"deno": "1",
|
||||
"ios": "10",
|
||||
"samsung": "5",
|
||||
"opera_mobile": "33",
|
||||
"electron": "0.36"
|
||||
},
|
||||
"transform-destructuring": {
|
||||
"chrome": "51",
|
||||
"opera": "38",
|
||||
"edge": "15",
|
||||
"firefox": "53",
|
||||
"safari": "10",
|
||||
"node": "6.5",
|
||||
"deno": "1",
|
||||
"ios": "10",
|
||||
"samsung": "5",
|
||||
"opera_mobile": "41",
|
||||
"electron": "1.2"
|
||||
},
|
||||
"transform-block-scoping": {
|
||||
"chrome": "50",
|
||||
"opera": "37",
|
||||
"edge": "14",
|
||||
"firefox": "53",
|
||||
"safari": "11",
|
||||
"node": "6",
|
||||
"deno": "1",
|
||||
"ios": "11",
|
||||
"samsung": "5",
|
||||
"opera_mobile": "37",
|
||||
"electron": "1.1"
|
||||
},
|
||||
"transform-typeof-symbol": {
|
||||
"chrome": "48",
|
||||
"opera": "35",
|
||||
"edge": "12",
|
||||
"firefox": "36",
|
||||
"safari": "9",
|
||||
"node": "6",
|
||||
"deno": "1",
|
||||
"ios": "9",
|
||||
"samsung": "5",
|
||||
"opera_mobile": "35",
|
||||
"electron": "0.37"
|
||||
},
|
||||
"transform-new-target": {
|
||||
"chrome": "46",
|
||||
"opera": "33",
|
||||
"edge": "14",
|
||||
"firefox": "41",
|
||||
"safari": "10",
|
||||
"node": "5",
|
||||
"deno": "1",
|
||||
"ios": "10",
|
||||
"samsung": "5",
|
||||
"opera_mobile": "33",
|
||||
"electron": "0.36"
|
||||
},
|
||||
"transform-regenerator": {
|
||||
"chrome": "50",
|
||||
"opera": "37",
|
||||
"edge": "13",
|
||||
"firefox": "53",
|
||||
"safari": "10",
|
||||
"node": "6",
|
||||
"deno": "1",
|
||||
"ios": "10",
|
||||
"samsung": "5",
|
||||
"opera_mobile": "37",
|
||||
"electron": "1.1"
|
||||
},
|
||||
"transform-member-expression-literals": {
|
||||
"chrome": "7",
|
||||
"opera": "12",
|
||||
"edge": "12",
|
||||
"firefox": "2",
|
||||
"safari": "5.1",
|
||||
"node": "0.4",
|
||||
"deno": "1",
|
||||
"ie": "9",
|
||||
"android": "4",
|
||||
"ios": "6",
|
||||
"phantom": "1.9",
|
||||
"samsung": "1",
|
||||
"rhino": "1.7.13",
|
||||
"opera_mobile": "12",
|
||||
"electron": "0.20"
|
||||
},
|
||||
"transform-property-literals": {
|
||||
"chrome": "7",
|
||||
"opera": "12",
|
||||
"edge": "12",
|
||||
"firefox": "2",
|
||||
"safari": "5.1",
|
||||
"node": "0.4",
|
||||
"deno": "1",
|
||||
"ie": "9",
|
||||
"android": "4",
|
||||
"ios": "6",
|
||||
"phantom": "1.9",
|
||||
"samsung": "1",
|
||||
"rhino": "1.7.13",
|
||||
"opera_mobile": "12",
|
||||
"electron": "0.20"
|
||||
},
|
||||
"transform-reserved-words": {
|
||||
"chrome": "13",
|
||||
"opera": "10.50",
|
||||
"edge": "12",
|
||||
"firefox": "2",
|
||||
"safari": "3.1",
|
||||
"node": "0.6",
|
||||
"deno": "1",
|
||||
"ie": "9",
|
||||
"android": "4.4",
|
||||
"ios": "6",
|
||||
"phantom": "1.9",
|
||||
"samsung": "1",
|
||||
"rhino": "1.7.13",
|
||||
"opera_mobile": "10.1",
|
||||
"electron": "0.20"
|
||||
},
|
||||
"transform-export-namespace-from": {
|
||||
"chrome": "72",
|
||||
"deno": "1.0",
|
||||
"edge": "79",
|
||||
"firefox": "80",
|
||||
"node": "13.2",
|
||||
"opera": "60",
|
||||
"opera_mobile": "51",
|
||||
"safari": "14.1",
|
||||
"ios": "14.5",
|
||||
"samsung": "11.0",
|
||||
"android": "72",
|
||||
"electron": "5.0"
|
||||
},
|
||||
"proposal-export-namespace-from": {
|
||||
"chrome": "72",
|
||||
"deno": "1.0",
|
||||
"edge": "79",
|
||||
"firefox": "80",
|
||||
"node": "13.2",
|
||||
"opera": "60",
|
||||
"opera_mobile": "51",
|
||||
"safari": "14.1",
|
||||
"ios": "14.5",
|
||||
"samsung": "11.0",
|
||||
"android": "72",
|
||||
"electron": "5.0"
|
||||
}
|
||||
}
|
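The table above maps each transform to the first version of every environment that supports the feature natively. A minimal sketch of how such a table can be consumed (not @babel/preset-env's actual algorithm; `parseFloat` is a deliberate simplification that handles "47" and "6.5"-style versions but is not a full semver comparison):

```js
// minSupport mirrors one entry from the table above.
const minSupport = {
  "transform-arrow-functions": { chrome: "47", node: "6", safari: "10" },
};

function isRequired(plugin, targets) {
  const support = minSupport[plugin];
  return Object.entries(targets).some(([env, version]) => {
    const min = support[env];
    // Unknown environments conservatively require the transform.
    return min === undefined || parseFloat(version) < parseFloat(min);
  });
}

console.log(isRequired("transform-arrow-functions", { chrome: "46" }));             // true
console.log(isRequired("transform-arrow-functions", { chrome: "80", node: "18" })); // false
```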
1
node_modules/@babel/compat-data/native-modules.js
generated
vendored
Normal file
@@ -0,0 +1 @@
module.exports = require("./data/native-modules.json");
1
node_modules/@babel/compat-data/overlapping-plugins.js
generated
vendored
Normal file
@@ -0,0 +1 @@
module.exports = require("./data/overlapping-plugins.json");
40
node_modules/@babel/compat-data/package.json
generated
vendored
Normal file
@@ -0,0 +1,40 @@
{
  "name": "@babel/compat-data",
  "version": "7.26.8",
  "author": "The Babel Team (https://babel.dev/team)",
  "license": "MIT",
  "description": "The compat-data to determine required Babel plugins",
  "repository": {
    "type": "git",
    "url": "https://github.com/babel/babel.git",
    "directory": "packages/babel-compat-data"
  },
  "publishConfig": {
    "access": "public"
  },
  "exports": {
    "./plugins": "./plugins.js",
    "./native-modules": "./native-modules.js",
    "./corejs2-built-ins": "./corejs2-built-ins.js",
    "./corejs3-shipped-proposals": "./corejs3-shipped-proposals.js",
    "./overlapping-plugins": "./overlapping-plugins.js",
    "./plugin-bugfixes": "./plugin-bugfixes.js"
  },
  "scripts": {
    "build-data": "./scripts/download-compat-table.sh && node ./scripts/build-data.js && node ./scripts/build-modules-support.js && node ./scripts/build-bugfixes-targets.js"
  },
  "keywords": [
    "babel",
    "compat-table",
    "compat-data"
  ],
  "devDependencies": {
    "@mdn/browser-compat-data": "^5.5.36",
    "core-js-compat": "^3.40.0",
    "electron-to-chromium": "^1.4.816"
  },
  "engines": {
    "node": ">=6.9.0"
  },
  "type": "commonjs"
}
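Given the `exports` map above, only the listed subpaths are importable; a sketch of typical consumption under `"type": "commonjs"`:

```js
// Each subpath re-exports one of the JSON data files shown in this commit.
const plugins = require("@babel/compat-data/plugins");
console.log(plugins["transform-arrow-functions"].chrome); // "47"
```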
1
node_modules/@babel/compat-data/plugin-bugfixes.js
generated
vendored
Normal file
@@ -0,0 +1 @@
module.exports = require("./data/plugin-bugfixes.json");
1
node_modules/@babel/compat-data/plugins.js
generated
vendored
Normal file
@@ -0,0 +1 @@
module.exports = require("./data/plugins.json");
22
node_modules/@babel/core/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,22 @@
MIT License

Copyright (c) 2014-present Sebastian McKenzie and other contributors

Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:

The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
19
node_modules/@babel/core/README.md
generated
vendored
Normal file
@@ -0,0 +1,19 @@
# @babel/core

> Babel compiler core.

See our website [@babel/core](https://babeljs.io/docs/babel-core) for more information or the [issues](https://github.com/babel/babel/issues?utf8=%E2%9C%93&q=is%3Aissue+label%3A%22pkg%3A%20core%22+is%3Aopen) associated with this package.

## Install

Using npm:

```sh
npm install --save-dev @babel/core
```

or using yarn:

```sh
yarn add @babel/core --dev
```
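The README stops at installation. A minimal usage sketch; `transformSync` is one of the entry points listed in cjs-proxy.cjs below, while `@babel/preset-env` is assumed to be installed separately and is not part of this package:

```js
const { transformSync } = require("@babel/core");

// Compile an arrow function down for an older target.
const { code } = transformSync("const double = (x) => x * 2;", {
  presets: [["@babel/preset-env", { targets: { ie: "11" } }]],
  babelrc: false, // ignore on-disk config for this sketch
  configFile: false,
});
console.log(code); // roughly: var double = function double(x) { return x * 2; };
```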
68
node_modules/@babel/core/cjs-proxy.cjs
generated
vendored
Normal file
@@ -0,0 +1,68 @@
"use strict";

const babelP = import("./lib/index.js");
let babel = null;
Object.defineProperty(exports, "__ initialize @babel/core cjs proxy __", {
  set(val) {
    babel = val;
  },
});

exports.version = require("./package.json").version;

const functionNames = [
  "createConfigItem",
  "loadPartialConfig",
  "loadOptions",
  "transform",
  "transformFile",
  "transformFromAst",
  "parse",
];
const propertyNames = [
  "buildExternalHelpers",
  "types",
  "tokTypes",
  "traverse",
  "template",
];

for (const name of functionNames) {
  exports[name] = function (...args) {
    if (
      process.env.BABEL_8_BREAKING &&
      typeof args[args.length - 1] !== "function"
    ) {
      throw new Error(
        `Starting from Babel 8.0.0, the '${name}' function expects a callback. If you need to call it synchronously, please use '${name}Sync'.`
      );
    }

    babelP.then(babel => {
      babel[name](...args);
    });
  };
  exports[`${name}Async`] = function (...args) {
    return babelP.then(babel => babel[`${name}Async`](...args));
  };
  exports[`${name}Sync`] = function (...args) {
    if (!babel) throw notLoadedError(`${name}Sync`, "callable");
    return babel[`${name}Sync`](...args);
  };
}

for (const name of propertyNames) {
  Object.defineProperty(exports, name, {
    get() {
      if (!babel) throw notLoadedError(name, "accessible");
      return babel[name];
    },
  });
}

function notLoadedError(name, keyword) {
  return new Error(
    `The \`${name}\` export of @babel/core is only ${keyword}` +
      ` from the CommonJS version after that the ESM version is loaded.`
  );
}
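The proxy above defers loading the ESM implementation: async entry points chain onto the pending `import()`, while sync ones throw until a loader has assigned `babel` through the write-only initialization property. The same technique in isolation, with hypothetical names that are not part of @babel/core's API:

```js
// A distilled version of the lazy-proxy pattern used in cjs-proxy.cjs.
function makeLazyProxy(modulePromise) {
  let mod = null;
  modulePromise.then(m => (mod = m));
  return {
    doAsync: (...args) => modulePromise.then(m => m.doAsync(...args)),
    doSync: (...args) => {
      // Mirrors notLoadedError(): sync calls fail until the import settles.
      if (!mod) throw new Error("module not loaded yet; use doAsync instead");
      return mod.doSync(...args);
    },
  };
}

// Usage with a dynamic import:
// const api = makeLazyProxy(import("./impl.mjs"));
```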
3
node_modules/@babel/core/lib/config/cache-contexts.js
generated
vendored
Normal file
@@ -0,0 +1,3 @@
0 && 0;

//# sourceMappingURL=cache-contexts.js.map
1
node_modules/@babel/core/lib/config/cache-contexts.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"names":[],"sources":["../../src/config/cache-contexts.ts"],"sourcesContent":["import type { Targets } from \"@babel/helper-compilation-targets\";\n\nimport type { ConfigContext } from \"./config-chain.ts\";\nimport type { CallerMetadata } from \"./validation/options.ts\";\n\nexport type { ConfigContext as FullConfig };\n\nexport type FullPreset = {\n  targets: Targets;\n} & ConfigContext;\nexport type FullPlugin = {\n  assumptions: { [name: string]: boolean };\n} & FullPreset;\n\n// Context not including filename since it is used in places that cannot\n// process 'ignore'/'only' and other filename-based logic.\nexport type SimpleConfig = {\n  envName: string;\n  caller: CallerMetadata | undefined;\n};\nexport type SimplePreset = {\n  targets: Targets;\n} & SimpleConfig;\nexport type SimplePlugin = {\n  assumptions: {\n    [name: string]: boolean;\n  };\n} & SimplePreset;\n"],"mappings":"","ignoreList":[]}
261
node_modules/@babel/core/lib/config/caching.js
generated
vendored
Normal file
@@ -0,0 +1,261 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.assertSimpleType = assertSimpleType;
exports.makeStrongCache = makeStrongCache;
exports.makeStrongCacheSync = makeStrongCacheSync;
exports.makeWeakCache = makeWeakCache;
exports.makeWeakCacheSync = makeWeakCacheSync;
function _gensync() {
  const data = require("gensync");
  _gensync = function () {
    return data;
  };
  return data;
}
var _async = require("../gensync-utils/async.js");
var _util = require("./util.js");
const synchronize = gen => {
  return _gensync()(gen).sync;
};
function* genTrue() {
  return true;
}
function makeWeakCache(handler) {
  return makeCachedFunction(WeakMap, handler);
}
function makeWeakCacheSync(handler) {
  return synchronize(makeWeakCache(handler));
}
function makeStrongCache(handler) {
  return makeCachedFunction(Map, handler);
}
function makeStrongCacheSync(handler) {
  return synchronize(makeStrongCache(handler));
}
function makeCachedFunction(CallCache, handler) {
  const callCacheSync = new CallCache();
  const callCacheAsync = new CallCache();
  const futureCache = new CallCache();
  return function* cachedFunction(arg, data) {
    const asyncContext = yield* (0, _async.isAsync)();
    const callCache = asyncContext ? callCacheAsync : callCacheSync;
    const cached = yield* getCachedValueOrWait(asyncContext, callCache, futureCache, arg, data);
    if (cached.valid) return cached.value;
    const cache = new CacheConfigurator(data);
    const handlerResult = handler(arg, cache);
    let finishLock;
    let value;
    if ((0, _util.isIterableIterator)(handlerResult)) {
      value = yield* (0, _async.onFirstPause)(handlerResult, () => {
        finishLock = setupAsyncLocks(cache, futureCache, arg);
      });
    } else {
      value = handlerResult;
    }
    updateFunctionCache(callCache, cache, arg, value);
    if (finishLock) {
      futureCache.delete(arg);
      finishLock.release(value);
    }
    return value;
  };
}
function* getCachedValue(cache, arg, data) {
  const cachedValue = cache.get(arg);
  if (cachedValue) {
    for (const {
      value,
      valid
    } of cachedValue) {
      if (yield* valid(data)) return {
        valid: true,
        value
      };
    }
  }
  return {
    valid: false,
    value: null
  };
}
function* getCachedValueOrWait(asyncContext, callCache, futureCache, arg, data) {
  const cached = yield* getCachedValue(callCache, arg, data);
  if (cached.valid) {
    return cached;
  }
  if (asyncContext) {
    const cached = yield* getCachedValue(futureCache, arg, data);
    if (cached.valid) {
      const value = yield* (0, _async.waitFor)(cached.value.promise);
      return {
        valid: true,
        value
      };
    }
  }
  return {
    valid: false,
    value: null
  };
}
function setupAsyncLocks(config, futureCache, arg) {
  const finishLock = new Lock();
  updateFunctionCache(futureCache, config, arg, finishLock);
  return finishLock;
}
function updateFunctionCache(cache, config, arg, value) {
  if (!config.configured()) config.forever();
  let cachedValue = cache.get(arg);
  config.deactivate();
  switch (config.mode()) {
    case "forever":
      cachedValue = [{
        value,
        valid: genTrue
      }];
      cache.set(arg, cachedValue);
      break;
    case "invalidate":
      cachedValue = [{
        value,
        valid: config.validator()
      }];
      cache.set(arg, cachedValue);
      break;
    case "valid":
      if (cachedValue) {
        cachedValue.push({
          value,
          valid: config.validator()
        });
      } else {
        cachedValue = [{
          value,
          valid: config.validator()
        }];
        cache.set(arg, cachedValue);
      }
  }
}
class CacheConfigurator {
  constructor(data) {
    this._active = true;
    this._never = false;
    this._forever = false;
    this._invalidate = false;
    this._configured = false;
    this._pairs = [];
    this._data = void 0;
    this._data = data;
  }
  simple() {
    return makeSimpleConfigurator(this);
  }
  mode() {
    if (this._never) return "never";
    if (this._forever) return "forever";
    if (this._invalidate) return "invalidate";
    return "valid";
  }
  forever() {
    if (!this._active) {
      throw new Error("Cannot change caching after evaluation has completed.");
    }
    if (this._never) {
      throw new Error("Caching has already been configured with .never()");
    }
    this._forever = true;
    this._configured = true;
  }
  never() {
    if (!this._active) {
      throw new Error("Cannot change caching after evaluation has completed.");
    }
    if (this._forever) {
      throw new Error("Caching has already been configured with .forever()");
    }
    this._never = true;
    this._configured = true;
  }
  using(handler) {
    if (!this._active) {
      throw new Error("Cannot change caching after evaluation has completed.");
    }
    if (this._never || this._forever) {
      throw new Error("Caching has already been configured with .never or .forever()");
    }
    this._configured = true;
    const key = handler(this._data);
    const fn = (0, _async.maybeAsync)(handler, `You appear to be using an async cache handler, but Babel has been called synchronously`);
    if ((0, _async.isThenable)(key)) {
      return key.then(key => {
        this._pairs.push([key, fn]);
        return key;
      });
    }
    this._pairs.push([key, fn]);
    return key;
  }
  invalidate(handler) {
    this._invalidate = true;
    return this.using(handler);
  }
  validator() {
    const pairs = this._pairs;
    return function* (data) {
      for (const [key, fn] of pairs) {
        if (key !== (yield* fn(data))) return false;
      }
      return true;
    };
  }
  deactivate() {
    this._active = false;
  }
  configured() {
    return this._configured;
  }
}
function makeSimpleConfigurator(cache) {
  function cacheFn(val) {
    if (typeof val === "boolean") {
      if (val) cache.forever();else cache.never();
      return;
    }
    return cache.using(() => assertSimpleType(val()));
  }
  cacheFn.forever = () => cache.forever();
  cacheFn.never = () => cache.never();
  cacheFn.using = cb => cache.using(() => assertSimpleType(cb()));
  cacheFn.invalidate = cb => cache.invalidate(() => assertSimpleType(cb()));
  return cacheFn;
}
function assertSimpleType(value) {
  if ((0, _async.isThenable)(value)) {
    throw new Error(`You appear to be using an async cache handler, ` + `which your current version of Babel does not support. ` + `We may add support for this in the future, ` + `but if you're on the most recent version of @babel/core and still ` + `seeing this error, then you'll need to synchronously handle your caching logic.`);
  }
  if (value != null && typeof value !== "string" && typeof value !== "boolean" && typeof value !== "number") {
    throw new Error("Cache keys must be either string, boolean, number, null, or undefined.");
  }
  return value;
}
class Lock {
  constructor() {
    this.released = false;
    this.promise = void 0;
    this._resolve = void 0;
    this.promise = new Promise(resolve => {
      this._resolve = resolve;
    });
  }
  release(value) {
    this.released = true;
    this._resolve(value);
  }
}
0 && 0;

//# sourceMappingURL=caching.js.map
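`makeSimpleConfigurator` is what backs the `api.cache` object handed to config functions; a sketch of how a `babel.config.js` would drive this cache (the `api.cache(true)`/`api.cache(false)` forms also appear in the error text of configuration.js below):

```js
module.exports = function (api) {
  // Maps to CacheConfigurator.using(): the config is re-evaluated whenever
  // the returned cache key (here NODE_ENV) changes.
  api.cache.using(() => process.env.NODE_ENV);

  const isDev = process.env.NODE_ENV === "development";
  return {
    sourceMaps: isDev ? "inline" : false,
  };
};
```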
1
node_modules/@babel/core/lib/config/caching.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
469
node_modules/@babel/core/lib/config/config-chain.js
generated
vendored
Normal file
@@ -0,0 +1,469 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.buildPresetChain = buildPresetChain;
exports.buildPresetChainWalker = void 0;
exports.buildRootChain = buildRootChain;
function _path() {
  const data = require("path");
  _path = function () {
    return data;
  };
  return data;
}
function _debug() {
  const data = require("debug");
  _debug = function () {
    return data;
  };
  return data;
}
var _options = require("./validation/options.js");
var _patternToRegex = require("./pattern-to-regex.js");
var _printer = require("./printer.js");
var _rewriteStackTrace = require("../errors/rewrite-stack-trace.js");
var _configError = require("../errors/config-error.js");
var _index = require("./files/index.js");
var _caching = require("./caching.js");
var _configDescriptors = require("./config-descriptors.js");
const debug = _debug()("babel:config:config-chain");
function* buildPresetChain(arg, context) {
  const chain = yield* buildPresetChainWalker(arg, context);
  if (!chain) return null;
  return {
    plugins: dedupDescriptors(chain.plugins),
    presets: dedupDescriptors(chain.presets),
    options: chain.options.map(o => normalizeOptions(o)),
    files: new Set()
  };
}
const buildPresetChainWalker = exports.buildPresetChainWalker = makeChainWalker({
  root: preset => loadPresetDescriptors(preset),
  env: (preset, envName) => loadPresetEnvDescriptors(preset)(envName),
  overrides: (preset, index) => loadPresetOverridesDescriptors(preset)(index),
  overridesEnv: (preset, index, envName) => loadPresetOverridesEnvDescriptors(preset)(index)(envName),
  createLogger: () => () => {}
});
const loadPresetDescriptors = (0, _caching.makeWeakCacheSync)(preset => buildRootDescriptors(preset, preset.alias, _configDescriptors.createUncachedDescriptors));
const loadPresetEnvDescriptors = (0, _caching.makeWeakCacheSync)(preset => (0, _caching.makeStrongCacheSync)(envName => buildEnvDescriptors(preset, preset.alias, _configDescriptors.createUncachedDescriptors, envName)));
const loadPresetOverridesDescriptors = (0, _caching.makeWeakCacheSync)(preset => (0, _caching.makeStrongCacheSync)(index => buildOverrideDescriptors(preset, preset.alias, _configDescriptors.createUncachedDescriptors, index)));
const loadPresetOverridesEnvDescriptors = (0, _caching.makeWeakCacheSync)(preset => (0, _caching.makeStrongCacheSync)(index => (0, _caching.makeStrongCacheSync)(envName => buildOverrideEnvDescriptors(preset, preset.alias, _configDescriptors.createUncachedDescriptors, index, envName))));
function* buildRootChain(opts, context) {
  let configReport, babelRcReport;
  const programmaticLogger = new _printer.ConfigPrinter();
  const programmaticChain = yield* loadProgrammaticChain({
    options: opts,
    dirname: context.cwd
  }, context, undefined, programmaticLogger);
  if (!programmaticChain) return null;
  const programmaticReport = yield* programmaticLogger.output();
  let configFile;
  if (typeof opts.configFile === "string") {
    configFile = yield* (0, _index.loadConfig)(opts.configFile, context.cwd, context.envName, context.caller);
  } else if (opts.configFile !== false) {
    configFile = yield* (0, _index.findRootConfig)(context.root, context.envName, context.caller);
  }
  let {
    babelrc,
    babelrcRoots
  } = opts;
  let babelrcRootsDirectory = context.cwd;
  const configFileChain = emptyChain();
  const configFileLogger = new _printer.ConfigPrinter();
  if (configFile) {
    const validatedFile = validateConfigFile(configFile);
    const result = yield* loadFileChain(validatedFile, context, undefined, configFileLogger);
    if (!result) return null;
    configReport = yield* configFileLogger.output();
    if (babelrc === undefined) {
      babelrc = validatedFile.options.babelrc;
    }
    if (babelrcRoots === undefined) {
      babelrcRootsDirectory = validatedFile.dirname;
      babelrcRoots = validatedFile.options.babelrcRoots;
    }
    mergeChain(configFileChain, result);
  }
  let ignoreFile, babelrcFile;
  let isIgnored = false;
  const fileChain = emptyChain();
  if ((babelrc === true || babelrc === undefined) && typeof context.filename === "string") {
    const pkgData = yield* (0, _index.findPackageData)(context.filename);
    if (pkgData && babelrcLoadEnabled(context, pkgData, babelrcRoots, babelrcRootsDirectory)) {
      ({
        ignore: ignoreFile,
        config: babelrcFile
      } = yield* (0, _index.findRelativeConfig)(pkgData, context.envName, context.caller));
      if (ignoreFile) {
        fileChain.files.add(ignoreFile.filepath);
      }
      if (ignoreFile && shouldIgnore(context, ignoreFile.ignore, null, ignoreFile.dirname)) {
        isIgnored = true;
      }
      if (babelrcFile && !isIgnored) {
        const validatedFile = validateBabelrcFile(babelrcFile);
        const babelrcLogger = new _printer.ConfigPrinter();
        const result = yield* loadFileChain(validatedFile, context, undefined, babelrcLogger);
        if (!result) {
          isIgnored = true;
        } else {
          babelRcReport = yield* babelrcLogger.output();
          mergeChain(fileChain, result);
        }
      }
      if (babelrcFile && isIgnored) {
        fileChain.files.add(babelrcFile.filepath);
      }
    }
  }
  if (context.showConfig) {
    console.log(`Babel configs on "${context.filename}" (ascending priority):\n` + [configReport, babelRcReport, programmaticReport].filter(x => !!x).join("\n\n") + "\n-----End Babel configs-----");
  }
  const chain = mergeChain(mergeChain(mergeChain(emptyChain(), configFileChain), fileChain), programmaticChain);
  return {
    plugins: isIgnored ? [] : dedupDescriptors(chain.plugins),
    presets: isIgnored ? [] : dedupDescriptors(chain.presets),
    options: isIgnored ? [] : chain.options.map(o => normalizeOptions(o)),
    fileHandling: isIgnored ? "ignored" : "transpile",
    ignore: ignoreFile || undefined,
    babelrc: babelrcFile || undefined,
    config: configFile || undefined,
    files: chain.files
  };
}
function babelrcLoadEnabled(context, pkgData, babelrcRoots, babelrcRootsDirectory) {
  if (typeof babelrcRoots === "boolean") return babelrcRoots;
  const absoluteRoot = context.root;
  if (babelrcRoots === undefined) {
    return pkgData.directories.includes(absoluteRoot);
  }
  let babelrcPatterns = babelrcRoots;
  if (!Array.isArray(babelrcPatterns)) {
    babelrcPatterns = [babelrcPatterns];
  }
  babelrcPatterns = babelrcPatterns.map(pat => {
    return typeof pat === "string" ? _path().resolve(babelrcRootsDirectory, pat) : pat;
  });
  if (babelrcPatterns.length === 1 && babelrcPatterns[0] === absoluteRoot) {
    return pkgData.directories.includes(absoluteRoot);
  }
  return babelrcPatterns.some(pat => {
    if (typeof pat === "string") {
      pat = (0, _patternToRegex.default)(pat, babelrcRootsDirectory);
    }
    return pkgData.directories.some(directory => {
      return matchPattern(pat, babelrcRootsDirectory, directory, context);
    });
  });
}
const validateConfigFile = (0, _caching.makeWeakCacheSync)(file => ({
  filepath: file.filepath,
  dirname: file.dirname,
  options: (0, _options.validate)("configfile", file.options, file.filepath)
}));
const validateBabelrcFile = (0, _caching.makeWeakCacheSync)(file => ({
  filepath: file.filepath,
  dirname: file.dirname,
  options: (0, _options.validate)("babelrcfile", file.options, file.filepath)
}));
const validateExtendFile = (0, _caching.makeWeakCacheSync)(file => ({
  filepath: file.filepath,
  dirname: file.dirname,
  options: (0, _options.validate)("extendsfile", file.options, file.filepath)
}));
const loadProgrammaticChain = makeChainWalker({
  root: input => buildRootDescriptors(input, "base", _configDescriptors.createCachedDescriptors),
  env: (input, envName) => buildEnvDescriptors(input, "base", _configDescriptors.createCachedDescriptors, envName),
  overrides: (input, index) => buildOverrideDescriptors(input, "base", _configDescriptors.createCachedDescriptors, index),
  overridesEnv: (input, index, envName) => buildOverrideEnvDescriptors(input, "base", _configDescriptors.createCachedDescriptors, index, envName),
  createLogger: (input, context, baseLogger) => buildProgrammaticLogger(input, context, baseLogger)
});
const loadFileChainWalker = makeChainWalker({
  root: file => loadFileDescriptors(file),
  env: (file, envName) => loadFileEnvDescriptors(file)(envName),
  overrides: (file, index) => loadFileOverridesDescriptors(file)(index),
  overridesEnv: (file, index, envName) => loadFileOverridesEnvDescriptors(file)(index)(envName),
  createLogger: (file, context, baseLogger) => buildFileLogger(file.filepath, context, baseLogger)
});
function* loadFileChain(input, context, files, baseLogger) {
  const chain = yield* loadFileChainWalker(input, context, files, baseLogger);
  chain == null || chain.files.add(input.filepath);
  return chain;
}
const loadFileDescriptors = (0, _caching.makeWeakCacheSync)(file => buildRootDescriptors(file, file.filepath, _configDescriptors.createUncachedDescriptors));
const loadFileEnvDescriptors = (0, _caching.makeWeakCacheSync)(file => (0, _caching.makeStrongCacheSync)(envName => buildEnvDescriptors(file, file.filepath, _configDescriptors.createUncachedDescriptors, envName)));
const loadFileOverridesDescriptors = (0, _caching.makeWeakCacheSync)(file => (0, _caching.makeStrongCacheSync)(index => buildOverrideDescriptors(file, file.filepath, _configDescriptors.createUncachedDescriptors, index)));
const loadFileOverridesEnvDescriptors = (0, _caching.makeWeakCacheSync)(file => (0, _caching.makeStrongCacheSync)(index => (0, _caching.makeStrongCacheSync)(envName => buildOverrideEnvDescriptors(file, file.filepath, _configDescriptors.createUncachedDescriptors, index, envName))));
function buildFileLogger(filepath, context, baseLogger) {
  if (!baseLogger) {
    return () => {};
  }
  return baseLogger.configure(context.showConfig, _printer.ChainFormatter.Config, {
    filepath
  });
}
function buildRootDescriptors({
  dirname,
  options
}, alias, descriptors) {
  return descriptors(dirname, options, alias);
}
function buildProgrammaticLogger(_, context, baseLogger) {
  var _context$caller;
  if (!baseLogger) {
    return () => {};
  }
  return baseLogger.configure(context.showConfig, _printer.ChainFormatter.Programmatic, {
    callerName: (_context$caller = context.caller) == null ? void 0 : _context$caller.name
  });
}
function buildEnvDescriptors({
  dirname,
  options
}, alias, descriptors, envName) {
  var _options$env;
  const opts = (_options$env = options.env) == null ? void 0 : _options$env[envName];
  return opts ? descriptors(dirname, opts, `${alias}.env["${envName}"]`) : null;
}
function buildOverrideDescriptors({
  dirname,
  options
}, alias, descriptors, index) {
  var _options$overrides;
  const opts = (_options$overrides = options.overrides) == null ? void 0 : _options$overrides[index];
  if (!opts) throw new Error("Assertion failure - missing override");
  return descriptors(dirname, opts, `${alias}.overrides[${index}]`);
}
function buildOverrideEnvDescriptors({
  dirname,
  options
}, alias, descriptors, index, envName) {
  var _options$overrides2, _override$env;
  const override = (_options$overrides2 = options.overrides) == null ? void 0 : _options$overrides2[index];
  if (!override) throw new Error("Assertion failure - missing override");
  const opts = (_override$env = override.env) == null ? void 0 : _override$env[envName];
  return opts ? descriptors(dirname, opts, `${alias}.overrides[${index}].env["${envName}"]`) : null;
}
function makeChainWalker({
  root,
  env,
  overrides,
  overridesEnv,
  createLogger
}) {
  return function* chainWalker(input, context, files = new Set(), baseLogger) {
    const {
      dirname
    } = input;
    const flattenedConfigs = [];
    const rootOpts = root(input);
    if (configIsApplicable(rootOpts, dirname, context, input.filepath)) {
      flattenedConfigs.push({
        config: rootOpts,
        envName: undefined,
        index: undefined
      });
      const envOpts = env(input, context.envName);
      if (envOpts && configIsApplicable(envOpts, dirname, context, input.filepath)) {
        flattenedConfigs.push({
          config: envOpts,
          envName: context.envName,
          index: undefined
        });
      }
      (rootOpts.options.overrides || []).forEach((_, index) => {
        const overrideOps = overrides(input, index);
        if (configIsApplicable(overrideOps, dirname, context, input.filepath)) {
          flattenedConfigs.push({
            config: overrideOps,
            index,
            envName: undefined
          });
          const overrideEnvOpts = overridesEnv(input, index, context.envName);
          if (overrideEnvOpts && configIsApplicable(overrideEnvOpts, dirname, context, input.filepath)) {
            flattenedConfigs.push({
              config: overrideEnvOpts,
              index,
              envName: context.envName
            });
          }
        }
      });
    }
    if (flattenedConfigs.some(({
      config: {
        options: {
          ignore,
          only
        }
      }
    }) => shouldIgnore(context, ignore, only, dirname))) {
      return null;
    }
    const chain = emptyChain();
    const logger = createLogger(input, context, baseLogger);
    for (const {
      config,
      index,
      envName
    } of flattenedConfigs) {
      if (!(yield* mergeExtendsChain(chain, config.options, dirname, context, files, baseLogger))) {
        return null;
      }
      logger(config, index, envName);
      yield* mergeChainOpts(chain, config);
    }
    return chain;
  };
}
function* mergeExtendsChain(chain, opts, dirname, context, files, baseLogger) {
  if (opts.extends === undefined) return true;
  const file = yield* (0, _index.loadConfig)(opts.extends, dirname, context.envName, context.caller);
  if (files.has(file)) {
    throw new Error(`Configuration cycle detected loading ${file.filepath}.\n` + `File already loaded following the config chain:\n` + Array.from(files, file => ` - ${file.filepath}`).join("\n"));
  }
  files.add(file);
  const fileChain = yield* loadFileChain(validateExtendFile(file), context, files, baseLogger);
  files.delete(file);
  if (!fileChain) return false;
  mergeChain(chain, fileChain);
  return true;
}
function mergeChain(target, source) {
  target.options.push(...source.options);
  target.plugins.push(...source.plugins);
  target.presets.push(...source.presets);
  for (const file of source.files) {
    target.files.add(file);
  }
  return target;
}
function* mergeChainOpts(target, {
  options,
  plugins,
  presets
}) {
  target.options.push(options);
  target.plugins.push(...(yield* plugins()));
  target.presets.push(...(yield* presets()));
  return target;
}
function emptyChain() {
  return {
    options: [],
    presets: [],
    plugins: [],
    files: new Set()
  };
}
function normalizeOptions(opts) {
  const options = Object.assign({}, opts);
  delete options.extends;
  delete options.env;
  delete options.overrides;
  delete options.plugins;
  delete options.presets;
  delete options.passPerPreset;
  delete options.ignore;
  delete options.only;
  delete options.test;
  delete options.include;
  delete options.exclude;
  if (hasOwnProperty.call(options, "sourceMap")) {
    options.sourceMaps = options.sourceMap;
    delete options.sourceMap;
  }
  return options;
}
function dedupDescriptors(items) {
  const map = new Map();
  const descriptors = [];
  for (const item of items) {
    if (typeof item.value === "function") {
      const fnKey = item.value;
      let nameMap = map.get(fnKey);
      if (!nameMap) {
        nameMap = new Map();
        map.set(fnKey, nameMap);
      }
      let desc = nameMap.get(item.name);
      if (!desc) {
        desc = {
          value: item
        };
        descriptors.push(desc);
        if (!item.ownPass) nameMap.set(item.name, desc);
      } else {
        desc.value = item;
      }
    } else {
      descriptors.push({
        value: item
      });
    }
  }
  return descriptors.reduce((acc, desc) => {
    acc.push(desc.value);
    return acc;
  }, []);
}
function configIsApplicable({
  options
}, dirname, context, configName) {
  return (options.test === undefined || configFieldIsApplicable(context, options.test, dirname, configName)) && (options.include === undefined || configFieldIsApplicable(context, options.include, dirname, configName)) && (options.exclude === undefined || !configFieldIsApplicable(context, options.exclude, dirname, configName));
}
function configFieldIsApplicable(context, test, dirname, configName) {
  const patterns = Array.isArray(test) ? test : [test];
  return matchesPatterns(context, patterns, dirname, configName);
}
function ignoreListReplacer(_key, value) {
  if (value instanceof RegExp) {
    return String(value);
  }
  return value;
}
function shouldIgnore(context, ignore, only, dirname) {
  if (ignore && matchesPatterns(context, ignore, dirname)) {
    var _context$filename;
    const message = `No config is applied to "${(_context$filename = context.filename) != null ? _context$filename : "(unknown)"}" because it matches one of \`ignore: ${JSON.stringify(ignore, ignoreListReplacer)}\` from "${dirname}"`;
    debug(message);
    if (context.showConfig) {
      console.log(message);
    }
    return true;
  }
  if (only && !matchesPatterns(context, only, dirname)) {
    var _context$filename2;
    const message = `No config is applied to "${(_context$filename2 = context.filename) != null ? _context$filename2 : "(unknown)"}" because it fails to match one of \`only: ${JSON.stringify(only, ignoreListReplacer)}\` from "${dirname}"`;
    debug(message);
    if (context.showConfig) {
      console.log(message);
    }
    return true;
  }
  return false;
}
function matchesPatterns(context, patterns, dirname, configName) {
  return patterns.some(pattern => matchPattern(pattern, dirname, context.filename, context, configName));
}
function matchPattern(pattern, dirname, pathToTest, context, configName) {
  if (typeof pattern === "function") {
    return !!(0, _rewriteStackTrace.endHiddenCallStack)(pattern)(pathToTest, {
      dirname,
      envName: context.envName,
      caller: context.caller
    });
  }
  if (typeof pathToTest !== "string") {
    throw new _configError.default(`Configuration contains string/RegExp pattern, but no filename was passed to Babel`, configName);
  }
  if (typeof pattern === "string") {
    pattern = (0, _patternToRegex.default)(pattern, dirname);
  }
  return pattern.test(pathToTest);
}
0 && 0;

//# sourceMappingURL=config-chain.js.map
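`makeChainWalker` flattens one config object into an ordered list: root options, then `env[envName]`, then each matching `overrides` entry and its own `env` block, with later entries taking priority. A config exercising that ordering (a sketch):

```js
module.exports = {
  sourceMaps: true,                 // root options, lowest priority
  env: {
    test: { sourceMaps: "inline" }, // applied when envName === "test"
  },
  overrides: [
    {
      test: "./legacy",             // matched by matchPattern() against the filename
      sourceMaps: false,            // highest priority when the file matches
    },
  ],
};
```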
1
node_modules/@babel/core/lib/config/config-chain.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
190
node_modules/@babel/core/lib/config/config-descriptors.js
generated
vendored
Normal file
@@ -0,0 +1,190 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.createCachedDescriptors = createCachedDescriptors;
exports.createDescriptor = createDescriptor;
exports.createUncachedDescriptors = createUncachedDescriptors;
function _gensync() {
  const data = require("gensync");
  _gensync = function () {
    return data;
  };
  return data;
}
var _functional = require("../gensync-utils/functional.js");
var _index = require("./files/index.js");
var _item = require("./item.js");
var _caching = require("./caching.js");
var _resolveTargets = require("./resolve-targets.js");
function isEqualDescriptor(a, b) {
  var _a$file, _b$file, _a$file2, _b$file2;
  return a.name === b.name && a.value === b.value && a.options === b.options && a.dirname === b.dirname && a.alias === b.alias && a.ownPass === b.ownPass && ((_a$file = a.file) == null ? void 0 : _a$file.request) === ((_b$file = b.file) == null ? void 0 : _b$file.request) && ((_a$file2 = a.file) == null ? void 0 : _a$file2.resolved) === ((_b$file2 = b.file) == null ? void 0 : _b$file2.resolved);
}
function* handlerOf(value) {
  return value;
}
function optionsWithResolvedBrowserslistConfigFile(options, dirname) {
  if (typeof options.browserslistConfigFile === "string") {
    options.browserslistConfigFile = (0, _resolveTargets.resolveBrowserslistConfigFile)(options.browserslistConfigFile, dirname);
  }
  return options;
}
function createCachedDescriptors(dirname, options, alias) {
  const {
    plugins,
    presets,
    passPerPreset
  } = options;
  return {
    options: optionsWithResolvedBrowserslistConfigFile(options, dirname),
    plugins: plugins ? () => createCachedPluginDescriptors(plugins, dirname)(alias) : () => handlerOf([]),
    presets: presets ? () => createCachedPresetDescriptors(presets, dirname)(alias)(!!passPerPreset) : () => handlerOf([])
  };
}
function createUncachedDescriptors(dirname, options, alias) {
  return {
    options: optionsWithResolvedBrowserslistConfigFile(options, dirname),
    plugins: (0, _functional.once)(() => createPluginDescriptors(options.plugins || [], dirname, alias)),
    presets: (0, _functional.once)(() => createPresetDescriptors(options.presets || [], dirname, alias, !!options.passPerPreset))
  };
}
const PRESET_DESCRIPTOR_CACHE = new WeakMap();
const createCachedPresetDescriptors = (0, _caching.makeWeakCacheSync)((items, cache) => {
  const dirname = cache.using(dir => dir);
  return (0, _caching.makeStrongCacheSync)(alias => (0, _caching.makeStrongCache)(function* (passPerPreset) {
    const descriptors = yield* createPresetDescriptors(items, dirname, alias, passPerPreset);
    return descriptors.map(desc => loadCachedDescriptor(PRESET_DESCRIPTOR_CACHE, desc));
  }));
});
const PLUGIN_DESCRIPTOR_CACHE = new WeakMap();
const createCachedPluginDescriptors = (0, _caching.makeWeakCacheSync)((items, cache) => {
  const dirname = cache.using(dir => dir);
  return (0, _caching.makeStrongCache)(function* (alias) {
    const descriptors = yield* createPluginDescriptors(items, dirname, alias);
    return descriptors.map(desc => loadCachedDescriptor(PLUGIN_DESCRIPTOR_CACHE, desc));
  });
});
const DEFAULT_OPTIONS = {};
function loadCachedDescriptor(cache, desc) {
  const {
    value,
    options = DEFAULT_OPTIONS
  } = desc;
  if (options === false) return desc;
  let cacheByOptions = cache.get(value);
  if (!cacheByOptions) {
    cacheByOptions = new WeakMap();
    cache.set(value, cacheByOptions);
  }
  let possibilities = cacheByOptions.get(options);
  if (!possibilities) {
    possibilities = [];
    cacheByOptions.set(options, possibilities);
  }
  if (!possibilities.includes(desc)) {
    const matches = possibilities.filter(possibility => isEqualDescriptor(possibility, desc));
    if (matches.length > 0) {
      return matches[0];
    }
    possibilities.push(desc);
  }
  return desc;
}
function* createPresetDescriptors(items, dirname, alias, passPerPreset) {
  return yield* createDescriptors("preset", items, dirname, alias, passPerPreset);
}
function* createPluginDescriptors(items, dirname, alias) {
  return yield* createDescriptors("plugin", items, dirname, alias);
}
function* createDescriptors(type, items, dirname, alias, ownPass) {
  const descriptors = yield* _gensync().all(items.map((item, index) => createDescriptor(item, dirname, {
    type,
    alias: `${alias}$${index}`,
    ownPass: !!ownPass
  })));
  assertNoDuplicates(descriptors);
  return descriptors;
}
function* createDescriptor(pair, dirname, {
  type,
  alias,
  ownPass
}) {
  const desc = (0, _item.getItemDescriptor)(pair);
  if (desc) {
    return desc;
  }
  let name;
  let options;
  let value = pair;
  if (Array.isArray(value)) {
    if (value.length === 3) {
      [value, options, name] = value;
    } else {
      [value, options] = value;
    }
  }
  let file = undefined;
  let filepath = null;
  if (typeof value === "string") {
    if (typeof type !== "string") {
      throw new Error("To resolve a string-based item, the type of item must be given");
    }
    const resolver = type === "plugin" ? _index.loadPlugin : _index.loadPreset;
    const request = value;
    ({
      filepath,
      value
    } = yield* resolver(value, dirname));
    file = {
      request,
      resolved: filepath
    };
  }
  if (!value) {
    throw new Error(`Unexpected falsy value: ${String(value)}`);
  }
  if (typeof value === "object" && value.__esModule) {
    if (value.default) {
      value = value.default;
    } else {
      throw new Error("Must export a default export when using ES6 modules.");
    }
  }
  if (typeof value !== "object" && typeof value !== "function") {
    throw new Error(`Unsupported format: ${typeof value}. Expected an object or a function.`);
  }
  if (filepath !== null && typeof value === "object" && value) {
    throw new Error(`Plugin/Preset files are not allowed to export objects, only functions. In ${filepath}`);
  }
  return {
    name,
    alias: filepath || alias,
    value,
    options,
    dirname,
    ownPass,
    file
  };
}
function assertNoDuplicates(items) {
  const map = new Map();
  for (const item of items) {
    if (typeof item.value !== "function") continue;
    let nameMap = map.get(item.value);
    if (!nameMap) {
      nameMap = new Set();
      map.set(item.value, nameMap);
    }
    if (nameMap.has(item.name)) {
      const conflicts = items.filter(i => i.value === item.value);
      throw new Error([`Duplicate plugin/preset detected.`, `If you'd like to use two separate instances of a plugin,`, `they need separate names, e.g.`, ``, ` plugins: [`, ` ['some-plugin', {}],`, ` ['some-plugin', {}, 'some unique name'],`, ` ]`, ``, `Duplicates detected are:`, `${JSON.stringify(conflicts, null, 2)}`].join("\n"));
    }
    nameMap.add(item.name);
  }
}
0 && 0;

//# sourceMappingURL=config-descriptors.js.map
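`assertNoDuplicates` rejects re-using the same plugin function twice unless each instance is disambiguated with the optional third tuple element, exactly as its error message suggests:

```js
module.exports = {
  plugins: [
    ["some-plugin", { mode: "a" }],
    // The third element becomes the descriptor's `name`, so both instances
    // can coexist in the same pass.
    ["some-plugin", { mode: "b" }, "some unique name"],
  ],
};
```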
1
node_modules/@babel/core/lib/config/config-descriptors.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
290
node_modules/@babel/core/lib/config/files/configuration.js
generated
vendored
Normal file
@@ -0,0 +1,290 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.ROOT_CONFIG_FILENAMES = void 0;
exports.findConfigUpwards = findConfigUpwards;
exports.findRelativeConfig = findRelativeConfig;
exports.findRootConfig = findRootConfig;
exports.loadConfig = loadConfig;
exports.resolveShowConfigPath = resolveShowConfigPath;
function _debug() {
  const data = require("debug");
  _debug = function () {
    return data;
  };
  return data;
}
function _fs() {
  const data = require("fs");
  _fs = function () {
    return data;
  };
  return data;
}
function _path() {
  const data = require("path");
  _path = function () {
    return data;
  };
  return data;
}
function _json() {
  const data = require("json5");
  _json = function () {
    return data;
  };
  return data;
}
function _gensync() {
  const data = require("gensync");
  _gensync = function () {
    return data;
  };
  return data;
}
var _caching = require("../caching.js");
var _configApi = require("../helpers/config-api.js");
var _utils = require("./utils.js");
var _moduleTypes = require("./module-types.js");
var _patternToRegex = require("../pattern-to-regex.js");
var _configError = require("../../errors/config-error.js");
var fs = require("../../gensync-utils/fs.js");
require("module");
var _rewriteStackTrace = require("../../errors/rewrite-stack-trace.js");
var _async = require("../../gensync-utils/async.js");
const debug = _debug()("babel:config:loading:files:configuration");
const ROOT_CONFIG_FILENAMES = exports.ROOT_CONFIG_FILENAMES = ["babel.config.js", "babel.config.cjs", "babel.config.mjs", "babel.config.json", "babel.config.cts"];
const RELATIVE_CONFIG_FILENAMES = [".babelrc", ".babelrc.js", ".babelrc.cjs", ".babelrc.mjs", ".babelrc.json", ".babelrc.cts"];
const BABELIGNORE_FILENAME = ".babelignore";
const runConfig = (0, _caching.makeWeakCache)(function* runConfig(options, cache) {
  yield* [];
  return {
    options: (0, _rewriteStackTrace.endHiddenCallStack)(options)((0, _configApi.makeConfigAPI)(cache)),
    cacheNeedsConfiguration: !cache.configured()
  };
});
function* readConfigCode(filepath, data) {
  if (!_fs().existsSync(filepath)) return null;
  let options = yield* (0, _moduleTypes.default)(filepath, (yield* (0, _async.isAsync)()) ? "auto" : "require", "You appear to be using a native ECMAScript module configuration " + "file, which is only supported when running Babel asynchronously " + "or when using the Node.js `--experimental-require-module` flag.", "You appear to be using a configuration file that contains top-level " + "await, which is only supported when running Babel asynchronously.");
  let cacheNeedsConfiguration = false;
  if (typeof options === "function") {
    ({
      options,
      cacheNeedsConfiguration
    } = yield* runConfig(options, data));
  }
  if (!options || typeof options !== "object" || Array.isArray(options)) {
    throw new _configError.default(`Configuration should be an exported JavaScript object.`, filepath);
  }
  if (typeof options.then === "function") {
    options.catch == null || options.catch(() => {});
    throw new _configError.default(`You appear to be using an async configuration, ` + `which your current version of Babel does not support. ` + `We may add support for this in the future, ` + `but if you're on the most recent version of @babel/core and still ` + `seeing this error, then you'll need to synchronously return your config.`, filepath);
  }
  if (cacheNeedsConfiguration) throwConfigError(filepath);
  return buildConfigFileObject(options, filepath);
}
const cfboaf = new WeakMap();
function buildConfigFileObject(options, filepath) {
  let configFilesByFilepath = cfboaf.get(options);
  if (!configFilesByFilepath) {
    cfboaf.set(options, configFilesByFilepath = new Map());
  }
  let configFile = configFilesByFilepath.get(filepath);
  if (!configFile) {
    configFile = {
      filepath,
      dirname: _path().dirname(filepath),
      options
    };
    configFilesByFilepath.set(filepath, configFile);
  }
  return configFile;
}
const packageToBabelConfig = (0, _caching.makeWeakCacheSync)(file => {
  const babel = file.options["babel"];
  if (babel === undefined) return null;
  if (typeof babel !== "object" || Array.isArray(babel) || babel === null) {
    throw new _configError.default(`.babel property must be an object`, file.filepath);
  }
  return {
    filepath: file.filepath,
    dirname: file.dirname,
    options: babel
  };
});
const readConfigJSON5 = (0, _utils.makeStaticFileCache)((filepath, content) => {
  let options;
  try {
    options = _json().parse(content);
  } catch (err) {
    throw new _configError.default(`Error while parsing config - ${err.message}`, filepath);
  }
  if (!options) throw new _configError.default(`No config detected`, filepath);
  if (typeof options !== "object") {
    throw new _configError.default(`Config returned typeof ${typeof options}`, filepath);
  }
  if (Array.isArray(options)) {
    throw new _configError.default(`Expected config object but found array`, filepath);
  }
  delete options["$schema"];
  return {
    filepath,
    dirname: _path().dirname(filepath),
    options
  };
});
const readIgnoreConfig = (0, _utils.makeStaticFileCache)((filepath, content) => {
  const ignoreDir = _path().dirname(filepath);
  const ignorePatterns = content.split("\n").map(line => line.replace(/#.*$/, "").trim()).filter(Boolean);
  for (const pattern of ignorePatterns) {
    if (pattern[0] === "!") {
      throw new _configError.default(`Negation of file paths is not supported.`, filepath);
    }
  }
  return {
    filepath,
    dirname: _path().dirname(filepath),
    ignore: ignorePatterns.map(pattern => (0, _patternToRegex.default)(pattern, ignoreDir))
  };
});
function findConfigUpwards(rootDir) {
  let dirname = rootDir;
  for (;;) {
    for (const filename of ROOT_CONFIG_FILENAMES) {
      if (_fs().existsSync(_path().join(dirname, filename))) {
        return dirname;
      }
    }
    const nextDir = _path().dirname(dirname);
    if (dirname === nextDir) break;
    dirname = nextDir;
  }
  return null;
}
function* findRelativeConfig(packageData, envName, caller) {
  let config = null;
  let ignore = null;
  const dirname = _path().dirname(packageData.filepath);
  for (const loc of packageData.directories) {
    if (!config) {
      var _packageData$pkg;
      config = yield* loadOneConfig(RELATIVE_CONFIG_FILENAMES, loc, envName, caller, ((_packageData$pkg = packageData.pkg) == null ? void 0 : _packageData$pkg.dirname) === loc ? packageToBabelConfig(packageData.pkg) : null);
    }
    if (!ignore) {
      const ignoreLoc = _path().join(loc, BABELIGNORE_FILENAME);
      ignore = yield* readIgnoreConfig(ignoreLoc);
      if (ignore) {
        debug("Found ignore %o from %o.", ignore.filepath, dirname);
      }
    }
  }
  return {
    config,
    ignore
  };
}
function findRootConfig(dirname, envName, caller) {
  return loadOneConfig(ROOT_CONFIG_FILENAMES, dirname, envName, caller);
}
function* loadOneConfig(names, dirname, envName, caller, previousConfig = null) {
  const configs = yield* _gensync().all(names.map(filename => readConfig(_path().join(dirname, filename), envName, caller)));
  const config = configs.reduce((previousConfig, config) => {
    if (config && previousConfig) {
      throw new _configError.default(`Multiple configuration files found. Please remove one:\n` + ` - ${_path().basename(previousConfig.filepath)}\n` + ` - ${config.filepath}\n` + `from ${dirname}`);
    }
    return config || previousConfig;
  }, previousConfig);
  if (config) {
    debug("Found configuration %o from %o.", config.filepath, dirname);
  }
  return config;
}
function* loadConfig(name, dirname, envName, caller) {
  const filepath = (((v, w) => (v = v.split("."), w = w.split("."), +v[0] > +w[0] || v[0] == w[0] && +v[1] >= +w[1]))(process.versions.node, "8.9") ? require.resolve : (r, {
    paths: [b]
  }, M = require("module")) => {
    let f = M._findPath(r, M._nodeModulePaths(b).concat(b));
    if (f) return f;
    f = new Error(`Cannot resolve module '${r}'`);
    f.code = "MODULE_NOT_FOUND";
    throw f;
  })(name, {
    paths: [dirname]
  });
  const conf = yield* readConfig(filepath, envName, caller);
  if (!conf) {
    throw new _configError.default(`Config file contains no configuration data`, filepath);
  }
  debug("Loaded config %o from %o.", name, dirname);
  return conf;
}
function readConfig(filepath, envName, caller) {
  const ext = _path().extname(filepath);
  switch (ext) {
    case ".js":
    case ".cjs":
    case ".mjs":
    case ".ts":
    case ".cts":
    case ".mts":
      return readConfigCode(filepath, {
        envName,
        caller
      });
    default:
      return readConfigJSON5(filepath);
  }
}
function* resolveShowConfigPath(dirname) {
  const targetPath = process.env.BABEL_SHOW_CONFIG_FOR;
  if (targetPath != null) {
    const absolutePath = _path().resolve(dirname, targetPath);
    const stats = yield* fs.stat(absolutePath);
    if (!stats.isFile()) {
      throw new Error(`${absolutePath}: BABEL_SHOW_CONFIG_FOR must refer to a regular file, directories are not supported.`);
    }
    return absolutePath;
  }
  return null;
}
function throwConfigError(filepath) {
  throw new _configError.default(`\
Caching was left unconfigured. Babel's plugins, presets, and .babelrc.js files can be configured
for various types of caching, using the first param of their handler functions:

module.exports = function(api) {
  // The API exposes the following:

  // Cache the returned value forever and don't call this function again.
  api.cache(true);

  // Don't cache at all. Not recommended because it will be very slow.
  api.cache(false);

  // Cached based on the value of some function. If this function returns a value different from
|
||||
// a previously-encountered value, the plugins will re-evaluate.
|
||||
var env = api.cache(() => process.env.NODE_ENV);
|
||||
|
||||
// If testing for a specific env, we recommend specifics to avoid instantiating a plugin for
|
||||
// any possible NODE_ENV value that might come up during plugin execution.
|
||||
var isProd = api.cache(() => process.env.NODE_ENV === "production");
|
||||
|
||||
// .cache(fn) will perform a linear search though instances to find the matching plugin based
|
||||
// based on previous instantiated plugins. If you want to recreate the plugin and discard the
|
||||
// previous instance whenever something changes, you may use:
|
||||
var isProd = api.cache.invalidate(() => process.env.NODE_ENV === "production");
|
||||
|
||||
// Note, we also expose the following more-verbose versions of the above examples:
|
||||
api.cache.forever(); // api.cache(true)
|
||||
api.cache.never(); // api.cache(false)
|
||||
api.cache.using(fn); // api.cache(fn)
|
||||
|
||||
// Return the value that will be cached.
|
||||
return { };
|
||||
};`, filepath);
|
||||
}
|
||||
0 && 0;
|
||||
|
||||
//# sourceMappingURL=configuration.js.map
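The cache contract that throwConfigError describes can be satisfied with one call; a minimal stand-alone babel.config.js sketch under that assumption (the empty presets array is a placeholder, not taken from this repo):

module.exports = function (api) {
  // Re-evaluate this config whenever NODE_ENV changes; this satisfies the
  // "caching was left unconfigured" check performed in readConfigCode above.
  api.cache.using(() => process.env.NODE_ENV);
  return { presets: [] };
};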
1
node_modules/@babel/core/lib/config/files/configuration.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
6
node_modules/@babel/core/lib/config/files/import.cjs
generated
vendored
Normal file
@ -0,0 +1,6 @@
module.exports = function import_(filepath) {
  return import(filepath);
};
0 && 0;

//# sourceMappingURL=import.cjs.map
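This wrapper exists so that import() never appears at parse time in the caller; module-types.js (later in this diff) requires it inside a try/catch. A hedged sketch of that consumption pattern (loadESM is a hypothetical name):

let import_;
try {
  // Parsed only when required, so runtimes without import() don't crash at startup.
  import_ = require("./import.cjs");
} catch (_unused) {
  import_ = undefined; // dynamic import unavailable on this runtime
}
async function loadESM(filepath) {
  if (!import_) throw new Error("native ESM is not supported on this platform");
  return import_(filepath);
}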
1
node_modules/@babel/core/lib/config/files/import.cjs.map
generated
vendored
Normal file
@ -0,0 +1 @@
{"version":3,"names":["module","exports","import_","filepath"],"sources":["../../../src/config/files/import.cjs"],"sourcesContent":["// We keep this in a separate file so that in older node versions, where\n// import() isn't supported, we can try/catch around the require() call\n// when loading this file.\n\nmodule.exports = function import_(filepath) {\n return import(filepath);\n};\n"],"mappings":"AAIAA,MAAM,CAACC,OAAO,GAAG,SAASC,OAAOA,CAACC,QAAQ,EAAE;EAC1C,OAAO,OAAOA,QAAQ,CAAC;AACzB,CAAC;AAAC","ignoreList":[]}
58
node_modules/@babel/core/lib/config/files/index-browser.js
generated
vendored
Normal file
@ -0,0 +1,58 @@
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.ROOT_CONFIG_FILENAMES = void 0;
|
||||
exports.findConfigUpwards = findConfigUpwards;
|
||||
exports.findPackageData = findPackageData;
|
||||
exports.findRelativeConfig = findRelativeConfig;
|
||||
exports.findRootConfig = findRootConfig;
|
||||
exports.loadConfig = loadConfig;
|
||||
exports.loadPlugin = loadPlugin;
|
||||
exports.loadPreset = loadPreset;
|
||||
exports.resolvePlugin = resolvePlugin;
|
||||
exports.resolvePreset = resolvePreset;
|
||||
exports.resolveShowConfigPath = resolveShowConfigPath;
|
||||
function findConfigUpwards(rootDir) {
|
||||
return null;
|
||||
}
|
||||
function* findPackageData(filepath) {
|
||||
return {
|
||||
filepath,
|
||||
directories: [],
|
||||
pkg: null,
|
||||
isPackage: false
|
||||
};
|
||||
}
|
||||
function* findRelativeConfig(pkgData, envName, caller) {
|
||||
return {
|
||||
config: null,
|
||||
ignore: null
|
||||
};
|
||||
}
|
||||
function* findRootConfig(dirname, envName, caller) {
|
||||
return null;
|
||||
}
|
||||
function* loadConfig(name, dirname, envName, caller) {
|
||||
throw new Error(`Cannot load ${name} relative to ${dirname} in a browser`);
|
||||
}
|
||||
function* resolveShowConfigPath(dirname) {
|
||||
return null;
|
||||
}
|
||||
const ROOT_CONFIG_FILENAMES = exports.ROOT_CONFIG_FILENAMES = [];
|
||||
function resolvePlugin(name, dirname) {
|
||||
return null;
|
||||
}
|
||||
function resolvePreset(name, dirname) {
|
||||
return null;
|
||||
}
|
||||
function loadPlugin(name, dirname) {
|
||||
throw new Error(`Cannot load plugin ${name} relative to ${dirname} in a browser`);
|
||||
}
|
||||
function loadPreset(name, dirname) {
|
||||
throw new Error(`Cannot load preset ${name} relative to ${dirname} in a browser`);
|
||||
}
|
||||
0 && 0;
|
||||
|
||||
//# sourceMappingURL=index-browser.js.map
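This file is the no-op stand-in that bundlers substitute for index.js in browser builds. A hedged illustration of how that substitution is typically wired via package.json's "browser" field (the mapping below is illustrative, not copied from @babel/core's actual package.json):

// package.json (illustrative)
{
  "browser": {
    "./lib/config/files/index.js": "./lib/config/files/index-browser.js"
  }
}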
1
node_modules/@babel/core/lib/config/files/index-browser.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
{"version":3,"names":["findConfigUpwards","rootDir","findPackageData","filepath","directories","pkg","isPackage","findRelativeConfig","pkgData","envName","caller","config","ignore","findRootConfig","dirname","loadConfig","name","Error","resolveShowConfigPath","ROOT_CONFIG_FILENAMES","exports","resolvePlugin","resolvePreset","loadPlugin","loadPreset"],"sources":["../../../src/config/files/index-browser.ts"],"sourcesContent":["import type { Handler } from \"gensync\";\n\nimport type {\n ConfigFile,\n IgnoreFile,\n RelativeConfig,\n FilePackageData,\n} from \"./types.ts\";\n\nimport type { CallerMetadata } from \"../validation/options.ts\";\n\nexport type { ConfigFile, IgnoreFile, RelativeConfig, FilePackageData };\n\nexport function findConfigUpwards(\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n rootDir: string,\n): string | null {\n return null;\n}\n\n// eslint-disable-next-line require-yield\nexport function* findPackageData(filepath: string): Handler<FilePackageData> {\n return {\n filepath,\n directories: [],\n pkg: null,\n isPackage: false,\n };\n}\n\n// eslint-disable-next-line require-yield\nexport function* findRelativeConfig(\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n pkgData: FilePackageData,\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n envName: string,\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n caller: CallerMetadata | undefined,\n): Handler<RelativeConfig> {\n return { config: null, ignore: null };\n}\n\n// eslint-disable-next-line require-yield\nexport function* findRootConfig(\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n dirname: string,\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n envName: string,\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n caller: CallerMetadata | undefined,\n): Handler<ConfigFile | null> {\n return null;\n}\n\n// eslint-disable-next-line require-yield\nexport function* loadConfig(\n name: string,\n dirname: string,\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n envName: string,\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n caller: CallerMetadata | undefined,\n): Handler<ConfigFile> {\n throw new Error(`Cannot load ${name} relative to ${dirname} in a browser`);\n}\n\n// eslint-disable-next-line require-yield\nexport function* resolveShowConfigPath(\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n dirname: string,\n): Handler<string | null> {\n return null;\n}\n\nexport const ROOT_CONFIG_FILENAMES: string[] = [];\n\ntype Resolved =\n | { loader: \"require\"; filepath: string }\n | { loader: \"import\"; filepath: string };\n\n// eslint-disable-next-line @typescript-eslint/no-unused-vars\nexport function resolvePlugin(name: string, dirname: string): Resolved | null {\n return null;\n}\n\n// eslint-disable-next-line @typescript-eslint/no-unused-vars\nexport function resolvePreset(name: string, dirname: string): Resolved | null {\n return null;\n}\n\nexport function loadPlugin(\n name: string,\n dirname: string,\n): Handler<{\n filepath: string;\n value: unknown;\n}> {\n throw new Error(\n `Cannot load plugin ${name} relative to ${dirname} in a browser`,\n );\n}\n\nexport function loadPreset(\n name: string,\n dirname: string,\n): Handler<{\n filepath: string;\n value: unknown;\n}> {\n throw new Error(\n `Cannot load preset ${name} relative to ${dirname} in a browser`,\n 
);\n}\n"],"mappings":";;;;;;;;;;;;;;;;AAaO,SAASA,iBAAiBA,CAE/BC,OAAe,EACA;EACf,OAAO,IAAI;AACb;AAGO,UAAUC,eAAeA,CAACC,QAAgB,EAA4B;EAC3E,OAAO;IACLA,QAAQ;IACRC,WAAW,EAAE,EAAE;IACfC,GAAG,EAAE,IAAI;IACTC,SAAS,EAAE;EACb,CAAC;AACH;AAGO,UAAUC,kBAAkBA,CAEjCC,OAAwB,EAExBC,OAAe,EAEfC,MAAkC,EACT;EACzB,OAAO;IAAEC,MAAM,EAAE,IAAI;IAAEC,MAAM,EAAE;EAAK,CAAC;AACvC;AAGO,UAAUC,cAAcA,CAE7BC,OAAe,EAEfL,OAAe,EAEfC,MAAkC,EACN;EAC5B,OAAO,IAAI;AACb;AAGO,UAAUK,UAAUA,CACzBC,IAAY,EACZF,OAAe,EAEfL,OAAe,EAEfC,MAAkC,EACb;EACrB,MAAM,IAAIO,KAAK,CAAC,eAAeD,IAAI,gBAAgBF,OAAO,eAAe,CAAC;AAC5E;AAGO,UAAUI,qBAAqBA,CAEpCJ,OAAe,EACS;EACxB,OAAO,IAAI;AACb;AAEO,MAAMK,qBAA+B,GAAAC,OAAA,CAAAD,qBAAA,GAAG,EAAE;AAO1C,SAASE,aAAaA,CAACL,IAAY,EAAEF,OAAe,EAAmB;EAC5E,OAAO,IAAI;AACb;AAGO,SAASQ,aAAaA,CAACN,IAAY,EAAEF,OAAe,EAAmB;EAC5E,OAAO,IAAI;AACb;AAEO,SAASS,UAAUA,CACxBP,IAAY,EACZF,OAAe,EAId;EACD,MAAM,IAAIG,KAAK,CACb,sBAAsBD,IAAI,gBAAgBF,OAAO,eACnD,CAAC;AACH;AAEO,SAASU,UAAUA,CACxBR,IAAY,EACZF,OAAe,EAId;EACD,MAAM,IAAIG,KAAK,CACb,sBAAsBD,IAAI,gBAAgBF,OAAO,eACnD,CAAC;AACH;AAAC","ignoreList":[]}
78
node_modules/@babel/core/lib/config/files/index.js
generated
vendored
Normal file
@ -0,0 +1,78 @@
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
Object.defineProperty(exports, "ROOT_CONFIG_FILENAMES", {
|
||||
enumerable: true,
|
||||
get: function () {
|
||||
return _configuration.ROOT_CONFIG_FILENAMES;
|
||||
}
|
||||
});
|
||||
Object.defineProperty(exports, "findConfigUpwards", {
|
||||
enumerable: true,
|
||||
get: function () {
|
||||
return _configuration.findConfigUpwards;
|
||||
}
|
||||
});
|
||||
Object.defineProperty(exports, "findPackageData", {
|
||||
enumerable: true,
|
||||
get: function () {
|
||||
return _package.findPackageData;
|
||||
}
|
||||
});
|
||||
Object.defineProperty(exports, "findRelativeConfig", {
|
||||
enumerable: true,
|
||||
get: function () {
|
||||
return _configuration.findRelativeConfig;
|
||||
}
|
||||
});
|
||||
Object.defineProperty(exports, "findRootConfig", {
|
||||
enumerable: true,
|
||||
get: function () {
|
||||
return _configuration.findRootConfig;
|
||||
}
|
||||
});
|
||||
Object.defineProperty(exports, "loadConfig", {
|
||||
enumerable: true,
|
||||
get: function () {
|
||||
return _configuration.loadConfig;
|
||||
}
|
||||
});
|
||||
Object.defineProperty(exports, "loadPlugin", {
|
||||
enumerable: true,
|
||||
get: function () {
|
||||
return _plugins.loadPlugin;
|
||||
}
|
||||
});
|
||||
Object.defineProperty(exports, "loadPreset", {
|
||||
enumerable: true,
|
||||
get: function () {
|
||||
return _plugins.loadPreset;
|
||||
}
|
||||
});
|
||||
Object.defineProperty(exports, "resolvePlugin", {
|
||||
enumerable: true,
|
||||
get: function () {
|
||||
return _plugins.resolvePlugin;
|
||||
}
|
||||
});
|
||||
Object.defineProperty(exports, "resolvePreset", {
|
||||
enumerable: true,
|
||||
get: function () {
|
||||
return _plugins.resolvePreset;
|
||||
}
|
||||
});
|
||||
Object.defineProperty(exports, "resolveShowConfigPath", {
|
||||
enumerable: true,
|
||||
get: function () {
|
||||
return _configuration.resolveShowConfigPath;
|
||||
}
|
||||
});
|
||||
var _package = require("./package.js");
|
||||
var _configuration = require("./configuration.js");
|
||||
var _plugins = require("./plugins.js");
|
||||
({});
|
||||
0 && 0;
|
||||
|
||||
//# sourceMappingURL=index.js.map
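The getter-based re-exports above keep each export live: reading the property always reflects the current value on the source module, which matters under circular requires. The same shape, reduced to a single export (hedged sketch):

var _configuration = require("./configuration.js");
Object.defineProperty(exports, "loadConfig", {
  enumerable: true,
  get: function () {
    // Evaluated on every access, so it stays correct even if
    // configuration.js had not finished initializing when this ran.
    return _configuration.loadConfig;
  }
});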
1
node_modules/@babel/core/lib/config/files/index.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
{"version":3,"names":["_package","require","_configuration","_plugins"],"sources":["../../../src/config/files/index.ts"],"sourcesContent":["type indexBrowserType = typeof import(\"./index-browser\");\ntype indexType = typeof import(\"./index\");\n\n// Kind of gross, but essentially asserting that the exports of this module are the same as the\n// exports of index-browser, since this file may be replaced at bundle time with index-browser.\n({}) as any as indexBrowserType as indexType;\n\nexport { findPackageData } from \"./package.ts\";\n\nexport {\n findConfigUpwards,\n findRelativeConfig,\n findRootConfig,\n loadConfig,\n resolveShowConfigPath,\n ROOT_CONFIG_FILENAMES,\n} from \"./configuration.ts\";\nexport type {\n ConfigFile,\n IgnoreFile,\n RelativeConfig,\n FilePackageData,\n} from \"./types.ts\";\nexport {\n loadPlugin,\n loadPreset,\n resolvePlugin,\n resolvePreset,\n} from \"./plugins.ts\";\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAOA,IAAAA,QAAA,GAAAC,OAAA;AAEA,IAAAC,cAAA,GAAAD,OAAA;AAcA,IAAAE,QAAA,GAAAF,OAAA;AAlBA,CAAC,CAAC,CAAC;AAA0C","ignoreList":[]}
206
node_modules/@babel/core/lib/config/files/module-types.js
generated
vendored
Normal file
@ -0,0 +1,206 @@
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.default = loadCodeDefault;
|
||||
exports.supportsESM = void 0;
|
||||
var _async = require("../../gensync-utils/async.js");
|
||||
function _path() {
|
||||
const data = require("path");
|
||||
_path = function () {
|
||||
return data;
|
||||
};
|
||||
return data;
|
||||
}
|
||||
function _url() {
|
||||
const data = require("url");
|
||||
_url = function () {
|
||||
return data;
|
||||
};
|
||||
return data;
|
||||
}
|
||||
require("module");
|
||||
function _semver() {
|
||||
const data = require("semver");
|
||||
_semver = function () {
|
||||
return data;
|
||||
};
|
||||
return data;
|
||||
}
|
||||
function _debug() {
|
||||
const data = require("debug");
|
||||
_debug = function () {
|
||||
return data;
|
||||
};
|
||||
return data;
|
||||
}
|
||||
var _rewriteStackTrace = require("../../errors/rewrite-stack-trace.js");
|
||||
var _configError = require("../../errors/config-error.js");
|
||||
var _transformFile = require("../../transform-file.js");
|
||||
function asyncGeneratorStep(n, t, e, r, o, a, c) { try { var i = n[a](c), u = i.value; } catch (n) { return void e(n); } i.done ? t(u) : Promise.resolve(u).then(r, o); }
|
||||
function _asyncToGenerator(n) { return function () { var t = this, e = arguments; return new Promise(function (r, o) { var a = n.apply(t, e); function _next(n) { asyncGeneratorStep(a, r, o, _next, _throw, "next", n); } function _throw(n) { asyncGeneratorStep(a, r, o, _next, _throw, "throw", n); } _next(void 0); }); }; }
|
||||
const debug = _debug()("babel:config:loading:files:module-types");
|
||||
{
|
||||
try {
|
||||
var import_ = require("./import.cjs");
|
||||
} catch (_unused) {}
|
||||
}
|
||||
const supportsESM = exports.supportsESM = _semver().satisfies(process.versions.node, "^12.17 || >=13.2");
|
||||
const LOADING_CJS_FILES = new Set();
|
||||
function loadCjsDefault(filepath) {
|
||||
if (LOADING_CJS_FILES.has(filepath)) {
|
||||
debug("Auto-ignoring usage of config %o.", filepath);
|
||||
return {};
|
||||
}
|
||||
let module;
|
||||
try {
|
||||
LOADING_CJS_FILES.add(filepath);
|
||||
module = (0, _rewriteStackTrace.endHiddenCallStack)(require)(filepath);
|
||||
} finally {
|
||||
LOADING_CJS_FILES.delete(filepath);
|
||||
}
|
||||
{
|
||||
return module != null && (module.__esModule || module[Symbol.toStringTag] === "Module") ? module.default || (arguments[1] ? module : undefined) : module;
|
||||
}
|
||||
}
|
||||
const loadMjsFromPath = (0, _rewriteStackTrace.endHiddenCallStack)(function () {
|
||||
var _loadMjsFromPath = _asyncToGenerator(function* (filepath) {
|
||||
const url = (0, _url().pathToFileURL)(filepath).toString() + "?import";
|
||||
{
|
||||
if (!import_) {
|
||||
throw new _configError.default("Internal error: Native ECMAScript modules aren't supported by this platform.\n", filepath);
|
||||
}
|
||||
return yield import_(url);
|
||||
}
|
||||
});
|
||||
function loadMjsFromPath(_x) {
|
||||
return _loadMjsFromPath.apply(this, arguments);
|
||||
}
|
||||
return loadMjsFromPath;
|
||||
}());
|
||||
const SUPPORTED_EXTENSIONS = {
|
||||
".js": "unknown",
|
||||
".mjs": "esm",
|
||||
".cjs": "cjs",
|
||||
".ts": "unknown",
|
||||
".mts": "esm",
|
||||
".cts": "cjs"
|
||||
};
|
||||
const asyncModules = new Set();
|
||||
function* loadCodeDefault(filepath, loader, esmError, tlaError) {
|
||||
let async;
|
||||
const ext = _path().extname(filepath);
|
||||
const isTS = ext === ".ts" || ext === ".cts" || ext === ".mts";
|
||||
const type = SUPPORTED_EXTENSIONS[hasOwnProperty.call(SUPPORTED_EXTENSIONS, ext) ? ext : ".js"];
|
||||
const pattern = `${loader} ${type}`;
|
||||
switch (pattern) {
|
||||
case "require cjs":
|
||||
case "auto cjs":
|
||||
if (isTS) {
|
||||
return ensureTsSupport(filepath, ext, () => loadCjsDefault(filepath));
|
||||
} else {
|
||||
return loadCjsDefault(filepath, arguments[2]);
|
||||
}
|
||||
case "auto unknown":
|
||||
case "require unknown":
|
||||
case "require esm":
|
||||
try {
|
||||
if (isTS) {
|
||||
return ensureTsSupport(filepath, ext, () => loadCjsDefault(filepath));
|
||||
} else {
|
||||
return loadCjsDefault(filepath, arguments[2]);
|
||||
}
|
||||
} catch (e) {
|
||||
if (e.code === "ERR_REQUIRE_ASYNC_MODULE" || e.code === "ERR_REQUIRE_CYCLE_MODULE" && asyncModules.has(filepath)) {
|
||||
asyncModules.add(filepath);
|
||||
if (!(async != null ? async : async = yield* (0, _async.isAsync)())) {
|
||||
throw new _configError.default(tlaError, filepath);
|
||||
}
|
||||
} else if (e.code === "ERR_REQUIRE_ESM" || type === "esm") {} else {
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
case "auto esm":
|
||||
if (async != null ? async : async = yield* (0, _async.isAsync)()) {
|
||||
const promise = isTS ? ensureTsSupport(filepath, ext, () => loadMjsFromPath(filepath)) : loadMjsFromPath(filepath);
|
||||
return (yield* (0, _async.waitFor)(promise)).default;
|
||||
}
|
||||
throw new _configError.default(esmError, filepath);
|
||||
default:
|
||||
throw new Error("Internal Babel error: unreachable code.");
|
||||
}
|
||||
}
|
||||
function ensureTsSupport(filepath, ext, callback) {
|
||||
if (require.extensions[".ts"] || require.extensions[".cts"] || require.extensions[".mts"]) {
|
||||
return callback();
|
||||
}
|
||||
if (ext !== ".cts") {
|
||||
throw new _configError.default(`\
|
||||
You are using a ${ext} config file, but Babel only supports transpiling .cts configs. Either:
|
||||
- Use a .cts config file
|
||||
- Update to Node.js 23.6.0, which has native TypeScript support
|
||||
- Install ts-node to transpile ${ext} files on the fly\
|
||||
`, filepath);
|
||||
}
|
||||
const opts = {
|
||||
babelrc: false,
|
||||
configFile: false,
|
||||
sourceType: "unambiguous",
|
||||
sourceMaps: "inline",
|
||||
sourceFileName: _path().basename(filepath),
|
||||
presets: [[getTSPreset(filepath), Object.assign({
|
||||
onlyRemoveTypeImports: true,
|
||||
optimizeConstEnums: true
|
||||
}, {
|
||||
allowDeclareFields: true
|
||||
})]]
|
||||
};
|
||||
let handler = function (m, filename) {
|
||||
if (handler && filename.endsWith(".cts")) {
|
||||
try {
|
||||
return m._compile((0, _transformFile.transformFileSync)(filename, Object.assign({}, opts, {
|
||||
filename
|
||||
})).code, filename);
|
||||
} catch (error) {
|
||||
const packageJson = require("@babel/preset-typescript/package.json");
|
||||
if (_semver().lt(packageJson.version, "7.21.4")) {
|
||||
console.error("`.cts` configuration file failed to load, please try to update `@babel/preset-typescript`.");
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
return require.extensions[".js"](m, filename);
|
||||
};
|
||||
require.extensions[ext] = handler;
|
||||
try {
|
||||
return callback();
|
||||
} finally {
|
||||
if (require.extensions[ext] === handler) delete require.extensions[ext];
|
||||
handler = undefined;
|
||||
}
|
||||
}
|
||||
function getTSPreset(filepath) {
|
||||
try {
|
||||
return require("@babel/preset-typescript");
|
||||
} catch (error) {
|
||||
if (error.code !== "MODULE_NOT_FOUND") throw error;
|
||||
let message = "You appear to be using a .cts file as Babel configuration, but the `@babel/preset-typescript` package was not found: please install it!";
|
||||
{
|
||||
if (process.versions.pnp) {
|
||||
message += `
|
||||
If you are using Yarn Plug'n'Play, you may also need to add the following configuration to your .yarnrc.yml file:
|
||||
|
||||
packageExtensions:
|
||||
\t"@babel/core@*":
|
||||
\t\tpeerDependencies:
|
||||
\t\t\t"@babel/preset-typescript": "*"
|
||||
`;
|
||||
}
|
||||
}
|
||||
throw new _configError.default(message, filepath);
|
||||
}
|
||||
}
|
||||
0 && 0;
|
||||
|
||||
//# sourceMappingURL=module-types.js.map
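loadCodeDefault is a generator because these files run on gensync: the same generator body executes synchronously or asynchronously depending on how the caller entered it (full.js below wraps its entry point with _gensync() the same way). A minimal hedged sketch of that duality, with a trivial body standing in for the real file reads:

const gensync = require("gensync");
const getConfig = gensync(function* () {
  // In the real files, yield* delegates to sync-or-async primitives
  // (fs.readFile, isAsync, waitFor); here we just return a value.
  return { ok: true };
});
getConfig.sync();                     // fully synchronous execution
getConfig.async().then(console.log); // same generator, promise-based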
1
node_modules/@babel/core/lib/config/files/module-types.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
61
node_modules/@babel/core/lib/config/files/package.js
generated
vendored
Normal file
@ -0,0 +1,61 @@
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.findPackageData = findPackageData;
|
||||
function _path() {
|
||||
const data = require("path");
|
||||
_path = function () {
|
||||
return data;
|
||||
};
|
||||
return data;
|
||||
}
|
||||
var _utils = require("./utils.js");
|
||||
var _configError = require("../../errors/config-error.js");
|
||||
const PACKAGE_FILENAME = "package.json";
|
||||
const readConfigPackage = (0, _utils.makeStaticFileCache)((filepath, content) => {
|
||||
let options;
|
||||
try {
|
||||
options = JSON.parse(content);
|
||||
} catch (err) {
|
||||
throw new _configError.default(`Error while parsing JSON - ${err.message}`, filepath);
|
||||
}
|
||||
if (!options) throw new Error(`${filepath}: No config detected`);
|
||||
if (typeof options !== "object") {
|
||||
throw new _configError.default(`Config returned typeof ${typeof options}`, filepath);
|
||||
}
|
||||
if (Array.isArray(options)) {
|
||||
throw new _configError.default(`Expected config object but found array`, filepath);
|
||||
}
|
||||
return {
|
||||
filepath,
|
||||
dirname: _path().dirname(filepath),
|
||||
options
|
||||
};
|
||||
});
|
||||
function* findPackageData(filepath) {
|
||||
let pkg = null;
|
||||
const directories = [];
|
||||
let isPackage = true;
|
||||
let dirname = _path().dirname(filepath);
|
||||
while (!pkg && _path().basename(dirname) !== "node_modules") {
|
||||
directories.push(dirname);
|
||||
pkg = yield* readConfigPackage(_path().join(dirname, PACKAGE_FILENAME));
|
||||
const nextLoc = _path().dirname(dirname);
|
||||
if (dirname === nextLoc) {
|
||||
isPackage = false;
|
||||
break;
|
||||
}
|
||||
dirname = nextLoc;
|
||||
}
|
||||
return {
|
||||
filepath,
|
||||
directories,
|
||||
pkg,
|
||||
isPackage
|
||||
};
|
||||
}
|
||||
0 && 0;
|
||||
|
||||
//# sourceMappingURL=package.js.map
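findPackageData's loop is the standard upward directory walk, stopping at the filesystem root (where path.dirname returns its input unchanged) or at a node_modules boundary. The bare idiom, in isolation (hedged sketch; walkUp is a hypothetical name):

const path = require("path");
function* walkUp(from) {
  let dir = from;
  for (;;) {
    yield dir;
    const parent = path.dirname(dir);
    if (parent === dir) return; // filesystem root reached
    dir = parent;
  }
}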
1
node_modules/@babel/core/lib/config/files/package.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
{"version":3,"names":["_path","data","require","_utils","_configError","PACKAGE_FILENAME","readConfigPackage","makeStaticFileCache","filepath","content","options","JSON","parse","err","ConfigError","message","Error","Array","isArray","dirname","path","findPackageData","pkg","directories","isPackage","basename","push","join","nextLoc"],"sources":["../../../src/config/files/package.ts"],"sourcesContent":["import path from \"path\";\nimport type { Handler } from \"gensync\";\nimport { makeStaticFileCache } from \"./utils.ts\";\n\nimport type { ConfigFile, FilePackageData } from \"./types.ts\";\n\nimport ConfigError from \"../../errors/config-error.ts\";\n\nconst PACKAGE_FILENAME = \"package.json\";\n\nconst readConfigPackage = makeStaticFileCache(\n (filepath, content): ConfigFile => {\n let options;\n try {\n options = JSON.parse(content) as unknown;\n } catch (err) {\n throw new ConfigError(\n `Error while parsing JSON - ${err.message}`,\n filepath,\n );\n }\n\n if (!options) throw new Error(`${filepath}: No config detected`);\n\n if (typeof options !== \"object\") {\n throw new ConfigError(\n `Config returned typeof ${typeof options}`,\n filepath,\n );\n }\n if (Array.isArray(options)) {\n throw new ConfigError(`Expected config object but found array`, filepath);\n }\n\n return {\n filepath,\n dirname: path.dirname(filepath),\n options,\n };\n },\n);\n\n/**\n * Find metadata about the package that this file is inside of. Resolution\n * of Babel's config requires general package information to decide when to\n * search for .babelrc files\n */\nexport function* findPackageData(filepath: string): Handler<FilePackageData> {\n let pkg = null;\n const directories = [];\n let isPackage = true;\n\n let dirname = path.dirname(filepath);\n while (!pkg && path.basename(dirname) !== \"node_modules\") {\n directories.push(dirname);\n\n pkg = yield* readConfigPackage(path.join(dirname, PACKAGE_FILENAME));\n\n const nextLoc = path.dirname(dirname);\n if (dirname === nextLoc) {\n isPackage = false;\n break;\n }\n dirname = nextLoc;\n }\n\n return { filepath, directories, pkg, isPackage 
};\n}\n"],"mappings":";;;;;;AAAA,SAAAA,MAAA;EAAA,MAAAC,IAAA,GAAAC,OAAA;EAAAF,KAAA,YAAAA,CAAA;IAAA,OAAAC,IAAA;EAAA;EAAA,OAAAA,IAAA;AAAA;AAEA,IAAAE,MAAA,GAAAD,OAAA;AAIA,IAAAE,YAAA,GAAAF,OAAA;AAEA,MAAMG,gBAAgB,GAAG,cAAc;AAEvC,MAAMC,iBAAiB,GAAG,IAAAC,0BAAmB,EAC3C,CAACC,QAAQ,EAAEC,OAAO,KAAiB;EACjC,IAAIC,OAAO;EACX,IAAI;IACFA,OAAO,GAAGC,IAAI,CAACC,KAAK,CAACH,OAAO,CAAY;EAC1C,CAAC,CAAC,OAAOI,GAAG,EAAE;IACZ,MAAM,IAAIC,oBAAW,CACnB,8BAA8BD,GAAG,CAACE,OAAO,EAAE,EAC3CP,QACF,CAAC;EACH;EAEA,IAAI,CAACE,OAAO,EAAE,MAAM,IAAIM,KAAK,CAAC,GAAGR,QAAQ,sBAAsB,CAAC;EAEhE,IAAI,OAAOE,OAAO,KAAK,QAAQ,EAAE;IAC/B,MAAM,IAAII,oBAAW,CACnB,0BAA0B,OAAOJ,OAAO,EAAE,EAC1CF,QACF,CAAC;EACH;EACA,IAAIS,KAAK,CAACC,OAAO,CAACR,OAAO,CAAC,EAAE;IAC1B,MAAM,IAAII,oBAAW,CAAC,wCAAwC,EAAEN,QAAQ,CAAC;EAC3E;EAEA,OAAO;IACLA,QAAQ;IACRW,OAAO,EAAEC,MAAGA,CAAC,CAACD,OAAO,CAACX,QAAQ,CAAC;IAC/BE;EACF,CAAC;AACH,CACF,CAAC;AAOM,UAAUW,eAAeA,CAACb,QAAgB,EAA4B;EAC3E,IAAIc,GAAG,GAAG,IAAI;EACd,MAAMC,WAAW,GAAG,EAAE;EACtB,IAAIC,SAAS,GAAG,IAAI;EAEpB,IAAIL,OAAO,GAAGC,MAAGA,CAAC,CAACD,OAAO,CAACX,QAAQ,CAAC;EACpC,OAAO,CAACc,GAAG,IAAIF,MAAGA,CAAC,CAACK,QAAQ,CAACN,OAAO,CAAC,KAAK,cAAc,EAAE;IACxDI,WAAW,CAACG,IAAI,CAACP,OAAO,CAAC;IAEzBG,GAAG,GAAG,OAAOhB,iBAAiB,CAACc,MAAGA,CAAC,CAACO,IAAI,CAACR,OAAO,EAAEd,gBAAgB,CAAC,CAAC;IAEpE,MAAMuB,OAAO,GAAGR,MAAGA,CAAC,CAACD,OAAO,CAACA,OAAO,CAAC;IACrC,IAAIA,OAAO,KAAKS,OAAO,EAAE;MACvBJ,SAAS,GAAG,KAAK;MACjB;IACF;IACAL,OAAO,GAAGS,OAAO;EACnB;EAEA,OAAO;IAAEpB,QAAQ;IAAEe,WAAW;IAAED,GAAG;IAAEE;EAAU,CAAC;AAClD;AAAC","ignoreList":[]}
230
node_modules/@babel/core/lib/config/files/plugins.js
generated
vendored
Normal file
@ -0,0 +1,230 @@
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.loadPlugin = loadPlugin;
|
||||
exports.loadPreset = loadPreset;
|
||||
exports.resolvePreset = exports.resolvePlugin = void 0;
|
||||
function _debug() {
|
||||
const data = require("debug");
|
||||
_debug = function () {
|
||||
return data;
|
||||
};
|
||||
return data;
|
||||
}
|
||||
function _path() {
|
||||
const data = require("path");
|
||||
_path = function () {
|
||||
return data;
|
||||
};
|
||||
return data;
|
||||
}
|
||||
var _async = require("../../gensync-utils/async.js");
|
||||
var _moduleTypes = require("./module-types.js");
|
||||
function _url() {
|
||||
const data = require("url");
|
||||
_url = function () {
|
||||
return data;
|
||||
};
|
||||
return data;
|
||||
}
|
||||
var _importMetaResolve = require("../../vendor/import-meta-resolve.js");
|
||||
require("module");
|
||||
function _fs() {
|
||||
const data = require("fs");
|
||||
_fs = function () {
|
||||
return data;
|
||||
};
|
||||
return data;
|
||||
}
|
||||
const debug = _debug()("babel:config:loading:files:plugins");
|
||||
const EXACT_RE = /^module:/;
|
||||
const BABEL_PLUGIN_PREFIX_RE = /^(?!@|module:|[^/]+\/|babel-plugin-)/;
|
||||
const BABEL_PRESET_PREFIX_RE = /^(?!@|module:|[^/]+\/|babel-preset-)/;
|
||||
const BABEL_PLUGIN_ORG_RE = /^(@babel\/)(?!plugin-|[^/]+\/)/;
|
||||
const BABEL_PRESET_ORG_RE = /^(@babel\/)(?!preset-|[^/]+\/)/;
|
||||
const OTHER_PLUGIN_ORG_RE = /^(@(?!babel\/)[^/]+\/)(?![^/]*babel-plugin(?:-|\/|$)|[^/]+\/)/;
|
||||
const OTHER_PRESET_ORG_RE = /^(@(?!babel\/)[^/]+\/)(?![^/]*babel-preset(?:-|\/|$)|[^/]+\/)/;
|
||||
const OTHER_ORG_DEFAULT_RE = /^(@(?!babel$)[^/]+)$/;
|
||||
const resolvePlugin = exports.resolvePlugin = resolveStandardizedName.bind(null, "plugin");
|
||||
const resolvePreset = exports.resolvePreset = resolveStandardizedName.bind(null, "preset");
|
||||
function* loadPlugin(name, dirname) {
|
||||
const {
|
||||
filepath,
|
||||
loader
|
||||
} = resolvePlugin(name, dirname, yield* (0, _async.isAsync)());
|
||||
const value = yield* requireModule("plugin", loader, filepath);
|
||||
debug("Loaded plugin %o from %o.", name, dirname);
|
||||
return {
|
||||
filepath,
|
||||
value
|
||||
};
|
||||
}
|
||||
function* loadPreset(name, dirname) {
|
||||
const {
|
||||
filepath,
|
||||
loader
|
||||
} = resolvePreset(name, dirname, yield* (0, _async.isAsync)());
|
||||
const value = yield* requireModule("preset", loader, filepath);
|
||||
debug("Loaded preset %o from %o.", name, dirname);
|
||||
return {
|
||||
filepath,
|
||||
value
|
||||
};
|
||||
}
|
||||
function standardizeName(type, name) {
|
||||
if (_path().isAbsolute(name)) return name;
|
||||
const isPreset = type === "preset";
|
||||
return name.replace(isPreset ? BABEL_PRESET_PREFIX_RE : BABEL_PLUGIN_PREFIX_RE, `babel-${type}-`).replace(isPreset ? BABEL_PRESET_ORG_RE : BABEL_PLUGIN_ORG_RE, `$1${type}-`).replace(isPreset ? OTHER_PRESET_ORG_RE : OTHER_PLUGIN_ORG_RE, `$1babel-${type}-`).replace(OTHER_ORG_DEFAULT_RE, `$1/babel-${type}`).replace(EXACT_RE, "");
|
||||
}
|
||||
function* resolveAlternativesHelper(type, name) {
|
||||
const standardizedName = standardizeName(type, name);
|
||||
const {
|
||||
error,
|
||||
value
|
||||
} = yield standardizedName;
|
||||
if (!error) return value;
|
||||
if (error.code !== "MODULE_NOT_FOUND") throw error;
|
||||
if (standardizedName !== name && !(yield name).error) {
|
||||
error.message += `\n- If you want to resolve "${name}", use "module:${name}"`;
|
||||
}
|
||||
if (!(yield standardizeName(type, "@babel/" + name)).error) {
|
||||
error.message += `\n- Did you mean "@babel/${name}"?`;
|
||||
}
|
||||
const oppositeType = type === "preset" ? "plugin" : "preset";
|
||||
if (!(yield standardizeName(oppositeType, name)).error) {
|
||||
error.message += `\n- Did you accidentally pass a ${oppositeType} as a ${type}?`;
|
||||
}
|
||||
if (type === "plugin") {
|
||||
const transformName = standardizedName.replace("-proposal-", "-transform-");
|
||||
if (transformName !== standardizedName && !(yield transformName).error) {
|
||||
error.message += `\n- Did you mean "${transformName}"?`;
|
||||
}
|
||||
}
|
||||
error.message += `\n
|
||||
Make sure that all the Babel plugins and presets you are using
|
||||
are defined as dependencies or devDependencies in your package.json
|
||||
file. It's possible that the missing plugin is loaded by a preset
|
||||
you are using that forgot to add the plugin to its dependencies: you
|
||||
can workaround this problem by explicitly adding the missing package
|
||||
to your top-level package.json.
|
||||
`;
|
||||
throw error;
|
||||
}
|
||||
function tryRequireResolve(id, dirname) {
|
||||
try {
|
||||
if (dirname) {
|
||||
return {
|
||||
error: null,
|
||||
value: (((v, w) => (v = v.split("."), w = w.split("."), +v[0] > +w[0] || v[0] == w[0] && +v[1] >= +w[1]))(process.versions.node, "8.9") ? require.resolve : (r, {
|
||||
paths: [b]
|
||||
}, M = require("module")) => {
|
||||
let f = M._findPath(r, M._nodeModulePaths(b).concat(b));
|
||||
if (f) return f;
|
||||
f = new Error(`Cannot resolve module '${r}'`);
|
||||
f.code = "MODULE_NOT_FOUND";
|
||||
throw f;
|
||||
})(id, {
|
||||
paths: [dirname]
|
||||
})
|
||||
};
|
||||
} else {
|
||||
return {
|
||||
error: null,
|
||||
value: require.resolve(id)
|
||||
};
|
||||
}
|
||||
} catch (error) {
|
||||
return {
|
||||
error,
|
||||
value: null
|
||||
};
|
||||
}
|
||||
}
|
||||
function tryImportMetaResolve(id, options) {
|
||||
try {
|
||||
return {
|
||||
error: null,
|
||||
value: (0, _importMetaResolve.resolve)(id, options)
|
||||
};
|
||||
} catch (error) {
|
||||
return {
|
||||
error,
|
||||
value: null
|
||||
};
|
||||
}
|
||||
}
|
||||
function resolveStandardizedNameForRequire(type, name, dirname) {
|
||||
const it = resolveAlternativesHelper(type, name);
|
||||
let res = it.next();
|
||||
while (!res.done) {
|
||||
res = it.next(tryRequireResolve(res.value, dirname));
|
||||
}
|
||||
return {
|
||||
loader: "require",
|
||||
filepath: res.value
|
||||
};
|
||||
}
|
||||
function resolveStandardizedNameForImport(type, name, dirname) {
|
||||
const parentUrl = (0, _url().pathToFileURL)(_path().join(dirname, "./babel-virtual-resolve-base.js")).href;
|
||||
const it = resolveAlternativesHelper(type, name);
|
||||
let res = it.next();
|
||||
while (!res.done) {
|
||||
res = it.next(tryImportMetaResolve(res.value, parentUrl));
|
||||
}
|
||||
return {
|
||||
loader: "auto",
|
||||
filepath: (0, _url().fileURLToPath)(res.value)
|
||||
};
|
||||
}
|
||||
function resolveStandardizedName(type, name, dirname, allowAsync) {
|
||||
if (!_moduleTypes.supportsESM || !allowAsync) {
|
||||
return resolveStandardizedNameForRequire(type, name, dirname);
|
||||
}
|
||||
try {
|
||||
const resolved = resolveStandardizedNameForImport(type, name, dirname);
|
||||
if (!(0, _fs().existsSync)(resolved.filepath)) {
|
||||
throw Object.assign(new Error(`Could not resolve "${name}" in file ${dirname}.`), {
|
||||
type: "MODULE_NOT_FOUND"
|
||||
});
|
||||
}
|
||||
return resolved;
|
||||
} catch (e) {
|
||||
try {
|
||||
return resolveStandardizedNameForRequire(type, name, dirname);
|
||||
} catch (e2) {
|
||||
if (e.type === "MODULE_NOT_FOUND") throw e;
|
||||
if (e2.type === "MODULE_NOT_FOUND") throw e2;
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
}
|
||||
{
|
||||
var LOADING_MODULES = new Set();
|
||||
}
|
||||
function* requireModule(type, loader, name) {
|
||||
{
|
||||
if (!(yield* (0, _async.isAsync)()) && LOADING_MODULES.has(name)) {
|
||||
throw new Error(`Reentrant ${type} detected trying to load "${name}". This module is not ignored ` + "and is trying to load itself while compiling itself, leading to a dependency cycle. " + 'We recommend adding it to your "ignore" list in your babelrc, or to a .babelignore.');
|
||||
}
|
||||
}
|
||||
try {
|
||||
{
|
||||
LOADING_MODULES.add(name);
|
||||
}
|
||||
{
|
||||
return yield* (0, _moduleTypes.default)(name, loader, `You appear to be using a native ECMAScript module ${type}, ` + "which is only supported when running Babel asynchronously " + "or when using the Node.js `--experimental-require-module` flag.", `You appear to be using a ${type} that contains top-level await, ` + "which is only supported when running Babel asynchronously.", true);
|
||||
}
|
||||
} catch (err) {
|
||||
err.message = `[BABEL]: ${err.message} (While processing: ${name})`;
|
||||
throw err;
|
||||
} finally {
|
||||
{
|
||||
LOADING_MODULES.delete(name);
|
||||
}
|
||||
}
|
||||
}
|
||||
0 && 0;
|
||||
|
||||
//# sourceMappingURL=plugins.js.map
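standardizeName is what turns shorthand entries in a config into resolvable package names. The two preset regexes from the file, exercised directly (hedged examples derived from those regexes, not an exhaustive rule list):

const BABEL_PRESET_PREFIX_RE = /^(?!@|module:|[^/]+\/|babel-preset-)/;
const BABEL_PRESET_ORG_RE = /^(@babel\/)(?!preset-|[^/]+\/)/;
console.log("env".replace(BABEL_PRESET_PREFIX_RE, "babel-preset-"));
// -> "babel-preset-env"
console.log("@babel/env".replace(BABEL_PRESET_ORG_RE, "$1preset-"));
// -> "@babel/preset-env"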
1
node_modules/@babel/core/lib/config/files/plugins.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
3
node_modules/@babel/core/lib/config/files/types.js
generated
vendored
Normal file
@ -0,0 +1,3 @@
0 && 0;

//# sourceMappingURL=types.js.map
1
node_modules/@babel/core/lib/config/files/types.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
{"version":3,"names":[],"sources":["../../../src/config/files/types.ts"],"sourcesContent":["import type { InputOptions } from \"../index.ts\";\n\nexport type ConfigFile = {\n filepath: string;\n dirname: string;\n options: InputOptions & { babel?: unknown };\n};\n\nexport type IgnoreFile = {\n filepath: string;\n dirname: string;\n ignore: Array<RegExp>;\n};\n\nexport type RelativeConfig = {\n // The actual config, either from package.json#babel, .babelrc, or\n // .babelrc.js, if there was one.\n config: ConfigFile | null;\n // The .babelignore, if there was one.\n ignore: IgnoreFile | null;\n};\n\nexport type FilePackageData = {\n // The file in the package.\n filepath: string;\n // Any ancestor directories of the file that are within the package.\n directories: Array<string>;\n // The contents of the package.json. May not be found if the package just\n // terminated at a node_modules folder without finding one.\n pkg: ConfigFile | null;\n // True if a package.json or node_modules folder was found while traversing\n // the directory structure.\n isPackage: boolean;\n};\n"],"mappings":"","ignoreList":[]}
36
node_modules/@babel/core/lib/config/files/utils.js
generated
vendored
Normal file
@ -0,0 +1,36 @@
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.makeStaticFileCache = makeStaticFileCache;
|
||||
var _caching = require("../caching.js");
|
||||
var fs = require("../../gensync-utils/fs.js");
|
||||
function _fs2() {
|
||||
const data = require("fs");
|
||||
_fs2 = function () {
|
||||
return data;
|
||||
};
|
||||
return data;
|
||||
}
|
||||
function makeStaticFileCache(fn) {
|
||||
return (0, _caching.makeStrongCache)(function* (filepath, cache) {
|
||||
const cached = cache.invalidate(() => fileMtime(filepath));
|
||||
if (cached === null) {
|
||||
return null;
|
||||
}
|
||||
return fn(filepath, yield* fs.readFile(filepath, "utf8"));
|
||||
});
|
||||
}
|
||||
function fileMtime(filepath) {
|
||||
if (!_fs2().existsSync(filepath)) return null;
|
||||
try {
|
||||
return +_fs2().statSync(filepath).mtime;
|
||||
} catch (e) {
|
||||
if (e.code !== "ENOENT" && e.code !== "ENOTDIR") throw e;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
0 && 0;
|
||||
|
||||
//# sourceMappingURL=utils.js.map
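makeStaticFileCache keys its cache on the file's mtime, so edits on disk invalidate stale entries automatically. The same idea as a plain synchronous memoizer, stripped of the gensync and caching machinery (hedged sketch; readCached is a hypothetical name):

const fs = require("fs");
const cache = new Map();
function readCached(file) {
  const mtime = fs.existsSync(file) ? +fs.statSync(file).mtime : null;
  const hit = cache.get(file);
  if (hit && hit.mtime === mtime) return hit.value; // unchanged since last read
  const value = mtime === null ? null : fs.readFileSync(file, "utf8");
  cache.set(file, { mtime, value });
  return value;
}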
1
node_modules/@babel/core/lib/config/files/utils.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
{"version":3,"names":["_caching","require","fs","_fs2","data","makeStaticFileCache","fn","makeStrongCache","filepath","cache","cached","invalidate","fileMtime","readFile","nodeFs","existsSync","statSync","mtime","e","code"],"sources":["../../../src/config/files/utils.ts"],"sourcesContent":["import type { Handler } from \"gensync\";\n\nimport { makeStrongCache } from \"../caching.ts\";\nimport type { CacheConfigurator } from \"../caching.ts\";\nimport * as fs from \"../../gensync-utils/fs.ts\";\nimport nodeFs from \"fs\";\n\nexport function makeStaticFileCache<T>(\n fn: (filepath: string, contents: string) => T,\n) {\n return makeStrongCache(function* (\n filepath: string,\n cache: CacheConfigurator<void>,\n ): Handler<null | T> {\n const cached = cache.invalidate(() => fileMtime(filepath));\n\n if (cached === null) {\n return null;\n }\n\n return fn(filepath, yield* fs.readFile(filepath, \"utf8\"));\n });\n}\n\nfunction fileMtime(filepath: string): number | null {\n if (!nodeFs.existsSync(filepath)) return null;\n\n try {\n return +nodeFs.statSync(filepath).mtime;\n } catch (e) {\n if (e.code !== \"ENOENT\" && e.code !== \"ENOTDIR\") throw e;\n }\n\n return null;\n}\n"],"mappings":";;;;;;AAEA,IAAAA,QAAA,GAAAC,OAAA;AAEA,IAAAC,EAAA,GAAAD,OAAA;AACA,SAAAE,KAAA;EAAA,MAAAC,IAAA,GAAAH,OAAA;EAAAE,IAAA,YAAAA,CAAA;IAAA,OAAAC,IAAA;EAAA;EAAA,OAAAA,IAAA;AAAA;AAEO,SAASC,mBAAmBA,CACjCC,EAA6C,EAC7C;EACA,OAAO,IAAAC,wBAAe,EAAC,WACrBC,QAAgB,EAChBC,KAA8B,EACX;IACnB,MAAMC,MAAM,GAAGD,KAAK,CAACE,UAAU,CAAC,MAAMC,SAAS,CAACJ,QAAQ,CAAC,CAAC;IAE1D,IAAIE,MAAM,KAAK,IAAI,EAAE;MACnB,OAAO,IAAI;IACb;IAEA,OAAOJ,EAAE,CAACE,QAAQ,EAAE,OAAON,EAAE,CAACW,QAAQ,CAACL,QAAQ,EAAE,MAAM,CAAC,CAAC;EAC3D,CAAC,CAAC;AACJ;AAEA,SAASI,SAASA,CAACJ,QAAgB,EAAiB;EAClD,IAAI,CAACM,KAAKA,CAAC,CAACC,UAAU,CAACP,QAAQ,CAAC,EAAE,OAAO,IAAI;EAE7C,IAAI;IACF,OAAO,CAACM,KAAKA,CAAC,CAACE,QAAQ,CAACR,QAAQ,CAAC,CAACS,KAAK;EACzC,CAAC,CAAC,OAAOC,CAAC,EAAE;IACV,IAAIA,CAAC,CAACC,IAAI,KAAK,QAAQ,IAAID,CAAC,CAACC,IAAI,KAAK,SAAS,EAAE,MAAMD,CAAC;EAC1D;EAEA,OAAO,IAAI;AACb;AAAC","ignoreList":[]}
312
node_modules/@babel/core/lib/config/full.js
generated
vendored
Normal file
@ -0,0 +1,312 @@
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.default = void 0;
|
||||
function _gensync() {
|
||||
const data = require("gensync");
|
||||
_gensync = function () {
|
||||
return data;
|
||||
};
|
||||
return data;
|
||||
}
|
||||
var _async = require("../gensync-utils/async.js");
|
||||
var _util = require("./util.js");
|
||||
var context = require("../index.js");
|
||||
var _plugin = require("./plugin.js");
|
||||
var _item = require("./item.js");
|
||||
var _configChain = require("./config-chain.js");
|
||||
var _deepArray = require("./helpers/deep-array.js");
|
||||
function _traverse() {
|
||||
const data = require("@babel/traverse");
|
||||
_traverse = function () {
|
||||
return data;
|
||||
};
|
||||
return data;
|
||||
}
|
||||
var _caching = require("./caching.js");
|
||||
var _options = require("./validation/options.js");
|
||||
var _plugins = require("./validation/plugins.js");
|
||||
var _configApi = require("./helpers/config-api.js");
|
||||
var _partial = require("./partial.js");
|
||||
var _configError = require("../errors/config-error.js");
|
||||
var _default = exports.default = _gensync()(function* loadFullConfig(inputOpts) {
|
||||
var _opts$assumptions;
|
||||
const result = yield* (0, _partial.default)(inputOpts);
|
||||
if (!result) {
|
||||
return null;
|
||||
}
|
||||
const {
|
||||
options,
|
||||
context,
|
||||
fileHandling
|
||||
} = result;
|
||||
if (fileHandling === "ignored") {
|
||||
return null;
|
||||
}
|
||||
const optionDefaults = {};
|
||||
const {
|
||||
plugins,
|
||||
presets
|
||||
} = options;
|
||||
if (!plugins || !presets) {
|
||||
throw new Error("Assertion failure - plugins and presets exist");
|
||||
}
|
||||
const presetContext = Object.assign({}, context, {
|
||||
targets: options.targets
|
||||
});
|
||||
const toDescriptor = item => {
|
||||
const desc = (0, _item.getItemDescriptor)(item);
|
||||
if (!desc) {
|
||||
throw new Error("Assertion failure - must be config item");
|
||||
}
|
||||
return desc;
|
||||
};
|
||||
const presetsDescriptors = presets.map(toDescriptor);
|
||||
const initialPluginsDescriptors = plugins.map(toDescriptor);
|
||||
const pluginDescriptorsByPass = [[]];
|
||||
const passes = [];
|
||||
const externalDependencies = [];
|
||||
const ignored = yield* enhanceError(context, function* recursePresetDescriptors(rawPresets, pluginDescriptorsPass) {
|
||||
const presets = [];
|
||||
for (let i = 0; i < rawPresets.length; i++) {
|
||||
const descriptor = rawPresets[i];
|
||||
if (descriptor.options !== false) {
|
||||
try {
|
||||
var preset = yield* loadPresetDescriptor(descriptor, presetContext);
|
||||
} catch (e) {
|
||||
if (e.code === "BABEL_UNKNOWN_OPTION") {
|
||||
(0, _options.checkNoUnwrappedItemOptionPairs)(rawPresets, i, "preset", e);
|
||||
}
|
||||
throw e;
|
||||
}
|
||||
externalDependencies.push(preset.externalDependencies);
|
||||
if (descriptor.ownPass) {
|
||||
presets.push({
|
||||
preset: preset.chain,
|
||||
pass: []
|
||||
});
|
||||
} else {
|
||||
presets.unshift({
|
||||
preset: preset.chain,
|
||||
pass: pluginDescriptorsPass
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
if (presets.length > 0) {
|
||||
pluginDescriptorsByPass.splice(1, 0, ...presets.map(o => o.pass).filter(p => p !== pluginDescriptorsPass));
|
||||
for (const {
|
||||
preset,
|
||||
pass
|
||||
} of presets) {
|
||||
if (!preset) return true;
|
||||
pass.push(...preset.plugins);
|
||||
const ignored = yield* recursePresetDescriptors(preset.presets, pass);
|
||||
if (ignored) return true;
|
||||
preset.options.forEach(opts => {
|
||||
(0, _util.mergeOptions)(optionDefaults, opts);
|
||||
});
|
||||
}
|
||||
}
|
||||
})(presetsDescriptors, pluginDescriptorsByPass[0]);
|
||||
if (ignored) return null;
|
||||
const opts = optionDefaults;
|
||||
(0, _util.mergeOptions)(opts, options);
|
||||
const pluginContext = Object.assign({}, presetContext, {
|
||||
assumptions: (_opts$assumptions = opts.assumptions) != null ? _opts$assumptions : {}
|
||||
});
|
||||
yield* enhanceError(context, function* loadPluginDescriptors() {
|
||||
pluginDescriptorsByPass[0].unshift(...initialPluginsDescriptors);
|
||||
for (const descs of pluginDescriptorsByPass) {
|
||||
const pass = [];
|
||||
passes.push(pass);
|
||||
for (let i = 0; i < descs.length; i++) {
|
||||
const descriptor = descs[i];
|
||||
if (descriptor.options !== false) {
|
||||
try {
|
||||
var plugin = yield* loadPluginDescriptor(descriptor, pluginContext);
|
||||
} catch (e) {
|
||||
if (e.code === "BABEL_UNKNOWN_PLUGIN_PROPERTY") {
|
||||
(0, _options.checkNoUnwrappedItemOptionPairs)(descs, i, "plugin", e);
|
||||
}
|
||||
throw e;
|
||||
}
|
||||
pass.push(plugin);
|
||||
externalDependencies.push(plugin.externalDependencies);
|
||||
}
|
||||
}
|
||||
}
|
||||
})();
|
||||
opts.plugins = passes[0];
|
||||
opts.presets = passes.slice(1).filter(plugins => plugins.length > 0).map(plugins => ({
|
||||
plugins
|
||||
}));
|
||||
opts.passPerPreset = opts.presets.length > 0;
|
||||
return {
|
||||
options: opts,
|
||||
passes: passes,
|
||||
externalDependencies: (0, _deepArray.finalize)(externalDependencies)
|
||||
};
|
||||
});
|
||||
function enhanceError(context, fn) {
|
||||
return function* (arg1, arg2) {
|
||||
try {
|
||||
return yield* fn(arg1, arg2);
|
||||
} catch (e) {
|
||||
if (!/^\[BABEL\]/.test(e.message)) {
|
||||
var _context$filename;
|
||||
e.message = `[BABEL] ${(_context$filename = context.filename) != null ? _context$filename : "unknown file"}: ${e.message}`;
|
||||
}
|
||||
throw e;
|
||||
}
|
||||
};
|
||||
}
|
||||
const makeDescriptorLoader = apiFactory => (0, _caching.makeWeakCache)(function* ({
|
||||
value,
|
||||
options,
|
||||
dirname,
|
||||
alias
|
||||
}, cache) {
|
||||
if (options === false) throw new Error("Assertion failure");
|
||||
options = options || {};
|
||||
const externalDependencies = [];
|
||||
let item = value;
|
||||
if (typeof value === "function") {
|
||||
const factory = (0, _async.maybeAsync)(value, `You appear to be using an async plugin/preset, but Babel has been called synchronously`);
|
||||
const api = Object.assign({}, context, apiFactory(cache, externalDependencies));
|
||||
try {
|
||||
item = yield* factory(api, options, dirname);
|
||||
} catch (e) {
|
||||
if (alias) {
|
||||
e.message += ` (While processing: ${JSON.stringify(alias)})`;
|
||||
}
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
if (!item || typeof item !== "object") {
|
||||
throw new Error("Plugin/Preset did not return an object.");
|
||||
}
|
||||
if ((0, _async.isThenable)(item)) {
|
||||
yield* [];
|
||||
throw new Error(`You appear to be using a promise as a plugin, ` + `which your current version of Babel does not support. ` + `If you're using a published plugin, ` + `you may need to upgrade your @babel/core version. ` + `As an alternative, you can prefix the promise with "await". ` + `(While processing: ${JSON.stringify(alias)})`);
|
||||
}
|
||||
if (externalDependencies.length > 0 && (!cache.configured() || cache.mode() === "forever")) {
|
||||
let error = `A plugin/preset has external untracked dependencies ` + `(${externalDependencies[0]}), but the cache `;
|
||||
if (!cache.configured()) {
|
||||
error += `has not been configured to be invalidated when the external dependencies change. `;
|
||||
} else {
|
||||
error += ` has been configured to never be invalidated. `;
|
||||
}
|
||||
error += `Plugins/presets should configure their cache to be invalidated when the external ` + `dependencies change, for example using \`api.cache.invalidate(() => ` + `statSync(filepath).mtimeMs)\` or \`api.cache.never()\`\n` + `(While processing: ${JSON.stringify(alias)})`;
|
||||
throw new Error(error);
|
||||
}
|
||||
return {
|
||||
value: item,
|
||||
options,
|
||||
dirname,
|
||||
alias,
|
||||
externalDependencies: (0, _deepArray.finalize)(externalDependencies)
|
||||
};
|
||||
});
|
||||
const pluginDescriptorLoader = makeDescriptorLoader(_configApi.makePluginAPI);
|
||||
const presetDescriptorLoader = makeDescriptorLoader(_configApi.makePresetAPI);
|
||||
const instantiatePlugin = (0, _caching.makeWeakCache)(function* ({
|
||||
value,
|
||||
options,
|
||||
dirname,
|
||||
alias,
|
||||
externalDependencies
|
||||
}, cache) {
|
||||
const pluginObj = (0, _plugins.validatePluginObject)(value);
|
||||
const plugin = Object.assign({}, pluginObj);
|
||||
if (plugin.visitor) {
|
||||
plugin.visitor = _traverse().default.explode(Object.assign({}, plugin.visitor));
|
||||
}
|
||||
if (plugin.inherits) {
|
||||
const inheritsDescriptor = {
|
||||
name: undefined,
|
||||
alias: `${alias}$inherits`,
|
||||
value: plugin.inherits,
|
||||
options,
|
||||
dirname
|
||||
};
|
||||
const inherits = yield* (0, _async.forwardAsync)(loadPluginDescriptor, run => {
|
||||
return cache.invalidate(data => run(inheritsDescriptor, data));
|
||||
});
|
||||
plugin.pre = chainMaybeAsync(inherits.pre, plugin.pre);
|
||||
plugin.post = chainMaybeAsync(inherits.post, plugin.post);
|
||||
plugin.manipulateOptions = chainMaybeAsync(inherits.manipulateOptions, plugin.manipulateOptions);
|
||||
plugin.visitor = _traverse().default.visitors.merge([inherits.visitor || {}, plugin.visitor || {}]);
|
||||
if (inherits.externalDependencies.length > 0) {
|
||||
if (externalDependencies.length === 0) {
|
||||
externalDependencies = inherits.externalDependencies;
|
||||
} else {
|
||||
externalDependencies = (0, _deepArray.finalize)([externalDependencies, inherits.externalDependencies]);
|
||||
}
|
||||
}
|
||||
}
|
||||
return new _plugin.default(plugin, options, alias, externalDependencies);
|
||||
});
|
||||
function* loadPluginDescriptor(descriptor, context) {
|
||||
if (descriptor.value instanceof _plugin.default) {
|
||||
if (descriptor.options) {
|
||||
throw new Error("Passed options to an existing Plugin instance will not work.");
|
||||
}
|
||||
return descriptor.value;
|
||||
}
|
||||
return yield* instantiatePlugin(yield* pluginDescriptorLoader(descriptor, context), context);
|
||||
}
|
||||
const needsFilename = val => val && typeof val !== "function";
|
||||
const validateIfOptionNeedsFilename = (options, descriptor) => {
|
||||
if (needsFilename(options.test) || needsFilename(options.include) || needsFilename(options.exclude)) {
|
||||
const formattedPresetName = descriptor.name ? `"${descriptor.name}"` : "/* your preset */";
|
||||
throw new _configError.default([`Preset ${formattedPresetName} requires a filename to be set when babel is called directly,`, `\`\`\``, `babel.transformSync(code, { filename: 'file.ts', presets: [${formattedPresetName}] });`, `\`\`\``, `See https://babeljs.io/docs/en/options#filename for more information.`].join("\n"));
|
||||
}
|
||||
};
|
||||
const validatePreset = (preset, context, descriptor) => {
|
||||
if (!context.filename) {
|
||||
var _options$overrides;
|
||||
const {
|
||||
options
|
||||
} = preset;
|
||||
validateIfOptionNeedsFilename(options, descriptor);
|
||||
(_options$overrides = options.overrides) == null || _options$overrides.forEach(overrideOptions => validateIfOptionNeedsFilename(overrideOptions, descriptor));
|
||||
}
|
||||
};
|
||||
const instantiatePreset = (0, _caching.makeWeakCacheSync)(({
|
||||
value,
|
||||
dirname,
|
||||
alias,
|
||||
externalDependencies
|
||||
}) => {
|
||||
return {
|
||||
options: (0, _options.validate)("preset", value),
|
||||
alias,
|
||||
dirname,
|
||||
externalDependencies
|
||||
};
|
||||
});
|
||||
function* loadPresetDescriptor(descriptor, context) {
  const preset = instantiatePreset(yield* presetDescriptorLoader(descriptor, context));
  validatePreset(preset, context, descriptor);
  return {
    chain: yield* (0, _configChain.buildPresetChain)(preset, context),
    externalDependencies: preset.externalDependencies
  };
}
function chainMaybeAsync(a, b) {
  if (!a) return b;
  if (!b) return a;
  return function (...args) {
    const res = a.apply(this, args);
    if (res && typeof res.then === "function") {
      return res.then(() => b.apply(this, args));
    }
    return b.apply(this, args);
  };
}
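A minimal illustration of chainMaybeAsync's contract, with hypothetical hooks: a runs first, and b is chained behind a's promise only when a returns a thenable; otherwise b runs immediately and synchronously after it:

// Hypothetical pre hooks, named for illustration only.
const parentPre = function () { this.order = ["parent"]; };
const childPre = function () { this.order.push("child"); };

const chained = chainMaybeAsync(parentPre, childPre);
const state = {};
chained.call(state);
console.log(state.order); // ["parent", "child"]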
0 && 0;

//# sourceMappingURL=full.js.map
1 node_modules/@babel/core/lib/config/full.js.map generated vendored Normal file
File diff suppressed because one or more lines are too long
84 node_modules/@babel/core/lib/config/helpers/config-api.js generated vendored Normal file
@@ -0,0 +1,84 @@
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.makeConfigAPI = makeConfigAPI;
|
||||
exports.makePluginAPI = makePluginAPI;
|
||||
exports.makePresetAPI = makePresetAPI;
|
||||
function _semver() {
|
||||
const data = require("semver");
|
||||
_semver = function () {
|
||||
return data;
|
||||
};
|
||||
return data;
|
||||
}
|
||||
var _index = require("../../index.js");
var _caching = require("../caching.js");
function makeConfigAPI(cache) {
  const env = value => cache.using(data => {
    if (value === undefined) return data.envName;
    if (typeof value === "function") {
      return (0, _caching.assertSimpleType)(value(data.envName));
    }
    return (Array.isArray(value) ? value : [value]).some(entry => {
      if (typeof entry !== "string") {
        throw new Error("Unexpected non-string value");
      }
      return entry === data.envName;
    });
  });
  const caller = cb => cache.using(data => (0, _caching.assertSimpleType)(cb(data.caller)));
  return {
    version: _index.version,
    cache: cache.simple(),
    env,
    async: () => false,
    caller,
    assertVersion
  };
}
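This object is what a babel.config.js function receives. A sketch of typical consumption; api.cache, api.env, and api.caller are the documented surface, while the preset choice is illustrative:

// babel.config.js (sketch)
module.exports = function (api) {
  api.cache(true); // backed by cache.simple() above
  const isProd = api.env("production"); // string form takes the Array branch above
  const isWebpack = api.caller(c => !!c && c.name === "babel-loader");
  return {
    presets: isProd || isWebpack
      ? [["@babel/preset-env", { targets: "defaults" }]]
      : []
  };
};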
function makePresetAPI(cache, externalDependencies) {
  const targets = () => JSON.parse(cache.using(data => JSON.stringify(data.targets)));
  const addExternalDependency = ref => {
    externalDependencies.push(ref);
  };
  return Object.assign({}, makeConfigAPI(cache), {
    targets,
    addExternalDependency
  });
}
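A sketch of a preset using the two additions here: api.targets() hands back a deep copy (the JSON round-trip above), and api.addExternalDependency registers a file whose changes should invalidate the cached config. The preset and file path are hypothetical:

// my-preset.js (hypothetical)
module.exports = function myPreset(api) {
  const targets = api.targets(); // deep-copied, safe to mutate
  api.addExternalDependency(require.resolve("./feature-table.json")); // hypothetical file
  return {
    plugins: targets.node ? [] : ["@babel/plugin-transform-classes"]
  };
};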
function makePluginAPI(cache, externalDependencies) {
  const assumption = name => cache.using(data => data.assumptions[name]);
  return Object.assign({}, makePresetAPI(cache, externalDependencies), {
    assumption
  });
}
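Plugins additionally get assumption, which reads the top-level assumptions option by name. A sketch; setPublicClassFields is a real Babel assumption, but the plugin body is illustrative:

module.exports = function (api) {
  const setPublicFields = api.assumption("setPublicClassFields");
  return {
    name: "example-plugin",
    visitor: {
      ClassProperty(path) {
        // e.g. emit plain assignment instead of Object.defineProperty
        if (setPublicFields) { /* ... */ }
      }
    }
  };
};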
function assertVersion(range) {
  if (typeof range === "number") {
    if (!Number.isInteger(range)) {
      throw new Error("Expected string or integer value.");
    }
    range = `^${range}.0.0-0`;
  }
  if (typeof range !== "string") {
    throw new Error("Expected string or integer value.");
  }
  if (range === "*" || _semver().satisfies(_index.version, range)) return;
  const limit = Error.stackTraceLimit;
  if (typeof limit === "number" && limit < 25) {
    Error.stackTraceLimit = 25;
  }
  const err = new Error(`Requires Babel "${range}", but was loaded with "${_index.version}". ` + `If you are sure you have a compatible version of @babel/core, ` + `it is likely that something in your build process is loading the ` + `wrong version. Inspect the stack trace of this error to look for ` + `the first entry that doesn't mention "@babel/core" or "babel-core" ` + `to see what is calling Babel.`);
  if (typeof limit === "number") {
    Error.stackTraceLimit = limit;
  }
  throw Object.assign(err, {
    code: "BABEL_VERSION_UNSUPPORTED",
    version: _index.version,
    range
  });
}
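Both accepted call forms, as typically seen at the top of a plugin or preset (sketch):

module.exports = function (api) {
  api.assertVersion(7);         // integer is coerced to "^7.0.0-0" above
  api.assertVersion("^7.13.0"); // any semver range also works
  return { name: "example", visitor: {} };
};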
0 && 0;

//# sourceMappingURL=config-api.js.map
1 node_modules/@babel/core/lib/config/helpers/config-api.js.map generated vendored Normal file
File diff suppressed because one or more lines are too long
Some files were not shown because too many files have changed in this diff