Compare commits: 1d4606db27...nodejs (20 commits)

SHA1:
91702fe33a
b6fb117ec3
37bed7697b
d1c6aeb6cd
0f9e793a29
80b9b2e048
f1950ca6bb
4d4bcd4368
7c3e347e8c
c6dec7838a
decfc44592
7641f45abf
b700ef7423
2f2ee1100f
cc421f40c6
0d2619b9c0
d873640075
cd0b04b488
2747fd5b76
a8b8883b11
Title.html | 78 lines | Normal file (new)
@@ -0,0 +1,78 @@
<!DOCTYPE html>
<html lang="en" class="h-full bg-white">
<head>
    <link rel="stylesheet" href="css/output.css">
    <title>Title</title>
</head>
<body class="h-full">
    <div class="flex min-h-full flex-col justify-center px-6 py-12 lg:px-8">
        <div class="sm:mx-auto sm:w-full sm:max-w-sm">
            <img class="mx-auto h-10 w-auto" src="https://tailwindui.com/img/logos/mark.svg?color=indigo&shade=600" alt="Your Company">
            <h2 class="mt-10 text-center text-2xl font-bold leading-9 tracking-tight text-gray-900">Sign in to your account</h2>
        </div>

        <div class="mt-10 sm:mx-auto sm:w-full sm:max-w-sm">
            <form class="space-y-6" action="/__login" method="POST"> <!-- Change to php -->
                <div>
                    <label for="email" class="block text-sm font-medium leading-6 text-gray-900">Email address</label>
                    <div class="mt-2">
                        <input id="email" name="email" type="email" autocomplete="email" required class="block w-full rounded-md border-0 py-1.5 text-gray-900 shadow-sm ring-1 ring-inset ring-gray-300 placeholder:text-gray-400 focus:ring-2 focus:ring-inset focus:ring-indigo-600 sm:text-sm sm:leading-6">
                        <div id="email-error" class="hidden mt-2 text-red-800 text-xs">
                            Invalid email
                        </div>
                    </div>
                </div>
                <div>
                    <div class="flex items-center justify-between">
                        <label for="password" class="block text-sm font-medium leading-6 text-gray-900">Password</label>
                        <div class="text-sm">
                            <a href="#" class="font-semibold text-indigo-600 hover:text-indigo-500">Forgot password?</a>
                        </div>
                    </div>
                    <div class="mt-2 relative block bg-sky-800">
                        <input id="password" name="password" type="password" autocomplete="off" required class="block w-11/12 rounded-md border-0 py-1.5 text-gray-900 shadow-sm ring-1 ring-inset ring-gray-300 placeholder:text-gray-400 focus:ring-2 focus:ring-inset focus:ring-indigo-600 sm:text-sm sm:leading-6">

                        <span class="absolute inset-y-0 right-0 flex items-center pr-2 bg-red-500">
                            <svg id="pswd-invisible" class="w-6 h-6 m-auto" fill="none" stroke="currentColor" viewBox="0 0 24 24"
                                 xmlns="http://www.w3.org/2000/svg">
                                <path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" color="black"
                                      d="M15 12a3 3 0 11-6 0 3 3 0 016 0z"></path>
                                <path stroke-linecap="round" stroke-linejoin="round" stroke-width="2"
                                      d="M2.458 12C3.732 7.943 7.523 5 12 5c4.478 0 8.268 2.943 9.542 7-1.274 4.057-5.064 7-9.542 7-4.477 0-8.268-2.943-9.542-7z">
                                </path>
                            </svg>
                        </span>
                        <span class="absolute inset-y-0 right-0 flex items-center pr-2">
                            <svg id="pswd-visible" class="w-6 h-6 m-auto" fill="none" stroke="currentColor" viewBox="0 0 24 24"
                                 xmlns="http://www.w3.org/2000/svg">
                                <path stroke-linecap="round" stroke-linejoin="round" stroke-width="2"
                                      d="M13.875 18.825A10.05 10.05 0 0112 19c-4.478 0-8.268-2.943-9.543-7a9.97 9.97 0 011.563-3.029m5.858.908a3 3 0 114.243 4.243M9.878 9.878l4.242 4.242M9.88 9.88l-3.29-3.29m7.532 7.532l3.29 3.29M3 3l3.59 3.59m0 0A9.953 9.953 0 0112 5c4.478 0 8.268 2.943 9.543 7a10.025 10.025 0 01-4.132 5.411m0 0L21 21">
                                </path>
                            </svg>
                        </span>
                    </div>
                    <p id="letter" class="hidden text-red-800 text-xs">
                        A <b>lowercase</b> letter
                    </p>
                    <p id="capital" class="hidden text-red-800 text-xs">
                        A <b>capital (uppercase)</b> letter
                    </p>
                    <p id="number" class="hidden text-red-800 text-xs">
                        A <b>number</b>
                    </p>
                    <p id="symbol" class="hidden text-red-800 text-xs">
                        A <b>symbol</b>
                    </p>
                    <p id="length" class="hidden text-red-800 text-xs">
                        Minimum <b>10 characters</b>
                    </p>
                </div>
                <div>
                    <button type="submit" class="flex w-full justify-center rounded-md bg-indigo-600 px-3 py-1.5 text-sm font-semibold leading-6 text-white shadow-sm hover:bg-indigo-500 focus-visible:outline focus-visible:outline-2 focus-visible:outline-offset-2 focus-visible:outline-indigo-600">Sign in</button>
                </div>
            </form>
        </div>
    </div>
    <script src="./src/login.js"></script>
</body>
</html>
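The form above posts to `/__login` (the inline comment notes this is meant to change to PHP) and wires its client-side behavior to `./src/login.js`, which is not part of this compare view. For reference, here is a minimal sketch of what such a script might do, inferred only from the element IDs declared in Title.html (`email`, `email-error`, `password`, the hidden hint paragraphs, and the `pswd-visible`/`pswd-invisible` icons); the real login.js may differ.

```js
// Hypothetical sketch of src/login.js; not part of this diff.
// Assumes only the element IDs declared in Title.html above.
const email = document.getElementById('email');
const password = document.getElementById('password');

// Show the "Invalid email" hint whenever the built-in email validation fails.
email.addEventListener('input', () => {
    document.getElementById('email-error')
        .classList.toggle('hidden', email.checkValidity());
});

// Password rules matching the hidden hint paragraphs.
const rules = {
    letter: /[a-z]/,          // a lowercase letter
    capital: /[A-Z]/,         // an uppercase letter
    number: /[0-9]/,          // a number
    symbol: /[^a-zA-Z0-9]/,   // a symbol
};

password.addEventListener('input', () => {
    const value = password.value;
    for (const [id, pattern] of Object.entries(rules)) {
        // Hide a hint once its rule is satisfied.
        document.getElementById(id).classList.toggle('hidden', pattern.test(value));
    }
    document.getElementById('length').classList.toggle('hidden', value.length >= 10);
});

// Swap the two eye icons and reveal/mask the password on click.
const visible = document.getElementById('pswd-visible');
const invisible = document.getElementById('pswd-invisible');
for (const icon of [visible, invisible]) {
    icon.addEventListener('click', () => {
        password.type = password.type === 'password' ? 'text' : 'password';
        visible.classList.toggle('hidden');
        invisible.classList.toggle('hidden');
    });
}
```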
index.js | 67 lines | deleted
@@ -1,67 +0,0 @@
const submit = document.getElementById("button");
submit.addEventListener('click', validate);
function validate(e) {
    e.preventDefault();

    const url = document.getElementById("URL");
    const glink = document.getElementById("GLink");
    // if (!url) {
    //     /* Flag */
    // }
    let valid = true;
    const domainExp = new RegExp("http(s)*:\\/\\/[a-zA-Z0-9\\-]+(\\.[a-zA-Z0-9\\-]+)+");
    const filepathExp = new RegExp("[a-zA-Z]+");
    let count = 0;
    let index = -1;
    let domain = "";
    let filepath = "";
    for (let i = 0; i < url.value.length; i++) {
        if (url.value.charAt(i) == '/') {
            count++;
        }
        if (count == 3) {
            index = i;
            break;
        }
    }
    if (count >= 3) {
        domain = url.value.substring(0, index);
        if (index == url.value.length - 1) {
            filepath = url.value.charAt(index);
        } else {
            filepath = url.value.substring(index, url.value.length - 1);
        }
        alert("Domain is " + domain + " filepath is " + filepath);
    } else {
        domain = url.value;
    }
    console.log(domain);
    if (domain.match(domainExp)) /** and is available? */ {
        const error = document.getElementById("error");
        if (error.classList.contains("visible")) {
            error.classList.remove("visible");
        }
        if (url.classList.contains("invalid")) {
            url.classList.remove("invalid");
        }
        url.classList.add("valid");
        error.setAttribute('aria-hidden', true);
        error.setAttribute('aria-invalid', false);
        console.log("Valid");
        return valid;
    } else {
        /* flag */
        const error = document.getElementById("error");
        error.classList.add("visible");
        // error.classList.add("hidden");
        if (url.classList.contains("valid")) {
            url.classList.remove("valid");
        }
        url.classList.add("invalid");
        error.setAttribute('aria-hidden', false);
        error.setAttribute('aria-invalid', true);
    }
}
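The deleted validator above finds the third '/' by hand to split a URL into domain and filepath, and its `substring(index, url.value.length - 1)` branch drops the last character of the filepath. Not a drop-in replacement for the deleted code, but worth noting: the built-in WHATWG `URL` constructor performs the same split and the validity check in one step. A minimal sketch:

```js
// Sketch: domain/filepath split via the built-in URL API instead of
// counting '/' characters manually.
function splitUrl(value) {
    let parsed;
    try {
        parsed = new URL(value); // throws on strings that are not valid URLs
    } catch {
        return null; // invalid input; the caller can flag it
    }
    return {
        domain: parsed.origin,                     // e.g. "https://example.com"
        filepath: parsed.pathname + parsed.search, // e.g. "/a/b?q=1"
    };
}

console.log(splitUrl('https://example.com/a/b'));
//=> { domain: 'https://example.com', filepath: '/a/b' }
console.log(splitUrl('not a url'));
//=> null
```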
node_modules/.DS_Store | BIN | generated, vendored, Normal file (new)
Binary file not shown.
node_modules/.bin | 18 new symbolic links (generated, vendored), 1 line each:

color-support     -> ../color-support/bin.js
cssesc            -> ../cssesc/bin/cssesc
ejs               -> ../ejs/bin/cli.js
jake              -> ../jake/bin/cli.js
jiti              -> ../jiti/bin/jiti.js
mime              -> ../mime/cli.js
mini-svg-data-uri -> ../mini-svg-data-uri/cli.js
mkdirp            -> ../mkdirp/bin/cmd.js
nanoid            -> ../nanoid/bin/nanoid.cjs
node-pre-gyp      -> ../@mapbox/node-pre-gyp/bin/node-pre-gyp
nopt              -> ../nopt/bin/nopt.js
resolve           -> ../resolve/bin/resolve
rimraf            -> ../rimraf/bin.js
semver            -> ../semver/bin/semver.js
sucrase           -> ../sucrase/bin/sucrase
sucrase-node      -> ../sucrase/bin/sucrase-node
tailwind          -> ../tailwindcss/lib/cli.js
tailwindcss       -> ../tailwindcss/lib/cli.js
node_modules/.package-lock.json | 2315 lines | generated, vendored
File diff suppressed because it is too large.
node_modules/@alloc/quick-lru/index.d.ts | 128 lines | generated, vendored, Normal file (new)
@@ -0,0 +1,128 @@
declare namespace QuickLRU {
    interface Options<KeyType, ValueType> {
        /**
        The maximum number of milliseconds an item should remain in the cache.

        @default Infinity

        By default, `maxAge` will be `Infinity`, which means that items will never expire.
        Lazy expiration upon the next write or read call.

        Individual expiration of an item can be specified by the `set(key, value, maxAge)` method.
        */
        readonly maxAge?: number;

        /**
        The maximum number of items before evicting the least recently used items.
        */
        readonly maxSize: number;

        /**
        Called right before an item is evicted from the cache.

        Useful for side effects or for items like object URLs that need explicit cleanup (`revokeObjectURL`).
        */
        onEviction?: (key: KeyType, value: ValueType) => void;
    }
}

declare class QuickLRU<KeyType, ValueType>
    implements Iterable<[KeyType, ValueType]> {
    /**
    The stored item count.
    */
    readonly size: number;

    /**
    Simple ["Least Recently Used" (LRU) cache](https://en.m.wikipedia.org/wiki/Cache_replacement_policies#Least_Recently_Used_.28LRU.29).

    The instance is [`iterable`](https://developer.mozilla.org/en/docs/Web/JavaScript/Reference/Iteration_protocols) so you can use it directly in a [`for…of`](https://developer.mozilla.org/en/docs/Web/JavaScript/Reference/Statements/for...of) loop.

    @example
    ```
    import QuickLRU = require('quick-lru');

    const lru = new QuickLRU({maxSize: 1000});

    lru.set('🦄', '🌈');

    lru.has('🦄');
    //=> true

    lru.get('🦄');
    //=> '🌈'
    ```
    */
    constructor(options: QuickLRU.Options<KeyType, ValueType>);

    [Symbol.iterator](): IterableIterator<[KeyType, ValueType]>;

    /**
    Set an item. Returns the instance.

    Individual expiration of an item can be specified with the `maxAge` option. If not specified, the global `maxAge` value will be used in case it is specified in the constructor, otherwise the item will never expire.

    @returns The list instance.
    */
    set(key: KeyType, value: ValueType, options?: {maxAge?: number}): this;

    /**
    Get an item.

    @returns The stored item or `undefined`.
    */
    get(key: KeyType): ValueType | undefined;

    /**
    Check if an item exists.
    */
    has(key: KeyType): boolean;

    /**
    Get an item without marking it as recently used.

    @returns The stored item or `undefined`.
    */
    peek(key: KeyType): ValueType | undefined;

    /**
    Delete an item.

    @returns `true` if the item is removed or `false` if the item doesn't exist.
    */
    delete(key: KeyType): boolean;

    /**
    Delete all items.
    */
    clear(): void;

    /**
    Update the `maxSize` in-place, discarding items as necessary. Insertion order is mostly preserved, though this is not a strong guarantee.

    Useful for on-the-fly tuning of cache sizes in live systems.
    */
    resize(maxSize: number): void;

    /**
    Iterable for all the keys.
    */
    keys(): IterableIterator<KeyType>;

    /**
    Iterable for all the values.
    */
    values(): IterableIterator<ValueType>;

    /**
    Iterable for all entries, starting with the oldest (ascending in recency).
    */
    entriesAscending(): IterableIterator<[KeyType, ValueType]>;

    /**
    Iterable for all entries, starting with the newest (descending in recency).
    */
    entriesDescending(): IterableIterator<[KeyType, ValueType]>;
}

export = QuickLRU;
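The `maxAge` documentation above describes lazy, per-item expiration. A small usage sketch, assuming the package is installed under its published name. One caveat: in the implementation vendored below (see index.js), the per-call option value is stored directly as the absolute `expiry` timestamp, so an override is expressed as `Date.now() + ms`:

```js
const QuickLRU = require('@alloc/quick-lru');

const lru = new QuickLRU({maxSize: 100, maxAge: 60000}); // items live ~1 minute

lru.set('session', 'abc123'); // expires ~60s from now

// Per-item override. In the vendored index.js below, the option value is
// used verbatim as the expiry timestamp, hence Date.now() + 1000 here.
lru.set('token', 'xyz789', {maxAge: Date.now() + 1000});

setTimeout(() => {
    console.log(lru.has('token'));   //=> false (deleted lazily on this read)
    console.log(lru.get('session')); //=> 'abc123' (still within its window)
}, 2000);
```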
node_modules/@alloc/quick-lru/index.js | 263 lines | generated, vendored, Normal file (new)
@@ -0,0 +1,263 @@
'use strict';

class QuickLRU {
    constructor(options = {}) {
        if (!(options.maxSize && options.maxSize > 0)) {
            throw new TypeError('`maxSize` must be a number greater than 0');
        }

        if (typeof options.maxAge === 'number' && options.maxAge === 0) {
            throw new TypeError('`maxAge` must be a number greater than 0');
        }

        this.maxSize = options.maxSize;
        this.maxAge = options.maxAge || Infinity;
        this.onEviction = options.onEviction;
        this.cache = new Map();
        this.oldCache = new Map();
        this._size = 0;
    }

    _emitEvictions(cache) {
        if (typeof this.onEviction !== 'function') {
            return;
        }

        for (const [key, item] of cache) {
            this.onEviction(key, item.value);
        }
    }

    _deleteIfExpired(key, item) {
        if (typeof item.expiry === 'number' && item.expiry <= Date.now()) {
            if (typeof this.onEviction === 'function') {
                this.onEviction(key, item.value);
            }

            return this.delete(key);
        }

        return false;
    }

    _getOrDeleteIfExpired(key, item) {
        const deleted = this._deleteIfExpired(key, item);
        if (deleted === false) {
            return item.value;
        }
    }

    _getItemValue(key, item) {
        return item.expiry ? this._getOrDeleteIfExpired(key, item) : item.value;
    }

    _peek(key, cache) {
        const item = cache.get(key);

        return this._getItemValue(key, item);
    }

    _set(key, value) {
        this.cache.set(key, value);
        this._size++;

        if (this._size >= this.maxSize) {
            this._size = 0;
            this._emitEvictions(this.oldCache);
            this.oldCache = this.cache;
            this.cache = new Map();
        }
    }

    _moveToRecent(key, item) {
        this.oldCache.delete(key);
        this._set(key, item);
    }

    * _entriesAscending() {
        for (const item of this.oldCache) {
            const [key, value] = item;
            if (!this.cache.has(key)) {
                const deleted = this._deleteIfExpired(key, value);
                if (deleted === false) {
                    yield item;
                }
            }
        }

        for (const item of this.cache) {
            const [key, value] = item;
            const deleted = this._deleteIfExpired(key, value);
            if (deleted === false) {
                yield item;
            }
        }
    }

    get(key) {
        if (this.cache.has(key)) {
            const item = this.cache.get(key);

            return this._getItemValue(key, item);
        }

        if (this.oldCache.has(key)) {
            const item = this.oldCache.get(key);
            if (this._deleteIfExpired(key, item) === false) {
                this._moveToRecent(key, item);
                return item.value;
            }
        }
    }

    set(key, value, {maxAge = this.maxAge === Infinity ? undefined : Date.now() + this.maxAge} = {}) {
        if (this.cache.has(key)) {
            this.cache.set(key, {
                value,
                maxAge
            });
        } else {
            this._set(key, {value, expiry: maxAge});
        }
    }

    has(key) {
        if (this.cache.has(key)) {
            return !this._deleteIfExpired(key, this.cache.get(key));
        }

        if (this.oldCache.has(key)) {
            return !this._deleteIfExpired(key, this.oldCache.get(key));
        }

        return false;
    }

    peek(key) {
        if (this.cache.has(key)) {
            return this._peek(key, this.cache);
        }

        if (this.oldCache.has(key)) {
            return this._peek(key, this.oldCache);
        }
    }

    delete(key) {
        const deleted = this.cache.delete(key);
        if (deleted) {
            this._size--;
        }

        return this.oldCache.delete(key) || deleted;
    }

    clear() {
        this.cache.clear();
        this.oldCache.clear();
        this._size = 0;
    }

    resize(newSize) {
        if (!(newSize && newSize > 0)) {
            throw new TypeError('`maxSize` must be a number greater than 0');
        }

        const items = [...this._entriesAscending()];
        const removeCount = items.length - newSize;
        if (removeCount < 0) {
            this.cache = new Map(items);
            this.oldCache = new Map();
            this._size = items.length;
        } else {
            if (removeCount > 0) {
                this._emitEvictions(items.slice(0, removeCount));
            }

            this.oldCache = new Map(items.slice(removeCount));
            this.cache = new Map();
            this._size = 0;
        }

        this.maxSize = newSize;
    }

    * keys() {
        for (const [key] of this) {
            yield key;
        }
    }

    * values() {
        for (const [, value] of this) {
            yield value;
        }
    }

    * [Symbol.iterator]() {
        for (const item of this.cache) {
            const [key, value] = item;
            const deleted = this._deleteIfExpired(key, value);
            if (deleted === false) {
                yield [key, value.value];
            }
        }

        for (const item of this.oldCache) {
            const [key, value] = item;
            if (!this.cache.has(key)) {
                const deleted = this._deleteIfExpired(key, value);
                if (deleted === false) {
                    yield [key, value.value];
                }
            }
        }
    }

    * entriesDescending() {
        let items = [...this.cache];
        for (let i = items.length - 1; i >= 0; --i) {
            const item = items[i];
            const [key, value] = item;
            const deleted = this._deleteIfExpired(key, value);
            if (deleted === false) {
                yield [key, value.value];
            }
        }

        items = [...this.oldCache];
        for (let i = items.length - 1; i >= 0; --i) {
            const item = items[i];
            const [key, value] = item;
            if (!this.cache.has(key)) {
                const deleted = this._deleteIfExpired(key, value);
                if (deleted === false) {
                    yield [key, value.value];
                }
            }
        }
    }

    * entriesAscending() {
        for (const [key, value] of this._entriesAscending()) {
            yield [key, value.value];
        }
    }

    get size() {
        if (!this._size) {
            return this.oldCache.size;
        }

        let oldCacheSize = 0;
        for (const key of this.oldCache.keys()) {
            if (!this.cache.has(key)) {
                oldCacheSize++;
            }
        }

        return Math.min(this._size + oldCacheSize, this.maxSize);
    }
}

module.exports = QuickLRU;
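The class above is the hashlru-style two-Map design mentioned in the package readme below: writes fill the hot `cache`, and once `maxSize` writes have accumulated, `_set()` rotates the whole `cache` into `oldCache` and bulk-evicts what was there. A short demo of that rotation, assuming the package name used in this repository:

```js
const QuickLRU = require('@alloc/quick-lru');

const lru = new QuickLRU({
    maxSize: 2,
    onEviction: (key, value) => console.log(`evicted ${key}=${value}`),
});

lru.set('a', 1);
lru.set('b', 2); // _size reaches maxSize: {a, b} rotate into oldCache
lru.set('c', 3); // starts filling the fresh hot cache
lru.get('a');    // promotes 'a' out of oldCache; the rotation this triggers
                 // bulk-evicts what is left there
//=> logs: evicted b=2

console.log(lru.has('b'));    //=> false
console.log([...lru.keys()]); //=> [ 'c', 'a' ]
```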
node_modules/@alloc/quick-lru/license | 9 lines | generated, vendored, Normal file (new)
@@ -0,0 +1,9 @@
MIT License

Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com)

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
node_modules/@alloc/quick-lru/package.json | 43 lines | generated, vendored, Normal file (new)
@@ -0,0 +1,43 @@
{
    "name": "@alloc/quick-lru",
    "version": "5.2.0",
    "description": "Simple “Least Recently Used” (LRU) cache",
    "license": "MIT",
    "repository": "sindresorhus/quick-lru",
    "funding": "https://github.com/sponsors/sindresorhus",
    "author": {
        "name": "Sindre Sorhus",
        "email": "sindresorhus@gmail.com",
        "url": "https://sindresorhus.com"
    },
    "engines": {
        "node": ">=10"
    },
    "scripts": {
        "test": "xo && nyc ava && tsd"
    },
    "files": [
        "index.js",
        "index.d.ts"
    ],
    "keywords": [
        "lru",
        "quick",
        "cache",
        "caching",
        "least",
        "recently",
        "used",
        "fast",
        "map",
        "hash",
        "buffer"
    ],
    "devDependencies": {
        "ava": "^2.0.0",
        "coveralls": "^3.0.3",
        "nyc": "^15.0.0",
        "tsd": "^0.11.0",
        "xo": "^0.26.0"
    }
}
node_modules/@alloc/quick-lru/readme.md | 139 lines | generated, vendored, Normal file (new)
@@ -0,0 +1,139 @@
# quick-lru [](https://travis-ci.org/sindresorhus/quick-lru) [](https://coveralls.io/github/sindresorhus/quick-lru?branch=master)

> Simple [“Least Recently Used” (LRU) cache](https://en.m.wikipedia.org/wiki/Cache_replacement_policies#Least_Recently_Used_.28LRU.29)

Useful when you need to cache something and limit memory usage.

Inspired by the [`hashlru` algorithm](https://github.com/dominictarr/hashlru#algorithm), but instead uses [`Map`](https://developer.mozilla.org/en/docs/Web/JavaScript/Reference/Global_Objects/Map) to support keys of any type, not just strings, and values can be `undefined`.

## Install

```
$ npm install quick-lru
```

## Usage

```js
const QuickLRU = require('quick-lru');

const lru = new QuickLRU({maxSize: 1000});

lru.set('🦄', '🌈');

lru.has('🦄');
//=> true

lru.get('🦄');
//=> '🌈'
```

## API

### new QuickLRU(options?)

Returns a new instance.

### options

Type: `object`

#### maxSize

*Required*\
Type: `number`

The maximum number of items before evicting the least recently used items.

#### maxAge

Type: `number`\
Default: `Infinity`

The maximum number of milliseconds an item should remain in cache.
By default, `maxAge` will be `Infinity`, which means that items will never expire.

Lazy expiration happens upon the next `write` or `read` call.

Individual expiration of an item can be specified by the `set(key, value, options)` method.

#### onEviction

*Optional*\
Type: `(key, value) => void`

Called right before an item is evicted from the cache.

Useful for side effects or for items like object URLs that need explicit cleanup (`revokeObjectURL`).

### Instance

The instance is [`iterable`](https://developer.mozilla.org/en/docs/Web/JavaScript/Reference/Iteration_protocols) so you can use it directly in a [`for…of`](https://developer.mozilla.org/en/docs/Web/JavaScript/Reference/Statements/for...of) loop.

Both `key` and `value` can be of any type.

#### .set(key, value, options?)

Set an item. Returns the instance.

Individual expiration of an item can be specified with the `maxAge` option. If not specified, the global `maxAge` value will be used in case it is specified on the constructor, otherwise the item will never expire.

#### .get(key)

Get an item.

#### .has(key)

Check if an item exists.

#### .peek(key)

Get an item without marking it as recently used.

#### .delete(key)

Delete an item.

Returns `true` if the item is removed or `false` if the item doesn't exist.

#### .clear()

Delete all items.

#### .resize(maxSize)

Update the `maxSize`, discarding items as necessary. Insertion order is mostly preserved, though this is not a strong guarantee.

Useful for on-the-fly tuning of cache sizes in live systems.

#### .keys()

Iterable for all the keys.

#### .values()

Iterable for all the values.

#### .entriesAscending()

Iterable for all entries, starting with the oldest (ascending in recency).

#### .entriesDescending()

Iterable for all entries, starting with the newest (descending in recency).

#### .size

The stored item count.

---

<div align="center">
	<b>
		<a href="https://tidelift.com/subscription/pkg/npm-quick-lru?utm_source=npm-quick-lru&utm_medium=referral&utm_campaign=readme">Get professional support for this package with a Tidelift subscription</a>
	</b>
	<br>
	<sub>
		Tidelift helps make open source sustainable for maintainers while giving companies<br>assurances about security, maintenance, and licensing for their dependencies.
	</sub>
</div>
node_modules/@jridgewell/gen-mapping/LICENSE | 19 lines | generated, vendored, Normal file (new)
@@ -0,0 +1,19 @@
Copyright 2022 Justin Ridgewell <jridgewell@google.com>

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
node_modules/@jridgewell/gen-mapping/README.md | 227 lines | generated, vendored, Normal file (new)
@@ -0,0 +1,227 @@
# @jridgewell/gen-mapping

> Generate source maps

`gen-mapping` allows you to generate a source map during transpilation or minification.
With a source map, you're able to trace the original location in the source file, either in Chrome's
DevTools or using a library like [`@jridgewell/trace-mapping`][trace-mapping].

You may already be familiar with the [`source-map`][source-map] package's `SourceMapGenerator`. This
provides the same `addMapping` and `setSourceContent` API.

## Installation

```sh
npm install @jridgewell/gen-mapping
```

## Usage

```typescript
import { GenMapping, addMapping, setSourceContent, toEncodedMap, toDecodedMap } from '@jridgewell/gen-mapping';

const map = new GenMapping({
  file: 'output.js',
  sourceRoot: 'https://example.com/',
});

setSourceContent(map, 'input.js', `function foo() {}`);

addMapping(map, {
  // Lines start at line 1, columns at column 0.
  generated: { line: 1, column: 0 },
  source: 'input.js',
  original: { line: 1, column: 0 },
});

addMapping(map, {
  generated: { line: 1, column: 9 },
  source: 'input.js',
  original: { line: 1, column: 9 },
  name: 'foo',
});

assert.deepEqual(toDecodedMap(map), {
  version: 3,
  file: 'output.js',
  names: ['foo'],
  sourceRoot: 'https://example.com/',
  sources: ['input.js'],
  sourcesContent: ['function foo() {}'],
  mappings: [
    [ [0, 0, 0, 0], [9, 0, 0, 9, 0] ]
  ],
});

assert.deepEqual(toEncodedMap(map), {
  version: 3,
  file: 'output.js',
  names: ['foo'],
  sourceRoot: 'https://example.com/',
  sources: ['input.js'],
  sourcesContent: ['function foo() {}'],
  mappings: 'AAAA,SAASA',
});
```

### Smaller Sourcemaps

Not everything needs to be added to a sourcemap, and needless markings can cause significantly
larger file sizes. `gen-mapping` exposes `maybeAddSegment`/`maybeAddMapping` APIs that will
intelligently determine if this marking adds useful information. If not, the marking will be
skipped.

```typescript
import { maybeAddMapping } from '@jridgewell/gen-mapping';

const map = new GenMapping();

// Adding a sourceless marking at the beginning of a line isn't useful.
maybeAddMapping(map, {
  generated: { line: 1, column: 0 },
});

// Adding a new source marking is useful.
maybeAddMapping(map, {
  generated: { line: 1, column: 0 },
  source: 'input.js',
  original: { line: 1, column: 0 },
});

// But adding another marking pointing to the exact same original location isn't, even if the
// generated column changed.
maybeAddMapping(map, {
  generated: { line: 1, column: 9 },
  source: 'input.js',
  original: { line: 1, column: 0 },
});

assert.deepEqual(toEncodedMap(map), {
  version: 3,
  names: [],
  sources: ['input.js'],
  sourcesContent: [null],
  mappings: 'AAAA',
});
```

## Benchmarks

```
node v18.0.0

amp.js.map
Memory Usage:
gen-mapping: addSegment      5852872 bytes
gen-mapping: addMapping      7716042 bytes
source-map-js                6143250 bytes
source-map-0.6.1             6124102 bytes
source-map-0.8.0             6121173 bytes
Smallest memory usage is gen-mapping: addSegment

Adding speed:
gen-mapping:      addSegment x 441 ops/sec ±2.07% (90 runs sampled)
gen-mapping:      addMapping x 350 ops/sec ±2.40% (86 runs sampled)
source-map-js:    addMapping x 169 ops/sec ±2.42% (80 runs sampled)
source-map-0.6.1: addMapping x 167 ops/sec ±2.56% (80 runs sampled)
source-map-0.8.0: addMapping x 168 ops/sec ±2.52% (80 runs sampled)
Fastest is gen-mapping: addSegment

Generate speed:
gen-mapping:      decoded output x 150,824,370 ops/sec ±0.07% (102 runs sampled)
gen-mapping:      encoded output x 663 ops/sec ±0.22% (98 runs sampled)
source-map-js:    encoded output x 197 ops/sec ±0.45% (84 runs sampled)
source-map-0.6.1: encoded output x 198 ops/sec ±0.33% (85 runs sampled)
source-map-0.8.0: encoded output x 197 ops/sec ±0.06% (93 runs sampled)
Fastest is gen-mapping: decoded output


***


babel.min.js.map
Memory Usage:
gen-mapping: addSegment     37578063 bytes
gen-mapping: addMapping     37212897 bytes
source-map-js               47638527 bytes
source-map-0.6.1            47690503 bytes
source-map-0.8.0            47470188 bytes
Smallest memory usage is gen-mapping: addMapping

Adding speed:
gen-mapping:      addSegment x 31.05 ops/sec ±8.31% (43 runs sampled)
gen-mapping:      addMapping x 29.83 ops/sec ±7.36% (51 runs sampled)
source-map-js:    addMapping x 20.73 ops/sec ±6.22% (38 runs sampled)
source-map-0.6.1: addMapping x 20.03 ops/sec ±10.51% (38 runs sampled)
source-map-0.8.0: addMapping x 19.30 ops/sec ±8.27% (37 runs sampled)
Fastest is gen-mapping: addSegment

Generate speed:
gen-mapping:      decoded output x 381,379,234 ops/sec ±0.29% (96 runs sampled)
gen-mapping:      encoded output x 95.15 ops/sec ±2.98% (72 runs sampled)
source-map-js:    encoded output x 15.20 ops/sec ±7.41% (33 runs sampled)
source-map-0.6.1: encoded output x 16.36 ops/sec ±10.46% (31 runs sampled)
source-map-0.8.0: encoded output x 16.06 ops/sec ±6.45% (31 runs sampled)
Fastest is gen-mapping: decoded output


***


preact.js.map
Memory Usage:
gen-mapping: addSegment       416247 bytes
gen-mapping: addMapping       419824 bytes
source-map-js                1024619 bytes
source-map-0.6.1             1146004 bytes
source-map-0.8.0             1113250 bytes
Smallest memory usage is gen-mapping: addSegment

Adding speed:
gen-mapping:      addSegment x 13,755 ops/sec ±0.15% (98 runs sampled)
gen-mapping:      addMapping x 13,013 ops/sec ±0.11% (101 runs sampled)
source-map-js:    addMapping x 4,564 ops/sec ±0.21% (98 runs sampled)
source-map-0.6.1: addMapping x 4,562 ops/sec ±0.11% (99 runs sampled)
source-map-0.8.0: addMapping x 4,593 ops/sec ±0.11% (100 runs sampled)
Fastest is gen-mapping: addSegment

Generate speed:
gen-mapping:      decoded output x 379,864,020 ops/sec ±0.23% (93 runs sampled)
gen-mapping:      encoded output x 14,368 ops/sec ±4.07% (82 runs sampled)
source-map-js:    encoded output x 5,261 ops/sec ±0.21% (99 runs sampled)
source-map-0.6.1: encoded output x 5,124 ops/sec ±0.58% (99 runs sampled)
source-map-0.8.0: encoded output x 5,434 ops/sec ±0.33% (96 runs sampled)
Fastest is gen-mapping: decoded output


***


react.js.map
Memory Usage:
gen-mapping: addSegment       975096 bytes
gen-mapping: addMapping      1102981 bytes
source-map-js                2918836 bytes
source-map-0.6.1             2885435 bytes
source-map-0.8.0             2874336 bytes
Smallest memory usage is gen-mapping: addSegment

Adding speed:
gen-mapping:      addSegment x 4,772 ops/sec ±0.15% (100 runs sampled)
gen-mapping:      addMapping x 4,456 ops/sec ±0.13% (97 runs sampled)
source-map-js:    addMapping x 1,618 ops/sec ±0.24% (97 runs sampled)
source-map-0.6.1: addMapping x 1,622 ops/sec ±0.12% (99 runs sampled)
source-map-0.8.0: addMapping x 1,631 ops/sec ±0.12% (100 runs sampled)
Fastest is gen-mapping: addSegment

Generate speed:
gen-mapping:      decoded output x 379,107,695 ops/sec ±0.07% (99 runs sampled)
gen-mapping:      encoded output x 5,421 ops/sec ±1.60% (89 runs sampled)
source-map-js:    encoded output x 2,113 ops/sec ±1.81% (98 runs sampled)
source-map-0.6.1: encoded output x 2,126 ops/sec ±0.10% (100 runs sampled)
source-map-0.8.0: encoded output x 2,176 ops/sec ±0.39% (98 runs sampled)
Fastest is gen-mapping: decoded output
```

[source-map]: https://www.npmjs.com/package/source-map
[trace-mapping]: https://github.com/jridgewell/trace-mapping
node_modules/@jridgewell/gen-mapping/dist/gen-mapping.mjs | 230 lines | generated, vendored, Normal file (new)
@@ -0,0 +1,230 @@
import { SetArray, put } from '@jridgewell/set-array';
import { encode } from '@jridgewell/sourcemap-codec';
import { TraceMap, decodedMappings } from '@jridgewell/trace-mapping';

const COLUMN = 0;
const SOURCES_INDEX = 1;
const SOURCE_LINE = 2;
const SOURCE_COLUMN = 3;
const NAMES_INDEX = 4;

const NO_NAME = -1;
/**
 * A low-level API to associate a generated position with an original source position. Line and
 * column here are 0-based, unlike `addMapping`.
 */
let addSegment;
/**
 * A high-level API to associate a generated position with an original source position. Line is
 * 1-based, but column is 0-based, due to legacy behavior in `source-map` library.
 */
let addMapping;
/**
 * Same as `addSegment`, but will only add the segment if it generates useful information in the
 * resulting map. This only works correctly if segments are added **in order**, meaning you should
 * not add a segment with a lower generated line/column than one that came before.
 */
let maybeAddSegment;
/**
 * Same as `addMapping`, but will only add the mapping if it generates useful information in the
 * resulting map. This only works correctly if mappings are added **in order**, meaning you should
 * not add a mapping with a lower generated line/column than one that came before.
 */
let maybeAddMapping;
/**
 * Adds/removes the content of the source file to the source map.
 */
let setSourceContent;
/**
 * Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
 * a sourcemap, or to JSON.stringify.
 */
let toDecodedMap;
/**
 * Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
 * a sourcemap, or to JSON.stringify.
 */
let toEncodedMap;
/**
 * Constructs a new GenMapping, using the already present mappings of the input.
 */
let fromMap;
/**
 * Returns an array of high-level mapping objects for every recorded segment, which could then be
 * passed to the `source-map` library.
 */
let allMappings;
// This split declaration is only so that terser can eliminate the static initialization block.
let addSegmentInternal;
/**
 * Provides the state to generate a sourcemap.
 */
class GenMapping {
    constructor({ file, sourceRoot } = {}) {
        this._names = new SetArray();
        this._sources = new SetArray();
        this._sourcesContent = [];
        this._mappings = [];
        this.file = file;
        this.sourceRoot = sourceRoot;
    }
}
(() => {
    addSegment = (map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) => {
        return addSegmentInternal(false, map, genLine, genColumn, source, sourceLine, sourceColumn, name, content);
    };
    maybeAddSegment = (map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) => {
        return addSegmentInternal(true, map, genLine, genColumn, source, sourceLine, sourceColumn, name, content);
    };
    addMapping = (map, mapping) => {
        return addMappingInternal(false, map, mapping);
    };
    maybeAddMapping = (map, mapping) => {
        return addMappingInternal(true, map, mapping);
    };
    setSourceContent = (map, source, content) => {
        const { _sources: sources, _sourcesContent: sourcesContent } = map;
        sourcesContent[put(sources, source)] = content;
    };
    toDecodedMap = (map) => {
        const { file, sourceRoot, _mappings: mappings, _sources: sources, _sourcesContent: sourcesContent, _names: names, } = map;
        removeEmptyFinalLines(mappings);
        return {
            version: 3,
            file: file || undefined,
            names: names.array,
            sourceRoot: sourceRoot || undefined,
            sources: sources.array,
            sourcesContent,
            mappings,
        };
    };
    toEncodedMap = (map) => {
        const decoded = toDecodedMap(map);
        return Object.assign(Object.assign({}, decoded), { mappings: encode(decoded.mappings) });
    };
    allMappings = (map) => {
        const out = [];
        const { _mappings: mappings, _sources: sources, _names: names } = map;
        for (let i = 0; i < mappings.length; i++) {
            const line = mappings[i];
            for (let j = 0; j < line.length; j++) {
                const seg = line[j];
                const generated = { line: i + 1, column: seg[COLUMN] };
                let source = undefined;
                let original = undefined;
                let name = undefined;
                if (seg.length !== 1) {
                    source = sources.array[seg[SOURCES_INDEX]];
                    original = { line: seg[SOURCE_LINE] + 1, column: seg[SOURCE_COLUMN] };
                    if (seg.length === 5)
                        name = names.array[seg[NAMES_INDEX]];
                }
                out.push({ generated, source, original, name });
            }
        }
        return out;
    };
    fromMap = (input) => {
        const map = new TraceMap(input);
        const gen = new GenMapping({ file: map.file, sourceRoot: map.sourceRoot });
        putAll(gen._names, map.names);
        putAll(gen._sources, map.sources);
        gen._sourcesContent = map.sourcesContent || map.sources.map(() => null);
        gen._mappings = decodedMappings(map);
        return gen;
    };
    // Internal helpers
    addSegmentInternal = (skipable, map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) => {
        const { _mappings: mappings, _sources: sources, _sourcesContent: sourcesContent, _names: names, } = map;
        const line = getLine(mappings, genLine);
        const index = getColumnIndex(line, genColumn);
        if (!source) {
            if (skipable && skipSourceless(line, index))
                return;
            return insert(line, index, [genColumn]);
        }
        const sourcesIndex = put(sources, source);
        const namesIndex = name ? put(names, name) : NO_NAME;
        if (sourcesIndex === sourcesContent.length)
            sourcesContent[sourcesIndex] = content !== null && content !== void 0 ? content : null;
        if (skipable && skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex)) {
            return;
        }
        return insert(line, index, name
            ? [genColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex]
            : [genColumn, sourcesIndex, sourceLine, sourceColumn]);
    };
})();
function getLine(mappings, index) {
    for (let i = mappings.length; i <= index; i++) {
        mappings[i] = [];
    }
    return mappings[index];
}
function getColumnIndex(line, genColumn) {
    let index = line.length;
    for (let i = index - 1; i >= 0; index = i--) {
        const current = line[i];
        if (genColumn >= current[COLUMN])
            break;
    }
    return index;
}
function insert(array, index, value) {
    for (let i = array.length; i > index; i--) {
        array[i] = array[i - 1];
    }
    array[index] = value;
}
function removeEmptyFinalLines(mappings) {
    const { length } = mappings;
    let len = length;
    for (let i = len - 1; i >= 0; len = i, i--) {
        if (mappings[i].length > 0)
            break;
    }
    if (len < length)
        mappings.length = len;
}
function putAll(strarr, array) {
    for (let i = 0; i < array.length; i++)
        put(strarr, array[i]);
}
function skipSourceless(line, index) {
    // The start of a line is already sourceless, so adding a sourceless segment to the beginning
    // doesn't generate any useful information.
    if (index === 0)
        return true;
    const prev = line[index - 1];
    // If the previous segment is also sourceless, then adding another sourceless segment doesn't
    // generate any new information. Else, this segment will end the source/named segment and point to
    // a sourceless position, which is useful.
    return prev.length === 1;
}
function skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex) {
    // A source/named segment at the start of a line gives position at that genColumn
    if (index === 0)
        return false;
    const prev = line[index - 1];
    // If the previous segment is sourceless, then we're transitioning to a source.
    if (prev.length === 1)
        return false;
    // If the previous segment maps to the exact same source position, then this segment doesn't
    // provide any new position information.
    return (sourcesIndex === prev[SOURCES_INDEX] &&
        sourceLine === prev[SOURCE_LINE] &&
        sourceColumn === prev[SOURCE_COLUMN] &&
        namesIndex === (prev.length === 5 ? prev[NAMES_INDEX] : NO_NAME));
}
function addMappingInternal(skipable, map, mapping) {
    const { generated, source, original, name, content } = mapping;
    if (!source) {
        return addSegmentInternal(skipable, map, generated.line - 1, generated.column, null, null, null, null, null);
    }
    const s = source;
    return addSegmentInternal(skipable, map, generated.line - 1, generated.column, s, original.line - 1, original.column, name, content);
}

export { GenMapping, addMapping, addSegment, allMappings, fromMap, maybeAddMapping, maybeAddSegment, setSourceContent, toDecodedMap, toEncodedMap };
//# sourceMappingURL=gen-mapping.mjs.map
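The package README (above) demonstrates building a map from scratch; `fromMap`, exported at the bottom of this module, instead seeds a `GenMapping` with the decoded mappings of an existing map so a later pass can append to it. A minimal sketch with a made-up one-segment input map (not a map from this repository):

```js
import { fromMap, maybeAddMapping, toEncodedMap } from '@jridgewell/gen-mapping';

// A made-up map from a hypothetical earlier compile pass.
const previousMap = {
    version: 3,
    names: [],
    sources: ['input.js'],
    sourcesContent: [null],
    mappings: 'AAAA', // one segment: generated 1:0 -> input.js 1:0
};

const gen = fromMap(previousMap);

// Append a mapping for code this pass emitted further down the file.
maybeAddMapping(gen, {
    generated: { line: 2, column: 0 }, // 1-based line, 0-based column
    source: 'input.js',
    original: { line: 2, column: 0 },
});

console.log(toEncodedMap(gen).mappings);
//=> 'AAAA;AACA'
```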
node_modules/@jridgewell/gen-mapping/dist/gen-mapping.mjs.map | 1 line | generated, vendored, Normal file (new)
File diff suppressed because one or more lines are too long.
236
node_modules/@jridgewell/gen-mapping/dist/gen-mapping.umd.js
generated
vendored
Normal file
236
node_modules/@jridgewell/gen-mapping/dist/gen-mapping.umd.js
generated
vendored
Normal file
@@ -0,0 +1,236 @@
|
||||
(function (global, factory) {
|
||||
typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports, require('@jridgewell/set-array'), require('@jridgewell/sourcemap-codec'), require('@jridgewell/trace-mapping')) :
|
||||
typeof define === 'function' && define.amd ? define(['exports', '@jridgewell/set-array', '@jridgewell/sourcemap-codec', '@jridgewell/trace-mapping'], factory) :
|
||||
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.genMapping = {}, global.setArray, global.sourcemapCodec, global.traceMapping));
|
||||
})(this, (function (exports, setArray, sourcemapCodec, traceMapping) { 'use strict';
|
||||
|
||||
const COLUMN = 0;
|
||||
const SOURCES_INDEX = 1;
|
||||
const SOURCE_LINE = 2;
|
||||
const SOURCE_COLUMN = 3;
|
||||
const NAMES_INDEX = 4;
|
||||
|
||||
const NO_NAME = -1;
|
||||
/**
|
||||
* A low-level API to associate a generated position with an original source position. Line and
|
||||
* column here are 0-based, unlike `addMapping`.
|
||||
*/
|
||||
exports.addSegment = void 0;
|
||||
/**
|
||||
* A high-level API to associate a generated position with an original source position. Line is
|
||||
* 1-based, but column is 0-based, due to legacy behavior in `source-map` library.
|
||||
*/
|
||||
exports.addMapping = void 0;
|
||||
/**
|
||||
* Same as `addSegment`, but will only add the segment if it generates useful information in the
|
||||
* resulting map. This only works correctly if segments are added **in order**, meaning you should
|
||||
* not add a segment with a lower generated line/column than one that came before.
|
||||
*/
|
||||
exports.maybeAddSegment = void 0;
|
||||
/**
|
||||
* Same as `addMapping`, but will only add the mapping if it generates useful information in the
|
||||
* resulting map. This only works correctly if mappings are added **in order**, meaning you should
|
||||
* not add a mapping with a lower generated line/column than one that came before.
|
||||
*/
|
||||
exports.maybeAddMapping = void 0;
|
||||
/**
|
||||
* Adds/removes the content of the source file to the source map.
|
||||
*/
|
||||
exports.setSourceContent = void 0;
|
||||
/**
|
||||
* Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
|
||||
* a sourcemap, or to JSON.stringify.
|
||||
*/
|
||||
exports.toDecodedMap = void 0;
|
||||
/**
|
||||
* Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
|
||||
* a sourcemap, or to JSON.stringify.
|
||||
*/
|
||||
exports.toEncodedMap = void 0;
|
||||
/**
|
||||
* Constructs a new GenMapping, using the already present mappings of the input.
|
||||
*/
|
||||
exports.fromMap = void 0;
|
||||
/**
|
||||
* Returns an array of high-level mapping objects for every recorded segment, which could then be
|
||||
* passed to the `source-map` library.
|
||||
*/
|
||||
exports.allMappings = void 0;
|
||||
// This split declaration is only so that terser can elminiate the static initialization block.
|
||||
let addSegmentInternal;
|
||||
/**
|
||||
* Provides the state to generate a sourcemap.
|
||||
*/
|
||||
class GenMapping {
|
||||
constructor({ file, sourceRoot } = {}) {
|
||||
this._names = new setArray.SetArray();
|
||||
this._sources = new setArray.SetArray();
|
||||
this._sourcesContent = [];
|
||||
this._mappings = [];
|
||||
this.file = file;
|
||||
this.sourceRoot = sourceRoot;
|
||||
}
|
||||
}
|
||||
(() => {
|
||||
exports.addSegment = (map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) => {
|
||||
return addSegmentInternal(false, map, genLine, genColumn, source, sourceLine, sourceColumn, name, content);
|
||||
};
|
||||
exports.maybeAddSegment = (map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) => {
|
||||
return addSegmentInternal(true, map, genLine, genColumn, source, sourceLine, sourceColumn, name, content);
|
||||
};
|
||||
exports.addMapping = (map, mapping) => {
|
||||
return addMappingInternal(false, map, mapping);
|
||||
};
|
||||
exports.maybeAddMapping = (map, mapping) => {
|
||||
return addMappingInternal(true, map, mapping);
|
||||
};
|
||||
exports.setSourceContent = (map, source, content) => {
|
||||
const { _sources: sources, _sourcesContent: sourcesContent } = map;
|
||||
sourcesContent[setArray.put(sources, source)] = content;
|
||||
};
|
||||
exports.toDecodedMap = (map) => {
|
||||
const { file, sourceRoot, _mappings: mappings, _sources: sources, _sourcesContent: sourcesContent, _names: names, } = map;
|
||||
            removeEmptyFinalLines(mappings);
            return {
                version: 3,
                file: file || undefined,
                names: names.array,
                sourceRoot: sourceRoot || undefined,
                sources: sources.array,
                sourcesContent,
                mappings,
            };
        };
        exports.toEncodedMap = (map) => {
            const decoded = exports.toDecodedMap(map);
            return Object.assign(Object.assign({}, decoded), { mappings: sourcemapCodec.encode(decoded.mappings) });
        };
        exports.allMappings = (map) => {
            const out = [];
            const { _mappings: mappings, _sources: sources, _names: names } = map;
            for (let i = 0; i < mappings.length; i++) {
                const line = mappings[i];
                for (let j = 0; j < line.length; j++) {
                    const seg = line[j];
                    const generated = { line: i + 1, column: seg[COLUMN] };
                    let source = undefined;
                    let original = undefined;
                    let name = undefined;
                    if (seg.length !== 1) {
                        source = sources.array[seg[SOURCES_INDEX]];
                        original = { line: seg[SOURCE_LINE] + 1, column: seg[SOURCE_COLUMN] };
                        if (seg.length === 5)
                            name = names.array[seg[NAMES_INDEX]];
                    }
                    out.push({ generated, source, original, name });
                }
            }
            return out;
        };
        exports.fromMap = (input) => {
            const map = new traceMapping.TraceMap(input);
            const gen = new GenMapping({ file: map.file, sourceRoot: map.sourceRoot });
            putAll(gen._names, map.names);
            putAll(gen._sources, map.sources);
            gen._sourcesContent = map.sourcesContent || map.sources.map(() => null);
            gen._mappings = traceMapping.decodedMappings(map);
            return gen;
        };
        // Internal helpers
        addSegmentInternal = (skipable, map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) => {
            const { _mappings: mappings, _sources: sources, _sourcesContent: sourcesContent, _names: names, } = map;
            const line = getLine(mappings, genLine);
            const index = getColumnIndex(line, genColumn);
            if (!source) {
                if (skipable && skipSourceless(line, index))
                    return;
                return insert(line, index, [genColumn]);
            }
            const sourcesIndex = setArray.put(sources, source);
            const namesIndex = name ? setArray.put(names, name) : NO_NAME;
            if (sourcesIndex === sourcesContent.length)
                sourcesContent[sourcesIndex] = content !== null && content !== void 0 ? content : null;
            if (skipable && skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex)) {
                return;
            }
            return insert(line, index, name
                ? [genColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex]
                : [genColumn, sourcesIndex, sourceLine, sourceColumn]);
        };
    })();
    function getLine(mappings, index) {
        for (let i = mappings.length; i <= index; i++) {
            mappings[i] = [];
        }
        return mappings[index];
    }
    function getColumnIndex(line, genColumn) {
        let index = line.length;
        for (let i = index - 1; i >= 0; index = i--) {
            const current = line[i];
            if (genColumn >= current[COLUMN])
                break;
        }
        return index;
    }
    function insert(array, index, value) {
        for (let i = array.length; i > index; i--) {
            array[i] = array[i - 1];
        }
        array[index] = value;
    }
    function removeEmptyFinalLines(mappings) {
        const { length } = mappings;
        let len = length;
        for (let i = len - 1; i >= 0; len = i, i--) {
            if (mappings[i].length > 0)
                break;
        }
        if (len < length)
            mappings.length = len;
    }
    function putAll(strarr, array) {
        for (let i = 0; i < array.length; i++)
            setArray.put(strarr, array[i]);
    }
    function skipSourceless(line, index) {
        // The start of a line is already sourceless, so adding a sourceless segment to the beginning
        // doesn't generate any useful information.
        if (index === 0)
            return true;
        const prev = line[index - 1];
        // If the previous segment is also sourceless, then adding another sourceless segment doesn't
        // generate any new information. Else, this segment will end the source/named segment and point to
        // a sourceless position, which is useful.
        return prev.length === 1;
    }
    function skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex) {
        // A source/named segment at the start of a line gives position at that genColumn
        if (index === 0)
            return false;
        const prev = line[index - 1];
        // If the previous segment is sourceless, then we're transitioning to a source.
        if (prev.length === 1)
            return false;
        // If the previous segment maps to the exact same source position, then this segment doesn't
        // provide any new position information.
        return (sourcesIndex === prev[SOURCES_INDEX] &&
            sourceLine === prev[SOURCE_LINE] &&
            sourceColumn === prev[SOURCE_COLUMN] &&
            namesIndex === (prev.length === 5 ? prev[NAMES_INDEX] : NO_NAME));
    }
    function addMappingInternal(skipable, map, mapping) {
        const { generated, source, original, name, content } = mapping;
        if (!source) {
            return addSegmentInternal(skipable, map, generated.line - 1, generated.column, null, null, null, null, null);
        }
        const s = source;
        return addSegmentInternal(skipable, map, generated.line - 1, generated.column, s, original.line - 1, original.column, name, content);
    }

    exports.GenMapping = GenMapping;

    Object.defineProperty(exports, '__esModule', { value: true });

}));
//# sourceMappingURL=gen-mapping.umd.js.map
1 node_modules/@jridgewell/gen-mapping/dist/gen-mapping.umd.js.map generated vendored Normal file
File diff suppressed because one or more lines are too long
90 node_modules/@jridgewell/gen-mapping/dist/types/gen-mapping.d.ts generated vendored Normal file
@@ -0,0 +1,90 @@
import type { SourceMapInput } from '@jridgewell/trace-mapping';
import type { DecodedSourceMap, EncodedSourceMap, Pos, Mapping } from './types';
export type { DecodedSourceMap, EncodedSourceMap, Mapping };
export declare type Options = {
    file?: string | null;
    sourceRoot?: string | null;
};
/**
 * A low-level API to associate a generated position with an original source position. Line and
 * column here are 0-based, unlike `addMapping`.
 */
export declare let addSegment: {
    (map: GenMapping, genLine: number, genColumn: number, source?: null, sourceLine?: null, sourceColumn?: null, name?: null, content?: null): void;
    (map: GenMapping, genLine: number, genColumn: number, source: string, sourceLine: number, sourceColumn: number, name?: null, content?: string | null): void;
    (map: GenMapping, genLine: number, genColumn: number, source: string, sourceLine: number, sourceColumn: number, name: string, content?: string | null): void;
};
/**
 * A high-level API to associate a generated position with an original source position. Line is
 * 1-based, but column is 0-based, due to legacy behavior in `source-map` library.
 */
export declare let addMapping: {
    (map: GenMapping, mapping: {
        generated: Pos;
        source?: null;
        original?: null;
        name?: null;
        content?: null;
    }): void;
    (map: GenMapping, mapping: {
        generated: Pos;
        source: string;
        original: Pos;
        name?: null;
        content?: string | null;
    }): void;
    (map: GenMapping, mapping: {
        generated: Pos;
        source: string;
        original: Pos;
        name: string;
        content?: string | null;
    }): void;
};
/**
 * Same as `addSegment`, but will only add the segment if it generates useful information in the
 * resulting map. This only works correctly if segments are added **in order**, meaning you should
 * not add a segment with a lower generated line/column than one that came before.
 */
export declare let maybeAddSegment: typeof addSegment;
/**
 * Same as `addMapping`, but will only add the mapping if it generates useful information in the
 * resulting map. This only works correctly if mappings are added **in order**, meaning you should
 * not add a mapping with a lower generated line/column than one that came before.
 */
export declare let maybeAddMapping: typeof addMapping;
/**
 * Adds/removes the content of the source file to the source map.
 */
export declare let setSourceContent: (map: GenMapping, source: string, content: string | null) => void;
/**
 * Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
 * a sourcemap, or to JSON.stringify.
 */
export declare let toDecodedMap: (map: GenMapping) => DecodedSourceMap;
/**
 * Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
 * a sourcemap, or to JSON.stringify.
 */
export declare let toEncodedMap: (map: GenMapping) => EncodedSourceMap;
/**
 * Constructs a new GenMapping, using the already present mappings of the input.
 */
export declare let fromMap: (input: SourceMapInput) => GenMapping;
/**
 * Returns an array of high-level mapping objects for every recorded segment, which could then be
 * passed to the `source-map` library.
 */
export declare let allMappings: (map: GenMapping) => Mapping[];
/**
 * Provides the state to generate a sourcemap.
 */
export declare class GenMapping {
    private _names;
    private _sources;
    private _sourcesContent;
    private _mappings;
    file: string | null | undefined;
    sourceRoot: string | null | undefined;
    constructor({ file, sourceRoot }?: Options);
}
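The two entry points above differ only in indexing: `addSegment` is fully 0-based, while `addMapping` takes 1-based lines. A minimal sketch using only names declared in this file (file names and positions are illustrative):

```js
import { GenMapping, addMapping, addSegment, toEncodedMap } from '@jridgewell/gen-mapping';

const map = new GenMapping({ file: 'out.js' });

// High-level API: 1-based line, 0-based column.
addMapping(map, {
  generated: { line: 1, column: 0 },
  source: 'input.js',
  original: { line: 1, column: 0 },
});

// Low-level API: 0-based line and column.
addSegment(map, 0, 9, 'input.js', 0, 9, 'foo');

toEncodedMap(map); // => { version: 3, file: 'out.js', sources: ['input.js'], names: ['foo'], mappings: '...' }
```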
12 node_modules/@jridgewell/gen-mapping/dist/types/sourcemap-segment.d.ts generated vendored Normal file
@@ -0,0 +1,12 @@
declare type GeneratedColumn = number;
declare type SourcesIndex = number;
declare type SourceLine = number;
declare type SourceColumn = number;
declare type NamesIndex = number;
export declare type SourceMapSegment = [GeneratedColumn] | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn] | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn, NamesIndex];
export declare const COLUMN = 0;
export declare const SOURCES_INDEX = 1;
export declare const SOURCE_LINE = 2;
export declare const SOURCE_COLUMN = 3;
export declare const NAMES_INDEX = 4;
export {};
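A segment is just an array read through the index constants declared above; a small illustrative sketch:

```js
// [generatedColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex]
const seg = [2, 0, 2, 2, 0];
seg[COLUMN];        // 2, column in the generated file
seg[SOURCES_INDEX]; // 0, index into the map's `sources` array
seg[NAMES_INDEX];   // 0, only present on 5-element segments
```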
35 node_modules/@jridgewell/gen-mapping/dist/types/types.d.ts generated vendored Normal file
@@ -0,0 +1,35 @@
import type { SourceMapSegment } from './sourcemap-segment';
export interface SourceMapV3 {
    file?: string | null;
    names: readonly string[];
    sourceRoot?: string;
    sources: readonly (string | null)[];
    sourcesContent?: readonly (string | null)[];
    version: 3;
}
export interface EncodedSourceMap extends SourceMapV3 {
    mappings: string;
}
export interface DecodedSourceMap extends SourceMapV3 {
    mappings: readonly SourceMapSegment[][];
}
export interface Pos {
    line: number;
    column: number;
}
export declare type Mapping = {
    generated: Pos;
    source: undefined;
    original: undefined;
    name: undefined;
} | {
    generated: Pos;
    source: string;
    original: Pos;
    name: string;
} | {
    generated: Pos;
    source: string;
    original: Pos;
    name: undefined;
};
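The two map shapes differ only in the `mappings` field. As a sketch, the same one-segment map in both forms (the VLQ string `'AAAA'` decodes to the segment `[0, 0, 0, 0]`):

```js
const encoded = { version: 3, names: [], sources: ['input.js'], mappings: 'AAAA' };
const decoded = { version: 3, names: [], sources: ['input.js'], mappings: [[[0, 0, 0, 0]]] };
```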
77 node_modules/@jridgewell/gen-mapping/package.json generated vendored Normal file
@@ -0,0 +1,77 @@
{
  "name": "@jridgewell/gen-mapping",
  "version": "0.3.3",
  "description": "Generate source maps",
  "keywords": [
    "source",
    "map"
  ],
  "author": "Justin Ridgewell <justin@ridgewell.name>",
  "license": "MIT",
  "repository": "https://github.com/jridgewell/gen-mapping",
  "main": "dist/gen-mapping.umd.js",
  "module": "dist/gen-mapping.mjs",
  "types": "dist/types/gen-mapping.d.ts",
  "exports": {
    ".": [
      {
        "types": "./dist/types/gen-mapping.d.ts",
        "browser": "./dist/gen-mapping.umd.js",
        "require": "./dist/gen-mapping.umd.js",
        "import": "./dist/gen-mapping.mjs"
      },
      "./dist/gen-mapping.umd.js"
    ],
    "./package.json": "./package.json"
  },
  "files": [
    "dist"
  ],
  "engines": {
    "node": ">=6.0.0"
  },
  "scripts": {
    "benchmark": "run-s build:rollup benchmark:*",
    "benchmark:install": "cd benchmark && npm install",
    "benchmark:only": "node benchmark/index.mjs",
    "prebuild": "rm -rf dist",
    "build": "run-s -n build:*",
    "build:rollup": "rollup -c rollup.config.js",
    "build:ts": "tsc --project tsconfig.build.json",
    "lint": "run-s -n lint:*",
    "lint:prettier": "npm run test:lint:prettier -- --write",
    "lint:ts": "npm run test:lint:ts -- --fix",
    "pretest": "run-s build:rollup",
    "test": "run-s -n test:lint test:coverage",
    "test:debug": "mocha --inspect-brk",
    "test:lint": "run-s -n test:lint:*",
    "test:lint:prettier": "prettier --check '{src,test}/**/*.ts'",
    "test:lint:ts": "eslint '{src,test}/**/*.ts'",
    "test:only": "mocha",
    "test:coverage": "c8 mocha",
    "test:watch": "run-p 'build:rollup -- --watch' 'test:only -- --watch'",
    "prepublishOnly": "npm run preversion",
    "preversion": "run-s test build"
  },
  "devDependencies": {
    "@rollup/plugin-typescript": "8.3.2",
    "@types/mocha": "9.1.1",
    "@types/node": "17.0.29",
    "@typescript-eslint/eslint-plugin": "5.21.0",
    "@typescript-eslint/parser": "5.21.0",
    "benchmark": "2.1.4",
    "c8": "7.11.2",
    "eslint": "8.14.0",
    "eslint-config-prettier": "8.5.0",
    "mocha": "9.2.2",
    "npm-run-all": "4.1.5",
    "prettier": "2.6.2",
    "rollup": "2.70.2",
    "typescript": "4.6.3"
  },
  "dependencies": {
    "@jridgewell/set-array": "^1.0.1",
    "@jridgewell/sourcemap-codec": "^1.4.10",
    "@jridgewell/trace-mapping": "^0.3.9"
  }
}
19 node_modules/@jridgewell/resolve-uri/LICENSE generated vendored Normal file
@@ -0,0 +1,19 @@
Copyright 2019 Justin Ridgewell <jridgewell@google.com>

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
40 node_modules/@jridgewell/resolve-uri/README.md generated vendored Normal file
@@ -0,0 +1,40 @@
# @jridgewell/resolve-uri

> Resolve a URI relative to an optional base URI

Resolve any combination of absolute URIs, protocol-relative URIs, absolute paths, or relative paths.

## Installation

```sh
npm install @jridgewell/resolve-uri
```

## Usage

```typescript
function resolve(input: string, base?: string): string;
```

```js
import resolve from '@jridgewell/resolve-uri';

resolve('foo', 'https://example.com'); // => 'https://example.com/foo'
```

| Input                 | Base                    | Resolution                     | Explanation                                                  |
|-----------------------|-------------------------|--------------------------------|--------------------------------------------------------------|
| `https://example.com` | _any_                   | `https://example.com/`         | Input is normalized only                                     |
| `//example.com`       | `https://base.com/`     | `https://example.com/`         | Input inherits the base's protocol                           |
| `//example.com`       | _rest_                  | `//example.com/`               | Input is normalized only                                     |
| `/example`            | `https://base.com/`     | `https://base.com/example`     | Input inherits the base's origin                             |
| `/example`            | `//base.com/`           | `//base.com/example`           | Input inherits the base's host and remains protocol relative |
| `/example`            | _rest_                  | `/example`                     | Input is normalized only                                     |
| `example`             | `https://base.com/dir/` | `https://base.com/dir/example` | Input is joined with the base                                |
| `example`             | `https://base.com/file` | `https://base.com/example`     | Input is joined with the base without its file               |
| `example`             | `//base.com/dir/`       | `//base.com/dir/example`       | Input is joined with the base's last directory               |
| `example`             | `//base.com/file`       | `//base.com/example`           | Input is joined with the base without its file               |
| `example`             | `/base/dir/`            | `/base/dir/example`            | Input is joined with the base's last directory               |
| `example`             | `/base/file`            | `/base/example`                | Input is joined with the base without its file               |
| `example`             | `base/dir/`             | `base/dir/example`             | Input is joined with the base's last directory               |
| `example`             | `base/file`             | `base/example`                 | Input is joined with the base without its file               |
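A few calls that mirror rows of the table, with the expected results taken from the table itself:

```js
import resolve from '@jridgewell/resolve-uri';

resolve('/example', 'https://base.com/');    // => 'https://base.com/example'
resolve('example', 'https://base.com/dir/'); // => 'https://base.com/dir/example'
resolve('example', 'https://base.com/file'); // => 'https://base.com/example'
```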
242 node_modules/@jridgewell/resolve-uri/dist/resolve-uri.mjs generated vendored Normal file
@@ -0,0 +1,242 @@
// Matches the scheme of a URL, eg "http://"
const schemeRegex = /^[\w+.-]+:\/\//;
/**
 * Matches the parts of a URL:
 * 1. Scheme, including ":", guaranteed.
 * 2. User/password, including "@", optional.
 * 3. Host, guaranteed.
 * 4. Port, including ":", optional.
 * 5. Path, including "/", optional.
 * 6. Query, including "?", optional.
 * 7. Hash, including "#", optional.
 */
const urlRegex = /^([\w+.-]+:)\/\/([^@/#?]*@)?([^:/#?]*)(:\d+)?(\/[^#?]*)?(\?[^#]*)?(#.*)?/;
/**
 * File URLs are weird. They don't need the regular `//` in the scheme, they may or may not start
 * with a leading `/`, and they can have a domain (but only if they don't start with a Windows drive).
 *
 * 1. Host, optional.
 * 2. Path, which may include "/", guaranteed.
 * 3. Query, including "?", optional.
 * 4. Hash, including "#", optional.
 */
const fileRegex = /^file:(?:\/\/((?![a-z]:)[^/#?]*)?)?(\/?[^#?]*)(\?[^#]*)?(#.*)?/i;
var UrlType;
(function (UrlType) {
    UrlType[UrlType["Empty"] = 1] = "Empty";
    UrlType[UrlType["Hash"] = 2] = "Hash";
    UrlType[UrlType["Query"] = 3] = "Query";
    UrlType[UrlType["RelativePath"] = 4] = "RelativePath";
    UrlType[UrlType["AbsolutePath"] = 5] = "AbsolutePath";
    UrlType[UrlType["SchemeRelative"] = 6] = "SchemeRelative";
    UrlType[UrlType["Absolute"] = 7] = "Absolute";
})(UrlType || (UrlType = {}));
function isAbsoluteUrl(input) {
    return schemeRegex.test(input);
}
function isSchemeRelativeUrl(input) {
    return input.startsWith('//');
}
function isAbsolutePath(input) {
    return input.startsWith('/');
}
function isFileUrl(input) {
    return input.startsWith('file:');
}
function isRelative(input) {
    return /^[.?#]/.test(input);
}
function parseAbsoluteUrl(input) {
    const match = urlRegex.exec(input);
    return makeUrl(match[1], match[2] || '', match[3], match[4] || '', match[5] || '/', match[6] || '', match[7] || '');
}
function parseFileUrl(input) {
    const match = fileRegex.exec(input);
    const path = match[2];
    return makeUrl('file:', '', match[1] || '', '', isAbsolutePath(path) ? path : '/' + path, match[3] || '', match[4] || '');
}
function makeUrl(scheme, user, host, port, path, query, hash) {
    return {
        scheme,
        user,
        host,
        port,
        path,
        query,
        hash,
        type: UrlType.Absolute,
    };
}
function parseUrl(input) {
    if (isSchemeRelativeUrl(input)) {
        const url = parseAbsoluteUrl('http:' + input);
        url.scheme = '';
        url.type = UrlType.SchemeRelative;
        return url;
    }
    if (isAbsolutePath(input)) {
        const url = parseAbsoluteUrl('http://foo.com' + input);
        url.scheme = '';
        url.host = '';
        url.type = UrlType.AbsolutePath;
        return url;
    }
    if (isFileUrl(input))
        return parseFileUrl(input);
    if (isAbsoluteUrl(input))
        return parseAbsoluteUrl(input);
    const url = parseAbsoluteUrl('http://foo.com/' + input);
    url.scheme = '';
    url.host = '';
    url.type = input
        ? input.startsWith('?')
            ? UrlType.Query
            : input.startsWith('#')
                ? UrlType.Hash
                : UrlType.RelativePath
        : UrlType.Empty;
    return url;
}
function stripPathFilename(path) {
    // If a path ends with a parent directory "..", then it's a relative path with excess parent
    // paths. It's not a file, so we can't strip it.
    if (path.endsWith('/..'))
        return path;
    const index = path.lastIndexOf('/');
    return path.slice(0, index + 1);
}
function mergePaths(url, base) {
    normalizePath(base, base.type);
    // If the path is just a "/", then it was an empty path to begin with (remember, we're a relative
    // path).
    if (url.path === '/') {
        url.path = base.path;
    }
    else {
        // Resolution happens relative to the base path's directory, not the file.
        url.path = stripPathFilename(base.path) + url.path;
    }
}
/**
 * The path can have empty directories "//", unneeded parents "foo/..", or current directory
 * "foo/.". We need to normalize to a standard representation.
 */
function normalizePath(url, type) {
    const rel = type <= UrlType.RelativePath;
    const pieces = url.path.split('/');
    // We need to preserve the first piece always, so that we output a leading slash. The item at
    // pieces[0] is an empty string.
    let pointer = 1;
    // Positive is the number of real directories we've output, used for popping a parent directory.
    // Eg, "foo/bar/.." will have a positive 2, and we can decrement to be left with just "foo".
    let positive = 0;
    // We need to keep a trailing slash if we encounter an empty directory (eg, splitting "foo/" will
    // generate `["foo", ""]` pieces). And, if we pop a parent directory. But once we encounter a
    // real directory, we won't need to append, unless the other conditions happen again.
    let addTrailingSlash = false;
    for (let i = 1; i < pieces.length; i++) {
        const piece = pieces[i];
        // An empty directory, could be a trailing slash, or just a double "//" in the path.
        if (!piece) {
            addTrailingSlash = true;
            continue;
        }
        // If we encounter a real directory, then we don't need to append anymore.
        addTrailingSlash = false;
        // A current directory, which we can always drop.
        if (piece === '.')
            continue;
        // A parent directory, we need to see if there are any real directories we can pop. Else, we
        // have an excess of parents, and we'll need to keep the "..".
        if (piece === '..') {
            if (positive) {
                addTrailingSlash = true;
                positive--;
                pointer--;
            }
            else if (rel) {
                // If we're in a relativePath, then we need to keep the excess parents. Else, in an absolute
                // URL, protocol relative URL, or an absolute path, we don't need to keep excess.
                pieces[pointer++] = piece;
            }
            continue;
        }
        // We've encountered a real directory. Move it to the next insertion pointer, which accounts for
        // any popped or dropped directories.
        pieces[pointer++] = piece;
        positive++;
    }
    let path = '';
    for (let i = 1; i < pointer; i++) {
        path += '/' + pieces[i];
    }
    if (!path || (addTrailingSlash && !path.endsWith('/..'))) {
        path += '/';
    }
    url.path = path;
}
/**
 * Attempts to resolve `input` URL/path relative to `base`.
 */
function resolve(input, base) {
    if (!input && !base)
        return '';
    const url = parseUrl(input);
    let inputType = url.type;
    if (base && inputType !== UrlType.Absolute) {
        const baseUrl = parseUrl(base);
        const baseType = baseUrl.type;
        switch (inputType) {
            case UrlType.Empty:
                url.hash = baseUrl.hash;
            // fall through
            case UrlType.Hash:
                url.query = baseUrl.query;
            // fall through
            case UrlType.Query:
            case UrlType.RelativePath:
                mergePaths(url, baseUrl);
            // fall through
            case UrlType.AbsolutePath:
                // The host, user, and port are joined, you can't copy one without the others.
                url.user = baseUrl.user;
                url.host = baseUrl.host;
                url.port = baseUrl.port;
            // fall through
            case UrlType.SchemeRelative:
                // The input is missing at least a scheme, so copy that over from the base.
                url.scheme = baseUrl.scheme;
        }
        if (baseType > inputType)
            inputType = baseType;
    }
    normalizePath(url, inputType);
    const queryHash = url.query + url.hash;
    switch (inputType) {
        // This is impossible, because of the empty checks at the start of the function.
        // case UrlType.Empty:
        case UrlType.Hash:
        case UrlType.Query:
            return queryHash;
        case UrlType.RelativePath: {
            // The first char is always a "/", and we need it to be relative.
            const path = url.path.slice(1);
            if (!path)
                return queryHash || '.';
            if (isRelative(base || input) && !isRelative(path)) {
                // If base started with a leading ".", or there is no base and input started with a ".",
                // then we need to ensure that the relative path starts with a ".". We don't know if
                // relative starts with a "..", though, so check before prepending.
                return './' + path + queryHash;
            }
            return path + queryHash;
        }
        case UrlType.AbsolutePath:
            return url.path + queryHash;
        default:
            return url.scheme + '//' + url.user + url.host + url.port + url.path + queryHash;
    }
}

export { resolve as default };
//# sourceMappingURL=resolve-uri.mjs.map
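The `normalizePath` loop above is what collapses `.`, `..`, and empty `//` directories after input and base are merged; a sketch of the observable effect through `resolve` (the result follows the rules in the code above):

```js
import resolve from '@jridgewell/resolve-uri';

// 'foo/./bar/../baz' is joined onto the base's directory, then normalized.
resolve('foo/./bar/../baz', 'https://example.com/dir/'); // => 'https://example.com/dir/foo/baz'
```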
1 node_modules/@jridgewell/resolve-uri/dist/resolve-uri.mjs.map generated vendored Normal file
File diff suppressed because one or more lines are too long
250 node_modules/@jridgewell/resolve-uri/dist/resolve-uri.umd.js generated vendored Normal file
@@ -0,0 +1,250 @@
(function (global, factory) {
    typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory() :
    typeof define === 'function' && define.amd ? define(factory) :
    (global = typeof globalThis !== 'undefined' ? globalThis : global || self, global.resolveURI = factory());
})(this, (function () { 'use strict';

    // Matches the scheme of a URL, eg "http://"
    const schemeRegex = /^[\w+.-]+:\/\//;
    /**
     * Matches the parts of a URL:
     * 1. Scheme, including ":", guaranteed.
     * 2. User/password, including "@", optional.
     * 3. Host, guaranteed.
     * 4. Port, including ":", optional.
     * 5. Path, including "/", optional.
     * 6. Query, including "?", optional.
     * 7. Hash, including "#", optional.
     */
    const urlRegex = /^([\w+.-]+:)\/\/([^@/#?]*@)?([^:/#?]*)(:\d+)?(\/[^#?]*)?(\?[^#]*)?(#.*)?/;
    /**
     * File URLs are weird. They don't need the regular `//` in the scheme, they may or may not start
     * with a leading `/`, and they can have a domain (but only if they don't start with a Windows drive).
     *
     * 1. Host, optional.
     * 2. Path, which may include "/", guaranteed.
     * 3. Query, including "?", optional.
     * 4. Hash, including "#", optional.
     */
    const fileRegex = /^file:(?:\/\/((?![a-z]:)[^/#?]*)?)?(\/?[^#?]*)(\?[^#]*)?(#.*)?/i;
    var UrlType;
    (function (UrlType) {
        UrlType[UrlType["Empty"] = 1] = "Empty";
        UrlType[UrlType["Hash"] = 2] = "Hash";
        UrlType[UrlType["Query"] = 3] = "Query";
        UrlType[UrlType["RelativePath"] = 4] = "RelativePath";
        UrlType[UrlType["AbsolutePath"] = 5] = "AbsolutePath";
        UrlType[UrlType["SchemeRelative"] = 6] = "SchemeRelative";
        UrlType[UrlType["Absolute"] = 7] = "Absolute";
    })(UrlType || (UrlType = {}));
    function isAbsoluteUrl(input) {
        return schemeRegex.test(input);
    }
    function isSchemeRelativeUrl(input) {
        return input.startsWith('//');
    }
    function isAbsolutePath(input) {
        return input.startsWith('/');
    }
    function isFileUrl(input) {
        return input.startsWith('file:');
    }
    function isRelative(input) {
        return /^[.?#]/.test(input);
    }
    function parseAbsoluteUrl(input) {
        const match = urlRegex.exec(input);
        return makeUrl(match[1], match[2] || '', match[3], match[4] || '', match[5] || '/', match[6] || '', match[7] || '');
    }
    function parseFileUrl(input) {
        const match = fileRegex.exec(input);
        const path = match[2];
        return makeUrl('file:', '', match[1] || '', '', isAbsolutePath(path) ? path : '/' + path, match[3] || '', match[4] || '');
    }
    function makeUrl(scheme, user, host, port, path, query, hash) {
        return {
            scheme,
            user,
            host,
            port,
            path,
            query,
            hash,
            type: UrlType.Absolute,
        };
    }
    function parseUrl(input) {
        if (isSchemeRelativeUrl(input)) {
            const url = parseAbsoluteUrl('http:' + input);
            url.scheme = '';
            url.type = UrlType.SchemeRelative;
            return url;
        }
        if (isAbsolutePath(input)) {
            const url = parseAbsoluteUrl('http://foo.com' + input);
            url.scheme = '';
            url.host = '';
            url.type = UrlType.AbsolutePath;
            return url;
        }
        if (isFileUrl(input))
            return parseFileUrl(input);
        if (isAbsoluteUrl(input))
            return parseAbsoluteUrl(input);
        const url = parseAbsoluteUrl('http://foo.com/' + input);
        url.scheme = '';
        url.host = '';
        url.type = input
            ? input.startsWith('?')
                ? UrlType.Query
                : input.startsWith('#')
                    ? UrlType.Hash
                    : UrlType.RelativePath
            : UrlType.Empty;
        return url;
    }
    function stripPathFilename(path) {
        // If a path ends with a parent directory "..", then it's a relative path with excess parent
        // paths. It's not a file, so we can't strip it.
        if (path.endsWith('/..'))
            return path;
        const index = path.lastIndexOf('/');
        return path.slice(0, index + 1);
    }
    function mergePaths(url, base) {
        normalizePath(base, base.type);
        // If the path is just a "/", then it was an empty path to begin with (remember, we're a relative
        // path).
        if (url.path === '/') {
            url.path = base.path;
        }
        else {
            // Resolution happens relative to the base path's directory, not the file.
            url.path = stripPathFilename(base.path) + url.path;
        }
    }
    /**
     * The path can have empty directories "//", unneeded parents "foo/..", or current directory
     * "foo/.". We need to normalize to a standard representation.
     */
    function normalizePath(url, type) {
        const rel = type <= UrlType.RelativePath;
        const pieces = url.path.split('/');
        // We need to preserve the first piece always, so that we output a leading slash. The item at
        // pieces[0] is an empty string.
        let pointer = 1;
        // Positive is the number of real directories we've output, used for popping a parent directory.
        // Eg, "foo/bar/.." will have a positive 2, and we can decrement to be left with just "foo".
        let positive = 0;
        // We need to keep a trailing slash if we encounter an empty directory (eg, splitting "foo/" will
        // generate `["foo", ""]` pieces). And, if we pop a parent directory. But once we encounter a
        // real directory, we won't need to append, unless the other conditions happen again.
        let addTrailingSlash = false;
        for (let i = 1; i < pieces.length; i++) {
            const piece = pieces[i];
            // An empty directory, could be a trailing slash, or just a double "//" in the path.
            if (!piece) {
                addTrailingSlash = true;
                continue;
            }
            // If we encounter a real directory, then we don't need to append anymore.
            addTrailingSlash = false;
            // A current directory, which we can always drop.
            if (piece === '.')
                continue;
            // A parent directory, we need to see if there are any real directories we can pop. Else, we
            // have an excess of parents, and we'll need to keep the "..".
            if (piece === '..') {
                if (positive) {
                    addTrailingSlash = true;
                    positive--;
                    pointer--;
                }
                else if (rel) {
                    // If we're in a relativePath, then we need to keep the excess parents. Else, in an absolute
                    // URL, protocol relative URL, or an absolute path, we don't need to keep excess.
                    pieces[pointer++] = piece;
                }
                continue;
            }
            // We've encountered a real directory. Move it to the next insertion pointer, which accounts for
            // any popped or dropped directories.
            pieces[pointer++] = piece;
            positive++;
        }
        let path = '';
        for (let i = 1; i < pointer; i++) {
            path += '/' + pieces[i];
        }
        if (!path || (addTrailingSlash && !path.endsWith('/..'))) {
            path += '/';
        }
        url.path = path;
    }
    /**
     * Attempts to resolve `input` URL/path relative to `base`.
     */
    function resolve(input, base) {
        if (!input && !base)
            return '';
        const url = parseUrl(input);
        let inputType = url.type;
        if (base && inputType !== UrlType.Absolute) {
            const baseUrl = parseUrl(base);
            const baseType = baseUrl.type;
            switch (inputType) {
                case UrlType.Empty:
                    url.hash = baseUrl.hash;
                // fall through
                case UrlType.Hash:
                    url.query = baseUrl.query;
                // fall through
                case UrlType.Query:
                case UrlType.RelativePath:
                    mergePaths(url, baseUrl);
                // fall through
                case UrlType.AbsolutePath:
                    // The host, user, and port are joined, you can't copy one without the others.
                    url.user = baseUrl.user;
                    url.host = baseUrl.host;
                    url.port = baseUrl.port;
                // fall through
                case UrlType.SchemeRelative:
                    // The input is missing at least a scheme, so copy that over from the base.
                    url.scheme = baseUrl.scheme;
            }
            if (baseType > inputType)
                inputType = baseType;
        }
        normalizePath(url, inputType);
        const queryHash = url.query + url.hash;
        switch (inputType) {
            // This is impossible, because of the empty checks at the start of the function.
            // case UrlType.Empty:
            case UrlType.Hash:
            case UrlType.Query:
                return queryHash;
            case UrlType.RelativePath: {
                // The first char is always a "/", and we need it to be relative.
                const path = url.path.slice(1);
                if (!path)
                    return queryHash || '.';
                if (isRelative(base || input) && !isRelative(path)) {
                    // If base started with a leading ".", or there is no base and input started with a ".",
                    // then we need to ensure that the relative path starts with a ".". We don't know if
                    // relative starts with a "..", though, so check before prepending.
                    return './' + path + queryHash;
                }
                return path + queryHash;
            }
            case UrlType.AbsolutePath:
                return url.path + queryHash;
            default:
                return url.scheme + '//' + url.user + url.host + url.port + url.path + queryHash;
        }
    }

    return resolve;

}));
//# sourceMappingURL=resolve-uri.umd.js.map
1 node_modules/@jridgewell/resolve-uri/dist/resolve-uri.umd.js.map generated vendored Normal file
File diff suppressed because one or more lines are too long
4 node_modules/@jridgewell/resolve-uri/dist/types/resolve-uri.d.ts generated vendored Normal file
@@ -0,0 +1,4 @@
/**
 * Attempts to resolve `input` URL/path relative to `base`.
 */
export default function resolve(input: string, base: string | undefined): string;
69 node_modules/@jridgewell/resolve-uri/package.json generated vendored Normal file
@@ -0,0 +1,69 @@
{
  "name": "@jridgewell/resolve-uri",
  "version": "3.1.0",
  "description": "Resolve a URI relative to an optional base URI",
  "keywords": [
    "resolve",
    "uri",
    "url",
    "path"
  ],
  "author": "Justin Ridgewell <justin@ridgewell.name>",
  "license": "MIT",
  "repository": "https://github.com/jridgewell/resolve-uri",
  "main": "dist/resolve-uri.umd.js",
  "module": "dist/resolve-uri.mjs",
  "typings": "dist/types/resolve-uri.d.ts",
  "exports": {
    ".": [
      {
        "types": "./dist/types/resolve-uri.d.ts",
        "browser": "./dist/resolve-uri.umd.js",
        "require": "./dist/resolve-uri.umd.js",
        "import": "./dist/resolve-uri.mjs"
      },
      "./dist/resolve-uri.umd.js"
    ],
    "./package.json": "./package.json"
  },
  "files": [
    "dist"
  ],
  "engines": {
    "node": ">=6.0.0"
  },
  "scripts": {
    "prebuild": "rm -rf dist",
    "build": "run-s -n build:*",
    "build:rollup": "rollup -c rollup.config.js",
    "build:ts": "tsc --project tsconfig.build.json",
    "lint": "run-s -n lint:*",
    "lint:prettier": "npm run test:lint:prettier -- --write",
    "lint:ts": "npm run test:lint:ts -- --fix",
    "pretest": "run-s build:rollup",
    "test": "run-s -n test:lint test:only",
    "test:debug": "mocha --inspect-brk",
    "test:lint": "run-s -n test:lint:*",
    "test:lint:prettier": "prettier --check '{src,test}/**/*.ts'",
    "test:lint:ts": "eslint '{src,test}/**/*.ts'",
    "test:only": "mocha",
    "test:coverage": "c8 mocha",
    "test:watch": "mocha --watch",
    "prepublishOnly": "npm run preversion",
    "preversion": "run-s test build"
  },
  "devDependencies": {
    "@jridgewell/resolve-uri-latest": "npm:@jridgewell/resolve-uri@*",
    "@rollup/plugin-typescript": "8.3.0",
    "@typescript-eslint/eslint-plugin": "5.10.0",
    "@typescript-eslint/parser": "5.10.0",
    "c8": "7.11.0",
    "eslint": "8.7.0",
    "eslint-config-prettier": "8.3.0",
    "mocha": "9.2.0",
    "npm-run-all": "4.1.5",
    "prettier": "2.5.1",
    "rollup": "2.66.0",
    "typescript": "4.5.5"
  }
}
19 node_modules/@jridgewell/set-array/LICENSE generated vendored Normal file
@@ -0,0 +1,19 @@
Copyright 2022 Justin Ridgewell <jridgewell@google.com>

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
37 node_modules/@jridgewell/set-array/README.md generated vendored Normal file
@@ -0,0 +1,37 @@
# @jridgewell/set-array

> Like a Set, but provides the index of the `key` in the backing array

This is designed to allow synchronizing a second array with the contents of the backing array, like
how in a sourcemap `sourcesContent[i]` is the source content associated with `source[i]`, and there
are never duplicates.

## Installation

```sh
npm install @jridgewell/set-array
```

## Usage

```js
import { SetArray, get, put, pop } from '@jridgewell/set-array';

const sa = new SetArray();

let index = put(sa, 'first');
assert.strictEqual(index, 0);

index = put(sa, 'second');
assert.strictEqual(index, 1);

assert.deepEqual(sa.array, [ 'first', 'second' ]);

index = get(sa, 'first');
assert.strictEqual(index, 0);

pop(sa);
index = get(sa, 'second');
assert.strictEqual(index, undefined);
assert.deepEqual(sa.array, [ 'first' ]);
```
48 node_modules/@jridgewell/set-array/dist/set-array.mjs generated vendored Normal file
@@ -0,0 +1,48 @@
/**
 * Gets the index associated with `key` in the backing array, if it is already present.
 */
let get;
/**
 * Puts `key` into the backing array, if it is not already present. Returns
 * the index of the `key` in the backing array.
 */
let put;
/**
 * Pops the last added item out of the SetArray.
 */
let pop;
/**
 * SetArray acts like a `Set` (allowing only one occurrence of a string `key`), but provides the
 * index of the `key` in the backing array.
 *
 * This is designed to allow synchronizing a second array with the contents of the backing array,
 * like how in a sourcemap `sourcesContent[i]` is the source content associated with `source[i]`,
 * and there are never duplicates.
 */
class SetArray {
    constructor() {
        this._indexes = { __proto__: null };
        this.array = [];
    }
}
(() => {
    get = (strarr, key) => strarr._indexes[key];
    put = (strarr, key) => {
        // The key may or may not be present. If it is present, it's a number.
        const index = get(strarr, key);
        if (index !== undefined)
            return index;
        const { array, _indexes: indexes } = strarr;
        return (indexes[key] = array.push(key) - 1);
    };
    pop = (strarr) => {
        const { array, _indexes: indexes } = strarr;
        if (array.length === 0)
            return;
        const last = array.pop();
        indexes[last] = undefined;
    };
})();

export { SetArray, get, pop, put };
//# sourceMappingURL=set-array.mjs.map
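The `{ __proto__: null }` index object above is deliberate: a plain `{}` would inherit from `Object.prototype`, so a key such as `'hasOwnProperty'` would look present before it was ever put. A quick sketch of the hazard:

```js
const plain = {};
plain['hasOwnProperty'] !== undefined; // true, inherited from Object.prototype

const bare = { __proto__: null };
bare['hasOwnProperty'] === undefined;  // true, no prototype chain
```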
1 node_modules/@jridgewell/set-array/dist/set-array.mjs.map generated vendored Normal file
@@ -0,0 +1 @@
{"version":3,"file":"set-array.mjs","sources":["../src/set-array.ts"],"sourcesContent":["/**\n * Gets the index associated with `key` in the backing array, if it is already present.\n */\nexport let get: (strarr: SetArray, key: string) => number | undefined;\n\n/**\n * Puts `key` into the backing array, if it is not already present. Returns\n * the index of the `key` in the backing array.\n */\nexport let put: (strarr: SetArray, key: string) => number;\n\n/**\n * Pops the last added item out of the SetArray.\n */\nexport let pop: (strarr: SetArray) => void;\n\n/**\n * SetArray acts like a `Set` (allowing only one occurrence of a string `key`), but provides the\n * index of the `key` in the backing array.\n *\n * This is designed to allow synchronizing a second array with the contents of the backing array,\n * like how in a sourcemap `sourcesContent[i]` is the source content associated with `source[i]`,\n * and there are never duplicates.\n */\nexport class SetArray {\n private declare _indexes: { [key: string]: number | undefined };\n declare array: readonly string[];\n\n constructor() {\n this._indexes = { __proto__: null } as any;\n this.array = [];\n }\n\n static {\n get = (strarr, key) => strarr._indexes[key];\n\n put = (strarr, key) => {\n // The key may or may not be present. If it is present, it's a number.\n const index = get(strarr, key);\n if (index !== undefined) return index;\n\n const { array, _indexes: indexes } = strarr;\n\n return (indexes[key] = (array as string[]).push(key) - 1);\n };\n\n pop = (strarr) => {\n const { array, _indexes: indexes } = strarr;\n if (array.length === 0) return;\n\n const last = (array as string[]).pop()!;\n indexes[last] = undefined;\n };\n }\n}\n"],"names":[],"mappings":"AAAA;;;IAGW,IAA2D;AAEtE;;;;IAIW,IAA+C;AAE1D;;;IAGW,IAAgC;AAE3C;;;;;;;;MAQa,QAAQ;IAInB;QACE,IAAI,CAAC,QAAQ,GAAG,EAAE,SAAS,EAAE,IAAI,EAAS,CAAC;QAC3C,IAAI,CAAC,KAAK,GAAG,EAAE,CAAC;KACjB;CAuBF;AArBC;IACE,GAAG,GAAG,CAAC,MAAM,EAAE,GAAG,KAAK,MAAM,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC;IAE5C,GAAG,GAAG,CAAC,MAAM,EAAE,GAAG;;QAEhB,MAAM,KAAK,GAAG,GAAG,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;QAC/B,IAAI,KAAK,KAAK,SAAS;YAAE,OAAO,KAAK,CAAC;QAEtC,MAAM,EAAE,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,GAAG,MAAM,CAAC;QAE5C,QAAQ,OAAO,CAAC,GAAG,CAAC,GAAI,KAAkB,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE;KAC3D,CAAC;IAEF,GAAG,GAAG,CAAC,MAAM;QACX,MAAM,EAAE,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,GAAG,MAAM,CAAC;QAC5C,IAAI,KAAK,CAAC,MAAM,KAAK,CAAC;YAAE,OAAO;QAE/B,MAAM,IAAI,GAAI,KAAkB,CAAC,GAAG,EAAG,CAAC;QACxC,OAAO,CAAC,IAAI,CAAC,GAAG,SAAS,CAAC;KAC3B,CAAC;AACJ,CAAC,GAAA;;;;"}
58 node_modules/@jridgewell/set-array/dist/set-array.umd.js generated vendored Normal file
@@ -0,0 +1,58 @@
(function (global, factory) {
    typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports) :
    typeof define === 'function' && define.amd ? define(['exports'], factory) :
    (global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.setArray = {}));
})(this, (function (exports) { 'use strict';

    /**
     * Gets the index associated with `key` in the backing array, if it is already present.
     */
    exports.get = void 0;
    /**
     * Puts `key` into the backing array, if it is not already present. Returns
     * the index of the `key` in the backing array.
     */
    exports.put = void 0;
    /**
     * Pops the last added item out of the SetArray.
     */
    exports.pop = void 0;
    /**
     * SetArray acts like a `Set` (allowing only one occurrence of a string `key`), but provides the
     * index of the `key` in the backing array.
     *
     * This is designed to allow synchronizing a second array with the contents of the backing array,
     * like how in a sourcemap `sourcesContent[i]` is the source content associated with `source[i]`,
     * and there are never duplicates.
     */
    class SetArray {
        constructor() {
            this._indexes = { __proto__: null };
            this.array = [];
        }
    }
    (() => {
        exports.get = (strarr, key) => strarr._indexes[key];
        exports.put = (strarr, key) => {
            // The key may or may not be present. If it is present, it's a number.
            const index = exports.get(strarr, key);
            if (index !== undefined)
                return index;
            const { array, _indexes: indexes } = strarr;
            return (indexes[key] = array.push(key) - 1);
        };
        exports.pop = (strarr) => {
            const { array, _indexes: indexes } = strarr;
            if (array.length === 0)
                return;
            const last = array.pop();
            indexes[last] = undefined;
        };
    })();

    exports.SetArray = SetArray;

    Object.defineProperty(exports, '__esModule', { value: true });

}));
//# sourceMappingURL=set-array.umd.js.map
1 node_modules/@jridgewell/set-array/dist/set-array.umd.js.map generated vendored Normal file
@@ -0,0 +1 @@
{"version":3,"file":"set-array.umd.js","sources":["../src/set-array.ts"],"sourcesContent":["/**\n * Gets the index associated with `key` in the backing array, if it is already present.\n */\nexport let get: (strarr: SetArray, key: string) => number | undefined;\n\n/**\n * Puts `key` into the backing array, if it is not already present. Returns\n * the index of the `key` in the backing array.\n */\nexport let put: (strarr: SetArray, key: string) => number;\n\n/**\n * Pops the last added item out of the SetArray.\n */\nexport let pop: (strarr: SetArray) => void;\n\n/**\n * SetArray acts like a `Set` (allowing only one occurrence of a string `key`), but provides the\n * index of the `key` in the backing array.\n *\n * This is designed to allow synchronizing a second array with the contents of the backing array,\n * like how in a sourcemap `sourcesContent[i]` is the source content associated with `source[i]`,\n * and there are never duplicates.\n */\nexport class SetArray {\n private declare _indexes: { [key: string]: number | undefined };\n declare array: readonly string[];\n\n constructor() {\n this._indexes = { __proto__: null } as any;\n this.array = [];\n }\n\n static {\n get = (strarr, key) => strarr._indexes[key];\n\n put = (strarr, key) => {\n // The key may or may not be present. If it is present, it's a number.\n const index = get(strarr, key);\n if (index !== undefined) return index;\n\n const { array, _indexes: indexes } = strarr;\n\n return (indexes[key] = (array as string[]).push(key) - 1);\n };\n\n pop = (strarr) => {\n const { array, _indexes: indexes } = strarr;\n if (array.length === 0) return;\n\n const last = (array as string[]).pop()!;\n indexes[last] = undefined;\n };\n }\n}\n"],"names":["get","put","pop"],"mappings":";;;;;;IAAA;;;AAGWA,yBAA2D;IAEtE;;;;AAIWC,yBAA+C;IAE1D;;;AAGWC,yBAAgC;IAE3C;;;;;;;;UAQa,QAAQ;QAInB;YACE,IAAI,CAAC,QAAQ,GAAG,EAAE,SAAS,EAAE,IAAI,EAAS,CAAC;YAC3C,IAAI,CAAC,KAAK,GAAG,EAAE,CAAC;SACjB;KAuBF;IArBC;QACEF,WAAG,GAAG,CAAC,MAAM,EAAE,GAAG,KAAK,MAAM,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC;QAE5CC,WAAG,GAAG,CAAC,MAAM,EAAE,GAAG;;YAEhB,MAAM,KAAK,GAAGD,WAAG,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;YAC/B,IAAI,KAAK,KAAK,SAAS;gBAAE,OAAO,KAAK,CAAC;YAEtC,MAAM,EAAE,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,GAAG,MAAM,CAAC;YAE5C,QAAQ,OAAO,CAAC,GAAG,CAAC,GAAI,KAAkB,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE;SAC3D,CAAC;QAEFE,WAAG,GAAG,CAAC,MAAM;YACX,MAAM,EAAE,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,GAAG,MAAM,CAAC;YAC5C,IAAI,KAAK,CAAC,MAAM,KAAK,CAAC;gBAAE,OAAO;YAE/B,MAAM,IAAI,GAAI,KAAkB,CAAC,GAAG,EAAG,CAAC;YACxC,OAAO,CAAC,IAAI,CAAC,GAAG,SAAS,CAAC;SAC3B,CAAC;IACJ,CAAC,GAAA;;;;;;;;;;"}
26 node_modules/@jridgewell/set-array/dist/types/set-array.d.ts generated vendored Normal file
@@ -0,0 +1,26 @@
/**
 * Gets the index associated with `key` in the backing array, if it is already present.
 */
export declare let get: (strarr: SetArray, key: string) => number | undefined;
/**
 * Puts `key` into the backing array, if it is not already present. Returns
 * the index of the `key` in the backing array.
 */
export declare let put: (strarr: SetArray, key: string) => number;
/**
 * Pops the last added item out of the SetArray.
 */
export declare let pop: (strarr: SetArray) => void;
/**
 * SetArray acts like a `Set` (allowing only one occurrence of a string `key`), but provides the
 * index of the `key` in the backing array.
 *
 * This is designed to allow synchronizing a second array with the contents of the backing array,
 * like how in a sourcemap `sourcesContent[i]` is the source content associated with `source[i]`,
 * and there are never duplicates.
 */
export declare class SetArray {
    private _indexes;
    array: readonly string[];
    constructor();
}
66 node_modules/@jridgewell/set-array/package.json generated vendored Normal file
@@ -0,0 +1,66 @@
{
  "name": "@jridgewell/set-array",
  "version": "1.1.2",
  "description": "Like a Set, but provides the index of the `key` in the backing array",
  "keywords": [],
  "author": "Justin Ridgewell <justin@ridgewell.name>",
  "license": "MIT",
  "repository": "https://github.com/jridgewell/set-array",
  "main": "dist/set-array.umd.js",
  "module": "dist/set-array.mjs",
  "typings": "dist/types/set-array.d.ts",
  "exports": {
    ".": [
      {
        "types": "./dist/types/set-array.d.ts",
        "browser": "./dist/set-array.umd.js",
        "require": "./dist/set-array.umd.js",
        "import": "./dist/set-array.mjs"
      },
      "./dist/set-array.umd.js"
    ],
    "./package.json": "./package.json"
  },
  "files": [
    "dist",
    "src"
  ],
  "engines": {
    "node": ">=6.0.0"
  },
  "scripts": {
    "prebuild": "rm -rf dist",
    "build": "run-s -n build:*",
    "build:rollup": "rollup -c rollup.config.js",
    "build:ts": "tsc --project tsconfig.build.json",
    "lint": "run-s -n lint:*",
    "lint:prettier": "npm run test:lint:prettier -- --write",
    "lint:ts": "npm run test:lint:ts -- --fix",
    "pretest": "run-s build:rollup",
    "test": "run-s -n test:lint test:only",
    "test:debug": "mocha --inspect-brk",
    "test:lint": "run-s -n test:lint:*",
    "test:lint:prettier": "prettier --check '{src,test}/**/*.ts'",
    "test:lint:ts": "eslint '{src,test}/**/*.ts'",
    "test:only": "mocha",
    "test:coverage": "c8 mocha",
    "test:watch": "mocha --watch",
    "prepublishOnly": "npm run preversion",
    "preversion": "run-s test build"
  },
  "devDependencies": {
    "@rollup/plugin-typescript": "8.3.0",
    "@types/mocha": "9.1.1",
    "@types/node": "17.0.29",
    "@typescript-eslint/eslint-plugin": "5.10.0",
    "@typescript-eslint/parser": "5.10.0",
    "c8": "7.11.0",
    "eslint": "8.7.0",
    "eslint-config-prettier": "8.3.0",
    "mocha": "9.2.0",
    "npm-run-all": "4.1.5",
    "prettier": "2.5.1",
    "rollup": "2.66.0",
    "typescript": "4.5.5"
  }
}
55 node_modules/@jridgewell/set-array/src/set-array.ts generated vendored Normal file
@@ -0,0 +1,55 @@
/**
 * Gets the index associated with `key` in the backing array, if it is already present.
 */
export let get: (strarr: SetArray, key: string) => number | undefined;

/**
 * Puts `key` into the backing array, if it is not already present. Returns
 * the index of the `key` in the backing array.
 */
export let put: (strarr: SetArray, key: string) => number;

/**
 * Pops the last added item out of the SetArray.
 */
export let pop: (strarr: SetArray) => void;

/**
 * SetArray acts like a `Set` (allowing only one occurrence of a string `key`), but provides the
 * index of the `key` in the backing array.
 *
 * This is designed to allow synchronizing a second array with the contents of the backing array,
 * like how in a sourcemap `sourcesContent[i]` is the source content associated with `source[i]`,
 * and there are never duplicates.
 */
export class SetArray {
  private declare _indexes: { [key: string]: number | undefined };
  declare array: readonly string[];

  constructor() {
    this._indexes = { __proto__: null } as any;
    this.array = [];
  }

  static {
    get = (strarr, key) => strarr._indexes[key];

    put = (strarr, key) => {
      // The key may or may not be present. If it is present, it's a number.
      const index = get(strarr, key);
      if (index !== undefined) return index;

      const { array, _indexes: indexes } = strarr;

      return (indexes[key] = (array as string[]).push(key) - 1);
    };

    pop = (strarr) => {
      const { array, _indexes: indexes } = strarr;
      if (array.length === 0) return;

      const last = (array as string[]).pop()!;
      indexes[last] = undefined;
    };
  }
}
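The `static { ... }` block in the source above is what lets the module-level `let` bindings close over the class's private field. A minimal illustration of the same pattern with a hypothetical `Counter` class:

```js
let increment;

class Counter {
  #count = 0;
  static {
    // Runs once at class-definition time, with access to private fields.
    increment = (counter) => ++counter.#count;
  }
}

const c = new Counter();
increment(c); // 1
increment(c); // 2
```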
21 node_modules/@jridgewell/sourcemap-codec/LICENSE generated vendored Normal file
@@ -0,0 +1,21 @@
The MIT License

Copyright (c) 2015 Rich Harris

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
200
node_modules/@jridgewell/sourcemap-codec/README.md
generated
vendored
Normal file
@@ -0,0 +1,200 @@
# @jridgewell/sourcemap-codec

Encode/decode the `mappings` property of a [sourcemap](https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit).

## Why?

Sourcemaps are difficult to generate and manipulate, because the `mappings` property – the part that actually links the generated code back to the original source – is encoded using an obscure method called [Variable-length quantity](https://en.wikipedia.org/wiki/Variable-length_quantity). On top of that, each segment in the mapping contains offsets rather than absolute indices, which means that you can't look at a segment in isolation – you have to understand the whole sourcemap.

This package makes the process slightly easier.
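To make the encoding concrete: each Base64 character carries five payload bits plus a continuation bit, and the assembled integer stores its sign in its lowest bit. A simplified sketch of a single-value decoder (illustrative only; the package's `decodeInteger` additionally accumulates relative offsets into shared state and handles the 32-bit edge case):

```typescript
const chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/';

function decodeVlq(encoded: string): number {
  let value = 0;
  let shift = 0;
  for (const ch of encoded) {
    const integer = chars.indexOf(ch);
    value |= (integer & 0b11111) << shift; // low 5 bits are payload
    shift += 5;
    if ((integer & 0b100000) === 0) break; // bit 6 flags a continuation
  }
  const negative = value & 1; // lowest bit is the sign
  value >>>= 1;
  return negative ? -value : value;
}

decodeVlq('K');  // 5
decodeVlq('yC'); // 41 ('y' sets the continuation bit; 'C' completes the value)
```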
## Installation

```bash
npm install @jridgewell/sourcemap-codec
```

## Usage

```js
import { encode, decode } from '@jridgewell/sourcemap-codec';

var decoded = decode( ';EAEEA,EAAE,EAAC,CAAE;ECQY,UACC' );

assert.deepEqual( decoded, [
	// the first line (of the generated code) has no mappings,
	// as shown by the starting semi-colon (which separates lines)
	[],

	// the second line contains four (comma-separated) segments
	[
		// segments are encoded as you'd expect:
		// [ generatedCodeColumn, sourceIndex, sourceCodeLine, sourceCodeColumn, nameIndex ]

		// i.e. the first segment begins at column 2, and maps back to the second column
		// of the second line (both zero-based) of the 0th source, and uses the 0th
		// name in the `map.names` array
		[ 2, 0, 2, 2, 0 ],

		// the remaining segments are 4-length rather than 5-length,
		// because they don't map a name
		[ 4, 0, 2, 4 ],
		[ 6, 0, 2, 5 ],
		[ 7, 0, 2, 7 ]
	],

	// the final line contains two segments
	[
		[ 2, 1, 10, 19 ],
		[ 12, 1, 11, 20 ]
	]
]);

var encoded = encode( decoded );
assert.equal( encoded, ';EAEEA,EAAE,EAAC,CAAE;ECQY,UACC' );
```

## Benchmarks

```
node v18.0.0

amp.js.map - 45120 segments

Decode Memory Usage:
@jridgewell/sourcemap-codec 5479160 bytes
sourcemap-codec 5659336 bytes
source-map-0.6.1 17144440 bytes
source-map-0.8.0 6867424 bytes
Smallest memory usage is @jridgewell/sourcemap-codec

Decode speed:
decode: @jridgewell/sourcemap-codec x 502 ops/sec ±1.03% (90 runs sampled)
decode: sourcemap-codec x 445 ops/sec ±0.97% (92 runs sampled)
decode: source-map-0.6.1 x 36.01 ops/sec ±1.64% (49 runs sampled)
decode: source-map-0.8.0 x 367 ops/sec ±0.04% (95 runs sampled)
Fastest is decode: @jridgewell/sourcemap-codec

Encode Memory Usage:
@jridgewell/sourcemap-codec 1261620 bytes
sourcemap-codec 9119248 bytes
source-map-0.6.1 8968560 bytes
source-map-0.8.0 8952952 bytes
Smallest memory usage is @jridgewell/sourcemap-codec

Encode speed:
encode: @jridgewell/sourcemap-codec x 738 ops/sec ±0.42% (98 runs sampled)
encode: sourcemap-codec x 238 ops/sec ±0.73% (88 runs sampled)
encode: source-map-0.6.1 x 162 ops/sec ±0.43% (84 runs sampled)
encode: source-map-0.8.0 x 191 ops/sec ±0.34% (90 runs sampled)
Fastest is encode: @jridgewell/sourcemap-codec


***


babel.min.js.map - 347793 segments

Decode Memory Usage:
@jridgewell/sourcemap-codec 35338184 bytes
sourcemap-codec 35922736 bytes
source-map-0.6.1 62366360 bytes
source-map-0.8.0 44337416 bytes
Smallest memory usage is @jridgewell/sourcemap-codec

Decode speed:
decode: @jridgewell/sourcemap-codec x 40.35 ops/sec ±4.47% (54 runs sampled)
decode: sourcemap-codec x 36.76 ops/sec ±3.67% (51 runs sampled)
decode: source-map-0.6.1 x 4.44 ops/sec ±2.15% (16 runs sampled)
decode: source-map-0.8.0 x 59.35 ops/sec ±0.05% (78 runs sampled)
Fastest is decode: source-map-0.8.0

Encode Memory Usage:
@jridgewell/sourcemap-codec 7212604 bytes
sourcemap-codec 21421456 bytes
source-map-0.6.1 25286888 bytes
source-map-0.8.0 25498744 bytes
Smallest memory usage is @jridgewell/sourcemap-codec

Encode speed:
encode: @jridgewell/sourcemap-codec x 112 ops/sec ±0.13% (84 runs sampled)
encode: sourcemap-codec x 30.23 ops/sec ±2.76% (53 runs sampled)
encode: source-map-0.6.1 x 19.43 ops/sec ±3.70% (37 runs sampled)
encode: source-map-0.8.0 x 19.40 ops/sec ±3.26% (37 runs sampled)
Fastest is encode: @jridgewell/sourcemap-codec


***


preact.js.map - 1992 segments

Decode Memory Usage:
@jridgewell/sourcemap-codec 500272 bytes
sourcemap-codec 516864 bytes
source-map-0.6.1 1596672 bytes
source-map-0.8.0 517272 bytes
Smallest memory usage is @jridgewell/sourcemap-codec

Decode speed:
decode: @jridgewell/sourcemap-codec x 16,137 ops/sec ±0.17% (99 runs sampled)
decode: sourcemap-codec x 12,139 ops/sec ±0.13% (99 runs sampled)
decode: source-map-0.6.1 x 1,264 ops/sec ±0.12% (100 runs sampled)
decode: source-map-0.8.0 x 9,894 ops/sec ±0.08% (101 runs sampled)
Fastest is decode: @jridgewell/sourcemap-codec

Encode Memory Usage:
@jridgewell/sourcemap-codec 321026 bytes
sourcemap-codec 830832 bytes
source-map-0.6.1 586608 bytes
source-map-0.8.0 586680 bytes
Smallest memory usage is @jridgewell/sourcemap-codec

Encode speed:
encode: @jridgewell/sourcemap-codec x 19,876 ops/sec ±0.78% (95 runs sampled)
encode: sourcemap-codec x 6,983 ops/sec ±0.15% (100 runs sampled)
encode: source-map-0.6.1 x 5,070 ops/sec ±0.12% (102 runs sampled)
encode: source-map-0.8.0 x 5,641 ops/sec ±0.17% (100 runs sampled)
Fastest is encode: @jridgewell/sourcemap-codec


***


react.js.map - 5726 segments

Decode Memory Usage:
@jridgewell/sourcemap-codec 734848 bytes
sourcemap-codec 954200 bytes
source-map-0.6.1 2276432 bytes
source-map-0.8.0 955488 bytes
Smallest memory usage is @jridgewell/sourcemap-codec

Decode speed:
decode: @jridgewell/sourcemap-codec x 5,723 ops/sec ±0.12% (98 runs sampled)
decode: sourcemap-codec x 4,555 ops/sec ±0.09% (101 runs sampled)
decode: source-map-0.6.1 x 437 ops/sec ±0.11% (93 runs sampled)
decode: source-map-0.8.0 x 3,441 ops/sec ±0.15% (100 runs sampled)
Fastest is decode: @jridgewell/sourcemap-codec

Encode Memory Usage:
@jridgewell/sourcemap-codec 638672 bytes
sourcemap-codec 1109840 bytes
source-map-0.6.1 1321224 bytes
source-map-0.8.0 1324448 bytes
Smallest memory usage is @jridgewell/sourcemap-codec

Encode speed:
encode: @jridgewell/sourcemap-codec x 6,801 ops/sec ±0.48% (98 runs sampled)
encode: sourcemap-codec x 2,533 ops/sec ±0.13% (101 runs sampled)
encode: source-map-0.6.1 x 2,248 ops/sec ±0.08% (100 runs sampled)
encode: source-map-0.8.0 x 2,303 ops/sec ±0.15% (100 runs sampled)
Fastest is encode: @jridgewell/sourcemap-codec
```

# License

MIT
164
node_modules/@jridgewell/sourcemap-codec/dist/sourcemap-codec.mjs
generated
vendored
Normal file
@@ -0,0 +1,164 @@
const comma = ','.charCodeAt(0);
const semicolon = ';'.charCodeAt(0);
const chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/';
const intToChar = new Uint8Array(64); // 64 possible chars.
const charToInt = new Uint8Array(128); // z is 122 in ASCII
for (let i = 0; i < chars.length; i++) {
    const c = chars.charCodeAt(i);
    intToChar[i] = c;
    charToInt[c] = i;
}
// Provide a fallback for older environments.
const td = typeof TextDecoder !== 'undefined'
    ? /* #__PURE__ */ new TextDecoder()
    : typeof Buffer !== 'undefined'
        ? {
            decode(buf) {
                const out = Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength);
                return out.toString();
            },
        }
        : {
            decode(buf) {
                let out = '';
                for (let i = 0; i < buf.length; i++) {
                    out += String.fromCharCode(buf[i]);
                }
                return out;
            },
        };
function decode(mappings) {
    const state = new Int32Array(5);
    const decoded = [];
    let index = 0;
    do {
        const semi = indexOf(mappings, index);
        const line = [];
        let sorted = true;
        let lastCol = 0;
        state[0] = 0;
        for (let i = index; i < semi; i++) {
            let seg;
            i = decodeInteger(mappings, i, state, 0); // genColumn
            const col = state[0];
            if (col < lastCol)
                sorted = false;
            lastCol = col;
            if (hasMoreVlq(mappings, i, semi)) {
                i = decodeInteger(mappings, i, state, 1); // sourcesIndex
                i = decodeInteger(mappings, i, state, 2); // sourceLine
                i = decodeInteger(mappings, i, state, 3); // sourceColumn
                if (hasMoreVlq(mappings, i, semi)) {
                    i = decodeInteger(mappings, i, state, 4); // namesIndex
                    seg = [col, state[1], state[2], state[3], state[4]];
                }
                else {
                    seg = [col, state[1], state[2], state[3]];
                }
            }
            else {
                seg = [col];
            }
            line.push(seg);
        }
        if (!sorted)
            sort(line);
        decoded.push(line);
        index = semi + 1;
    } while (index <= mappings.length);
    return decoded;
}
function indexOf(mappings, index) {
    const idx = mappings.indexOf(';', index);
    return idx === -1 ? mappings.length : idx;
}
function decodeInteger(mappings, pos, state, j) {
    let value = 0;
    let shift = 0;
    let integer = 0;
    do {
        const c = mappings.charCodeAt(pos++);
        integer = charToInt[c];
        value |= (integer & 31) << shift;
        shift += 5;
    } while (integer & 32);
    const shouldNegate = value & 1;
    value >>>= 1;
    if (shouldNegate) {
        value = -0x80000000 | -value;
    }
    state[j] += value;
    return pos;
}
function hasMoreVlq(mappings, i, length) {
    if (i >= length)
        return false;
    return mappings.charCodeAt(i) !== comma;
}
function sort(line) {
    line.sort(sortComparator);
}
function sortComparator(a, b) {
    return a[0] - b[0];
}
function encode(decoded) {
    const state = new Int32Array(5);
    const bufLength = 1024 * 16;
    const subLength = bufLength - 36;
    const buf = new Uint8Array(bufLength);
    const sub = buf.subarray(0, subLength);
    let pos = 0;
    let out = '';
    for (let i = 0; i < decoded.length; i++) {
        const line = decoded[i];
        if (i > 0) {
            if (pos === bufLength) {
                out += td.decode(buf);
                pos = 0;
            }
            buf[pos++] = semicolon;
        }
        if (line.length === 0)
            continue;
        state[0] = 0;
        for (let j = 0; j < line.length; j++) {
            const segment = line[j];
            // We can push up to 5 ints, each int can take at most 7 chars, and we
            // may push a comma.
            if (pos > subLength) {
                out += td.decode(sub);
                buf.copyWithin(0, subLength, pos);
                pos -= subLength;
            }
            if (j > 0)
                buf[pos++] = comma;
            pos = encodeInteger(buf, pos, state, segment, 0); // genColumn
            if (segment.length === 1)
                continue;
            pos = encodeInteger(buf, pos, state, segment, 1); // sourcesIndex
            pos = encodeInteger(buf, pos, state, segment, 2); // sourceLine
            pos = encodeInteger(buf, pos, state, segment, 3); // sourceColumn
            if (segment.length === 4)
                continue;
            pos = encodeInteger(buf, pos, state, segment, 4); // namesIndex
        }
    }
    return out + td.decode(buf.subarray(0, pos));
}
function encodeInteger(buf, pos, state, segment, j) {
    const next = segment[j];
    let num = next - state[j];
    state[j] = next;
    num = num < 0 ? (-num << 1) | 1 : num << 1;
    do {
        let clamped = num & 0b011111;
        num >>>= 5;
        if (num > 0)
            clamped |= 0b100000;
        buf[pos++] = intToChar[clamped];
    } while (num > 0);
    return pos;
}

export { decode, encode };
//# sourceMappingURL=sourcemap-codec.mjs.map
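A note on the buffered `encode` above: `subLength` is `bufLength - 36` because one segment writes at most 5 VLQ integers of up to 7 characters each plus a separating comma, i.e. 5 × 7 + 1 = 36 bytes. Flushing whenever `pos > subLength` therefore guarantees the next segment always fits in the buffer without bounds checks inside `encodeInteger`.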
1
node_modules/@jridgewell/sourcemap-codec/dist/sourcemap-codec.mjs.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
175
node_modules/@jridgewell/sourcemap-codec/dist/sourcemap-codec.umd.js
generated
vendored
Normal file
@@ -0,0 +1,175 @@
(function (global, factory) {
    typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports) :
    typeof define === 'function' && define.amd ? define(['exports'], factory) :
    (global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.sourcemapCodec = {}));
})(this, (function (exports) { 'use strict';

    const comma = ','.charCodeAt(0);
    const semicolon = ';'.charCodeAt(0);
    const chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/';
    const intToChar = new Uint8Array(64); // 64 possible chars.
    const charToInt = new Uint8Array(128); // z is 122 in ASCII
    for (let i = 0; i < chars.length; i++) {
        const c = chars.charCodeAt(i);
        intToChar[i] = c;
        charToInt[c] = i;
    }
    // Provide a fallback for older environments.
    const td = typeof TextDecoder !== 'undefined'
        ? /* #__PURE__ */ new TextDecoder()
        : typeof Buffer !== 'undefined'
            ? {
                decode(buf) {
                    const out = Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength);
                    return out.toString();
                },
            }
            : {
                decode(buf) {
                    let out = '';
                    for (let i = 0; i < buf.length; i++) {
                        out += String.fromCharCode(buf[i]);
                    }
                    return out;
                },
            };
    function decode(mappings) {
        const state = new Int32Array(5);
        const decoded = [];
        let index = 0;
        do {
            const semi = indexOf(mappings, index);
            const line = [];
            let sorted = true;
            let lastCol = 0;
            state[0] = 0;
            for (let i = index; i < semi; i++) {
                let seg;
                i = decodeInteger(mappings, i, state, 0); // genColumn
                const col = state[0];
                if (col < lastCol)
                    sorted = false;
                lastCol = col;
                if (hasMoreVlq(mappings, i, semi)) {
                    i = decodeInteger(mappings, i, state, 1); // sourcesIndex
                    i = decodeInteger(mappings, i, state, 2); // sourceLine
                    i = decodeInteger(mappings, i, state, 3); // sourceColumn
                    if (hasMoreVlq(mappings, i, semi)) {
                        i = decodeInteger(mappings, i, state, 4); // namesIndex
                        seg = [col, state[1], state[2], state[3], state[4]];
                    }
                    else {
                        seg = [col, state[1], state[2], state[3]];
                    }
                }
                else {
                    seg = [col];
                }
                line.push(seg);
            }
            if (!sorted)
                sort(line);
            decoded.push(line);
            index = semi + 1;
        } while (index <= mappings.length);
        return decoded;
    }
    function indexOf(mappings, index) {
        const idx = mappings.indexOf(';', index);
        return idx === -1 ? mappings.length : idx;
    }
    function decodeInteger(mappings, pos, state, j) {
        let value = 0;
        let shift = 0;
        let integer = 0;
        do {
            const c = mappings.charCodeAt(pos++);
            integer = charToInt[c];
            value |= (integer & 31) << shift;
            shift += 5;
        } while (integer & 32);
        const shouldNegate = value & 1;
        value >>>= 1;
        if (shouldNegate) {
            value = -0x80000000 | -value;
        }
        state[j] += value;
        return pos;
    }
    function hasMoreVlq(mappings, i, length) {
        if (i >= length)
            return false;
        return mappings.charCodeAt(i) !== comma;
    }
    function sort(line) {
        line.sort(sortComparator);
    }
    function sortComparator(a, b) {
        return a[0] - b[0];
    }
    function encode(decoded) {
        const state = new Int32Array(5);
        const bufLength = 1024 * 16;
        const subLength = bufLength - 36;
        const buf = new Uint8Array(bufLength);
        const sub = buf.subarray(0, subLength);
        let pos = 0;
        let out = '';
        for (let i = 0; i < decoded.length; i++) {
            const line = decoded[i];
            if (i > 0) {
                if (pos === bufLength) {
                    out += td.decode(buf);
                    pos = 0;
                }
                buf[pos++] = semicolon;
            }
            if (line.length === 0)
                continue;
            state[0] = 0;
            for (let j = 0; j < line.length; j++) {
                const segment = line[j];
                // We can push up to 5 ints, each int can take at most 7 chars, and we
                // may push a comma.
                if (pos > subLength) {
                    out += td.decode(sub);
                    buf.copyWithin(0, subLength, pos);
                    pos -= subLength;
                }
                if (j > 0)
                    buf[pos++] = comma;
                pos = encodeInteger(buf, pos, state, segment, 0); // genColumn
                if (segment.length === 1)
                    continue;
                pos = encodeInteger(buf, pos, state, segment, 1); // sourcesIndex
                pos = encodeInteger(buf, pos, state, segment, 2); // sourceLine
                pos = encodeInteger(buf, pos, state, segment, 3); // sourceColumn
                if (segment.length === 4)
                    continue;
                pos = encodeInteger(buf, pos, state, segment, 4); // namesIndex
            }
        }
        return out + td.decode(buf.subarray(0, pos));
    }
    function encodeInteger(buf, pos, state, segment, j) {
        const next = segment[j];
        let num = next - state[j];
        state[j] = next;
        num = num < 0 ? (-num << 1) | 1 : num << 1;
        do {
            let clamped = num & 0b011111;
            num >>>= 5;
            if (num > 0)
                clamped |= 0b100000;
            buf[pos++] = intToChar[clamped];
        } while (num > 0);
        return pos;
    }

    exports.decode = decode;
    exports.encode = encode;

    Object.defineProperty(exports, '__esModule', { value: true });

}));
//# sourceMappingURL=sourcemap-codec.umd.js.map
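For context, the UMD wrapper above picks an export target in order: CommonJS `exports`, then AMD `define`, then a `sourcemapCodec` property on the global object, so the same file serves Node, AMD loaders, and plain `<script>` tags. A usage sketch (the decoded shape shown is for the sample mapping):

```typescript
// Node / CommonJS: same API as the ESM build.
const { decode } = require('@jridgewell/sourcemap-codec');
decode(';EAEEA'); // [ [], [ [2, 0, 2, 2, 0] ] ]

// Plain <script>: the wrapper attaches a global instead:
//   window.sourcemapCodec.decode(';EAEEA');
```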
1
node_modules/@jridgewell/sourcemap-codec/dist/sourcemap-codec.umd.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
6
node_modules/@jridgewell/sourcemap-codec/dist/types/sourcemap-codec.d.ts
generated
vendored
Normal file
@@ -0,0 +1,6 @@
export declare type SourceMapSegment = [number] | [number, number, number, number] | [number, number, number, number, number];
export declare type SourceMapLine = SourceMapSegment[];
export declare type SourceMapMappings = SourceMapLine[];
export declare function decode(mappings: string): SourceMapMappings;
export declare function encode(decoded: SourceMapMappings): string;
export declare function encode(decoded: Readonly<SourceMapMappings>): string;
74
node_modules/@jridgewell/sourcemap-codec/package.json
generated
vendored
Normal file
@@ -0,0 +1,74 @@
{
  "name": "@jridgewell/sourcemap-codec",
  "version": "1.4.15",
  "description": "Encode/decode sourcemap mappings",
  "keywords": [
    "sourcemap",
    "vlq"
  ],
  "main": "dist/sourcemap-codec.umd.js",
  "module": "dist/sourcemap-codec.mjs",
  "types": "dist/types/sourcemap-codec.d.ts",
  "files": [
    "dist"
  ],
  "exports": {
    ".": [
      {
        "types": "./dist/types/sourcemap-codec.d.ts",
        "browser": "./dist/sourcemap-codec.umd.js",
        "require": "./dist/sourcemap-codec.umd.js",
        "import": "./dist/sourcemap-codec.mjs"
      },
      "./dist/sourcemap-codec.umd.js"
    ],
    "./package.json": "./package.json"
  },
  "scripts": {
    "benchmark": "run-s build:rollup benchmark:*",
    "benchmark:install": "cd benchmark && npm install",
    "benchmark:only": "node --expose-gc benchmark/index.js",
    "build": "run-s -n build:*",
    "build:rollup": "rollup -c rollup.config.js",
    "build:ts": "tsc --project tsconfig.build.json",
    "lint": "run-s -n lint:*",
    "lint:prettier": "npm run test:lint:prettier -- --write",
    "lint:ts": "npm run test:lint:ts -- --fix",
    "prebuild": "rm -rf dist",
    "prepublishOnly": "npm run preversion",
    "preversion": "run-s test build",
    "pretest": "run-s build:rollup",
    "test": "run-s -n test:lint test:only",
    "test:debug": "mocha --inspect-brk",
    "test:lint": "run-s -n test:lint:*",
    "test:lint:prettier": "prettier --check '{src,test}/**/*.ts'",
    "test:lint:ts": "eslint '{src,test}/**/*.ts'",
    "test:only": "mocha",
    "test:coverage": "c8 mocha",
    "test:watch": "mocha --watch"
  },
  "repository": {
    "type": "git",
    "url": "git+https://github.com/jridgewell/sourcemap-codec.git"
  },
  "author": "Rich Harris",
  "license": "MIT",
  "devDependencies": {
    "@rollup/plugin-typescript": "8.3.0",
    "@types/node": "17.0.15",
    "@typescript-eslint/eslint-plugin": "5.10.0",
    "@typescript-eslint/parser": "5.10.0",
    "benchmark": "2.1.4",
    "c8": "7.11.2",
    "eslint": "8.7.0",
    "eslint-config-prettier": "8.3.0",
    "mocha": "9.2.0",
    "npm-run-all": "4.1.5",
    "prettier": "2.5.1",
    "rollup": "2.64.0",
    "source-map": "0.6.1",
    "source-map-js": "1.0.2",
    "sourcemap-codec": "1.4.8",
    "typescript": "4.5.4"
  }
}
19
node_modules/@jridgewell/trace-mapping/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,19 @@
Copyright 2022 Justin Ridgewell <justin@ridgewell.name>

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
252
node_modules/@jridgewell/trace-mapping/README.md
generated
vendored
Normal file
@@ -0,0 +1,252 @@
# @jridgewell/trace-mapping

> Trace the original position through a source map

`trace-mapping` allows you to take the line and column of an output file and trace it to the
original location in the source file through a source map.

You may already be familiar with the [`source-map`][source-map] package's `SourceMapConsumer`. This
provides the same `originalPositionFor` and `generatedPositionFor` API, without requiring WASM.
## Installation

```sh
npm install @jridgewell/trace-mapping
```

## Usage

```typescript
import {
  TraceMap,
  originalPositionFor,
  generatedPositionFor,
  sourceContentFor,
} from '@jridgewell/trace-mapping';

const tracer = new TraceMap({
  version: 3,
  sources: ['input.js'],
  sourcesContent: ['content of input.js'],
  names: ['foo'],
  mappings: 'KAyCIA',
});

// Lines start at line 1, columns at column 0.
const traced = originalPositionFor(tracer, { line: 1, column: 5 });
assert.deepEqual(traced, {
  source: 'input.js',
  line: 42,
  column: 4,
  name: 'foo',
});

const content = sourceContentFor(tracer, traced.source);
assert.strictEqual(content, 'content of input.js');

const generated = generatedPositionFor(tracer, {
  source: 'input.js',
  line: 42,
  column: 4,
});
assert.deepEqual(generated, {
  line: 1,
  column: 5,
});
```
We also provide a lower level API to get the actual segment that matches our line and column. Unlike
`originalPositionFor`, `traceSegment` uses a 0-base for `line`:

```typescript
import { traceSegment } from '@jridgewell/trace-mapping';

// line is 0-base.
const traced = traceSegment(tracer, /* line */ 0, /* column */ 5);

// Segments are [outputColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex]
// Again, line is 0-base and so is sourceLine
assert.deepEqual(traced, [5, 0, 41, 4, 0]);
```
### SectionedSourceMaps

The sourcemap spec defines a special `sections` field that's designed to handle concatenation of
output code with associated sourcemaps. This type of sourcemap is rarely used (no major build tool
produces it), but if you are hand coding a concatenation you may need it. We provide an `AnyMap`
helper that can receive either a regular sourcemap or a `SectionedSourceMap` and returns a
`TraceMap` instance:

```typescript
import { AnyMap } from '@jridgewell/trace-mapping';
const fooOutput = 'foo';
const barOutput = 'bar';
const output = [fooOutput, barOutput].join('\n');

const sectioned = new AnyMap({
  version: 3,
  sections: [
    {
      // 0-base line and column
      offset: { line: 0, column: 0 },
      // fooOutput's sourcemap
      map: {
        version: 3,
        sources: ['foo.js'],
        names: ['foo'],
        mappings: 'AAAAA',
      },
    },
    {
      // barOutput's sourcemap will not affect the first line, only the second
      offset: { line: 1, column: 0 },
      map: {
        version: 3,
        sources: ['bar.js'],
        names: ['bar'],
        mappings: 'AAAAA',
      },
    },
  ],
});

const traced = originalPositionFor(sectioned, {
  line: 2,
  column: 0,
});

assert.deepEqual(traced, {
  source: 'bar.js',
  line: 1,
  column: 0,
  name: 'bar',
});
```
## Benchmarks

```
node v18.0.0

amp.js.map - 45120 segments

Memory Usage:
trace-mapping decoded 562400 bytes
trace-mapping encoded 5706544 bytes
source-map-js 10717664 bytes
source-map-0.6.1 17446384 bytes
source-map-0.8.0 9701757 bytes
Smallest memory usage is trace-mapping decoded

Init speed:
trace-mapping: decoded JSON input x 180 ops/sec ±0.34% (85 runs sampled)
trace-mapping: encoded JSON input x 364 ops/sec ±1.77% (89 runs sampled)
trace-mapping: decoded Object input x 3,116 ops/sec ±0.50% (96 runs sampled)
trace-mapping: encoded Object input x 410 ops/sec ±2.62% (85 runs sampled)
source-map-js: encoded Object input x 84.23 ops/sec ±0.91% (73 runs sampled)
source-map-0.6.1: encoded Object input x 37.21 ops/sec ±2.08% (51 runs sampled)
Fastest is trace-mapping: decoded Object input

Trace speed:
trace-mapping: decoded originalPositionFor x 3,952,212 ops/sec ±0.17% (98 runs sampled)
trace-mapping: encoded originalPositionFor x 3,487,468 ops/sec ±1.58% (90 runs sampled)
source-map-js: encoded originalPositionFor x 827,730 ops/sec ±0.78% (97 runs sampled)
source-map-0.6.1: encoded originalPositionFor x 748,991 ops/sec ±0.53% (94 runs sampled)
source-map-0.8.0: encoded originalPositionFor x 2,532,894 ops/sec ±0.57% (95 runs sampled)
Fastest is trace-mapping: decoded originalPositionFor


***


babel.min.js.map - 347793 segments

Memory Usage:
trace-mapping decoded 89832 bytes
trace-mapping encoded 35474640 bytes
source-map-js 51257176 bytes
source-map-0.6.1 63515664 bytes
source-map-0.8.0 42933752 bytes
Smallest memory usage is trace-mapping decoded

Init speed:
trace-mapping: decoded JSON input x 15.41 ops/sec ±8.65% (34 runs sampled)
trace-mapping: encoded JSON input x 28.20 ops/sec ±12.87% (42 runs sampled)
trace-mapping: decoded Object input x 964 ops/sec ±0.36% (99 runs sampled)
trace-mapping: encoded Object input x 31.77 ops/sec ±13.79% (45 runs sampled)
source-map-js: encoded Object input x 6.45 ops/sec ±5.16% (21 runs sampled)
source-map-0.6.1: encoded Object input x 4.07 ops/sec ±5.24% (15 runs sampled)
Fastest is trace-mapping: decoded Object input

Trace speed:
trace-mapping: decoded originalPositionFor x 7,183,038 ops/sec ±0.58% (95 runs sampled)
trace-mapping: encoded originalPositionFor x 5,192,185 ops/sec ±0.41% (100 runs sampled)
source-map-js: encoded originalPositionFor x 4,259,489 ops/sec ±0.79% (94 runs sampled)
source-map-0.6.1: encoded originalPositionFor x 3,742,629 ops/sec ±0.71% (95 runs sampled)
source-map-0.8.0: encoded originalPositionFor x 6,270,211 ops/sec ±0.64% (94 runs sampled)
Fastest is trace-mapping: decoded originalPositionFor


***


preact.js.map - 1992 segments

Memory Usage:
trace-mapping decoded 37128 bytes
trace-mapping encoded 247280 bytes
source-map-js 1143536 bytes
source-map-0.6.1 1290992 bytes
source-map-0.8.0 96544 bytes
Smallest memory usage is trace-mapping decoded

Init speed:
trace-mapping: decoded JSON input x 3,483 ops/sec ±0.30% (98 runs sampled)
trace-mapping: encoded JSON input x 6,092 ops/sec ±0.18% (97 runs sampled)
trace-mapping: decoded Object input x 249,076 ops/sec ±0.24% (98 runs sampled)
trace-mapping: encoded Object input x 14,555 ops/sec ±0.48% (100 runs sampled)
source-map-js: encoded Object input x 2,447 ops/sec ±0.36% (99 runs sampled)
source-map-0.6.1: encoded Object input x 1,201 ops/sec ±0.57% (96 runs sampled)
Fastest is trace-mapping: decoded Object input

Trace speed:
trace-mapping: decoded originalPositionFor x 7,620,192 ops/sec ±0.09% (99 runs sampled)
trace-mapping: encoded originalPositionFor x 6,872,554 ops/sec ±0.30% (97 runs sampled)
source-map-js: encoded originalPositionFor x 2,489,570 ops/sec ±0.35% (94 runs sampled)
source-map-0.6.1: encoded originalPositionFor x 1,698,633 ops/sec ±0.28% (98 runs sampled)
source-map-0.8.0: encoded originalPositionFor x 4,015,644 ops/sec ±0.22% (98 runs sampled)
Fastest is trace-mapping: decoded originalPositionFor


***


react.js.map - 5726 segments

Memory Usage:
trace-mapping decoded 16176 bytes
trace-mapping encoded 681552 bytes
source-map-js 2418352 bytes
source-map-0.6.1 2443672 bytes
source-map-0.8.0 111768 bytes
Smallest memory usage is trace-mapping decoded

Init speed:
trace-mapping: decoded JSON input x 1,720 ops/sec ±0.34% (98 runs sampled)
trace-mapping: encoded JSON input x 4,406 ops/sec ±0.35% (100 runs sampled)
trace-mapping: decoded Object input x 92,122 ops/sec ±0.10% (99 runs sampled)
trace-mapping: encoded Object input x 5,385 ops/sec ±0.37% (99 runs sampled)
source-map-js: encoded Object input x 794 ops/sec ±0.40% (98 runs sampled)
source-map-0.6.1: encoded Object input x 416 ops/sec ±0.54% (91 runs sampled)
Fastest is trace-mapping: decoded Object input

Trace speed:
trace-mapping: decoded originalPositionFor x 32,759,519 ops/sec ±0.33% (100 runs sampled)
trace-mapping: encoded originalPositionFor x 31,116,306 ops/sec ±0.33% (97 runs sampled)
source-map-js: encoded originalPositionFor x 17,458,435 ops/sec ±0.44% (97 runs sampled)
source-map-0.6.1: encoded originalPositionFor x 12,687,097 ops/sec ±0.43% (95 runs sampled)
source-map-0.8.0: encoded originalPositionFor x 23,538,275 ops/sec ±0.38% (95 runs sampled)
Fastest is trace-mapping: decoded originalPositionFor
```

[source-map]: https://www.npmjs.com/package/source-map
552
node_modules/@jridgewell/trace-mapping/dist/trace-mapping.mjs
generated
vendored
Normal file
@@ -0,0 +1,552 @@
|
||||
import { encode, decode } from '@jridgewell/sourcemap-codec';
|
||||
import resolveUri from '@jridgewell/resolve-uri';
|
||||
|
||||
function resolve(input, base) {
|
||||
// The base is always treated as a directory, if it's not empty.
|
||||
// https://github.com/mozilla/source-map/blob/8cb3ee57/lib/util.js#L327
|
||||
// https://github.com/chromium/chromium/blob/da4adbb3/third_party/blink/renderer/devtools/front_end/sdk/SourceMap.js#L400-L401
|
||||
if (base && !base.endsWith('/'))
|
||||
base += '/';
|
||||
return resolveUri(input, base);
|
||||
}
|
||||
|
||||
/**
|
||||
* Removes everything after the last "/", but leaves the slash.
|
||||
*/
|
||||
function stripFilename(path) {
|
||||
if (!path)
|
||||
return '';
|
||||
const index = path.lastIndexOf('/');
|
||||
return path.slice(0, index + 1);
|
||||
}
|
||||
|
||||
const COLUMN = 0;
|
||||
const SOURCES_INDEX = 1;
|
||||
const SOURCE_LINE = 2;
|
||||
const SOURCE_COLUMN = 3;
|
||||
const NAMES_INDEX = 4;
|
||||
const REV_GENERATED_LINE = 1;
|
||||
const REV_GENERATED_COLUMN = 2;
|
||||
|
||||
function maybeSort(mappings, owned) {
|
||||
const unsortedIndex = nextUnsortedSegmentLine(mappings, 0);
|
||||
if (unsortedIndex === mappings.length)
|
||||
return mappings;
|
||||
// If we own the array (meaning we parsed it from JSON), then we're free to directly mutate it. If
|
||||
// not, we do not want to modify the consumer's input array.
|
||||
if (!owned)
|
||||
mappings = mappings.slice();
|
||||
for (let i = unsortedIndex; i < mappings.length; i = nextUnsortedSegmentLine(mappings, i + 1)) {
|
||||
mappings[i] = sortSegments(mappings[i], owned);
|
||||
}
|
||||
return mappings;
|
||||
}
|
||||
function nextUnsortedSegmentLine(mappings, start) {
|
||||
for (let i = start; i < mappings.length; i++) {
|
||||
if (!isSorted(mappings[i]))
|
||||
return i;
|
||||
}
|
||||
return mappings.length;
|
||||
}
|
||||
function isSorted(line) {
|
||||
for (let j = 1; j < line.length; j++) {
|
||||
if (line[j][COLUMN] < line[j - 1][COLUMN]) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
function sortSegments(line, owned) {
|
||||
if (!owned)
|
||||
line = line.slice();
|
||||
return line.sort(sortComparator);
|
||||
}
|
||||
function sortComparator(a, b) {
|
||||
return a[COLUMN] - b[COLUMN];
|
||||
}
|
||||
|
||||
let found = false;
|
||||
/**
|
||||
* A binary search implementation that returns the index if a match is found.
|
||||
* If no match is found, then the left-index (the index associated with the item that comes just
|
||||
* before the desired index) is returned. To maintain proper sort order, a splice would happen at
|
||||
* the next index:
|
||||
*
|
||||
* ```js
|
||||
* const array = [1, 3];
|
||||
* const needle = 2;
|
||||
* const index = binarySearch(array, needle, (item, needle) => item - needle);
|
||||
*
|
||||
* assert.equal(index, 0);
|
||||
* array.splice(index + 1, 0, needle);
|
||||
* assert.deepEqual(array, [1, 2, 3]);
|
||||
* ```
|
||||
*/
|
||||
function binarySearch(haystack, needle, low, high) {
|
||||
while (low <= high) {
|
||||
const mid = low + ((high - low) >> 1);
|
||||
const cmp = haystack[mid][COLUMN] - needle;
|
||||
if (cmp === 0) {
|
||||
found = true;
|
||||
return mid;
|
||||
}
|
||||
if (cmp < 0) {
|
||||
low = mid + 1;
|
||||
}
|
||||
else {
|
||||
high = mid - 1;
|
||||
}
|
||||
}
|
||||
found = false;
|
||||
return low - 1;
|
||||
}
|
||||
function upperBound(haystack, needle, index) {
|
||||
for (let i = index + 1; i < haystack.length; index = i++) {
|
||||
if (haystack[i][COLUMN] !== needle)
|
||||
break;
|
||||
}
|
||||
return index;
|
||||
}
|
||||
function lowerBound(haystack, needle, index) {
|
||||
for (let i = index - 1; i >= 0; index = i--) {
|
||||
if (haystack[i][COLUMN] !== needle)
|
||||
break;
|
||||
}
|
||||
return index;
|
||||
}
|
||||
function memoizedState() {
|
||||
return {
|
||||
lastKey: -1,
|
||||
lastNeedle: -1,
|
||||
lastIndex: -1,
|
||||
};
|
||||
}
|
||||
/**
|
||||
* This overly complicated beast is just to record the last tested line/column and the resulting
|
||||
* index, allowing us to skip a few tests if mappings are monotonically increasing.
|
||||
*/
|
||||
function memoizedBinarySearch(haystack, needle, state, key) {
|
||||
const { lastKey, lastNeedle, lastIndex } = state;
|
||||
let low = 0;
|
||||
let high = haystack.length - 1;
|
||||
if (key === lastKey) {
|
||||
if (needle === lastNeedle) {
|
||||
found = lastIndex !== -1 && haystack[lastIndex][COLUMN] === needle;
|
||||
return lastIndex;
|
||||
}
|
||||
if (needle >= lastNeedle) {
|
||||
// lastIndex may be -1 if the previous needle was not found.
|
||||
low = lastIndex === -1 ? 0 : lastIndex;
|
||||
}
|
||||
else {
|
||||
high = lastIndex;
|
||||
}
|
||||
}
|
||||
state.lastKey = key;
|
||||
state.lastNeedle = needle;
|
||||
return (state.lastIndex = binarySearch(haystack, needle, low, high));
|
||||
}
|
||||
|
||||
// Rebuilds the original source files, with mappings that are ordered by source line/column instead
|
||||
// of generated line/column.
|
||||
function buildBySources(decoded, memos) {
|
||||
const sources = memos.map(buildNullArray);
|
||||
for (let i = 0; i < decoded.length; i++) {
|
||||
const line = decoded[i];
|
||||
for (let j = 0; j < line.length; j++) {
|
||||
const seg = line[j];
|
||||
if (seg.length === 1)
|
||||
continue;
|
||||
const sourceIndex = seg[SOURCES_INDEX];
|
||||
const sourceLine = seg[SOURCE_LINE];
|
||||
const sourceColumn = seg[SOURCE_COLUMN];
|
||||
const originalSource = sources[sourceIndex];
|
||||
const originalLine = (originalSource[sourceLine] || (originalSource[sourceLine] = []));
|
||||
const memo = memos[sourceIndex];
|
||||
// The binary search either found a match, or it found the left-index just before where the
|
||||
// segment should go. Either way, we want to insert after that. And there may be multiple
|
||||
// generated segments associated with an original location, so there may need to move several
|
||||
// indexes before we find where we need to insert.
|
||||
const index = upperBound(originalLine, sourceColumn, memoizedBinarySearch(originalLine, sourceColumn, memo, sourceLine));
|
||||
insert(originalLine, (memo.lastIndex = index + 1), [sourceColumn, i, seg[COLUMN]]);
|
||||
}
|
||||
}
|
||||
return sources;
|
||||
}
|
||||
function insert(array, index, value) {
|
||||
for (let i = array.length; i > index; i--) {
|
||||
array[i] = array[i - 1];
|
||||
}
|
||||
array[index] = value;
|
||||
}
|
||||
// Null arrays allow us to use ordered index keys without actually allocating contiguous memory like
|
||||
// a real array. We use a null-prototype object to avoid prototype pollution and deoptimizations.
|
||||
// Numeric properties on objects are magically sorted in ascending order by the engine regardless of
|
||||
// the insertion order. So, by setting any numeric keys, even out of order, we'll get ascending
|
||||
// order when iterating with for-in.
|
||||
function buildNullArray() {
|
||||
return { __proto__: null };
|
||||
}
|
||||
|
||||
const AnyMap = function (map, mapUrl) {
|
||||
const parsed = typeof map === 'string' ? JSON.parse(map) : map;
|
||||
if (!('sections' in parsed))
|
||||
return new TraceMap(parsed, mapUrl);
|
||||
const mappings = [];
|
||||
const sources = [];
|
||||
const sourcesContent = [];
|
||||
const names = [];
|
||||
recurse(parsed, mapUrl, mappings, sources, sourcesContent, names, 0, 0, Infinity, Infinity);
|
||||
const joined = {
|
||||
version: 3,
|
||||
file: parsed.file,
|
||||
names,
|
||||
sources,
|
||||
sourcesContent,
|
||||
mappings,
|
||||
};
|
||||
return presortedDecodedMap(joined);
|
||||
};
|
||||
function recurse(input, mapUrl, mappings, sources, sourcesContent, names, lineOffset, columnOffset, stopLine, stopColumn) {
|
||||
const { sections } = input;
|
||||
for (let i = 0; i < sections.length; i++) {
|
||||
const { map, offset } = sections[i];
|
||||
let sl = stopLine;
|
||||
let sc = stopColumn;
|
||||
if (i + 1 < sections.length) {
|
||||
const nextOffset = sections[i + 1].offset;
|
||||
sl = Math.min(stopLine, lineOffset + nextOffset.line);
|
||||
if (sl === stopLine) {
|
||||
sc = Math.min(stopColumn, columnOffset + nextOffset.column);
|
||||
}
|
||||
else if (sl < stopLine) {
|
||||
sc = columnOffset + nextOffset.column;
|
||||
}
|
||||
}
|
||||
addSection(map, mapUrl, mappings, sources, sourcesContent, names, lineOffset + offset.line, columnOffset + offset.column, sl, sc);
|
||||
}
|
||||
}
|
||||
function addSection(input, mapUrl, mappings, sources, sourcesContent, names, lineOffset, columnOffset, stopLine, stopColumn) {
|
||||
if ('sections' in input)
|
||||
return recurse(...arguments);
|
||||
const map = new TraceMap(input, mapUrl);
|
||||
const sourcesOffset = sources.length;
|
||||
const namesOffset = names.length;
|
||||
const decoded = decodedMappings(map);
|
||||
const { resolvedSources, sourcesContent: contents } = map;
|
||||
append(sources, resolvedSources);
|
||||
append(names, map.names);
|
||||
if (contents)
|
||||
append(sourcesContent, contents);
|
||||
else
|
||||
for (let i = 0; i < resolvedSources.length; i++)
|
||||
sourcesContent.push(null);
|
||||
for (let i = 0; i < decoded.length; i++) {
|
||||
const lineI = lineOffset + i;
|
||||
// We can only add so many lines before we step into the range that the next section's map
|
||||
// controls. When we get to the last line, then we'll start checking the segments to see if
|
||||
// they've crossed into the column range. But it may not have any columns that overstep, so we
|
||||
// still need to check that we don't overstep lines, too.
|
||||
if (lineI > stopLine)
|
||||
return;
|
||||
// The out line may already exist in mappings (if we're continuing the line started by a
|
||||
// previous section). Or, we may have jumped ahead several lines to start this section.
|
||||
const out = getLine(mappings, lineI);
|
||||
// On the 0th loop, the section's column offset shifts us forward. On all other lines (since the
|
||||
// map can be multiple lines), it doesn't.
|
||||
const cOffset = i === 0 ? columnOffset : 0;
|
||||
const line = decoded[i];
|
||||
for (let j = 0; j < line.length; j++) {
|
||||
const seg = line[j];
|
||||
const column = cOffset + seg[COLUMN];
|
||||
// If this segment steps into the column range that the next section's map controls, we need
|
||||
// to stop early.
|
||||
if (lineI === stopLine && column >= stopColumn)
|
||||
return;
|
||||
if (seg.length === 1) {
|
||||
out.push([column]);
|
||||
continue;
|
||||
}
|
||||
const sourcesIndex = sourcesOffset + seg[SOURCES_INDEX];
|
||||
const sourceLine = seg[SOURCE_LINE];
|
||||
const sourceColumn = seg[SOURCE_COLUMN];
|
||||
out.push(seg.length === 4
|
||||
? [column, sourcesIndex, sourceLine, sourceColumn]
|
||||
: [column, sourcesIndex, sourceLine, sourceColumn, namesOffset + seg[NAMES_INDEX]]);
|
||||
}
|
||||
}
|
||||
}
|
||||
function append(arr, other) {
|
||||
for (let i = 0; i < other.length; i++)
|
||||
arr.push(other[i]);
|
||||
}
|
||||
function getLine(arr, index) {
|
||||
for (let i = arr.length; i <= index; i++)
|
||||
arr[i] = [];
|
||||
return arr[index];
|
||||
}
|
||||
|
||||
const LINE_GTR_ZERO = '`line` must be greater than 0 (lines start at line 1)';
|
||||
const COL_GTR_EQ_ZERO = '`column` must be greater than or equal to 0 (columns start at column 0)';
|
||||
const LEAST_UPPER_BOUND = -1;
|
||||
const GREATEST_LOWER_BOUND = 1;
|
||||
/**
|
||||
* Returns the encoded (VLQ string) form of the SourceMap's mappings field.
|
||||
*/
|
||||
let encodedMappings;
|
||||
/**
|
||||
* Returns the decoded (array of lines of segments) form of the SourceMap's mappings field.
|
||||
*/
|
||||
let decodedMappings;
|
||||
/**
|
||||
* A low-level API to find the segment associated with a generated line/column (think, from a
|
||||
* stack trace). Line and column here are 0-based, unlike `originalPositionFor`.
|
||||
*/
|
||||
let traceSegment;
|
||||
/**
|
||||
* A higher-level API to find the source/line/column associated with a generated line/column
|
||||
* (think, from a stack trace). Line is 1-based, but column is 0-based, due to legacy behavior in
|
||||
* `source-map` library.
|
||||
*/
|
||||
let originalPositionFor;
|
||||
/**
|
||||
* Finds the generated line/column position of the provided source/line/column source position.
|
||||
*/
|
||||
let generatedPositionFor;
|
||||
/**
|
||||
* Finds all generated line/column positions of the provided source/line/column source position.
|
||||
*/
|
||||
let allGeneratedPositionsFor;
|
||||
/**
|
||||
* Iterates each mapping in generated position order.
|
||||
*/
|
||||
let eachMapping;
|
||||
/**
|
||||
* Retrieves the source content for a particular source, if its found. Returns null if not.
|
||||
*/
|
||||
let sourceContentFor;
|
||||
/**
|
||||
* A helper that skips sorting of the input map's mappings array, which can be expensive for larger
|
||||
* maps.
|
||||
*/
|
||||
let presortedDecodedMap;
|
||||
/**
|
||||
* Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
|
||||
* a sourcemap, or to JSON.stringify.
|
||||
*/
|
||||
let decodedMap;
|
||||
/**
|
||||
* Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
|
||||
* a sourcemap, or to JSON.stringify.
|
||||
*/
|
||||
let encodedMap;
|
||||
class TraceMap {
|
||||
constructor(map, mapUrl) {
|
||||
const isString = typeof map === 'string';
|
||||
if (!isString && map._decodedMemo)
|
||||
return map;
|
||||
const parsed = (isString ? JSON.parse(map) : map);
|
||||
const { version, file, names, sourceRoot, sources, sourcesContent } = parsed;
|
||||
this.version = version;
|
||||
this.file = file;
|
||||
this.names = names;
|
||||
this.sourceRoot = sourceRoot;
|
||||
this.sources = sources;
|
||||
this.sourcesContent = sourcesContent;
|
||||
const from = resolve(sourceRoot || '', stripFilename(mapUrl));
|
||||
this.resolvedSources = sources.map((s) => resolve(s || '', from));
|
||||
const { mappings } = parsed;
|
||||
if (typeof mappings === 'string') {
|
||||
this._encoded = mappings;
|
||||
this._decoded = undefined;
|
||||
}
|
||||
else {
|
||||
this._encoded = undefined;
|
||||
this._decoded = maybeSort(mappings, isString);
|
||||
}
|
||||
this._decodedMemo = memoizedState();
|
||||
this._bySources = undefined;
|
||||
this._bySourceMemos = undefined;
|
||||
}
|
||||
}
|
||||
(() => {
|
||||
encodedMappings = (map) => {
|
||||
var _a;
|
||||
return ((_a = map._encoded) !== null && _a !== void 0 ? _a : (map._encoded = encode(map._decoded)));
|
||||
};
|
||||
decodedMappings = (map) => {
|
||||
return (map._decoded || (map._decoded = decode(map._encoded)));
|
||||
};
|
||||
traceSegment = (map, line, column) => {
|
||||
const decoded = decodedMappings(map);
|
||||
// It's common for parent source maps to have pointers to lines that have no
|
||||
// mapping (like a "//# sourceMappingURL=") at the end of the child file.
|
||||
if (line >= decoded.length)
|
||||
return null;
|
||||
const segments = decoded[line];
|
||||
const index = traceSegmentInternal(segments, map._decodedMemo, line, column, GREATEST_LOWER_BOUND);
|
||||
return index === -1 ? null : segments[index];
|
||||
};
|
||||
originalPositionFor = (map, { line, column, bias }) => {
|
||||
line--;
|
||||
if (line < 0)
|
||||
throw new Error(LINE_GTR_ZERO);
|
||||
if (column < 0)
|
||||
throw new Error(COL_GTR_EQ_ZERO);
|
||||
const decoded = decodedMappings(map);
|
||||
// It's common for parent source maps to have pointers to lines that have no
|
||||
// mapping (like a "//# sourceMappingURL=") at the end of the child file.
|
||||
if (line >= decoded.length)
|
||||
return OMapping(null, null, null, null);
|
||||
const segments = decoded[line];
|
||||
const index = traceSegmentInternal(segments, map._decodedMemo, line, column, bias || GREATEST_LOWER_BOUND);
|
||||
if (index === -1)
|
||||
return OMapping(null, null, null, null);
|
||||
const segment = segments[index];
|
||||
if (segment.length === 1)
|
||||
return OMapping(null, null, null, null);
|
||||
const { names, resolvedSources } = map;
|
||||
return OMapping(resolvedSources[segment[SOURCES_INDEX]], segment[SOURCE_LINE] + 1, segment[SOURCE_COLUMN], segment.length === 5 ? names[segment[NAMES_INDEX]] : null);
|
||||
};
|
||||
allGeneratedPositionsFor = (map, { source, line, column, bias }) => {
|
||||
// SourceMapConsumer uses LEAST_UPPER_BOUND for some reason, so we follow suit.
|
||||
return generatedPosition(map, source, line, column, bias || LEAST_UPPER_BOUND, true);
|
||||
};
|
||||
generatedPositionFor = (map, { source, line, column, bias }) => {
|
||||
return generatedPosition(map, source, line, column, bias || GREATEST_LOWER_BOUND, false);
|
||||
};
|
||||
eachMapping = (map, cb) => {
|
||||
const decoded = decodedMappings(map);
|
||||
const { names, resolvedSources } = map;
|
||||
for (let i = 0; i < decoded.length; i++) {
|
||||
const line = decoded[i];
|
||||
for (let j = 0; j < line.length; j++) {
|
||||
const seg = line[j];
|
||||
const generatedLine = i + 1;
|
||||
const generatedColumn = seg[0];
|
||||
let source = null;
|
||||
let originalLine = null;
|
||||
let originalColumn = null;
|
||||
let name = null;
|
||||
if (seg.length !== 1) {
|
||||
source = resolvedSources[seg[1]];
|
||||
originalLine = seg[2] + 1;
|
||||
originalColumn = seg[3];
|
||||
}
|
||||
if (seg.length === 5)
|
||||
name = names[seg[4]];
                cb({
                    generatedLine,
                    generatedColumn,
                    source,
                    originalLine,
                    originalColumn,
                    name,
                });
            }
        }
    };
    sourceContentFor = (map, source) => {
        const { sources, resolvedSources, sourcesContent } = map;
        if (sourcesContent == null)
            return null;
        let index = sources.indexOf(source);
        if (index === -1)
            index = resolvedSources.indexOf(source);
        return index === -1 ? null : sourcesContent[index];
    };
    presortedDecodedMap = (map, mapUrl) => {
        const tracer = new TraceMap(clone(map, []), mapUrl);
        tracer._decoded = map.mappings;
        return tracer;
    };
    decodedMap = (map) => {
        return clone(map, decodedMappings(map));
    };
    encodedMap = (map) => {
        return clone(map, encodedMappings(map));
    };
    function generatedPosition(map, source, line, column, bias, all) {
        line--;
        if (line < 0)
            throw new Error(LINE_GTR_ZERO);
        if (column < 0)
            throw new Error(COL_GTR_EQ_ZERO);
        const { sources, resolvedSources } = map;
        let sourceIndex = sources.indexOf(source);
        if (sourceIndex === -1)
            sourceIndex = resolvedSources.indexOf(source);
        if (sourceIndex === -1)
            return all ? [] : GMapping(null, null);
        const generated = (map._bySources || (map._bySources = buildBySources(decodedMappings(map), (map._bySourceMemos = sources.map(memoizedState)))));
        const segments = generated[sourceIndex][line];
        if (segments == null)
            return all ? [] : GMapping(null, null);
        const memo = map._bySourceMemos[sourceIndex];
        if (all)
            return sliceGeneratedPositions(segments, memo, line, column, bias);
        const index = traceSegmentInternal(segments, memo, line, column, bias);
        if (index === -1)
            return GMapping(null, null);
        const segment = segments[index];
        return GMapping(segment[REV_GENERATED_LINE] + 1, segment[REV_GENERATED_COLUMN]);
    }
})();
function clone(map, mappings) {
    return {
        version: map.version,
        file: map.file,
        names: map.names,
        sourceRoot: map.sourceRoot,
        sources: map.sources,
        sourcesContent: map.sourcesContent,
        mappings,
    };
}
function OMapping(source, line, column, name) {
    return { source, line, column, name };
}
function GMapping(line, column) {
    return { line, column };
}
function traceSegmentInternal(segments, memo, line, column, bias) {
    let index = memoizedBinarySearch(segments, column, memo, line);
    if (found) {
        index = (bias === LEAST_UPPER_BOUND ? upperBound : lowerBound)(segments, column, index);
    }
    else if (bias === LEAST_UPPER_BOUND)
        index++;
    if (index === -1 || index === segments.length)
        return -1;
    return index;
}
function sliceGeneratedPositions(segments, memo, line, column, bias) {
    let min = traceSegmentInternal(segments, memo, line, column, GREATEST_LOWER_BOUND);
    // We ignored the bias when tracing the segment so that we're guaranteed to find the first (in
    // insertion order) segment that matched. Even if we did respect the bias when tracing, we would
    // still need to call `lowerBound()` to find the first segment, which is slower than just looking
    // for the GREATEST_LOWER_BOUND to begin with. The only difference that matters for us is when the
    // binary search didn't match, in which case GREATEST_LOWER_BOUND just needs to increment to
    // match LEAST_UPPER_BOUND.
    if (!found && bias === LEAST_UPPER_BOUND)
        min++;
    if (min === -1 || min === segments.length)
        return [];
    // We may have found the segment that started at an earlier column. If this is the case, then we
    // need to slice all generated segments that match _that_ column, because all such segments span
    // to our desired column.
    const matchedColumn = found ? column : segments[min][COLUMN];
    // The binary search is not guaranteed to find the lower bound when a match wasn't found.
    if (!found)
        min = lowerBound(segments, matchedColumn, min);
    const max = upperBound(segments, matchedColumn, min);
    const result = [];
    for (; min <= max; min++) {
        const segment = segments[min];
        result.push(GMapping(segment[REV_GENERATED_LINE] + 1, segment[REV_GENERATED_COLUMN]));
    }
    return result;
}

export { AnyMap, GREATEST_LOWER_BOUND, LEAST_UPPER_BOUND, TraceMap, allGeneratedPositionsFor, decodedMap, decodedMappings, eachMapping, encodedMap, encodedMappings, generatedPositionFor, originalPositionFor, presortedDecodedMap, sourceContentFor, traceSegment };
//# sourceMappingURL=trace-mapping.mjs.map
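For orientation (this note and the sketch are editorial, not part of the vendored file): the docblocks above fix the coordinate conventions, so a minimal, invented example of consuming the ESM build looks like this.

```js
import {
  TraceMap,
  originalPositionFor,
  generatedPositionFor,
} from '@jridgewell/trace-mapping';

// Invented one-segment map: generated 1:0 maps to input.js 1:0, name `foo`.
const tracer = new TraceMap({
  version: 3,
  sources: ['input.js'],
  names: ['foo'],
  mappings: 'AAAAA',
});

// Lines are 1-based and columns 0-based, per the docblocks above.
console.log(originalPositionFor(tracer, { line: 1, column: 0 }));
// => { source: 'input.js', line: 1, column: 0, name: 'foo' }

console.log(generatedPositionFor(tracer, { source: 'input.js', line: 1, column: 0 }));
// => { line: 1, column: 0 }
```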
1
node_modules/@jridgewell/trace-mapping/dist/trace-mapping.mjs.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
566
node_modules/@jridgewell/trace-mapping/dist/trace-mapping.umd.js
generated
vendored
Normal file
@@ -0,0 +1,566 @@
(function (global, factory) {
    typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports, require('@jridgewell/sourcemap-codec'), require('@jridgewell/resolve-uri')) :
    typeof define === 'function' && define.amd ? define(['exports', '@jridgewell/sourcemap-codec', '@jridgewell/resolve-uri'], factory) :
    (global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.traceMapping = {}, global.sourcemapCodec, global.resolveURI));
})(this, (function (exports, sourcemapCodec, resolveUri) { 'use strict';

    function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }

    var resolveUri__default = /*#__PURE__*/_interopDefaultLegacy(resolveUri);

    function resolve(input, base) {
        // The base is always treated as a directory, if it's not empty.
        // https://github.com/mozilla/source-map/blob/8cb3ee57/lib/util.js#L327
        // https://github.com/chromium/chromium/blob/da4adbb3/third_party/blink/renderer/devtools/front_end/sdk/SourceMap.js#L400-L401
        if (base && !base.endsWith('/'))
            base += '/';
        return resolveUri__default["default"](input, base);
    }

    /**
     * Removes everything after the last "/", but leaves the slash.
     */
    function stripFilename(path) {
        if (!path)
            return '';
        const index = path.lastIndexOf('/');
        return path.slice(0, index + 1);
    }

    const COLUMN = 0;
    const SOURCES_INDEX = 1;
    const SOURCE_LINE = 2;
    const SOURCE_COLUMN = 3;
    const NAMES_INDEX = 4;
    const REV_GENERATED_LINE = 1;
    const REV_GENERATED_COLUMN = 2;

    function maybeSort(mappings, owned) {
        const unsortedIndex = nextUnsortedSegmentLine(mappings, 0);
        if (unsortedIndex === mappings.length)
            return mappings;
        // If we own the array (meaning we parsed it from JSON), then we're free to directly mutate it. If
        // not, we do not want to modify the consumer's input array.
        if (!owned)
            mappings = mappings.slice();
        for (let i = unsortedIndex; i < mappings.length; i = nextUnsortedSegmentLine(mappings, i + 1)) {
            mappings[i] = sortSegments(mappings[i], owned);
        }
        return mappings;
    }
    function nextUnsortedSegmentLine(mappings, start) {
        for (let i = start; i < mappings.length; i++) {
            if (!isSorted(mappings[i]))
                return i;
        }
        return mappings.length;
    }
    function isSorted(line) {
        for (let j = 1; j < line.length; j++) {
            if (line[j][COLUMN] < line[j - 1][COLUMN]) {
                return false;
            }
        }
        return true;
    }
    function sortSegments(line, owned) {
        if (!owned)
            line = line.slice();
        return line.sort(sortComparator);
    }
    function sortComparator(a, b) {
        return a[COLUMN] - b[COLUMN];
    }

    let found = false;
    /**
     * A binary search implementation that returns the index if a match is found.
     * If no match is found, then the left-index (the index associated with the item that comes just
     * before the desired index) is returned. To maintain proper sort order, a splice would happen at
     * the next index:
     *
     * ```js
     * const array = [1, 3];
     * const needle = 2;
     * const index = binarySearch(array, needle, (item, needle) => item - needle);
     *
     * assert.equal(index, 0);
     * array.splice(index + 1, 0, needle);
     * assert.deepEqual(array, [1, 2, 3]);
     * ```
     */
    function binarySearch(haystack, needle, low, high) {
        while (low <= high) {
            const mid = low + ((high - low) >> 1);
            const cmp = haystack[mid][COLUMN] - needle;
            if (cmp === 0) {
                found = true;
                return mid;
            }
            if (cmp < 0) {
                low = mid + 1;
            }
            else {
                high = mid - 1;
            }
        }
        found = false;
        return low - 1;
    }
    function upperBound(haystack, needle, index) {
        for (let i = index + 1; i < haystack.length; index = i++) {
            if (haystack[i][COLUMN] !== needle)
                break;
        }
        return index;
    }
    function lowerBound(haystack, needle, index) {
        for (let i = index - 1; i >= 0; index = i--) {
            if (haystack[i][COLUMN] !== needle)
                break;
        }
        return index;
    }
    function memoizedState() {
        return {
            lastKey: -1,
            lastNeedle: -1,
            lastIndex: -1,
        };
    }
    /**
     * This overly complicated beast is just to record the last tested line/column and the resulting
     * index, allowing us to skip a few tests if mappings are monotonically increasing.
     */
    function memoizedBinarySearch(haystack, needle, state, key) {
        const { lastKey, lastNeedle, lastIndex } = state;
        let low = 0;
        let high = haystack.length - 1;
        if (key === lastKey) {
            if (needle === lastNeedle) {
                found = lastIndex !== -1 && haystack[lastIndex][COLUMN] === needle;
                return lastIndex;
            }
            if (needle >= lastNeedle) {
                // lastIndex may be -1 if the previous needle was not found.
                low = lastIndex === -1 ? 0 : lastIndex;
            }
            else {
                high = lastIndex;
            }
        }
        state.lastKey = key;
        state.lastNeedle = needle;
        return (state.lastIndex = binarySearch(haystack, needle, low, high));
    }

    // Rebuilds the original source files, with mappings that are ordered by source line/column instead
    // of generated line/column.
    function buildBySources(decoded, memos) {
        const sources = memos.map(buildNullArray);
        for (let i = 0; i < decoded.length; i++) {
            const line = decoded[i];
            for (let j = 0; j < line.length; j++) {
                const seg = line[j];
                if (seg.length === 1)
                    continue;
                const sourceIndex = seg[SOURCES_INDEX];
                const sourceLine = seg[SOURCE_LINE];
                const sourceColumn = seg[SOURCE_COLUMN];
                const originalSource = sources[sourceIndex];
                const originalLine = (originalSource[sourceLine] || (originalSource[sourceLine] = []));
                const memo = memos[sourceIndex];
                // The binary search either found a match, or it found the left-index just before where the
                // segment should go. Either way, we want to insert after that. And there may be multiple
                // generated segments associated with an original location, so we may need to move several
                // indexes before we find where we need to insert.
                const index = upperBound(originalLine, sourceColumn, memoizedBinarySearch(originalLine, sourceColumn, memo, sourceLine));
                insert(originalLine, (memo.lastIndex = index + 1), [sourceColumn, i, seg[COLUMN]]);
            }
        }
        return sources;
    }
    function insert(array, index, value) {
        for (let i = array.length; i > index; i--) {
            array[i] = array[i - 1];
        }
        array[index] = value;
    }
    // Null arrays allow us to use ordered index keys without actually allocating contiguous memory like
    // a real array. We use a null-prototype object to avoid prototype pollution and deoptimizations.
    // Numeric properties on objects are magically sorted in ascending order by the engine regardless of
    // the insertion order. So, by setting any numeric keys, even out of order, we'll get ascending
    // order when iterating with for-in.
    function buildNullArray() {
        return { __proto__: null };
    }

    const AnyMap = function (map, mapUrl) {
        const parsed = typeof map === 'string' ? JSON.parse(map) : map;
        if (!('sections' in parsed))
            return new TraceMap(parsed, mapUrl);
        const mappings = [];
        const sources = [];
        const sourcesContent = [];
        const names = [];
        recurse(parsed, mapUrl, mappings, sources, sourcesContent, names, 0, 0, Infinity, Infinity);
        const joined = {
            version: 3,
            file: parsed.file,
            names,
            sources,
            sourcesContent,
            mappings,
        };
        return exports.presortedDecodedMap(joined);
    };
    function recurse(input, mapUrl, mappings, sources, sourcesContent, names, lineOffset, columnOffset, stopLine, stopColumn) {
        const { sections } = input;
        for (let i = 0; i < sections.length; i++) {
            const { map, offset } = sections[i];
            let sl = stopLine;
            let sc = stopColumn;
            if (i + 1 < sections.length) {
                const nextOffset = sections[i + 1].offset;
                sl = Math.min(stopLine, lineOffset + nextOffset.line);
                if (sl === stopLine) {
                    sc = Math.min(stopColumn, columnOffset + nextOffset.column);
                }
                else if (sl < stopLine) {
                    sc = columnOffset + nextOffset.column;
                }
            }
            addSection(map, mapUrl, mappings, sources, sourcesContent, names, lineOffset + offset.line, columnOffset + offset.column, sl, sc);
        }
    }
    function addSection(input, mapUrl, mappings, sources, sourcesContent, names, lineOffset, columnOffset, stopLine, stopColumn) {
        if ('sections' in input)
            return recurse(...arguments);
        const map = new TraceMap(input, mapUrl);
        const sourcesOffset = sources.length;
        const namesOffset = names.length;
        const decoded = exports.decodedMappings(map);
        const { resolvedSources, sourcesContent: contents } = map;
        append(sources, resolvedSources);
        append(names, map.names);
        if (contents)
            append(sourcesContent, contents);
        else
            for (let i = 0; i < resolvedSources.length; i++)
                sourcesContent.push(null);
        for (let i = 0; i < decoded.length; i++) {
            const lineI = lineOffset + i;
            // We can only add so many lines before we step into the range that the next section's map
            // controls. When we get to the last line, then we'll start checking the segments to see if
            // they've crossed into the column range. But it may not have any columns that overstep, so we
            // still need to check that we don't overstep lines, too.
            if (lineI > stopLine)
                return;
            // The out line may already exist in mappings (if we're continuing the line started by a
            // previous section). Or, we may have jumped ahead several lines to start this section.
            const out = getLine(mappings, lineI);
            // On the 0th loop, the section's column offset shifts us forward. On all other lines (since the
            // map can be multiple lines), it doesn't.
            const cOffset = i === 0 ? columnOffset : 0;
            const line = decoded[i];
            for (let j = 0; j < line.length; j++) {
                const seg = line[j];
                const column = cOffset + seg[COLUMN];
                // If this segment steps into the column range that the next section's map controls, we need
                // to stop early.
                if (lineI === stopLine && column >= stopColumn)
                    return;
                if (seg.length === 1) {
                    out.push([column]);
                    continue;
                }
                const sourcesIndex = sourcesOffset + seg[SOURCES_INDEX];
                const sourceLine = seg[SOURCE_LINE];
                const sourceColumn = seg[SOURCE_COLUMN];
                out.push(seg.length === 4
                    ? [column, sourcesIndex, sourceLine, sourceColumn]
                    : [column, sourcesIndex, sourceLine, sourceColumn, namesOffset + seg[NAMES_INDEX]]);
            }
        }
    }
    function append(arr, other) {
        for (let i = 0; i < other.length; i++)
            arr.push(other[i]);
    }
    function getLine(arr, index) {
        for (let i = arr.length; i <= index; i++)
            arr[i] = [];
        return arr[index];
    }

    const LINE_GTR_ZERO = '`line` must be greater than 0 (lines start at line 1)';
    const COL_GTR_EQ_ZERO = '`column` must be greater than or equal to 0 (columns start at column 0)';
    const LEAST_UPPER_BOUND = -1;
    const GREATEST_LOWER_BOUND = 1;
    /**
     * Returns the encoded (VLQ string) form of the SourceMap's mappings field.
     */
    exports.encodedMappings = void 0;
    /**
     * Returns the decoded (array of lines of segments) form of the SourceMap's mappings field.
     */
    exports.decodedMappings = void 0;
    /**
     * A low-level API to find the segment associated with a generated line/column (think, from a
     * stack trace). Line and column here are 0-based, unlike `originalPositionFor`.
     */
    exports.traceSegment = void 0;
    /**
     * A higher-level API to find the source/line/column associated with a generated line/column
     * (think, from a stack trace). Line is 1-based, but column is 0-based, due to legacy behavior in
     * `source-map` library.
     */
    exports.originalPositionFor = void 0;
    /**
     * Finds the generated line/column position of the provided source/line/column source position.
     */
    exports.generatedPositionFor = void 0;
    /**
     * Finds all generated line/column positions of the provided source/line/column source position.
     */
    exports.allGeneratedPositionsFor = void 0;
    /**
     * Iterates each mapping in generated position order.
     */
    exports.eachMapping = void 0;
    /**
     * Retrieves the source content for a particular source, if it's found. Returns null if not.
     */
    exports.sourceContentFor = void 0;
    /**
     * A helper that skips sorting of the input map's mappings array, which can be expensive for larger
     * maps.
     */
    exports.presortedDecodedMap = void 0;
    /**
     * Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
     * a sourcemap, or to JSON.stringify.
     */
    exports.decodedMap = void 0;
    /**
     * Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
     * a sourcemap, or to JSON.stringify.
     */
    exports.encodedMap = void 0;
    class TraceMap {
        constructor(map, mapUrl) {
            const isString = typeof map === 'string';
            if (!isString && map._decodedMemo)
                return map;
            const parsed = (isString ? JSON.parse(map) : map);
            const { version, file, names, sourceRoot, sources, sourcesContent } = parsed;
            this.version = version;
            this.file = file;
            this.names = names;
            this.sourceRoot = sourceRoot;
            this.sources = sources;
            this.sourcesContent = sourcesContent;
            const from = resolve(sourceRoot || '', stripFilename(mapUrl));
            this.resolvedSources = sources.map((s) => resolve(s || '', from));
            const { mappings } = parsed;
            if (typeof mappings === 'string') {
                this._encoded = mappings;
                this._decoded = undefined;
            }
            else {
                this._encoded = undefined;
                this._decoded = maybeSort(mappings, isString);
            }
            this._decodedMemo = memoizedState();
            this._bySources = undefined;
            this._bySourceMemos = undefined;
        }
    }
    (() => {
        exports.encodedMappings = (map) => {
            var _a;
            return ((_a = map._encoded) !== null && _a !== void 0 ? _a : (map._encoded = sourcemapCodec.encode(map._decoded)));
        };
        exports.decodedMappings = (map) => {
            return (map._decoded || (map._decoded = sourcemapCodec.decode(map._encoded)));
        };
        exports.traceSegment = (map, line, column) => {
            const decoded = exports.decodedMappings(map);
            // It's common for parent source maps to have pointers to lines that have no
            // mapping (like a "//# sourceMappingURL=") at the end of the child file.
            if (line >= decoded.length)
                return null;
            const segments = decoded[line];
            const index = traceSegmentInternal(segments, map._decodedMemo, line, column, GREATEST_LOWER_BOUND);
            return index === -1 ? null : segments[index];
        };
        exports.originalPositionFor = (map, { line, column, bias }) => {
            line--;
            if (line < 0)
                throw new Error(LINE_GTR_ZERO);
            if (column < 0)
                throw new Error(COL_GTR_EQ_ZERO);
            const decoded = exports.decodedMappings(map);
            // It's common for parent source maps to have pointers to lines that have no
            // mapping (like a "//# sourceMappingURL=") at the end of the child file.
            if (line >= decoded.length)
                return OMapping(null, null, null, null);
            const segments = decoded[line];
            const index = traceSegmentInternal(segments, map._decodedMemo, line, column, bias || GREATEST_LOWER_BOUND);
            if (index === -1)
                return OMapping(null, null, null, null);
            const segment = segments[index];
            if (segment.length === 1)
                return OMapping(null, null, null, null);
            const { names, resolvedSources } = map;
            return OMapping(resolvedSources[segment[SOURCES_INDEX]], segment[SOURCE_LINE] + 1, segment[SOURCE_COLUMN], segment.length === 5 ? names[segment[NAMES_INDEX]] : null);
        };
        exports.allGeneratedPositionsFor = (map, { source, line, column, bias }) => {
            // SourceMapConsumer uses LEAST_UPPER_BOUND for some reason, so we follow suit.
            return generatedPosition(map, source, line, column, bias || LEAST_UPPER_BOUND, true);
        };
        exports.generatedPositionFor = (map, { source, line, column, bias }) => {
            return generatedPosition(map, source, line, column, bias || GREATEST_LOWER_BOUND, false);
        };
        exports.eachMapping = (map, cb) => {
            const decoded = exports.decodedMappings(map);
            const { names, resolvedSources } = map;
            for (let i = 0; i < decoded.length; i++) {
                const line = decoded[i];
                for (let j = 0; j < line.length; j++) {
                    const seg = line[j];
                    const generatedLine = i + 1;
                    const generatedColumn = seg[0];
                    let source = null;
                    let originalLine = null;
                    let originalColumn = null;
                    let name = null;
                    if (seg.length !== 1) {
                        source = resolvedSources[seg[1]];
                        originalLine = seg[2] + 1;
                        originalColumn = seg[3];
                    }
                    if (seg.length === 5)
                        name = names[seg[4]];
                    cb({
                        generatedLine,
                        generatedColumn,
                        source,
                        originalLine,
                        originalColumn,
                        name,
                    });
                }
            }
        };
        exports.sourceContentFor = (map, source) => {
            const { sources, resolvedSources, sourcesContent } = map;
            if (sourcesContent == null)
                return null;
            let index = sources.indexOf(source);
            if (index === -1)
                index = resolvedSources.indexOf(source);
            return index === -1 ? null : sourcesContent[index];
        };
        exports.presortedDecodedMap = (map, mapUrl) => {
            const tracer = new TraceMap(clone(map, []), mapUrl);
            tracer._decoded = map.mappings;
            return tracer;
        };
        exports.decodedMap = (map) => {
            return clone(map, exports.decodedMappings(map));
        };
        exports.encodedMap = (map) => {
            return clone(map, exports.encodedMappings(map));
        };
        function generatedPosition(map, source, line, column, bias, all) {
            line--;
            if (line < 0)
                throw new Error(LINE_GTR_ZERO);
            if (column < 0)
                throw new Error(COL_GTR_EQ_ZERO);
            const { sources, resolvedSources } = map;
            let sourceIndex = sources.indexOf(source);
            if (sourceIndex === -1)
                sourceIndex = resolvedSources.indexOf(source);
            if (sourceIndex === -1)
                return all ? [] : GMapping(null, null);
            const generated = (map._bySources || (map._bySources = buildBySources(exports.decodedMappings(map), (map._bySourceMemos = sources.map(memoizedState)))));
            const segments = generated[sourceIndex][line];
            if (segments == null)
                return all ? [] : GMapping(null, null);
            const memo = map._bySourceMemos[sourceIndex];
            if (all)
                return sliceGeneratedPositions(segments, memo, line, column, bias);
            const index = traceSegmentInternal(segments, memo, line, column, bias);
            if (index === -1)
                return GMapping(null, null);
            const segment = segments[index];
            return GMapping(segment[REV_GENERATED_LINE] + 1, segment[REV_GENERATED_COLUMN]);
        }
    })();
    function clone(map, mappings) {
        return {
            version: map.version,
            file: map.file,
            names: map.names,
            sourceRoot: map.sourceRoot,
            sources: map.sources,
            sourcesContent: map.sourcesContent,
            mappings,
        };
    }
    function OMapping(source, line, column, name) {
        return { source, line, column, name };
    }
    function GMapping(line, column) {
        return { line, column };
    }
    function traceSegmentInternal(segments, memo, line, column, bias) {
        let index = memoizedBinarySearch(segments, column, memo, line);
        if (found) {
            index = (bias === LEAST_UPPER_BOUND ? upperBound : lowerBound)(segments, column, index);
        }
        else if (bias === LEAST_UPPER_BOUND)
            index++;
        if (index === -1 || index === segments.length)
            return -1;
        return index;
    }
    function sliceGeneratedPositions(segments, memo, line, column, bias) {
        let min = traceSegmentInternal(segments, memo, line, column, GREATEST_LOWER_BOUND);
        // We ignored the bias when tracing the segment so that we're guaranteed to find the first (in
        // insertion order) segment that matched. Even if we did respect the bias when tracing, we would
        // still need to call `lowerBound()` to find the first segment, which is slower than just looking
        // for the GREATEST_LOWER_BOUND to begin with. The only difference that matters for us is when the
        // binary search didn't match, in which case GREATEST_LOWER_BOUND just needs to increment to
        // match LEAST_UPPER_BOUND.
        if (!found && bias === LEAST_UPPER_BOUND)
            min++;
        if (min === -1 || min === segments.length)
            return [];
        // We may have found the segment that started at an earlier column. If this is the case, then we
        // need to slice all generated segments that match _that_ column, because all such segments span
        // to our desired column.
        const matchedColumn = found ? column : segments[min][COLUMN];
        // The binary search is not guaranteed to find the lower bound when a match wasn't found.
        if (!found)
            min = lowerBound(segments, matchedColumn, min);
        const max = upperBound(segments, matchedColumn, min);
        const result = [];
        for (; min <= max; min++) {
            const segment = segments[min];
            result.push(GMapping(segment[REV_GENERATED_LINE] + 1, segment[REV_GENERATED_COLUMN]));
        }
        return result;
    }

    exports.AnyMap = AnyMap;
    exports.GREATEST_LOWER_BOUND = GREATEST_LOWER_BOUND;
    exports.LEAST_UPPER_BOUND = LEAST_UPPER_BOUND;
    exports.TraceMap = TraceMap;

    Object.defineProperty(exports, '__esModule', { value: true });

}));
//# sourceMappingURL=trace-mapping.umd.js.map
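The wrapper above takes the CommonJS branch when `exports` exists, the AMD branch when `define` is present, and otherwise attaches `global.traceMapping`. A hedged, editorial sketch of the CommonJS path, reusing an invented map:

```js
// CommonJS consumption of the UMD build (the `typeof exports === 'object'` branch).
const { TraceMap, originalPositionFor } = require('@jridgewell/trace-mapping');

const tracer = new TraceMap({
  version: 3,
  sources: ['input.js'],
  names: [],
  mappings: 'AAAA', // invented segment: generated 1:0 -> input.js 1:0, no name
});

console.log(originalPositionFor(tracer, { line: 1, column: 0 }));
// => { source: 'input.js', line: 1, column: 0, name: null }
```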
1
node_modules/@jridgewell/trace-mapping/dist/trace-mapping.umd.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
8
node_modules/@jridgewell/trace-mapping/dist/types/any-map.d.ts
generated
vendored
Normal file
@@ -0,0 +1,8 @@
import { TraceMap } from './trace-mapping';
import type { SectionedSourceMapInput } from './types';
declare type AnyMap = {
    new (map: SectionedSourceMapInput, mapUrl?: string | null): TraceMap;
    (map: SectionedSourceMapInput, mapUrl?: string | null): TraceMap;
};
export declare const AnyMap: AnyMap;
export {};
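`AnyMap`, implemented earlier in the ESM/UMD bodies, flattens a sectioned map (the `sections` shape declared in types.d.ts later in this diff) into a single presorted TraceMap. An editorial sketch with an invented two-section map:

```js
import { AnyMap, originalPositionFor } from '@jridgewell/trace-mapping';

// Invented sectioned map: the second section's mappings begin at generated line 2.
const tracer = AnyMap({
  version: 3,
  sections: [
    {
      offset: { line: 0, column: 0 },
      map: { version: 3, sources: ['a.js'], names: [], mappings: 'AAAA' },
    },
    {
      offset: { line: 1, column: 0 },
      map: { version: 3, sources: ['b.js'], names: [], mappings: 'AAAA' },
    },
  ],
});

console.log(originalPositionFor(tracer, { line: 2, column: 0 }));
// => { source: 'b.js', line: 1, column: 0, name: null }
```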
32
node_modules/@jridgewell/trace-mapping/dist/types/binary-search.d.ts
generated
vendored
Normal file
@@ -0,0 +1,32 @@
import type { SourceMapSegment, ReverseSegment } from './sourcemap-segment';
export declare type MemoState = {
    lastKey: number;
    lastNeedle: number;
    lastIndex: number;
};
export declare let found: boolean;
/**
 * A binary search implementation that returns the index if a match is found.
 * If no match is found, then the left-index (the index associated with the item that comes just
 * before the desired index) is returned. To maintain proper sort order, a splice would happen at
 * the next index:
 *
 * ```js
 * const array = [1, 3];
 * const needle = 2;
 * const index = binarySearch(array, needle, (item, needle) => item - needle);
 *
 * assert.equal(index, 0);
 * array.splice(index + 1, 0, needle);
 * assert.deepEqual(array, [1, 2, 3]);
 * ```
 */
export declare function binarySearch(haystack: SourceMapSegment[] | ReverseSegment[], needle: number, low: number, high: number): number;
export declare function upperBound(haystack: SourceMapSegment[] | ReverseSegment[], needle: number, index: number): number;
export declare function lowerBound(haystack: SourceMapSegment[] | ReverseSegment[], needle: number, index: number): number;
export declare function memoizedState(): MemoState;
/**
 * This overly complicated beast is just to record the last tested line/column and the resulting
 * index, allowing us to skip a few tests if mappings are monotonically increasing.
 */
export declare function memoizedBinarySearch(haystack: SourceMapSegment[] | ReverseSegment[], needle: number, state: MemoState, key: number): number;
7
node_modules/@jridgewell/trace-mapping/dist/types/by-source.d.ts
generated
vendored
Normal file
@@ -0,0 +1,7 @@
import type { ReverseSegment, SourceMapSegment } from './sourcemap-segment';
import type { MemoState } from './binary-search';
export declare type Source = {
    __proto__: null;
    [line: number]: Exclude<ReverseSegment, [number]>[];
};
export default function buildBySources(decoded: readonly SourceMapSegment[][], memos: MemoState[]): Source[];
1
node_modules/@jridgewell/trace-mapping/dist/types/resolve.d.ts
generated
vendored
Normal file
@@ -0,0 +1 @@
export default function resolve(input: string, base: string | undefined): string;
2
node_modules/@jridgewell/trace-mapping/dist/types/sort.d.ts
generated
vendored
Normal file
@@ -0,0 +1,2 @@
import type { SourceMapSegment } from './sourcemap-segment';
export default function maybeSort(mappings: SourceMapSegment[][], owned: boolean): SourceMapSegment[][];
16
node_modules/@jridgewell/trace-mapping/dist/types/sourcemap-segment.d.ts
generated
vendored
Normal file
@@ -0,0 +1,16 @@
declare type GeneratedColumn = number;
declare type SourcesIndex = number;
declare type SourceLine = number;
declare type SourceColumn = number;
declare type NamesIndex = number;
declare type GeneratedLine = number;
export declare type SourceMapSegment = [GeneratedColumn] | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn] | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn, NamesIndex];
export declare type ReverseSegment = [SourceColumn, GeneratedLine, GeneratedColumn];
export declare const COLUMN = 0;
export declare const SOURCES_INDEX = 1;
export declare const SOURCE_LINE = 2;
export declare const SOURCE_COLUMN = 3;
export declare const NAMES_INDEX = 4;
export declare const REV_GENERATED_LINE = 1;
export declare const REV_GENERATED_COLUMN = 2;
export {};
4
node_modules/@jridgewell/trace-mapping/dist/types/strip-filename.d.ts
generated
vendored
Normal file
@@ -0,0 +1,4 @@
/**
 * Removes everything after the last "/", but leaves the slash.
 */
export default function stripFilename(path: string | undefined | null): string;
74
node_modules/@jridgewell/trace-mapping/dist/types/trace-mapping.d.ts
generated
vendored
Normal file
@@ -0,0 +1,74 @@
import type { SourceMapSegment } from './sourcemap-segment';
import type { SourceMapV3, DecodedSourceMap, EncodedSourceMap, InvalidOriginalMapping, OriginalMapping, InvalidGeneratedMapping, GeneratedMapping, SourceMapInput, Needle, SourceNeedle, SourceMap, EachMapping } from './types';
export type { SourceMapSegment } from './sourcemap-segment';
export type { SourceMapInput, SectionedSourceMapInput, DecodedSourceMap, EncodedSourceMap, SectionedSourceMap, InvalidOriginalMapping, OriginalMapping as Mapping, OriginalMapping, InvalidGeneratedMapping, GeneratedMapping, EachMapping, } from './types';
export declare const LEAST_UPPER_BOUND = -1;
export declare const GREATEST_LOWER_BOUND = 1;
/**
 * Returns the encoded (VLQ string) form of the SourceMap's mappings field.
 */
export declare let encodedMappings: (map: TraceMap) => EncodedSourceMap['mappings'];
/**
 * Returns the decoded (array of lines of segments) form of the SourceMap's mappings field.
 */
export declare let decodedMappings: (map: TraceMap) => Readonly<DecodedSourceMap['mappings']>;
/**
 * A low-level API to find the segment associated with a generated line/column (think, from a
 * stack trace). Line and column here are 0-based, unlike `originalPositionFor`.
 */
export declare let traceSegment: (map: TraceMap, line: number, column: number) => Readonly<SourceMapSegment> | null;
/**
 * A higher-level API to find the source/line/column associated with a generated line/column
 * (think, from a stack trace). Line is 1-based, but column is 0-based, due to legacy behavior in
 * `source-map` library.
 */
export declare let originalPositionFor: (map: TraceMap, needle: Needle) => OriginalMapping | InvalidOriginalMapping;
/**
 * Finds the generated line/column position of the provided source/line/column source position.
 */
export declare let generatedPositionFor: (map: TraceMap, needle: SourceNeedle) => GeneratedMapping | InvalidGeneratedMapping;
/**
 * Finds all generated line/column positions of the provided source/line/column source position.
 */
export declare let allGeneratedPositionsFor: (map: TraceMap, needle: SourceNeedle) => GeneratedMapping[];
/**
 * Iterates each mapping in generated position order.
 */
export declare let eachMapping: (map: TraceMap, cb: (mapping: EachMapping) => void) => void;
/**
 * Retrieves the source content for a particular source, if it's found. Returns null if not.
 */
export declare let sourceContentFor: (map: TraceMap, source: string) => string | null;
/**
 * A helper that skips sorting of the input map's mappings array, which can be expensive for larger
 * maps.
 */
export declare let presortedDecodedMap: (map: DecodedSourceMap, mapUrl?: string) => TraceMap;
/**
 * Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
 * a sourcemap, or to JSON.stringify.
 */
export declare let decodedMap: (map: TraceMap) => Omit<DecodedSourceMap, 'mappings'> & {
    mappings: readonly SourceMapSegment[][];
};
/**
 * Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
 * a sourcemap, or to JSON.stringify.
 */
export declare let encodedMap: (map: TraceMap) => EncodedSourceMap;
export { AnyMap } from './any-map';
export declare class TraceMap implements SourceMap {
    version: SourceMapV3['version'];
    file: SourceMapV3['file'];
    names: SourceMapV3['names'];
    sourceRoot: SourceMapV3['sourceRoot'];
    sources: SourceMapV3['sources'];
    sourcesContent: SourceMapV3['sourcesContent'];
    resolvedSources: string[];
    private _encoded;
    private _decoded;
    private _decodedMemo;
    private _bySources;
    private _bySourceMemos;
    constructor(map: SourceMapInput, mapUrl?: string | null);
}
92
node_modules/@jridgewell/trace-mapping/dist/types/types.d.ts
generated
vendored
Normal file
@@ -0,0 +1,92 @@
import type { SourceMapSegment } from './sourcemap-segment';
import type { GREATEST_LOWER_BOUND, LEAST_UPPER_BOUND, TraceMap } from './trace-mapping';
export interface SourceMapV3 {
    file?: string | null;
    names: string[];
    sourceRoot?: string;
    sources: (string | null)[];
    sourcesContent?: (string | null)[];
    version: 3;
}
export interface EncodedSourceMap extends SourceMapV3 {
    mappings: string;
}
export interface DecodedSourceMap extends SourceMapV3 {
    mappings: SourceMapSegment[][];
}
export interface Section {
    offset: {
        line: number;
        column: number;
    };
    map: EncodedSourceMap | DecodedSourceMap | SectionedSourceMap;
}
export interface SectionedSourceMap {
    file?: string | null;
    sections: Section[];
    version: 3;
}
export declare type OriginalMapping = {
    source: string | null;
    line: number;
    column: number;
    name: string | null;
};
export declare type InvalidOriginalMapping = {
    source: null;
    line: null;
    column: null;
    name: null;
};
export declare type GeneratedMapping = {
    line: number;
    column: number;
};
export declare type InvalidGeneratedMapping = {
    line: null;
    column: null;
};
export declare type Bias = typeof GREATEST_LOWER_BOUND | typeof LEAST_UPPER_BOUND;
export declare type SourceMapInput = string | Ro<EncodedSourceMap> | Ro<DecodedSourceMap> | TraceMap;
export declare type SectionedSourceMapInput = SourceMapInput | Ro<SectionedSourceMap>;
export declare type Needle = {
    line: number;
    column: number;
    bias?: Bias;
};
export declare type SourceNeedle = {
    source: string;
    line: number;
    column: number;
    bias?: Bias;
};
export declare type EachMapping = {
    generatedLine: number;
    generatedColumn: number;
    source: null;
    originalLine: null;
    originalColumn: null;
    name: null;
} | {
    generatedLine: number;
    generatedColumn: number;
    source: string | null;
    originalLine: number;
    originalColumn: number;
    name: string | null;
};
export declare abstract class SourceMap {
    version: SourceMapV3['version'];
    file: SourceMapV3['file'];
    names: SourceMapV3['names'];
    sourceRoot: SourceMapV3['sourceRoot'];
    sources: SourceMapV3['sources'];
    sourcesContent: SourceMapV3['sourcesContent'];
    resolvedSources: SourceMapV3['sources'];
}
export declare type Ro<T> = T extends Array<infer V> ? V[] | Readonly<V[]> | RoArray<V> | Readonly<RoArray<V>> : T extends object ? T | Readonly<T> | RoObject<T> | Readonly<RoObject<T>> : T;
declare type RoArray<T> = Ro<T>[];
declare type RoObject<T> = {
    [K in keyof T]: T[K] | Ro<T[K]>;
};
export {};
21
node_modules/@jridgewell/trace-mapping/node_modules/@jridgewell/sourcemap-codec/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,21 @@
The MIT License

Copyright (c) 2015 Rich Harris

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
200
node_modules/@jridgewell/trace-mapping/node_modules/@jridgewell/sourcemap-codec/README.md
generated
vendored
Normal file
@@ -0,0 +1,200 @@
# sourcemap-codec

Encode/decode the `mappings` property of a [sourcemap](https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit).


## Why?

Sourcemaps are difficult to generate and manipulate, because the `mappings` property – the part that actually links the generated code back to the original source – is encoded using an obscure method called [Variable-length quantity](https://en.wikipedia.org/wiki/Variable-length_quantity). On top of that, each segment in the mapping contains offsets rather than absolute indices, which means that you can't look at a segment in isolation – you have to understand the whole sourcemap.

This package makes the process slightly easier.
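A worked illustration of the offsets point (editorial; the mapping string is a slice of the usage example below): every field is a VLQ-encoded delta against the previous segment, so a segment only makes sense once everything before it has been decoded.

```js
import { decode } from 'sourcemap-codec';

// 'EAAE' alone is just the deltas [2, 0, 0, 2]; applied after the segment
// [2, 0, 2, 2, 0] they accumulate to the absolute segment [4, 0, 2, 4].
decode( ';EAEEA,EAAE' );
// => [ [], [ [ 2, 0, 2, 2, 0 ], [ 4, 0, 2, 4 ] ] ]
```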
## Installation

```bash
npm install sourcemap-codec
```


## Usage

```js
import { encode, decode } from 'sourcemap-codec';

var decoded = decode( ';EAEEA,EAAE,EAAC,CAAE;ECQY,UACC' );

assert.deepEqual( decoded, [
	// the first line (of the generated code) has no mappings,
	// as shown by the starting semi-colon (which separates lines)
	[],

	// the second line contains four (comma-separated) segments
	[
		// segments are encoded as you'd expect:
		// [ generatedCodeColumn, sourceIndex, sourceCodeLine, sourceCodeColumn, nameIndex ]

		// i.e. the first segment begins at column 2, and maps back to the second column
		// of the second line (both zero-based) of the 0th source, and uses the 0th
		// name in the `map.names` array
		[ 2, 0, 2, 2, 0 ],

		// the remaining segments are 4-length rather than 5-length,
		// because they don't map a name
		[ 4, 0, 2, 4 ],
		[ 6, 0, 2, 5 ],
		[ 7, 0, 2, 7 ]
	],

	// the final line contains two segments
	[
		[ 2, 1, 10, 19 ],
		[ 12, 1, 11, 20 ]
	]
]);

var encoded = encode( decoded );
assert.equal( encoded, ';EAEEA,EAAE,EAAC,CAAE;ECQY,UACC' );
```


## Benchmarks

```
node v18.0.0

amp.js.map - 45120 segments

Decode Memory Usage:
@jridgewell/sourcemap-codec 5479160 bytes
sourcemap-codec 5659336 bytes
source-map-0.6.1 17144440 bytes
source-map-0.8.0 6867424 bytes
Smallest memory usage is @jridgewell/sourcemap-codec

Decode speed:
decode: @jridgewell/sourcemap-codec x 502 ops/sec ±1.03% (90 runs sampled)
decode: sourcemap-codec x 445 ops/sec ±0.97% (92 runs sampled)
decode: source-map-0.6.1 x 36.01 ops/sec ±1.64% (49 runs sampled)
decode: source-map-0.8.0 x 367 ops/sec ±0.04% (95 runs sampled)
Fastest is decode: @jridgewell/sourcemap-codec

Encode Memory Usage:
@jridgewell/sourcemap-codec 1261620 bytes
sourcemap-codec 9119248 bytes
source-map-0.6.1 8968560 bytes
source-map-0.8.0 8952952 bytes
Smallest memory usage is @jridgewell/sourcemap-codec

Encode speed:
encode: @jridgewell/sourcemap-codec x 738 ops/sec ±0.42% (98 runs sampled)
encode: sourcemap-codec x 238 ops/sec ±0.73% (88 runs sampled)
encode: source-map-0.6.1 x 162 ops/sec ±0.43% (84 runs sampled)
encode: source-map-0.8.0 x 191 ops/sec ±0.34% (90 runs sampled)
Fastest is encode: @jridgewell/sourcemap-codec


***


babel.min.js.map - 347793 segments

Decode Memory Usage:
@jridgewell/sourcemap-codec 35338184 bytes
sourcemap-codec 35922736 bytes
source-map-0.6.1 62366360 bytes
source-map-0.8.0 44337416 bytes
Smallest memory usage is @jridgewell/sourcemap-codec

Decode speed:
decode: @jridgewell/sourcemap-codec x 40.35 ops/sec ±4.47% (54 runs sampled)
decode: sourcemap-codec x 36.76 ops/sec ±3.67% (51 runs sampled)
decode: source-map-0.6.1 x 4.44 ops/sec ±2.15% (16 runs sampled)
decode: source-map-0.8.0 x 59.35 ops/sec ±0.05% (78 runs sampled)
Fastest is decode: source-map-0.8.0

Encode Memory Usage:
@jridgewell/sourcemap-codec 7212604 bytes
sourcemap-codec 21421456 bytes
source-map-0.6.1 25286888 bytes
source-map-0.8.0 25498744 bytes
Smallest memory usage is @jridgewell/sourcemap-codec

Encode speed:
encode: @jridgewell/sourcemap-codec x 112 ops/sec ±0.13% (84 runs sampled)
encode: sourcemap-codec x 30.23 ops/sec ±2.76% (53 runs sampled)
encode: source-map-0.6.1 x 19.43 ops/sec ±3.70% (37 runs sampled)
encode: source-map-0.8.0 x 19.40 ops/sec ±3.26% (37 runs sampled)
Fastest is encode: @jridgewell/sourcemap-codec


***


preact.js.map - 1992 segments

Decode Memory Usage:
@jridgewell/sourcemap-codec 500272 bytes
sourcemap-codec 516864 bytes
source-map-0.6.1 1596672 bytes
source-map-0.8.0 517272 bytes
Smallest memory usage is @jridgewell/sourcemap-codec

Decode speed:
decode: @jridgewell/sourcemap-codec x 16,137 ops/sec ±0.17% (99 runs sampled)
decode: sourcemap-codec x 12,139 ops/sec ±0.13% (99 runs sampled)
decode: source-map-0.6.1 x 1,264 ops/sec ±0.12% (100 runs sampled)
decode: source-map-0.8.0 x 9,894 ops/sec ±0.08% (101 runs sampled)
Fastest is decode: @jridgewell/sourcemap-codec

Encode Memory Usage:
@jridgewell/sourcemap-codec 321026 bytes
sourcemap-codec 830832 bytes
source-map-0.6.1 586608 bytes
source-map-0.8.0 586680 bytes
Smallest memory usage is @jridgewell/sourcemap-codec

Encode speed:
encode: @jridgewell/sourcemap-codec x 19,876 ops/sec ±0.78% (95 runs sampled)
encode: sourcemap-codec x 6,983 ops/sec ±0.15% (100 runs sampled)
encode: source-map-0.6.1 x 5,070 ops/sec ±0.12% (102 runs sampled)
encode: source-map-0.8.0 x 5,641 ops/sec ±0.17% (100 runs sampled)
Fastest is encode: @jridgewell/sourcemap-codec


***


react.js.map - 5726 segments

Decode Memory Usage:
@jridgewell/sourcemap-codec 734848 bytes
sourcemap-codec 954200 bytes
source-map-0.6.1 2276432 bytes
source-map-0.8.0 955488 bytes
Smallest memory usage is @jridgewell/sourcemap-codec

Decode speed:
decode: @jridgewell/sourcemap-codec x 5,723 ops/sec ±0.12% (98 runs sampled)
decode: sourcemap-codec x 4,555 ops/sec ±0.09% (101 runs sampled)
decode: source-map-0.6.1 x 437 ops/sec ±0.11% (93 runs sampled)
decode: source-map-0.8.0 x 3,441 ops/sec ±0.15% (100 runs sampled)
Fastest is decode: @jridgewell/sourcemap-codec

Encode Memory Usage:
@jridgewell/sourcemap-codec 638672 bytes
sourcemap-codec 1109840 bytes
source-map-0.6.1 1321224 bytes
source-map-0.8.0 1324448 bytes
Smallest memory usage is @jridgewell/sourcemap-codec

Encode speed:
encode: @jridgewell/sourcemap-codec x 6,801 ops/sec ±0.48% (98 runs sampled)
encode: sourcemap-codec x 2,533 ops/sec ±0.13% (101 runs sampled)
encode: source-map-0.6.1 x 2,248 ops/sec ±0.08% (100 runs sampled)
encode: source-map-0.8.0 x 2,303 ops/sec ±0.15% (100 runs sampled)
Fastest is encode: @jridgewell/sourcemap-codec
```

# License

MIT
164
node_modules/@jridgewell/trace-mapping/node_modules/@jridgewell/sourcemap-codec/dist/sourcemap-codec.mjs
generated
vendored
Normal file
@@ -0,0 +1,164 @@
const comma = ','.charCodeAt(0);
const semicolon = ';'.charCodeAt(0);
const chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/';
const intToChar = new Uint8Array(64); // 64 possible chars.
const charToInt = new Uint8Array(128); // z is 122 in ASCII
for (let i = 0; i < chars.length; i++) {
    const c = chars.charCodeAt(i);
    intToChar[i] = c;
    charToInt[c] = i;
}
// Provide a fallback for older environments.
const td = typeof TextDecoder !== 'undefined'
    ? /* #__PURE__ */ new TextDecoder()
    : typeof Buffer !== 'undefined'
        ? {
            decode(buf) {
                const out = Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength);
                return out.toString();
            },
        }
        : {
            decode(buf) {
                let out = '';
                for (let i = 0; i < buf.length; i++) {
                    out += String.fromCharCode(buf[i]);
                }
                return out;
            },
        };
function decode(mappings) {
    const state = new Int32Array(5);
    const decoded = [];
    let index = 0;
    do {
        const semi = indexOf(mappings, index);
        const line = [];
        let sorted = true;
        let lastCol = 0;
        state[0] = 0;
        for (let i = index; i < semi; i++) {
            let seg;
            i = decodeInteger(mappings, i, state, 0); // genColumn
            const col = state[0];
            if (col < lastCol)
                sorted = false;
            lastCol = col;
            if (hasMoreVlq(mappings, i, semi)) {
                i = decodeInteger(mappings, i, state, 1); // sourcesIndex
                i = decodeInteger(mappings, i, state, 2); // sourceLine
                i = decodeInteger(mappings, i, state, 3); // sourceColumn
                if (hasMoreVlq(mappings, i, semi)) {
                    i = decodeInteger(mappings, i, state, 4); // namesIndex
                    seg = [col, state[1], state[2], state[3], state[4]];
                }
                else {
                    seg = [col, state[1], state[2], state[3]];
                }
            }
            else {
                seg = [col];
            }
            line.push(seg);
        }
        if (!sorted)
            sort(line);
        decoded.push(line);
        index = semi + 1;
    } while (index <= mappings.length);
    return decoded;
}
function indexOf(mappings, index) {
    const idx = mappings.indexOf(';', index);
    return idx === -1 ? mappings.length : idx;
}
function decodeInteger(mappings, pos, state, j) {
    let value = 0;
    let shift = 0;
    let integer = 0;
    do {
        const c = mappings.charCodeAt(pos++);
        integer = charToInt[c];
        value |= (integer & 31) << shift;
        shift += 5;
    } while (integer & 32);
    const shouldNegate = value & 1;
    value >>>= 1;
    if (shouldNegate) {
        value = -0x80000000 | -value;
    }
    state[j] += value;
    return pos;
}
function hasMoreVlq(mappings, i, length) {
    if (i >= length)
        return false;
    return mappings.charCodeAt(i) !== comma;
}
function sort(line) {
    line.sort(sortComparator);
}
function sortComparator(a, b) {
    return a[0] - b[0];
}
function encode(decoded) {
    const state = new Int32Array(5);
    const bufLength = 1024 * 16;
    const subLength = bufLength - 36;
    const buf = new Uint8Array(bufLength);
    const sub = buf.subarray(0, subLength);
    let pos = 0;
    let out = '';
    for (let i = 0; i < decoded.length; i++) {
        const line = decoded[i];
        if (i > 0) {
            if (pos === bufLength) {
                out += td.decode(buf);
                pos = 0;
            }
            buf[pos++] = semicolon;
        }
        if (line.length === 0)
            continue;
        state[0] = 0;
        for (let j = 0; j < line.length; j++) {
            const segment = line[j];
            // We can push up to 5 ints, each int can take at most 7 chars, and we
            // may push a comma.
            if (pos > subLength) {
                out += td.decode(sub);
                buf.copyWithin(0, subLength, pos);
                pos -= subLength;
            }
            if (j > 0)
                buf[pos++] = comma;
            pos = encodeInteger(buf, pos, state, segment, 0); // genColumn
            if (segment.length === 1)
                continue;
            pos = encodeInteger(buf, pos, state, segment, 1); // sourcesIndex
            pos = encodeInteger(buf, pos, state, segment, 2); // sourceLine
            pos = encodeInteger(buf, pos, state, segment, 3); // sourceColumn
            if (segment.length === 4)
                continue;
            pos = encodeInteger(buf, pos, state, segment, 4); // namesIndex
        }
    }
    return out + td.decode(buf.subarray(0, pos));
}
function encodeInteger(buf, pos, state, segment, j) {
    const next = segment[j];
    let num = next - state[j];
    state[j] = next;
    num = num < 0 ? (-num << 1) | 1 : num << 1;
    do {
        let clamped = num & 0b011111;
        num >>>= 5;
        if (num > 0)
            clamped |= 0b100000;
        buf[pos++] = intToChar[clamped];
    } while (num > 0);
    return pos;
}

export { decode, encode };
//# sourceMappingURL=sourcemap-codec.mjs.map
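Editorial aside: `encode` and `decode` above are inverses for well-formed, sorted input, so a round-trip makes a cheap sanity check (the mapping string is the one from the package README):

```js
import { decode, encode } from '@jridgewell/sourcemap-codec';

const mappings = ';EAEEA,EAAE,EAAC,CAAE;ECQY,UACC';
console.log(encode(decode(mappings)) === mappings);
// => true
```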
1
node_modules/@jridgewell/trace-mapping/node_modules/@jridgewell/sourcemap-codec/dist/sourcemap-codec.mjs.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
175
node_modules/@jridgewell/trace-mapping/node_modules/@jridgewell/sourcemap-codec/dist/sourcemap-codec.umd.js
generated
vendored
Normal file
@@ -0,0 +1,175 @@
(function (global, factory) {
    typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports) :
    typeof define === 'function' && define.amd ? define(['exports'], factory) :
    (global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.sourcemapCodec = {}));
})(this, (function (exports) { 'use strict';

    const comma = ','.charCodeAt(0);
    const semicolon = ';'.charCodeAt(0);
    const chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/';
    const intToChar = new Uint8Array(64); // 64 possible chars.
    const charToInt = new Uint8Array(128); // z is 122 in ASCII
    for (let i = 0; i < chars.length; i++) {
        const c = chars.charCodeAt(i);
        intToChar[i] = c;
        charToInt[c] = i;
    }
    // Provide a fallback for older environments.
    const td = typeof TextDecoder !== 'undefined'
        ? /* #__PURE__ */ new TextDecoder()
        : typeof Buffer !== 'undefined'
            ? {
                decode(buf) {
                    const out = Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength);
                    return out.toString();
                },
            }
            : {
                decode(buf) {
                    let out = '';
                    for (let i = 0; i < buf.length; i++) {
                        out += String.fromCharCode(buf[i]);
                    }
                    return out;
                },
            };
    function decode(mappings) {
        const state = new Int32Array(5);
        const decoded = [];
        let index = 0;
        do {
            const semi = indexOf(mappings, index);
            const line = [];
            let sorted = true;
            let lastCol = 0;
            state[0] = 0;
            for (let i = index; i < semi; i++) {
                let seg;
                i = decodeInteger(mappings, i, state, 0); // genColumn
                const col = state[0];
                if (col < lastCol)
                    sorted = false;
                lastCol = col;
                if (hasMoreVlq(mappings, i, semi)) {
                    i = decodeInteger(mappings, i, state, 1); // sourcesIndex
                    i = decodeInteger(mappings, i, state, 2); // sourceLine
                    i = decodeInteger(mappings, i, state, 3); // sourceColumn
                    if (hasMoreVlq(mappings, i, semi)) {
                        i = decodeInteger(mappings, i, state, 4); // namesIndex
                        seg = [col, state[1], state[2], state[3], state[4]];
                    }
                    else {
                        seg = [col, state[1], state[2], state[3]];
                    }
                }
                else {
                    seg = [col];
                }
                line.push(seg);
            }
            if (!sorted)
                sort(line);
            decoded.push(line);
            index = semi + 1;
        } while (index <= mappings.length);
        return decoded;
    }
    function indexOf(mappings, index) {
        const idx = mappings.indexOf(';', index);
        return idx === -1 ? mappings.length : idx;
    }
    function decodeInteger(mappings, pos, state, j) {
        let value = 0;
        let shift = 0;
        let integer = 0;
        do {
            const c = mappings.charCodeAt(pos++);
            integer = charToInt[c];
            value |= (integer & 31) << shift;
            shift += 5;
        } while (integer & 32);
        const shouldNegate = value & 1;
        value >>>= 1;
        if (shouldNegate) {
            value = -0x80000000 | -value;
        }
        state[j] += value;
        return pos;
    }
    function hasMoreVlq(mappings, i, length) {
        if (i >= length)
            return false;
        return mappings.charCodeAt(i) !== comma;
    }
    function sort(line) {
        line.sort(sortComparator);
    }
    function sortComparator(a, b) {
        return a[0] - b[0];
    }
    function encode(decoded) {
        const state = new Int32Array(5);
        const bufLength = 1024 * 16;
        const subLength = bufLength - 36;
        const buf = new Uint8Array(bufLength);
        const sub = buf.subarray(0, subLength);
        let pos = 0;
        let out = '';
        for (let i = 0; i < decoded.length; i++) {
            const line = decoded[i];
            if (i > 0) {
                if (pos === bufLength) {
                    out += td.decode(buf);
                    pos = 0;
                }
                buf[pos++] = semicolon;
            }
            if (line.length === 0)
                continue;
            state[0] = 0;
            for (let j = 0; j < line.length; j++) {
                const segment = line[j];
                // We can push up to 5 ints, each int can take at most 7 chars, and we
                // may push a comma.
                if (pos > subLength) {
                    out += td.decode(sub);
                    buf.copyWithin(0, subLength, pos);
                    pos -= subLength;
                }
                if (j > 0)
                    buf[pos++] = comma;
                pos = encodeInteger(buf, pos, state, segment, 0); // genColumn
                if (segment.length === 1)
                    continue;
                pos = encodeInteger(buf, pos, state, segment, 1); // sourcesIndex
                pos = encodeInteger(buf, pos, state, segment, 2); // sourceLine
                pos = encodeInteger(buf, pos, state, segment, 3); // sourceColumn
                if (segment.length === 4)
                    continue;
                pos = encodeInteger(buf, pos, state, segment, 4); // namesIndex
            }
        }
        return out + td.decode(buf.subarray(0, pos));
    }
    function encodeInteger(buf, pos, state, segment, j) {
        const next = segment[j];
        let num = next - state[j];
        state[j] = next;
        num = num < 0 ? (-num << 1) | 1 : num << 1;
        do {
            let clamped = num & 0b011111;
            num >>>= 5;
            if (num > 0)
                clamped |= 0b100000;
            buf[pos++] = intToChar[clamped];
        } while (num > 0);
        return pos;
    }

    exports.decode = decode;
    exports.encode = encode;

    Object.defineProperty(exports, '__esModule', { value: true });

}));
//# sourceMappingURL=sourcemap-codec.umd.js.map
1
node_modules/@jridgewell/trace-mapping/node_modules/@jridgewell/sourcemap-codec/dist/sourcemap-codec.umd.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
6
node_modules/@jridgewell/trace-mapping/node_modules/@jridgewell/sourcemap-codec/dist/types/sourcemap-codec.d.ts
generated
vendored
Normal file
@@ -0,0 +1,6 @@
export declare type SourceMapSegment = [number] | [number, number, number, number] | [number, number, number, number, number];
export declare type SourceMapLine = SourceMapSegment[];
export declare type SourceMapMappings = SourceMapLine[];
export declare function decode(mappings: string): SourceMapMappings;
export declare function encode(decoded: SourceMapMappings): string;
export declare function encode(decoded: Readonly<SourceMapMappings>): string;
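The declarations above are the codec's entire public surface. As a minimal round-trip sketch of `decode`/`encode` (illustrative, assuming the package resolves under its published name):

```js
// Round-trip a mappings string through the codec.
const { decode, encode } = require('@jridgewell/sourcemap-codec');

// 'AAAA,IAAI' = one generated line with two segments; each VLQ field is
// delta-encoded against the previous segment on the line.
const decoded = decode('AAAA,IAAI');
// => [[[0, 0, 0, 0], [4, 0, 0, 4]]]
console.log(encode(decoded) === 'AAAA,IAAI'); // true
```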
75
node_modules/@jridgewell/trace-mapping/node_modules/@jridgewell/sourcemap-codec/package.json
generated
vendored
Normal file
@@ -0,0 +1,75 @@
{
  "name": "@jridgewell/sourcemap-codec",
  "version": "1.4.14",
  "description": "Encode/decode sourcemap mappings",
  "keywords": [
    "sourcemap",
    "vlq"
  ],
  "main": "dist/sourcemap-codec.umd.js",
  "module": "dist/sourcemap-codec.mjs",
  "typings": "dist/types/sourcemap-codec.d.ts",
  "files": [
    "dist",
    "src"
  ],
  "exports": {
    ".": [
      {
        "types": "./dist/types/sourcemap-codec.d.ts",
        "browser": "./dist/sourcemap-codec.umd.js",
        "import": "./dist/sourcemap-codec.mjs",
        "require": "./dist/sourcemap-codec.umd.js"
      },
      "./dist/sourcemap-codec.umd.js"
    ],
    "./package.json": "./package.json"
  },
  "scripts": {
    "benchmark": "run-s build:rollup benchmark:*",
    "benchmark:install": "cd benchmark && npm install",
    "benchmark:only": "node --expose-gc benchmark/index.js",
    "build": "run-s -n build:*",
    "build:rollup": "rollup -c rollup.config.js",
    "build:ts": "tsc --project tsconfig.build.json",
    "lint": "run-s -n lint:*",
    "lint:prettier": "npm run test:lint:prettier -- --write",
    "lint:ts": "npm run test:lint:ts -- --fix",
    "prebuild": "rm -rf dist",
    "prepublishOnly": "npm run preversion",
    "preversion": "run-s test build",
    "pretest": "run-s build:rollup",
    "test": "run-s -n test:lint test:only",
    "test:debug": "mocha --inspect-brk",
    "test:lint": "run-s -n test:lint:*",
    "test:lint:prettier": "prettier --check '{src,test}/**/*.ts'",
    "test:lint:ts": "eslint '{src,test}/**/*.ts'",
    "test:only": "mocha",
    "test:coverage": "c8 mocha",
    "test:watch": "mocha --watch"
  },
  "repository": {
    "type": "git",
    "url": "git+https://github.com/jridgewell/sourcemap-codec.git"
  },
  "author": "Rich Harris",
  "license": "MIT",
  "devDependencies": {
    "@rollup/plugin-typescript": "8.3.0",
    "@types/node": "17.0.15",
    "@typescript-eslint/eslint-plugin": "5.10.0",
    "@typescript-eslint/parser": "5.10.0",
    "benchmark": "2.1.4",
    "c8": "7.11.2",
    "eslint": "8.7.0",
    "eslint-config-prettier": "8.3.0",
    "mocha": "9.2.0",
    "npm-run-all": "4.1.5",
    "prettier": "2.5.1",
    "rollup": "2.64.0",
    "source-map": "0.6.1",
    "source-map-js": "1.0.2",
    "sourcemap-codec": "1.4.8",
    "typescript": "4.5.4"
  }
}
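A sketch of how the conditional `exports` map above resolves for consumers; the resolved targets follow from the conditions shown, while the import statements themselves are only illustrative:

```js
// CommonJS consumers hit the "require" condition:
const codec = require('@jridgewell/sourcemap-codec'); // → ./dist/sourcemap-codec.umd.js

// An ESM consumer would hit the "import" condition instead:
// import * as codec from '@jridgewell/sourcemap-codec'; // → ./dist/sourcemap-codec.mjs
```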
198
node_modules/@jridgewell/trace-mapping/node_modules/@jridgewell/sourcemap-codec/src/sourcemap-codec.ts
generated
vendored
Normal file
@@ -0,0 +1,198 @@
export type SourceMapSegment =
  | [number]
  | [number, number, number, number]
  | [number, number, number, number, number];
export type SourceMapLine = SourceMapSegment[];
export type SourceMapMappings = SourceMapLine[];

const comma = ','.charCodeAt(0);
const semicolon = ';'.charCodeAt(0);
const chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/';
const intToChar = new Uint8Array(64); // 64 possible chars.
const charToInt = new Uint8Array(128); // z is 122 in ASCII

for (let i = 0; i < chars.length; i++) {
  const c = chars.charCodeAt(i);
  intToChar[i] = c;
  charToInt[c] = i;
}

// Provide a fallback for older environments.
const td =
  typeof TextDecoder !== 'undefined'
    ? /* #__PURE__ */ new TextDecoder()
    : typeof Buffer !== 'undefined'
    ? {
        decode(buf: Uint8Array) {
          const out = Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength);
          return out.toString();
        },
      }
    : {
        decode(buf: Uint8Array) {
          let out = '';
          for (let i = 0; i < buf.length; i++) {
            out += String.fromCharCode(buf[i]);
          }
          return out;
        },
      };

export function decode(mappings: string): SourceMapMappings {
  const state: [number, number, number, number, number] = new Int32Array(5) as any;
  const decoded: SourceMapMappings = [];

  let index = 0;
  do {
    const semi = indexOf(mappings, index);
    const line: SourceMapLine = [];
    let sorted = true;
    let lastCol = 0;
    state[0] = 0;

    for (let i = index; i < semi; i++) {
      let seg: SourceMapSegment;

      i = decodeInteger(mappings, i, state, 0); // genColumn
      const col = state[0];
      if (col < lastCol) sorted = false;
      lastCol = col;

      if (hasMoreVlq(mappings, i, semi)) {
        i = decodeInteger(mappings, i, state, 1); // sourcesIndex
        i = decodeInteger(mappings, i, state, 2); // sourceLine
        i = decodeInteger(mappings, i, state, 3); // sourceColumn

        if (hasMoreVlq(mappings, i, semi)) {
          i = decodeInteger(mappings, i, state, 4); // namesIndex
          seg = [col, state[1], state[2], state[3], state[4]];
        } else {
          seg = [col, state[1], state[2], state[3]];
        }
      } else {
        seg = [col];
      }

      line.push(seg);
    }

    if (!sorted) sort(line);
    decoded.push(line);
    index = semi + 1;
  } while (index <= mappings.length);

  return decoded;
}

function indexOf(mappings: string, index: number): number {
  const idx = mappings.indexOf(';', index);
  return idx === -1 ? mappings.length : idx;
}

function decodeInteger(mappings: string, pos: number, state: SourceMapSegment, j: number): number {
  let value = 0;
  let shift = 0;
  let integer = 0;

  do {
    const c = mappings.charCodeAt(pos++);
    integer = charToInt[c];
    value |= (integer & 31) << shift;
    shift += 5;
  } while (integer & 32);

  const shouldNegate = value & 1;
  value >>>= 1;

  if (shouldNegate) {
    value = -0x80000000 | -value;
  }

  state[j] += value;
  return pos;
}

function hasMoreVlq(mappings: string, i: number, length: number): boolean {
  if (i >= length) return false;
  return mappings.charCodeAt(i) !== comma;
}

function sort(line: SourceMapSegment[]) {
  line.sort(sortComparator);
}

function sortComparator(a: SourceMapSegment, b: SourceMapSegment): number {
  return a[0] - b[0];
}

export function encode(decoded: SourceMapMappings): string;
export function encode(decoded: Readonly<SourceMapMappings>): string;
export function encode(decoded: Readonly<SourceMapMappings>): string {
  const state: [number, number, number, number, number] = new Int32Array(5) as any;
  const bufLength = 1024 * 16;
  const subLength = bufLength - 36;
  const buf = new Uint8Array(bufLength);
  const sub = buf.subarray(0, subLength);
  let pos = 0;
  let out = '';

  for (let i = 0; i < decoded.length; i++) {
    const line = decoded[i];
    if (i > 0) {
      if (pos === bufLength) {
        out += td.decode(buf);
        pos = 0;
      }
      buf[pos++] = semicolon;
    }
    if (line.length === 0) continue;

    state[0] = 0;

    for (let j = 0; j < line.length; j++) {
      const segment = line[j];
      // We can push up to 5 ints, each int can take at most 7 chars, and we
      // may push a comma.
      if (pos > subLength) {
        out += td.decode(sub);
        buf.copyWithin(0, subLength, pos);
        pos -= subLength;
      }
      if (j > 0) buf[pos++] = comma;

      pos = encodeInteger(buf, pos, state, segment, 0); // genColumn

      if (segment.length === 1) continue;
      pos = encodeInteger(buf, pos, state, segment, 1); // sourcesIndex
      pos = encodeInteger(buf, pos, state, segment, 2); // sourceLine
      pos = encodeInteger(buf, pos, state, segment, 3); // sourceColumn

      if (segment.length === 4) continue;
      pos = encodeInteger(buf, pos, state, segment, 4); // namesIndex
    }
  }

  return out + td.decode(buf.subarray(0, pos));
}

function encodeInteger(
  buf: Uint8Array,
  pos: number,
  state: SourceMapSegment,
  segment: SourceMapSegment,
  j: number,
): number {
  const next = segment[j];
  let num = next - state[j];
  state[j] = next;

  num = num < 0 ? (-num << 1) | 1 : num << 1;
  do {
    let clamped = num & 0b011111;
    num >>>= 5;
    if (num > 0) clamped |= 0b100000;
    buf[pos++] = intToChar[clamped];
  } while (num > 0);

  return pos;
}
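A worked sketch of the Base64 VLQ scheme that `encodeInteger`/`decodeInteger` above implement: values are zig-zag encoded (sign in the low bit, magnitude shifted left one) into 5-bit groups, and bit `0b100000` of each Base64 digit marks a continuation. The calls below are illustrative:

```js
// +4:  zig-zag → (4 << 1) = 8 = 0b01000; fits in 5 bits, no continuation
//      bit, so the single digit is chars[8] === 'I'.
// -1:  zig-zag → (1 << 1) | 1 = 3 = 0b00011 → chars[3] === 'D'.
// +16: (16 << 1) = 32 needs two digits: low 5 bits 0b00000 plus the
//      continuation bit (0b100000 = 32 → 'g'), then 0b00001 → 'B' → 'gB'.
const { decode } = require('@jridgewell/sourcemap-codec');
console.log(decode('I'));  // [[[4]]]
console.log(decode('D'));  // [[[-1]]]
console.log(decode('gB')); // [[[16]]]
```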
75
node_modules/@jridgewell/trace-mapping/package.json
generated
vendored
Normal file
@@ -0,0 +1,75 @@
{
  "name": "@jridgewell/trace-mapping",
  "version": "0.3.18",
  "description": "Trace the original position through a source map",
  "keywords": [
    "source",
    "map"
  ],
  "main": "dist/trace-mapping.umd.js",
  "module": "dist/trace-mapping.mjs",
  "types": "dist/types/trace-mapping.d.ts",
  "files": [
    "dist"
  ],
  "exports": {
    ".": [
      {
        "types": "./dist/types/trace-mapping.d.ts",
        "browser": "./dist/trace-mapping.umd.js",
        "require": "./dist/trace-mapping.umd.js",
        "import": "./dist/trace-mapping.mjs"
      },
      "./dist/trace-mapping.umd.js"
    ],
    "./package.json": "./package.json"
  },
  "author": "Justin Ridgewell <justin@ridgewell.name>",
  "repository": {
    "type": "git",
    "url": "git+https://github.com/jridgewell/trace-mapping.git"
  },
  "license": "MIT",
  "scripts": {
    "benchmark": "run-s build:rollup benchmark:*",
    "benchmark:install": "cd benchmark && npm install",
    "benchmark:only": "node --expose-gc benchmark/index.mjs",
    "build": "run-s -n build:*",
    "build:rollup": "rollup -c rollup.config.js",
    "build:ts": "tsc --project tsconfig.build.json",
    "lint": "run-s -n lint:*",
    "lint:prettier": "npm run test:lint:prettier -- --write",
    "lint:ts": "npm run test:lint:ts -- --fix",
    "prebuild": "rm -rf dist",
    "prepublishOnly": "npm run preversion",
    "preversion": "run-s test build",
    "test": "run-s -n test:lint test:only",
    "test:debug": "ava debug",
    "test:lint": "run-s -n test:lint:*",
    "test:lint:prettier": "prettier --check '{src,test}/**/*.ts' '**/*.md'",
    "test:lint:ts": "eslint '{src,test}/**/*.ts'",
    "test:only": "c8 ava",
    "test:watch": "ava --watch"
  },
  "devDependencies": {
    "@rollup/plugin-typescript": "8.5.0",
    "@typescript-eslint/eslint-plugin": "5.39.0",
    "@typescript-eslint/parser": "5.39.0",
    "ava": "4.3.3",
    "benchmark": "2.1.4",
    "c8": "7.12.0",
    "esbuild": "0.15.10",
    "eslint": "8.25.0",
    "eslint-config-prettier": "8.5.0",
    "eslint-plugin-no-only-tests": "3.0.0",
    "npm-run-all": "4.1.5",
    "prettier": "2.7.1",
    "rollup": "2.79.1",
    "tsx": "3.10.1",
    "typescript": "4.8.4"
  },
  "dependencies": {
    "@jridgewell/resolve-uri": "3.1.0",
    "@jridgewell/sourcemap-codec": "1.4.14"
  }
}
507
node_modules/@mapbox/node-pre-gyp/CHANGELOG.md
generated
vendored
Normal file
@@ -0,0 +1,507 @@
# node-pre-gyp changelog

## 1.0.10
- Upgraded minimist to 1.2.6 to address dependabot alert [CVE-2021-44906](https://nvd.nist.gov/vuln/detail/CVE-2021-44906)

## 1.0.9
- Upgraded node-fetch to 2.6.7 to address [CVE-2022-0235](https://www.cve.org/CVERecord?id=CVE-2022-0235)
- Upgraded detect-libc to 2.0.0 to use non-blocking NodeJS(>=12) Report API

## 1.0.8
- Downgraded npmlog to maintain node v10 and v8 support (https://github.com/mapbox/node-pre-gyp/pull/624)

## 1.0.7
- Upgraded nyc and npmlog to address https://github.com/advisories/GHSA-93q8-gq69-wqmw

## 1.0.6
- Added node v17 to the internal node releases listing
- Upgraded various dependencies declared in package.json to latest major versions (node-fetch from 2.6.1 to 2.6.5, npmlog from 4.1.2 to 5.0.1, semver from 7.3.4 to 7.3.5, and tar from 6.1.0 to 6.1.11)
- Fixed bug in `staging_host` parameter (https://github.com/mapbox/node-pre-gyp/pull/590)

## 1.0.5
- Fix circular reference warning with node >= v14

## 1.0.4
- Added node v16 to the internal node releases listing

## 1.0.3
- Improved support for configuring s3 uploads (solves https://github.com/mapbox/node-pre-gyp/issues/571)
- New options added in https://github.com/mapbox/node-pre-gyp/pull/576: `bucket`, `region`, and `s3ForcePathStyle`

## 1.0.2
- Fixed regression in proxy support (https://github.com/mapbox/node-pre-gyp/issues/572)

## 1.0.1
- Switched from mkdirp@1.0.4 to make-dir@3.1.0 to avoid this bug: https://github.com/isaacs/node-mkdirp/issues/31

## 1.0.0
- Module is now name-spaced at `@mapbox/node-pre-gyp` and the original `node-pre-gyp` is deprecated.
- New: support for staging and production s3 targets (see README.md)
- BREAKING: no longer supporting `node_pre_gyp_accessKeyId` & `node_pre_gyp_secretAccessKey`, use `AWS_ACCESS_KEY_ID` & `AWS_SECRET_ACCESS_KEY` instead to authenticate against s3 for `info`, `publish`, and `unpublish` commands.
- Dropped node v6 support, added node v14 support
- Switched tests to use mapbox-owned bucket for testing
- Added coverage tracking and linting with eslint
- Added back support for symlinks inside the tarball
- Upgraded all test apps to N-API/node-addon-api
- Added `node_pre_gyp_s3_host` env var which has priority over the `--s3_host` option or default.
- Replaced needle with node-fetch
- Added proxy support for node-fetch
- Upgraded to mkdirp@1.x

## 0.17.0
- Got travis + appveyor green again
- Added support for more node versions

## 0.16.0
- Added Node 15 support in the local database (https://github.com/mapbox/node-pre-gyp/pull/520)

## 0.15.0
- Bump dependency on `mkdirp` from `^0.5.1` to `^0.5.3` (https://github.com/mapbox/node-pre-gyp/pull/492)
- Bump dependency on `needle` from `^2.2.1` to `^2.5.0` (https://github.com/mapbox/node-pre-gyp/pull/502)
- Added Node 14 support in the local database (https://github.com/mapbox/node-pre-gyp/pull/501)

## 0.14.0
- Defer module requires in napi.js (https://github.com/mapbox/node-pre-gyp/pull/434)
- Bump dependency on `tar` from `^4` to `^4.4.2` (https://github.com/mapbox/node-pre-gyp/pull/454)
- Support extracting compiled binary from local offline mirror (https://github.com/mapbox/node-pre-gyp/pull/459)
- Added Node 13 support in the local database (https://github.com/mapbox/node-pre-gyp/pull/483)

## 0.13.0
- Added Node 12 support in the local database (https://github.com/mapbox/node-pre-gyp/pull/449)

## 0.12.0
- Fixed double-build problem with node v10 (https://github.com/mapbox/node-pre-gyp/pull/428)
- Added node 11 support in the local database (https://github.com/mapbox/node-pre-gyp/pull/422)

## 0.11.0
- Fixed double-install problem with node v10
- Significant N-API improvements (https://github.com/mapbox/node-pre-gyp/pull/405)

## 0.10.3
- Now will use `request` over `needle` if request is installed. By default `needle` is used for `https`. This should unbreak proxy support that regressed in v0.9.0

## 0.10.2
- Fixed rc/deep-extent security vulnerability
- Fixed broken reinstall script due to incorrectly named get_best_napi_version

## 0.10.1
- Fix needle error event (@medns)

## 0.10.0
- Allow for a single-level module path when packing @allenluce (https://github.com/mapbox/node-pre-gyp/pull/371)
- Log warnings instead of errors when falling back @xzyfer (https://github.com/mapbox/node-pre-gyp/pull/366)
- Add Node.js v10 support to tests (https://github.com/mapbox/node-pre-gyp/pull/372)
- Remove retire.js from CI (https://github.com/mapbox/node-pre-gyp/pull/372)
- Remove support for Node.js v4 due to [EOL on April 30th, 2018](https://github.com/nodejs/Release/blob/7dd52354049cae99eed0e9fe01345b0722a86fde/schedule.json#L14)
- Update appveyor tests to install default NPM version instead of NPM v2.x for all Windows builds (https://github.com/mapbox/node-pre-gyp/pull/375)

## 0.9.1
- Fixed regression (in v0.9.0) with support for http redirects @allenluce (https://github.com/mapbox/node-pre-gyp/pull/361)

## 0.9.0
- Switched from using `request` to `needle` to reduce size of module deps (https://github.com/mapbox/node-pre-gyp/pull/350)

## 0.8.0
- N-API support (@inspiredware)

## 0.7.1
- Upgraded to tar v4.x

## 0.7.0
- Updated request and hawk (#347)
- Dropped node v0.10.x support

## 0.6.40
- Improved error reporting if an install fails

## 0.6.39
- Support for node v9
- Support for versioning on `{libc}` to allow binaries to work on non-glibc linux systems like alpine linux

## 0.6.38
- Maintaining compatibility (for v0.6.x series) with node v0.10.x

## 0.6.37
- Solved one part of #276: we now deduce the node ABI from the major version for node >= 2 even when not stored in the abi_crosswalk.json
- Fixed docs to avoid mentioning the deprecated and dangerous `prepublish` in package.json (#291)
- Add new node versions to crosswalk
- Ported tests to use tape instead of mocha
- Got appveyor tests passing by downgrading npm and node-gyp

## 0.6.36
- Removed the running of `testbinary` during install. Because this was regressed for so long, it is too dangerous to re-enable by default. Developers needing validation can call `node-pre-gyp testbinary` directly.
- Fixed regression in v0.6.35 for electron installs (now skipping binary validation which is not yet supported for electron)

## 0.6.35
- No longer recommending `npm ls` in `prepublish` (#291)
- Fixed testbinary command (#283) @szdavid92

## 0.6.34
- Added new node versions to crosswalk, including v8
- Upgraded deps to latest versions, started using `^` instead of `~` for all deps.

## 0.6.33
- Improved support for yarn

## 0.6.32
- Honor npm configuration for CA bundles (@heikkipora)
- Add node-pre-gyp and npm versions to user agent (@addaleax)
- Updated various deps
- Add known node version for v7.x

## 0.6.31
- Updated various deps

## 0.6.30
- Update to npmlog@4.x and semver@5.3.x
- Add known node version for v6.5.0

## 0.6.29
- Add known node versions for v0.10.45, v0.12.14, v4.4.4, v5.11.1, and v6.1.0

## 0.6.28
- Now more verbose when remote binaries are not available. This is needed since npm is increasingly more quiet by default and users need to know why builds are falling back to source compiles that might then error out.

## 0.6.27
- Add known node version for node v6
- Stopped bundling dependencies
- Documented method for module authors to avoid bundling node-pre-gyp
- See https://github.com/mapbox/node-pre-gyp/tree/master#configuring for details

## 0.6.26
- Skip validation for nw runtime (https://github.com/mapbox/node-pre-gyp/pull/181) via @fleg

## 0.6.25
- Improved support for auto-detection of electron runtime in `node-pre-gyp.find()`
- Pull request from @enlight - https://github.com/mapbox/node-pre-gyp/pull/187
- Add known node version for 4.4.1 and 5.9.1

## 0.6.24
- Add known node version for 5.8.0, 5.9.0, and 4.4.0.

## 0.6.23
- Add known node version for 0.10.43, 0.12.11, 4.3.2, and 5.7.1.

## 0.6.22
- Add known node version for 4.3.1, and 5.7.0.

## 0.6.21
- Add known node version for 0.10.42, 0.12.10, 4.3.0, and 5.6.0.

## 0.6.20
- Add known node version for 4.2.5, 4.2.6, 5.4.0, 5.4.1, and 5.5.0.

## 0.6.19
- Add known node version for 4.2.4

## 0.6.18
- Add new known node versions for 0.10.x, 0.12.x, 4.x, and 5.x

## 0.6.17
- Re-tagged to fix packaging problem of `Error: Cannot find module 'isarray'`

## 0.6.16
- Added known version in crosswalk for 5.1.0.

## 0.6.15
- Upgraded tar-pack (https://github.com/mapbox/node-pre-gyp/issues/182)
- Support custom binary hosting mirror (https://github.com/mapbox/node-pre-gyp/pull/170)
- Added known version in crosswalk for 4.2.2.

## 0.6.14
- Added node 5.x version

## 0.6.13
- Added more known node 4.x versions

## 0.6.12
- Added support for [Electron](http://electron.atom.io/). Just pass the `--runtime=electron` flag when building/installing. Thanks @zcbenz

## 0.6.11
- Added known node and io.js versions including more 3.x and 4.x versions

## 0.6.10
- Added known node and io.js versions including 3.x and 4.x versions
- Upgraded `tar` dep

## 0.6.9
- Upgraded `rc` dep
- Updated known io.js version: v2.4.0

## 0.6.8
- Upgraded `semver` and `rimraf` deps
- Updated known node and io.js versions

## 0.6.7
- Fixed `node_abi` versions for io.js 1.1.x -> 1.8.x (should be 43, but was stored as 42) (refs https://github.com/iojs/build/issues/94)

## 0.6.6
- Updated with known io.js 2.0.0 version

## 0.6.5
- Now respecting `npm_config_node_gyp` (https://github.com/npm/npm/pull/4887)
- Updated to semver@4.3.2
- Updated known node v0.12.x versions and io.js 1.x versions.

## 0.6.4
- Improved support for `io.js` (@fengmk2)
- Test coverage improvements (@mikemorris)
- Fixed support for `--dist-url` that regressed in 0.6.3

## 0.6.3
- Added support for passing raw options to node-gyp using `--` separator. Flags passed after the `--` to `node-pre-gyp configure` will be passed directly to gyp while flags passed after the `--` will be passed directly to make/visual studio.
- Added `node-pre-gyp configure` command to be able to call `node-gyp configure` directly
- Fix issue with require validation not working on windows 7 (@edgarsilva)

## 0.6.2
- Support for io.js >= v1.0.2
- Deferred require of `request` and `tar` to help speed up command line usage of `node-pre-gyp`.

## 0.6.1
- Fixed bundled `tar` version

## 0.6.0
- BREAKING: node odd releases like v0.11.x now use `major.minor.patch` for `{node_abi}` instead of `NODE_MODULE_VERSION` (#124)
- Added support for `toolset` option in versioning. By default is an empty string but `--toolset` can be passed to publish or install to select alternative binaries that target a custom toolset like C++11. For example to target Visual Studio 2014 modules like node-sqlite3 use `--toolset=v140`.
- Added support for `--no-rollback` option to request that a failed binary test does not remove the binary module but instead leaves it in place.
- Added support for `--update-binary` option to request an existing binary be re-installed and the check for a valid local module be skipped.
- Added support for passing build options from `npm` through `node-pre-gyp` to `node-gyp`: `--nodedir`, `--disturl`, `--python`, and `--msvs_version`

## 0.5.31
- Added support for deducing node_abi for node.js runtime from previous release if the series is even
- Added support for --target=0.10.33

## 0.5.30
- Repackaged with latest bundled deps

## 0.5.29
- Added support for semver `build`.
- Fixed support for downloading from urls that include `+`.

## 0.5.28
- Now reporting unix style paths only in reveal command

## 0.5.27
- Fixed support for auto-detecting s3 bucket name when it contains `.` - @taavo
- Fixed support for installing when path contains a `'` - @halfdan
- Ported tests to mocha

## 0.5.26
- Fix node-webkit support when `--target` option is not provided

## 0.5.25
- Fix bundling of deps

## 0.5.24
- Updated ABI crosswalk to include node v0.10.30 and v0.10.31

## 0.5.23
- Added `reveal` command. Pass no options to get all versioning data as json. Pass a second arg to grab a single versioned property value
- Added support for `--silent` (shortcut for `--loglevel=silent`)

## 0.5.22
- Fixed node-webkit versioning name (NOTE: node-webkit support still experimental)

## 0.5.21
- New package to fix `shasum check failed` error with v0.5.20

## 0.5.20
- Now versioning node-webkit binaries based on major.minor.patch - assuming no compatible ABI across versions (#90)

## 0.5.19
- Updated to know about more node-webkit releases

## 0.5.18
- Updated to know about more node-webkit releases

## 0.5.17
- Updated to know about node v0.10.29 release

## 0.5.16
- Now supporting all aws-sdk configuration parameters (http://docs.aws.amazon.com/AWSJavaScriptSDK/guide/node-configuring.html) (#86)

## 0.5.15
- Fixed installation of windows packages sub directories on unix systems (#84)

## 0.5.14
- Finished support for cross building using `--target_platform` option (#82)
- Now skipping binary validation on install if target arch/platform do not match the host.
- Removed multi-arch validating for OS X since it required a FAT node.js binary

## 0.5.13
- Fix problem in 0.5.12 whereby the wrong versions of mkdirp and semver were bundled.

## 0.5.12
- Improved support for node-webkit (@Mithgol)

## 0.5.11
- Updated target versions listing

## 0.5.10
- Fixed handling of `-debug` flag passed directly to node-pre-gyp (#72)
- Added optional second arg to `node_pre_gyp.find` to customize the default versioning options used to locate the runtime binary
- Failed install due to `testbinary` check failure no longer leaves behind binary (#70)

## 0.5.9
- Fixed regression in `testbinary` command causing installs to fail on windows with 0.5.7 (#60)

## 0.5.8
- Started bundling deps

## 0.5.7
- Fixed the `testbinary` check, which is used to determine whether to re-download or source compile, to work even in complex dependency situations (#63)
- Exposed the internal `testbinary` command in node-pre-gyp command line tool
- Fixed minor bug so that `fallback_to_build` option is always respected

## 0.5.6
- Added support for versioning on the `name` value in `package.json` (#57).
- Moved to using streams for reading tarball when publishing (#52)

## 0.5.5
- Improved binary validation that also now works with node-webkit (@Mithgol)
- Upgraded test apps to work with node v0.11.x
- Improved test coverage

## 0.5.4
- No longer depends on external install of node-gyp for compiling builds.

## 0.5.3
- Reverted fix for debian/nodejs since it broke windows (#45)

## 0.5.2
- Support for debian systems where the node binary is named `nodejs` (#45)
- Added `bin/node-pre-gyp.cmd` to be able to run command on windows locally (npm creates an .npm automatically when globally installed)
- Updated abi-crosswalk with node v0.10.26 entry.

## 0.5.1
- Various minor bug fixes, several improving windows support for publishing.

## 0.5.0
- Changed property names in `binary` object: now required are `module_name`, `module_path`, and `host`.
- Now `module_path` supports versioning, which allows developers to opt-in to using a versioned install path (#18).
- Added `remote_path` which also supports versioning.
- Changed `remote_uri` to `host`.

## 0.4.2
- Added support for `--target` flag to request cross-compile against a specific node/node-webkit version.
- Added preliminary support for node-webkit
- Fixed support for `--target_arch` option being respected in all cases.

## 0.4.1
- Fixed exception when only stderr is available in binary test (@bendi / #31)

## 0.4.0
- Enforce only `https:` based remote publishing access.
- Added `node-pre-gyp info` command to display listing of published binaries
- Added support for changing the directory node-pre-gyp should build in with the `-C/--directory` option.
- Added support for S3 prefixes.

## 0.3.1
- Added `unpublish` command.
- Fixed module path construction in tests.
- Added ability to disable falling back to build behavior via `npm install --fallback-to-build=false` which overrides setting in a dependency's package.json `install` target.

## 0.3.0
- Support for packaging all files in `module_path` directory - see `app4` for example
- Added `testpackage` command.
- Changed `clean` command to only delete `.node` not entire `build` directory since node-gyp will handle that.
- `.node` modules must be in a folder of their own since tar-pack will remove everything when it unpacks.
27
node_modules/@mapbox/node-pre-gyp/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,27 @@
Copyright (c), Mapbox

All rights reserved.

Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:

* Redistributions of source code must retain the above copyright notice,
  this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
  this list of conditions and the following disclaimer in the documentation
  and/or other materials provided with the distribution.
* Neither the name of node-pre-gyp nor the names of its contributors
  may be used to endorse or promote products derived from this software
  without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
742
node_modules/@mapbox/node-pre-gyp/README.md
generated
vendored
Normal file
@@ -0,0 +1,742 @@
# @mapbox/node-pre-gyp

#### @mapbox/node-pre-gyp makes it easy to publish and install Node.js C++ addons from binaries

Build status badges: [Travis CI](https://travis-ci.com/mapbox/node-pre-gyp) | [AppVeyor](https://ci.appveyor.com/project/Mapbox/node-pre-gyp)

`@mapbox/node-pre-gyp` stands between [npm](https://github.com/npm/npm) and [node-gyp](https://github.com/Tootallnate/node-gyp) and offers a cross-platform method of binary deployment.

### Special note on previous package

On Feb 9th, 2021 `@mapbox/node-pre-gyp@1.0.0` was [released](./CHANGELOG.md). Older, unscoped versions that are not part of the `@mapbox` org are deprecated and only `@mapbox/node-pre-gyp` will see updates going forward. To upgrade to the new package do:

```
npm uninstall node-pre-gyp --save
npm install @mapbox/node-pre-gyp --save
```

### Features

- A command line tool called `node-pre-gyp` that can install your package's C++ module from a binary.
- A variety of developer targeted commands for packaging, testing, and publishing binaries.
- A JavaScript module that can dynamically require your installed binary: `require('@mapbox/node-pre-gyp').find`

For a hello world example of a module packaged with `node-pre-gyp` see <https://github.com/springmeyer/node-addon-example> and [the wiki](https://github.com/mapbox/node-pre-gyp/wiki/Modules-using-node-pre-gyp) for real world examples.

## Credits

- The module is modeled after [node-gyp](https://github.com/Tootallnate/node-gyp) by [@Tootallnate](https://github.com/Tootallnate)
- Motivation for initial development came from [@ErisDS](https://github.com/ErisDS) and the [Ghost Project](https://github.com/TryGhost/Ghost).
- Development is sponsored by [Mapbox](https://www.mapbox.com/)

## FAQ

See the [Frequently Asked Questions](https://github.com/mapbox/node-pre-gyp/wiki/FAQ).

## Depends

- Node.js >= node v8.x

## Install

`node-pre-gyp` is designed to be installed as a local dependency of your Node.js C++ addon and accessed like:

    ./node_modules/.bin/node-pre-gyp --help

But you can also install it globally:

    npm install @mapbox/node-pre-gyp -g

## Usage

### Commands

View all possible commands:

    node-pre-gyp --help

- clean - Remove the entire folder containing the compiled .node module
- install - Install pre-built binary for module
- reinstall - Run "clean" and "install" at once
- build - Compile the module by dispatching to node-gyp or nw-gyp
- rebuild - Run "clean" and "build" at once
- package - Pack binary into tarball
- testpackage - Test that the staged package is valid
- publish - Publish pre-built binary
- unpublish - Unpublish pre-built binary
- info - Fetch info on published binaries

You can also chain commands:

    node-pre-gyp clean build unpublish publish info

### Options

Options include:

- `-C/--directory`: run the command in this directory
- `--build-from-source`: build from source instead of using pre-built binary
- `--update-binary`: reinstall by replacing previously installed local binary with remote binary
- `--runtime=node-webkit`: customize the runtime: `node`, `electron` and `node-webkit` are the valid options
- `--fallback-to-build`: fallback to building from source if pre-built binary is not available
- `--target=0.4.0`: Pass the target node or node-webkit version to compile against
- `--target_arch=ia32`: Pass the target arch and override the host `arch`. Any value that is [supported by Node.js](https://nodejs.org/api/os.html#osarch) is valid.
- `--target_platform=win32`: Pass the target platform and override the host `platform`. Valid values are `linux`, `darwin`, `win32`, `sunos`, `freebsd`, `openbsd`, and `aix`.

Both `--build-from-source` and `--fallback-to-build` can be passed alone or they can provide values. You can pass `--fallback-to-build=false` to override the option as declared in package.json. In addition to being able to pass `--build-from-source` you can also pass `--build-from-source=myapp` where `myapp` is the name of your module.

For example: `npm install --build-from-source=myapp`. This is useful if:

- `myapp` is referenced in the package.json of a larger app and therefore `myapp` is being installed as a dependency with `npm install`.
- The larger app also depends on other modules installed with `node-pre-gyp`
- You only want to trigger a source compile for `myapp` and not the other modules.

### Configuring

This is a guide to configuring your module to use node-pre-gyp.

#### 1) Add new entries to your `package.json`

- Add `@mapbox/node-pre-gyp` to `dependencies`
- Add `aws-sdk` as a `devDependency`
- Add a custom `install` script
- Declare a `binary` object

This looks like:

```js
"dependencies" : {
  "@mapbox/node-pre-gyp": "1.x"
},
"devDependencies": {
  "aws-sdk": "2.x"
},
"scripts": {
  "install": "node-pre-gyp install --fallback-to-build"
},
"binary": {
  "module_name": "your_module",
  "module_path": "./lib/binding/",
  "host": "https://your_module.s3-us-west-1.amazonaws.com"
}
```

For a full example see [node-addon-examples's package.json](https://github.com/springmeyer/node-addon-example/blob/master/package.json).

Let's break this down:

- Dependencies need to list `node-pre-gyp`
- Your devDependencies should list `aws-sdk` so that you can run `node-pre-gyp publish` locally or on a CI system. We recommend using `devDependencies` only since `aws-sdk` is large and not needed for `node-pre-gyp install`, which only uses http to fetch binaries
- Your `scripts` section should override the `install` target with `"install": "node-pre-gyp install --fallback-to-build"`. This allows node-pre-gyp to be used instead of the default npm behavior of always source compiling with `node-gyp` directly.
- Your package.json should contain a `binary` section describing key properties you provide to allow node-pre-gyp to package optimally. They are detailed below.

Note: in the past we recommended putting `@mapbox/node-pre-gyp` in the `bundledDependencies`, but we no longer recommend this. In the past there were npm bugs (with node versions 0.10.x) that could lead to node-pre-gyp not being available at the right time during install (unless we bundled). This should no longer be the case. Also, for a time we recommended using `"preinstall": "npm install @mapbox/node-pre-gyp"` as an alternative method to avoid needing to bundle. But this did not behave predictably across all npm versions - see https://github.com/mapbox/node-pre-gyp/issues/260 for the details. So we do not recommend using `preinstall` to install `@mapbox/node-pre-gyp`. More history on this at https://github.com/strongloop/fsevents/issues/157#issuecomment-265545908.

##### The `binary` object has three required properties

###### module_name

The name of your native node module. This value must:

- Match the name passed to [the NODE_MODULE macro](http://nodejs.org/api/addons.html#addons_hello_world)
- Be a valid C variable name (e.g. it cannot contain `-`)
- Not include the `.node` extension.

###### module_path

The location your native module is placed after a build. This should be an empty directory without other JavaScript files. This entire directory will be packaged in the binary tarball. When installing from a remote package this directory will be overwritten with the contents of the tarball.

Note: This property supports variables based on [Versioning](#versioning).

###### host

A url to the remote location where you've published tarball binaries (must be `https` not `http`).

It is highly recommended that you use Amazon S3. The reasons are:

- Various node-pre-gyp commands like `publish` and `info` only work with an S3 host.
- S3 is a very solid hosting platform for distributing large files.
- We provide detailed documentation for using [S3 hosting](#s3-hosting) with node-pre-gyp.

Why then not require S3? Because while some applications using node-pre-gyp need to distribute binaries as large as 20-30 MB, others might have very small binaries and might wish to store them in a GitHub repo. This is not recommended, but if an author really wants to host in a non-S3 location then it should be possible.

It should also be mentioned that there is an optional and entirely separate npm module called [node-pre-gyp-github](https://github.com/bchr02/node-pre-gyp-github) which is intended to complement node-pre-gyp and be installed along with it. It provides the ability to store and publish your binaries within your repository's GitHub Releases if you would rather not use S3 directly. Installation and usage instructions can be found [here](https://github.com/bchr02/node-pre-gyp-github), but the basic premise is that instead of using the `node-pre-gyp publish` command you would use `node-pre-gyp-github publish`.

##### The `binary` object's other optional S3 properties

If you are not using a standard s3 path like `bucket_name.s3(.-)region.amazonaws.com`, you might get an error on `publish` because node-pre-gyp extracts the region and bucket from the `host` url. For example, you may have an on-premises s3-compatible storage server, or may have configured a specific dns redirecting to an s3 endpoint. In these cases, you can explicitly set the `region` and `bucket` properties to tell node-pre-gyp to use these values instead of guessing from the `host` property. The following values can be used in the `binary` section:

###### host

The url to the remote server root location (must be `https` not `http`).

###### bucket

The bucket name where your tarball binaries should be located.

###### region

Your S3 server region.

###### s3ForcePathStyle

Set `s3ForcePathStyle` to true if the endpoint url should not be prefixed with the bucket name. If false (default), the server endpoint would be constructed as `bucket_name.your_server.com`.
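A hypothetical `binary` section combining these overrides (the endpoint, bucket, and region values are placeholders, not from the upstream README):

```js
"binary": {
  "module_name": "your_module",
  "module_path": "./lib/binding/",
  "host": "https://storage.example.internal",  // placeholder on-premises endpoint
  "bucket": "your-binaries",                   // explicit bucket instead of parsing host
  "region": "us-east-1",                       // explicit region
  "s3ForcePathStyle": true                     // endpoint is not bucket-prefixed
}
```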
##### The `binary` object has optional properties

###### remote_path

It **is recommended** that you customize this property. This is an extra path to use for publishing and finding remote tarballs. The default value for `remote_path` is `""` meaning that if you do not provide it then all packages will be published at the base of the `host`. It is recommended to provide a value like `./{name}/v{version}` to help organize remote packages in the case that you choose to publish multiple node addons to the same `host`. A sketch of the resulting layout follows the `package_name` section below.

Note: This property supports variables based on [Versioning](#versioning).

###### package_name

It is **not recommended** to override this property unless you are also overriding the `remote_path`. This is the versioned name of the remote tarball containing the binary `.node` module and any supporting files you've placed inside the `module_path` directory. Unless you specify `package_name` in your `package.json` it defaults to `{module_name}-v{version}-{node_abi}-{platform}-{arch}.tar.gz`, which allows your binary to work across node versions, platforms, and architectures. If you are using a `remote_path` that is also versioned by `./{module_name}/v{version}` then you could remove these variables from the `package_name` and just use: `{node_abi}-{platform}-{arch}.tar.gz`. Then your remote tarball will be looked up at, for example, `https://example.com/your-module/v0.1.0/node-v11-linux-x64.tar.gz`.

Avoiding the version of your module in the `package_name`, and instead only embedding it in a directory name, can be useful when you want to make a quick tag of your module that does not change any C++ code. In this case you can just copy binaries to the new version behind the scenes like:

```sh
aws s3 sync --acl public-read s3://mapbox-node-binary/sqlite3/v3.0.3/ s3://mapbox-node-binary/sqlite3/v3.0.4/
```

Note: This property supports variables based on [Versioning](#versioning).
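A hypothetical `binary` section combining `remote_path` with a trimmed `package_name` as described above (the values are placeholders):

```js
"binary": {
  "module_name": "your_module",
  "module_path": "./lib/binding/",
  "host": "https://example.com",
  "remote_path": "./{module_name}/v{version}",
  "package_name": "{node_abi}-{platform}-{arch}.tar.gz"
}
// publish/install then resolve tarballs at, e.g.:
// https://example.com/your_module/v0.1.0/node-v11-linux-x64.tar.gz
```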
#### 2) Add a new target to binding.gyp

`node-pre-gyp` calls out to `node-gyp` to compile the module and passes variables along like [module_name](#module_name) and [module_path](#module_path).

A new target must be added to `binding.gyp` that moves the compiled `.node` module from `./build/Release/module_name.node` into the directory specified by `module_path`.

Add a target like this at the end of your `targets` list:

```js
{
  "target_name": "action_after_build",
  "type": "none",
  "dependencies": [ "<(module_name)" ],
  "copies": [
    {
      "files": [ "<(PRODUCT_DIR)/<(module_name).node" ],
      "destination": "<(module_path)"
    }
  ]
}
```

For a full example see [node-addon-example's binding.gyp](https://github.com/springmeyer/node-addon-example/blob/2ff60a8ded7f042864ad21db00c3a5a06cf47075/binding.gyp).

#### 3) Dynamically require your `.node`

Inside the main js file that requires your addon module you are likely currently doing:

```js
var binding = require('../build/Release/binding.node');
```

or:

```js
var bindings = require('./bindings')
```

Change those lines to:

```js
var binary = require('@mapbox/node-pre-gyp');
var path = require('path');
var binding_path = binary.find(path.resolve(path.join(__dirname,'./package.json')));
var binding = require(binding_path);
```

For a full example see [node-addon-example's index.js](https://github.com/springmeyer/node-addon-example/blob/2ff60a8ded7f042864ad21db00c3a5a06cf47075/index.js#L1-L4)

#### 4) Build and package your app

Now build your module from source:

    npm install --build-from-source

The `--build-from-source` flag tells `node-pre-gyp` to not look for a remote package and instead dispatch to node-gyp to build.

`node-pre-gyp` should now also be installed as a local dependency so the command line tool it offers can be found at `./node_modules/.bin/node-pre-gyp`.

#### 5) Test

Now `npm test` should work just as it did before.

#### 6) Publish the tarball

Then package your app:

    ./node_modules/.bin/node-pre-gyp package

Once packaged, you can publish:

    ./node_modules/.bin/node-pre-gyp publish

Currently the `publish` command pushes your binary to S3. This requires:

- You have installed `aws-sdk` with `npm install aws-sdk`
- You have created a bucket already.
- The `host` points to an S3 http or https endpoint.
- You have configured node-pre-gyp to read your S3 credentials (see [S3 hosting](#s3-hosting) for details).

You can also host your binaries elsewhere. To do this requires:

- You manually publish the binary created by the `package` command to an `https` endpoint
- Ensure that the `host` value points to your custom `https` endpoint.

#### 7) Automate builds

Now you need to publish builds for all the platforms and node versions you wish to support. This is best automated.

- See [Appveyor Automation](#appveyor-automation) for how to auto-publish builds on Windows.
- See [Travis Automation](#travis-automation) for how to auto-publish builds on OS X and Linux.

#### 8) You're done!

Now publish your module to the npm registry. Users will now be able to install your module from a binary.

What will happen is this:

1. `npm install <your package>` will pull from the npm registry
2. npm will run the `install` script which will call out to `node-pre-gyp`
3. `node-pre-gyp` will fetch the binary `.node` module and unpack it in the right place
4. Assuming that all worked, you are done

If a binary was not available for a given platform and `--fallback-to-build` was used then `node-gyp rebuild` will be called to try to source compile the module.

#### 9) One more option

It may be that you want to work with two s3 buckets, one for staging and one for production; this arrangement makes it less likely to accidentally overwrite a production binary. It also allows the production environment to have more restrictive permissions than staging while still enabling publishing when developing and testing.

The binary.host property can be set at execution time. In order to do so all of the following conditions must be true.

- binary.host is falsey or not present
- binary.staging_host is not empty
- binary.production_host is not empty

If any of these checks fail then the operation will not perform execution time determination of the s3 target.

If the command being executed is either "publish" or "unpublish" then the default is set to `binary.staging_host`. In all other cases the default is `binary.production_host`.

The command-line options `--s3_host=staging` or `--s3_host=production` override the default. If `s3_host` is present and not `staging` or `production` an exception is thrown.

This allows installing from staging by specifying `--s3_host=staging`. And it requires specifying `--s3_host=production` in order to publish to, or unpublish from, production, making accidental errors less likely. A sketch of such a configuration follows.
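A hypothetical `binary` section for the two-bucket arrangement (the hostnames are placeholders, not from the upstream README):

```js
"binary": {
  "module_name": "your_module",
  "module_path": "./lib/binding/",
  "staging_host": "https://your-module-staging.s3-us-west-1.amazonaws.com",
  "production_host": "https://your-module.s3-us-west-1.amazonaws.com"
}
// `node-pre-gyp publish` then targets staging by default;
// `node-pre-gyp publish --s3_host=production` targets production.
```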
||||
|
||||
## Node-API Considerations

[Node-API](https://nodejs.org/api/n-api.html#n_api_node_api), which was previously known as N-API, is an ABI-stable alternative to previous technologies such as [nan](https://github.com/nodejs/nan) which are tied to a specific Node runtime engine. Node-API is Node runtime engine agnostic and guarantees modules created today will continue to run, without changes, into the future.

Using `node-pre-gyp` with Node-API projects requires a handful of additional configuration values and imposes some additional requirements.

The most significant difference is that a Node-API module can be coded to target multiple Node-API versions. Therefore, a Node-API module must declare in its `package.json` file which Node-API versions the module is designed to run against. In addition, since multiple builds may be required for a single module, path and file names must be specified in a way that avoids naming conflicts.

### The `napi_versions` array property

A Node-API module must declare in its `package.json` file the Node-API versions the module is intended to support. This is accomplished by including a `napi_versions` array property in the `binary` object. For example:

```js
"binary": {
    "module_name": "your_module",
    "module_path": "your_module_path",
    "host": "https://your_bucket.s3-us-west-1.amazonaws.com",
    "napi_versions": [1,3]
}
```

If the `napi_versions` array property is *not* present, `node-pre-gyp` operates as it always has. Including the `napi_versions` array property instructs `node-pre-gyp` that this is a Node-API module build.

When the `napi_versions` array property is present, `node-pre-gyp` fires off multiple operations, one for each of the Node-API versions in the array. In the example above, two operations are initiated, one for Node-API version 1 and a second for Node-API version 3. How this version number is communicated is described next.

### The `napi_build_version` value

For each of the Node-API module operations `node-pre-gyp` initiates, it ensures that the `napi_build_version` is set appropriately.

This value is of importance in two areas:

1. The C/C++ code which needs to know against which Node-API version it should compile.
2. `node-pre-gyp` itself, which must assign appropriate path and file names to avoid collisions.

### Defining `NAPI_VERSION` for the C/C++ code

The `napi_build_version` value is communicated to the C/C++ code by adding this code to the `binding.gyp` file:

```
"defines": [
    "NAPI_VERSION=<(napi_build_version)",
]
```

This ensures that `NAPI_VERSION`, an integer value, is declared appropriately to the C/C++ code for each build.

> Note that earlier versions of this document recommended defining the symbol `NAPI_BUILD_VERSION`. `NAPI_VERSION` is preferred because it is used by the Node-API C/C++ headers to configure the specific Node-API versions being requested.

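For orientation, the `defines` entry sits inside a target in `binding.gyp`. A minimal sketch (the target and source names here are placeholders, not from this document):

```
"targets": [
    {
        "target_name": "your_module",
        "sources": [ "src/your_module.cc" ],
        "defines": [ "NAPI_VERSION=<(napi_build_version)" ]
    }
]
```
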
### Path and file naming requirements in `package.json`

Since `node-pre-gyp` fires off multiple operations for each request, it is essential that path and file names be created in such a way as to avoid collisions. This is accomplished by imposing additional path and file naming requirements.

Specifically, when performing Node-API builds, the `{napi_build_version}` text configuration value *must* be present in the `module_path` property. In addition, the `{napi_build_version}` text configuration value *must* be present in either the `remote_path` or `package_name` property. (No problem if it's in both.)

Here's an example:

```js
"binary": {
    "module_name": "your_module",
    "module_path": "./lib/binding/napi-v{napi_build_version}",
    "remote_path": "./{module_name}/v{version}/{configuration}/",
    "package_name": "{platform}-{arch}-napi-v{napi_build_version}.tar.gz",
    "host": "https://your_bucket.s3-us-west-1.amazonaws.com",
    "napi_versions": [1,3]
}
```

## Supporting both Node-API and NAN builds

You may have a legacy native add-on that you wish to continue supporting for those versions of Node that do not support Node-API, as you add Node-API support for later Node versions. This can be accomplished by specifying the `node_napi_label` configuration value in the package.json `binary.package_name` property.

Placing the configuration value `node_napi_label` in the package.json `binary.package_name` property instructs `node-pre-gyp` to build all viable Node-API binaries supported by the current Node instance. If the current Node instance does not support Node-API, `node-pre-gyp` will request a traditional, non-Node-API build.

The configuration value `node_napi_label` is set by `node-pre-gyp` to the type of build created, `napi` or `node`, and the version number. For Node-API builds, the string contains the Node-API version and has values like `napi-v3`. For traditional, non-Node-API builds, the string contains the ABI version with values like `node-v46`.

Here's how the `binary` configuration above might be changed to support both Node-API and NAN builds:

```js
"binary": {
    "module_name": "your_module",
    "module_path": "./lib/binding/{node_napi_label}",
    "remote_path": "./{module_name}/v{version}/{configuration}/",
    "package_name": "{platform}-{arch}-{node_napi_label}.tar.gz",
    "host": "https://your_bucket.s3-us-west-1.amazonaws.com",
    "napi_versions": [1,3]
}
```

The C/C++ symbol `NAPI_VERSION` can be used to distinguish Node-API and non-Node-API builds. The value of `NAPI_VERSION` is set to the integer Node-API version for Node-API builds and is set to `0` for non-Node-API builds.

For example:

```C
#if NAPI_VERSION
// Node-API code goes here
#else
// NAN code goes here
#endif
```

### Two additional configuration values

The following two configuration values, which were implemented in previous versions of `node-pre-gyp`, continue to exist but have been superseded by the `node_napi_label` configuration value described above.

1. `napi_version` - If Node-API is supported by the currently executing Node instance, this value is the Node-API version number supported by Node. If Node-API is not supported, this value is an empty string.
2. `node_abi_napi` - If the value returned for `napi_version` is non-empty, this value is `'napi'`. If the value returned for `napi_version` is empty, this value is the value returned for `node_abi`.

These values are present for use in the `binding.gyp` file and may be used as `{napi_version}` and `{node_abi_napi}` for text substitution in the `binary` properties of the `package.json` file.

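As an illustrative sketch only (not a recommendation over `node_napi_label`), the legacy substitutions could appear in a `binary` property like so:

```js
// hypothetical sketch using the superseded substitutions described above
"package_name": "{platform}-{arch}-{node_abi_napi}-{napi_version}.tar.gz"
```
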
## S3 Hosting

You can host wherever you choose but S3 is cheap, `node-pre-gyp publish` expects it, and S3 can be integrated well with [Travis.ci](http://travis-ci.org) to automate builds for OS X and Ubuntu, and with [Appveyor](http://appveyor.com) to automate builds for Windows. Here is an approach to do this:

First, get set up locally and test the workflow:

#### 1) Create an S3 bucket

And have your **key** and **secret key** ready for writing to the bucket.

It is recommended to create an IAM user with a policy that only gives permissions to the specific bucket you plan to publish to. This can be done in the [IAM console](https://console.aws.amazon.com/iam/) by:

1. adding a new user,
2. choosing `Attach User Policy`,
3. using the `Policy Generator`,
4. selecting `Amazon S3` for the service,
5. adding the actions: `DeleteObject`, `GetObject`, `GetObjectAcl`, `ListBucket`, `HeadBucket`, `PutObject`, `PutObjectAcl`,
6. adding an ARN of `arn:aws:s3:::bucket/*` (replacing `bucket` with your bucket name), and finally
7. clicking `Add Statement` and saving the policy.

It should generate a policy like:

```js
{
    "Version": "2012-10-17",
    "Statement": [
        {
            "Sid": "objects",
            "Effect": "Allow",
            "Action": [
                "s3:PutObject",
                "s3:GetObjectAcl",
                "s3:GetObject",
                "s3:DeleteObject",
                "s3:PutObjectAcl"
            ],
            "Resource": "arn:aws:s3:::your-bucket-name/*"
        },
        {
            "Sid": "bucket",
            "Effect": "Allow",
            "Action": "s3:ListBucket",
            "Resource": "arn:aws:s3:::your-bucket-name"
        },
        {
            "Sid": "buckets",
            "Effect": "Allow",
            "Action": "s3:HeadBucket",
            "Resource": "*"
        }
    ]
}
```

#### 2) Install node-pre-gyp

Either install it globally:

    npm install node-pre-gyp -g

Or put the local version on your PATH:

    export PATH=`pwd`/node_modules/.bin/:$PATH

#### 3) Configure AWS credentials

It is recommended to configure the AWS JS SDK v2 used internally by `node-pre-gyp` by setting these environment variables:

- AWS_ACCESS_KEY_ID
- AWS_SECRET_ACCESS_KEY

Alternatively, you can use the `Shared Config File` mentioned [in the AWS JS SDK v2 docs](https://docs.aws.amazon.com/sdk-for-javascript/v2/developer-guide/configuring-the-jssdk.html).

#### 4) Package and publish your build

Install the `aws-sdk`:

    npm install aws-sdk

Then publish:

    node-pre-gyp package publish

Note: if you hit an error like `Hostname/IP doesn't match certificate's altnames` it may mean that you need to provide the `region` option in your config.

## Appveyor Automation

[Appveyor](http://www.appveyor.com/) can build binaries and publish the results per commit and supports:

- Windows Visual Studio 2013 and related compilers
- Both 64 bit (x64) and 32 bit (x86) build configurations
- Multiple Node.js versions

For an example of doing this see [node-sqlite3's appveyor.yml](https://github.com/mapbox/node-sqlite3/blob/master/appveyor.yml).

Below is a guide to getting set up:

#### 1) Create a free Appveyor account

Go to https://ci.appveyor.com/signup/free and sign in with your GitHub account.

#### 2) Create a new project

Go to https://ci.appveyor.com/projects/new and select the GitHub repo for your module.

#### 3) Add appveyor.yml and push it

Once you have committed an `appveyor.yml` ([appveyor.yml reference](http://www.appveyor.com/docs/appveyor-yml)) to your GitHub repo and pushed it, AppVeyor should automatically start building your project.

#### 4) Create secure variables

Encrypt your S3 AWS keys by going to <https://ci.appveyor.com/tools/encrypt> and hitting the `encrypt` button.

Then paste the result into your `appveyor.yml`:

```yml
environment:
  AWS_ACCESS_KEY_ID:
    secure: Dn9HKdLNYvDgPdQOzRq/DqZ/MPhjknRHB1o+/lVU8MA=
  AWS_SECRET_ACCESS_KEY:
    secure: W1rwNoSnOku1r+28gnoufO8UA8iWADmL1LiiwH9IOkIVhDTNGdGPJqAlLjNqwLnL
```

NOTE: keys are per account but not per repo (this is different from Travis, where keys are per repo but not related to the account used to encrypt them).

#### 5) Hook up publishing

Just put `node-pre-gyp package publish` in your `appveyor.yml` after `npm install`.

#### 6) Publish when you want

You might wish to publish binaries only on a specific commit. To do this you could borrow from the [Travis CI idea of commit keywords](http://about.travis-ci.org/docs/user/how-to-skip-a-build/) and add special handling for commit messages with `[publish binary]`:

    SET CM=%APPVEYOR_REPO_COMMIT_MESSAGE%
    if not "%CM%" == "%CM:[publish binary]=%" node-pre-gyp --msvs_version=2013 publish

If your commit message contains special characters (e.g. `&`) this method might fail. An alternative is to use PowerShell, which gives you additional possibilities, like ignoring case by using `ToLower()`:

    ps: if($env:APPVEYOR_REPO_COMMIT_MESSAGE.ToLower().Contains('[publish binary]')) { node-pre-gyp --msvs_version=2013 publish }

Remember this publishing is not the same as `npm publish`. We're just talking about the binary module here and not your entire npm package.

## Travis Automation

[Travis](https://travis-ci.org/) can push to S3 after a successful build and supports both:

- Ubuntu Precise and OS X (64 bit)
- Multiple Node.js versions

For an example of doing this see [node-addon-example's .travis.yml](https://github.com/springmeyer/node-addon-example/blob/2ff60a8ded7f042864ad21db00c3a5a06cf47075/.travis.yml).

Note: if you need 32 bit binaries, this can be done from a 64 bit Travis machine. See [the node-sqlite3 scripts for an example of doing this](https://github.com/mapbox/node-sqlite3/blob/bae122aa6a2b8a45f6b717fab24e207740e32b5d/scripts/build_against_node.sh#L54-L74).

Below is a guide to getting set up:

#### 1) Install the Travis gem

    gem install travis

#### 2) Create secure variables

Make sure you run this command from within the directory of your module.

Use `travis-encrypt` like:

    travis encrypt AWS_ACCESS_KEY_ID=${node_pre_gyp_accessKeyId}
    travis encrypt AWS_SECRET_ACCESS_KEY=${node_pre_gyp_secretAccessKey}

Then put those values in your `.travis.yml` like:

```yaml
env:
  global:
    - secure: F+sEL/v56CzHqmCSSES4pEyC9NeQlkoR0Gs/ZuZxX1ytrj8SKtp3MKqBj7zhIclSdXBz4Ev966Da5ctmcTd410p0b240MV6BVOkLUtkjZJyErMBOkeb8n8yVfSoeMx8RiIhBmIvEn+rlQq+bSFis61/JkE9rxsjkGRZi14hHr4M=
    - secure: o2nkUQIiABD139XS6L8pxq3XO5gch27hvm/gOdV+dzNKc/s2KomVPWcOyXNxtJGhtecAkABzaW8KHDDi5QL1kNEFx6BxFVMLO8rjFPsMVaBG9Ks6JiDQkkmrGNcnVdxI/6EKTLHTH5WLsz8+J7caDBzvKbEfTux5EamEhxIWgrI=
```

More details on Travis encryption at http://about.travis-ci.org/docs/user/encryption-keys/.

#### 3) Hook up publishing

Just put `node-pre-gyp package publish` in your `.travis.yml` after `npm install`.

##### OS X publishing

If you want binaries for OS X in addition to Linux you can enable [multi-os for Travis](http://docs.travis-ci.com/user/multi-os/#Setting-.travis.yml).

Use a configuration like:

```yml
language: cpp

os:
  - linux
  - osx

env:
  matrix:
    - NODE_VERSION="4"
    - NODE_VERSION="6"

before_install:
  - rm -rf ~/.nvm/ && git clone --depth 1 https://github.com/creationix/nvm.git ~/.nvm
  - source ~/.nvm/nvm.sh
  - nvm install $NODE_VERSION
  - nvm use $NODE_VERSION
```

See [Travis OS X Gotchas](#travis-os-x-gotchas) for why we replace the `language: node_js` and `node_js:` sections with `language: cpp` and a custom matrix.

Also create platform-specific sections for any deps that need installing. For example, if you need libpng:

```yml
- if [ $(uname -s) == 'Linux' ]; then apt-get install libpng-dev; fi;
- if [ $(uname -s) == 'Darwin' ]; then brew install libpng; fi;
```

For detailed multi-OS examples see [node-mapnik](https://github.com/mapnik/node-mapnik/blob/master/.travis.yml) and [node-sqlite3](https://github.com/mapbox/node-sqlite3/blob/master/.travis.yml).

##### Travis OS X Gotchas

First, unlike the Travis Linux machines, the OS X machines do not put `node-pre-gyp` on PATH by default. To do so you will need to:

```sh
export PATH=$(pwd)/node_modules/.bin:${PATH}
```

Second, the OS X machines do not support using a matrix for installing different Node.js versions. So you need to bootstrap the installation of Node.js in a cross-platform way.

By doing:

```yml
env:
  matrix:
    - NODE_VERSION="4"
    - NODE_VERSION="6"

before_install:
  - rm -rf ~/.nvm/ && git clone --depth 1 https://github.com/creationix/nvm.git ~/.nvm
  - source ~/.nvm/nvm.sh
  - nvm install $NODE_VERSION
  - nvm use $NODE_VERSION
```

you can easily recreate the previous behavior of this matrix:

```yml
node_js:
  - "4"
  - "6"
```

#### 4) Publish when you want

You might wish to publish binaries only on a specific commit. To do this you could borrow from the [Travis CI idea of commit keywords](http://about.travis-ci.org/docs/user/how-to-skip-a-build/) and add special handling for commit messages with `[publish binary]`:

    COMMIT_MESSAGE=$(git log --format=%B --no-merges -n 1 | tr -d '\n')
    if [[ ${COMMIT_MESSAGE} =~ "[publish binary]" ]]; then node-pre-gyp publish; fi;

Then you can trigger new binaries to be built like:

    git commit -a -m "[publish binary]"

Or, if you don't have any changes to make, simply run:

    git commit --allow-empty -m "[publish binary]"

WARNING: if you are working in a pull request and publishing binaries from there then you will want to avoid double publishing when Travis CI builds both the `push` and `pr`. You only want to run the publish on the `push` commit. See https://github.com/Project-OSRM/node-osrm/blob/8eb837abe2e2e30e595093d16e5354bc5c573575/scripts/is_pr_merge.sh which is called from https://github.com/Project-OSRM/node-osrm/blob/8eb837abe2e2e30e595093d16e5354bc5c573575/scripts/publish.sh for an example of how to do this.

Remember this publishing is not the same as `npm publish`. We're just talking about the binary module here and not your entire npm package. To automate the publishing of your entire package to npm on Travis see http://about.travis-ci.org/docs/user/deployment/npm/

# Versioning

The `binary` properties of `module_path`, `remote_path`, and `package_name` support variable substitution. The strings are evaluated by `node-pre-gyp` depending on your system and any custom build flags you passed.

- `node_abi`: The node C++ `ABI` number. This value is available in JavaScript as `process.versions.modules` as of [`>= v0.10.4 >= v0.11.7`](https://github.com/joyent/node/commit/ccabd4a6fa8a6eb79d29bc3bbe9fe2b6531c2d8e) and in C++ as the `NODE_MODULE_VERSION` define much earlier. For versions of Node before this was available we fall back to the V8 major and minor version.
- `platform` matches node's `process.platform` like `linux`, `darwin`, and `win32` unless the user passed the `--target_platform` option to override.
- `arch` matches node's `process.arch` like `x64` or `ia32` unless the user passes the `--target_arch` option to override.
- `libc` matches `require('detect-libc').family` like `glibc` or `musl` unless the user passes the `--target_libc` option to override.
- `configuration` - Either 'Release' or 'Debug' depending on if `--debug` is passed during the build.
- `module_name` - the `binary.module_name` attribute from `package.json`.
- `version` - the semver `version` value for your module from `package.json` (NOTE: ignores the `semver.build` property).
- `major`, `minor`, `patch`, and `prerelease` match the individual semver values for your module's `version`.
- `build` - the semver `build` value. For example it would be `this.that` if your package.json `version` was `v1.0.0+this.that`.
- `prerelease` - the semver `prerelease` value. For example it would be `alpha.beta` if your package.json `version` was `v1.0.0-alpha.beta`.

The options are visible in the code at <https://github.com/mapbox/node-pre-gyp/blob/612b7bca2604508d881e1187614870ba19a7f0c5/lib/util/versioning.js#L114-L127>

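To make the substitution concrete, here is an illustrative sketch; the module name, version, and expansion below are hypothetical and depend on your platform and Node version:

```js
// hypothetical: with module_name "your_module", version "1.0.0", on 64-bit
// Linux under a Node whose ABI number is 108, this package_name...
"package_name": "{module_name}-v{version}-{node_abi}-{platform}-{arch}.tar.gz"
// ...would expand to: your_module-v1.0.0-node-v108-linux-x64.tar.gz
```
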
# Download binary files from a mirror

S3 is not reliably accessible from China, for well-known reasons, so binaries may need to come from a mirror there.

Using the `npm` config argument `--{module_name}_binary_host_mirror`, binary files can be downloaded through a mirror; any `-` in `module_name` is replaced with `_`.

e.g.: Install [v8-profiler](https://www.npmjs.com/package/v8-profiler) from `npm`.

```bash
$ npm install v8-profiler --profiler_binary_host_mirror=https://npm.taobao.org/mirrors/node-inspector/
```

e.g.: Install [canvas-prebuilt](https://www.npmjs.com/package/canvas-prebuilt) from `npm`.

```bash
$ npm install canvas-prebuilt --canvas_prebuilt_binary_host_mirror=https://npm.taobao.org/mirrors/canvas-prebuilt/
```

node_modules/@mapbox/node-pre-gyp/bin/node-pre-gyp (generated, vendored, executable file, 4 lines)

@@ -0,0 +1,4 @@

#!/usr/bin/env node
'use strict';

require('../lib/main');

node_modules/@mapbox/node-pre-gyp/bin/node-pre-gyp.cmd (generated, vendored, normal file, 2 lines)

@@ -0,0 +1,2 @@

@echo off
node "%~dp0\node-pre-gyp" %*

node_modules/@mapbox/node-pre-gyp/contributing.md (generated, vendored, normal file, 10 lines)

@@ -0,0 +1,10 @@

# Contributing

### Releasing a new version:

- Ensure tests are passing on travis and appveyor
- Run `node scripts/abi_crosswalk.js` and commit any changes
- Update the changelog
- Tag a new release like: `git tag -a v0.6.34 -m "tagging v0.6.34" && git push --tags`
- Run `npm publish`

node_modules/@mapbox/node-pre-gyp/lib/build.js (generated, vendored, normal file, 51 lines)

@@ -0,0 +1,51 @@

'use strict';

module.exports = exports = build;

exports.usage = 'Attempts to compile the module by dispatching to node-gyp or nw-gyp';

const napi = require('./util/napi.js');
const compile = require('./util/compile.js');
const handle_gyp_opts = require('./util/handle_gyp_opts.js');
const configure = require('./configure.js');

function do_build(gyp, argv, callback) {
  handle_gyp_opts(gyp, argv, (err, result) => {
    let final_args = ['build'].concat(result.gyp).concat(result.pre);
    if (result.unparsed.length > 0) {
      final_args = final_args.
        concat(['--']).
        concat(result.unparsed);
    }
    if (!err && result.opts.napi_build_version) {
      napi.swap_build_dir_in(result.opts.napi_build_version);
    }
    compile.run_gyp(final_args, result.opts, (err2) => {
      if (result.opts.napi_build_version) {
        napi.swap_build_dir_out(result.opts.napi_build_version);
      }
      return callback(err2);
    });
  });
}

function build(gyp, argv, callback) {

  // Form up commands to pass to node-gyp:
  // We map `node-pre-gyp build` to `node-gyp configure build` so that we do not
  // trigger a clean and therefore do not pay the penalty of a full recompile
  if (argv.length && (argv.indexOf('rebuild') > -1)) {
    argv.shift(); // remove `rebuild`
    // here we map `node-pre-gyp rebuild` to `node-gyp rebuild` which internally means
    // "clean + configure + build" and triggers a full recompile
    compile.run_gyp(['clean'], {}, (err3) => {
      if (err3) return callback(err3);
      configure(gyp, argv, (err4) => {
        if (err4) return callback(err4);
        return do_build(gyp, argv, callback);
      });
    });
  } else {
    return do_build(gyp, argv, callback);
  }
}

node_modules/@mapbox/node-pre-gyp/lib/clean.js (generated, vendored, normal file, 31 lines)

@@ -0,0 +1,31 @@

'use strict';

module.exports = exports = clean;

exports.usage = 'Removes the entire folder containing the compiled .node module';

const rm = require('rimraf');
const exists = require('fs').exists || require('path').exists;
const versioning = require('./util/versioning.js');
const napi = require('./util/napi.js');
const path = require('path');

function clean(gyp, argv, callback) {
  const package_json = gyp.package_json;
  const napi_build_version = napi.get_napi_build_version_from_command_args(argv);
  const opts = versioning.evaluate(package_json, gyp.opts, napi_build_version);
  const to_delete = opts.module_path;
  if (!to_delete) {
    return callback(new Error('module_path is empty, refusing to delete'));
  } else if (path.normalize(to_delete) === path.normalize(process.cwd())) {
    return callback(new Error('module_path is not set, refusing to delete'));
  } else {
    exists(to_delete, (found) => {
      if (found) {
        if (!gyp.opts.silent_clean) console.log('[' + package_json.name + '] Removing "%s"', to_delete);
        return rm(to_delete, callback);
      }
      return callback();
    });
  }
}

node_modules/@mapbox/node-pre-gyp/lib/configure.js (generated, vendored, normal file, 52 lines)

@@ -0,0 +1,52 @@

'use strict';

module.exports = exports = configure;

exports.usage = 'Attempts to configure node-gyp or nw-gyp build';

const napi = require('./util/napi.js');
const compile = require('./util/compile.js');
const handle_gyp_opts = require('./util/handle_gyp_opts.js');

function configure(gyp, argv, callback) {
  handle_gyp_opts(gyp, argv, (err, result) => {
    let final_args = result.gyp.concat(result.pre);
    // pull select node-gyp configure options out of the npm environ
    const known_gyp_args = ['dist-url', 'python', 'nodedir', 'msvs_version'];
    known_gyp_args.forEach((key) => {
      const val = gyp.opts[key] || gyp.opts[key.replace('-', '_')];
      if (val) {
        final_args.push('--' + key + '=' + val);
      }
    });
    // --ensure=false tells node-gyp to re-install node development headers,
    // but it is only respected by node-gyp install, so we have to call install
    // as a separate step if the user passes it
    if (gyp.opts.ensure === false) {
      const install_args = final_args.concat(['install', '--ensure=false']);
      compile.run_gyp(install_args, result.opts, (err2) => {
        if (err2) return callback(err2);
        if (result.unparsed.length > 0) {
          final_args = final_args.
            concat(['--']).
            concat(result.unparsed);
        }
        compile.run_gyp(['configure'].concat(final_args), result.opts, (err3) => {
          return callback(err3);
        });
      });
    } else {
      if (result.unparsed.length > 0) {
        final_args = final_args.
          concat(['--']).
          concat(result.unparsed);
      }
      compile.run_gyp(['configure'].concat(final_args), result.opts, (err4) => {
        if (!err4 && result.opts.napi_build_version) {
          napi.swap_build_dir_out(result.opts.napi_build_version);
        }
        return callback(err4);
      });
    }
  });
}

node_modules/@mapbox/node-pre-gyp/lib/info.js (generated, vendored, normal file, 38 lines)

@@ -0,0 +1,38 @@

'use strict';

module.exports = exports = info;

exports.usage = 'Lists all published binaries (requires aws-sdk)';

const log = require('npmlog');
const versioning = require('./util/versioning.js');
const s3_setup = require('./util/s3_setup.js');

function info(gyp, argv, callback) {
  const package_json = gyp.package_json;
  const opts = versioning.evaluate(package_json, gyp.opts);
  const config = {};
  s3_setup.detect(opts, config);
  const s3 = s3_setup.get_s3(config);
  const s3_opts = {
    Bucket: config.bucket,
    Prefix: config.prefix
  };
  s3.listObjects(s3_opts, (err, meta) => {
    if (err && err.code === 'NotFound') {
      return callback(new Error('[' + package_json.name + '] Not found: https://' + s3_opts.Bucket + '.s3.amazonaws.com/' + config.prefix));
    } else if (err) {
      return callback(err);
    } else {
      log.verbose(JSON.stringify(meta, null, 1));
      if (meta && meta.Contents) {
        meta.Contents.forEach((obj) => {
          console.log(obj.Key);
        });
      } else {
        console.error('[' + package_json.name + '] No objects found at https://' + s3_opts.Bucket + '.s3.amazonaws.com/' + config.prefix);
      }
      return callback();
    }
  });
}

node_modules/@mapbox/node-pre-gyp/lib/install.js (generated, vendored, normal file, 235 lines)

@@ -0,0 +1,235 @@

'use strict';

module.exports = exports = install;

exports.usage = 'Attempts to install pre-built binary for module';

const fs = require('fs');
const path = require('path');
const log = require('npmlog');
const existsAsync = fs.exists || path.exists;
const versioning = require('./util/versioning.js');
const napi = require('./util/napi.js');
const makeDir = require('make-dir');
// for fetching binaries
const fetch = require('node-fetch');
const tar = require('tar');

let npgVersion = 'unknown';
try {
  // Read own package.json to get the current node-pre-gyp version.
  const ownPackageJSON = fs.readFileSync(path.join(__dirname, '..', 'package.json'), 'utf8');
  npgVersion = JSON.parse(ownPackageJSON).version;
} catch (e) {
  // do nothing
}

function place_binary(uri, targetDir, opts, callback) {
  log.http('GET', uri);

  // Try getting version info from the currently running npm.
  const envVersionInfo = process.env.npm_config_user_agent ||
    'node ' + process.version;

  const sanitized = uri.replace('+', '%2B');
  const requestOpts = {
    uri: sanitized,
    headers: {
      'User-Agent': 'node-pre-gyp (v' + npgVersion + ', ' + envVersionInfo + ')'
    },
    follow_max: 10
  };

  if (opts.cafile) {
    try {
      requestOpts.ca = fs.readFileSync(opts.cafile);
    } catch (e) {
      return callback(e);
    }
  } else if (opts.ca) {
    requestOpts.ca = opts.ca;
  }

  const proxyUrl = opts.proxy ||
    process.env.http_proxy ||
    process.env.HTTP_PROXY ||
    process.env.npm_config_proxy;
  let agent;
  if (proxyUrl) {
    const ProxyAgent = require('https-proxy-agent');
    agent = new ProxyAgent(proxyUrl);
    log.http('download', 'proxy agent configured using: "%s"', proxyUrl);
  }

  fetch(sanitized, { agent })
    .then((res) => {
      if (!res.ok) {
        throw new Error(`response status ${res.status} ${res.statusText} on ${sanitized}`);
      }
      const dataStream = res.body;

      return new Promise((resolve, reject) => {
        let extractions = 0;
        const countExtractions = (entry) => {
          extractions += 1;
          log.info('install', 'unpacking %s', entry.path);
        };

        dataStream.pipe(extract(targetDir, countExtractions))
          .on('error', (e) => {
            reject(e);
          });
        dataStream.on('end', () => {
          resolve(`extracted file count: ${extractions}`);
        });
        dataStream.on('error', (e) => {
          reject(e);
        });
      });
    })
    .then((text) => {
      log.info(text);
      callback();
    })
    .catch((e) => {
      log.error(`install ${e.message}`);
      callback(e);
    });
}

function extract(to, onentry) {
  return tar.extract({
    cwd: to,
    strip: 1,
    onentry
  });
}

function extract_from_local(from, targetDir, callback) {
  if (!fs.existsSync(from)) {
    return callback(new Error('Cannot find file ' + from));
  }
  log.info('Found local file to extract from ' + from);

  // extract helpers
  let extractCount = 0;
  function countExtractions(entry) {
    extractCount += 1;
    log.info('install', 'unpacking ' + entry.path);
  }
  function afterExtract(err) {
    if (err) return callback(err);
    if (extractCount === 0) {
      return callback(new Error('There was a fatal problem while extracting the tarball'));
    }
    log.info('tarball', 'done parsing tarball');
    callback();
  }

  fs.createReadStream(from).pipe(extract(targetDir, countExtractions))
    .on('close', afterExtract)
    .on('error', afterExtract);
}

function do_build(gyp, argv, callback) {
  const args = ['rebuild'].concat(argv);
  gyp.todo.push({ name: 'build', args: args });
  process.nextTick(callback);
}

function print_fallback_error(err, opts, package_json) {
  const fallback_message = ' (falling back to source compile with node-gyp)';
  let full_message = '';
  if (err.statusCode !== undefined) {
    // If we got a network response but failed to download,
    // it means remote binaries are not available, so let's try to help
    // the user/developer with the info to debug why
    full_message = 'Pre-built binaries not found for ' + package_json.name + '@' + package_json.version;
    full_message += ' and ' + opts.runtime + '@' + (opts.target || process.versions.node) + ' (' + opts.node_abi + ' ABI, ' + opts.libc + ')';
    full_message += fallback_message;
    log.warn('Tried to download(' + err.statusCode + '): ' + opts.hosted_tarball);
    log.warn(full_message);
    log.http(err.message);
  } else {
    // If we do not have a statusCode that means an unexpected error
    // happened and prevented an http response, so we output the exact error
    full_message = 'Pre-built binaries not installable for ' + package_json.name + '@' + package_json.version;
    full_message += ' and ' + opts.runtime + '@' + (opts.target || process.versions.node) + ' (' + opts.node_abi + ' ABI, ' + opts.libc + ')';
    full_message += fallback_message;
    log.warn(full_message);
    log.warn('Hit error ' + err.message);
  }
}

//
// install
//
function install(gyp, argv, callback) {
  const package_json = gyp.package_json;
  const napi_build_version = napi.get_napi_build_version_from_command_args(argv);
  const source_build = gyp.opts['build-from-source'] || gyp.opts.build_from_source;
  const update_binary = gyp.opts['update-binary'] || gyp.opts.update_binary;
  const should_do_source_build = source_build === package_json.name || (source_build === true || source_build === 'true');
  if (should_do_source_build) {
    log.info('build', 'requesting source compile');
    return do_build(gyp, argv, callback);
  } else {
    const fallback_to_build = gyp.opts['fallback-to-build'] || gyp.opts.fallback_to_build;
    let should_do_fallback_build = fallback_to_build === package_json.name || (fallback_to_build === true || fallback_to_build === 'true');
    // but allow override from npm
    if (process.env.npm_config_argv) {
      const cooked = JSON.parse(process.env.npm_config_argv).cooked;
      const match = cooked.indexOf('--fallback-to-build');
      if (match > -1 && cooked.length > match && cooked[match + 1] === 'false') {
        should_do_fallback_build = false;
        log.info('install', 'Build fallback disabled via npm flag: --fallback-to-build=false');
      }
    }
    let opts;
    try {
      opts = versioning.evaluate(package_json, gyp.opts, napi_build_version);
    } catch (err) {
      return callback(err);
    }

    opts.ca = gyp.opts.ca;
    opts.cafile = gyp.opts.cafile;

    const from = opts.hosted_tarball;
    const to = opts.module_path;
    const binary_module = path.join(to, opts.module_name + '.node');
    existsAsync(binary_module, (found) => {
      if (!update_binary) {
        if (found) {
          console.log('[' + package_json.name + '] Success: "' + binary_module + '" already installed');
          console.log('Pass --update-binary to reinstall or --build-from-source to recompile');
          return callback();
        }
        log.info('check', 'checked for "' + binary_module + '" (not found)');
      }

      makeDir(to).then(() => {
        const fileName = from.startsWith('file://') && from.slice('file://'.length);
        if (fileName) {
          extract_from_local(fileName, to, after_place);
        } else {
          place_binary(from, to, opts, after_place);
        }
      }).catch((err) => {
        after_place(err);
      });

      function after_place(err) {
        if (err && should_do_fallback_build) {
          print_fallback_error(err, opts, package_json);
          return do_build(gyp, argv, callback);
        } else if (err) {
          return callback(err);
        } else {
          console.log('[' + package_json.name + '] Success: "' + binary_module + '" is installed via remote');
          return callback();
        }
      }
    });
  }
}

node_modules/@mapbox/node-pre-gyp/lib/main.js (generated, vendored, normal file, 125 lines)

@@ -0,0 +1,125 @@

'use strict';

/**
 * Set the title.
 */

process.title = 'node-pre-gyp';

const node_pre_gyp = require('../');
const log = require('npmlog');

/**
 * Process and execute the selected commands.
 */

const prog = new node_pre_gyp.Run({ argv: process.argv });
let completed = false;

if (prog.todo.length === 0) {
  if (~process.argv.indexOf('-v') || ~process.argv.indexOf('--version')) {
    console.log('v%s', prog.version);
    process.exit(0);
  } else if (~process.argv.indexOf('-h') || ~process.argv.indexOf('--help')) {
    console.log('%s', prog.usage());
    process.exit(0);
  }
  console.log('%s', prog.usage());
  process.exit(1);
}

// if --no-color is passed
if (prog.opts && Object.hasOwnProperty.call(prog, 'color') && !prog.opts.color) {
  log.disableColor();
}

log.info('it worked if it ends with', 'ok');
log.verbose('cli', process.argv);
log.info('using', process.title + '@%s', prog.version);
log.info('using', 'node@%s | %s | %s', process.versions.node, process.platform, process.arch);


/**
 * Change dir if -C/--directory was passed.
 */

const dir = prog.opts.directory;
if (dir) {
  const fs = require('fs');
  try {
    const stat = fs.statSync(dir);
    if (stat.isDirectory()) {
      log.info('chdir', dir);
      process.chdir(dir);
    } else {
      log.warn('chdir', dir + ' is not a directory');
    }
  } catch (e) {
    if (e.code === 'ENOENT') {
      log.warn('chdir', dir + ' is not a directory');
    } else {
      log.warn('chdir', 'error during chdir() "%s"', e.message);
    }
  }
}

function run() {
  const command = prog.todo.shift();
  if (!command) {
    // done!
    completed = true;
    log.info('ok');
    return;
  }

  // set binary.host when appropriate. host determines the s3 target bucket.
  const target = prog.setBinaryHostProperty(command.name);
  if (target && ['install', 'publish', 'unpublish', 'info'].indexOf(command.name) >= 0) {
    log.info('using binary.host: ' + prog.package_json.binary.host);
  }

  prog.commands[command.name](command.args, function(err) {
    if (err) {
      log.error(command.name + ' error');
      log.error('stack', err.stack);
      errorMessage();
      log.error('not ok');
      console.log(err.message);
      return process.exit(1);
    }
    const args_array = [].slice.call(arguments, 1);
    if (args_array.length) {
      console.log.apply(console, args_array);
    }
    // now run the next command in the queue
    process.nextTick(run);
  });
}

process.on('exit', (code) => {
  if (!completed && !code) {
    log.error('Completion callback never invoked!');
    errorMessage();
    process.exit(6);
  }
});

process.on('uncaughtException', (err) => {
  log.error('UNCAUGHT EXCEPTION');
  log.error('stack', err.stack);
  errorMessage();
  process.exit(7);
});

function errorMessage() {
  // copied from npm's lib/util/error-handler.js
  const os = require('os');
  log.error('System', os.type() + ' ' + os.release());
  log.error('command', process.argv.map(JSON.stringify).join(' '));
  log.error('cwd', process.cwd());
  log.error('node -v', process.version);
  log.error(process.title + ' -v', 'v' + prog.package.version);
}

// start running the given commands!
run();

node_modules/@mapbox/node-pre-gyp/lib/node-pre-gyp.js (generated, vendored, normal file, 309 lines; shown in part)

@@ -0,0 +1,309 @@

'use strict';

/**
 * Module exports.
 */

module.exports = exports;

/**
 * Module dependencies.
 */

// load mocking control function for accessing s3 via https. the function is a noop always returning
// false if not mocking.
exports.mockS3Http = require('./util/s3_setup').get_mockS3Http();
exports.mockS3Http('on');
const mocking = exports.mockS3Http('get');


const fs = require('fs');
const path = require('path');
const nopt = require('nopt');
const log = require('npmlog');
log.disableProgress();
const napi = require('./util/napi.js');

const EE = require('events').EventEmitter;
const inherits = require('util').inherits;
const cli_commands = [
  'clean',
  'install',
  'reinstall',
  'build',
  'rebuild',
  'package',
  'testpackage',
  'publish',
  'unpublish',
  'info',
  'testbinary',
  'reveal',
  'configure'
];
const aliases = {};

// differentiate node-pre-gyp's logs from npm's
log.heading = 'node-pre-gyp';

if (mocking) {
  log.warn(`mocking s3 to ${process.env.node_pre_gyp_mock_s3}`);
}

// this is a getter to avoid circular reference warnings with node v14.
Object.defineProperty(exports, 'find', {
  get: function() {
    return require('./pre-binding').find;
  },
  enumerable: true
});

// in the following, "my_module" is using node-pre-gyp to
// prebuild and install pre-built binaries. "main_module"
// is using "my_module".
//
// "bin/node-pre-gyp" invokes Run() without a path. the
// expectation is that the working directory is the package
// root "my_module". this is true because in all cases npm is
// executing a script in the context of "my_module".
//
// "pre-binding.find()" is executed by "my_module" but in the
// context of "main_module". this is because "main_module" is
// executing and requires "my_module" which is then executing
// "pre-binding.find()" via "node-pre-gyp.find()", so the working
// directory is that of "main_module".
//
// that's why "find()" must pass the path to package.json.
//
function Run({ package_json_path = './package.json', argv }) {
  this.package_json_path = package_json_path;
  this.commands = {};

  const self = this;
  cli_commands.forEach((command) => {
    self.commands[command] = function(argvx, callback) {
      log.verbose('command', command, argvx);
      return require('./' + command)(self, argvx, callback);
    };
  });

  this.parseArgv(argv);

  // this is set to true after the binary.host property was set to
  // either staging_host or production_host.
  this.binaryHostSet = false;
}
inherits(Run, EE);
exports.Run = Run;
const proto = Run.prototype;

/**
 * Export the contents of the package.json.
 */

proto.package = require('../package.json');

/**
 * nopt configuration definitions
 */

proto.configDefs = {
  help: Boolean,     // everywhere
  arch: String,      // 'configure'
  debug: Boolean,    // 'build'
  directory: String, // bin
  proxy: String,     // 'install'
  loglevel: String   // everywhere
};

/**
 * nopt shorthands
 */

proto.shorthands = {
  release: '--no-debug',
  C: '--directory',
  debug: '--debug',
  j: '--jobs',
  silent: '--loglevel=silent',
  silly: '--loglevel=silly',
  verbose: '--loglevel=verbose'
};

/**
 * expose the command aliases for the bin file to use.
 */

proto.aliases = aliases;

/**
 * Parses the given argv array and sets the 'opts', 'argv',
 * 'command', and 'package_json' properties.
 */

proto.parseArgv = function parseOpts(argv) {
  this.opts = nopt(this.configDefs, this.shorthands, argv);
  this.argv = this.opts.argv.remain.slice();
  const commands = this.todo = [];

  // create a copy of the argv array with aliases mapped
  argv = this.argv.map((arg) => {
    // is this an alias?
    if (arg in this.aliases) {
      arg = this.aliases[arg];
    }
    return arg;
  });

  // process the mapped args into "command" objects ("name" and "args" props)
  argv.slice().forEach((arg) => {
    if (arg in this.commands) {
      const args = argv.splice(0, argv.indexOf(arg));
      argv.shift();
      if (commands.length > 0) {
        commands[commands.length - 1].args = args;
      }
      commands.push({ name: arg, args: [] });
    }
  });
  if (commands.length > 0) {
    commands[commands.length - 1].args = argv.splice(0);
  }

  // if a directory was specified package.json is assumed to be relative
  // to it.
  let package_json_path = this.package_json_path;
  if (this.opts.directory) {
    package_json_path = path.join(this.opts.directory, package_json_path);
  }

  this.package_json = JSON.parse(fs.readFileSync(package_json_path));

  // expand commands entries for multiple napi builds
  this.todo = napi.expand_commands(this.package_json, this.opts, commands);

  // support for inheriting config env variables from npm
  const npm_config_prefix = 'npm_config_';
  Object.keys(process.env).forEach((name) => {
    if (name.indexOf(npm_config_prefix) !== 0) return;
    const val = process.env[name];
    if (name === npm_config_prefix + 'loglevel') {
      log.level = val;
    } else {
      // add the user-defined options to the config
      name = name.substring(npm_config_prefix.length);
      // avoid npm argv clobber already present args
      // which avoids problem of 'npm test' calling
      // script that runs unique npm install commands
      if (name === 'argv') {
        if (this.opts.argv &&
            this.opts.argv.remain &&
            this.opts.argv.remain.length) {
          // do nothing
        } else {
          this.opts[name] = val;
        }
      } else {
        this.opts[name] = val;
      }
    }
  });

  if (this.opts.loglevel) {
    log.level = this.opts.loglevel;
  }
  log.resume();
};

/**
 * allow the binary.host property to be set at execution time.
 *
 * for this to take effect requires all the following to be true.
 * - binary is a property in package.json
 * - binary.host is falsey
 * - binary.staging_host is not empty
 * - binary.production_host is not empty
 *
 * if any of the previous checks fail then the function returns an empty string
 * and makes no changes to package.json's binary property.
 *
 *
 * if command is "publish" then the default is set to "binary.staging_host"
 * if command is not "publish" then the default is set to "binary.production_host"
 *
 * if the command-line option '--s3_host' is set to "staging" or "production" then
 * "binary.host" is set to the specified "staging_host" or "production_host". if
 * '--s3_host' is any other value an exception is thrown.
 *
 * if '--s3_host' is not present then "binary.host" is set to the default as above.
 *
 * this strategy was chosen so that any command other than "publish" or "unpublish" uses "production"
 * as the default without requiring any command-line options but that "publish" and "unpublish" require
 * '--s3_host production_host' to be specified in order to *really* publish (or unpublish). publishing
 * to staging can be done freely without worrying about disturbing any production releases.
 */
proto.setBinaryHostProperty = function(command) {
  if (this.binaryHostSet) {
    return this.package_json.binary.host;
  }
  const p = this.package_json;
  // don't set anything if host is present. it must be left blank to trigger this.
  if (!p || !p.binary || p.binary.host) {
    return '';
  }
  // and both staging and production must be present. errors will be reported later.
  if (!p.binary.staging_host || !p.binary.production_host) {
    return '';
  }
  let target = 'production_host';
  if (command === 'publish' || command === 'unpublish') {
    target = 'staging_host';
  }
  // the environment variable has priority over the default or the command line. if
  // either the env var or the command line option are invalid throw an error.
  const npg_s3_host = process.env.node_pre_gyp_s3_host;
  if (npg_s3_host === 'staging' || npg_s3_host === 'production') {
    target = `${npg_s3_host}_host`;
  } else if (this.opts['s3_host'] === 'staging' || this.opts['s3_host'] === 'production') {
    target = `${this.opts['s3_host']}_host`;
  } else if (this.opts['s3_host'] || npg_s3_host) {
    throw new Error(`invalid s3_host ${this.opts['s3_host'] || npg_s3_host}`);
  }

  p.binary.host = p.binary[target];
  this.binaryHostSet = true;

  return p.binary.host;
};

/**
 * Returns the usage instructions for node-pre-gyp.
 */

proto.usage = function usage() {
  const str = [
    '',
    ' Usage: node-pre-gyp <command> [options]',
    '',
    ' where <command> is one of:',
    cli_commands.map((c) => {
      return ' - ' + c + ' - ' + require('./' + c).usage;
    }).join('\n'),
    '',
    'node-pre-gyp@' + this.version + ' ' + path.resolve(__dirname, '..'),
    'node@' + process.versions.node
  ].join('\n');
  return str;
};

/**
 * Version number getter.
 */

Object.defineProperty(proto, 'version', {
  get: function() {
    return this.package.version;
  },
  enumerable: true
});

Some files were not shown because too many files have changed in this diff.