diff --git a/node_modules/.bin/cssesc b/node_modules/.bin/cssesc new file mode 120000 index 0000000..487b689 --- /dev/null +++ b/node_modules/.bin/cssesc @@ -0,0 +1 @@ +../cssesc/bin/cssesc \ No newline at end of file diff --git a/node_modules/.bin/jiti b/node_modules/.bin/jiti new file mode 120000 index 0000000..031ee3f --- /dev/null +++ b/node_modules/.bin/jiti @@ -0,0 +1 @@ +../jiti/bin/jiti.js \ No newline at end of file diff --git a/node_modules/.bin/nanoid b/node_modules/.bin/nanoid new file mode 120000 index 0000000..e2be547 --- /dev/null +++ b/node_modules/.bin/nanoid @@ -0,0 +1 @@ +../nanoid/bin/nanoid.cjs \ No newline at end of file diff --git a/node_modules/.bin/resolve b/node_modules/.bin/resolve new file mode 120000 index 0000000..b6afda6 --- /dev/null +++ b/node_modules/.bin/resolve @@ -0,0 +1 @@ +../resolve/bin/resolve \ No newline at end of file diff --git a/node_modules/.bin/sucrase b/node_modules/.bin/sucrase new file mode 120000 index 0000000..0ac7e77 --- /dev/null +++ b/node_modules/.bin/sucrase @@ -0,0 +1 @@ +../sucrase/bin/sucrase \ No newline at end of file diff --git a/node_modules/.bin/sucrase-node b/node_modules/.bin/sucrase-node new file mode 120000 index 0000000..8b96fae --- /dev/null +++ b/node_modules/.bin/sucrase-node @@ -0,0 +1 @@ +../sucrase/bin/sucrase-node \ No newline at end of file diff --git a/node_modules/.bin/tailwind b/node_modules/.bin/tailwind new file mode 120000 index 0000000..d497797 --- /dev/null +++ b/node_modules/.bin/tailwind @@ -0,0 +1 @@ +../tailwindcss/lib/cli.js \ No newline at end of file diff --git a/node_modules/.bin/tailwindcss b/node_modules/.bin/tailwindcss new file mode 120000 index 0000000..d497797 --- /dev/null +++ b/node_modules/.bin/tailwindcss @@ -0,0 +1 @@ +../tailwindcss/lib/cli.js \ No newline at end of file diff --git a/node_modules/.package-lock.json b/node_modules/.package-lock.json index f136b6e..d7a3527 100644 --- a/node_modules/.package-lock.json +++ b/node_modules/.package-lock.json @@ -4,6 +4,107 @@ "lockfileVersion": 3, "requires": true, "packages": { + "node_modules/@alloc/quick-lru": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/@alloc/quick-lru/-/quick-lru-5.2.0.tgz", + "integrity": "sha512-UrcABB+4bUrFABwbluTIBErXwvbsU/V7TZWfmbgJfbkwiBuziS9gxdODUyuiecfdGQ85jglMW6juS3+z5TsKLw==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@jridgewell/gen-mapping": { + "version": "0.3.3", + "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.3.tgz", + "integrity": "sha512-HLhSWOLRi875zjjMG/r+Nv0oCW8umGb0BgEhyX3dDX3egwZtB8PqLnjz3yedt8R5StBrzcg4aBpnh8UA9D1BoQ==", + "dev": true, + "dependencies": { + "@jridgewell/set-array": "^1.0.1", + "@jridgewell/sourcemap-codec": "^1.4.10", + "@jridgewell/trace-mapping": "^0.3.9" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/resolve-uri": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.0.tgz", + "integrity": "sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w==", + "dev": true, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/set-array": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.1.2.tgz", + "integrity": "sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw==", + "dev": true, + "engines": { + 
"node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/sourcemap-codec": { + "version": "1.4.15", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.15.tgz", + "integrity": "sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg==", + "dev": true + }, + "node_modules/@jridgewell/trace-mapping": { + "version": "0.3.18", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.18.tgz", + "integrity": "sha512-w+niJYzMHdd7USdiH2U6869nqhD2nbfZXND5Yp93qIbEmnDNk7PD48o+YchRVpzMU7M6jVCbenTR7PA1FLQ9pA==", + "dev": true, + "dependencies": { + "@jridgewell/resolve-uri": "3.1.0", + "@jridgewell/sourcemap-codec": "1.4.14" + } + }, + "node_modules/@jridgewell/trace-mapping/node_modules/@jridgewell/sourcemap-codec": { + "version": "1.4.14", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.14.tgz", + "integrity": "sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw==", + "dev": true + }, + "node_modules/@nodelib/fs.scandir": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", + "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", + "dev": true, + "dependencies": { + "@nodelib/fs.stat": "2.0.5", + "run-parallel": "^1.1.9" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.stat": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", + "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", + "dev": true, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.walk": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", + "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", + "dev": true, + "dependencies": { + "@nodelib/fs.scandir": "2.1.5", + "fastq": "^1.6.0" + }, + "engines": { + "node": ">= 8" + } + }, "node_modules/@types/long": { "version": "4.0.2", "resolved": "https://registry.npmjs.org/@types/long/-/long-4.0.2.tgz", @@ -62,6 +163,31 @@ "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, + "node_modules/any-promise": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/any-promise/-/any-promise-1.3.0.tgz", + "integrity": "sha512-7UvmKalWRt1wgjL1RrGxoSJW/0QZFIegpeGvZG9kjp8vrRu55XTHbwnqq2GpXm9uLbcuhxm3IqX9OB4MZR1b2A==", + "dev": true + }, + "node_modules/anymatch": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz", + "integrity": "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==", + "dev": true, + "dependencies": { + "normalize-path": "^3.0.0", + "picomatch": "^2.0.4" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/arg": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/arg/-/arg-5.0.2.tgz", + "integrity": "sha512-PYjyFOLKQ9y57JvQ6QLo8dAgNqswh8M1RMJYdQduT6xbWSgK36P/Z/v+p888pM69jMMfS8Xd8F6I1kQ/I9HUGg==", + "dev": true + }, "node_modules/array-flatten": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz", @@ -77,6 +203,15 @@ "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", "integrity": 
"sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==" }, + "node_modules/binary-extensions": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.2.0.tgz", + "integrity": "sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==", + "dev": true, + "engines": { + "node": ">=8" + } + }, "node_modules/body-parser": { "version": "1.20.2", "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.2.tgz", @@ -109,6 +244,18 @@ "concat-map": "0.0.1" } }, + "node_modules/braces": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", + "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", + "dev": true, + "dependencies": { + "fill-range": "^7.0.1" + }, + "engines": { + "node": ">=8" + } + }, "node_modules/bson": { "version": "5.3.0", "resolved": "https://registry.npmjs.org/bson/-/bson-5.3.0.tgz", @@ -137,6 +284,15 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/camelcase-css": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/camelcase-css/-/camelcase-css-2.0.1.tgz", + "integrity": "sha512-QOSvevhslijgYwRx6Rv7zKdMF8lbRmx+uQGx2+vDc+KI/eBnsy9kit5aj23AgGu3pa4t9AgwbnXWqS+iOY+2aA==", + "dev": true, + "engines": { + "node": ">= 6" + } + }, "node_modules/cassandra-driver": { "version": "4.6.4", "resolved": "https://registry.npmjs.org/cassandra-driver/-/cassandra-driver-4.6.4.tgz", @@ -166,6 +322,45 @@ "url": "https://github.com/chalk/chalk?sponsor=1" } }, + "node_modules/chokidar": { + "version": "3.5.3", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.5.3.tgz", + "integrity": "sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw==", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://paulmillr.com/funding/" + } + ], + "dependencies": { + "anymatch": "~3.1.2", + "braces": "~3.0.2", + "glob-parent": "~5.1.2", + "is-binary-path": "~2.1.0", + "is-glob": "~4.0.1", + "normalize-path": "~3.0.0", + "readdirp": "~3.6.0" + }, + "engines": { + "node": ">= 8.10.0" + }, + "optionalDependencies": { + "fsevents": "~2.3.2" + } + }, + "node_modules/chokidar/node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, "node_modules/color-convert": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", @@ -182,6 +377,15 @@ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" }, + "node_modules/commander": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/commander/-/commander-4.1.1.tgz", + "integrity": "sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA==", + "dev": true, + "engines": { + "node": ">= 6" + } + }, "node_modules/concat-map": { "version": "0.0.1", "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", @@ -219,6 +423,18 @@ "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz", 
"integrity": "sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ==" }, + "node_modules/cssesc": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/cssesc/-/cssesc-3.0.0.tgz", + "integrity": "sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg==", + "dev": true, + "bin": { + "cssesc": "bin/cssesc" + }, + "engines": { + "node": ">=4" + } + }, "node_modules/debug": { "version": "2.6.9", "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", @@ -244,6 +460,18 @@ "npm": "1.2.8000 || >= 1.4.16" } }, + "node_modules/didyoumean": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/didyoumean/-/didyoumean-1.2.2.tgz", + "integrity": "sha512-gxtyfqMg7GKyhQmb056K7M3xszy/myH8w+B4RT+QXBQsvAOdc3XymqDDPHx1BgPgsdAA5SIifona89YtRATDzw==", + "dev": true + }, + "node_modules/dlv": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/dlv/-/dlv-1.1.3.tgz", + "integrity": "sha512-+HlytyjlPKnIG8XuRG8WvmBP8xs8P71y+SKKS6ZXWoEgLuePxtDoUEiH7WkdePWrQ5JBpE6aoVqfZfJUQkjXwA==", + "dev": true + }, "node_modules/ee-first": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", @@ -362,6 +590,43 @@ "node": ">= 0.8" } }, + "node_modules/fast-glob": { + "version": "3.2.12", + "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.2.12.tgz", + "integrity": "sha512-DVj4CQIYYow0BlaelwK1pHl5n5cRSJfM60UA0zK891sVInoPri2Ekj7+e1CT3/3qxXenpI+nBBmQAcJPJgaj4w==", + "dev": true, + "dependencies": { + "@nodelib/fs.stat": "^2.0.2", + "@nodelib/fs.walk": "^1.2.3", + "glob-parent": "^5.1.2", + "merge2": "^1.3.0", + "micromatch": "^4.0.4" + }, + "engines": { + "node": ">=8.6.0" + } + }, + "node_modules/fast-glob/node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/fastq": { + "version": "1.15.0", + "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.15.0.tgz", + "integrity": "sha512-wBrocU2LCXXa+lWBt8RoIRD89Fi8OdABODa/kEnyeyjS5aZO5/GNvI5sEINADqP/h8M29UHTHUb53sUu5Ihqdw==", + "dev": true, + "dependencies": { + "reusify": "^1.0.4" + } + }, "node_modules/filelist": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/filelist/-/filelist-1.0.4.tgz", @@ -389,6 +654,18 @@ "node": ">=10" } }, + "node_modules/fill-range": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", + "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", + "dev": true, + "dependencies": { + "to-regex-range": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, "node_modules/finalhandler": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.2.0.tgz", @@ -422,6 +699,31 @@ "node": ">= 0.6" } }, + "node_modules/fs": { + "version": "0.0.1-security", + "resolved": "https://registry.npmjs.org/fs/-/fs-0.0.1-security.tgz", + "integrity": "sha512-3XY9e1pP0CVEUCdj5BmfIZxRBTSDycnbqhIOGec9QYtmVH2fbLpj86CFWkrNOkt/Fvty4KZG5lTglL9j/gJ87w==" + }, + "node_modules/fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": 
"sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", + "dev": true + }, + "node_modules/fsevents": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz", + "integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==", + "dev": true, + "hasInstallScript": true, + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, "node_modules/function-bind": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", @@ -441,6 +743,38 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/glob": { + "version": "7.1.6", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz", + "integrity": "sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==", + "dev": true, + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/glob-parent": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", + "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", + "dev": true, + "dependencies": { + "is-glob": "^4.0.3" + }, + "engines": { + "node": ">=10.13.0" + } + }, "node_modules/has": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", @@ -508,6 +842,16 @@ "node": ">=0.10.0" } }, + "node_modules/inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", + "dev": true, + "dependencies": { + "once": "^1.3.0", + "wrappy": "1" + } + }, "node_modules/inherits": { "version": "2.0.4", "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", @@ -526,6 +870,60 @@ "node": ">= 0.10" } }, + "node_modules/is-binary-path": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", + "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==", + "dev": true, + "dependencies": { + "binary-extensions": "^2.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-core-module": { + "version": "2.12.1", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.12.1.tgz", + "integrity": "sha512-Q4ZuBAe2FUsKtyQJoQHlvP8OvBERxO3jEmy1I7hcRXcJBGGHFh/aJBswbXuS9sgrDH2QUO8ilkwNPHvHMd8clg==", + "dev": true, + "dependencies": { + "has": "^1.0.3" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-glob": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "dev": true, + "dependencies": { + 
"is-extglob": "^2.1.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true, + "engines": { + "node": ">=0.12.0" + } + }, "node_modules/jake": { "version": "10.8.7", "resolved": "https://registry.npmjs.org/jake/-/jake-10.8.7.tgz", @@ -543,6 +941,30 @@ "node": ">=10" } }, + "node_modules/jiti": { + "version": "1.18.2", + "resolved": "https://registry.npmjs.org/jiti/-/jiti-1.18.2.tgz", + "integrity": "sha512-QAdOptna2NYiSSpv0O/BwoHBSmz4YhpzJHyi+fnMRTXFjp7B8i/YG5Z8IfusxB1ufjcD2Sre1F3R+nX3fvy7gg==", + "dev": true, + "bin": { + "jiti": "bin/jiti.js" + } + }, + "node_modules/lilconfig": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/lilconfig/-/lilconfig-2.1.0.tgz", + "integrity": "sha512-utWOt/GHzuUxnLKxB6dk81RoOeoNeHgbrXiuGk4yyF5qlRz+iIVWu56E2fqGHFrXz0QNUhLB/8nKqvRH66JKGQ==", + "dev": true, + "engines": { + "node": ">=10" + } + }, + "node_modules/lines-and-columns": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz", + "integrity": "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==", + "dev": true + }, "node_modules/long": { "version": "2.4.0", "resolved": "https://registry.npmjs.org/long/-/long-2.4.0.tgz", @@ -570,6 +992,15 @@ "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.1.tgz", "integrity": "sha512-cCi6g3/Zr1iqQi6ySbseM1Xvooa98N0w31jzUYrXPX2xqObmFGHJ0tQ5u74H3mVh7wLouTseZyYIq39g8cNp1w==" }, + "node_modules/merge2": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", + "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", + "dev": true, + "engines": { + "node": ">= 8" + } + }, "node_modules/methods": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz", @@ -578,6 +1009,19 @@ "node": ">= 0.6" } }, + "node_modules/micromatch": { + "version": "4.0.5", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.5.tgz", + "integrity": "sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==", + "dev": true, + "dependencies": { + "braces": "^3.0.2", + "picomatch": "^2.3.1" + }, + "engines": { + "node": ">=8.6" + } + }, "node_modules/mime": { "version": "1.6.0", "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz", @@ -665,6 +1109,34 @@ "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" }, + "node_modules/mz": { + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/mz/-/mz-2.7.0.tgz", + "integrity": "sha512-z81GNO7nnYMEhrGh9LeymoE4+Yr0Wn5McHIZMK5cfQCl+NDX08sCZgUc9/6MHni9IWuFLm1Z3HTCXu2z9fN62Q==", + "dev": true, + "dependencies": { + "any-promise": "^1.0.0", + "object-assign": "^4.0.1", + "thenify-all": "^1.0.0" + } + }, + "node_modules/nanoid": { + "version": "3.3.6", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.6.tgz", + "integrity": "sha512-BGcqMMJuToF7i1rt+2PWSNVnWIkGCU78jBG3RxO/bZlnZPK2Cmi2QaffxGO/2RvWi9sL+FAiRiXMgsyxQ1DIDA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "bin": { + "nanoid": 
"bin/nanoid.cjs" + }, + "engines": { + "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" + } + }, "node_modules/negotiator": { "version": "0.6.3", "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.3.tgz", @@ -673,6 +1145,33 @@ "node": ">= 0.6" } }, + "node_modules/normalize-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", + "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/object-assign": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", + "integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/object-hash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/object-hash/-/object-hash-3.0.0.tgz", + "integrity": "sha512-RSn9F68PjH9HqtltsSnqYC1XXoWe9Bju5+213R98cNGttag9q9yAOTzdbsqvIa7aNm5WffBZFpWYr2aWrklWAw==", + "dev": true, + "engines": { + "node": ">= 6" + } + }, "node_modules/object-inspect": { "version": "1.12.3", "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.3.tgz", @@ -692,6 +1191,15 @@ "node": ">= 0.8" } }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "dev": true, + "dependencies": { + "wrappy": "1" + } + }, "node_modules/parseurl": { "version": "1.3.3", "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz", @@ -700,11 +1208,223 @@ "node": ">= 0.8" } }, + "node_modules/path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/path-parse": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", + "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", + "dev": true + }, "node_modules/path-to-regexp": { "version": "0.1.7", "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz", "integrity": "sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ==" }, + "node_modules/picocolors": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.0.tgz", + "integrity": "sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==" + }, + "node_modules/picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "dev": true, + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/pify": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", + "integrity": "sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog==", + "dev": true, + "engines": { + 
"node": ">=0.10.0" + } + }, + "node_modules/pirates": { + "version": "4.0.5", + "resolved": "https://registry.npmjs.org/pirates/-/pirates-4.0.5.tgz", + "integrity": "sha512-8V9+HQPupnaXMA23c5hvl69zXvTwTzyAYasnkb0Tts4XvO4CliqONMOnvlq26rkhLC3nWDFBJf73LU1e1VZLaQ==", + "dev": true, + "engines": { + "node": ">= 6" + } + }, + "node_modules/postcss": { + "version": "8.4.24", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.24.tgz", + "integrity": "sha512-M0RzbcI0sO/XJNucsGjvWU9ERWxb/ytp1w6dKtxTKgixdtQDq4rmx/g8W1hnaheq9jgwL/oyEdH5Bc4WwJKMqg==", + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/postcss" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "dependencies": { + "nanoid": "^3.3.6", + "picocolors": "^1.0.0", + "source-map-js": "^1.0.2" + }, + "engines": { + "node": "^10 || ^12 || >=14" + } + }, + "node_modules/postcss-import": { + "version": "15.1.0", + "resolved": "https://registry.npmjs.org/postcss-import/-/postcss-import-15.1.0.tgz", + "integrity": "sha512-hpr+J05B2FVYUAXHeK1YyI267J/dDDhMU6B6civm8hSY1jYJnBXxzKDKDswzJmtLHryrjhnDjqqp/49t8FALew==", + "dev": true, + "dependencies": { + "postcss-value-parser": "^4.0.0", + "read-cache": "^1.0.0", + "resolve": "^1.1.7" + }, + "engines": { + "node": ">=14.0.0" + }, + "peerDependencies": { + "postcss": "^8.0.0" + } + }, + "node_modules/postcss-js": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/postcss-js/-/postcss-js-4.0.1.tgz", + "integrity": "sha512-dDLF8pEO191hJMtlHFPRa8xsizHaM82MLfNkUHdUtVEV3tgTp5oj+8qbEqYM57SLfc74KSbw//4SeJma2LRVIw==", + "dev": true, + "dependencies": { + "camelcase-css": "^2.0.1" + }, + "engines": { + "node": "^12 || ^14 || >= 16" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + "peerDependencies": { + "postcss": "^8.4.21" + } + }, + "node_modules/postcss-less": { + "version": "3.1.4", + "resolved": "https://registry.npmjs.org/postcss-less/-/postcss-less-3.1.4.tgz", + "integrity": "sha512-7TvleQWNM2QLcHqvudt3VYjULVB49uiW6XzEUFmvwHzvsOEF5MwBrIXZDJQvJNFGjJQTzSzZnDoCJ8h/ljyGXA==", + "dependencies": { + "postcss": "^7.0.14" + }, + "engines": { + "node": ">=6.14.4" + } + }, + "node_modules/postcss-less/node_modules/picocolors": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-0.2.1.tgz", + "integrity": "sha512-cMlDqaLEqfSaW8Z7N5Jw+lyIW869EzT73/F5lhtY9cLGoVxSXznfgfXMO0Z5K0o0Q2TkTXq+0KFsdnSe3jDViA==" + }, + "node_modules/postcss-less/node_modules/postcss": { + "version": "7.0.39", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-7.0.39.tgz", + "integrity": "sha512-yioayjNbHn6z1/Bywyb2Y4s3yvDAeXGOyxqD+LnVOinq6Mdmd++SW2wUNVzavyyHxd6+DxzWGIuosg6P1Rj8uA==", + "dependencies": { + "picocolors": "^0.2.1", + "source-map": "^0.6.1" + }, + "engines": { + "node": ">=6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + } + }, + "node_modules/postcss-load-config": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/postcss-load-config/-/postcss-load-config-4.0.1.tgz", + "integrity": "sha512-vEJIc8RdiBRu3oRAI0ymerOn+7rPuMvRXslTvZUKZonDHFIczxztIyJ1urxM1x9JXEikvpWWTUUqal5j/8QgvA==", + "dev": true, + "dependencies": { + "lilconfig": "^2.0.5", + "yaml": "^2.1.1" + }, + "engines": { + "node": ">= 14" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" 
+ }, + "peerDependencies": { + "postcss": ">=8.0.9", + "ts-node": ">=9.0.0" + }, + "peerDependenciesMeta": { + "postcss": { + "optional": true + }, + "ts-node": { + "optional": true + } + } + }, + "node_modules/postcss-nested": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/postcss-nested/-/postcss-nested-6.0.1.tgz", + "integrity": "sha512-mEp4xPMi5bSWiMbsgoPfcP74lsWLHkQbZc3sY+jWYd65CUwXrUaTp0fmNpa01ZcETKlIgUdFN/MpS2xZtqL9dQ==", + "dev": true, + "dependencies": { + "postcss-selector-parser": "^6.0.11" + }, + "engines": { + "node": ">=12.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + "peerDependencies": { + "postcss": "^8.2.14" + } + }, + "node_modules/postcss-selector-parser": { + "version": "6.0.13", + "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.0.13.tgz", + "integrity": "sha512-EaV1Gl4mUEV4ddhDnv/xtj7sxwrwxdetHdWUGnT4VJQf+4d05v6lHYZr8N573k5Z0BViss7BDhfWtKS3+sfAqQ==", + "dev": true, + "dependencies": { + "cssesc": "^3.0.0", + "util-deprecate": "^1.0.2" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/postcss-value-parser": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-4.2.0.tgz", + "integrity": "sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ==", + "dev": true + }, "node_modules/proxy-addr": { "version": "2.0.7", "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.7.tgz", @@ -739,6 +1459,26 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/queue-microtask": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", + "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, "node_modules/range-parser": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz", @@ -761,6 +1501,77 @@ "node": ">= 0.8" } }, + "node_modules/read-cache": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/read-cache/-/read-cache-1.0.0.tgz", + "integrity": "sha512-Owdv/Ft7IjOgm/i0xvNDZ1LrRANRfew4b2prF3OWMQLxLfu3bS8FVhCsrSCMK4lR56Y9ya+AThoTpDCTxCmpRA==", + "dev": true, + "dependencies": { + "pify": "^2.3.0" + } + }, + "node_modules/readdirp": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz", + "integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==", + "dev": true, + "dependencies": { + "picomatch": "^2.2.1" + }, + "engines": { + "node": ">=8.10.0" + } + }, + "node_modules/resolve": { + "version": "1.22.2", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.2.tgz", + "integrity": "sha512-Sb+mjNHOULsBv818T40qSPeRiuWLyaGMa5ewydRLFimneixmVy2zdivRl+AF6jaYPC8ERxGDmFSiqui6SfPd+g==", + "dev": true, + "dependencies": { + "is-core-module": "^2.11.0", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + }, + "bin": { + "resolve": "bin/resolve" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/reusify": { + "version": "1.0.4", + "resolved": 
"https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz", + "integrity": "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==", + "dev": true, + "engines": { + "iojs": ">=1.0.0", + "node": ">=0.10.0" + } + }, + "node_modules/run-parallel": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", + "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "dependencies": { + "queue-microtask": "^1.2.2" + } + }, "node_modules/safe-buffer": { "version": "5.2.1", "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", @@ -879,6 +1690,22 @@ "npm": ">= 3.0.0" } }, + "node_modules/source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/source-map-js": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.0.2.tgz", + "integrity": "sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw==", + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/sparse-bitfield": { "version": "3.0.3", "resolved": "https://registry.npmjs.org/sparse-bitfield/-/sparse-bitfield-3.0.3.tgz", @@ -896,6 +1723,28 @@ "node": ">= 0.8" } }, + "node_modules/sucrase": { + "version": "3.32.0", + "resolved": "https://registry.npmjs.org/sucrase/-/sucrase-3.32.0.tgz", + "integrity": "sha512-ydQOU34rpSyj2TGyz4D2p8rbktIOZ8QY9s+DGLvFU1i5pWJE8vkpruCjGCMHsdXwnD7JDcS+noSwM/a7zyNFDQ==", + "dev": true, + "dependencies": { + "@jridgewell/gen-mapping": "^0.3.2", + "commander": "^4.0.0", + "glob": "7.1.6", + "lines-and-columns": "^1.1.6", + "mz": "^2.7.0", + "pirates": "^4.0.1", + "ts-interface-checker": "^0.1.9" + }, + "bin": { + "sucrase": "bin/sucrase", + "sucrase-node": "bin/sucrase-node" + }, + "engines": { + "node": ">=8" + } + }, "node_modules/supports-color": { "version": "7.2.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", @@ -907,6 +1756,89 @@ "node": ">=8" } }, + "node_modules/supports-preserve-symlinks-flag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", + "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", + "dev": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/tailwindcss": { + "version": "3.3.2", + "resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-3.3.2.tgz", + "integrity": "sha512-9jPkMiIBXvPc2KywkraqsUfbfj+dHDb+JPWtSJa9MLFdrPyazI7q6WX2sUrm7R9eVR7qqv3Pas7EvQFzxKnI6w==", + "dev": true, + "dependencies": { + "@alloc/quick-lru": "^5.2.0", + "arg": "^5.0.2", + "chokidar": "^3.5.3", + "didyoumean": "^1.2.2", + "dlv": "^1.1.3", + "fast-glob": "^3.2.12", + "glob-parent": "^6.0.2", + "is-glob": "^4.0.3", + "jiti": "^1.18.2", + "lilconfig": "^2.1.0", + "micromatch": "^4.0.5", + "normalize-path": "^3.0.0", + 
"object-hash": "^3.0.0", + "picocolors": "^1.0.0", + "postcss": "^8.4.23", + "postcss-import": "^15.1.0", + "postcss-js": "^4.0.1", + "postcss-load-config": "^4.0.1", + "postcss-nested": "^6.0.1", + "postcss-selector-parser": "^6.0.11", + "postcss-value-parser": "^4.2.0", + "resolve": "^1.22.2", + "sucrase": "^3.32.0" + }, + "bin": { + "tailwind": "lib/cli.js", + "tailwindcss": "lib/cli.js" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/thenify": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/thenify/-/thenify-3.3.1.tgz", + "integrity": "sha512-RVZSIV5IG10Hk3enotrhvz0T9em6cyHBLkH/YAZuKqd8hRkKhSfCGIcP2KUY0EPxndzANBmNllzWPwak+bheSw==", + "dev": true, + "dependencies": { + "any-promise": "^1.0.0" + } + }, + "node_modules/thenify-all": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/thenify-all/-/thenify-all-1.6.0.tgz", + "integrity": "sha512-RNxQH/qI8/t3thXJDwcstUO4zeqo64+Uy/+sNVRBx4Xn2OX+OZ9oP+iJnNFqplFra2ZUVeKCSa2oVWi3T4uVmA==", + "dev": true, + "dependencies": { + "thenify": ">= 3.1.0 < 4" + }, + "engines": { + "node": ">=0.8" + } + }, + "node_modules/to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "dependencies": { + "is-number": "^7.0.0" + }, + "engines": { + "node": ">=8.0" + } + }, "node_modules/toidentifier": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz", @@ -926,6 +1858,12 @@ "node": ">=12" } }, + "node_modules/ts-interface-checker": { + "version": "0.1.13", + "resolved": "https://registry.npmjs.org/ts-interface-checker/-/ts-interface-checker-0.1.13.tgz", + "integrity": "sha512-Y/arvbn+rrz3JCKl9C4kVNfTfSm2/mEp5FSz5EsZSANGPSlQrpRI5M4PKF+mJnE52jOO90PnPSc3Ur3bTQw0gA==", + "dev": true + }, "node_modules/type-is": { "version": "1.6.18", "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz", @@ -946,6 +1884,12 @@ "node": ">= 0.8" } }, + "node_modules/util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==", + "dev": true + }, "node_modules/utils-merge": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz", @@ -981,6 +1925,21 @@ "engines": { "node": ">=12" } + }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", + "dev": true + }, + "node_modules/yaml": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.3.1.tgz", + "integrity": "sha512-2eHWfjaoXgTBC2jNM1LRef62VQa0umtvRiDSk6HSzW7RvS5YtkabJrwYLLEKWBc8a5U2PTSCs+dJjUTJdlHsWQ==", + "dev": true, + "engines": { + "node": ">= 14" + } } } } diff --git a/node_modules/@alloc/quick-lru/index.d.ts b/node_modules/@alloc/quick-lru/index.d.ts new file mode 100644 index 0000000..eb819ba --- /dev/null +++ b/node_modules/@alloc/quick-lru/index.d.ts @@ -0,0 +1,128 @@ +declare namespace QuickLRU { + interface Options { + /** + The maximum number of milliseconds an item should remain in the cache. 
+ + @default Infinity + + By default, `maxAge` will be `Infinity`, which means that items will never expire. + Lazy expiration upon the next write or read call. + + Individual expiration of an item can be specified by the `set(key, value, maxAge)` method. + */ + readonly maxAge?: number; + + /** + The maximum number of items before evicting the least recently used items. + */ + readonly maxSize: number; + + /** + Called right before an item is evicted from the cache. + + Useful for side effects or for items like object URLs that need explicit cleanup (`revokeObjectURL`). + */ + onEviction?: (key: KeyType, value: ValueType) => void; + } +} + +declare class QuickLRU + implements Iterable<[KeyType, ValueType]> { + /** + The stored item count. + */ + readonly size: number; + + /** + Simple ["Least Recently Used" (LRU) cache](https://en.m.wikipedia.org/wiki/Cache_replacement_policies#Least_Recently_Used_.28LRU.29). + + The instance is [`iterable`](https://developer.mozilla.org/en/docs/Web/JavaScript/Reference/Iteration_protocols) so you can use it directly in a [`for…of`](https://developer.mozilla.org/en/docs/Web/JavaScript/Reference/Statements/for...of) loop. + + @example + ``` + import QuickLRU = require('quick-lru'); + + const lru = new QuickLRU({maxSize: 1000}); + + lru.set('🦄', '🌈'); + + lru.has('🦄'); + //=> true + + lru.get('🦄'); + //=> '🌈' + ``` + */ + constructor(options: QuickLRU.Options); + + [Symbol.iterator](): IterableIterator<[KeyType, ValueType]>; + + /** + Set an item. Returns the instance. + + Individual expiration of an item can be specified with the `maxAge` option. If not specified, the global `maxAge` value will be used in case it is specified in the constructor, otherwise the item will never expire. + + @returns The list instance. + */ + set(key: KeyType, value: ValueType, options?: {maxAge?: number}): this; + + /** + Get an item. + + @returns The stored item or `undefined`. + */ + get(key: KeyType): ValueType | undefined; + + /** + Check if an item exists. + */ + has(key: KeyType): boolean; + + /** + Get an item without marking it as recently used. + + @returns The stored item or `undefined`. + */ + peek(key: KeyType): ValueType | undefined; + + /** + Delete an item. + + @returns `true` if the item is removed or `false` if the item doesn't exist. + */ + delete(key: KeyType): boolean; + + /** + Delete all items. + */ + clear(): void; + + /** + Update the `maxSize` in-place, discarding items as necessary. Insertion order is mostly preserved, though this is not a strong guarantee. + + Useful for on-the-fly tuning of cache sizes in live systems. + */ + resize(maxSize: number): void; + + /** + Iterable for all the keys. + */ + keys(): IterableIterator; + + /** + Iterable for all the values. + */ + values(): IterableIterator; + + /** + Iterable for all entries, starting with the oldest (ascending in recency). + */ + entriesAscending(): IterableIterator<[KeyType, ValueType]>; + + /** + Iterable for all entries, starting with the newest (descending in recency). 
+ */ + entriesDescending(): IterableIterator<[KeyType, ValueType]>; +} + +export = QuickLRU; diff --git a/node_modules/@alloc/quick-lru/index.js b/node_modules/@alloc/quick-lru/index.js new file mode 100644 index 0000000..7eeced2 --- /dev/null +++ b/node_modules/@alloc/quick-lru/index.js @@ -0,0 +1,263 @@ +'use strict'; + +class QuickLRU { + constructor(options = {}) { + if (!(options.maxSize && options.maxSize > 0)) { + throw new TypeError('`maxSize` must be a number greater than 0'); + } + + if (typeof options.maxAge === 'number' && options.maxAge === 0) { + throw new TypeError('`maxAge` must be a number greater than 0'); + } + + this.maxSize = options.maxSize; + this.maxAge = options.maxAge || Infinity; + this.onEviction = options.onEviction; + this.cache = new Map(); + this.oldCache = new Map(); + this._size = 0; + } + + _emitEvictions(cache) { + if (typeof this.onEviction !== 'function') { + return; + } + + for (const [key, item] of cache) { + this.onEviction(key, item.value); + } + } + + _deleteIfExpired(key, item) { + if (typeof item.expiry === 'number' && item.expiry <= Date.now()) { + if (typeof this.onEviction === 'function') { + this.onEviction(key, item.value); + } + + return this.delete(key); + } + + return false; + } + + _getOrDeleteIfExpired(key, item) { + const deleted = this._deleteIfExpired(key, item); + if (deleted === false) { + return item.value; + } + } + + _getItemValue(key, item) { + return item.expiry ? this._getOrDeleteIfExpired(key, item) : item.value; + } + + _peek(key, cache) { + const item = cache.get(key); + + return this._getItemValue(key, item); + } + + _set(key, value) { + this.cache.set(key, value); + this._size++; + + if (this._size >= this.maxSize) { + this._size = 0; + this._emitEvictions(this.oldCache); + this.oldCache = this.cache; + this.cache = new Map(); + } + } + + _moveToRecent(key, item) { + this.oldCache.delete(key); + this._set(key, item); + } + + * _entriesAscending() { + for (const item of this.oldCache) { + const [key, value] = item; + if (!this.cache.has(key)) { + const deleted = this._deleteIfExpired(key, value); + if (deleted === false) { + yield item; + } + } + } + + for (const item of this.cache) { + const [key, value] = item; + const deleted = this._deleteIfExpired(key, value); + if (deleted === false) { + yield item; + } + } + } + + get(key) { + if (this.cache.has(key)) { + const item = this.cache.get(key); + + return this._getItemValue(key, item); + } + + if (this.oldCache.has(key)) { + const item = this.oldCache.get(key); + if (this._deleteIfExpired(key, item) === false) { + this._moveToRecent(key, item); + return item.value; + } + } + } + + set(key, value, {maxAge = this.maxAge === Infinity ? 
undefined : Date.now() + this.maxAge} = {}) { + if (this.cache.has(key)) { + this.cache.set(key, { + value, + maxAge + }); + } else { + this._set(key, {value, expiry: maxAge}); + } + } + + has(key) { + if (this.cache.has(key)) { + return !this._deleteIfExpired(key, this.cache.get(key)); + } + + if (this.oldCache.has(key)) { + return !this._deleteIfExpired(key, this.oldCache.get(key)); + } + + return false; + } + + peek(key) { + if (this.cache.has(key)) { + return this._peek(key, this.cache); + } + + if (this.oldCache.has(key)) { + return this._peek(key, this.oldCache); + } + } + + delete(key) { + const deleted = this.cache.delete(key); + if (deleted) { + this._size--; + } + + return this.oldCache.delete(key) || deleted; + } + + clear() { + this.cache.clear(); + this.oldCache.clear(); + this._size = 0; + } + + resize(newSize) { + if (!(newSize && newSize > 0)) { + throw new TypeError('`maxSize` must be a number greater than 0'); + } + + const items = [...this._entriesAscending()]; + const removeCount = items.length - newSize; + if (removeCount < 0) { + this.cache = new Map(items); + this.oldCache = new Map(); + this._size = items.length; + } else { + if (removeCount > 0) { + this._emitEvictions(items.slice(0, removeCount)); + } + + this.oldCache = new Map(items.slice(removeCount)); + this.cache = new Map(); + this._size = 0; + } + + this.maxSize = newSize; + } + + * keys() { + for (const [key] of this) { + yield key; + } + } + + * values() { + for (const [, value] of this) { + yield value; + } + } + + * [Symbol.iterator]() { + for (const item of this.cache) { + const [key, value] = item; + const deleted = this._deleteIfExpired(key, value); + if (deleted === false) { + yield [key, value.value]; + } + } + + for (const item of this.oldCache) { + const [key, value] = item; + if (!this.cache.has(key)) { + const deleted = this._deleteIfExpired(key, value); + if (deleted === false) { + yield [key, value.value]; + } + } + } + } + + * entriesDescending() { + let items = [...this.cache]; + for (let i = items.length - 1; i >= 0; --i) { + const item = items[i]; + const [key, value] = item; + const deleted = this._deleteIfExpired(key, value); + if (deleted === false) { + yield [key, value.value]; + } + } + + items = [...this.oldCache]; + for (let i = items.length - 1; i >= 0; --i) { + const item = items[i]; + const [key, value] = item; + if (!this.cache.has(key)) { + const deleted = this._deleteIfExpired(key, value); + if (deleted === false) { + yield [key, value.value]; + } + } + } + } + + * entriesAscending() { + for (const [key, value] of this._entriesAscending()) { + yield [key, value.value]; + } + } + + get size() { + if (!this._size) { + return this.oldCache.size; + } + + let oldCacheSize = 0; + for (const key of this.oldCache.keys()) { + if (!this.cache.has(key)) { + oldCacheSize++; + } + } + + return Math.min(this._size + oldCacheSize, this.maxSize); + } +} + +module.exports = QuickLRU; diff --git a/node_modules/@alloc/quick-lru/license b/node_modules/@alloc/quick-lru/license new file mode 100644 index 0000000..e7af2f7 --- /dev/null +++ b/node_modules/@alloc/quick-lru/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit 
persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/@alloc/quick-lru/package.json b/node_modules/@alloc/quick-lru/package.json new file mode 100644 index 0000000..21f1072 --- /dev/null +++ b/node_modules/@alloc/quick-lru/package.json @@ -0,0 +1,43 @@ +{ + "name": "@alloc/quick-lru", + "version": "5.2.0", + "description": "Simple “Least Recently Used” (LRU) cache", + "license": "MIT", + "repository": "sindresorhus/quick-lru", + "funding": "https://github.com/sponsors/sindresorhus", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "https://sindresorhus.com" + }, + "engines": { + "node": ">=10" + }, + "scripts": { + "test": "xo && nyc ava && tsd" + }, + "files": [ + "index.js", + "index.d.ts" + ], + "keywords": [ + "lru", + "quick", + "cache", + "caching", + "least", + "recently", + "used", + "fast", + "map", + "hash", + "buffer" + ], + "devDependencies": { + "ava": "^2.0.0", + "coveralls": "^3.0.3", + "nyc": "^15.0.0", + "tsd": "^0.11.0", + "xo": "^0.26.0" + } +} diff --git a/node_modules/@alloc/quick-lru/readme.md b/node_modules/@alloc/quick-lru/readme.md new file mode 100644 index 0000000..7187ba5 --- /dev/null +++ b/node_modules/@alloc/quick-lru/readme.md @@ -0,0 +1,139 @@ +# quick-lru [![Build Status](https://travis-ci.org/sindresorhus/quick-lru.svg?branch=master)](https://travis-ci.org/sindresorhus/quick-lru) [![Coverage Status](https://coveralls.io/repos/github/sindresorhus/quick-lru/badge.svg?branch=master)](https://coveralls.io/github/sindresorhus/quick-lru?branch=master) + +> Simple [“Least Recently Used” (LRU) cache](https://en.m.wikipedia.org/wiki/Cache_replacement_policies#Least_Recently_Used_.28LRU.29) + +Useful when you need to cache something and limit memory usage. + +Inspired by the [`hashlru` algorithm](https://github.com/dominictarr/hashlru#algorithm), but instead uses [`Map`](https://developer.mozilla.org/en/docs/Web/JavaScript/Reference/Global_Objects/Map) to support keys of any type, not just strings, and values can be `undefined`. + +## Install + +``` +$ npm install quick-lru +``` + +## Usage + +```js +const QuickLRU = require('quick-lru'); + +const lru = new QuickLRU({maxSize: 1000}); + +lru.set('🦄', '🌈'); + +lru.has('🦄'); +//=> true + +lru.get('🦄'); +//=> '🌈' +``` + +## API + +### new QuickLRU(options?) + +Returns a new instance. + +### options + +Type: `object` + +#### maxSize + +*Required*\ +Type: `number` + +The maximum number of items before evicting the least recently used items. + +#### maxAge + +Type: `number`\ +Default: `Infinity` + +The maximum number of milliseconds an item should remain in cache. +By default maxAge will be Infinity, which means that items will never expire. + +Lazy expiration happens upon the next `write` or `read` call. + +Individual expiration of an item can be specified by the `set(key, value, options)` method. 
+ +#### onEviction + +*Optional*\ +Type: `(key, value) => void` + +Called right before an item is evicted from the cache. + +Useful for side effects or for items like object URLs that need explicit cleanup (`revokeObjectURL`). + +### Instance + +The instance is [`iterable`](https://developer.mozilla.org/en/docs/Web/JavaScript/Reference/Iteration_protocols) so you can use it directly in a [`for…of`](https://developer.mozilla.org/en/docs/Web/JavaScript/Reference/Statements/for...of) loop. + +Both `key` and `value` can be of any type. + +#### .set(key, value, options?) + +Set an item. Returns the instance. + +Individual expiration of an item can be specified with the `maxAge` option. If not specified, the global `maxAge` value will be used in case it is specified on the constructor, otherwise the item will never expire. + +#### .get(key) + +Get an item. + +#### .has(key) + +Check if an item exists. + +#### .peek(key) + +Get an item without marking it as recently used. + +#### .delete(key) + +Delete an item. + +Returns `true` if the item is removed or `false` if the item doesn't exist. + +#### .clear() + +Delete all items. + +#### .resize(maxSize) + +Update the `maxSize`, discarding items as necessary. Insertion order is mostly preserved, though this is not a strong guarantee. + +Useful for on-the-fly tuning of cache sizes in live systems. + +#### .keys() + +Iterable for all the keys. + +#### .values() + +Iterable for all the values. + +#### .entriesAscending() + +Iterable for all entries, starting with the oldest (ascending in recency). + +#### .entriesDescending() + +Iterable for all entries, starting with the newest (descending in recency). + +#### .size + +The stored item count. + +--- + +
diff --git a/node_modules/@jridgewell/gen-mapping/LICENSE b/node_modules/@jridgewell/gen-mapping/LICENSE new file mode 100644 index 0000000..352f071 --- /dev/null +++ b/node_modules/@jridgewell/gen-mapping/LICENSE @@ -0,0 +1,19 @@ +Copyright 2022 Justin Ridgewell + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/@jridgewell/gen-mapping/README.md b/node_modules/@jridgewell/gen-mapping/README.md new file mode 100644 index 0000000..4066cdb --- /dev/null +++ b/node_modules/@jridgewell/gen-mapping/README.md @@ -0,0 +1,227 @@ +# @jridgewell/gen-mapping + +> Generate source maps + +`gen-mapping` allows you to generate a source map during transpilation or minification. +With a source map, you're able to trace the original location in the source file, either in Chrome's +DevTools or using a library like [`@jridgewell/trace-mapping`][trace-mapping]. + +You may already be familiar with the [`source-map`][source-map] package's `SourceMapGenerator`. This +provides the same `addMapping` and `setSourceContent` API. + +## Installation + +```sh +npm install @jridgewell/gen-mapping +``` + +## Usage + +```typescript +import { GenMapping, addMapping, setSourceContent, toEncodedMap, toDecodedMap } from '@jridgewell/gen-mapping'; + +const map = new GenMapping({ + file: 'output.js', + sourceRoot: 'https://example.com/', +}); + +setSourceContent(map, 'input.js', `function foo() {}`); + +addMapping(map, { + // Lines start at line 1, columns at column 0. + generated: { line: 1, column: 0 }, + source: 'input.js', + original: { line: 1, column: 0 }, +}); + +addMapping(map, { + generated: { line: 1, column: 9 }, + source: 'input.js', + original: { line: 1, column: 9 }, + name: 'foo', +}); + +assert.deepEqual(toDecodedMap(map), { + version: 3, + file: 'output.js', + names: ['foo'], + sourceRoot: 'https://example.com/', + sources: ['input.js'], + sourcesContent: ['function foo() {}'], + mappings: [ + [ [0, 0, 0, 0], [9, 0, 0, 9, 0] ] + ], +}); + +assert.deepEqual(toEncodedMap(map), { + version: 3, + file: 'output.js', + names: ['foo'], + sourceRoot: 'https://example.com/', + sources: ['input.js'], + sourcesContent: ['function foo() {}'], + mappings: 'AAAA,SAASA', +}); +``` + +### Smaller Sourcemaps + +Not everything needs to be added to a sourcemap, and needless markings can cause signficantly +larger file sizes. `gen-mapping` exposes `maybeAddSegment`/`maybeAddMapping` APIs that will +intelligently determine if this marking adds useful information. If not, the marking will be +skipped. 
+ +```typescript +import { maybeAddMapping } from '@jridgewell/gen-mapping'; + +const map = new GenMapping(); + +// Adding a sourceless marking at the beginning of a line isn't useful. +maybeAddMapping(map, { + generated: { line: 1, column: 0 }, +}); + +// Adding a new source marking is useful. +maybeAddMapping(map, { + generated: { line: 1, column: 0 }, + source: 'input.js', + original: { line: 1, column: 0 }, +}); + +// But adding another marking pointing to the exact same original location isn't, even if the +// generated column changed. +maybeAddMapping(map, { + generated: { line: 1, column: 9 }, + source: 'input.js', + original: { line: 1, column: 0 }, +}); + +assert.deepEqual(toEncodedMap(map), { + version: 3, + names: [], + sources: ['input.js'], + sourcesContent: [null], + mappings: 'AAAA', +}); +``` + +## Benchmarks + +``` +node v18.0.0 + +amp.js.map +Memory Usage: +gen-mapping: addSegment 5852872 bytes +gen-mapping: addMapping 7716042 bytes +source-map-js 6143250 bytes +source-map-0.6.1 6124102 bytes +source-map-0.8.0 6121173 bytes +Smallest memory usage is gen-mapping: addSegment + +Adding speed: +gen-mapping: addSegment x 441 ops/sec ±2.07% (90 runs sampled) +gen-mapping: addMapping x 350 ops/sec ±2.40% (86 runs sampled) +source-map-js: addMapping x 169 ops/sec ±2.42% (80 runs sampled) +source-map-0.6.1: addMapping x 167 ops/sec ±2.56% (80 runs sampled) +source-map-0.8.0: addMapping x 168 ops/sec ±2.52% (80 runs sampled) +Fastest is gen-mapping: addSegment + +Generate speed: +gen-mapping: decoded output x 150,824,370 ops/sec ±0.07% (102 runs sampled) +gen-mapping: encoded output x 663 ops/sec ±0.22% (98 runs sampled) +source-map-js: encoded output x 197 ops/sec ±0.45% (84 runs sampled) +source-map-0.6.1: encoded output x 198 ops/sec ±0.33% (85 runs sampled) +source-map-0.8.0: encoded output x 197 ops/sec ±0.06% (93 runs sampled) +Fastest is gen-mapping: decoded output + + +*** + + +babel.min.js.map +Memory Usage: +gen-mapping: addSegment 37578063 bytes +gen-mapping: addMapping 37212897 bytes +source-map-js 47638527 bytes +source-map-0.6.1 47690503 bytes +source-map-0.8.0 47470188 bytes +Smallest memory usage is gen-mapping: addMapping + +Adding speed: +gen-mapping: addSegment x 31.05 ops/sec ±8.31% (43 runs sampled) +gen-mapping: addMapping x 29.83 ops/sec ±7.36% (51 runs sampled) +source-map-js: addMapping x 20.73 ops/sec ±6.22% (38 runs sampled) +source-map-0.6.1: addMapping x 20.03 ops/sec ±10.51% (38 runs sampled) +source-map-0.8.0: addMapping x 19.30 ops/sec ±8.27% (37 runs sampled) +Fastest is gen-mapping: addSegment + +Generate speed: +gen-mapping: decoded output x 381,379,234 ops/sec ±0.29% (96 runs sampled) +gen-mapping: encoded output x 95.15 ops/sec ±2.98% (72 runs sampled) +source-map-js: encoded output x 15.20 ops/sec ±7.41% (33 runs sampled) +source-map-0.6.1: encoded output x 16.36 ops/sec ±10.46% (31 runs sampled) +source-map-0.8.0: encoded output x 16.06 ops/sec ±6.45% (31 runs sampled) +Fastest is gen-mapping: decoded output + + +*** + + +preact.js.map +Memory Usage: +gen-mapping: addSegment 416247 bytes +gen-mapping: addMapping 419824 bytes +source-map-js 1024619 bytes +source-map-0.6.1 1146004 bytes +source-map-0.8.0 1113250 bytes +Smallest memory usage is gen-mapping: addSegment + +Adding speed: +gen-mapping: addSegment x 13,755 ops/sec ±0.15% (98 runs sampled) +gen-mapping: addMapping x 13,013 ops/sec ±0.11% (101 runs sampled) +source-map-js: addMapping x 4,564 ops/sec ±0.21% (98 runs sampled) +source-map-0.6.1: addMapping x 4,562 ops/sec ±0.11% (99 runs 
sampled) +source-map-0.8.0: addMapping x 4,593 ops/sec ±0.11% (100 runs sampled) +Fastest is gen-mapping: addSegment + +Generate speed: +gen-mapping: decoded output x 379,864,020 ops/sec ±0.23% (93 runs sampled) +gen-mapping: encoded output x 14,368 ops/sec ±4.07% (82 runs sampled) +source-map-js: encoded output x 5,261 ops/sec ±0.21% (99 runs sampled) +source-map-0.6.1: encoded output x 5,124 ops/sec ±0.58% (99 runs sampled) +source-map-0.8.0: encoded output x 5,434 ops/sec ±0.33% (96 runs sampled) +Fastest is gen-mapping: decoded output + + +*** + + +react.js.map +Memory Usage: +gen-mapping: addSegment 975096 bytes +gen-mapping: addMapping 1102981 bytes +source-map-js 2918836 bytes +source-map-0.6.1 2885435 bytes +source-map-0.8.0 2874336 bytes +Smallest memory usage is gen-mapping: addSegment + +Adding speed: +gen-mapping: addSegment x 4,772 ops/sec ±0.15% (100 runs sampled) +gen-mapping: addMapping x 4,456 ops/sec ±0.13% (97 runs sampled) +source-map-js: addMapping x 1,618 ops/sec ±0.24% (97 runs sampled) +source-map-0.6.1: addMapping x 1,622 ops/sec ±0.12% (99 runs sampled) +source-map-0.8.0: addMapping x 1,631 ops/sec ±0.12% (100 runs sampled) +Fastest is gen-mapping: addSegment + +Generate speed: +gen-mapping: decoded output x 379,107,695 ops/sec ±0.07% (99 runs sampled) +gen-mapping: encoded output x 5,421 ops/sec ±1.60% (89 runs sampled) +source-map-js: encoded output x 2,113 ops/sec ±1.81% (98 runs sampled) +source-map-0.6.1: encoded output x 2,126 ops/sec ±0.10% (100 runs sampled) +source-map-0.8.0: encoded output x 2,176 ops/sec ±0.39% (98 runs sampled) +Fastest is gen-mapping: decoded output +``` + +[source-map]: https://www.npmjs.com/package/source-map +[trace-mapping]: https://github.com/jridgewell/trace-mapping diff --git a/node_modules/@jridgewell/gen-mapping/dist/gen-mapping.mjs b/node_modules/@jridgewell/gen-mapping/dist/gen-mapping.mjs new file mode 100644 index 0000000..5aeb5cc --- /dev/null +++ b/node_modules/@jridgewell/gen-mapping/dist/gen-mapping.mjs @@ -0,0 +1,230 @@ +import { SetArray, put } from '@jridgewell/set-array'; +import { encode } from '@jridgewell/sourcemap-codec'; +import { TraceMap, decodedMappings } from '@jridgewell/trace-mapping'; + +const COLUMN = 0; +const SOURCES_INDEX = 1; +const SOURCE_LINE = 2; +const SOURCE_COLUMN = 3; +const NAMES_INDEX = 4; + +const NO_NAME = -1; +/** + * A low-level API to associate a generated position with an original source position. Line and + * column here are 0-based, unlike `addMapping`. + */ +let addSegment; +/** + * A high-level API to associate a generated position with an original source position. Line is + * 1-based, but column is 0-based, due to legacy behavior in `source-map` library. + */ +let addMapping; +/** + * Same as `addSegment`, but will only add the segment if it generates useful information in the + * resulting map. This only works correctly if segments are added **in order**, meaning you should + * not add a segment with a lower generated line/column than one that came before. + */ +let maybeAddSegment; +/** + * Same as `addMapping`, but will only add the mapping if it generates useful information in the + * resulting map. This only works correctly if mappings are added **in order**, meaning you should + * not add a mapping with a lower generated line/column than one that came before. + */ +let maybeAddMapping; +/** + * Adds/removes the content of the source file to the source map. 
+ */ +let setSourceContent; +/** + * Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects + * a sourcemap, or to JSON.stringify. + */ +let toDecodedMap; +/** + * Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects + * a sourcemap, or to JSON.stringify. + */ +let toEncodedMap; +/** + * Constructs a new GenMapping, using the already present mappings of the input. + */ +let fromMap; +/** + * Returns an array of high-level mapping objects for every recorded segment, which could then be + * passed to the `source-map` library. + */ +let allMappings; +// This split declaration is only so that terser can elminiate the static initialization block. +let addSegmentInternal; +/** + * Provides the state to generate a sourcemap. + */ +class GenMapping { + constructor({ file, sourceRoot } = {}) { + this._names = new SetArray(); + this._sources = new SetArray(); + this._sourcesContent = []; + this._mappings = []; + this.file = file; + this.sourceRoot = sourceRoot; + } +} +(() => { + addSegment = (map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) => { + return addSegmentInternal(false, map, genLine, genColumn, source, sourceLine, sourceColumn, name, content); + }; + maybeAddSegment = (map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) => { + return addSegmentInternal(true, map, genLine, genColumn, source, sourceLine, sourceColumn, name, content); + }; + addMapping = (map, mapping) => { + return addMappingInternal(false, map, mapping); + }; + maybeAddMapping = (map, mapping) => { + return addMappingInternal(true, map, mapping); + }; + setSourceContent = (map, source, content) => { + const { _sources: sources, _sourcesContent: sourcesContent } = map; + sourcesContent[put(sources, source)] = content; + }; + toDecodedMap = (map) => { + const { file, sourceRoot, _mappings: mappings, _sources: sources, _sourcesContent: sourcesContent, _names: names, } = map; + removeEmptyFinalLines(mappings); + return { + version: 3, + file: file || undefined, + names: names.array, + sourceRoot: sourceRoot || undefined, + sources: sources.array, + sourcesContent, + mappings, + }; + }; + toEncodedMap = (map) => { + const decoded = toDecodedMap(map); + return Object.assign(Object.assign({}, decoded), { mappings: encode(decoded.mappings) }); + }; + allMappings = (map) => { + const out = []; + const { _mappings: mappings, _sources: sources, _names: names } = map; + for (let i = 0; i < mappings.length; i++) { + const line = mappings[i]; + for (let j = 0; j < line.length; j++) { + const seg = line[j]; + const generated = { line: i + 1, column: seg[COLUMN] }; + let source = undefined; + let original = undefined; + let name = undefined; + if (seg.length !== 1) { + source = sources.array[seg[SOURCES_INDEX]]; + original = { line: seg[SOURCE_LINE] + 1, column: seg[SOURCE_COLUMN] }; + if (seg.length === 5) + name = names.array[seg[NAMES_INDEX]]; + } + out.push({ generated, source, original, name }); + } + } + return out; + }; + fromMap = (input) => { + const map = new TraceMap(input); + const gen = new GenMapping({ file: map.file, sourceRoot: map.sourceRoot }); + putAll(gen._names, map.names); + putAll(gen._sources, map.sources); + gen._sourcesContent = map.sourcesContent || map.sources.map(() => null); + gen._mappings = decodedMappings(map); + return gen; + }; + // Internal helpers + addSegmentInternal = (skipable, map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) => { + const { 
_mappings: mappings, _sources: sources, _sourcesContent: sourcesContent, _names: names, } = map; + const line = getLine(mappings, genLine); + const index = getColumnIndex(line, genColumn); + if (!source) { + if (skipable && skipSourceless(line, index)) + return; + return insert(line, index, [genColumn]); + } + const sourcesIndex = put(sources, source); + const namesIndex = name ? put(names, name) : NO_NAME; + if (sourcesIndex === sourcesContent.length) + sourcesContent[sourcesIndex] = content !== null && content !== void 0 ? content : null; + if (skipable && skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex)) { + return; + } + return insert(line, index, name + ? [genColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex] + : [genColumn, sourcesIndex, sourceLine, sourceColumn]); + }; +})(); +function getLine(mappings, index) { + for (let i = mappings.length; i <= index; i++) { + mappings[i] = []; + } + return mappings[index]; +} +function getColumnIndex(line, genColumn) { + let index = line.length; + for (let i = index - 1; i >= 0; index = i--) { + const current = line[i]; + if (genColumn >= current[COLUMN]) + break; + } + return index; +} +function insert(array, index, value) { + for (let i = array.length; i > index; i--) { + array[i] = array[i - 1]; + } + array[index] = value; +} +function removeEmptyFinalLines(mappings) { + const { length } = mappings; + let len = length; + for (let i = len - 1; i >= 0; len = i, i--) { + if (mappings[i].length > 0) + break; + } + if (len < length) + mappings.length = len; +} +function putAll(strarr, array) { + for (let i = 0; i < array.length; i++) + put(strarr, array[i]); +} +function skipSourceless(line, index) { + // The start of a line is already sourceless, so adding a sourceless segment to the beginning + // doesn't generate any useful information. + if (index === 0) + return true; + const prev = line[index - 1]; + // If the previous segment is also sourceless, then adding another sourceless segment doesn't + // genrate any new information. Else, this segment will end the source/named segment and point to + // a sourceless position, which is useful. + return prev.length === 1; +} +function skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex) { + // A source/named segment at the start of a line gives position at that genColumn + if (index === 0) + return false; + const prev = line[index - 1]; + // If the previous segment is sourceless, then we're transitioning to a source. + if (prev.length === 1) + return false; + // If the previous segment maps to the exact same source position, then this segment doesn't + // provide any new position information. + return (sourcesIndex === prev[SOURCES_INDEX] && + sourceLine === prev[SOURCE_LINE] && + sourceColumn === prev[SOURCE_COLUMN] && + namesIndex === (prev.length === 5 ? 
prev[NAMES_INDEX] : NO_NAME)); +} +function addMappingInternal(skipable, map, mapping) { + const { generated, source, original, name, content } = mapping; + if (!source) { + return addSegmentInternal(skipable, map, generated.line - 1, generated.column, null, null, null, null, null); + } + const s = source; + return addSegmentInternal(skipable, map, generated.line - 1, generated.column, s, original.line - 1, original.column, name, content); +} + +export { GenMapping, addMapping, addSegment, allMappings, fromMap, maybeAddMapping, maybeAddSegment, setSourceContent, toDecodedMap, toEncodedMap }; +//# sourceMappingURL=gen-mapping.mjs.map diff --git a/node_modules/@jridgewell/gen-mapping/dist/gen-mapping.mjs.map b/node_modules/@jridgewell/gen-mapping/dist/gen-mapping.mjs.map new file mode 100644 index 0000000..2fee0cd --- /dev/null +++ b/node_modules/@jridgewell/gen-mapping/dist/gen-mapping.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"gen-mapping.mjs","sources":["../src/sourcemap-segment.ts","../src/gen-mapping.ts"],"sourcesContent":["type GeneratedColumn = number;\ntype SourcesIndex = number;\ntype SourceLine = number;\ntype SourceColumn = number;\ntype NamesIndex = number;\n\nexport type SourceMapSegment =\n | [GeneratedColumn]\n | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn]\n | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn, NamesIndex];\n\nexport const COLUMN = 0;\nexport const SOURCES_INDEX = 1;\nexport const SOURCE_LINE = 2;\nexport const SOURCE_COLUMN = 3;\nexport const NAMES_INDEX = 4;\n","import { SetArray, put } from '@jridgewell/set-array';\nimport { encode } from '@jridgewell/sourcemap-codec';\nimport { TraceMap, decodedMappings } from '@jridgewell/trace-mapping';\n\nimport {\n COLUMN,\n SOURCES_INDEX,\n SOURCE_LINE,\n SOURCE_COLUMN,\n NAMES_INDEX,\n} from './sourcemap-segment';\n\nimport type { SourceMapInput } from '@jridgewell/trace-mapping';\nimport type { SourceMapSegment } from './sourcemap-segment';\nimport type { DecodedSourceMap, EncodedSourceMap, Pos, Mapping } from './types';\n\nexport type { DecodedSourceMap, EncodedSourceMap, Mapping };\n\nexport type Options = {\n file?: string | null;\n sourceRoot?: string | null;\n};\n\nconst NO_NAME = -1;\n\n/**\n * A low-level API to associate a generated position with an original source position. Line and\n * column here are 0-based, unlike `addMapping`.\n */\nexport let addSegment: {\n (\n map: GenMapping,\n genLine: number,\n genColumn: number,\n source?: null,\n sourceLine?: null,\n sourceColumn?: null,\n name?: null,\n content?: null,\n ): void;\n (\n map: GenMapping,\n genLine: number,\n genColumn: number,\n source: string,\n sourceLine: number,\n sourceColumn: number,\n name?: null,\n content?: string | null,\n ): void;\n (\n map: GenMapping,\n genLine: number,\n genColumn: number,\n source: string,\n sourceLine: number,\n sourceColumn: number,\n name: string,\n content?: string | null,\n ): void;\n};\n\n/**\n * A high-level API to associate a generated position with an original source position. 
Line is\n * 1-based, but column is 0-based, due to legacy behavior in `source-map` library.\n */\nexport let addMapping: {\n (\n map: GenMapping,\n mapping: {\n generated: Pos;\n source?: null;\n original?: null;\n name?: null;\n content?: null;\n },\n ): void;\n (\n map: GenMapping,\n mapping: {\n generated: Pos;\n source: string;\n original: Pos;\n name?: null;\n content?: string | null;\n },\n ): void;\n (\n map: GenMapping,\n mapping: {\n generated: Pos;\n source: string;\n original: Pos;\n name: string;\n content?: string | null;\n },\n ): void;\n};\n\n/**\n * Same as `addSegment`, but will only add the segment if it generates useful information in the\n * resulting map. This only works correctly if segments are added **in order**, meaning you should\n * not add a segment with a lower generated line/column than one that came before.\n */\nexport let maybeAddSegment: typeof addSegment;\n\n/**\n * Same as `addMapping`, but will only add the mapping if it generates useful information in the\n * resulting map. This only works correctly if mappings are added **in order**, meaning you should\n * not add a mapping with a lower generated line/column than one that came before.\n */\nexport let maybeAddMapping: typeof addMapping;\n\n/**\n * Adds/removes the content of the source file to the source map.\n */\nexport let setSourceContent: (map: GenMapping, source: string, content: string | null) => void;\n\n/**\n * Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects\n * a sourcemap, or to JSON.stringify.\n */\nexport let toDecodedMap: (map: GenMapping) => DecodedSourceMap;\n\n/**\n * Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects\n * a sourcemap, or to JSON.stringify.\n */\nexport let toEncodedMap: (map: GenMapping) => EncodedSourceMap;\n\n/**\n * Constructs a new GenMapping, using the already present mappings of the input.\n */\nexport let fromMap: (input: SourceMapInput) => GenMapping;\n\n/**\n * Returns an array of high-level mapping objects for every recorded segment, which could then be\n * passed to the `source-map` library.\n */\nexport let allMappings: (map: GenMapping) => Mapping[];\n\n// This split declaration is only so that terser can elminiate the static initialization block.\nlet addSegmentInternal: (\n skipable: boolean,\n map: GenMapping,\n genLine: number,\n genColumn: number,\n source: S,\n sourceLine: S extends string ? number : null | undefined,\n sourceColumn: S extends string ? number : null | undefined,\n name: S extends string ? string | null | undefined : null | undefined,\n content: S extends string ? 
string | null | undefined : null | undefined,\n) => void;\n\n/**\n * Provides the state to generate a sourcemap.\n */\nexport class GenMapping {\n private _names = new SetArray();\n private _sources = new SetArray();\n private _sourcesContent: (string | null)[] = [];\n private _mappings: SourceMapSegment[][] = [];\n declare file: string | null | undefined;\n declare sourceRoot: string | null | undefined;\n\n constructor({ file, sourceRoot }: Options = {}) {\n this.file = file;\n this.sourceRoot = sourceRoot;\n }\n\n static {\n addSegment = (map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) => {\n return addSegmentInternal(\n false,\n map,\n genLine,\n genColumn,\n source,\n sourceLine,\n sourceColumn,\n name,\n content,\n );\n };\n\n maybeAddSegment = (\n map,\n genLine,\n genColumn,\n source,\n sourceLine,\n sourceColumn,\n name,\n content,\n ) => {\n return addSegmentInternal(\n true,\n map,\n genLine,\n genColumn,\n source,\n sourceLine,\n sourceColumn,\n name,\n content,\n );\n };\n\n addMapping = (map, mapping) => {\n return addMappingInternal(false, map, mapping as Parameters[2]);\n };\n\n maybeAddMapping = (map, mapping) => {\n return addMappingInternal(true, map, mapping as Parameters[2]);\n };\n\n setSourceContent = (map, source, content) => {\n const { _sources: sources, _sourcesContent: sourcesContent } = map;\n sourcesContent[put(sources, source)] = content;\n };\n\n toDecodedMap = (map) => {\n const {\n file,\n sourceRoot,\n _mappings: mappings,\n _sources: sources,\n _sourcesContent: sourcesContent,\n _names: names,\n } = map;\n removeEmptyFinalLines(mappings);\n\n return {\n version: 3,\n file: file || undefined,\n names: names.array,\n sourceRoot: sourceRoot || undefined,\n sources: sources.array,\n sourcesContent,\n mappings,\n };\n };\n\n toEncodedMap = (map) => {\n const decoded = toDecodedMap(map);\n return {\n ...decoded,\n mappings: encode(decoded.mappings as SourceMapSegment[][]),\n };\n };\n\n allMappings = (map) => {\n const out: Mapping[] = [];\n const { _mappings: mappings, _sources: sources, _names: names } = map;\n\n for (let i = 0; i < mappings.length; i++) {\n const line = mappings[i];\n for (let j = 0; j < line.length; j++) {\n const seg = line[j];\n\n const generated = { line: i + 1, column: seg[COLUMN] };\n let source: string | undefined = undefined;\n let original: Pos | undefined = undefined;\n let name: string | undefined = undefined;\n\n if (seg.length !== 1) {\n source = sources.array[seg[SOURCES_INDEX]];\n original = { line: seg[SOURCE_LINE] + 1, column: seg[SOURCE_COLUMN] };\n\n if (seg.length === 5) name = names.array[seg[NAMES_INDEX]];\n }\n\n out.push({ generated, source, original, name } as Mapping);\n }\n }\n\n return out;\n };\n\n fromMap = (input) => {\n const map = new TraceMap(input);\n const gen = new GenMapping({ file: map.file, sourceRoot: map.sourceRoot });\n\n putAll(gen._names, map.names);\n putAll(gen._sources, map.sources as string[]);\n gen._sourcesContent = map.sourcesContent || map.sources.map(() => null);\n gen._mappings = decodedMappings(map) as GenMapping['_mappings'];\n\n return gen;\n };\n\n // Internal helpers\n addSegmentInternal = (\n skipable,\n map,\n genLine,\n genColumn,\n source,\n sourceLine,\n sourceColumn,\n name,\n content,\n ) => {\n const {\n _mappings: mappings,\n _sources: sources,\n _sourcesContent: sourcesContent,\n _names: names,\n } = map;\n const line = getLine(mappings, genLine);\n const index = getColumnIndex(line, genColumn);\n\n if (!source) {\n if (skipable && 
skipSourceless(line, index)) return;\n return insert(line, index, [genColumn]);\n }\n\n // Sigh, TypeScript can't figure out sourceLine and sourceColumn aren't nullish if source\n // isn't nullish.\n assert(sourceLine);\n assert(sourceColumn);\n\n const sourcesIndex = put(sources, source);\n const namesIndex = name ? put(names, name) : NO_NAME;\n if (sourcesIndex === sourcesContent.length) sourcesContent[sourcesIndex] = content ?? null;\n\n if (skipable && skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex)) {\n return;\n }\n\n return insert(\n line,\n index,\n name\n ? [genColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex]\n : [genColumn, sourcesIndex, sourceLine, sourceColumn],\n );\n };\n }\n}\n\nfunction assert(_val: unknown): asserts _val is T {\n // noop.\n}\n\nfunction getLine(mappings: SourceMapSegment[][], index: number): SourceMapSegment[] {\n for (let i = mappings.length; i <= index; i++) {\n mappings[i] = [];\n }\n return mappings[index];\n}\n\nfunction getColumnIndex(line: SourceMapSegment[], genColumn: number): number {\n let index = line.length;\n for (let i = index - 1; i >= 0; index = i--) {\n const current = line[i];\n if (genColumn >= current[COLUMN]) break;\n }\n return index;\n}\n\nfunction insert(array: T[], index: number, value: T) {\n for (let i = array.length; i > index; i--) {\n array[i] = array[i - 1];\n }\n array[index] = value;\n}\n\nfunction removeEmptyFinalLines(mappings: SourceMapSegment[][]) {\n const { length } = mappings;\n let len = length;\n for (let i = len - 1; i >= 0; len = i, i--) {\n if (mappings[i].length > 0) break;\n }\n if (len < length) mappings.length = len;\n}\n\nfunction putAll(strarr: SetArray, array: string[]) {\n for (let i = 0; i < array.length; i++) put(strarr, array[i]);\n}\n\nfunction skipSourceless(line: SourceMapSegment[], index: number): boolean {\n // The start of a line is already sourceless, so adding a sourceless segment to the beginning\n // doesn't generate any useful information.\n if (index === 0) return true;\n\n const prev = line[index - 1];\n // If the previous segment is also sourceless, then adding another sourceless segment doesn't\n // genrate any new information. Else, this segment will end the source/named segment and point to\n // a sourceless position, which is useful.\n return prev.length === 1;\n}\n\nfunction skipSource(\n line: SourceMapSegment[],\n index: number,\n sourcesIndex: number,\n sourceLine: number,\n sourceColumn: number,\n namesIndex: number,\n): boolean {\n // A source/named segment at the start of a line gives position at that genColumn\n if (index === 0) return false;\n\n const prev = line[index - 1];\n\n // If the previous segment is sourceless, then we're transitioning to a source.\n if (prev.length === 1) return false;\n\n // If the previous segment maps to the exact same source position, then this segment doesn't\n // provide any new position information.\n return (\n sourcesIndex === prev[SOURCES_INDEX] &&\n sourceLine === prev[SOURCE_LINE] &&\n sourceColumn === prev[SOURCE_COLUMN] &&\n namesIndex === (prev.length === 5 ? prev[NAMES_INDEX] : NO_NAME)\n );\n}\n\nfunction addMappingInternal(\n skipable: boolean,\n map: GenMapping,\n mapping: {\n generated: Pos;\n source: S;\n original: S extends string ? Pos : null | undefined;\n name: S extends string ? string | null | undefined : null | undefined;\n content: S extends string ? 
string | null | undefined : null | undefined;\n },\n) {\n const { generated, source, original, name, content } = mapping;\n if (!source) {\n return addSegmentInternal(\n skipable,\n map,\n generated.line - 1,\n generated.column,\n null,\n null,\n null,\n null,\n null,\n );\n }\n const s: string = source;\n assert(original);\n return addSegmentInternal(\n skipable,\n map,\n generated.line - 1,\n generated.column,\n s,\n original.line - 1,\n original.column,\n name,\n content,\n );\n}\n"],"names":[],"mappings":";;;;AAWO,MAAM,MAAM,GAAG,CAAC,CAAC;AACjB,MAAM,aAAa,GAAG,CAAC,CAAC;AACxB,MAAM,WAAW,GAAG,CAAC,CAAC;AACtB,MAAM,aAAa,GAAG,CAAC,CAAC;AACxB,MAAM,WAAW,GAAG,CAAC;;ACQ5B,MAAM,OAAO,GAAG,CAAC,CAAC,CAAC;AAEnB;;;AAGG;AACQ,IAAA,WA+BT;AAEF;;;AAGG;AACQ,IAAA,WA+BT;AAEF;;;;AAIG;AACQ,IAAA,gBAAmC;AAE9C;;;;AAIG;AACQ,IAAA,gBAAmC;AAE9C;;AAEG;AACQ,IAAA,iBAAoF;AAE/F;;;AAGG;AACQ,IAAA,aAAoD;AAE/D;;;AAGG;AACQ,IAAA,aAAoD;AAE/D;;AAEG;AACQ,IAAA,QAA+C;AAE1D;;;AAGG;AACQ,IAAA,YAA4C;AAEvD;AACA,IAAI,kBAUK,CAAC;AAEV;;AAEG;MACU,UAAU,CAAA;AAQrB,IAAA,WAAA,CAAY,EAAE,IAAI,EAAE,UAAU,KAAc,EAAE,EAAA;AAPtC,QAAA,IAAA,CAAA,MAAM,GAAG,IAAI,QAAQ,EAAE,CAAC;AACxB,QAAA,IAAA,CAAA,QAAQ,GAAG,IAAI,QAAQ,EAAE,CAAC;QAC1B,IAAe,CAAA,eAAA,GAAsB,EAAE,CAAC;QACxC,IAAS,CAAA,SAAA,GAAyB,EAAE,CAAC;AAK3C,QAAA,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC;AACjB,QAAA,IAAI,CAAC,UAAU,GAAG,UAAU,CAAC;KAC9B;AA2KF,CAAA;AAzKC,CAAA,MAAA;AACE,IAAA,UAAU,GAAG,CAAC,GAAG,EAAE,OAAO,EAAE,SAAS,EAAE,MAAM,EAAE,UAAU,EAAE,YAAY,EAAE,IAAI,EAAE,OAAO,KAAI;QACxF,OAAO,kBAAkB,CACvB,KAAK,EACL,GAAG,EACH,OAAO,EACP,SAAS,EACT,MAAM,EACN,UAAU,EACV,YAAY,EACZ,IAAI,EACJ,OAAO,CACR,CAAC;AACJ,KAAC,CAAC;AAEF,IAAA,eAAe,GAAG,CAChB,GAAG,EACH,OAAO,EACP,SAAS,EACT,MAAM,EACN,UAAU,EACV,YAAY,EACZ,IAAI,EACJ,OAAO,KACL;QACF,OAAO,kBAAkB,CACvB,IAAI,EACJ,GAAG,EACH,OAAO,EACP,SAAS,EACT,MAAM,EACN,UAAU,EACV,YAAY,EACZ,IAAI,EACJ,OAAO,CACR,CAAC;AACJ,KAAC,CAAC;AAEF,IAAA,UAAU,GAAG,CAAC,GAAG,EAAE,OAAO,KAAI;QAC5B,OAAO,kBAAkB,CAAC,KAAK,EAAE,GAAG,EAAE,OAAmD,CAAC,CAAC;AAC7F,KAAC,CAAC;AAEF,IAAA,eAAe,GAAG,CAAC,GAAG,EAAE,OAAO,KAAI;QACjC,OAAO,kBAAkB,CAAC,IAAI,EAAE,GAAG,EAAE,OAAmD,CAAC,CAAC;AAC5F,KAAC,CAAC;IAEF,gBAAgB,GAAG,CAAC,GAAG,EAAE,MAAM,EAAE,OAAO,KAAI;QAC1C,MAAM,EAAE,QAAQ,EAAE,OAAO,EAAE,eAAe,EAAE,cAAc,EAAE,GAAG,GAAG,CAAC;QACnE,cAAc,CAAC,GAAG,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC,GAAG,OAAO,CAAC;AACjD,KAAC,CAAC;AAEF,IAAA,YAAY,GAAG,CAAC,GAAG,KAAI;QACrB,MAAM,EACJ,IAAI,EACJ,UAAU,EACV,SAAS,EAAE,QAAQ,EACnB,QAAQ,EAAE,OAAO,EACjB,eAAe,EAAE,cAAc,EAC/B,MAAM,EAAE,KAAK,GACd,GAAG,GAAG,CAAC;QACR,qBAAqB,CAAC,QAAQ,CAAC,CAAC;QAEhC,OAAO;AACL,YAAA,OAAO,EAAE,CAAC;YACV,IAAI,EAAE,IAAI,IAAI,SAAS;YACvB,KAAK,EAAE,KAAK,CAAC,KAAK;YAClB,UAAU,EAAE,UAAU,IAAI,SAAS;YACnC,OAAO,EAAE,OAAO,CAAC,KAAK;YACtB,cAAc;YACd,QAAQ;SACT,CAAC;AACJ,KAAC,CAAC;AAEF,IAAA,YAAY,GAAG,CAAC,GAAG,KAAI;AACrB,QAAA,MAAM,OAAO,GAAG,YAAY,CAAC,GAAG,CAAC,CAAC;QAClC,OACK,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,OAAO,CACV,EAAA,EAAA,QAAQ,EAAE,MAAM,CAAC,OAAO,CAAC,QAAgC,CAAC,EAC1D,CAAA,CAAA;AACJ,KAAC,CAAC;AAEF,IAAA,WAAW,GAAG,CAAC,GAAG,KAAI;QACpB,MAAM,GAAG,GAAc,EAAE,CAAC;AAC1B,QAAA,MAAM,EAAE,SAAS,EAAE,QAAQ,EAAE,QAAQ,EAAE,OAAO,EAAE,MAAM,EAAE,KAAK,EAAE,GAAG,GAAG,CAAC;AAEtE,QAAA,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,QAAQ,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;AACxC,YAAA,MAAM,IAAI,GAAG,QAAQ,CAAC,CAAC,CAAC,CAAC;AACzB,YAAA,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;AACpC,gBAAA,MAAM,GAAG,GAAG,IAAI,CAAC,CAAC,CAAC,CAAC;AAEpB,gBAAA,MAAM,SAAS,GAAG,EAAE,IAAI,EAAE,CAAC,GAAG,CAAC,EAAE,MAAM,EAAE,GAAG,CAAC,MAAM,CAAC,EAAE,CAAC;gBACvD,IAAI,MAAM,GAAuB,SAAS,CAAC;gBAC3C,IAAI,QAAQ,GAAoB,SAAS,CAAC;gBAC1C,IAAI,IAA
I,GAAuB,SAAS,CAAC;AAEzC,gBAAA,IAAI,GAAG,CAAC,MAAM,KAAK,CAAC,EAAE;oBACpB,MAAM,GAAG,OAAO,CAAC,KAAK,CAAC,GAAG,CAAC,aAAa,CAAC,CAAC,CAAC;AAC3C,oBAAA,QAAQ,GAAG,EAAE,IAAI,EAAE,GAAG,CAAC,WAAW,CAAC,GAAG,CAAC,EAAE,MAAM,EAAE,GAAG,CAAC,aAAa,CAAC,EAAE,CAAC;AAEtE,oBAAA,IAAI,GAAG,CAAC,MAAM,KAAK,CAAC;wBAAE,IAAI,GAAG,KAAK,CAAC,KAAK,CAAC,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC;AAC5D,iBAAA;AAED,gBAAA,GAAG,CAAC,IAAI,CAAC,EAAE,SAAS,EAAE,MAAM,EAAE,QAAQ,EAAE,IAAI,EAAa,CAAC,CAAC;AAC5D,aAAA;AACF,SAAA;AAED,QAAA,OAAO,GAAG,CAAC;AACb,KAAC,CAAC;AAEF,IAAA,OAAO,GAAG,CAAC,KAAK,KAAI;AAClB,QAAA,MAAM,GAAG,GAAG,IAAI,QAAQ,CAAC,KAAK,CAAC,CAAC;AAChC,QAAA,MAAM,GAAG,GAAG,IAAI,UAAU,CAAC,EAAE,IAAI,EAAE,GAAG,CAAC,IAAI,EAAE,UAAU,EAAE,GAAG,CAAC,UAAU,EAAE,CAAC,CAAC;QAE3E,MAAM,CAAC,GAAG,CAAC,MAAM,EAAE,GAAG,CAAC,KAAK,CAAC,CAAC;QAC9B,MAAM,CAAC,GAAG,CAAC,QAAQ,EAAE,GAAG,CAAC,OAAmB,CAAC,CAAC;AAC9C,QAAA,GAAG,CAAC,eAAe,GAAG,GAAG,CAAC,cAAc,IAAI,GAAG,CAAC,OAAO,CAAC,GAAG,CAAC,MAAM,IAAI,CAAC,CAAC;AACxE,QAAA,GAAG,CAAC,SAAS,GAAG,eAAe,CAAC,GAAG,CAA4B,CAAC;AAEhE,QAAA,OAAO,GAAG,CAAC;AACb,KAAC,CAAC;;IAGF,kBAAkB,GAAG,CACnB,QAAQ,EACR,GAAG,EACH,OAAO,EACP,SAAS,EACT,MAAM,EACN,UAAU,EACV,YAAY,EACZ,IAAI,EACJ,OAAO,KACL;AACF,QAAA,MAAM,EACJ,SAAS,EAAE,QAAQ,EACnB,QAAQ,EAAE,OAAO,EACjB,eAAe,EAAE,cAAc,EAC/B,MAAM,EAAE,KAAK,GACd,GAAG,GAAG,CAAC;QACR,MAAM,IAAI,GAAG,OAAO,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAC;QACxC,MAAM,KAAK,GAAG,cAAc,CAAC,IAAI,EAAE,SAAS,CAAC,CAAC;QAE9C,IAAI,CAAC,MAAM,EAAE;AACX,YAAA,IAAI,QAAQ,IAAI,cAAc,CAAC,IAAI,EAAE,KAAK,CAAC;gBAAE,OAAO;YACpD,OAAO,MAAM,CAAC,IAAI,EAAE,KAAK,EAAE,CAAC,SAAS,CAAC,CAAC,CAAC;AACzC,SAAA;QAOD,MAAM,YAAY,GAAG,GAAG,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;AAC1C,QAAA,MAAM,UAAU,GAAG,IAAI,GAAG,GAAG,CAAC,KAAK,EAAE,IAAI,CAAC,GAAG,OAAO,CAAC;AACrD,QAAA,IAAI,YAAY,KAAK,cAAc,CAAC,MAAM;YAAE,cAAc,CAAC,YAAY,CAAC,GAAG,OAAO,KAAP,IAAA,IAAA,OAAO,KAAP,KAAA,CAAA,GAAA,OAAO,GAAI,IAAI,CAAC;AAE3F,QAAA,IAAI,QAAQ,IAAI,UAAU,CAAC,IAAI,EAAE,KAAK,EAAE,YAAY,EAAE,UAAU,EAAE,YAAY,EAAE,UAAU,CAAC,EAAE;YAC3F,OAAO;AACR,SAAA;AAED,QAAA,OAAO,MAAM,CACX,IAAI,EACJ,KAAK,EACL,IAAI;cACA,CAAC,SAAS,EAAE,YAAY,EAAE,UAAU,EAAE,YAAY,EAAE,UAAU,CAAC;cAC/D,CAAC,SAAS,EAAE,YAAY,EAAE,UAAU,EAAE,YAAY,CAAC,CACxD,CAAC;AACJ,KAAC,CAAC;AACJ,CAAC,GAAA,CAAA;AAOH,SAAS,OAAO,CAAC,QAA8B,EAAE,KAAa,EAAA;AAC5D,IAAA,KAAK,IAAI,CAAC,GAAG,QAAQ,CAAC,MAAM,EAAE,CAAC,IAAI,KAAK,EAAE,CAAC,EAAE,EAAE;AAC7C,QAAA,QAAQ,CAAC,CAAC,CAAC,GAAG,EAAE,CAAC;AAClB,KAAA;AACD,IAAA,OAAO,QAAQ,CAAC,KAAK,CAAC,CAAC;AACzB,CAAC;AAED,SAAS,cAAc,CAAC,IAAwB,EAAE,SAAiB,EAAA;AACjE,IAAA,IAAI,KAAK,GAAG,IAAI,CAAC,MAAM,CAAC;AACxB,IAAA,KAAK,IAAI,CAAC,GAAG,KAAK,GAAG,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,KAAK,GAAG,CAAC,EAAE,EAAE;AAC3C,QAAA,MAAM,OAAO,GAAG,IAAI,CAAC,CAAC,CAAC,CAAC;AACxB,QAAA,IAAI,SAAS,IAAI,OAAO,CAAC,MAAM,CAAC;YAAE,MAAM;AACzC,KAAA;AACD,IAAA,OAAO,KAAK,CAAC;AACf,CAAC;AAED,SAAS,MAAM,CAAI,KAAU,EAAE,KAAa,EAAE,KAAQ,EAAA;AACpD,IAAA,KAAK,IAAI,CAAC,GAAG,KAAK,CAAC,MAAM,EAAE,CAAC,GAAG,KAAK,EAAE,CAAC,EAAE,EAAE;QACzC,KAAK,CAAC,CAAC,CAAC,GAAG,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;AACzB,KAAA;AACD,IAAA,KAAK,CAAC,KAAK,CAAC,GAAG,KAAK,CAAC;AACvB,CAAC;AAED,SAAS,qBAAqB,CAAC,QAA8B,EAAA;AAC3D,IAAA,MAAM,EAAE,MAAM,EAAE,GAAG,QAAQ,CAAC;IAC5B,IAAI,GAAG,GAAG,MAAM,CAAC;AACjB,IAAA,KAAK,IAAI,CAAC,GAAG,GAAG,GAAG,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,GAAG,GAAG,CAAC,EAAE,CAAC,EAAE,EAAE;AAC1C,QAAA,IAAI,QAAQ,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC;YAAE,MAAM;AACnC,KAAA;IACD,IAAI,GAAG,GAAG,MAAM;AAAE,QAAA,QAAQ,CAAC,MAAM,GAAG,GAAG,CAAC;AAC1C,CAAC;AAED,SAAS,MAAM,CAAC,MAAgB,EAAE,KAAe,EAAA;AAC/C,IAAA,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,KAAK,CAAC,MAAM,EAAE,CAAC,EAAE;QAAE,GAAG,CAAC,MAAM,EAAE,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC;AAC/D,CAAC;AAED
,SAAS,cAAc,CAAC,IAAwB,EAAE,KAAa,EAAA;;;IAG7D,IAAI,KAAK,KAAK,CAAC;AAAE,QAAA,OAAO,IAAI,CAAC;IAE7B,MAAM,IAAI,GAAG,IAAI,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC;;;;AAI7B,IAAA,OAAO,IAAI,CAAC,MAAM,KAAK,CAAC,CAAC;AAC3B,CAAC;AAED,SAAS,UAAU,CACjB,IAAwB,EACxB,KAAa,EACb,YAAoB,EACpB,UAAkB,EAClB,YAAoB,EACpB,UAAkB,EAAA;;IAGlB,IAAI,KAAK,KAAK,CAAC;AAAE,QAAA,OAAO,KAAK,CAAC;IAE9B,MAAM,IAAI,GAAG,IAAI,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC;;AAG7B,IAAA,IAAI,IAAI,CAAC,MAAM,KAAK,CAAC;AAAE,QAAA,OAAO,KAAK,CAAC;;;AAIpC,IAAA,QACE,YAAY,KAAK,IAAI,CAAC,aAAa,CAAC;AACpC,QAAA,UAAU,KAAK,IAAI,CAAC,WAAW,CAAC;AAChC,QAAA,YAAY,KAAK,IAAI,CAAC,aAAa,CAAC;QACpC,UAAU,MAAM,IAAI,CAAC,MAAM,KAAK,CAAC,GAAG,IAAI,CAAC,WAAW,CAAC,GAAG,OAAO,CAAC,EAChE;AACJ,CAAC;AAED,SAAS,kBAAkB,CACzB,QAAiB,EACjB,GAAe,EACf,OAMC,EAAA;AAED,IAAA,MAAM,EAAE,SAAS,EAAE,MAAM,EAAE,QAAQ,EAAE,IAAI,EAAE,OAAO,EAAE,GAAG,OAAO,CAAC;IAC/D,IAAI,CAAC,MAAM,EAAE;QACX,OAAO,kBAAkB,CACvB,QAAQ,EACR,GAAG,EACH,SAAS,CAAC,IAAI,GAAG,CAAC,EAClB,SAAS,CAAC,MAAM,EAChB,IAAI,EACJ,IAAI,EACJ,IAAI,EACJ,IAAI,EACJ,IAAI,CACL,CAAC;AACH,KAAA;IACD,MAAM,CAAC,GAAW,MAAM,CAAC;AAEzB,IAAA,OAAO,kBAAkB,CACvB,QAAQ,EACR,GAAG,EACH,SAAS,CAAC,IAAI,GAAG,CAAC,EAClB,SAAS,CAAC,MAAM,EAChB,CAAC,EACD,QAAQ,CAAC,IAAI,GAAG,CAAC,EACjB,QAAQ,CAAC,MAAM,EACf,IAAI,EACJ,OAAO,CACR,CAAC;AACJ;;;;"} \ No newline at end of file diff --git a/node_modules/@jridgewell/gen-mapping/dist/gen-mapping.umd.js b/node_modules/@jridgewell/gen-mapping/dist/gen-mapping.umd.js new file mode 100644 index 0000000..d9fcf5c --- /dev/null +++ b/node_modules/@jridgewell/gen-mapping/dist/gen-mapping.umd.js @@ -0,0 +1,236 @@ +(function (global, factory) { + typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports, require('@jridgewell/set-array'), require('@jridgewell/sourcemap-codec'), require('@jridgewell/trace-mapping')) : + typeof define === 'function' && define.amd ? define(['exports', '@jridgewell/set-array', '@jridgewell/sourcemap-codec', '@jridgewell/trace-mapping'], factory) : + (global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.genMapping = {}, global.setArray, global.sourcemapCodec, global.traceMapping)); +})(this, (function (exports, setArray, sourcemapCodec, traceMapping) { 'use strict'; + + const COLUMN = 0; + const SOURCES_INDEX = 1; + const SOURCE_LINE = 2; + const SOURCE_COLUMN = 3; + const NAMES_INDEX = 4; + + const NO_NAME = -1; + /** + * A low-level API to associate a generated position with an original source position. Line and + * column here are 0-based, unlike `addMapping`. + */ + exports.addSegment = void 0; + /** + * A high-level API to associate a generated position with an original source position. Line is + * 1-based, but column is 0-based, due to legacy behavior in `source-map` library. + */ + exports.addMapping = void 0; + /** + * Same as `addSegment`, but will only add the segment if it generates useful information in the + * resulting map. This only works correctly if segments are added **in order**, meaning you should + * not add a segment with a lower generated line/column than one that came before. + */ + exports.maybeAddSegment = void 0; + /** + * Same as `addMapping`, but will only add the mapping if it generates useful information in the + * resulting map. This only works correctly if mappings are added **in order**, meaning you should + * not add a mapping with a lower generated line/column than one that came before. + */ + exports.maybeAddMapping = void 0; + /** + * Adds/removes the content of the source file to the source map. 
+ */ + exports.setSourceContent = void 0; + /** + * Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects + * a sourcemap, or to JSON.stringify. + */ + exports.toDecodedMap = void 0; + /** + * Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects + * a sourcemap, or to JSON.stringify. + */ + exports.toEncodedMap = void 0; + /** + * Constructs a new GenMapping, using the already present mappings of the input. + */ + exports.fromMap = void 0; + /** + * Returns an array of high-level mapping objects for every recorded segment, which could then be + * passed to the `source-map` library. + */ + exports.allMappings = void 0; + // This split declaration is only so that terser can elminiate the static initialization block. + let addSegmentInternal; + /** + * Provides the state to generate a sourcemap. + */ + class GenMapping { + constructor({ file, sourceRoot } = {}) { + this._names = new setArray.SetArray(); + this._sources = new setArray.SetArray(); + this._sourcesContent = []; + this._mappings = []; + this.file = file; + this.sourceRoot = sourceRoot; + } + } + (() => { + exports.addSegment = (map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) => { + return addSegmentInternal(false, map, genLine, genColumn, source, sourceLine, sourceColumn, name, content); + }; + exports.maybeAddSegment = (map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) => { + return addSegmentInternal(true, map, genLine, genColumn, source, sourceLine, sourceColumn, name, content); + }; + exports.addMapping = (map, mapping) => { + return addMappingInternal(false, map, mapping); + }; + exports.maybeAddMapping = (map, mapping) => { + return addMappingInternal(true, map, mapping); + }; + exports.setSourceContent = (map, source, content) => { + const { _sources: sources, _sourcesContent: sourcesContent } = map; + sourcesContent[setArray.put(sources, source)] = content; + }; + exports.toDecodedMap = (map) => { + const { file, sourceRoot, _mappings: mappings, _sources: sources, _sourcesContent: sourcesContent, _names: names, } = map; + removeEmptyFinalLines(mappings); + return { + version: 3, + file: file || undefined, + names: names.array, + sourceRoot: sourceRoot || undefined, + sources: sources.array, + sourcesContent, + mappings, + }; + }; + exports.toEncodedMap = (map) => { + const decoded = exports.toDecodedMap(map); + return Object.assign(Object.assign({}, decoded), { mappings: sourcemapCodec.encode(decoded.mappings) }); + }; + exports.allMappings = (map) => { + const out = []; + const { _mappings: mappings, _sources: sources, _names: names } = map; + for (let i = 0; i < mappings.length; i++) { + const line = mappings[i]; + for (let j = 0; j < line.length; j++) { + const seg = line[j]; + const generated = { line: i + 1, column: seg[COLUMN] }; + let source = undefined; + let original = undefined; + let name = undefined; + if (seg.length !== 1) { + source = sources.array[seg[SOURCES_INDEX]]; + original = { line: seg[SOURCE_LINE] + 1, column: seg[SOURCE_COLUMN] }; + if (seg.length === 5) + name = names.array[seg[NAMES_INDEX]]; + } + out.push({ generated, source, original, name }); + } + } + return out; + }; + exports.fromMap = (input) => { + const map = new traceMapping.TraceMap(input); + const gen = new GenMapping({ file: map.file, sourceRoot: map.sourceRoot }); + putAll(gen._names, map.names); + putAll(gen._sources, map.sources); + gen._sourcesContent = map.sourcesContent || map.sources.map(() 
=> null); + gen._mappings = traceMapping.decodedMappings(map); + return gen; + }; + // Internal helpers + addSegmentInternal = (skipable, map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) => { + const { _mappings: mappings, _sources: sources, _sourcesContent: sourcesContent, _names: names, } = map; + const line = getLine(mappings, genLine); + const index = getColumnIndex(line, genColumn); + if (!source) { + if (skipable && skipSourceless(line, index)) + return; + return insert(line, index, [genColumn]); + } + const sourcesIndex = setArray.put(sources, source); + const namesIndex = name ? setArray.put(names, name) : NO_NAME; + if (sourcesIndex === sourcesContent.length) + sourcesContent[sourcesIndex] = content !== null && content !== void 0 ? content : null; + if (skipable && skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex)) { + return; + } + return insert(line, index, name + ? [genColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex] + : [genColumn, sourcesIndex, sourceLine, sourceColumn]); + }; + })(); + function getLine(mappings, index) { + for (let i = mappings.length; i <= index; i++) { + mappings[i] = []; + } + return mappings[index]; + } + function getColumnIndex(line, genColumn) { + let index = line.length; + for (let i = index - 1; i >= 0; index = i--) { + const current = line[i]; + if (genColumn >= current[COLUMN]) + break; + } + return index; + } + function insert(array, index, value) { + for (let i = array.length; i > index; i--) { + array[i] = array[i - 1]; + } + array[index] = value; + } + function removeEmptyFinalLines(mappings) { + const { length } = mappings; + let len = length; + for (let i = len - 1; i >= 0; len = i, i--) { + if (mappings[i].length > 0) + break; + } + if (len < length) + mappings.length = len; + } + function putAll(strarr, array) { + for (let i = 0; i < array.length; i++) + setArray.put(strarr, array[i]); + } + function skipSourceless(line, index) { + // The start of a line is already sourceless, so adding a sourceless segment to the beginning + // doesn't generate any useful information. + if (index === 0) + return true; + const prev = line[index - 1]; + // If the previous segment is also sourceless, then adding another sourceless segment doesn't + // genrate any new information. Else, this segment will end the source/named segment and point to + // a sourceless position, which is useful. + return prev.length === 1; + } + function skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex) { + // A source/named segment at the start of a line gives position at that genColumn + if (index === 0) + return false; + const prev = line[index - 1]; + // If the previous segment is sourceless, then we're transitioning to a source. + if (prev.length === 1) + return false; + // If the previous segment maps to the exact same source position, then this segment doesn't + // provide any new position information. + return (sourcesIndex === prev[SOURCES_INDEX] && + sourceLine === prev[SOURCE_LINE] && + sourceColumn === prev[SOURCE_COLUMN] && + namesIndex === (prev.length === 5 ? 
prev[NAMES_INDEX] : NO_NAME)); + } + function addMappingInternal(skipable, map, mapping) { + const { generated, source, original, name, content } = mapping; + if (!source) { + return addSegmentInternal(skipable, map, generated.line - 1, generated.column, null, null, null, null, null); + } + const s = source; + return addSegmentInternal(skipable, map, generated.line - 1, generated.column, s, original.line - 1, original.column, name, content); + } + + exports.GenMapping = GenMapping; + + Object.defineProperty(exports, '__esModule', { value: true }); + +})); +//# sourceMappingURL=gen-mapping.umd.js.map diff --git a/node_modules/@jridgewell/gen-mapping/dist/gen-mapping.umd.js.map b/node_modules/@jridgewell/gen-mapping/dist/gen-mapping.umd.js.map new file mode 100644 index 0000000..7cc8d14 --- /dev/null +++ b/node_modules/@jridgewell/gen-mapping/dist/gen-mapping.umd.js.map @@ -0,0 +1 @@ +{"version":3,"file":"gen-mapping.umd.js","sources":["../src/sourcemap-segment.ts","../src/gen-mapping.ts"],"sourcesContent":["type GeneratedColumn = number;\ntype SourcesIndex = number;\ntype SourceLine = number;\ntype SourceColumn = number;\ntype NamesIndex = number;\n\nexport type SourceMapSegment =\n | [GeneratedColumn]\n | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn]\n | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn, NamesIndex];\n\nexport const COLUMN = 0;\nexport const SOURCES_INDEX = 1;\nexport const SOURCE_LINE = 2;\nexport const SOURCE_COLUMN = 3;\nexport const NAMES_INDEX = 4;\n","import { SetArray, put } from '@jridgewell/set-array';\nimport { encode } from '@jridgewell/sourcemap-codec';\nimport { TraceMap, decodedMappings } from '@jridgewell/trace-mapping';\n\nimport {\n COLUMN,\n SOURCES_INDEX,\n SOURCE_LINE,\n SOURCE_COLUMN,\n NAMES_INDEX,\n} from './sourcemap-segment';\n\nimport type { SourceMapInput } from '@jridgewell/trace-mapping';\nimport type { SourceMapSegment } from './sourcemap-segment';\nimport type { DecodedSourceMap, EncodedSourceMap, Pos, Mapping } from './types';\n\nexport type { DecodedSourceMap, EncodedSourceMap, Mapping };\n\nexport type Options = {\n file?: string | null;\n sourceRoot?: string | null;\n};\n\nconst NO_NAME = -1;\n\n/**\n * A low-level API to associate a generated position with an original source position. Line and\n * column here are 0-based, unlike `addMapping`.\n */\nexport let addSegment: {\n (\n map: GenMapping,\n genLine: number,\n genColumn: number,\n source?: null,\n sourceLine?: null,\n sourceColumn?: null,\n name?: null,\n content?: null,\n ): void;\n (\n map: GenMapping,\n genLine: number,\n genColumn: number,\n source: string,\n sourceLine: number,\n sourceColumn: number,\n name?: null,\n content?: string | null,\n ): void;\n (\n map: GenMapping,\n genLine: number,\n genColumn: number,\n source: string,\n sourceLine: number,\n sourceColumn: number,\n name: string,\n content?: string | null,\n ): void;\n};\n\n/**\n * A high-level API to associate a generated position with an original source position. 
Line is\n * 1-based, but column is 0-based, due to legacy behavior in `source-map` library.\n */\nexport let addMapping: {\n (\n map: GenMapping,\n mapping: {\n generated: Pos;\n source?: null;\n original?: null;\n name?: null;\n content?: null;\n },\n ): void;\n (\n map: GenMapping,\n mapping: {\n generated: Pos;\n source: string;\n original: Pos;\n name?: null;\n content?: string | null;\n },\n ): void;\n (\n map: GenMapping,\n mapping: {\n generated: Pos;\n source: string;\n original: Pos;\n name: string;\n content?: string | null;\n },\n ): void;\n};\n\n/**\n * Same as `addSegment`, but will only add the segment if it generates useful information in the\n * resulting map. This only works correctly if segments are added **in order**, meaning you should\n * not add a segment with a lower generated line/column than one that came before.\n */\nexport let maybeAddSegment: typeof addSegment;\n\n/**\n * Same as `addMapping`, but will only add the mapping if it generates useful information in the\n * resulting map. This only works correctly if mappings are added **in order**, meaning you should\n * not add a mapping with a lower generated line/column than one that came before.\n */\nexport let maybeAddMapping: typeof addMapping;\n\n/**\n * Adds/removes the content of the source file to the source map.\n */\nexport let setSourceContent: (map: GenMapping, source: string, content: string | null) => void;\n\n/**\n * Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects\n * a sourcemap, or to JSON.stringify.\n */\nexport let toDecodedMap: (map: GenMapping) => DecodedSourceMap;\n\n/**\n * Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects\n * a sourcemap, or to JSON.stringify.\n */\nexport let toEncodedMap: (map: GenMapping) => EncodedSourceMap;\n\n/**\n * Constructs a new GenMapping, using the already present mappings of the input.\n */\nexport let fromMap: (input: SourceMapInput) => GenMapping;\n\n/**\n * Returns an array of high-level mapping objects for every recorded segment, which could then be\n * passed to the `source-map` library.\n */\nexport let allMappings: (map: GenMapping) => Mapping[];\n\n// This split declaration is only so that terser can elminiate the static initialization block.\nlet addSegmentInternal: (\n skipable: boolean,\n map: GenMapping,\n genLine: number,\n genColumn: number,\n source: S,\n sourceLine: S extends string ? number : null | undefined,\n sourceColumn: S extends string ? number : null | undefined,\n name: S extends string ? string | null | undefined : null | undefined,\n content: S extends string ? 
string | null | undefined : null | undefined,\n) => void;\n\n/**\n * Provides the state to generate a sourcemap.\n */\nexport class GenMapping {\n private _names = new SetArray();\n private _sources = new SetArray();\n private _sourcesContent: (string | null)[] = [];\n private _mappings: SourceMapSegment[][] = [];\n declare file: string | null | undefined;\n declare sourceRoot: string | null | undefined;\n\n constructor({ file, sourceRoot }: Options = {}) {\n this.file = file;\n this.sourceRoot = sourceRoot;\n }\n\n static {\n addSegment = (map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) => {\n return addSegmentInternal(\n false,\n map,\n genLine,\n genColumn,\n source,\n sourceLine,\n sourceColumn,\n name,\n content,\n );\n };\n\n maybeAddSegment = (\n map,\n genLine,\n genColumn,\n source,\n sourceLine,\n sourceColumn,\n name,\n content,\n ) => {\n return addSegmentInternal(\n true,\n map,\n genLine,\n genColumn,\n source,\n sourceLine,\n sourceColumn,\n name,\n content,\n );\n };\n\n addMapping = (map, mapping) => {\n return addMappingInternal(false, map, mapping as Parameters[2]);\n };\n\n maybeAddMapping = (map, mapping) => {\n return addMappingInternal(true, map, mapping as Parameters[2]);\n };\n\n setSourceContent = (map, source, content) => {\n const { _sources: sources, _sourcesContent: sourcesContent } = map;\n sourcesContent[put(sources, source)] = content;\n };\n\n toDecodedMap = (map) => {\n const {\n file,\n sourceRoot,\n _mappings: mappings,\n _sources: sources,\n _sourcesContent: sourcesContent,\n _names: names,\n } = map;\n removeEmptyFinalLines(mappings);\n\n return {\n version: 3,\n file: file || undefined,\n names: names.array,\n sourceRoot: sourceRoot || undefined,\n sources: sources.array,\n sourcesContent,\n mappings,\n };\n };\n\n toEncodedMap = (map) => {\n const decoded = toDecodedMap(map);\n return {\n ...decoded,\n mappings: encode(decoded.mappings as SourceMapSegment[][]),\n };\n };\n\n allMappings = (map) => {\n const out: Mapping[] = [];\n const { _mappings: mappings, _sources: sources, _names: names } = map;\n\n for (let i = 0; i < mappings.length; i++) {\n const line = mappings[i];\n for (let j = 0; j < line.length; j++) {\n const seg = line[j];\n\n const generated = { line: i + 1, column: seg[COLUMN] };\n let source: string | undefined = undefined;\n let original: Pos | undefined = undefined;\n let name: string | undefined = undefined;\n\n if (seg.length !== 1) {\n source = sources.array[seg[SOURCES_INDEX]];\n original = { line: seg[SOURCE_LINE] + 1, column: seg[SOURCE_COLUMN] };\n\n if (seg.length === 5) name = names.array[seg[NAMES_INDEX]];\n }\n\n out.push({ generated, source, original, name } as Mapping);\n }\n }\n\n return out;\n };\n\n fromMap = (input) => {\n const map = new TraceMap(input);\n const gen = new GenMapping({ file: map.file, sourceRoot: map.sourceRoot });\n\n putAll(gen._names, map.names);\n putAll(gen._sources, map.sources as string[]);\n gen._sourcesContent = map.sourcesContent || map.sources.map(() => null);\n gen._mappings = decodedMappings(map) as GenMapping['_mappings'];\n\n return gen;\n };\n\n // Internal helpers\n addSegmentInternal = (\n skipable,\n map,\n genLine,\n genColumn,\n source,\n sourceLine,\n sourceColumn,\n name,\n content,\n ) => {\n const {\n _mappings: mappings,\n _sources: sources,\n _sourcesContent: sourcesContent,\n _names: names,\n } = map;\n const line = getLine(mappings, genLine);\n const index = getColumnIndex(line, genColumn);\n\n if (!source) {\n if (skipable && 
skipSourceless(line, index)) return;\n return insert(line, index, [genColumn]);\n }\n\n // Sigh, TypeScript can't figure out sourceLine and sourceColumn aren't nullish if source\n // isn't nullish.\n assert(sourceLine);\n assert(sourceColumn);\n\n const sourcesIndex = put(sources, source);\n const namesIndex = name ? put(names, name) : NO_NAME;\n if (sourcesIndex === sourcesContent.length) sourcesContent[sourcesIndex] = content ?? null;\n\n if (skipable && skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex)) {\n return;\n }\n\n return insert(\n line,\n index,\n name\n ? [genColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex]\n : [genColumn, sourcesIndex, sourceLine, sourceColumn],\n );\n };\n }\n}\n\nfunction assert(_val: unknown): asserts _val is T {\n // noop.\n}\n\nfunction getLine(mappings: SourceMapSegment[][], index: number): SourceMapSegment[] {\n for (let i = mappings.length; i <= index; i++) {\n mappings[i] = [];\n }\n return mappings[index];\n}\n\nfunction getColumnIndex(line: SourceMapSegment[], genColumn: number): number {\n let index = line.length;\n for (let i = index - 1; i >= 0; index = i--) {\n const current = line[i];\n if (genColumn >= current[COLUMN]) break;\n }\n return index;\n}\n\nfunction insert(array: T[], index: number, value: T) {\n for (let i = array.length; i > index; i--) {\n array[i] = array[i - 1];\n }\n array[index] = value;\n}\n\nfunction removeEmptyFinalLines(mappings: SourceMapSegment[][]) {\n const { length } = mappings;\n let len = length;\n for (let i = len - 1; i >= 0; len = i, i--) {\n if (mappings[i].length > 0) break;\n }\n if (len < length) mappings.length = len;\n}\n\nfunction putAll(strarr: SetArray, array: string[]) {\n for (let i = 0; i < array.length; i++) put(strarr, array[i]);\n}\n\nfunction skipSourceless(line: SourceMapSegment[], index: number): boolean {\n // The start of a line is already sourceless, so adding a sourceless segment to the beginning\n // doesn't generate any useful information.\n if (index === 0) return true;\n\n const prev = line[index - 1];\n // If the previous segment is also sourceless, then adding another sourceless segment doesn't\n // genrate any new information. Else, this segment will end the source/named segment and point to\n // a sourceless position, which is useful.\n return prev.length === 1;\n}\n\nfunction skipSource(\n line: SourceMapSegment[],\n index: number,\n sourcesIndex: number,\n sourceLine: number,\n sourceColumn: number,\n namesIndex: number,\n): boolean {\n // A source/named segment at the start of a line gives position at that genColumn\n if (index === 0) return false;\n\n const prev = line[index - 1];\n\n // If the previous segment is sourceless, then we're transitioning to a source.\n if (prev.length === 1) return false;\n\n // If the previous segment maps to the exact same source position, then this segment doesn't\n // provide any new position information.\n return (\n sourcesIndex === prev[SOURCES_INDEX] &&\n sourceLine === prev[SOURCE_LINE] &&\n sourceColumn === prev[SOURCE_COLUMN] &&\n namesIndex === (prev.length === 5 ? prev[NAMES_INDEX] : NO_NAME)\n );\n}\n\nfunction addMappingInternal(\n skipable: boolean,\n map: GenMapping,\n mapping: {\n generated: Pos;\n source: S;\n original: S extends string ? Pos : null | undefined;\n name: S extends string ? string | null | undefined : null | undefined;\n content: S extends string ? 
string | null | undefined : null | undefined;\n },\n) {\n const { generated, source, original, name, content } = mapping;\n if (!source) {\n return addSegmentInternal(\n skipable,\n map,\n generated.line - 1,\n generated.column,\n null,\n null,\n null,\n null,\n null,\n );\n }\n const s: string = source;\n assert(original);\n return addSegmentInternal(\n skipable,\n map,\n generated.line - 1,\n generated.column,\n s,\n original.line - 1,\n original.column,\n name,\n content,\n );\n}\n"],"names":["addSegment","addMapping","maybeAddSegment","maybeAddMapping","setSourceContent","toDecodedMap","toEncodedMap","fromMap","allMappings","SetArray","put","encode","TraceMap","decodedMappings"],"mappings":";;;;;;IAWO,MAAM,MAAM,GAAG,CAAC,CAAC;IACjB,MAAM,aAAa,GAAG,CAAC,CAAC;IACxB,MAAM,WAAW,GAAG,CAAC,CAAC;IACtB,MAAM,aAAa,GAAG,CAAC,CAAC;IACxB,MAAM,WAAW,GAAG,CAAC;;ICQ5B,MAAM,OAAO,GAAG,CAAC,CAAC,CAAC;IAEnB;;;IAGG;AACQA,gCA+BT;IAEF;;;IAGG;AACQC,gCA+BT;IAEF;;;;IAIG;AACQC,qCAAmC;IAE9C;;;;IAIG;AACQC,qCAAmC;IAE9C;;IAEG;AACQC,sCAAoF;IAE/F;;;IAGG;AACQC,kCAAoD;IAE/D;;;IAGG;AACQC,kCAAoD;IAE/D;;IAEG;AACQC,6BAA+C;IAE1D;;;IAGG;AACQC,iCAA4C;IAEvD;IACA,IAAI,kBAUK,CAAC;IAEV;;IAEG;UACU,UAAU,CAAA;IAQrB,IAAA,WAAA,CAAY,EAAE,IAAI,EAAE,UAAU,KAAc,EAAE,EAAA;IAPtC,QAAA,IAAA,CAAA,MAAM,GAAG,IAAIC,iBAAQ,EAAE,CAAC;IACxB,QAAA,IAAA,CAAA,QAAQ,GAAG,IAAIA,iBAAQ,EAAE,CAAC;YAC1B,IAAe,CAAA,eAAA,GAAsB,EAAE,CAAC;YACxC,IAAS,CAAA,SAAA,GAAyB,EAAE,CAAC;IAK3C,QAAA,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC;IACjB,QAAA,IAAI,CAAC,UAAU,GAAG,UAAU,CAAC;SAC9B;IA2KF,CAAA;IAzKC,CAAA,MAAA;IACE,IAAAT,kBAAU,GAAG,CAAC,GAAG,EAAE,OAAO,EAAE,SAAS,EAAE,MAAM,EAAE,UAAU,EAAE,YAAY,EAAE,IAAI,EAAE,OAAO,KAAI;YACxF,OAAO,kBAAkB,CACvB,KAAK,EACL,GAAG,EACH,OAAO,EACP,SAAS,EACT,MAAM,EACN,UAAU,EACV,YAAY,EACZ,IAAI,EACJ,OAAO,CACR,CAAC;IACJ,KAAC,CAAC;IAEF,IAAAE,uBAAe,GAAG,CAChB,GAAG,EACH,OAAO,EACP,SAAS,EACT,MAAM,EACN,UAAU,EACV,YAAY,EACZ,IAAI,EACJ,OAAO,KACL;YACF,OAAO,kBAAkB,CACvB,IAAI,EACJ,GAAG,EACH,OAAO,EACP,SAAS,EACT,MAAM,EACN,UAAU,EACV,YAAY,EACZ,IAAI,EACJ,OAAO,CACR,CAAC;IACJ,KAAC,CAAC;IAEF,IAAAD,kBAAU,GAAG,CAAC,GAAG,EAAE,OAAO,KAAI;YAC5B,OAAO,kBAAkB,CAAC,KAAK,EAAE,GAAG,EAAE,OAAmD,CAAC,CAAC;IAC7F,KAAC,CAAC;IAEF,IAAAE,uBAAe,GAAG,CAAC,GAAG,EAAE,OAAO,KAAI;YACjC,OAAO,kBAAkB,CAAC,IAAI,EAAE,GAAG,EAAE,OAAmD,CAAC,CAAC;IAC5F,KAAC,CAAC;QAEFC,wBAAgB,GAAG,CAAC,GAAG,EAAE,MAAM,EAAE,OAAO,KAAI;YAC1C,MAAM,EAAE,QAAQ,EAAE,OAAO,EAAE,eAAe,EAAE,cAAc,EAAE,GAAG,GAAG,CAAC;YACnE,cAAc,CAACM,YAAG,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC,GAAG,OAAO,CAAC;IACjD,KAAC,CAAC;IAEF,IAAAL,oBAAY,GAAG,CAAC,GAAG,KAAI;YACrB,MAAM,EACJ,IAAI,EACJ,UAAU,EACV,SAAS,EAAE,QAAQ,EACnB,QAAQ,EAAE,OAAO,EACjB,eAAe,EAAE,cAAc,EAC/B,MAAM,EAAE,KAAK,GACd,GAAG,GAAG,CAAC;YACR,qBAAqB,CAAC,QAAQ,CAAC,CAAC;YAEhC,OAAO;IACL,YAAA,OAAO,EAAE,CAAC;gBACV,IAAI,EAAE,IAAI,IAAI,SAAS;gBACvB,KAAK,EAAE,KAAK,CAAC,KAAK;gBAClB,UAAU,EAAE,UAAU,IAAI,SAAS;gBACnC,OAAO,EAAE,OAAO,CAAC,KAAK;gBACtB,cAAc;gBACd,QAAQ;aACT,CAAC;IACJ,KAAC,CAAC;IAEF,IAAAC,oBAAY,GAAG,CAAC,GAAG,KAAI;IACrB,QAAA,MAAM,OAAO,GAAGD,oBAAY,CAAC,GAAG,CAAC,CAAC;YAClC,OACK,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,OAAO,CACV,EAAA,EAAA,QAAQ,EAAEM,qBAAM,CAAC,OAAO,CAAC,QAAgC,CAAC,EAC1D,CAAA,CAAA;IACJ,KAAC,CAAC;IAEF,IAAAH,mBAAW,GAAG,CAAC,GAAG,KAAI;YACpB,MAAM,GAAG,GAAc,EAAE,CAAC;IAC1B,QAAA,MAAM,EAAE,SAAS,EAAE,QAAQ,EAAE,QAAQ,EAAE,OAAO,EAAE,MAAM,EAAE,KAAK,EAAE,GAAG,GAAG,CAAC;IAEtE,QAAA,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,QAAQ,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;IACxC,YAAA,MAAM,IAAI,GAAG,QAAQ,CAAC,CAAC,CAAC,CAAC;IACzB,YAAA,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;IACpC,gBAAA,MAAM,GAAG,GAAG,IAAI,CAAC,CAAC,CA
AC,CAAC;IAEpB,gBAAA,MAAM,SAAS,GAAG,EAAE,IAAI,EAAE,CAAC,GAAG,CAAC,EAAE,MAAM,EAAE,GAAG,CAAC,MAAM,CAAC,EAAE,CAAC;oBACvD,IAAI,MAAM,GAAuB,SAAS,CAAC;oBAC3C,IAAI,QAAQ,GAAoB,SAAS,CAAC;oBAC1C,IAAI,IAAI,GAAuB,SAAS,CAAC;IAEzC,gBAAA,IAAI,GAAG,CAAC,MAAM,KAAK,CAAC,EAAE;wBACpB,MAAM,GAAG,OAAO,CAAC,KAAK,CAAC,GAAG,CAAC,aAAa,CAAC,CAAC,CAAC;IAC3C,oBAAA,QAAQ,GAAG,EAAE,IAAI,EAAE,GAAG,CAAC,WAAW,CAAC,GAAG,CAAC,EAAE,MAAM,EAAE,GAAG,CAAC,aAAa,CAAC,EAAE,CAAC;IAEtE,oBAAA,IAAI,GAAG,CAAC,MAAM,KAAK,CAAC;4BAAE,IAAI,GAAG,KAAK,CAAC,KAAK,CAAC,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC;IAC5D,iBAAA;IAED,gBAAA,GAAG,CAAC,IAAI,CAAC,EAAE,SAAS,EAAE,MAAM,EAAE,QAAQ,EAAE,IAAI,EAAa,CAAC,CAAC;IAC5D,aAAA;IACF,SAAA;IAED,QAAA,OAAO,GAAG,CAAC;IACb,KAAC,CAAC;IAEF,IAAAD,eAAO,GAAG,CAAC,KAAK,KAAI;IAClB,QAAA,MAAM,GAAG,GAAG,IAAIK,qBAAQ,CAAC,KAAK,CAAC,CAAC;IAChC,QAAA,MAAM,GAAG,GAAG,IAAI,UAAU,CAAC,EAAE,IAAI,EAAE,GAAG,CAAC,IAAI,EAAE,UAAU,EAAE,GAAG,CAAC,UAAU,EAAE,CAAC,CAAC;YAE3E,MAAM,CAAC,GAAG,CAAC,MAAM,EAAE,GAAG,CAAC,KAAK,CAAC,CAAC;YAC9B,MAAM,CAAC,GAAG,CAAC,QAAQ,EAAE,GAAG,CAAC,OAAmB,CAAC,CAAC;IAC9C,QAAA,GAAG,CAAC,eAAe,GAAG,GAAG,CAAC,cAAc,IAAI,GAAG,CAAC,OAAO,CAAC,GAAG,CAAC,MAAM,IAAI,CAAC,CAAC;IACxE,QAAA,GAAG,CAAC,SAAS,GAAGC,4BAAe,CAAC,GAAG,CAA4B,CAAC;IAEhE,QAAA,OAAO,GAAG,CAAC;IACb,KAAC,CAAC;;QAGF,kBAAkB,GAAG,CACnB,QAAQ,EACR,GAAG,EACH,OAAO,EACP,SAAS,EACT,MAAM,EACN,UAAU,EACV,YAAY,EACZ,IAAI,EACJ,OAAO,KACL;IACF,QAAA,MAAM,EACJ,SAAS,EAAE,QAAQ,EACnB,QAAQ,EAAE,OAAO,EACjB,eAAe,EAAE,cAAc,EAC/B,MAAM,EAAE,KAAK,GACd,GAAG,GAAG,CAAC;YACR,MAAM,IAAI,GAAG,OAAO,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAC;YACxC,MAAM,KAAK,GAAG,cAAc,CAAC,IAAI,EAAE,SAAS,CAAC,CAAC;YAE9C,IAAI,CAAC,MAAM,EAAE;IACX,YAAA,IAAI,QAAQ,IAAI,cAAc,CAAC,IAAI,EAAE,KAAK,CAAC;oBAAE,OAAO;gBACpD,OAAO,MAAM,CAAC,IAAI,EAAE,KAAK,EAAE,CAAC,SAAS,CAAC,CAAC,CAAC;IACzC,SAAA;YAOD,MAAM,YAAY,GAAGH,YAAG,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;IAC1C,QAAA,MAAM,UAAU,GAAG,IAAI,GAAGA,YAAG,CAAC,KAAK,EAAE,IAAI,CAAC,GAAG,OAAO,CAAC;IACrD,QAAA,IAAI,YAAY,KAAK,cAAc,CAAC,MAAM;gBAAE,cAAc,CAAC,YAAY,CAAC,GAAG,OAAO,KAAP,IAAA,IAAA,OAAO,KAAP,KAAA,CAAA,GAAA,OAAO,GAAI,IAAI,CAAC;IAE3F,QAAA,IAAI,QAAQ,IAAI,UAAU,CAAC,IAAI,EAAE,KAAK,EAAE,YAAY,EAAE,UAAU,EAAE,YAAY,EAAE,UAAU,CAAC,EAAE;gBAC3F,OAAO;IACR,SAAA;IAED,QAAA,OAAO,MAAM,CACX,IAAI,EACJ,KAAK,EACL,IAAI;kBACA,CAAC,SAAS,EAAE,YAAY,EAAE,UAAU,EAAE,YAAY,EAAE,UAAU,CAAC;kBAC/D,CAAC,SAAS,EAAE,YAAY,EAAE,UAAU,EAAE,YAAY,CAAC,CACxD,CAAC;IACJ,KAAC,CAAC;IACJ,CAAC,GAAA,CAAA;IAOH,SAAS,OAAO,CAAC,QAA8B,EAAE,KAAa,EAAA;IAC5D,IAAA,KAAK,IAAI,CAAC,GAAG,QAAQ,CAAC,MAAM,EAAE,CAAC,IAAI,KAAK,EAAE,CAAC,EAAE,EAAE;IAC7C,QAAA,QAAQ,CAAC,CAAC,CAAC,GAAG,EAAE,CAAC;IAClB,KAAA;IACD,IAAA,OAAO,QAAQ,CAAC,KAAK,CAAC,CAAC;IACzB,CAAC;IAED,SAAS,cAAc,CAAC,IAAwB,EAAE,SAAiB,EAAA;IACjE,IAAA,IAAI,KAAK,GAAG,IAAI,CAAC,MAAM,CAAC;IACxB,IAAA,KAAK,IAAI,CAAC,GAAG,KAAK,GAAG,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,KAAK,GAAG,CAAC,EAAE,EAAE;IAC3C,QAAA,MAAM,OAAO,GAAG,IAAI,CAAC,CAAC,CAAC,CAAC;IACxB,QAAA,IAAI,SAAS,IAAI,OAAO,CAAC,MAAM,CAAC;gBAAE,MAAM;IACzC,KAAA;IACD,IAAA,OAAO,KAAK,CAAC;IACf,CAAC;IAED,SAAS,MAAM,CAAI,KAAU,EAAE,KAAa,EAAE,KAAQ,EAAA;IACpD,IAAA,KAAK,IAAI,CAAC,GAAG,KAAK,CAAC,MAAM,EAAE,CAAC,GAAG,KAAK,EAAE,CAAC,EAAE,EAAE;YACzC,KAAK,CAAC,CAAC,CAAC,GAAG,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;IACzB,KAAA;IACD,IAAA,KAAK,CAAC,KAAK,CAAC,GAAG,KAAK,CAAC;IACvB,CAAC;IAED,SAAS,qBAAqB,CAAC,QAA8B,EAAA;IAC3D,IAAA,MAAM,EAAE,MAAM,EAAE,GAAG,QAAQ,CAAC;QAC5B,IAAI,GAAG,GAAG,MAAM,CAAC;IACjB,IAAA,KAAK,IAAI,CAAC,GAAG,GAAG,GAAG,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,GAAG,GAAG,CAAC,EAAE,CAAC,EAAE,EAAE;IAC1C,QAAA,IAAI,QAAQ,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC;gBAAE,MAAM;IACnC,KAAA;QACD,IAAI,GAAG,GAAG,MAAM;IAAE,QAAA,QAAQ,CAAC,MAAM,GAAG,GAAG,CAA
C;IAC1C,CAAC;IAED,SAAS,MAAM,CAAC,MAAgB,EAAE,KAAe,EAAA;IAC/C,IAAA,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,KAAK,CAAC,MAAM,EAAE,CAAC,EAAE;YAAEA,YAAG,CAAC,MAAM,EAAE,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC;IAC/D,CAAC;IAED,SAAS,cAAc,CAAC,IAAwB,EAAE,KAAa,EAAA;;;QAG7D,IAAI,KAAK,KAAK,CAAC;IAAE,QAAA,OAAO,IAAI,CAAC;QAE7B,MAAM,IAAI,GAAG,IAAI,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC;;;;IAI7B,IAAA,OAAO,IAAI,CAAC,MAAM,KAAK,CAAC,CAAC;IAC3B,CAAC;IAED,SAAS,UAAU,CACjB,IAAwB,EACxB,KAAa,EACb,YAAoB,EACpB,UAAkB,EAClB,YAAoB,EACpB,UAAkB,EAAA;;QAGlB,IAAI,KAAK,KAAK,CAAC;IAAE,QAAA,OAAO,KAAK,CAAC;QAE9B,MAAM,IAAI,GAAG,IAAI,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC;;IAG7B,IAAA,IAAI,IAAI,CAAC,MAAM,KAAK,CAAC;IAAE,QAAA,OAAO,KAAK,CAAC;;;IAIpC,IAAA,QACE,YAAY,KAAK,IAAI,CAAC,aAAa,CAAC;IACpC,QAAA,UAAU,KAAK,IAAI,CAAC,WAAW,CAAC;IAChC,QAAA,YAAY,KAAK,IAAI,CAAC,aAAa,CAAC;YACpC,UAAU,MAAM,IAAI,CAAC,MAAM,KAAK,CAAC,GAAG,IAAI,CAAC,WAAW,CAAC,GAAG,OAAO,CAAC,EAChE;IACJ,CAAC;IAED,SAAS,kBAAkB,CACzB,QAAiB,EACjB,GAAe,EACf,OAMC,EAAA;IAED,IAAA,MAAM,EAAE,SAAS,EAAE,MAAM,EAAE,QAAQ,EAAE,IAAI,EAAE,OAAO,EAAE,GAAG,OAAO,CAAC;QAC/D,IAAI,CAAC,MAAM,EAAE;YACX,OAAO,kBAAkB,CACvB,QAAQ,EACR,GAAG,EACH,SAAS,CAAC,IAAI,GAAG,CAAC,EAClB,SAAS,CAAC,MAAM,EAChB,IAAI,EACJ,IAAI,EACJ,IAAI,EACJ,IAAI,EACJ,IAAI,CACL,CAAC;IACH,KAAA;QACD,MAAM,CAAC,GAAW,MAAM,CAAC;IAEzB,IAAA,OAAO,kBAAkB,CACvB,QAAQ,EACR,GAAG,EACH,SAAS,CAAC,IAAI,GAAG,CAAC,EAClB,SAAS,CAAC,MAAM,EAChB,CAAC,EACD,QAAQ,CAAC,IAAI,GAAG,CAAC,EACjB,QAAQ,CAAC,MAAM,EACf,IAAI,EACJ,OAAO,CACR,CAAC;IACJ;;;;;;;;;;"} \ No newline at end of file diff --git a/node_modules/@jridgewell/gen-mapping/dist/types/gen-mapping.d.ts b/node_modules/@jridgewell/gen-mapping/dist/types/gen-mapping.d.ts new file mode 100644 index 0000000..d510d74 --- /dev/null +++ b/node_modules/@jridgewell/gen-mapping/dist/types/gen-mapping.d.ts @@ -0,0 +1,90 @@ +import type { SourceMapInput } from '@jridgewell/trace-mapping'; +import type { DecodedSourceMap, EncodedSourceMap, Pos, Mapping } from './types'; +export type { DecodedSourceMap, EncodedSourceMap, Mapping }; +export declare type Options = { + file?: string | null; + sourceRoot?: string | null; +}; +/** + * A low-level API to associate a generated position with an original source position. Line and + * column here are 0-based, unlike `addMapping`. + */ +export declare let addSegment: { + (map: GenMapping, genLine: number, genColumn: number, source?: null, sourceLine?: null, sourceColumn?: null, name?: null, content?: null): void; + (map: GenMapping, genLine: number, genColumn: number, source: string, sourceLine: number, sourceColumn: number, name?: null, content?: string | null): void; + (map: GenMapping, genLine: number, genColumn: number, source: string, sourceLine: number, sourceColumn: number, name: string, content?: string | null): void; +}; +/** + * A high-level API to associate a generated position with an original source position. Line is + * 1-based, but column is 0-based, due to legacy behavior in `source-map` library. + */ +export declare let addMapping: { + (map: GenMapping, mapping: { + generated: Pos; + source?: null; + original?: null; + name?: null; + content?: null; + }): void; + (map: GenMapping, mapping: { + generated: Pos; + source: string; + original: Pos; + name?: null; + content?: string | null; + }): void; + (map: GenMapping, mapping: { + generated: Pos; + source: string; + original: Pos; + name: string; + content?: string | null; + }): void; +}; +/** + * Same as `addSegment`, but will only add the segment if it generates useful information in the + * resulting map. 
This only works correctly if segments are added **in order**, meaning you should + * not add a segment with a lower generated line/column than one that came before. + */ +export declare let maybeAddSegment: typeof addSegment; +/** + * Same as `addMapping`, but will only add the mapping if it generates useful information in the + * resulting map. This only works correctly if mappings are added **in order**, meaning you should + * not add a mapping with a lower generated line/column than one that came before. + */ +export declare let maybeAddMapping: typeof addMapping; +/** + * Adds/removes the content of the source file to the source map. + */ +export declare let setSourceContent: (map: GenMapping, source: string, content: string | null) => void; +/** + * Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects + * a sourcemap, or to JSON.stringify. + */ +export declare let toDecodedMap: (map: GenMapping) => DecodedSourceMap; +/** + * Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects + * a sourcemap, or to JSON.stringify. + */ +export declare let toEncodedMap: (map: GenMapping) => EncodedSourceMap; +/** + * Constructs a new GenMapping, using the already present mappings of the input. + */ +export declare let fromMap: (input: SourceMapInput) => GenMapping; +/** + * Returns an array of high-level mapping objects for every recorded segment, which could then be + * passed to the `source-map` library. + */ +export declare let allMappings: (map: GenMapping) => Mapping[]; +/** + * Provides the state to generate a sourcemap. + */ +export declare class GenMapping { + private _names; + private _sources; + private _sourcesContent; + private _mappings; + file: string | null | undefined; + sourceRoot: string | null | undefined; + constructor({ file, sourceRoot }?: Options); +} diff --git a/node_modules/@jridgewell/gen-mapping/dist/types/sourcemap-segment.d.ts b/node_modules/@jridgewell/gen-mapping/dist/types/sourcemap-segment.d.ts new file mode 100644 index 0000000..e187ba9 --- /dev/null +++ b/node_modules/@jridgewell/gen-mapping/dist/types/sourcemap-segment.d.ts @@ -0,0 +1,12 @@ +declare type GeneratedColumn = number; +declare type SourcesIndex = number; +declare type SourceLine = number; +declare type SourceColumn = number; +declare type NamesIndex = number; +export declare type SourceMapSegment = [GeneratedColumn] | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn] | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn, NamesIndex]; +export declare const COLUMN = 0; +export declare const SOURCES_INDEX = 1; +export declare const SOURCE_LINE = 2; +export declare const SOURCE_COLUMN = 3; +export declare const NAMES_INDEX = 4; +export {}; diff --git a/node_modules/@jridgewell/gen-mapping/dist/types/types.d.ts b/node_modules/@jridgewell/gen-mapping/dist/types/types.d.ts new file mode 100644 index 0000000..b309c81 --- /dev/null +++ b/node_modules/@jridgewell/gen-mapping/dist/types/types.d.ts @@ -0,0 +1,35 @@ +import type { SourceMapSegment } from './sourcemap-segment'; +export interface SourceMapV3 { + file?: string | null; + names: readonly string[]; + sourceRoot?: string; + sources: readonly (string | null)[]; + sourcesContent?: readonly (string | null)[]; + version: 3; +} +export interface EncodedSourceMap extends SourceMapV3 { + mappings: string; +} +export interface DecodedSourceMap extends SourceMapV3 { + mappings: readonly SourceMapSegment[][]; +} +export interface Pos { + line: number; + column: 
number; +} +export declare type Mapping = { + generated: Pos; + source: undefined; + original: undefined; + name: undefined; +} | { + generated: Pos; + source: string; + original: Pos; + name: string; +} | { + generated: Pos; + source: string; + original: Pos; + name: undefined; +}; diff --git a/node_modules/@jridgewell/gen-mapping/package.json b/node_modules/@jridgewell/gen-mapping/package.json new file mode 100644 index 0000000..69e0ac8 --- /dev/null +++ b/node_modules/@jridgewell/gen-mapping/package.json @@ -0,0 +1,77 @@ +{ + "name": "@jridgewell/gen-mapping", + "version": "0.3.3", + "description": "Generate source maps", + "keywords": [ + "source", + "map" + ], + "author": "Justin Ridgewell ", + "license": "MIT", + "repository": "https://github.com/jridgewell/gen-mapping", + "main": "dist/gen-mapping.umd.js", + "module": "dist/gen-mapping.mjs", + "types": "dist/types/gen-mapping.d.ts", + "exports": { + ".": [ + { + "types": "./dist/types/gen-mapping.d.ts", + "browser": "./dist/gen-mapping.umd.js", + "require": "./dist/gen-mapping.umd.js", + "import": "./dist/gen-mapping.mjs" + }, + "./dist/gen-mapping.umd.js" + ], + "./package.json": "./package.json" + }, + "files": [ + "dist" + ], + "engines": { + "node": ">=6.0.0" + }, + "scripts": { + "benchmark": "run-s build:rollup benchmark:*", + "benchmark:install": "cd benchmark && npm install", + "benchmark:only": "node benchmark/index.mjs", + "prebuild": "rm -rf dist", + "build": "run-s -n build:*", + "build:rollup": "rollup -c rollup.config.js", + "build:ts": "tsc --project tsconfig.build.json", + "lint": "run-s -n lint:*", + "lint:prettier": "npm run test:lint:prettier -- --write", + "lint:ts": "npm run test:lint:ts -- --fix", + "pretest": "run-s build:rollup", + "test": "run-s -n test:lint test:coverage", + "test:debug": "mocha --inspect-brk", + "test:lint": "run-s -n test:lint:*", + "test:lint:prettier": "prettier --check '{src,test}/**/*.ts'", + "test:lint:ts": "eslint '{src,test}/**/*.ts'", + "test:only": "mocha", + "test:coverage": "c8 mocha", + "test:watch": "run-p 'build:rollup -- --watch' 'test:only -- --watch'", + "prepublishOnly": "npm run preversion", + "preversion": "run-s test build" + }, + "devDependencies": { + "@rollup/plugin-typescript": "8.3.2", + "@types/mocha": "9.1.1", + "@types/node": "17.0.29", + "@typescript-eslint/eslint-plugin": "5.21.0", + "@typescript-eslint/parser": "5.21.0", + "benchmark": "2.1.4", + "c8": "7.11.2", + "eslint": "8.14.0", + "eslint-config-prettier": "8.5.0", + "mocha": "9.2.2", + "npm-run-all": "4.1.5", + "prettier": "2.6.2", + "rollup": "2.70.2", + "typescript": "4.6.3" + }, + "dependencies": { + "@jridgewell/set-array": "^1.0.1", + "@jridgewell/sourcemap-codec": "^1.4.10", + "@jridgewell/trace-mapping": "^0.3.9" + } +} diff --git a/node_modules/@jridgewell/resolve-uri/LICENSE b/node_modules/@jridgewell/resolve-uri/LICENSE new file mode 100644 index 0000000..0a81b2a --- /dev/null +++ b/node_modules/@jridgewell/resolve-uri/LICENSE @@ -0,0 +1,19 @@ +Copyright 2019 Justin Ridgewell + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all 
copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. \ No newline at end of file diff --git a/node_modules/@jridgewell/resolve-uri/README.md b/node_modules/@jridgewell/resolve-uri/README.md new file mode 100644 index 0000000..2fe70df --- /dev/null +++ b/node_modules/@jridgewell/resolve-uri/README.md @@ -0,0 +1,40 @@ +# @jridgewell/resolve-uri + +> Resolve a URI relative to an optional base URI + +Resolve any combination of absolute URIs, protocol-realtive URIs, absolute paths, or relative paths. + +## Installation + +```sh +npm install @jridgewell/resolve-uri +``` + +## Usage + +```typescript +function resolve(input: string, base?: string): string; +``` + +```js +import resolve from '@jridgewell/resolve-uri'; + +resolve('foo', 'https://example.com'); // => 'https://example.com/foo' +``` + +| Input | Base | Resolution | Explanation | +|-----------------------|-------------------------|--------------------------------|--------------------------------------------------------------| +| `https://example.com` | _any_ | `https://example.com/` | Input is normalized only | +| `//example.com` | `https://base.com/` | `https://example.com/` | Input inherits the base's protocol | +| `//example.com` | _rest_ | `//example.com/` | Input is normalized only | +| `/example` | `https://base.com/` | `https://base.com/example` | Input inherits the base's origin | +| `/example` | `//base.com/` | `//base.com/example` | Input inherits the base's host and remains protocol relative | +| `/example` | _rest_ | `/example` | Input is normalized only | +| `example` | `https://base.com/dir/` | `https://base.com/dir/example` | Input is joined with the base | +| `example` | `https://base.com/file` | `https://base.com/example` | Input is joined with the base without its file | +| `example` | `//base.com/dir/` | `//base.com/dir/example` | Input is joined with the base's last directory | +| `example` | `//base.com/file` | `//base.com/example` | Input is joined with the base without its file | +| `example` | `/base/dir/` | `/base/dir/example` | Input is joined with the base's last directory | +| `example` | `/base/file` | `/base/example` | Input is joined with the base without its file | +| `example` | `base/dir/` | `base/dir/example` | Input is joined with the base's last directory | +| `example` | `base/file` | `base/example` | Input is joined with the base without its file | diff --git a/node_modules/@jridgewell/resolve-uri/dist/resolve-uri.mjs b/node_modules/@jridgewell/resolve-uri/dist/resolve-uri.mjs new file mode 100644 index 0000000..94d8dce --- /dev/null +++ b/node_modules/@jridgewell/resolve-uri/dist/resolve-uri.mjs @@ -0,0 +1,242 @@ +// Matches the scheme of a URL, eg "http://" +const schemeRegex = /^[\w+.-]+:\/\//; +/** + * Matches the parts of a URL: + * 1. Scheme, including ":", guaranteed. + * 2. User/password, including "@", optional. + * 3. Host, guaranteed. + * 4. Port, including ":", optional. + * 5. Path, including "/", optional. + * 6. Query, including "?", optional. + * 7. Hash, including "#", optional. 
+ */ +const urlRegex = /^([\w+.-]+:)\/\/([^@/#?]*@)?([^:/#?]*)(:\d+)?(\/[^#?]*)?(\?[^#]*)?(#.*)?/; +/** + * File URLs are weird. They dont' need the regular `//` in the scheme, they may or may not start + * with a leading `/`, they can have a domain (but only if they don't start with a Windows drive). + * + * 1. Host, optional. + * 2. Path, which may include "/", guaranteed. + * 3. Query, including "?", optional. + * 4. Hash, including "#", optional. + */ +const fileRegex = /^file:(?:\/\/((?![a-z]:)[^/#?]*)?)?(\/?[^#?]*)(\?[^#]*)?(#.*)?/i; +var UrlType; +(function (UrlType) { + UrlType[UrlType["Empty"] = 1] = "Empty"; + UrlType[UrlType["Hash"] = 2] = "Hash"; + UrlType[UrlType["Query"] = 3] = "Query"; + UrlType[UrlType["RelativePath"] = 4] = "RelativePath"; + UrlType[UrlType["AbsolutePath"] = 5] = "AbsolutePath"; + UrlType[UrlType["SchemeRelative"] = 6] = "SchemeRelative"; + UrlType[UrlType["Absolute"] = 7] = "Absolute"; +})(UrlType || (UrlType = {})); +function isAbsoluteUrl(input) { + return schemeRegex.test(input); +} +function isSchemeRelativeUrl(input) { + return input.startsWith('//'); +} +function isAbsolutePath(input) { + return input.startsWith('/'); +} +function isFileUrl(input) { + return input.startsWith('file:'); +} +function isRelative(input) { + return /^[.?#]/.test(input); +} +function parseAbsoluteUrl(input) { + const match = urlRegex.exec(input); + return makeUrl(match[1], match[2] || '', match[3], match[4] || '', match[5] || '/', match[6] || '', match[7] || ''); +} +function parseFileUrl(input) { + const match = fileRegex.exec(input); + const path = match[2]; + return makeUrl('file:', '', match[1] || '', '', isAbsolutePath(path) ? path : '/' + path, match[3] || '', match[4] || ''); +} +function makeUrl(scheme, user, host, port, path, query, hash) { + return { + scheme, + user, + host, + port, + path, + query, + hash, + type: UrlType.Absolute, + }; +} +function parseUrl(input) { + if (isSchemeRelativeUrl(input)) { + const url = parseAbsoluteUrl('http:' + input); + url.scheme = ''; + url.type = UrlType.SchemeRelative; + return url; + } + if (isAbsolutePath(input)) { + const url = parseAbsoluteUrl('http://foo.com' + input); + url.scheme = ''; + url.host = ''; + url.type = UrlType.AbsolutePath; + return url; + } + if (isFileUrl(input)) + return parseFileUrl(input); + if (isAbsoluteUrl(input)) + return parseAbsoluteUrl(input); + const url = parseAbsoluteUrl('http://foo.com/' + input); + url.scheme = ''; + url.host = ''; + url.type = input + ? input.startsWith('?') + ? UrlType.Query + : input.startsWith('#') + ? UrlType.Hash + : UrlType.RelativePath + : UrlType.Empty; + return url; +} +function stripPathFilename(path) { + // If a path ends with a parent directory "..", then it's a relative path with excess parent + // paths. It's not a file, so we can't strip it. + if (path.endsWith('/..')) + return path; + const index = path.lastIndexOf('/'); + return path.slice(0, index + 1); +} +function mergePaths(url, base) { + normalizePath(base, base.type); + // If the path is just a "/", then it was an empty path to begin with (remember, we're a relative + // path). + if (url.path === '/') { + url.path = base.path; + } + else { + // Resolution happens relative to the base path's directory, not the file. + url.path = stripPathFilename(base.path) + url.path; + } +} +/** + * The path can have empty directories "//", unneeded parents "foo/..", or current directory + * "foo/.". We need to normalize to a standard representation. 
+ */ +function normalizePath(url, type) { + const rel = type <= UrlType.RelativePath; + const pieces = url.path.split('/'); + // We need to preserve the first piece always, so that we output a leading slash. The item at + // pieces[0] is an empty string. + let pointer = 1; + // Positive is the number of real directories we've output, used for popping a parent directory. + // Eg, "foo/bar/.." will have a positive 2, and we can decrement to be left with just "foo". + let positive = 0; + // We need to keep a trailing slash if we encounter an empty directory (eg, splitting "foo/" will + // generate `["foo", ""]` pieces). And, if we pop a parent directory. But once we encounter a + // real directory, we won't need to append, unless the other conditions happen again. + let addTrailingSlash = false; + for (let i = 1; i < pieces.length; i++) { + const piece = pieces[i]; + // An empty directory, could be a trailing slash, or just a double "//" in the path. + if (!piece) { + addTrailingSlash = true; + continue; + } + // If we encounter a real directory, then we don't need to append anymore. + addTrailingSlash = false; + // A current directory, which we can always drop. + if (piece === '.') + continue; + // A parent directory, we need to see if there are any real directories we can pop. Else, we + // have an excess of parents, and we'll need to keep the "..". + if (piece === '..') { + if (positive) { + addTrailingSlash = true; + positive--; + pointer--; + } + else if (rel) { + // If we're in a relativePath, then we need to keep the excess parents. Else, in an absolute + // URL, protocol relative URL, or an absolute path, we don't need to keep excess. + pieces[pointer++] = piece; + } + continue; + } + // We've encountered a real directory. Move it to the next insertion pointer, which accounts for + // any popped or dropped directories. + pieces[pointer++] = piece; + positive++; + } + let path = ''; + for (let i = 1; i < pointer; i++) { + path += '/' + pieces[i]; + } + if (!path || (addTrailingSlash && !path.endsWith('/..'))) { + path += '/'; + } + url.path = path; +} +/** + * Attempts to resolve `input` URL/path relative to `base`. + */ +function resolve(input, base) { + if (!input && !base) + return ''; + const url = parseUrl(input); + let inputType = url.type; + if (base && inputType !== UrlType.Absolute) { + const baseUrl = parseUrl(base); + const baseType = baseUrl.type; + switch (inputType) { + case UrlType.Empty: + url.hash = baseUrl.hash; + // fall through + case UrlType.Hash: + url.query = baseUrl.query; + // fall through + case UrlType.Query: + case UrlType.RelativePath: + mergePaths(url, baseUrl); + // fall through + case UrlType.AbsolutePath: + // The host, user, and port are joined, you can't copy one without the others. + url.user = baseUrl.user; + url.host = baseUrl.host; + url.port = baseUrl.port; + // fall through + case UrlType.SchemeRelative: + // The input doesn't have a schema at least, so we need to copy at least that over. + url.scheme = baseUrl.scheme; + } + if (baseType > inputType) + inputType = baseType; + } + normalizePath(url, inputType); + const queryHash = url.query + url.hash; + switch (inputType) { + // This is impossible, because of the empty checks at the start of the function. + // case UrlType.Empty: + case UrlType.Hash: + case UrlType.Query: + return queryHash; + case UrlType.RelativePath: { + // The first char is always a "/", and we need it to be relative. 
+ const path = url.path.slice(1); + if (!path) + return queryHash || '.'; + if (isRelative(base || input) && !isRelative(path)) { + // If base started with a leading ".", or there is no base and input started with a ".", + // then we need to ensure that the relative path starts with a ".". We don't know if + // relative starts with a "..", though, so check before prepending. + return './' + path + queryHash; + } + return path + queryHash; + } + case UrlType.AbsolutePath: + return url.path + queryHash; + default: + return url.scheme + '//' + url.user + url.host + url.port + url.path + queryHash; + } +} + +export { resolve as default }; +//# sourceMappingURL=resolve-uri.mjs.map diff --git a/node_modules/@jridgewell/resolve-uri/dist/resolve-uri.mjs.map b/node_modules/@jridgewell/resolve-uri/dist/resolve-uri.mjs.map new file mode 100644 index 0000000..009d043 --- /dev/null +++ b/node_modules/@jridgewell/resolve-uri/dist/resolve-uri.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"resolve-uri.mjs","sources":["../src/resolve-uri.ts"],"sourcesContent":["// Matches the scheme of a URL, eg \"http://\"\nconst schemeRegex = /^[\\w+.-]+:\\/\\//;\n\n/**\n * Matches the parts of a URL:\n * 1. Scheme, including \":\", guaranteed.\n * 2. User/password, including \"@\", optional.\n * 3. Host, guaranteed.\n * 4. Port, including \":\", optional.\n * 5. Path, including \"/\", optional.\n * 6. Query, including \"?\", optional.\n * 7. Hash, including \"#\", optional.\n */\nconst urlRegex = /^([\\w+.-]+:)\\/\\/([^@/#?]*@)?([^:/#?]*)(:\\d+)?(\\/[^#?]*)?(\\?[^#]*)?(#.*)?/;\n\n/**\n * File URLs are weird. They dont' need the regular `//` in the scheme, they may or may not start\n * with a leading `/`, they can have a domain (but only if they don't start with a Windows drive).\n *\n * 1. Host, optional.\n * 2. Path, which may include \"/\", guaranteed.\n * 3. Query, including \"?\", optional.\n * 4. Hash, including \"#\", optional.\n */\nconst fileRegex = /^file:(?:\\/\\/((?![a-z]:)[^/#?]*)?)?(\\/?[^#?]*)(\\?[^#]*)?(#.*)?/i;\n\ntype Url = {\n scheme: string;\n user: string;\n host: string;\n port: string;\n path: string;\n query: string;\n hash: string;\n type: UrlType;\n};\n\nenum UrlType {\n Empty = 1,\n Hash = 2,\n Query = 3,\n RelativePath = 4,\n AbsolutePath = 5,\n SchemeRelative = 6,\n Absolute = 7,\n}\n\nfunction isAbsoluteUrl(input: string): boolean {\n return schemeRegex.test(input);\n}\n\nfunction isSchemeRelativeUrl(input: string): boolean {\n return input.startsWith('//');\n}\n\nfunction isAbsolutePath(input: string): boolean {\n return input.startsWith('/');\n}\n\nfunction isFileUrl(input: string): boolean {\n return input.startsWith('file:');\n}\n\nfunction isRelative(input: string): boolean {\n return /^[.?#]/.test(input);\n}\n\nfunction parseAbsoluteUrl(input: string): Url {\n const match = urlRegex.exec(input)!;\n return makeUrl(\n match[1],\n match[2] || '',\n match[3],\n match[4] || '',\n match[5] || '/',\n match[6] || '',\n match[7] || '',\n );\n}\n\nfunction parseFileUrl(input: string): Url {\n const match = fileRegex.exec(input)!;\n const path = match[2];\n return makeUrl(\n 'file:',\n '',\n match[1] || '',\n '',\n isAbsolutePath(path) ? 
path : '/' + path,\n match[3] || '',\n match[4] || '',\n );\n}\n\nfunction makeUrl(\n scheme: string,\n user: string,\n host: string,\n port: string,\n path: string,\n query: string,\n hash: string,\n): Url {\n return {\n scheme,\n user,\n host,\n port,\n path,\n query,\n hash,\n type: UrlType.Absolute,\n };\n}\n\nfunction parseUrl(input: string): Url {\n if (isSchemeRelativeUrl(input)) {\n const url = parseAbsoluteUrl('http:' + input);\n url.scheme = '';\n url.type = UrlType.SchemeRelative;\n return url;\n }\n\n if (isAbsolutePath(input)) {\n const url = parseAbsoluteUrl('http://foo.com' + input);\n url.scheme = '';\n url.host = '';\n url.type = UrlType.AbsolutePath;\n return url;\n }\n\n if (isFileUrl(input)) return parseFileUrl(input);\n\n if (isAbsoluteUrl(input)) return parseAbsoluteUrl(input);\n\n const url = parseAbsoluteUrl('http://foo.com/' + input);\n url.scheme = '';\n url.host = '';\n url.type = input\n ? input.startsWith('?')\n ? UrlType.Query\n : input.startsWith('#')\n ? UrlType.Hash\n : UrlType.RelativePath\n : UrlType.Empty;\n return url;\n}\n\nfunction stripPathFilename(path: string): string {\n // If a path ends with a parent directory \"..\", then it's a relative path with excess parent\n // paths. It's not a file, so we can't strip it.\n if (path.endsWith('/..')) return path;\n const index = path.lastIndexOf('/');\n return path.slice(0, index + 1);\n}\n\nfunction mergePaths(url: Url, base: Url) {\n normalizePath(base, base.type);\n\n // If the path is just a \"/\", then it was an empty path to begin with (remember, we're a relative\n // path).\n if (url.path === '/') {\n url.path = base.path;\n } else {\n // Resolution happens relative to the base path's directory, not the file.\n url.path = stripPathFilename(base.path) + url.path;\n }\n}\n\n/**\n * The path can have empty directories \"//\", unneeded parents \"foo/..\", or current directory\n * \"foo/.\". We need to normalize to a standard representation.\n */\nfunction normalizePath(url: Url, type: UrlType) {\n const rel = type <= UrlType.RelativePath;\n const pieces = url.path.split('/');\n\n // We need to preserve the first piece always, so that we output a leading slash. The item at\n // pieces[0] is an empty string.\n let pointer = 1;\n\n // Positive is the number of real directories we've output, used for popping a parent directory.\n // Eg, \"foo/bar/..\" will have a positive 2, and we can decrement to be left with just \"foo\".\n let positive = 0;\n\n // We need to keep a trailing slash if we encounter an empty directory (eg, splitting \"foo/\" will\n // generate `[\"foo\", \"\"]` pieces). And, if we pop a parent directory. But once we encounter a\n // real directory, we won't need to append, unless the other conditions happen again.\n let addTrailingSlash = false;\n\n for (let i = 1; i < pieces.length; i++) {\n const piece = pieces[i];\n\n // An empty directory, could be a trailing slash, or just a double \"//\" in the path.\n if (!piece) {\n addTrailingSlash = true;\n continue;\n }\n\n // If we encounter a real directory, then we don't need to append anymore.\n addTrailingSlash = false;\n\n // A current directory, which we can always drop.\n if (piece === '.') continue;\n\n // A parent directory, we need to see if there are any real directories we can pop. 
Else, we\n // have an excess of parents, and we'll need to keep the \"..\".\n if (piece === '..') {\n if (positive) {\n addTrailingSlash = true;\n positive--;\n pointer--;\n } else if (rel) {\n // If we're in a relativePath, then we need to keep the excess parents. Else, in an absolute\n // URL, protocol relative URL, or an absolute path, we don't need to keep excess.\n pieces[pointer++] = piece;\n }\n continue;\n }\n\n // We've encountered a real directory. Move it to the next insertion pointer, which accounts for\n // any popped or dropped directories.\n pieces[pointer++] = piece;\n positive++;\n }\n\n let path = '';\n for (let i = 1; i < pointer; i++) {\n path += '/' + pieces[i];\n }\n if (!path || (addTrailingSlash && !path.endsWith('/..'))) {\n path += '/';\n }\n url.path = path;\n}\n\n/**\n * Attempts to resolve `input` URL/path relative to `base`.\n */\nexport default function resolve(input: string, base: string | undefined): string {\n if (!input && !base) return '';\n\n const url = parseUrl(input);\n let inputType = url.type;\n\n if (base && inputType !== UrlType.Absolute) {\n const baseUrl = parseUrl(base);\n const baseType = baseUrl.type;\n\n switch (inputType) {\n case UrlType.Empty:\n url.hash = baseUrl.hash;\n // fall through\n\n case UrlType.Hash:\n url.query = baseUrl.query;\n // fall through\n\n case UrlType.Query:\n case UrlType.RelativePath:\n mergePaths(url, baseUrl);\n // fall through\n\n case UrlType.AbsolutePath:\n // The host, user, and port are joined, you can't copy one without the others.\n url.user = baseUrl.user;\n url.host = baseUrl.host;\n url.port = baseUrl.port;\n // fall through\n\n case UrlType.SchemeRelative:\n // The input doesn't have a schema at least, so we need to copy at least that over.\n url.scheme = baseUrl.scheme;\n }\n if (baseType > inputType) inputType = baseType;\n }\n\n normalizePath(url, inputType);\n\n const queryHash = url.query + url.hash;\n switch (inputType) {\n // This is impossible, because of the empty checks at the start of the function.\n // case UrlType.Empty:\n\n case UrlType.Hash:\n case UrlType.Query:\n return queryHash;\n\n case UrlType.RelativePath: {\n // The first char is always a \"/\", and we need it to be relative.\n const path = url.path.slice(1);\n\n if (!path) return queryHash || '.';\n\n if (isRelative(base || input) && !isRelative(path)) {\n // If base started with a leading \".\", or there is no base and input started with a \".\",\n // then we need to ensure that the relative path starts with a \".\". 
We don't know if\n // relative starts with a \"..\", though, so check before prepending.\n return './' + path + queryHash;\n }\n\n return path + queryHash;\n }\n\n case UrlType.AbsolutePath:\n return url.path + queryHash;\n\n default:\n return url.scheme + '//' + url.user + url.host + url.port + url.path + queryHash;\n }\n}\n"],"names":[],"mappings":"AAAA;AACA,MAAM,WAAW,GAAG,gBAAgB,CAAC;AAErC;;;;;;;;;;AAUA,MAAM,QAAQ,GAAG,0EAA0E,CAAC;AAE5F;;;;;;;;;AASA,MAAM,SAAS,GAAG,iEAAiE,CAAC;AAapF,IAAK,OAQJ;AARD,WAAK,OAAO;IACV,uCAAS,CAAA;IACT,qCAAQ,CAAA;IACR,uCAAS,CAAA;IACT,qDAAgB,CAAA;IAChB,qDAAgB,CAAA;IAChB,yDAAkB,CAAA;IAClB,6CAAY,CAAA;AACd,CAAC,EARI,OAAO,KAAP,OAAO,QAQX;AAED,SAAS,aAAa,CAAC,KAAa;IAClC,OAAO,WAAW,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;AACjC,CAAC;AAED,SAAS,mBAAmB,CAAC,KAAa;IACxC,OAAO,KAAK,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC;AAChC,CAAC;AAED,SAAS,cAAc,CAAC,KAAa;IACnC,OAAO,KAAK,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC;AAC/B,CAAC;AAED,SAAS,SAAS,CAAC,KAAa;IAC9B,OAAO,KAAK,CAAC,UAAU,CAAC,OAAO,CAAC,CAAC;AACnC,CAAC;AAED,SAAS,UAAU,CAAC,KAAa;IAC/B,OAAO,QAAQ,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;AAC9B,CAAC;AAED,SAAS,gBAAgB,CAAC,KAAa;IACrC,MAAM,KAAK,GAAG,QAAQ,CAAC,IAAI,CAAC,KAAK,CAAE,CAAC;IACpC,OAAO,OAAO,CACZ,KAAK,CAAC,CAAC,CAAC,EACR,KAAK,CAAC,CAAC,CAAC,IAAI,EAAE,EACd,KAAK,CAAC,CAAC,CAAC,EACR,KAAK,CAAC,CAAC,CAAC,IAAI,EAAE,EACd,KAAK,CAAC,CAAC,CAAC,IAAI,GAAG,EACf,KAAK,CAAC,CAAC,CAAC,IAAI,EAAE,EACd,KAAK,CAAC,CAAC,CAAC,IAAI,EAAE,CACf,CAAC;AACJ,CAAC;AAED,SAAS,YAAY,CAAC,KAAa;IACjC,MAAM,KAAK,GAAG,SAAS,CAAC,IAAI,CAAC,KAAK,CAAE,CAAC;IACrC,MAAM,IAAI,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;IACtB,OAAO,OAAO,CACZ,OAAO,EACP,EAAE,EACF,KAAK,CAAC,CAAC,CAAC,IAAI,EAAE,EACd,EAAE,EACF,cAAc,CAAC,IAAI,CAAC,GAAG,IAAI,GAAG,GAAG,GAAG,IAAI,EACxC,KAAK,CAAC,CAAC,CAAC,IAAI,EAAE,EACd,KAAK,CAAC,CAAC,CAAC,IAAI,EAAE,CACf,CAAC;AACJ,CAAC;AAED,SAAS,OAAO,CACd,MAAc,EACd,IAAY,EACZ,IAAY,EACZ,IAAY,EACZ,IAAY,EACZ,KAAa,EACb,IAAY;IAEZ,OAAO;QACL,MAAM;QACN,IAAI;QACJ,IAAI;QACJ,IAAI;QACJ,IAAI;QACJ,KAAK;QACL,IAAI;QACJ,IAAI,EAAE,OAAO,CAAC,QAAQ;KACvB,CAAC;AACJ,CAAC;AAED,SAAS,QAAQ,CAAC,KAAa;IAC7B,IAAI,mBAAmB,CAAC,KAAK,CAAC,EAAE;QAC9B,MAAM,GAAG,GAAG,gBAAgB,CAAC,OAAO,GAAG,KAAK,CAAC,CAAC;QAC9C,GAAG,CAAC,MAAM,GAAG,EAAE,CAAC;QAChB,GAAG,CAAC,IAAI,GAAG,OAAO,CAAC,cAAc,CAAC;QAClC,OAAO,GAAG,CAAC;KACZ;IAED,IAAI,cAAc,CAAC,KAAK,CAAC,EAAE;QACzB,MAAM,GAAG,GAAG,gBAAgB,CAAC,gBAAgB,GAAG,KAAK,CAAC,CAAC;QACvD,GAAG,CAAC,MAAM,GAAG,EAAE,CAAC;QAChB,GAAG,CAAC,IAAI,GAAG,EAAE,CAAC;QACd,GAAG,CAAC,IAAI,GAAG,OAAO,CAAC,YAAY,CAAC;QAChC,OAAO,GAAG,CAAC;KACZ;IAED,IAAI,SAAS,CAAC,KAAK,CAAC;QAAE,OAAO,YAAY,CAAC,KAAK,CAAC,CAAC;IAEjD,IAAI,aAAa,CAAC,KAAK,CAAC;QAAE,OAAO,gBAAgB,CAAC,KAAK,CAAC,CAAC;IAEzD,MAAM,GAAG,GAAG,gBAAgB,CAAC,iBAAiB,GAAG,KAAK,CAAC,CAAC;IACxD,GAAG,CAAC,MAAM,GAAG,EAAE,CAAC;IAChB,GAAG,CAAC,IAAI,GAAG,EAAE,CAAC;IACd,GAAG,CAAC,IAAI,GAAG,KAAK;UACZ,KAAK,CAAC,UAAU,CAAC,GAAG,CAAC;cACnB,OAAO,CAAC,KAAK;cACb,KAAK,CAAC,UAAU,CAAC,GAAG,CAAC;kBACrB,OAAO,CAAC,IAAI;kBACZ,OAAO,CAAC,YAAY;UACtB,OAAO,CAAC,KAAK,CAAC;IAClB,OAAO,GAAG,CAAC;AACb,CAAC;AAED,SAAS,iBAAiB,CAAC,IAAY;;;IAGrC,IAAI,IAAI,CAAC,QAAQ,CAAC,KAAK,CAAC;QAAE,OAAO,IAAI,CAAC;IACtC,MAAM,KAAK,GAAG,IAAI,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC;IACpC,OAAO,IAAI,CAAC,KAAK,CAAC,CAAC,EAAE,KAAK,GAAG,CAAC,CAAC,CAAC;AAClC,CAAC;AAED,SAAS,UAAU,CAAC,GAAQ,EAAE,IAAS;IACrC,aAAa,CAAC,IAAI,EAAE,IAAI,CAAC,IAAI,CAAC,CAAC;;;IAI/B,IAAI,GAAG,CAAC,IAAI,KAAK,GAAG,EAAE;QACpB,GAAG,CAAC,IAAI,GAAG,IAAI,CAAC,IAAI,CAAC;KACtB;SAAM;;QAEL,GAAG,CAAC,IAAI,GAAG,iBAAiB,CAAC,IAAI,CAAC,IAAI,CAAC,GAAG,GAAG,CAAC,IAAI,CAAC;KACpD;AACH,CAAC;AAED;;;;AAIA,SAAS,aAAa,CAAC,GAAQ,EAAE,IAAa;IAC5C,MAAM,GAAG,GAAG,IAAI,IAAI,OAAO,CAAC,YAAY,CAAC;IACzC,M
AAM,MAAM,GAAG,GAAG,CAAC,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;;;IAInC,IAAI,OAAO,GAAG,CAAC,CAAC;;;IAIhB,IAAI,QAAQ,GAAG,CAAC,CAAC;;;;IAKjB,IAAI,gBAAgB,GAAG,KAAK,CAAC;IAE7B,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;QACtC,MAAM,KAAK,GAAG,MAAM,CAAC,CAAC,CAAC,CAAC;;QAGxB,IAAI,CAAC,KAAK,EAAE;YACV,gBAAgB,GAAG,IAAI,CAAC;YACxB,SAAS;SACV;;QAGD,gBAAgB,GAAG,KAAK,CAAC;;QAGzB,IAAI,KAAK,KAAK,GAAG;YAAE,SAAS;;;QAI5B,IAAI,KAAK,KAAK,IAAI,EAAE;YAClB,IAAI,QAAQ,EAAE;gBACZ,gBAAgB,GAAG,IAAI,CAAC;gBACxB,QAAQ,EAAE,CAAC;gBACX,OAAO,EAAE,CAAC;aACX;iBAAM,IAAI,GAAG,EAAE;;;gBAGd,MAAM,CAAC,OAAO,EAAE,CAAC,GAAG,KAAK,CAAC;aAC3B;YACD,SAAS;SACV;;;QAID,MAAM,CAAC,OAAO,EAAE,CAAC,GAAG,KAAK,CAAC;QAC1B,QAAQ,EAAE,CAAC;KACZ;IAED,IAAI,IAAI,GAAG,EAAE,CAAC;IACd,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,OAAO,EAAE,CAAC,EAAE,EAAE;QAChC,IAAI,IAAI,GAAG,GAAG,MAAM,CAAC,CAAC,CAAC,CAAC;KACzB;IACD,IAAI,CAAC,IAAI,KAAK,gBAAgB,IAAI,CAAC,IAAI,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,EAAE;QACxD,IAAI,IAAI,GAAG,CAAC;KACb;IACD,GAAG,CAAC,IAAI,GAAG,IAAI,CAAC;AAClB,CAAC;AAED;;;SAGwB,OAAO,CAAC,KAAa,EAAE,IAAwB;IACrE,IAAI,CAAC,KAAK,IAAI,CAAC,IAAI;QAAE,OAAO,EAAE,CAAC;IAE/B,MAAM,GAAG,GAAG,QAAQ,CAAC,KAAK,CAAC,CAAC;IAC5B,IAAI,SAAS,GAAG,GAAG,CAAC,IAAI,CAAC;IAEzB,IAAI,IAAI,IAAI,SAAS,KAAK,OAAO,CAAC,QAAQ,EAAE;QAC1C,MAAM,OAAO,GAAG,QAAQ,CAAC,IAAI,CAAC,CAAC;QAC/B,MAAM,QAAQ,GAAG,OAAO,CAAC,IAAI,CAAC;QAE9B,QAAQ,SAAS;YACf,KAAK,OAAO,CAAC,KAAK;gBAChB,GAAG,CAAC,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC;;YAG1B,KAAK,OAAO,CAAC,IAAI;gBACf,GAAG,CAAC,KAAK,GAAG,OAAO,CAAC,KAAK,CAAC;;YAG5B,KAAK,OAAO,CAAC,KAAK,CAAC;YACnB,KAAK,OAAO,CAAC,YAAY;gBACvB,UAAU,CAAC,GAAG,EAAE,OAAO,CAAC,CAAC;;YAG3B,KAAK,OAAO,CAAC,YAAY;;gBAEvB,GAAG,CAAC,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC;gBACxB,GAAG,CAAC,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC;gBACxB,GAAG,CAAC,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC;;YAG1B,KAAK,OAAO,CAAC,cAAc;;gBAEzB,GAAG,CAAC,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC;SAC/B;QACD,IAAI,QAAQ,GAAG,SAAS;YAAE,SAAS,GAAG,QAAQ,CAAC;KAChD;IAED,aAAa,CAAC,GAAG,EAAE,SAAS,CAAC,CAAC;IAE9B,MAAM,SAAS,GAAG,GAAG,CAAC,KAAK,GAAG,GAAG,CAAC,IAAI,CAAC;IACvC,QAAQ,SAAS;;;QAIf,KAAK,OAAO,CAAC,IAAI,CAAC;QAClB,KAAK,OAAO,CAAC,KAAK;YAChB,OAAO,SAAS,CAAC;QAEnB,KAAK,OAAO,CAAC,YAAY,EAAE;;YAEzB,MAAM,IAAI,GAAG,GAAG,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;YAE/B,IAAI,CAAC,IAAI;gBAAE,OAAO,SAAS,IAAI,GAAG,CAAC;YAEnC,IAAI,UAAU,CAAC,IAAI,IAAI,KAAK,CAAC,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,EAAE;;;;gBAIlD,OAAO,IAAI,GAAG,IAAI,GAAG,SAAS,CAAC;aAChC;YAED,OAAO,IAAI,GAAG,SAAS,CAAC;SACzB;QAED,KAAK,OAAO,CAAC,YAAY;YACvB,OAAO,GAAG,CAAC,IAAI,GAAG,SAAS,CAAC;QAE9B;YACE,OAAO,GAAG,CAAC,MAAM,GAAG,IAAI,GAAG,GAAG,CAAC,IAAI,GAAG,GAAG,CAAC,IAAI,GAAG,GAAG,CAAC,IAAI,GAAG,GAAG,CAAC,IAAI,GAAG,SAAS,CAAC;KACpF;AACH;;;;"} \ No newline at end of file diff --git a/node_modules/@jridgewell/resolve-uri/dist/resolve-uri.umd.js b/node_modules/@jridgewell/resolve-uri/dist/resolve-uri.umd.js new file mode 100644 index 0000000..0700a2d --- /dev/null +++ b/node_modules/@jridgewell/resolve-uri/dist/resolve-uri.umd.js @@ -0,0 +1,250 @@ +(function (global, factory) { + typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory() : + typeof define === 'function' && define.amd ? define(factory) : + (global = typeof globalThis !== 'undefined' ? globalThis : global || self, global.resolveURI = factory()); +})(this, (function () { 'use strict'; + + // Matches the scheme of a URL, eg "http://" + const schemeRegex = /^[\w+.-]+:\/\//; + /** + * Matches the parts of a URL: + * 1. Scheme, including ":", guaranteed. + * 2. User/password, including "@", optional. + * 3. Host, guaranteed. + * 4. 
Port, including ":", optional. + * 5. Path, including "/", optional. + * 6. Query, including "?", optional. + * 7. Hash, including "#", optional. + */ + const urlRegex = /^([\w+.-]+:)\/\/([^@/#?]*@)?([^:/#?]*)(:\d+)?(\/[^#?]*)?(\?[^#]*)?(#.*)?/; + /** + * File URLs are weird. They dont' need the regular `//` in the scheme, they may or may not start + * with a leading `/`, they can have a domain (but only if they don't start with a Windows drive). + * + * 1. Host, optional. + * 2. Path, which may include "/", guaranteed. + * 3. Query, including "?", optional. + * 4. Hash, including "#", optional. + */ + const fileRegex = /^file:(?:\/\/((?![a-z]:)[^/#?]*)?)?(\/?[^#?]*)(\?[^#]*)?(#.*)?/i; + var UrlType; + (function (UrlType) { + UrlType[UrlType["Empty"] = 1] = "Empty"; + UrlType[UrlType["Hash"] = 2] = "Hash"; + UrlType[UrlType["Query"] = 3] = "Query"; + UrlType[UrlType["RelativePath"] = 4] = "RelativePath"; + UrlType[UrlType["AbsolutePath"] = 5] = "AbsolutePath"; + UrlType[UrlType["SchemeRelative"] = 6] = "SchemeRelative"; + UrlType[UrlType["Absolute"] = 7] = "Absolute"; + })(UrlType || (UrlType = {})); + function isAbsoluteUrl(input) { + return schemeRegex.test(input); + } + function isSchemeRelativeUrl(input) { + return input.startsWith('//'); + } + function isAbsolutePath(input) { + return input.startsWith('/'); + } + function isFileUrl(input) { + return input.startsWith('file:'); + } + function isRelative(input) { + return /^[.?#]/.test(input); + } + function parseAbsoluteUrl(input) { + const match = urlRegex.exec(input); + return makeUrl(match[1], match[2] || '', match[3], match[4] || '', match[5] || '/', match[6] || '', match[7] || ''); + } + function parseFileUrl(input) { + const match = fileRegex.exec(input); + const path = match[2]; + return makeUrl('file:', '', match[1] || '', '', isAbsolutePath(path) ? path : '/' + path, match[3] || '', match[4] || ''); + } + function makeUrl(scheme, user, host, port, path, query, hash) { + return { + scheme, + user, + host, + port, + path, + query, + hash, + type: UrlType.Absolute, + }; + } + function parseUrl(input) { + if (isSchemeRelativeUrl(input)) { + const url = parseAbsoluteUrl('http:' + input); + url.scheme = ''; + url.type = UrlType.SchemeRelative; + return url; + } + if (isAbsolutePath(input)) { + const url = parseAbsoluteUrl('http://foo.com' + input); + url.scheme = ''; + url.host = ''; + url.type = UrlType.AbsolutePath; + return url; + } + if (isFileUrl(input)) + return parseFileUrl(input); + if (isAbsoluteUrl(input)) + return parseAbsoluteUrl(input); + const url = parseAbsoluteUrl('http://foo.com/' + input); + url.scheme = ''; + url.host = ''; + url.type = input + ? input.startsWith('?') + ? UrlType.Query + : input.startsWith('#') + ? UrlType.Hash + : UrlType.RelativePath + : UrlType.Empty; + return url; + } + function stripPathFilename(path) { + // If a path ends with a parent directory "..", then it's a relative path with excess parent + // paths. It's not a file, so we can't strip it. + if (path.endsWith('/..')) + return path; + const index = path.lastIndexOf('/'); + return path.slice(0, index + 1); + } + function mergePaths(url, base) { + normalizePath(base, base.type); + // If the path is just a "/", then it was an empty path to begin with (remember, we're a relative + // path). + if (url.path === '/') { + url.path = base.path; + } + else { + // Resolution happens relative to the base path's directory, not the file. 
+ url.path = stripPathFilename(base.path) + url.path; + } + } + /** + * The path can have empty directories "//", unneeded parents "foo/..", or current directory + * "foo/.". We need to normalize to a standard representation. + */ + function normalizePath(url, type) { + const rel = type <= UrlType.RelativePath; + const pieces = url.path.split('/'); + // We need to preserve the first piece always, so that we output a leading slash. The item at + // pieces[0] is an empty string. + let pointer = 1; + // Positive is the number of real directories we've output, used for popping a parent directory. + // Eg, "foo/bar/.." will have a positive 2, and we can decrement to be left with just "foo". + let positive = 0; + // We need to keep a trailing slash if we encounter an empty directory (eg, splitting "foo/" will + // generate `["foo", ""]` pieces). And, if we pop a parent directory. But once we encounter a + // real directory, we won't need to append, unless the other conditions happen again. + let addTrailingSlash = false; + for (let i = 1; i < pieces.length; i++) { + const piece = pieces[i]; + // An empty directory, could be a trailing slash, or just a double "//" in the path. + if (!piece) { + addTrailingSlash = true; + continue; + } + // If we encounter a real directory, then we don't need to append anymore. + addTrailingSlash = false; + // A current directory, which we can always drop. + if (piece === '.') + continue; + // A parent directory, we need to see if there are any real directories we can pop. Else, we + // have an excess of parents, and we'll need to keep the "..". + if (piece === '..') { + if (positive) { + addTrailingSlash = true; + positive--; + pointer--; + } + else if (rel) { + // If we're in a relativePath, then we need to keep the excess parents. Else, in an absolute + // URL, protocol relative URL, or an absolute path, we don't need to keep excess. + pieces[pointer++] = piece; + } + continue; + } + // We've encountered a real directory. Move it to the next insertion pointer, which accounts for + // any popped or dropped directories. + pieces[pointer++] = piece; + positive++; + } + let path = ''; + for (let i = 1; i < pointer; i++) { + path += '/' + pieces[i]; + } + if (!path || (addTrailingSlash && !path.endsWith('/..'))) { + path += '/'; + } + url.path = path; + } + /** + * Attempts to resolve `input` URL/path relative to `base`. + */ + function resolve(input, base) { + if (!input && !base) + return ''; + const url = parseUrl(input); + let inputType = url.type; + if (base && inputType !== UrlType.Absolute) { + const baseUrl = parseUrl(base); + const baseType = baseUrl.type; + switch (inputType) { + case UrlType.Empty: + url.hash = baseUrl.hash; + // fall through + case UrlType.Hash: + url.query = baseUrl.query; + // fall through + case UrlType.Query: + case UrlType.RelativePath: + mergePaths(url, baseUrl); + // fall through + case UrlType.AbsolutePath: + // The host, user, and port are joined, you can't copy one without the others. + url.user = baseUrl.user; + url.host = baseUrl.host; + url.port = baseUrl.port; + // fall through + case UrlType.SchemeRelative: + // The input doesn't have a schema at least, so we need to copy at least that over. + url.scheme = baseUrl.scheme; + } + if (baseType > inputType) + inputType = baseType; + } + normalizePath(url, inputType); + const queryHash = url.query + url.hash; + switch (inputType) { + // This is impossible, because of the empty checks at the start of the function. 
+ // case UrlType.Empty: + case UrlType.Hash: + case UrlType.Query: + return queryHash; + case UrlType.RelativePath: { + // The first char is always a "/", and we need it to be relative. + const path = url.path.slice(1); + if (!path) + return queryHash || '.'; + if (isRelative(base || input) && !isRelative(path)) { + // If base started with a leading ".", or there is no base and input started with a ".", + // then we need to ensure that the relative path starts with a ".". We don't know if + // relative starts with a "..", though, so check before prepending. + return './' + path + queryHash; + } + return path + queryHash; + } + case UrlType.AbsolutePath: + return url.path + queryHash; + default: + return url.scheme + '//' + url.user + url.host + url.port + url.path + queryHash; + } + } + + return resolve; + +})); +//# sourceMappingURL=resolve-uri.umd.js.map diff --git a/node_modules/@jridgewell/resolve-uri/dist/resolve-uri.umd.js.map b/node_modules/@jridgewell/resolve-uri/dist/resolve-uri.umd.js.map new file mode 100644 index 0000000..a3e39eb --- /dev/null +++ b/node_modules/@jridgewell/resolve-uri/dist/resolve-uri.umd.js.map @@ -0,0 +1 @@ +{"version":3,"file":"resolve-uri.umd.js","sources":["../src/resolve-uri.ts"],"sourcesContent":["// Matches the scheme of a URL, eg \"http://\"\nconst schemeRegex = /^[\\w+.-]+:\\/\\//;\n\n/**\n * Matches the parts of a URL:\n * 1. Scheme, including \":\", guaranteed.\n * 2. User/password, including \"@\", optional.\n * 3. Host, guaranteed.\n * 4. Port, including \":\", optional.\n * 5. Path, including \"/\", optional.\n * 6. Query, including \"?\", optional.\n * 7. Hash, including \"#\", optional.\n */\nconst urlRegex = /^([\\w+.-]+:)\\/\\/([^@/#?]*@)?([^:/#?]*)(:\\d+)?(\\/[^#?]*)?(\\?[^#]*)?(#.*)?/;\n\n/**\n * File URLs are weird. They dont' need the regular `//` in the scheme, they may or may not start\n * with a leading `/`, they can have a domain (but only if they don't start with a Windows drive).\n *\n * 1. Host, optional.\n * 2. Path, which may include \"/\", guaranteed.\n * 3. Query, including \"?\", optional.\n * 4. Hash, including \"#\", optional.\n */\nconst fileRegex = /^file:(?:\\/\\/((?![a-z]:)[^/#?]*)?)?(\\/?[^#?]*)(\\?[^#]*)?(#.*)?/i;\n\ntype Url = {\n scheme: string;\n user: string;\n host: string;\n port: string;\n path: string;\n query: string;\n hash: string;\n type: UrlType;\n};\n\nenum UrlType {\n Empty = 1,\n Hash = 2,\n Query = 3,\n RelativePath = 4,\n AbsolutePath = 5,\n SchemeRelative = 6,\n Absolute = 7,\n}\n\nfunction isAbsoluteUrl(input: string): boolean {\n return schemeRegex.test(input);\n}\n\nfunction isSchemeRelativeUrl(input: string): boolean {\n return input.startsWith('//');\n}\n\nfunction isAbsolutePath(input: string): boolean {\n return input.startsWith('/');\n}\n\nfunction isFileUrl(input: string): boolean {\n return input.startsWith('file:');\n}\n\nfunction isRelative(input: string): boolean {\n return /^[.?#]/.test(input);\n}\n\nfunction parseAbsoluteUrl(input: string): Url {\n const match = urlRegex.exec(input)!;\n return makeUrl(\n match[1],\n match[2] || '',\n match[3],\n match[4] || '',\n match[5] || '/',\n match[6] || '',\n match[7] || '',\n );\n}\n\nfunction parseFileUrl(input: string): Url {\n const match = fileRegex.exec(input)!;\n const path = match[2];\n return makeUrl(\n 'file:',\n '',\n match[1] || '',\n '',\n isAbsolutePath(path) ? 
path : '/' + path,\n match[3] || '',\n match[4] || '',\n );\n}\n\nfunction makeUrl(\n scheme: string,\n user: string,\n host: string,\n port: string,\n path: string,\n query: string,\n hash: string,\n): Url {\n return {\n scheme,\n user,\n host,\n port,\n path,\n query,\n hash,\n type: UrlType.Absolute,\n };\n}\n\nfunction parseUrl(input: string): Url {\n if (isSchemeRelativeUrl(input)) {\n const url = parseAbsoluteUrl('http:' + input);\n url.scheme = '';\n url.type = UrlType.SchemeRelative;\n return url;\n }\n\n if (isAbsolutePath(input)) {\n const url = parseAbsoluteUrl('http://foo.com' + input);\n url.scheme = '';\n url.host = '';\n url.type = UrlType.AbsolutePath;\n return url;\n }\n\n if (isFileUrl(input)) return parseFileUrl(input);\n\n if (isAbsoluteUrl(input)) return parseAbsoluteUrl(input);\n\n const url = parseAbsoluteUrl('http://foo.com/' + input);\n url.scheme = '';\n url.host = '';\n url.type = input\n ? input.startsWith('?')\n ? UrlType.Query\n : input.startsWith('#')\n ? UrlType.Hash\n : UrlType.RelativePath\n : UrlType.Empty;\n return url;\n}\n\nfunction stripPathFilename(path: string): string {\n // If a path ends with a parent directory \"..\", then it's a relative path with excess parent\n // paths. It's not a file, so we can't strip it.\n if (path.endsWith('/..')) return path;\n const index = path.lastIndexOf('/');\n return path.slice(0, index + 1);\n}\n\nfunction mergePaths(url: Url, base: Url) {\n normalizePath(base, base.type);\n\n // If the path is just a \"/\", then it was an empty path to begin with (remember, we're a relative\n // path).\n if (url.path === '/') {\n url.path = base.path;\n } else {\n // Resolution happens relative to the base path's directory, not the file.\n url.path = stripPathFilename(base.path) + url.path;\n }\n}\n\n/**\n * The path can have empty directories \"//\", unneeded parents \"foo/..\", or current directory\n * \"foo/.\". We need to normalize to a standard representation.\n */\nfunction normalizePath(url: Url, type: UrlType) {\n const rel = type <= UrlType.RelativePath;\n const pieces = url.path.split('/');\n\n // We need to preserve the first piece always, so that we output a leading slash. The item at\n // pieces[0] is an empty string.\n let pointer = 1;\n\n // Positive is the number of real directories we've output, used for popping a parent directory.\n // Eg, \"foo/bar/..\" will have a positive 2, and we can decrement to be left with just \"foo\".\n let positive = 0;\n\n // We need to keep a trailing slash if we encounter an empty directory (eg, splitting \"foo/\" will\n // generate `[\"foo\", \"\"]` pieces). And, if we pop a parent directory. But once we encounter a\n // real directory, we won't need to append, unless the other conditions happen again.\n let addTrailingSlash = false;\n\n for (let i = 1; i < pieces.length; i++) {\n const piece = pieces[i];\n\n // An empty directory, could be a trailing slash, or just a double \"//\" in the path.\n if (!piece) {\n addTrailingSlash = true;\n continue;\n }\n\n // If we encounter a real directory, then we don't need to append anymore.\n addTrailingSlash = false;\n\n // A current directory, which we can always drop.\n if (piece === '.') continue;\n\n // A parent directory, we need to see if there are any real directories we can pop. 
Else, we\n // have an excess of parents, and we'll need to keep the \"..\".\n if (piece === '..') {\n if (positive) {\n addTrailingSlash = true;\n positive--;\n pointer--;\n } else if (rel) {\n // If we're in a relativePath, then we need to keep the excess parents. Else, in an absolute\n // URL, protocol relative URL, or an absolute path, we don't need to keep excess.\n pieces[pointer++] = piece;\n }\n continue;\n }\n\n // We've encountered a real directory. Move it to the next insertion pointer, which accounts for\n // any popped or dropped directories.\n pieces[pointer++] = piece;\n positive++;\n }\n\n let path = '';\n for (let i = 1; i < pointer; i++) {\n path += '/' + pieces[i];\n }\n if (!path || (addTrailingSlash && !path.endsWith('/..'))) {\n path += '/';\n }\n url.path = path;\n}\n\n/**\n * Attempts to resolve `input` URL/path relative to `base`.\n */\nexport default function resolve(input: string, base: string | undefined): string {\n if (!input && !base) return '';\n\n const url = parseUrl(input);\n let inputType = url.type;\n\n if (base && inputType !== UrlType.Absolute) {\n const baseUrl = parseUrl(base);\n const baseType = baseUrl.type;\n\n switch (inputType) {\n case UrlType.Empty:\n url.hash = baseUrl.hash;\n // fall through\n\n case UrlType.Hash:\n url.query = baseUrl.query;\n // fall through\n\n case UrlType.Query:\n case UrlType.RelativePath:\n mergePaths(url, baseUrl);\n // fall through\n\n case UrlType.AbsolutePath:\n // The host, user, and port are joined, you can't copy one without the others.\n url.user = baseUrl.user;\n url.host = baseUrl.host;\n url.port = baseUrl.port;\n // fall through\n\n case UrlType.SchemeRelative:\n // The input doesn't have a schema at least, so we need to copy at least that over.\n url.scheme = baseUrl.scheme;\n }\n if (baseType > inputType) inputType = baseType;\n }\n\n normalizePath(url, inputType);\n\n const queryHash = url.query + url.hash;\n switch (inputType) {\n // This is impossible, because of the empty checks at the start of the function.\n // case UrlType.Empty:\n\n case UrlType.Hash:\n case UrlType.Query:\n return queryHash;\n\n case UrlType.RelativePath: {\n // The first char is always a \"/\", and we need it to be relative.\n const path = url.path.slice(1);\n\n if (!path) return queryHash || '.';\n\n if (isRelative(base || input) && !isRelative(path)) {\n // If base started with a leading \".\", or there is no base and input started with a \".\",\n // then we need to ensure that the relative path starts with a \".\". 
We don't know if\n // relative starts with a \"..\", though, so check before prepending.\n return './' + path + queryHash;\n }\n\n return path + queryHash;\n }\n\n case UrlType.AbsolutePath:\n return url.path + queryHash;\n\n default:\n return url.scheme + '//' + url.user + url.host + url.port + url.path + queryHash;\n }\n}\n"],"names":[],"mappings":";;;;;;IAAA;IACA,MAAM,WAAW,GAAG,gBAAgB,CAAC;IAErC;;;;;;;;;;IAUA,MAAM,QAAQ,GAAG,0EAA0E,CAAC;IAE5F;;;;;;;;;IASA,MAAM,SAAS,GAAG,iEAAiE,CAAC;IAapF,IAAK,OAQJ;IARD,WAAK,OAAO;QACV,uCAAS,CAAA;QACT,qCAAQ,CAAA;QACR,uCAAS,CAAA;QACT,qDAAgB,CAAA;QAChB,qDAAgB,CAAA;QAChB,yDAAkB,CAAA;QAClB,6CAAY,CAAA;IACd,CAAC,EARI,OAAO,KAAP,OAAO,QAQX;IAED,SAAS,aAAa,CAAC,KAAa;QAClC,OAAO,WAAW,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;IACjC,CAAC;IAED,SAAS,mBAAmB,CAAC,KAAa;QACxC,OAAO,KAAK,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC;IAChC,CAAC;IAED,SAAS,cAAc,CAAC,KAAa;QACnC,OAAO,KAAK,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC;IAC/B,CAAC;IAED,SAAS,SAAS,CAAC,KAAa;QAC9B,OAAO,KAAK,CAAC,UAAU,CAAC,OAAO,CAAC,CAAC;IACnC,CAAC;IAED,SAAS,UAAU,CAAC,KAAa;QAC/B,OAAO,QAAQ,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;IAC9B,CAAC;IAED,SAAS,gBAAgB,CAAC,KAAa;QACrC,MAAM,KAAK,GAAG,QAAQ,CAAC,IAAI,CAAC,KAAK,CAAE,CAAC;QACpC,OAAO,OAAO,CACZ,KAAK,CAAC,CAAC,CAAC,EACR,KAAK,CAAC,CAAC,CAAC,IAAI,EAAE,EACd,KAAK,CAAC,CAAC,CAAC,EACR,KAAK,CAAC,CAAC,CAAC,IAAI,EAAE,EACd,KAAK,CAAC,CAAC,CAAC,IAAI,GAAG,EACf,KAAK,CAAC,CAAC,CAAC,IAAI,EAAE,EACd,KAAK,CAAC,CAAC,CAAC,IAAI,EAAE,CACf,CAAC;IACJ,CAAC;IAED,SAAS,YAAY,CAAC,KAAa;QACjC,MAAM,KAAK,GAAG,SAAS,CAAC,IAAI,CAAC,KAAK,CAAE,CAAC;QACrC,MAAM,IAAI,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;QACtB,OAAO,OAAO,CACZ,OAAO,EACP,EAAE,EACF,KAAK,CAAC,CAAC,CAAC,IAAI,EAAE,EACd,EAAE,EACF,cAAc,CAAC,IAAI,CAAC,GAAG,IAAI,GAAG,GAAG,GAAG,IAAI,EACxC,KAAK,CAAC,CAAC,CAAC,IAAI,EAAE,EACd,KAAK,CAAC,CAAC,CAAC,IAAI,EAAE,CACf,CAAC;IACJ,CAAC;IAED,SAAS,OAAO,CACd,MAAc,EACd,IAAY,EACZ,IAAY,EACZ,IAAY,EACZ,IAAY,EACZ,KAAa,EACb,IAAY;QAEZ,OAAO;YACL,MAAM;YACN,IAAI;YACJ,IAAI;YACJ,IAAI;YACJ,IAAI;YACJ,KAAK;YACL,IAAI;YACJ,IAAI,EAAE,OAAO,CAAC,QAAQ;SACvB,CAAC;IACJ,CAAC;IAED,SAAS,QAAQ,CAAC,KAAa;QAC7B,IAAI,mBAAmB,CAAC,KAAK,CAAC,EAAE;YAC9B,MAAM,GAAG,GAAG,gBAAgB,CAAC,OAAO,GAAG,KAAK,CAAC,CAAC;YAC9C,GAAG,CAAC,MAAM,GAAG,EAAE,CAAC;YAChB,GAAG,CAAC,IAAI,GAAG,OAAO,CAAC,cAAc,CAAC;YAClC,OAAO,GAAG,CAAC;SACZ;QAED,IAAI,cAAc,CAAC,KAAK,CAAC,EAAE;YACzB,MAAM,GAAG,GAAG,gBAAgB,CAAC,gBAAgB,GAAG,KAAK,CAAC,CAAC;YACvD,GAAG,CAAC,MAAM,GAAG,EAAE,CAAC;YAChB,GAAG,CAAC,IAAI,GAAG,EAAE,CAAC;YACd,GAAG,CAAC,IAAI,GAAG,OAAO,CAAC,YAAY,CAAC;YAChC,OAAO,GAAG,CAAC;SACZ;QAED,IAAI,SAAS,CAAC,KAAK,CAAC;YAAE,OAAO,YAAY,CAAC,KAAK,CAAC,CAAC;QAEjD,IAAI,aAAa,CAAC,KAAK,CAAC;YAAE,OAAO,gBAAgB,CAAC,KAAK,CAAC,CAAC;QAEzD,MAAM,GAAG,GAAG,gBAAgB,CAAC,iBAAiB,GAAG,KAAK,CAAC,CAAC;QACxD,GAAG,CAAC,MAAM,GAAG,EAAE,CAAC;QAChB,GAAG,CAAC,IAAI,GAAG,EAAE,CAAC;QACd,GAAG,CAAC,IAAI,GAAG,KAAK;cACZ,KAAK,CAAC,UAAU,CAAC,GAAG,CAAC;kBACnB,OAAO,CAAC,KAAK;kBACb,KAAK,CAAC,UAAU,CAAC,GAAG,CAAC;sBACrB,OAAO,CAAC,IAAI;sBACZ,OAAO,CAAC,YAAY;cACtB,OAAO,CAAC,KAAK,CAAC;QAClB,OAAO,GAAG,CAAC;IACb,CAAC;IAED,SAAS,iBAAiB,CAAC,IAAY;;;QAGrC,IAAI,IAAI,CAAC,QAAQ,CAAC,KAAK,CAAC;YAAE,OAAO,IAAI,CAAC;QACtC,MAAM,KAAK,GAAG,IAAI,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC;QACpC,OAAO,IAAI,CAAC,KAAK,CAAC,CAAC,EAAE,KAAK,GAAG,CAAC,CAAC,CAAC;IAClC,CAAC;IAED,SAAS,UAAU,CAAC,GAAQ,EAAE,IAAS;QACrC,aAAa,CAAC,IAAI,EAAE,IAAI,CAAC,IAAI,CAAC,CAAC;;;QAI/B,IAAI,GAAG,CAAC,IAAI,KAAK,GAAG,EAAE;YACpB,GAAG,CAAC,IAAI,GAAG,IAAI,CAAC,IAAI,CAAC;SACtB;aAAM;;YAEL,GAAG,CAAC,IAAI,GAAG,iBAAiB,CAAC,IAAI,CAAC,IAAI,CAAC,GAAG,GAAG,CAAC,IAAI,CAAC;SACpD;IACH,CAAC;IAED;;;;IAIA,SAAS,aAAa,CAAC,GAAQ,EAAE,IAAa;QAC5C,MAAM,GAAG,GAAG,IAAI,IAAI,OAAO,CAAC,YAAY,CAAC
;QACzC,MAAM,MAAM,GAAG,GAAG,CAAC,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;;;QAInC,IAAI,OAAO,GAAG,CAAC,CAAC;;;QAIhB,IAAI,QAAQ,GAAG,CAAC,CAAC;;;;QAKjB,IAAI,gBAAgB,GAAG,KAAK,CAAC;QAE7B,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YACtC,MAAM,KAAK,GAAG,MAAM,CAAC,CAAC,CAAC,CAAC;;YAGxB,IAAI,CAAC,KAAK,EAAE;gBACV,gBAAgB,GAAG,IAAI,CAAC;gBACxB,SAAS;aACV;;YAGD,gBAAgB,GAAG,KAAK,CAAC;;YAGzB,IAAI,KAAK,KAAK,GAAG;gBAAE,SAAS;;;YAI5B,IAAI,KAAK,KAAK,IAAI,EAAE;gBAClB,IAAI,QAAQ,EAAE;oBACZ,gBAAgB,GAAG,IAAI,CAAC;oBACxB,QAAQ,EAAE,CAAC;oBACX,OAAO,EAAE,CAAC;iBACX;qBAAM,IAAI,GAAG,EAAE;;;oBAGd,MAAM,CAAC,OAAO,EAAE,CAAC,GAAG,KAAK,CAAC;iBAC3B;gBACD,SAAS;aACV;;;YAID,MAAM,CAAC,OAAO,EAAE,CAAC,GAAG,KAAK,CAAC;YAC1B,QAAQ,EAAE,CAAC;SACZ;QAED,IAAI,IAAI,GAAG,EAAE,CAAC;QACd,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,OAAO,EAAE,CAAC,EAAE,EAAE;YAChC,IAAI,IAAI,GAAG,GAAG,MAAM,CAAC,CAAC,CAAC,CAAC;SACzB;QACD,IAAI,CAAC,IAAI,KAAK,gBAAgB,IAAI,CAAC,IAAI,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,EAAE;YACxD,IAAI,IAAI,GAAG,CAAC;SACb;QACD,GAAG,CAAC,IAAI,GAAG,IAAI,CAAC;IAClB,CAAC;IAED;;;aAGwB,OAAO,CAAC,KAAa,EAAE,IAAwB;QACrE,IAAI,CAAC,KAAK,IAAI,CAAC,IAAI;YAAE,OAAO,EAAE,CAAC;QAE/B,MAAM,GAAG,GAAG,QAAQ,CAAC,KAAK,CAAC,CAAC;QAC5B,IAAI,SAAS,GAAG,GAAG,CAAC,IAAI,CAAC;QAEzB,IAAI,IAAI,IAAI,SAAS,KAAK,OAAO,CAAC,QAAQ,EAAE;YAC1C,MAAM,OAAO,GAAG,QAAQ,CAAC,IAAI,CAAC,CAAC;YAC/B,MAAM,QAAQ,GAAG,OAAO,CAAC,IAAI,CAAC;YAE9B,QAAQ,SAAS;gBACf,KAAK,OAAO,CAAC,KAAK;oBAChB,GAAG,CAAC,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC;;gBAG1B,KAAK,OAAO,CAAC,IAAI;oBACf,GAAG,CAAC,KAAK,GAAG,OAAO,CAAC,KAAK,CAAC;;gBAG5B,KAAK,OAAO,CAAC,KAAK,CAAC;gBACnB,KAAK,OAAO,CAAC,YAAY;oBACvB,UAAU,CAAC,GAAG,EAAE,OAAO,CAAC,CAAC;;gBAG3B,KAAK,OAAO,CAAC,YAAY;;oBAEvB,GAAG,CAAC,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC;oBACxB,GAAG,CAAC,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC;oBACxB,GAAG,CAAC,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC;;gBAG1B,KAAK,OAAO,CAAC,cAAc;;oBAEzB,GAAG,CAAC,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC;aAC/B;YACD,IAAI,QAAQ,GAAG,SAAS;gBAAE,SAAS,GAAG,QAAQ,CAAC;SAChD;QAED,aAAa,CAAC,GAAG,EAAE,SAAS,CAAC,CAAC;QAE9B,MAAM,SAAS,GAAG,GAAG,CAAC,KAAK,GAAG,GAAG,CAAC,IAAI,CAAC;QACvC,QAAQ,SAAS;;;YAIf,KAAK,OAAO,CAAC,IAAI,CAAC;YAClB,KAAK,OAAO,CAAC,KAAK;gBAChB,OAAO,SAAS,CAAC;YAEnB,KAAK,OAAO,CAAC,YAAY,EAAE;;gBAEzB,MAAM,IAAI,GAAG,GAAG,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;gBAE/B,IAAI,CAAC,IAAI;oBAAE,OAAO,SAAS,IAAI,GAAG,CAAC;gBAEnC,IAAI,UAAU,CAAC,IAAI,IAAI,KAAK,CAAC,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,EAAE;;;;oBAIlD,OAAO,IAAI,GAAG,IAAI,GAAG,SAAS,CAAC;iBAChC;gBAED,OAAO,IAAI,GAAG,SAAS,CAAC;aACzB;YAED,KAAK,OAAO,CAAC,YAAY;gBACvB,OAAO,GAAG,CAAC,IAAI,GAAG,SAAS,CAAC;YAE9B;gBACE,OAAO,GAAG,CAAC,MAAM,GAAG,IAAI,GAAG,GAAG,CAAC,IAAI,GAAG,GAAG,CAAC,IAAI,GAAG,GAAG,CAAC,IAAI,GAAG,GAAG,CAAC,IAAI,GAAG,SAAS,CAAC;SACpF;IACH;;;;;;;;"} \ No newline at end of file diff --git a/node_modules/@jridgewell/resolve-uri/dist/types/resolve-uri.d.ts b/node_modules/@jridgewell/resolve-uri/dist/types/resolve-uri.d.ts new file mode 100644 index 0000000..b7f0b3b --- /dev/null +++ b/node_modules/@jridgewell/resolve-uri/dist/types/resolve-uri.d.ts @@ -0,0 +1,4 @@ +/** + * Attempts to resolve `input` URL/path relative to `base`. 
+ */ +export default function resolve(input: string, base: string | undefined): string; diff --git a/node_modules/@jridgewell/resolve-uri/package.json b/node_modules/@jridgewell/resolve-uri/package.json new file mode 100644 index 0000000..114937a --- /dev/null +++ b/node_modules/@jridgewell/resolve-uri/package.json @@ -0,0 +1,69 @@ +{ + "name": "@jridgewell/resolve-uri", + "version": "3.1.0", + "description": "Resolve a URI relative to an optional base URI", + "keywords": [ + "resolve", + "uri", + "url", + "path" + ], + "author": "Justin Ridgewell ", + "license": "MIT", + "repository": "https://github.com/jridgewell/resolve-uri", + "main": "dist/resolve-uri.umd.js", + "module": "dist/resolve-uri.mjs", + "typings": "dist/types/resolve-uri.d.ts", + "exports": { + ".": [ + { + "types": "./dist/types/resolve-uri.d.ts", + "browser": "./dist/resolve-uri.umd.js", + "require": "./dist/resolve-uri.umd.js", + "import": "./dist/resolve-uri.mjs" + }, + "./dist/resolve-uri.umd.js" + ], + "./package.json": "./package.json" + }, + "files": [ + "dist" + ], + "engines": { + "node": ">=6.0.0" + }, + "scripts": { + "prebuild": "rm -rf dist", + "build": "run-s -n build:*", + "build:rollup": "rollup -c rollup.config.js", + "build:ts": "tsc --project tsconfig.build.json", + "lint": "run-s -n lint:*", + "lint:prettier": "npm run test:lint:prettier -- --write", + "lint:ts": "npm run test:lint:ts -- --fix", + "pretest": "run-s build:rollup", + "test": "run-s -n test:lint test:only", + "test:debug": "mocha --inspect-brk", + "test:lint": "run-s -n test:lint:*", + "test:lint:prettier": "prettier --check '{src,test}/**/*.ts'", + "test:lint:ts": "eslint '{src,test}/**/*.ts'", + "test:only": "mocha", + "test:coverage": "c8 mocha", + "test:watch": "mocha --watch", + "prepublishOnly": "npm run preversion", + "preversion": "run-s test build" + }, + "devDependencies": { + "@jridgewell/resolve-uri-latest": "npm:@jridgewell/resolve-uri@*", + "@rollup/plugin-typescript": "8.3.0", + "@typescript-eslint/eslint-plugin": "5.10.0", + "@typescript-eslint/parser": "5.10.0", + "c8": "7.11.0", + "eslint": "8.7.0", + "eslint-config-prettier": "8.3.0", + "mocha": "9.2.0", + "npm-run-all": "4.1.5", + "prettier": "2.5.1", + "rollup": "2.66.0", + "typescript": "4.5.5" + } +} diff --git a/node_modules/@jridgewell/set-array/LICENSE b/node_modules/@jridgewell/set-array/LICENSE new file mode 100644 index 0000000..352f071 --- /dev/null +++ b/node_modules/@jridgewell/set-array/LICENSE @@ -0,0 +1,19 @@ +Copyright 2022 Justin Ridgewell + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/@jridgewell/set-array/README.md b/node_modules/@jridgewell/set-array/README.md new file mode 100644 index 0000000..2ed155f --- /dev/null +++ b/node_modules/@jridgewell/set-array/README.md @@ -0,0 +1,37 @@ +# @jridgewell/set-array + +> Like a Set, but provides the index of the `key` in the backing array + +This is designed to allow synchronizing a second array with the contents of the backing array, like +how in a sourcemap `sourcesContent[i]` is the source content associated with `source[i]`, and there +are never duplicates. + +## Installation + +```sh +npm install @jridgewell/set-array +``` + +## Usage + +```js +import { SetArray, get, put, pop } from '@jridgewell/set-array'; + +const sa = new SetArray(); + +let index = put(sa, 'first'); +assert.strictEqual(index, 0); + +index = put(sa, 'second'); +assert.strictEqual(index, 1); + +assert.deepEqual(sa.array, [ 'first', 'second' ]); + +index = get(sa, 'first'); +assert.strictEqual(index, 0); + +pop(sa); +index = get(sa, 'second'); +assert.strictEqual(index, undefined); +assert.deepEqual(sa.array, [ 'first' ]); +``` diff --git a/node_modules/@jridgewell/set-array/dist/set-array.mjs b/node_modules/@jridgewell/set-array/dist/set-array.mjs new file mode 100644 index 0000000..b7f1a9c --- /dev/null +++ b/node_modules/@jridgewell/set-array/dist/set-array.mjs @@ -0,0 +1,48 @@ +/** + * Gets the index associated with `key` in the backing array, if it is already present. + */ +let get; +/** + * Puts `key` into the backing array, if it is not already present. Returns + * the index of the `key` in the backing array. + */ +let put; +/** + * Pops the last added item out of the SetArray. + */ +let pop; +/** + * SetArray acts like a `Set` (allowing only one occurrence of a string `key`), but provides the + * index of the `key` in the backing array. + * + * This is designed to allow synchronizing a second array with the contents of the backing array, + * like how in a sourcemap `sourcesContent[i]` is the source content associated with `source[i]`, + * and there are never duplicates. + */ +class SetArray { + constructor() { + this._indexes = { __proto__: null }; + this.array = []; + } +} +(() => { + get = (strarr, key) => strarr._indexes[key]; + put = (strarr, key) => { + // The key may or may not be present. If it is present, it's a number. 
+ const index = get(strarr, key); + if (index !== undefined) + return index; + const { array, _indexes: indexes } = strarr; + return (indexes[key] = array.push(key) - 1); + }; + pop = (strarr) => { + const { array, _indexes: indexes } = strarr; + if (array.length === 0) + return; + const last = array.pop(); + indexes[last] = undefined; + }; +})(); + +export { SetArray, get, pop, put }; +//# sourceMappingURL=set-array.mjs.map diff --git a/node_modules/@jridgewell/set-array/dist/set-array.mjs.map b/node_modules/@jridgewell/set-array/dist/set-array.mjs.map new file mode 100644 index 0000000..ead5643 --- /dev/null +++ b/node_modules/@jridgewell/set-array/dist/set-array.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"set-array.mjs","sources":["../src/set-array.ts"],"sourcesContent":["/**\n * Gets the index associated with `key` in the backing array, if it is already present.\n */\nexport let get: (strarr: SetArray, key: string) => number | undefined;\n\n/**\n * Puts `key` into the backing array, if it is not already present. Returns\n * the index of the `key` in the backing array.\n */\nexport let put: (strarr: SetArray, key: string) => number;\n\n/**\n * Pops the last added item out of the SetArray.\n */\nexport let pop: (strarr: SetArray) => void;\n\n/**\n * SetArray acts like a `Set` (allowing only one occurrence of a string `key`), but provides the\n * index of the `key` in the backing array.\n *\n * This is designed to allow synchronizing a second array with the contents of the backing array,\n * like how in a sourcemap `sourcesContent[i]` is the source content associated with `source[i]`,\n * and there are never duplicates.\n */\nexport class SetArray {\n private declare _indexes: { [key: string]: number | undefined };\n declare array: readonly string[];\n\n constructor() {\n this._indexes = { __proto__: null } as any;\n this.array = [];\n }\n\n static {\n get = (strarr, key) => strarr._indexes[key];\n\n put = (strarr, key) => {\n // The key may or may not be present. 
If it is present, it's a number.\n const index = get(strarr, key);\n if (index !== undefined) return index;\n\n const { array, _indexes: indexes } = strarr;\n\n return (indexes[key] = (array as string[]).push(key) - 1);\n };\n\n pop = (strarr) => {\n const { array, _indexes: indexes } = strarr;\n if (array.length === 0) return;\n\n const last = (array as string[]).pop()!;\n indexes[last] = undefined;\n };\n }\n}\n"],"names":[],"mappings":"AAAA;;;IAGW,IAA2D;AAEtE;;;;IAIW,IAA+C;AAE1D;;;IAGW,IAAgC;AAE3C;;;;;;;;MAQa,QAAQ;IAInB;QACE,IAAI,CAAC,QAAQ,GAAG,EAAE,SAAS,EAAE,IAAI,EAAS,CAAC;QAC3C,IAAI,CAAC,KAAK,GAAG,EAAE,CAAC;KACjB;CAuBF;AArBC;IACE,GAAG,GAAG,CAAC,MAAM,EAAE,GAAG,KAAK,MAAM,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC;IAE5C,GAAG,GAAG,CAAC,MAAM,EAAE,GAAG;;QAEhB,MAAM,KAAK,GAAG,GAAG,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;QAC/B,IAAI,KAAK,KAAK,SAAS;YAAE,OAAO,KAAK,CAAC;QAEtC,MAAM,EAAE,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,GAAG,MAAM,CAAC;QAE5C,QAAQ,OAAO,CAAC,GAAG,CAAC,GAAI,KAAkB,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE;KAC3D,CAAC;IAEF,GAAG,GAAG,CAAC,MAAM;QACX,MAAM,EAAE,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,GAAG,MAAM,CAAC;QAC5C,IAAI,KAAK,CAAC,MAAM,KAAK,CAAC;YAAE,OAAO;QAE/B,MAAM,IAAI,GAAI,KAAkB,CAAC,GAAG,EAAG,CAAC;QACxC,OAAO,CAAC,IAAI,CAAC,GAAG,SAAS,CAAC;KAC3B,CAAC;AACJ,CAAC,GAAA;;;;"} \ No newline at end of file diff --git a/node_modules/@jridgewell/set-array/dist/set-array.umd.js b/node_modules/@jridgewell/set-array/dist/set-array.umd.js new file mode 100644 index 0000000..a1c200a --- /dev/null +++ b/node_modules/@jridgewell/set-array/dist/set-array.umd.js @@ -0,0 +1,58 @@ +(function (global, factory) { + typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports) : + typeof define === 'function' && define.amd ? define(['exports'], factory) : + (global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.setArray = {})); +})(this, (function (exports) { 'use strict'; + + /** + * Gets the index associated with `key` in the backing array, if it is already present. + */ + exports.get = void 0; + /** + * Puts `key` into the backing array, if it is not already present. Returns + * the index of the `key` in the backing array. + */ + exports.put = void 0; + /** + * Pops the last added item out of the SetArray. + */ + exports.pop = void 0; + /** + * SetArray acts like a `Set` (allowing only one occurrence of a string `key`), but provides the + * index of the `key` in the backing array. + * + * This is designed to allow synchronizing a second array with the contents of the backing array, + * like how in a sourcemap `sourcesContent[i]` is the source content associated with `source[i]`, + * and there are never duplicates. + */ + class SetArray { + constructor() { + this._indexes = { __proto__: null }; + this.array = []; + } + } + (() => { + exports.get = (strarr, key) => strarr._indexes[key]; + exports.put = (strarr, key) => { + // The key may or may not be present. If it is present, it's a number. 
+ const index = exports.get(strarr, key); + if (index !== undefined) + return index; + const { array, _indexes: indexes } = strarr; + return (indexes[key] = array.push(key) - 1); + }; + exports.pop = (strarr) => { + const { array, _indexes: indexes } = strarr; + if (array.length === 0) + return; + const last = array.pop(); + indexes[last] = undefined; + }; + })(); + + exports.SetArray = SetArray; + + Object.defineProperty(exports, '__esModule', { value: true }); + +})); +//# sourceMappingURL=set-array.umd.js.map diff --git a/node_modules/@jridgewell/set-array/dist/set-array.umd.js.map b/node_modules/@jridgewell/set-array/dist/set-array.umd.js.map new file mode 100644 index 0000000..10005af --- /dev/null +++ b/node_modules/@jridgewell/set-array/dist/set-array.umd.js.map @@ -0,0 +1 @@ +{"version":3,"file":"set-array.umd.js","sources":["../src/set-array.ts"],"sourcesContent":["/**\n * Gets the index associated with `key` in the backing array, if it is already present.\n */\nexport let get: (strarr: SetArray, key: string) => number | undefined;\n\n/**\n * Puts `key` into the backing array, if it is not already present. Returns\n * the index of the `key` in the backing array.\n */\nexport let put: (strarr: SetArray, key: string) => number;\n\n/**\n * Pops the last added item out of the SetArray.\n */\nexport let pop: (strarr: SetArray) => void;\n\n/**\n * SetArray acts like a `Set` (allowing only one occurrence of a string `key`), but provides the\n * index of the `key` in the backing array.\n *\n * This is designed to allow synchronizing a second array with the contents of the backing array,\n * like how in a sourcemap `sourcesContent[i]` is the source content associated with `source[i]`,\n * and there are never duplicates.\n */\nexport class SetArray {\n private declare _indexes: { [key: string]: number | undefined };\n declare array: readonly string[];\n\n constructor() {\n this._indexes = { __proto__: null } as any;\n this.array = [];\n }\n\n static {\n get = (strarr, key) => strarr._indexes[key];\n\n put = (strarr, key) => {\n // The key may or may not be present. 
If it is present, it's a number.\n const index = get(strarr, key);\n if (index !== undefined) return index;\n\n const { array, _indexes: indexes } = strarr;\n\n return (indexes[key] = (array as string[]).push(key) - 1);\n };\n\n pop = (strarr) => {\n const { array, _indexes: indexes } = strarr;\n if (array.length === 0) return;\n\n const last = (array as string[]).pop()!;\n indexes[last] = undefined;\n };\n }\n}\n"],"names":["get","put","pop"],"mappings":";;;;;;IAAA;;;AAGWA,yBAA2D;IAEtE;;;;AAIWC,yBAA+C;IAE1D;;;AAGWC,yBAAgC;IAE3C;;;;;;;;UAQa,QAAQ;QAInB;YACE,IAAI,CAAC,QAAQ,GAAG,EAAE,SAAS,EAAE,IAAI,EAAS,CAAC;YAC3C,IAAI,CAAC,KAAK,GAAG,EAAE,CAAC;SACjB;KAuBF;IArBC;QACEF,WAAG,GAAG,CAAC,MAAM,EAAE,GAAG,KAAK,MAAM,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC;QAE5CC,WAAG,GAAG,CAAC,MAAM,EAAE,GAAG;;YAEhB,MAAM,KAAK,GAAGD,WAAG,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;YAC/B,IAAI,KAAK,KAAK,SAAS;gBAAE,OAAO,KAAK,CAAC;YAEtC,MAAM,EAAE,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,GAAG,MAAM,CAAC;YAE5C,QAAQ,OAAO,CAAC,GAAG,CAAC,GAAI,KAAkB,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE;SAC3D,CAAC;QAEFE,WAAG,GAAG,CAAC,MAAM;YACX,MAAM,EAAE,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,GAAG,MAAM,CAAC;YAC5C,IAAI,KAAK,CAAC,MAAM,KAAK,CAAC;gBAAE,OAAO;YAE/B,MAAM,IAAI,GAAI,KAAkB,CAAC,GAAG,EAAG,CAAC;YACxC,OAAO,CAAC,IAAI,CAAC,GAAG,SAAS,CAAC;SAC3B,CAAC;IACJ,CAAC,GAAA;;;;;;;;;;"} \ No newline at end of file diff --git a/node_modules/@jridgewell/set-array/dist/types/set-array.d.ts b/node_modules/@jridgewell/set-array/dist/types/set-array.d.ts new file mode 100644 index 0000000..7ed59b9 --- /dev/null +++ b/node_modules/@jridgewell/set-array/dist/types/set-array.d.ts @@ -0,0 +1,26 @@ +/** + * Gets the index associated with `key` in the backing array, if it is already present. + */ +export declare let get: (strarr: SetArray, key: string) => number | undefined; +/** + * Puts `key` into the backing array, if it is not already present. Returns + * the index of the `key` in the backing array. + */ +export declare let put: (strarr: SetArray, key: string) => number; +/** + * Pops the last added item out of the SetArray. + */ +export declare let pop: (strarr: SetArray) => void; +/** + * SetArray acts like a `Set` (allowing only one occurrence of a string `key`), but provides the + * index of the `key` in the backing array. + * + * This is designed to allow synchronizing a second array with the contents of the backing array, + * like how in a sourcemap `sourcesContent[i]` is the source content associated with `source[i]`, + * and there are never duplicates. 
+ */ +export declare class SetArray { + private _indexes; + array: readonly string[]; + constructor(); +} diff --git a/node_modules/@jridgewell/set-array/package.json b/node_modules/@jridgewell/set-array/package.json new file mode 100644 index 0000000..aec4ee0 --- /dev/null +++ b/node_modules/@jridgewell/set-array/package.json @@ -0,0 +1,66 @@ +{ + "name": "@jridgewell/set-array", + "version": "1.1.2", + "description": "Like a Set, but provides the index of the `key` in the backing array", + "keywords": [], + "author": "Justin Ridgewell ", + "license": "MIT", + "repository": "https://github.com/jridgewell/set-array", + "main": "dist/set-array.umd.js", + "module": "dist/set-array.mjs", + "typings": "dist/types/set-array.d.ts", + "exports": { + ".": [ + { + "types": "./dist/types/set-array.d.ts", + "browser": "./dist/set-array.umd.js", + "require": "./dist/set-array.umd.js", + "import": "./dist/set-array.mjs" + }, + "./dist/set-array.umd.js" + ], + "./package.json": "./package.json" + }, + "files": [ + "dist", + "src" + ], + "engines": { + "node": ">=6.0.0" + }, + "scripts": { + "prebuild": "rm -rf dist", + "build": "run-s -n build:*", + "build:rollup": "rollup -c rollup.config.js", + "build:ts": "tsc --project tsconfig.build.json", + "lint": "run-s -n lint:*", + "lint:prettier": "npm run test:lint:prettier -- --write", + "lint:ts": "npm run test:lint:ts -- --fix", + "pretest": "run-s build:rollup", + "test": "run-s -n test:lint test:only", + "test:debug": "mocha --inspect-brk", + "test:lint": "run-s -n test:lint:*", + "test:lint:prettier": "prettier --check '{src,test}/**/*.ts'", + "test:lint:ts": "eslint '{src,test}/**/*.ts'", + "test:only": "mocha", + "test:coverage": "c8 mocha", + "test:watch": "mocha --watch", + "prepublishOnly": "npm run preversion", + "preversion": "run-s test build" + }, + "devDependencies": { + "@rollup/plugin-typescript": "8.3.0", + "@types/mocha": "9.1.1", + "@types/node": "17.0.29", + "@typescript-eslint/eslint-plugin": "5.10.0", + "@typescript-eslint/parser": "5.10.0", + "c8": "7.11.0", + "eslint": "8.7.0", + "eslint-config-prettier": "8.3.0", + "mocha": "9.2.0", + "npm-run-all": "4.1.5", + "prettier": "2.5.1", + "rollup": "2.66.0", + "typescript": "4.5.5" + } +} diff --git a/node_modules/@jridgewell/set-array/src/set-array.ts b/node_modules/@jridgewell/set-array/src/set-array.ts new file mode 100644 index 0000000..f9ff604 --- /dev/null +++ b/node_modules/@jridgewell/set-array/src/set-array.ts @@ -0,0 +1,55 @@ +/** + * Gets the index associated with `key` in the backing array, if it is already present. + */ +export let get: (strarr: SetArray, key: string) => number | undefined; + +/** + * Puts `key` into the backing array, if it is not already present. Returns + * the index of the `key` in the backing array. + */ +export let put: (strarr: SetArray, key: string) => number; + +/** + * Pops the last added item out of the SetArray. + */ +export let pop: (strarr: SetArray) => void; + +/** + * SetArray acts like a `Set` (allowing only one occurrence of a string `key`), but provides the + * index of the `key` in the backing array. + * + * This is designed to allow synchronizing a second array with the contents of the backing array, + * like how in a sourcemap `sourcesContent[i]` is the source content associated with `source[i]`, + * and there are never duplicates. 
+ */ +export class SetArray { + private declare _indexes: { [key: string]: number | undefined }; + declare array: readonly string[]; + + constructor() { + this._indexes = { __proto__: null } as any; + this.array = []; + } + + static { + get = (strarr, key) => strarr._indexes[key]; + + put = (strarr, key) => { + // The key may or may not be present. If it is present, it's a number. + const index = get(strarr, key); + if (index !== undefined) return index; + + const { array, _indexes: indexes } = strarr; + + return (indexes[key] = (array as string[]).push(key) - 1); + }; + + pop = (strarr) => { + const { array, _indexes: indexes } = strarr; + if (array.length === 0) return; + + const last = (array as string[]).pop()!; + indexes[last] = undefined; + }; + } +} diff --git a/node_modules/@jridgewell/sourcemap-codec/LICENSE b/node_modules/@jridgewell/sourcemap-codec/LICENSE new file mode 100644 index 0000000..a331065 --- /dev/null +++ b/node_modules/@jridgewell/sourcemap-codec/LICENSE @@ -0,0 +1,21 @@ +The MIT License + +Copyright (c) 2015 Rich Harris + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/@jridgewell/sourcemap-codec/README.md b/node_modules/@jridgewell/sourcemap-codec/README.md new file mode 100644 index 0000000..5cbb315 --- /dev/null +++ b/node_modules/@jridgewell/sourcemap-codec/README.md @@ -0,0 +1,200 @@ +# @jridgewell/sourcemap-codec + +Encode/decode the `mappings` property of a [sourcemap](https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit). + + +## Why? + +Sourcemaps are difficult to generate and manipulate, because the `mappings` property – the part that actually links the generated code back to the original source – is encoded using an obscure method called [Variable-length quantity](https://en.wikipedia.org/wiki/Variable-length_quantity). On top of that, each segment in the mapping contains offsets rather than absolute indices, which means that you can't look at a segment in isolation – you have to understand the whole sourcemap. + +This package makes the process slightly easier. 
+ + +## Installation + +```bash +npm install @jridgewell/sourcemap-codec +``` + + +## Usage + +```js +import { encode, decode } from '@jridgewell/sourcemap-codec'; + +var decoded = decode( ';EAEEA,EAAE,EAAC,CAAE;ECQY,UACC' ); + +assert.deepEqual( decoded, [ + // the first line (of the generated code) has no mappings, + // as shown by the starting semi-colon (which separates lines) + [], + + // the second line contains four (comma-separated) segments + [ + // segments are encoded as you'd expect: + // [ generatedCodeColumn, sourceIndex, sourceCodeLine, sourceCodeColumn, nameIndex ] + + // i.e. the first segment begins at column 2, and maps back to the second column + // of the second line (both zero-based) of the 0th source, and uses the 0th + // name in the `map.names` array + [ 2, 0, 2, 2, 0 ], + + // the remaining segments are 4-length rather than 5-length, + // because they don't map a name + [ 4, 0, 2, 4 ], + [ 6, 0, 2, 5 ], + [ 7, 0, 2, 7 ] + ], + + // the final line contains two segments + [ + [ 2, 1, 10, 19 ], + [ 12, 1, 11, 20 ] + ] +]); + +var encoded = encode( decoded ); +assert.equal( encoded, ';EAEEA,EAAE,EAAC,CAAE;ECQY,UACC' ); +``` + +## Benchmarks + +``` +node v18.0.0 + +amp.js.map - 45120 segments + +Decode Memory Usage: +@jridgewell/sourcemap-codec 5479160 bytes +sourcemap-codec 5659336 bytes +source-map-0.6.1 17144440 bytes +source-map-0.8.0 6867424 bytes +Smallest memory usage is @jridgewell/sourcemap-codec + +Decode speed: +decode: @jridgewell/sourcemap-codec x 502 ops/sec ±1.03% (90 runs sampled) +decode: sourcemap-codec x 445 ops/sec ±0.97% (92 runs sampled) +decode: source-map-0.6.1 x 36.01 ops/sec ±1.64% (49 runs sampled) +decode: source-map-0.8.0 x 367 ops/sec ±0.04% (95 runs sampled) +Fastest is decode: @jridgewell/sourcemap-codec + +Encode Memory Usage: +@jridgewell/sourcemap-codec 1261620 bytes +sourcemap-codec 9119248 bytes +source-map-0.6.1 8968560 bytes +source-map-0.8.0 8952952 bytes +Smallest memory usage is @jridgewell/sourcemap-codec + +Encode speed: +encode: @jridgewell/sourcemap-codec x 738 ops/sec ±0.42% (98 runs sampled) +encode: sourcemap-codec x 238 ops/sec ±0.73% (88 runs sampled) +encode: source-map-0.6.1 x 162 ops/sec ±0.43% (84 runs sampled) +encode: source-map-0.8.0 x 191 ops/sec ±0.34% (90 runs sampled) +Fastest is encode: @jridgewell/sourcemap-codec + + +*** + + +babel.min.js.map - 347793 segments + +Decode Memory Usage: +@jridgewell/sourcemap-codec 35338184 bytes +sourcemap-codec 35922736 bytes +source-map-0.6.1 62366360 bytes +source-map-0.8.0 44337416 bytes +Smallest memory usage is @jridgewell/sourcemap-codec + +Decode speed: +decode: @jridgewell/sourcemap-codec x 40.35 ops/sec ±4.47% (54 runs sampled) +decode: sourcemap-codec x 36.76 ops/sec ±3.67% (51 runs sampled) +decode: source-map-0.6.1 x 4.44 ops/sec ±2.15% (16 runs sampled) +decode: source-map-0.8.0 x 59.35 ops/sec ±0.05% (78 runs sampled) +Fastest is decode: source-map-0.8.0 + +Encode Memory Usage: +@jridgewell/sourcemap-codec 7212604 bytes +sourcemap-codec 21421456 bytes +source-map-0.6.1 25286888 bytes +source-map-0.8.0 25498744 bytes +Smallest memory usage is @jridgewell/sourcemap-codec + +Encode speed: +encode: @jridgewell/sourcemap-codec x 112 ops/sec ±0.13% (84 runs sampled) +encode: sourcemap-codec x 30.23 ops/sec ±2.76% (53 runs sampled) +encode: source-map-0.6.1 x 19.43 ops/sec ±3.70% (37 runs sampled) +encode: source-map-0.8.0 x 19.40 ops/sec ±3.26% (37 runs sampled) +Fastest is encode: @jridgewell/sourcemap-codec + + +*** + + +preact.js.map - 1992 segments + +Decode 
Memory Usage: +@jridgewell/sourcemap-codec 500272 bytes +sourcemap-codec 516864 bytes +source-map-0.6.1 1596672 bytes +source-map-0.8.0 517272 bytes +Smallest memory usage is @jridgewell/sourcemap-codec + +Decode speed: +decode: @jridgewell/sourcemap-codec x 16,137 ops/sec ±0.17% (99 runs sampled) +decode: sourcemap-codec x 12,139 ops/sec ±0.13% (99 runs sampled) +decode: source-map-0.6.1 x 1,264 ops/sec ±0.12% (100 runs sampled) +decode: source-map-0.8.0 x 9,894 ops/sec ±0.08% (101 runs sampled) +Fastest is decode: @jridgewell/sourcemap-codec + +Encode Memory Usage: +@jridgewell/sourcemap-codec 321026 bytes +sourcemap-codec 830832 bytes +source-map-0.6.1 586608 bytes +source-map-0.8.0 586680 bytes +Smallest memory usage is @jridgewell/sourcemap-codec + +Encode speed: +encode: @jridgewell/sourcemap-codec x 19,876 ops/sec ±0.78% (95 runs sampled) +encode: sourcemap-codec x 6,983 ops/sec ±0.15% (100 runs sampled) +encode: source-map-0.6.1 x 5,070 ops/sec ±0.12% (102 runs sampled) +encode: source-map-0.8.0 x 5,641 ops/sec ±0.17% (100 runs sampled) +Fastest is encode: @jridgewell/sourcemap-codec + + +*** + + +react.js.map - 5726 segments + +Decode Memory Usage: +@jridgewell/sourcemap-codec 734848 bytes +sourcemap-codec 954200 bytes +source-map-0.6.1 2276432 bytes +source-map-0.8.0 955488 bytes +Smallest memory usage is @jridgewell/sourcemap-codec + +Decode speed: +decode: @jridgewell/sourcemap-codec x 5,723 ops/sec ±0.12% (98 runs sampled) +decode: sourcemap-codec x 4,555 ops/sec ±0.09% (101 runs sampled) +decode: source-map-0.6.1 x 437 ops/sec ±0.11% (93 runs sampled) +decode: source-map-0.8.0 x 3,441 ops/sec ±0.15% (100 runs sampled) +Fastest is decode: @jridgewell/sourcemap-codec + +Encode Memory Usage: +@jridgewell/sourcemap-codec 638672 bytes +sourcemap-codec 1109840 bytes +source-map-0.6.1 1321224 bytes +source-map-0.8.0 1324448 bytes +Smallest memory usage is @jridgewell/sourcemap-codec + +Encode speed: +encode: @jridgewell/sourcemap-codec x 6,801 ops/sec ±0.48% (98 runs sampled) +encode: sourcemap-codec x 2,533 ops/sec ±0.13% (101 runs sampled) +encode: source-map-0.6.1 x 2,248 ops/sec ±0.08% (100 runs sampled) +encode: source-map-0.8.0 x 2,303 ops/sec ±0.15% (100 runs sampled) +Fastest is encode: @jridgewell/sourcemap-codec +``` + +# License + +MIT diff --git a/node_modules/@jridgewell/sourcemap-codec/dist/sourcemap-codec.mjs b/node_modules/@jridgewell/sourcemap-codec/dist/sourcemap-codec.mjs new file mode 100644 index 0000000..3dff372 --- /dev/null +++ b/node_modules/@jridgewell/sourcemap-codec/dist/sourcemap-codec.mjs @@ -0,0 +1,164 @@ +const comma = ','.charCodeAt(0); +const semicolon = ';'.charCodeAt(0); +const chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'; +const intToChar = new Uint8Array(64); // 64 possible chars. +const charToInt = new Uint8Array(128); // z is 122 in ASCII +for (let i = 0; i < chars.length; i++) { + const c = chars.charCodeAt(i); + intToChar[i] = c; + charToInt[c] = i; +} +// Provide a fallback for older environments. +const td = typeof TextDecoder !== 'undefined' + ? /* #__PURE__ */ new TextDecoder() + : typeof Buffer !== 'undefined' + ? 
{ + decode(buf) { + const out = Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength); + return out.toString(); + }, + } + : { + decode(buf) { + let out = ''; + for (let i = 0; i < buf.length; i++) { + out += String.fromCharCode(buf[i]); + } + return out; + }, + }; +function decode(mappings) { + const state = new Int32Array(5); + const decoded = []; + let index = 0; + do { + const semi = indexOf(mappings, index); + const line = []; + let sorted = true; + let lastCol = 0; + state[0] = 0; + for (let i = index; i < semi; i++) { + let seg; + i = decodeInteger(mappings, i, state, 0); // genColumn + const col = state[0]; + if (col < lastCol) + sorted = false; + lastCol = col; + if (hasMoreVlq(mappings, i, semi)) { + i = decodeInteger(mappings, i, state, 1); // sourcesIndex + i = decodeInteger(mappings, i, state, 2); // sourceLine + i = decodeInteger(mappings, i, state, 3); // sourceColumn + if (hasMoreVlq(mappings, i, semi)) { + i = decodeInteger(mappings, i, state, 4); // namesIndex + seg = [col, state[1], state[2], state[3], state[4]]; + } + else { + seg = [col, state[1], state[2], state[3]]; + } + } + else { + seg = [col]; + } + line.push(seg); + } + if (!sorted) + sort(line); + decoded.push(line); + index = semi + 1; + } while (index <= mappings.length); + return decoded; +} +function indexOf(mappings, index) { + const idx = mappings.indexOf(';', index); + return idx === -1 ? mappings.length : idx; +} +function decodeInteger(mappings, pos, state, j) { + let value = 0; + let shift = 0; + let integer = 0; + do { + const c = mappings.charCodeAt(pos++); + integer = charToInt[c]; + value |= (integer & 31) << shift; + shift += 5; + } while (integer & 32); + const shouldNegate = value & 1; + value >>>= 1; + if (shouldNegate) { + value = -0x80000000 | -value; + } + state[j] += value; + return pos; +} +function hasMoreVlq(mappings, i, length) { + if (i >= length) + return false; + return mappings.charCodeAt(i) !== comma; +} +function sort(line) { + line.sort(sortComparator); +} +function sortComparator(a, b) { + return a[0] - b[0]; +} +function encode(decoded) { + const state = new Int32Array(5); + const bufLength = 1024 * 16; + const subLength = bufLength - 36; + const buf = new Uint8Array(bufLength); + const sub = buf.subarray(0, subLength); + let pos = 0; + let out = ''; + for (let i = 0; i < decoded.length; i++) { + const line = decoded[i]; + if (i > 0) { + if (pos === bufLength) { + out += td.decode(buf); + pos = 0; + } + buf[pos++] = semicolon; + } + if (line.length === 0) + continue; + state[0] = 0; + for (let j = 0; j < line.length; j++) { + const segment = line[j]; + // We can push up to 5 ints, each int can take at most 7 chars, and we + // may push a comma. + if (pos > subLength) { + out += td.decode(sub); + buf.copyWithin(0, subLength, pos); + pos -= subLength; + } + if (j > 0) + buf[pos++] = comma; + pos = encodeInteger(buf, pos, state, segment, 0); // genColumn + if (segment.length === 1) + continue; + pos = encodeInteger(buf, pos, state, segment, 1); // sourcesIndex + pos = encodeInteger(buf, pos, state, segment, 2); // sourceLine + pos = encodeInteger(buf, pos, state, segment, 3); // sourceColumn + if (segment.length === 4) + continue; + pos = encodeInteger(buf, pos, state, segment, 4); // namesIndex + } + } + return out + td.decode(buf.subarray(0, pos)); +} +function encodeInteger(buf, pos, state, segment, j) { + const next = segment[j]; + let num = next - state[j]; + state[j] = next; + num = num < 0 ? 
(-num << 1) | 1 : num << 1; + do { + let clamped = num & 0b011111; + num >>>= 5; + if (num > 0) + clamped |= 0b100000; + buf[pos++] = intToChar[clamped]; + } while (num > 0); + return pos; +} + +export { decode, encode }; +//# sourceMappingURL=sourcemap-codec.mjs.map diff --git a/node_modules/@jridgewell/sourcemap-codec/dist/sourcemap-codec.mjs.map b/node_modules/@jridgewell/sourcemap-codec/dist/sourcemap-codec.mjs.map new file mode 100644 index 0000000..236fd12 --- /dev/null +++ b/node_modules/@jridgewell/sourcemap-codec/dist/sourcemap-codec.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"sourcemap-codec.mjs","sources":["../src/sourcemap-codec.ts"],"sourcesContent":["export type SourceMapSegment =\n | [number]\n | [number, number, number, number]\n | [number, number, number, number, number];\nexport type SourceMapLine = SourceMapSegment[];\nexport type SourceMapMappings = SourceMapLine[];\n\nconst comma = ','.charCodeAt(0);\nconst semicolon = ';'.charCodeAt(0);\nconst chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/';\nconst intToChar = new Uint8Array(64); // 64 possible chars.\nconst charToInt = new Uint8Array(128); // z is 122 in ASCII\n\nfor (let i = 0; i < chars.length; i++) {\n const c = chars.charCodeAt(i);\n intToChar[i] = c;\n charToInt[c] = i;\n}\n\n// Provide a fallback for older environments.\nconst td =\n typeof TextDecoder !== 'undefined'\n ? /* #__PURE__ */ new TextDecoder()\n : typeof Buffer !== 'undefined'\n ? {\n decode(buf: Uint8Array) {\n const out = Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength);\n return out.toString();\n },\n }\n : {\n decode(buf: Uint8Array) {\n let out = '';\n for (let i = 0; i < buf.length; i++) {\n out += String.fromCharCode(buf[i]);\n }\n return out;\n },\n };\n\nexport function decode(mappings: string): SourceMapMappings {\n const state: [number, number, number, number, number] = new Int32Array(5) as any;\n const decoded: SourceMapMappings = [];\n\n let index = 0;\n do {\n const semi = indexOf(mappings, index);\n const line: SourceMapLine = [];\n let sorted = true;\n let lastCol = 0;\n state[0] = 0;\n\n for (let i = index; i < semi; i++) {\n let seg: SourceMapSegment;\n\n i = decodeInteger(mappings, i, state, 0); // genColumn\n const col = state[0];\n if (col < lastCol) sorted = false;\n lastCol = col;\n\n if (hasMoreVlq(mappings, i, semi)) {\n i = decodeInteger(mappings, i, state, 1); // sourcesIndex\n i = decodeInteger(mappings, i, state, 2); // sourceLine\n i = decodeInteger(mappings, i, state, 3); // sourceColumn\n\n if (hasMoreVlq(mappings, i, semi)) {\n i = decodeInteger(mappings, i, state, 4); // namesIndex\n seg = [col, state[1], state[2], state[3], state[4]];\n } else {\n seg = [col, state[1], state[2], state[3]];\n }\n } else {\n seg = [col];\n }\n\n line.push(seg);\n }\n\n if (!sorted) sort(line);\n decoded.push(line);\n index = semi + 1;\n } while (index <= mappings.length);\n\n return decoded;\n}\n\nfunction indexOf(mappings: string, index: number): number {\n const idx = mappings.indexOf(';', index);\n return idx === -1 ? 
mappings.length : idx;\n}\n\nfunction decodeInteger(mappings: string, pos: number, state: SourceMapSegment, j: number): number {\n let value = 0;\n let shift = 0;\n let integer = 0;\n\n do {\n const c = mappings.charCodeAt(pos++);\n integer = charToInt[c];\n value |= (integer & 31) << shift;\n shift += 5;\n } while (integer & 32);\n\n const shouldNegate = value & 1;\n value >>>= 1;\n\n if (shouldNegate) {\n value = -0x80000000 | -value;\n }\n\n state[j] += value;\n return pos;\n}\n\nfunction hasMoreVlq(mappings: string, i: number, length: number): boolean {\n if (i >= length) return false;\n return mappings.charCodeAt(i) !== comma;\n}\n\nfunction sort(line: SourceMapSegment[]) {\n line.sort(sortComparator);\n}\n\nfunction sortComparator(a: SourceMapSegment, b: SourceMapSegment): number {\n return a[0] - b[0];\n}\n\nexport function encode(decoded: SourceMapMappings): string;\nexport function encode(decoded: Readonly): string;\nexport function encode(decoded: Readonly): string {\n const state: [number, number, number, number, number] = new Int32Array(5) as any;\n const bufLength = 1024 * 16;\n const subLength = bufLength - 36;\n const buf = new Uint8Array(bufLength);\n const sub = buf.subarray(0, subLength);\n let pos = 0;\n let out = '';\n\n for (let i = 0; i < decoded.length; i++) {\n const line = decoded[i];\n if (i > 0) {\n if (pos === bufLength) {\n out += td.decode(buf);\n pos = 0;\n }\n buf[pos++] = semicolon;\n }\n if (line.length === 0) continue;\n\n state[0] = 0;\n\n for (let j = 0; j < line.length; j++) {\n const segment = line[j];\n // We can push up to 5 ints, each int can take at most 7 chars, and we\n // may push a comma.\n if (pos > subLength) {\n out += td.decode(sub);\n buf.copyWithin(0, subLength, pos);\n pos -= subLength;\n }\n if (j > 0) buf[pos++] = comma;\n\n pos = encodeInteger(buf, pos, state, segment, 0); // genColumn\n\n if (segment.length === 1) continue;\n pos = encodeInteger(buf, pos, state, segment, 1); // sourcesIndex\n pos = encodeInteger(buf, pos, state, segment, 2); // sourceLine\n pos = encodeInteger(buf, pos, state, segment, 3); // sourceColumn\n\n if (segment.length === 4) continue;\n pos = encodeInteger(buf, pos, state, segment, 4); // namesIndex\n }\n }\n\n return out + td.decode(buf.subarray(0, pos));\n}\n\nfunction encodeInteger(\n buf: Uint8Array,\n pos: number,\n state: SourceMapSegment,\n segment: SourceMapSegment,\n j: number,\n): number {\n const next = segment[j];\n let num = next - state[j];\n state[j] = next;\n\n num = num < 0 ? 
(-num << 1) | 1 : num << 1;\n do {\n let clamped = num & 0b011111;\n num >>>= 5;\n if (num > 0) clamped |= 0b100000;\n buf[pos++] = intToChar[clamped];\n } while (num > 0);\n\n return pos;\n}\n"],"names":[],"mappings":"AAOA,MAAM,KAAK,GAAG,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;AAChC,MAAM,SAAS,GAAG,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;AACpC,MAAM,KAAK,GAAG,kEAAkE,CAAC;AACjF,MAAM,SAAS,GAAG,IAAI,UAAU,CAAC,EAAE,CAAC,CAAC;AACrC,MAAM,SAAS,GAAG,IAAI,UAAU,CAAC,GAAG,CAAC,CAAC;AAEtC,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,KAAK,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;IACrC,MAAM,CAAC,GAAG,KAAK,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;IAC9B,SAAS,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC;IACjB,SAAS,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC;CAClB;AAED;AACA,MAAM,EAAE,GACN,OAAO,WAAW,KAAK,WAAW;sBACd,IAAI,WAAW,EAAE;MACjC,OAAO,MAAM,KAAK,WAAW;UAC7B;YACE,MAAM,CAAC,GAAe;gBACpB,MAAM,GAAG,GAAG,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,MAAM,EAAE,GAAG,CAAC,UAAU,EAAE,GAAG,CAAC,UAAU,CAAC,CAAC;gBACpE,OAAO,GAAG,CAAC,QAAQ,EAAE,CAAC;aACvB;SACF;UACD;YACE,MAAM,CAAC,GAAe;gBACpB,IAAI,GAAG,GAAG,EAAE,CAAC;gBACb,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,GAAG,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;oBACnC,GAAG,IAAI,MAAM,CAAC,YAAY,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;iBACpC;gBACD,OAAO,GAAG,CAAC;aACZ;SACF,CAAC;SAEQ,MAAM,CAAC,QAAgB;IACrC,MAAM,KAAK,GAA6C,IAAI,UAAU,CAAC,CAAC,CAAQ,CAAC;IACjF,MAAM,OAAO,GAAsB,EAAE,CAAC;IAEtC,IAAI,KAAK,GAAG,CAAC,CAAC;IACd,GAAG;QACD,MAAM,IAAI,GAAG,OAAO,CAAC,QAAQ,EAAE,KAAK,CAAC,CAAC;QACtC,MAAM,IAAI,GAAkB,EAAE,CAAC;QAC/B,IAAI,MAAM,GAAG,IAAI,CAAC;QAClB,IAAI,OAAO,GAAG,CAAC,CAAC;QAChB,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC;QAEb,KAAK,IAAI,CAAC,GAAG,KAAK,EAAE,CAAC,GAAG,IAAI,EAAE,CAAC,EAAE,EAAE;YACjC,IAAI,GAAqB,CAAC;YAE1B,CAAC,GAAG,aAAa,CAAC,QAAQ,EAAE,CAAC,EAAE,KAAK,EAAE,CAAC,CAAC,CAAC;YACzC,MAAM,GAAG,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;YACrB,IAAI,GAAG,GAAG,OAAO;gBAAE,MAAM,GAAG,KAAK,CAAC;YAClC,OAAO,GAAG,GAAG,CAAC;YAEd,IAAI,UAAU,CAAC,QAAQ,EAAE,CAAC,EAAE,IAAI,CAAC,EAAE;gBACjC,CAAC,GAAG,aAAa,CAAC,QAAQ,EAAE,CAAC,EAAE,KAAK,EAAE,CAAC,CAAC,CAAC;gBACzC,CAAC,GAAG,aAAa,CAAC,QAAQ,EAAE,CAAC,EAAE,KAAK,EAAE,CAAC,CAAC,CAAC;gBACzC,CAAC,GAAG,aAAa,CAAC,QAAQ,EAAE,CAAC,EAAE,KAAK,EAAE,CAAC,CAAC,CAAC;gBAEzC,IAAI,UAAU,CAAC,QAAQ,EAAE,CAAC,EAAE,IAAI,CAAC,EAAE;oBACjC,CAAC,GAAG,aAAa,CAAC,QAAQ,EAAE,CAAC,EAAE,KAAK,EAAE,CAAC,CAAC,CAAC;oBACzC,GAAG,GAAG,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC;iBACrD;qBAAM;oBACL,GAAG,GAAG,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC;iBAC3C;aACF;iBAAM;gBACL,GAAG,GAAG,CAAC,GAAG,CAAC,CAAC;aACb;YAED,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SAChB;QAED,IAAI,CAAC,MAAM;YAAE,IAAI,CAAC,IAAI,CAAC,CAAC;QACxB,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;QACnB,KAAK,GAAG,IAAI,GAAG,CAAC,CAAC;KAClB,QAAQ,KAAK,IAAI,QAAQ,CAAC,MAAM,EAAE;IAEnC,OAAO,OAAO,CAAC;AACjB,CAAC;AAED,SAAS,OAAO,CAAC,QAAgB,EAAE,KAAa;IAC9C,MAAM,GAAG,GAAG,QAAQ,CAAC,OAAO,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC;IACzC,OAAO,GAAG,KAAK,CAAC,CAAC,GAAG,QAAQ,CAAC,MAAM,GAAG,GAAG,CAAC;AAC5C,CAAC;AAED,SAAS,aAAa,CAAC,QAAgB,EAAE,GAAW,EAAE,KAAuB,EAAE,CAAS;IACtF,IAAI,KAAK,GAAG,CAAC,CAAC;IACd,IAAI,KAAK,GAAG,CAAC,CAAC;IACd,IAAI,OAAO,GAAG,CAAC,CAAC;IAEhB,GAAG;QACD,MAAM,CAAC,GAAG,QAAQ,CAAC,UAAU,CAAC,GAAG,EAAE,CAAC,CAAC;QACrC,OAAO,GAAG,SAAS,CAAC,CAAC,CAAC,CAAC;QACvB,KAAK,IAAI,CAAC,OAAO,GAAG,EAAE,KAAK,KAAK,CAAC;QACjC,KAAK,IAAI,CAAC,CAAC;KACZ,QAAQ,OAAO,GAAG,EAAE,EAAE;IAEvB,MAAM,YAAY,GAAG,KAAK,GAAG,CAAC,CAAC;IAC/B,KAAK,MAAM,CAAC,CAAC;IAEb,IAAI,YAAY,EAAE;QAChB,KAAK,GAAG,CAAC,UAAU,GAAG,CAAC,KAAK,CAAC;KAC9B;IAED,KAAK,CAAC,CAAC,CAAC,IAAI,KAAK,CAAC;IAClB,OAAO,GAAG,CAAC;AACb,CAAC
;AAED,SAAS,UAAU,CAAC,QAAgB,EAAE,CAAS,EAAE,MAAc;IAC7D,IAAI,CAAC,IAAI,MAAM;QAAE,OAAO,KAAK,CAAC;IAC9B,OAAO,QAAQ,CAAC,UAAU,CAAC,CAAC,CAAC,KAAK,KAAK,CAAC;AAC1C,CAAC;AAED,SAAS,IAAI,CAAC,IAAwB;IACpC,IAAI,CAAC,IAAI,CAAC,cAAc,CAAC,CAAC;AAC5B,CAAC;AAED,SAAS,cAAc,CAAC,CAAmB,EAAE,CAAmB;IAC9D,OAAO,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;AACrB,CAAC;SAIe,MAAM,CAAC,OAAoC;IACzD,MAAM,KAAK,GAA6C,IAAI,UAAU,CAAC,CAAC,CAAQ,CAAC;IACjF,MAAM,SAAS,GAAG,IAAI,GAAG,EAAE,CAAC;IAC5B,MAAM,SAAS,GAAG,SAAS,GAAG,EAAE,CAAC;IACjC,MAAM,GAAG,GAAG,IAAI,UAAU,CAAC,SAAS,CAAC,CAAC;IACtC,MAAM,GAAG,GAAG,GAAG,CAAC,QAAQ,CAAC,CAAC,EAAE,SAAS,CAAC,CAAC;IACvC,IAAI,GAAG,GAAG,CAAC,CAAC;IACZ,IAAI,GAAG,GAAG,EAAE,CAAC;IAEb,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,OAAO,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;QACvC,MAAM,IAAI,GAAG,OAAO,CAAC,CAAC,CAAC,CAAC;QACxB,IAAI,CAAC,GAAG,CAAC,EAAE;YACT,IAAI,GAAG,KAAK,SAAS,EAAE;gBACrB,GAAG,IAAI,EAAE,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;gBACtB,GAAG,GAAG,CAAC,CAAC;aACT;YACD,GAAG,CAAC,GAAG,EAAE,CAAC,GAAG,SAAS,CAAC;SACxB;QACD,IAAI,IAAI,CAAC,MAAM,KAAK,CAAC;YAAE,SAAS;QAEhC,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC;QAEb,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YACpC,MAAM,OAAO,GAAG,IAAI,CAAC,CAAC,CAAC,CAAC;;;YAGxB,IAAI,GAAG,GAAG,SAAS,EAAE;gBACnB,GAAG,IAAI,EAAE,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;gBACtB,GAAG,CAAC,UAAU,CAAC,CAAC,EAAE,SAAS,EAAE,GAAG,CAAC,CAAC;gBAClC,GAAG,IAAI,SAAS,CAAC;aAClB;YACD,IAAI,CAAC,GAAG,CAAC;gBAAE,GAAG,CAAC,GAAG,EAAE,CAAC,GAAG,KAAK,CAAC;YAE9B,GAAG,GAAG,aAAa,CAAC,GAAG,EAAE,GAAG,EAAE,KAAK,EAAE,OAAO,EAAE,CAAC,CAAC,CAAC;YAEjD,IAAI,OAAO,CAAC,MAAM,KAAK,CAAC;gBAAE,SAAS;YACnC,GAAG,GAAG,aAAa,CAAC,GAAG,EAAE,GAAG,EAAE,KAAK,EAAE,OAAO,EAAE,CAAC,CAAC,CAAC;YACjD,GAAG,GAAG,aAAa,CAAC,GAAG,EAAE,GAAG,EAAE,KAAK,EAAE,OAAO,EAAE,CAAC,CAAC,CAAC;YACjD,GAAG,GAAG,aAAa,CAAC,GAAG,EAAE,GAAG,EAAE,KAAK,EAAE,OAAO,EAAE,CAAC,CAAC,CAAC;YAEjD,IAAI,OAAO,CAAC,MAAM,KAAK,CAAC;gBAAE,SAAS;YACnC,GAAG,GAAG,aAAa,CAAC,GAAG,EAAE,GAAG,EAAE,KAAK,EAAE,OAAO,EAAE,CAAC,CAAC,CAAC;SAClD;KACF;IAED,OAAO,GAAG,GAAG,EAAE,CAAC,MAAM,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC,CAAC;AAC/C,CAAC;AAED,SAAS,aAAa,CACpB,GAAe,EACf,GAAW,EACX,KAAuB,EACvB,OAAyB,EACzB,CAAS;IAET,MAAM,IAAI,GAAG,OAAO,CAAC,CAAC,CAAC,CAAC;IACxB,IAAI,GAAG,GAAG,IAAI,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;IAC1B,KAAK,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC;IAEhB,GAAG,GAAG,GAAG,GAAG,CAAC,GAAG,CAAC,CAAC,GAAG,IAAI,CAAC,IAAI,CAAC,GAAG,GAAG,IAAI,CAAC,CAAC;IAC3C,GAAG;QACD,IAAI,OAAO,GAAG,GAAG,GAAG,QAAQ,CAAC;QAC7B,GAAG,MAAM,CAAC,CAAC;QACX,IAAI,GAAG,GAAG,CAAC;YAAE,OAAO,IAAI,QAAQ,CAAC;QACjC,GAAG,CAAC,GAAG,EAAE,CAAC,GAAG,SAAS,CAAC,OAAO,CAAC,CAAC;KACjC,QAAQ,GAAG,GAAG,CAAC,EAAE;IAElB,OAAO,GAAG,CAAC;AACb;;;;"} \ No newline at end of file diff --git a/node_modules/@jridgewell/sourcemap-codec/dist/sourcemap-codec.umd.js b/node_modules/@jridgewell/sourcemap-codec/dist/sourcemap-codec.umd.js new file mode 100644 index 0000000..bec92a9 --- /dev/null +++ b/node_modules/@jridgewell/sourcemap-codec/dist/sourcemap-codec.umd.js @@ -0,0 +1,175 @@ +(function (global, factory) { + typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports) : + typeof define === 'function' && define.amd ? define(['exports'], factory) : + (global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.sourcemapCodec = {})); +})(this, (function (exports) { 'use strict'; + + const comma = ','.charCodeAt(0); + const semicolon = ';'.charCodeAt(0); + const chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'; + const intToChar = new Uint8Array(64); // 64 possible chars. 
+ const charToInt = new Uint8Array(128); // z is 122 in ASCII + for (let i = 0; i < chars.length; i++) { + const c = chars.charCodeAt(i); + intToChar[i] = c; + charToInt[c] = i; + } + // Provide a fallback for older environments. + const td = typeof TextDecoder !== 'undefined' + ? /* #__PURE__ */ new TextDecoder() + : typeof Buffer !== 'undefined' + ? { + decode(buf) { + const out = Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength); + return out.toString(); + }, + } + : { + decode(buf) { + let out = ''; + for (let i = 0; i < buf.length; i++) { + out += String.fromCharCode(buf[i]); + } + return out; + }, + }; + function decode(mappings) { + const state = new Int32Array(5); + const decoded = []; + let index = 0; + do { + const semi = indexOf(mappings, index); + const line = []; + let sorted = true; + let lastCol = 0; + state[0] = 0; + for (let i = index; i < semi; i++) { + let seg; + i = decodeInteger(mappings, i, state, 0); // genColumn + const col = state[0]; + if (col < lastCol) + sorted = false; + lastCol = col; + if (hasMoreVlq(mappings, i, semi)) { + i = decodeInteger(mappings, i, state, 1); // sourcesIndex + i = decodeInteger(mappings, i, state, 2); // sourceLine + i = decodeInteger(mappings, i, state, 3); // sourceColumn + if (hasMoreVlq(mappings, i, semi)) { + i = decodeInteger(mappings, i, state, 4); // namesIndex + seg = [col, state[1], state[2], state[3], state[4]]; + } + else { + seg = [col, state[1], state[2], state[3]]; + } + } + else { + seg = [col]; + } + line.push(seg); + } + if (!sorted) + sort(line); + decoded.push(line); + index = semi + 1; + } while (index <= mappings.length); + return decoded; + } + function indexOf(mappings, index) { + const idx = mappings.indexOf(';', index); + return idx === -1 ? mappings.length : idx; + } + function decodeInteger(mappings, pos, state, j) { + let value = 0; + let shift = 0; + let integer = 0; + do { + const c = mappings.charCodeAt(pos++); + integer = charToInt[c]; + value |= (integer & 31) << shift; + shift += 5; + } while (integer & 32); + const shouldNegate = value & 1; + value >>>= 1; + if (shouldNegate) { + value = -0x80000000 | -value; + } + state[j] += value; + return pos; + } + function hasMoreVlq(mappings, i, length) { + if (i >= length) + return false; + return mappings.charCodeAt(i) !== comma; + } + function sort(line) { + line.sort(sortComparator); + } + function sortComparator(a, b) { + return a[0] - b[0]; + } + function encode(decoded) { + const state = new Int32Array(5); + const bufLength = 1024 * 16; + const subLength = bufLength - 36; + const buf = new Uint8Array(bufLength); + const sub = buf.subarray(0, subLength); + let pos = 0; + let out = ''; + for (let i = 0; i < decoded.length; i++) { + const line = decoded[i]; + if (i > 0) { + if (pos === bufLength) { + out += td.decode(buf); + pos = 0; + } + buf[pos++] = semicolon; + } + if (line.length === 0) + continue; + state[0] = 0; + for (let j = 0; j < line.length; j++) { + const segment = line[j]; + // We can push up to 5 ints, each int can take at most 7 chars, and we + // may push a comma. 
+ if (pos > subLength) { + out += td.decode(sub); + buf.copyWithin(0, subLength, pos); + pos -= subLength; + } + if (j > 0) + buf[pos++] = comma; + pos = encodeInteger(buf, pos, state, segment, 0); // genColumn + if (segment.length === 1) + continue; + pos = encodeInteger(buf, pos, state, segment, 1); // sourcesIndex + pos = encodeInteger(buf, pos, state, segment, 2); // sourceLine + pos = encodeInteger(buf, pos, state, segment, 3); // sourceColumn + if (segment.length === 4) + continue; + pos = encodeInteger(buf, pos, state, segment, 4); // namesIndex + } + } + return out + td.decode(buf.subarray(0, pos)); + } + function encodeInteger(buf, pos, state, segment, j) { + const next = segment[j]; + let num = next - state[j]; + state[j] = next; + num = num < 0 ? (-num << 1) | 1 : num << 1; + do { + let clamped = num & 0b011111; + num >>>= 5; + if (num > 0) + clamped |= 0b100000; + buf[pos++] = intToChar[clamped]; + } while (num > 0); + return pos; + } + + exports.decode = decode; + exports.encode = encode; + + Object.defineProperty(exports, '__esModule', { value: true }); + +})); +//# sourceMappingURL=sourcemap-codec.umd.js.map diff --git a/node_modules/@jridgewell/sourcemap-codec/dist/sourcemap-codec.umd.js.map b/node_modules/@jridgewell/sourcemap-codec/dist/sourcemap-codec.umd.js.map new file mode 100644 index 0000000..b6b2003 --- /dev/null +++ b/node_modules/@jridgewell/sourcemap-codec/dist/sourcemap-codec.umd.js.map @@ -0,0 +1 @@ +{"version":3,"file":"sourcemap-codec.umd.js","sources":["../src/sourcemap-codec.ts"],"sourcesContent":["export type SourceMapSegment =\n | [number]\n | [number, number, number, number]\n | [number, number, number, number, number];\nexport type SourceMapLine = SourceMapSegment[];\nexport type SourceMapMappings = SourceMapLine[];\n\nconst comma = ','.charCodeAt(0);\nconst semicolon = ';'.charCodeAt(0);\nconst chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/';\nconst intToChar = new Uint8Array(64); // 64 possible chars.\nconst charToInt = new Uint8Array(128); // z is 122 in ASCII\n\nfor (let i = 0; i < chars.length; i++) {\n const c = chars.charCodeAt(i);\n intToChar[i] = c;\n charToInt[c] = i;\n}\n\n// Provide a fallback for older environments.\nconst td =\n typeof TextDecoder !== 'undefined'\n ? /* #__PURE__ */ new TextDecoder()\n : typeof Buffer !== 'undefined'\n ? 
{\n decode(buf: Uint8Array) {\n const out = Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength);\n return out.toString();\n },\n }\n : {\n decode(buf: Uint8Array) {\n let out = '';\n for (let i = 0; i < buf.length; i++) {\n out += String.fromCharCode(buf[i]);\n }\n return out;\n },\n };\n\nexport function decode(mappings: string): SourceMapMappings {\n const state: [number, number, number, number, number] = new Int32Array(5) as any;\n const decoded: SourceMapMappings = [];\n\n let index = 0;\n do {\n const semi = indexOf(mappings, index);\n const line: SourceMapLine = [];\n let sorted = true;\n let lastCol = 0;\n state[0] = 0;\n\n for (let i = index; i < semi; i++) {\n let seg: SourceMapSegment;\n\n i = decodeInteger(mappings, i, state, 0); // genColumn\n const col = state[0];\n if (col < lastCol) sorted = false;\n lastCol = col;\n\n if (hasMoreVlq(mappings, i, semi)) {\n i = decodeInteger(mappings, i, state, 1); // sourcesIndex\n i = decodeInteger(mappings, i, state, 2); // sourceLine\n i = decodeInteger(mappings, i, state, 3); // sourceColumn\n\n if (hasMoreVlq(mappings, i, semi)) {\n i = decodeInteger(mappings, i, state, 4); // namesIndex\n seg = [col, state[1], state[2], state[3], state[4]];\n } else {\n seg = [col, state[1], state[2], state[3]];\n }\n } else {\n seg = [col];\n }\n\n line.push(seg);\n }\n\n if (!sorted) sort(line);\n decoded.push(line);\n index = semi + 1;\n } while (index <= mappings.length);\n\n return decoded;\n}\n\nfunction indexOf(mappings: string, index: number): number {\n const idx = mappings.indexOf(';', index);\n return idx === -1 ? mappings.length : idx;\n}\n\nfunction decodeInteger(mappings: string, pos: number, state: SourceMapSegment, j: number): number {\n let value = 0;\n let shift = 0;\n let integer = 0;\n\n do {\n const c = mappings.charCodeAt(pos++);\n integer = charToInt[c];\n value |= (integer & 31) << shift;\n shift += 5;\n } while (integer & 32);\n\n const shouldNegate = value & 1;\n value >>>= 1;\n\n if (shouldNegate) {\n value = -0x80000000 | -value;\n }\n\n state[j] += value;\n return pos;\n}\n\nfunction hasMoreVlq(mappings: string, i: number, length: number): boolean {\n if (i >= length) return false;\n return mappings.charCodeAt(i) !== comma;\n}\n\nfunction sort(line: SourceMapSegment[]) {\n line.sort(sortComparator);\n}\n\nfunction sortComparator(a: SourceMapSegment, b: SourceMapSegment): number {\n return a[0] - b[0];\n}\n\nexport function encode(decoded: SourceMapMappings): string;\nexport function encode(decoded: Readonly): string;\nexport function encode(decoded: Readonly): string {\n const state: [number, number, number, number, number] = new Int32Array(5) as any;\n const bufLength = 1024 * 16;\n const subLength = bufLength - 36;\n const buf = new Uint8Array(bufLength);\n const sub = buf.subarray(0, subLength);\n let pos = 0;\n let out = '';\n\n for (let i = 0; i < decoded.length; i++) {\n const line = decoded[i];\n if (i > 0) {\n if (pos === bufLength) {\n out += td.decode(buf);\n pos = 0;\n }\n buf[pos++] = semicolon;\n }\n if (line.length === 0) continue;\n\n state[0] = 0;\n\n for (let j = 0; j < line.length; j++) {\n const segment = line[j];\n // We can push up to 5 ints, each int can take at most 7 chars, and we\n // may push a comma.\n if (pos > subLength) {\n out += td.decode(sub);\n buf.copyWithin(0, subLength, pos);\n pos -= subLength;\n }\n if (j > 0) buf[pos++] = comma;\n\n pos = encodeInteger(buf, pos, state, segment, 0); // genColumn\n\n if (segment.length === 1) continue;\n pos = encodeInteger(buf, pos, state, 
segment, 1); // sourcesIndex\n pos = encodeInteger(buf, pos, state, segment, 2); // sourceLine\n pos = encodeInteger(buf, pos, state, segment, 3); // sourceColumn\n\n if (segment.length === 4) continue;\n pos = encodeInteger(buf, pos, state, segment, 4); // namesIndex\n }\n }\n\n return out + td.decode(buf.subarray(0, pos));\n}\n\nfunction encodeInteger(\n buf: Uint8Array,\n pos: number,\n state: SourceMapSegment,\n segment: SourceMapSegment,\n j: number,\n): number {\n const next = segment[j];\n let num = next - state[j];\n state[j] = next;\n\n num = num < 0 ? (-num << 1) | 1 : num << 1;\n do {\n let clamped = num & 0b011111;\n num >>>= 5;\n if (num > 0) clamped |= 0b100000;\n buf[pos++] = intToChar[clamped];\n } while (num > 0);\n\n return pos;\n}\n"],"names":[],"mappings":";;;;;;IAOA,MAAM,KAAK,GAAG,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;IAChC,MAAM,SAAS,GAAG,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;IACpC,MAAM,KAAK,GAAG,kEAAkE,CAAC;IACjF,MAAM,SAAS,GAAG,IAAI,UAAU,CAAC,EAAE,CAAC,CAAC;IACrC,MAAM,SAAS,GAAG,IAAI,UAAU,CAAC,GAAG,CAAC,CAAC;IAEtC,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,KAAK,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;QACrC,MAAM,CAAC,GAAG,KAAK,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;QAC9B,SAAS,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC;QACjB,SAAS,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC;KAClB;IAED;IACA,MAAM,EAAE,GACN,OAAO,WAAW,KAAK,WAAW;0BACd,IAAI,WAAW,EAAE;UACjC,OAAO,MAAM,KAAK,WAAW;cAC7B;gBACE,MAAM,CAAC,GAAe;oBACpB,MAAM,GAAG,GAAG,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,MAAM,EAAE,GAAG,CAAC,UAAU,EAAE,GAAG,CAAC,UAAU,CAAC,CAAC;oBACpE,OAAO,GAAG,CAAC,QAAQ,EAAE,CAAC;iBACvB;aACF;cACD;gBACE,MAAM,CAAC,GAAe;oBACpB,IAAI,GAAG,GAAG,EAAE,CAAC;oBACb,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,GAAG,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;wBACnC,GAAG,IAAI,MAAM,CAAC,YAAY,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;qBACpC;oBACD,OAAO,GAAG,CAAC;iBACZ;aACF,CAAC;aAEQ,MAAM,CAAC,QAAgB;QACrC,MAAM,KAAK,GAA6C,IAAI,UAAU,CAAC,CAAC,CAAQ,CAAC;QACjF,MAAM,OAAO,GAAsB,EAAE,CAAC;QAEtC,IAAI,KAAK,GAAG,CAAC,CAAC;QACd,GAAG;YACD,MAAM,IAAI,GAAG,OAAO,CAAC,QAAQ,EAAE,KAAK,CAAC,CAAC;YACtC,MAAM,IAAI,GAAkB,EAAE,CAAC;YAC/B,IAAI,MAAM,GAAG,IAAI,CAAC;YAClB,IAAI,OAAO,GAAG,CAAC,CAAC;YAChB,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC;YAEb,KAAK,IAAI,CAAC,GAAG,KAAK,EAAE,CAAC,GAAG,IAAI,EAAE,CAAC,EAAE,EAAE;gBACjC,IAAI,GAAqB,CAAC;gBAE1B,CAAC,GAAG,aAAa,CAAC,QAAQ,EAAE,CAAC,EAAE,KAAK,EAAE,CAAC,CAAC,CAAC;gBACzC,MAAM,GAAG,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;gBACrB,IAAI,GAAG,GAAG,OAAO;oBAAE,MAAM,GAAG,KAAK,CAAC;gBAClC,OAAO,GAAG,GAAG,CAAC;gBAEd,IAAI,UAAU,CAAC,QAAQ,EAAE,CAAC,EAAE,IAAI,CAAC,EAAE;oBACjC,CAAC,GAAG,aAAa,CAAC,QAAQ,EAAE,CAAC,EAAE,KAAK,EAAE,CAAC,CAAC,CAAC;oBACzC,CAAC,GAAG,aAAa,CAAC,QAAQ,EAAE,CAAC,EAAE,KAAK,EAAE,CAAC,CAAC,CAAC;oBACzC,CAAC,GAAG,aAAa,CAAC,QAAQ,EAAE,CAAC,EAAE,KAAK,EAAE,CAAC,CAAC,CAAC;oBAEzC,IAAI,UAAU,CAAC,QAAQ,EAAE,CAAC,EAAE,IAAI,CAAC,EAAE;wBACjC,CAAC,GAAG,aAAa,CAAC,QAAQ,EAAE,CAAC,EAAE,KAAK,EAAE,CAAC,CAAC,CAAC;wBACzC,GAAG,GAAG,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC;qBACrD;yBAAM;wBACL,GAAG,GAAG,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC;qBAC3C;iBACF;qBAAM;oBACL,GAAG,GAAG,CAAC,GAAG,CAAC,CAAC;iBACb;gBAED,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;aAChB;YAED,IAAI,CAAC,MAAM;gBAAE,IAAI,CAAC,IAAI,CAAC,CAAC;YACxB,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;YACnB,KAAK,GAAG,IAAI,GAAG,CAAC,CAAC;SAClB,QAAQ,KAAK,IAAI,QAAQ,CAAC,MAAM,EAAE;QAEnC,OAAO,OAAO,CAAC;IACjB,CAAC;IAED,SAAS,OAAO,CAAC,QAAgB,EAAE,KAAa;QAC9C,MAAM,GAAG,GAAG,QAAQ,CAAC,OAAO,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC;QACzC,OAAO,GAAG,KAAK,CAAC,CAAC,GAAG,QAAQ,CAAC,MAAM,GAAG,GAAG,CAAC
;IAC5C,CAAC;IAED,SAAS,aAAa,CAAC,QAAgB,EAAE,GAAW,EAAE,KAAuB,EAAE,CAAS;QACtF,IAAI,KAAK,GAAG,CAAC,CAAC;QACd,IAAI,KAAK,GAAG,CAAC,CAAC;QACd,IAAI,OAAO,GAAG,CAAC,CAAC;QAEhB,GAAG;YACD,MAAM,CAAC,GAAG,QAAQ,CAAC,UAAU,CAAC,GAAG,EAAE,CAAC,CAAC;YACrC,OAAO,GAAG,SAAS,CAAC,CAAC,CAAC,CAAC;YACvB,KAAK,IAAI,CAAC,OAAO,GAAG,EAAE,KAAK,KAAK,CAAC;YACjC,KAAK,IAAI,CAAC,CAAC;SACZ,QAAQ,OAAO,GAAG,EAAE,EAAE;QAEvB,MAAM,YAAY,GAAG,KAAK,GAAG,CAAC,CAAC;QAC/B,KAAK,MAAM,CAAC,CAAC;QAEb,IAAI,YAAY,EAAE;YAChB,KAAK,GAAG,CAAC,UAAU,GAAG,CAAC,KAAK,CAAC;SAC9B;QAED,KAAK,CAAC,CAAC,CAAC,IAAI,KAAK,CAAC;QAClB,OAAO,GAAG,CAAC;IACb,CAAC;IAED,SAAS,UAAU,CAAC,QAAgB,EAAE,CAAS,EAAE,MAAc;QAC7D,IAAI,CAAC,IAAI,MAAM;YAAE,OAAO,KAAK,CAAC;QAC9B,OAAO,QAAQ,CAAC,UAAU,CAAC,CAAC,CAAC,KAAK,KAAK,CAAC;IAC1C,CAAC;IAED,SAAS,IAAI,CAAC,IAAwB;QACpC,IAAI,CAAC,IAAI,CAAC,cAAc,CAAC,CAAC;IAC5B,CAAC;IAED,SAAS,cAAc,CAAC,CAAmB,EAAE,CAAmB;QAC9D,OAAO,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;IACrB,CAAC;aAIe,MAAM,CAAC,OAAoC;QACzD,MAAM,KAAK,GAA6C,IAAI,UAAU,CAAC,CAAC,CAAQ,CAAC;QACjF,MAAM,SAAS,GAAG,IAAI,GAAG,EAAE,CAAC;QAC5B,MAAM,SAAS,GAAG,SAAS,GAAG,EAAE,CAAC;QACjC,MAAM,GAAG,GAAG,IAAI,UAAU,CAAC,SAAS,CAAC,CAAC;QACtC,MAAM,GAAG,GAAG,GAAG,CAAC,QAAQ,CAAC,CAAC,EAAE,SAAS,CAAC,CAAC;QACvC,IAAI,GAAG,GAAG,CAAC,CAAC;QACZ,IAAI,GAAG,GAAG,EAAE,CAAC;QAEb,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,OAAO,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YACvC,MAAM,IAAI,GAAG,OAAO,CAAC,CAAC,CAAC,CAAC;YACxB,IAAI,CAAC,GAAG,CAAC,EAAE;gBACT,IAAI,GAAG,KAAK,SAAS,EAAE;oBACrB,GAAG,IAAI,EAAE,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;oBACtB,GAAG,GAAG,CAAC,CAAC;iBACT;gBACD,GAAG,CAAC,GAAG,EAAE,CAAC,GAAG,SAAS,CAAC;aACxB;YACD,IAAI,IAAI,CAAC,MAAM,KAAK,CAAC;gBAAE,SAAS;YAEhC,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC;YAEb,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;gBACpC,MAAM,OAAO,GAAG,IAAI,CAAC,CAAC,CAAC,CAAC;;;gBAGxB,IAAI,GAAG,GAAG,SAAS,EAAE;oBACnB,GAAG,IAAI,EAAE,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;oBACtB,GAAG,CAAC,UAAU,CAAC,CAAC,EAAE,SAAS,EAAE,GAAG,CAAC,CAAC;oBAClC,GAAG,IAAI,SAAS,CAAC;iBAClB;gBACD,IAAI,CAAC,GAAG,CAAC;oBAAE,GAAG,CAAC,GAAG,EAAE,CAAC,GAAG,KAAK,CAAC;gBAE9B,GAAG,GAAG,aAAa,CAAC,GAAG,EAAE,GAAG,EAAE,KAAK,EAAE,OAAO,EAAE,CAAC,CAAC,CAAC;gBAEjD,IAAI,OAAO,CAAC,MAAM,KAAK,CAAC;oBAAE,SAAS;gBACnC,GAAG,GAAG,aAAa,CAAC,GAAG,EAAE,GAAG,EAAE,KAAK,EAAE,OAAO,EAAE,CAAC,CAAC,CAAC;gBACjD,GAAG,GAAG,aAAa,CAAC,GAAG,EAAE,GAAG,EAAE,KAAK,EAAE,OAAO,EAAE,CAAC,CAAC,CAAC;gBACjD,GAAG,GAAG,aAAa,CAAC,GAAG,EAAE,GAAG,EAAE,KAAK,EAAE,OAAO,EAAE,CAAC,CAAC,CAAC;gBAEjD,IAAI,OAAO,CAAC,MAAM,KAAK,CAAC;oBAAE,SAAS;gBACnC,GAAG,GAAG,aAAa,CAAC,GAAG,EAAE,GAAG,EAAE,KAAK,EAAE,OAAO,EAAE,CAAC,CAAC,CAAC;aAClD;SACF;QAED,OAAO,GAAG,GAAG,EAAE,CAAC,MAAM,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC,CAAC;IAC/C,CAAC;IAED,SAAS,aAAa,CACpB,GAAe,EACf,GAAW,EACX,KAAuB,EACvB,OAAyB,EACzB,CAAS;QAET,MAAM,IAAI,GAAG,OAAO,CAAC,CAAC,CAAC,CAAC;QACxB,IAAI,GAAG,GAAG,IAAI,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;QAC1B,KAAK,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC;QAEhB,GAAG,GAAG,GAAG,GAAG,CAAC,GAAG,CAAC,CAAC,GAAG,IAAI,CAAC,IAAI,CAAC,GAAG,GAAG,IAAI,CAAC,CAAC;QAC3C,GAAG;YACD,IAAI,OAAO,GAAG,GAAG,GAAG,QAAQ,CAAC;YAC7B,GAAG,MAAM,CAAC,CAAC;YACX,IAAI,GAAG,GAAG,CAAC;gBAAE,OAAO,IAAI,QAAQ,CAAC;YACjC,GAAG,CAAC,GAAG,EAAE,CAAC,GAAG,SAAS,CAAC,OAAO,CAAC,CAAC;SACjC,QAAQ,GAAG,GAAG,CAAC,EAAE;QAElB,OAAO,GAAG,CAAC;IACb;;;;;;;;;;;"} \ No newline at end of file diff --git a/node_modules/@jridgewell/sourcemap-codec/dist/types/sourcemap-codec.d.ts b/node_modules/@jridgewell/sourcemap-codec/dist/types/sourcemap-codec.d.ts new file mode 100644 index 0000000..410d320 --- /dev/null +++ 
b/node_modules/@jridgewell/sourcemap-codec/dist/types/sourcemap-codec.d.ts @@ -0,0 +1,6 @@ +export declare type SourceMapSegment = [number] | [number, number, number, number] | [number, number, number, number, number]; +export declare type SourceMapLine = SourceMapSegment[]; +export declare type SourceMapMappings = SourceMapLine[]; +export declare function decode(mappings: string): SourceMapMappings; +export declare function encode(decoded: SourceMapMappings): string; +export declare function encode(decoded: Readonly): string; diff --git a/node_modules/@jridgewell/sourcemap-codec/package.json b/node_modules/@jridgewell/sourcemap-codec/package.json new file mode 100644 index 0000000..578448f --- /dev/null +++ b/node_modules/@jridgewell/sourcemap-codec/package.json @@ -0,0 +1,74 @@ +{ + "name": "@jridgewell/sourcemap-codec", + "version": "1.4.15", + "description": "Encode/decode sourcemap mappings", + "keywords": [ + "sourcemap", + "vlq" + ], + "main": "dist/sourcemap-codec.umd.js", + "module": "dist/sourcemap-codec.mjs", + "types": "dist/types/sourcemap-codec.d.ts", + "files": [ + "dist" + ], + "exports": { + ".": [ + { + "types": "./dist/types/sourcemap-codec.d.ts", + "browser": "./dist/sourcemap-codec.umd.js", + "require": "./dist/sourcemap-codec.umd.js", + "import": "./dist/sourcemap-codec.mjs" + }, + "./dist/sourcemap-codec.umd.js" + ], + "./package.json": "./package.json" + }, + "scripts": { + "benchmark": "run-s build:rollup benchmark:*", + "benchmark:install": "cd benchmark && npm install", + "benchmark:only": "node --expose-gc benchmark/index.js", + "build": "run-s -n build:*", + "build:rollup": "rollup -c rollup.config.js", + "build:ts": "tsc --project tsconfig.build.json", + "lint": "run-s -n lint:*", + "lint:prettier": "npm run test:lint:prettier -- --write", + "lint:ts": "npm run test:lint:ts -- --fix", + "prebuild": "rm -rf dist", + "prepublishOnly": "npm run preversion", + "preversion": "run-s test build", + "pretest": "run-s build:rollup", + "test": "run-s -n test:lint test:only", + "test:debug": "mocha --inspect-brk", + "test:lint": "run-s -n test:lint:*", + "test:lint:prettier": "prettier --check '{src,test}/**/*.ts'", + "test:lint:ts": "eslint '{src,test}/**/*.ts'", + "test:only": "mocha", + "test:coverage": "c8 mocha", + "test:watch": "mocha --watch" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/jridgewell/sourcemap-codec.git" + }, + "author": "Rich Harris", + "license": "MIT", + "devDependencies": { + "@rollup/plugin-typescript": "8.3.0", + "@types/node": "17.0.15", + "@typescript-eslint/eslint-plugin": "5.10.0", + "@typescript-eslint/parser": "5.10.0", + "benchmark": "2.1.4", + "c8": "7.11.2", + "eslint": "8.7.0", + "eslint-config-prettier": "8.3.0", + "mocha": "9.2.0", + "npm-run-all": "4.1.5", + "prettier": "2.5.1", + "rollup": "2.64.0", + "source-map": "0.6.1", + "source-map-js": "1.0.2", + "sourcemap-codec": "1.4.8", + "typescript": "4.5.4" + } +} diff --git a/node_modules/@jridgewell/trace-mapping/LICENSE b/node_modules/@jridgewell/trace-mapping/LICENSE new file mode 100644 index 0000000..37bb488 --- /dev/null +++ b/node_modules/@jridgewell/trace-mapping/LICENSE @@ -0,0 +1,19 @@ +Copyright 2022 Justin Ridgewell + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the 
Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/@jridgewell/trace-mapping/README.md b/node_modules/@jridgewell/trace-mapping/README.md new file mode 100644 index 0000000..cc5e4f9 --- /dev/null +++ b/node_modules/@jridgewell/trace-mapping/README.md @@ -0,0 +1,252 @@ +# @jridgewell/trace-mapping + +> Trace the original position through a source map + +`trace-mapping` allows you to take the line and column of an output file and trace it to the +original location in the source file through a source map. + +You may already be familiar with the [`source-map`][source-map] package's `SourceMapConsumer`. This +provides the same `originalPositionFor` and `generatedPositionFor` API, without requiring WASM. + +## Installation + +```sh +npm install @jridgewell/trace-mapping +``` + +## Usage + +```typescript +import { + TraceMap, + originalPositionFor, + generatedPositionFor, + sourceContentFor, +} from '@jridgewell/trace-mapping'; + +const tracer = new TraceMap({ + version: 3, + sources: ['input.js'], + sourcesContent: ['content of input.js'], + names: ['foo'], + mappings: 'KAyCIA', +}); + +// Lines start at line 1, columns at column 0. +const traced = originalPositionFor(tracer, { line: 1, column: 5 }); +assert.deepEqual(traced, { + source: 'input.js', + line: 42, + column: 4, + name: 'foo', +}); + +const content = sourceContentFor(tracer, traced.source); +assert.strictEqual(content, 'content for input.js'); + +const generated = generatedPositionFor(tracer, { + source: 'input.js', + line: 42, + column: 4, +}); +assert.deepEqual(generated, { + line: 1, + column: 5, +}); +``` + +We also provide a lower level API to get the actual segment that matches our line and column. Unlike +`originalPositionFor`, `traceSegment` uses a 0-base for `line`: + +```typescript +import { traceSegment } from '@jridgewell/trace-mapping'; + +// line is 0-base. +const traced = traceSegment(tracer, /* line */ 0, /* column */ 5); + +// Segments are [outputColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex] +// Again, line is 0-base and so is sourceLine +assert.deepEqual(traced, [5, 0, 41, 4, 0]); +``` + +### SectionedSourceMaps + +The sourcemap spec defines a special `sections` field that's designed to handle concatenation of +output code with associated sourcemaps. This type of sourcemap is rarely used (no major build tool +produces it), but if you are hand coding a concatenation you may need it. 
We provide an `AnyMap` +helper that can receive either a regular sourcemap or a `SectionedSourceMap` and returns a +`TraceMap` instance: + +```typescript +import { AnyMap } from '@jridgewell/trace-mapping'; +const fooOutput = 'foo'; +const barOutput = 'bar'; +const output = [fooOutput, barOutput].join('\n'); + +const sectioned = new AnyMap({ + version: 3, + sections: [ + { + // 0-base line and column + offset: { line: 0, column: 0 }, + // fooOutput's sourcemap + map: { + version: 3, + sources: ['foo.js'], + names: ['foo'], + mappings: 'AAAAA', + }, + }, + { + // barOutput's sourcemap will not affect the first line, only the second + offset: { line: 1, column: 0 }, + map: { + version: 3, + sources: ['bar.js'], + names: ['bar'], + mappings: 'AAAAA', + }, + }, + ], +}); + +const traced = originalPositionFor(sectioned, { + line: 2, + column: 0, +}); + +assert.deepEqual(traced, { + source: 'bar.js', + line: 1, + column: 0, + name: 'bar', +}); +``` + +## Benchmarks + +``` +node v18.0.0 + +amp.js.map - 45120 segments + +Memory Usage: +trace-mapping decoded 562400 bytes +trace-mapping encoded 5706544 bytes +source-map-js 10717664 bytes +source-map-0.6.1 17446384 bytes +source-map-0.8.0 9701757 bytes +Smallest memory usage is trace-mapping decoded + +Init speed: +trace-mapping: decoded JSON input x 180 ops/sec ±0.34% (85 runs sampled) +trace-mapping: encoded JSON input x 364 ops/sec ±1.77% (89 runs sampled) +trace-mapping: decoded Object input x 3,116 ops/sec ±0.50% (96 runs sampled) +trace-mapping: encoded Object input x 410 ops/sec ±2.62% (85 runs sampled) +source-map-js: encoded Object input x 84.23 ops/sec ±0.91% (73 runs sampled) +source-map-0.6.1: encoded Object input x 37.21 ops/sec ±2.08% (51 runs sampled) +Fastest is trace-mapping: decoded Object input + +Trace speed: +trace-mapping: decoded originalPositionFor x 3,952,212 ops/sec ±0.17% (98 runs sampled) +trace-mapping: encoded originalPositionFor x 3,487,468 ops/sec ±1.58% (90 runs sampled) +source-map-js: encoded originalPositionFor x 827,730 ops/sec ±0.78% (97 runs sampled) +source-map-0.6.1: encoded originalPositionFor x 748,991 ops/sec ±0.53% (94 runs sampled) +source-map-0.8.0: encoded originalPositionFor x 2,532,894 ops/sec ±0.57% (95 runs sampled) +Fastest is trace-mapping: decoded originalPositionFor + + +*** + + +babel.min.js.map - 347793 segments + +Memory Usage: +trace-mapping decoded 89832 bytes +trace-mapping encoded 35474640 bytes +source-map-js 51257176 bytes +source-map-0.6.1 63515664 bytes +source-map-0.8.0 42933752 bytes +Smallest memory usage is trace-mapping decoded + +Init speed: +trace-mapping: decoded JSON input x 15.41 ops/sec ±8.65% (34 runs sampled) +trace-mapping: encoded JSON input x 28.20 ops/sec ±12.87% (42 runs sampled) +trace-mapping: decoded Object input x 964 ops/sec ±0.36% (99 runs sampled) +trace-mapping: encoded Object input x 31.77 ops/sec ±13.79% (45 runs sampled) +source-map-js: encoded Object input x 6.45 ops/sec ±5.16% (21 runs sampled) +source-map-0.6.1: encoded Object input x 4.07 ops/sec ±5.24% (15 runs sampled) +Fastest is trace-mapping: decoded Object input + +Trace speed: +trace-mapping: decoded originalPositionFor x 7,183,038 ops/sec ±0.58% (95 runs sampled) +trace-mapping: encoded originalPositionFor x 5,192,185 ops/sec ±0.41% (100 runs sampled) +source-map-js: encoded originalPositionFor x 4,259,489 ops/sec ±0.79% (94 runs sampled) +source-map-0.6.1: encoded originalPositionFor x 3,742,629 ops/sec ±0.71% (95 runs sampled) +source-map-0.8.0: encoded originalPositionFor x 6,270,211 ops/sec 
±0.64% (94 runs sampled) +Fastest is trace-mapping: decoded originalPositionFor + + +*** + + +preact.js.map - 1992 segments + +Memory Usage: +trace-mapping decoded 37128 bytes +trace-mapping encoded 247280 bytes +source-map-js 1143536 bytes +source-map-0.6.1 1290992 bytes +source-map-0.8.0 96544 bytes +Smallest memory usage is trace-mapping decoded + +Init speed: +trace-mapping: decoded JSON input x 3,483 ops/sec ±0.30% (98 runs sampled) +trace-mapping: encoded JSON input x 6,092 ops/sec ±0.18% (97 runs sampled) +trace-mapping: decoded Object input x 249,076 ops/sec ±0.24% (98 runs sampled) +trace-mapping: encoded Object input x 14,555 ops/sec ±0.48% (100 runs sampled) +source-map-js: encoded Object input x 2,447 ops/sec ±0.36% (99 runs sampled) +source-map-0.6.1: encoded Object input x 1,201 ops/sec ±0.57% (96 runs sampled) +Fastest is trace-mapping: decoded Object input + +Trace speed: +trace-mapping: decoded originalPositionFor x 7,620,192 ops/sec ±0.09% (99 runs sampled) +trace-mapping: encoded originalPositionFor x 6,872,554 ops/sec ±0.30% (97 runs sampled) +source-map-js: encoded originalPositionFor x 2,489,570 ops/sec ±0.35% (94 runs sampled) +source-map-0.6.1: encoded originalPositionFor x 1,698,633 ops/sec ±0.28% (98 runs sampled) +source-map-0.8.0: encoded originalPositionFor x 4,015,644 ops/sec ±0.22% (98 runs sampled) +Fastest is trace-mapping: decoded originalPositionFor + + +*** + + +react.js.map - 5726 segments + +Memory Usage: +trace-mapping decoded 16176 bytes +trace-mapping encoded 681552 bytes +source-map-js 2418352 bytes +source-map-0.6.1 2443672 bytes +source-map-0.8.0 111768 bytes +Smallest memory usage is trace-mapping decoded + +Init speed: +trace-mapping: decoded JSON input x 1,720 ops/sec ±0.34% (98 runs sampled) +trace-mapping: encoded JSON input x 4,406 ops/sec ±0.35% (100 runs sampled) +trace-mapping: decoded Object input x 92,122 ops/sec ±0.10% (99 runs sampled) +trace-mapping: encoded Object input x 5,385 ops/sec ±0.37% (99 runs sampled) +source-map-js: encoded Object input x 794 ops/sec ±0.40% (98 runs sampled) +source-map-0.6.1: encoded Object input x 416 ops/sec ±0.54% (91 runs sampled) +Fastest is trace-mapping: decoded Object input + +Trace speed: +trace-mapping: decoded originalPositionFor x 32,759,519 ops/sec ±0.33% (100 runs sampled) +trace-mapping: encoded originalPositionFor x 31,116,306 ops/sec ±0.33% (97 runs sampled) +source-map-js: encoded originalPositionFor x 17,458,435 ops/sec ±0.44% (97 runs sampled) +source-map-0.6.1: encoded originalPositionFor x 12,687,097 ops/sec ±0.43% (95 runs sampled) +source-map-0.8.0: encoded originalPositionFor x 23,538,275 ops/sec ±0.38% (95 runs sampled) +Fastest is trace-mapping: decoded originalPositionFor +``` + +[source-map]: https://www.npmjs.com/package/source-map diff --git a/node_modules/@jridgewell/trace-mapping/dist/trace-mapping.mjs b/node_modules/@jridgewell/trace-mapping/dist/trace-mapping.mjs new file mode 100644 index 0000000..917a330 --- /dev/null +++ b/node_modules/@jridgewell/trace-mapping/dist/trace-mapping.mjs @@ -0,0 +1,552 @@ +import { encode, decode } from '@jridgewell/sourcemap-codec'; +import resolveUri from '@jridgewell/resolve-uri'; + +function resolve(input, base) { + // The base is always treated as a directory, if it's not empty. 
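+ // e.g. (illustrative URL) a base of 'https://example.com/assets' is treated as
+ // 'https://example.com/assets/', so 'foo.js' resolves to 'https://example.com/assets/foo.js'.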
+ // https://github.com/mozilla/source-map/blob/8cb3ee57/lib/util.js#L327 + // https://github.com/chromium/chromium/blob/da4adbb3/third_party/blink/renderer/devtools/front_end/sdk/SourceMap.js#L400-L401 + if (base && !base.endsWith('/')) + base += '/'; + return resolveUri(input, base); +} + +/** + * Removes everything after the last "/", but leaves the slash. + */ +function stripFilename(path) { + if (!path) + return ''; + const index = path.lastIndexOf('/'); + return path.slice(0, index + 1); +} + +const COLUMN = 0; +const SOURCES_INDEX = 1; +const SOURCE_LINE = 2; +const SOURCE_COLUMN = 3; +const NAMES_INDEX = 4; +const REV_GENERATED_LINE = 1; +const REV_GENERATED_COLUMN = 2; + +function maybeSort(mappings, owned) { + const unsortedIndex = nextUnsortedSegmentLine(mappings, 0); + if (unsortedIndex === mappings.length) + return mappings; + // If we own the array (meaning we parsed it from JSON), then we're free to directly mutate it. If + // not, we do not want to modify the consumer's input array. + if (!owned) + mappings = mappings.slice(); + for (let i = unsortedIndex; i < mappings.length; i = nextUnsortedSegmentLine(mappings, i + 1)) { + mappings[i] = sortSegments(mappings[i], owned); + } + return mappings; +} +function nextUnsortedSegmentLine(mappings, start) { + for (let i = start; i < mappings.length; i++) { + if (!isSorted(mappings[i])) + return i; + } + return mappings.length; +} +function isSorted(line) { + for (let j = 1; j < line.length; j++) { + if (line[j][COLUMN] < line[j - 1][COLUMN]) { + return false; + } + } + return true; +} +function sortSegments(line, owned) { + if (!owned) + line = line.slice(); + return line.sort(sortComparator); +} +function sortComparator(a, b) { + return a[COLUMN] - b[COLUMN]; +} + +let found = false; +/** + * A binary search implementation that returns the index if a match is found. + * If no match is found, then the left-index (the index associated with the item that comes just + * before the desired index) is returned. To maintain proper sort order, a splice would happen at + * the next index: + * + * ```js + * const array = [1, 3]; + * const needle = 2; + * const index = binarySearch(array, needle, (item, needle) => item - needle); + * + * assert.equal(index, 0); + * array.splice(index + 1, 0, needle); + * assert.deepEqual(array, [1, 2, 3]); + * ``` + */ +function binarySearch(haystack, needle, low, high) { + while (low <= high) { + const mid = low + ((high - low) >> 1); + const cmp = haystack[mid][COLUMN] - needle; + if (cmp === 0) { + found = true; + return mid; + } + if (cmp < 0) { + low = mid + 1; + } + else { + high = mid - 1; + } + } + found = false; + return low - 1; +} +function upperBound(haystack, needle, index) { + for (let i = index + 1; i < haystack.length; index = i++) { + if (haystack[i][COLUMN] !== needle) + break; + } + return index; +} +function lowerBound(haystack, needle, index) { + for (let i = index - 1; i >= 0; index = i--) { + if (haystack[i][COLUMN] !== needle) + break; + } + return index; +} +function memoizedState() { + return { + lastKey: -1, + lastNeedle: -1, + lastIndex: -1, + }; +} +/** + * This overly complicated beast is just to record the last tested line/column and the resulting + * index, allowing us to skip a few tests if mappings are monotonically increasing. 
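+ * For example (hypothetical numbers): if the previous call for this same line searched for
+ * column 10 and stored lastIndex = 3, a later call for column 12 can start its binary search
+ * at low = 3 instead of 0, since segments within a line are sorted by column.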
+ */ +function memoizedBinarySearch(haystack, needle, state, key) { + const { lastKey, lastNeedle, lastIndex } = state; + let low = 0; + let high = haystack.length - 1; + if (key === lastKey) { + if (needle === lastNeedle) { + found = lastIndex !== -1 && haystack[lastIndex][COLUMN] === needle; + return lastIndex; + } + if (needle >= lastNeedle) { + // lastIndex may be -1 if the previous needle was not found. + low = lastIndex === -1 ? 0 : lastIndex; + } + else { + high = lastIndex; + } + } + state.lastKey = key; + state.lastNeedle = needle; + return (state.lastIndex = binarySearch(haystack, needle, low, high)); +} + +// Rebuilds the original source files, with mappings that are ordered by source line/column instead +// of generated line/column. +function buildBySources(decoded, memos) { + const sources = memos.map(buildNullArray); + for (let i = 0; i < decoded.length; i++) { + const line = decoded[i]; + for (let j = 0; j < line.length; j++) { + const seg = line[j]; + if (seg.length === 1) + continue; + const sourceIndex = seg[SOURCES_INDEX]; + const sourceLine = seg[SOURCE_LINE]; + const sourceColumn = seg[SOURCE_COLUMN]; + const originalSource = sources[sourceIndex]; + const originalLine = (originalSource[sourceLine] || (originalSource[sourceLine] = [])); + const memo = memos[sourceIndex]; + // The binary search either found a match, or it found the left-index just before where the + // segment should go. Either way, we want to insert after that. And there may be multiple + // generated segments associated with an original location, so there may need to move several + // indexes before we find where we need to insert. + const index = upperBound(originalLine, sourceColumn, memoizedBinarySearch(originalLine, sourceColumn, memo, sourceLine)); + insert(originalLine, (memo.lastIndex = index + 1), [sourceColumn, i, seg[COLUMN]]); + } + } + return sources; +} +function insert(array, index, value) { + for (let i = array.length; i > index; i--) { + array[i] = array[i - 1]; + } + array[index] = value; +} +// Null arrays allow us to use ordered index keys without actually allocating contiguous memory like +// a real array. We use a null-prototype object to avoid prototype pollution and deoptimizations. +// Numeric properties on objects are magically sorted in ascending order by the engine regardless of +// the insertion order. So, by setting any numeric keys, even out of order, we'll get ascending +// order when iterating with for-in. +function buildNullArray() { + return { __proto__: null }; +} + +const AnyMap = function (map, mapUrl) { + const parsed = typeof map === 'string' ? 
JSON.parse(map) : map; + if (!('sections' in parsed)) + return new TraceMap(parsed, mapUrl); + const mappings = []; + const sources = []; + const sourcesContent = []; + const names = []; + recurse(parsed, mapUrl, mappings, sources, sourcesContent, names, 0, 0, Infinity, Infinity); + const joined = { + version: 3, + file: parsed.file, + names, + sources, + sourcesContent, + mappings, + }; + return presortedDecodedMap(joined); +}; +function recurse(input, mapUrl, mappings, sources, sourcesContent, names, lineOffset, columnOffset, stopLine, stopColumn) { + const { sections } = input; + for (let i = 0; i < sections.length; i++) { + const { map, offset } = sections[i]; + let sl = stopLine; + let sc = stopColumn; + if (i + 1 < sections.length) { + const nextOffset = sections[i + 1].offset; + sl = Math.min(stopLine, lineOffset + nextOffset.line); + if (sl === stopLine) { + sc = Math.min(stopColumn, columnOffset + nextOffset.column); + } + else if (sl < stopLine) { + sc = columnOffset + nextOffset.column; + } + } + addSection(map, mapUrl, mappings, sources, sourcesContent, names, lineOffset + offset.line, columnOffset + offset.column, sl, sc); + } +} +function addSection(input, mapUrl, mappings, sources, sourcesContent, names, lineOffset, columnOffset, stopLine, stopColumn) { + if ('sections' in input) + return recurse(...arguments); + const map = new TraceMap(input, mapUrl); + const sourcesOffset = sources.length; + const namesOffset = names.length; + const decoded = decodedMappings(map); + const { resolvedSources, sourcesContent: contents } = map; + append(sources, resolvedSources); + append(names, map.names); + if (contents) + append(sourcesContent, contents); + else + for (let i = 0; i < resolvedSources.length; i++) + sourcesContent.push(null); + for (let i = 0; i < decoded.length; i++) { + const lineI = lineOffset + i; + // We can only add so many lines before we step into the range that the next section's map + // controls. When we get to the last line, then we'll start checking the segments to see if + // they've crossed into the column range. But it may not have any columns that overstep, so we + // still need to check that we don't overstep lines, too. + if (lineI > stopLine) + return; + // The out line may already exist in mappings (if we're continuing the line started by a + // previous section). Or, we may have jumped ahead several lines to start this section. + const out = getLine(mappings, lineI); + // On the 0th loop, the section's column offset shifts us forward. On all other lines (since the + // map can be multiple lines), it doesn't. + const cOffset = i === 0 ? columnOffset : 0; + const line = decoded[i]; + for (let j = 0; j < line.length; j++) { + const seg = line[j]; + const column = cOffset + seg[COLUMN]; + // If this segment steps into the column range that the next section's map controls, we need + // to stop early. + if (lineI === stopLine && column >= stopColumn) + return; + if (seg.length === 1) { + out.push([column]); + continue; + } + const sourcesIndex = sourcesOffset + seg[SOURCES_INDEX]; + const sourceLine = seg[SOURCE_LINE]; + const sourceColumn = seg[SOURCE_COLUMN]; + out.push(seg.length === 4 + ? 
[column, sourcesIndex, sourceLine, sourceColumn] + : [column, sourcesIndex, sourceLine, sourceColumn, namesOffset + seg[NAMES_INDEX]]); + } + } +} +function append(arr, other) { + for (let i = 0; i < other.length; i++) + arr.push(other[i]); +} +function getLine(arr, index) { + for (let i = arr.length; i <= index; i++) + arr[i] = []; + return arr[index]; +} + +const LINE_GTR_ZERO = '`line` must be greater than 0 (lines start at line 1)'; +const COL_GTR_EQ_ZERO = '`column` must be greater than or equal to 0 (columns start at column 0)'; +const LEAST_UPPER_BOUND = -1; +const GREATEST_LOWER_BOUND = 1; +/** + * Returns the encoded (VLQ string) form of the SourceMap's mappings field. + */ +let encodedMappings; +/** + * Returns the decoded (array of lines of segments) form of the SourceMap's mappings field. + */ +let decodedMappings; +/** + * A low-level API to find the segment associated with a generated line/column (think, from a + * stack trace). Line and column here are 0-based, unlike `originalPositionFor`. + */ +let traceSegment; +/** + * A higher-level API to find the source/line/column associated with a generated line/column + * (think, from a stack trace). Line is 1-based, but column is 0-based, due to legacy behavior in + * `source-map` library. + */ +let originalPositionFor; +/** + * Finds the generated line/column position of the provided source/line/column source position. + */ +let generatedPositionFor; +/** + * Finds all generated line/column positions of the provided source/line/column source position. + */ +let allGeneratedPositionsFor; +/** + * Iterates each mapping in generated position order. + */ +let eachMapping; +/** + * Retrieves the source content for a particular source, if its found. Returns null if not. + */ +let sourceContentFor; +/** + * A helper that skips sorting of the input map's mappings array, which can be expensive for larger + * maps. + */ +let presortedDecodedMap; +/** + * Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects + * a sourcemap, or to JSON.stringify. + */ +let decodedMap; +/** + * Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects + * a sourcemap, or to JSON.stringify. + */ +let encodedMap; +class TraceMap { + constructor(map, mapUrl) { + const isString = typeof map === 'string'; + if (!isString && map._decodedMemo) + return map; + const parsed = (isString ? JSON.parse(map) : map); + const { version, file, names, sourceRoot, sources, sourcesContent } = parsed; + this.version = version; + this.file = file; + this.names = names; + this.sourceRoot = sourceRoot; + this.sources = sources; + this.sourcesContent = sourcesContent; + const from = resolve(sourceRoot || '', stripFilename(mapUrl)); + this.resolvedSources = sources.map((s) => resolve(s || '', from)); + const { mappings } = parsed; + if (typeof mappings === 'string') { + this._encoded = mappings; + this._decoded = undefined; + } + else { + this._encoded = undefined; + this._decoded = maybeSort(mappings, isString); + } + this._decodedMemo = memoizedState(); + this._bySources = undefined; + this._bySourceMemos = undefined; + } +} +(() => { + encodedMappings = (map) => { + var _a; + return ((_a = map._encoded) !== null && _a !== void 0 ? 
_a : (map._encoded = encode(map._decoded))); + }; + decodedMappings = (map) => { + return (map._decoded || (map._decoded = decode(map._encoded))); + }; + traceSegment = (map, line, column) => { + const decoded = decodedMappings(map); + // It's common for parent source maps to have pointers to lines that have no + // mapping (like a "//# sourceMappingURL=") at the end of the child file. + if (line >= decoded.length) + return null; + const segments = decoded[line]; + const index = traceSegmentInternal(segments, map._decodedMemo, line, column, GREATEST_LOWER_BOUND); + return index === -1 ? null : segments[index]; + }; + originalPositionFor = (map, { line, column, bias }) => { + line--; + if (line < 0) + throw new Error(LINE_GTR_ZERO); + if (column < 0) + throw new Error(COL_GTR_EQ_ZERO); + const decoded = decodedMappings(map); + // It's common for parent source maps to have pointers to lines that have no + // mapping (like a "//# sourceMappingURL=") at the end of the child file. + if (line >= decoded.length) + return OMapping(null, null, null, null); + const segments = decoded[line]; + const index = traceSegmentInternal(segments, map._decodedMemo, line, column, bias || GREATEST_LOWER_BOUND); + if (index === -1) + return OMapping(null, null, null, null); + const segment = segments[index]; + if (segment.length === 1) + return OMapping(null, null, null, null); + const { names, resolvedSources } = map; + return OMapping(resolvedSources[segment[SOURCES_INDEX]], segment[SOURCE_LINE] + 1, segment[SOURCE_COLUMN], segment.length === 5 ? names[segment[NAMES_INDEX]] : null); + }; + allGeneratedPositionsFor = (map, { source, line, column, bias }) => { + // SourceMapConsumer uses LEAST_UPPER_BOUND for some reason, so we follow suit. + return generatedPosition(map, source, line, column, bias || LEAST_UPPER_BOUND, true); + }; + generatedPositionFor = (map, { source, line, column, bias }) => { + return generatedPosition(map, source, line, column, bias || GREATEST_LOWER_BOUND, false); + }; + eachMapping = (map, cb) => { + const decoded = decodedMappings(map); + const { names, resolvedSources } = map; + for (let i = 0; i < decoded.length; i++) { + const line = decoded[i]; + for (let j = 0; j < line.length; j++) { + const seg = line[j]; + const generatedLine = i + 1; + const generatedColumn = seg[0]; + let source = null; + let originalLine = null; + let originalColumn = null; + let name = null; + if (seg.length !== 1) { + source = resolvedSources[seg[1]]; + originalLine = seg[2] + 1; + originalColumn = seg[3]; + } + if (seg.length === 5) + name = names[seg[4]]; + cb({ + generatedLine, + generatedColumn, + source, + originalLine, + originalColumn, + name, + }); + } + } + }; + sourceContentFor = (map, source) => { + const { sources, resolvedSources, sourcesContent } = map; + if (sourcesContent == null) + return null; + let index = sources.indexOf(source); + if (index === -1) + index = resolvedSources.indexOf(source); + return index === -1 ? 
null : sourcesContent[index]; + }; + presortedDecodedMap = (map, mapUrl) => { + const tracer = new TraceMap(clone(map, []), mapUrl); + tracer._decoded = map.mappings; + return tracer; + }; + decodedMap = (map) => { + return clone(map, decodedMappings(map)); + }; + encodedMap = (map) => { + return clone(map, encodedMappings(map)); + }; + function generatedPosition(map, source, line, column, bias, all) { + line--; + if (line < 0) + throw new Error(LINE_GTR_ZERO); + if (column < 0) + throw new Error(COL_GTR_EQ_ZERO); + const { sources, resolvedSources } = map; + let sourceIndex = sources.indexOf(source); + if (sourceIndex === -1) + sourceIndex = resolvedSources.indexOf(source); + if (sourceIndex === -1) + return all ? [] : GMapping(null, null); + const generated = (map._bySources || (map._bySources = buildBySources(decodedMappings(map), (map._bySourceMemos = sources.map(memoizedState))))); + const segments = generated[sourceIndex][line]; + if (segments == null) + return all ? [] : GMapping(null, null); + const memo = map._bySourceMemos[sourceIndex]; + if (all) + return sliceGeneratedPositions(segments, memo, line, column, bias); + const index = traceSegmentInternal(segments, memo, line, column, bias); + if (index === -1) + return GMapping(null, null); + const segment = segments[index]; + return GMapping(segment[REV_GENERATED_LINE] + 1, segment[REV_GENERATED_COLUMN]); + } +})(); +function clone(map, mappings) { + return { + version: map.version, + file: map.file, + names: map.names, + sourceRoot: map.sourceRoot, + sources: map.sources, + sourcesContent: map.sourcesContent, + mappings, + }; +} +function OMapping(source, line, column, name) { + return { source, line, column, name }; +} +function GMapping(line, column) { + return { line, column }; +} +function traceSegmentInternal(segments, memo, line, column, bias) { + let index = memoizedBinarySearch(segments, column, memo, line); + if (found) { + index = (bias === LEAST_UPPER_BOUND ? upperBound : lowerBound)(segments, column, index); + } + else if (bias === LEAST_UPPER_BOUND) + index++; + if (index === -1 || index === segments.length) + return -1; + return index; +} +function sliceGeneratedPositions(segments, memo, line, column, bias) { + let min = traceSegmentInternal(segments, memo, line, column, GREATEST_LOWER_BOUND); + // We ignored the bias when tracing the segment so that we're guarnateed to find the first (in + // insertion order) segment that matched. Even if we did respect the bias when tracing, we would + // still need to call `lowerBound()` to find the first segment, which is slower than just looking + // for the GREATEST_LOWER_BOUND to begin with. The only difference that matters for us is when the + // binary search didn't match, in which case GREATEST_LOWER_BOUND just needs to increment to + // match LEAST_UPPER_BOUND. + if (!found && bias === LEAST_UPPER_BOUND) + min++; + if (min === -1 || min === segments.length) + return []; + // We may have found the segment that started at an earlier column. If this is the case, then we + // need to slice all generated segments that match _that_ column, because all such segments span + // to our desired column. + const matchedColumn = found ? column : segments[min][COLUMN]; + // The binary search is not guaranteed to find the lower bound when a match wasn't found. 
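+ // Rewinding to the lower bound here (and scanning forward to the upper bound below) collects
+ // every reverse segment that shares matchedColumn, so the loop that follows can report all of
+ // their generated line/column positions.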
+ if (!found) + min = lowerBound(segments, matchedColumn, min); + const max = upperBound(segments, matchedColumn, min); + const result = []; + for (; min <= max; min++) { + const segment = segments[min]; + result.push(GMapping(segment[REV_GENERATED_LINE] + 1, segment[REV_GENERATED_COLUMN])); + } + return result; +} + +export { AnyMap, GREATEST_LOWER_BOUND, LEAST_UPPER_BOUND, TraceMap, allGeneratedPositionsFor, decodedMap, decodedMappings, eachMapping, encodedMap, encodedMappings, generatedPositionFor, originalPositionFor, presortedDecodedMap, sourceContentFor, traceSegment }; +//# sourceMappingURL=trace-mapping.mjs.map diff --git a/node_modules/@jridgewell/trace-mapping/dist/trace-mapping.mjs.map b/node_modules/@jridgewell/trace-mapping/dist/trace-mapping.mjs.map new file mode 100644 index 0000000..4d40aa1 --- /dev/null +++ b/node_modules/@jridgewell/trace-mapping/dist/trace-mapping.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"trace-mapping.mjs","sources":["../src/resolve.ts","../src/strip-filename.ts","../src/sourcemap-segment.ts","../src/sort.ts","../src/binary-search.ts","../src/by-source.ts","../src/any-map.ts","../src/trace-mapping.ts"],"sourcesContent":["import resolveUri from '@jridgewell/resolve-uri';\n\nexport default function resolve(input: string, base: string | undefined): string {\n // The base is always treated as a directory, if it's not empty.\n // https://github.com/mozilla/source-map/blob/8cb3ee57/lib/util.js#L327\n // https://github.com/chromium/chromium/blob/da4adbb3/third_party/blink/renderer/devtools/front_end/sdk/SourceMap.js#L400-L401\n if (base && !base.endsWith('/')) base += '/';\n\n return resolveUri(input, base);\n}\n","/**\n * Removes everything after the last \"/\", but leaves the slash.\n */\nexport default function stripFilename(path: string | undefined | null): string {\n if (!path) return '';\n const index = path.lastIndexOf('/');\n return path.slice(0, index + 1);\n}\n","type GeneratedColumn = number;\ntype SourcesIndex = number;\ntype SourceLine = number;\ntype SourceColumn = number;\ntype NamesIndex = number;\n\ntype GeneratedLine = number;\n\nexport type SourceMapSegment =\n | [GeneratedColumn]\n | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn]\n | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn, NamesIndex];\n\nexport type ReverseSegment = [SourceColumn, GeneratedLine, GeneratedColumn];\n\nexport const COLUMN = 0;\nexport const SOURCES_INDEX = 1;\nexport const SOURCE_LINE = 2;\nexport const SOURCE_COLUMN = 3;\nexport const NAMES_INDEX = 4;\n\nexport const REV_GENERATED_LINE = 1;\nexport const REV_GENERATED_COLUMN = 2;\n","import { COLUMN } from './sourcemap-segment';\n\nimport type { SourceMapSegment } from './sourcemap-segment';\n\nexport default function maybeSort(\n mappings: SourceMapSegment[][],\n owned: boolean,\n): SourceMapSegment[][] {\n const unsortedIndex = nextUnsortedSegmentLine(mappings, 0);\n if (unsortedIndex === mappings.length) return mappings;\n\n // If we own the array (meaning we parsed it from JSON), then we're free to directly mutate it. 
If\n // not, we do not want to modify the consumer's input array.\n if (!owned) mappings = mappings.slice();\n\n for (let i = unsortedIndex; i < mappings.length; i = nextUnsortedSegmentLine(mappings, i + 1)) {\n mappings[i] = sortSegments(mappings[i], owned);\n }\n return mappings;\n}\n\nfunction nextUnsortedSegmentLine(mappings: SourceMapSegment[][], start: number): number {\n for (let i = start; i < mappings.length; i++) {\n if (!isSorted(mappings[i])) return i;\n }\n return mappings.length;\n}\n\nfunction isSorted(line: SourceMapSegment[]): boolean {\n for (let j = 1; j < line.length; j++) {\n if (line[j][COLUMN] < line[j - 1][COLUMN]) {\n return false;\n }\n }\n return true;\n}\n\nfunction sortSegments(line: SourceMapSegment[], owned: boolean): SourceMapSegment[] {\n if (!owned) line = line.slice();\n return line.sort(sortComparator);\n}\n\nfunction sortComparator(a: SourceMapSegment, b: SourceMapSegment): number {\n return a[COLUMN] - b[COLUMN];\n}\n","import type { SourceMapSegment, ReverseSegment } from './sourcemap-segment';\nimport { COLUMN } from './sourcemap-segment';\n\nexport type MemoState = {\n lastKey: number;\n lastNeedle: number;\n lastIndex: number;\n};\n\nexport let found = false;\n\n/**\n * A binary search implementation that returns the index if a match is found.\n * If no match is found, then the left-index (the index associated with the item that comes just\n * before the desired index) is returned. To maintain proper sort order, a splice would happen at\n * the next index:\n *\n * ```js\n * const array = [1, 3];\n * const needle = 2;\n * const index = binarySearch(array, needle, (item, needle) => item - needle);\n *\n * assert.equal(index, 0);\n * array.splice(index + 1, 0, needle);\n * assert.deepEqual(array, [1, 2, 3]);\n * ```\n */\nexport function binarySearch(\n haystack: SourceMapSegment[] | ReverseSegment[],\n needle: number,\n low: number,\n high: number,\n): number {\n while (low <= high) {\n const mid = low + ((high - low) >> 1);\n const cmp = haystack[mid][COLUMN] - needle;\n\n if (cmp === 0) {\n found = true;\n return mid;\n }\n\n if (cmp < 0) {\n low = mid + 1;\n } else {\n high = mid - 1;\n }\n }\n\n found = false;\n return low - 1;\n}\n\nexport function upperBound(\n haystack: SourceMapSegment[] | ReverseSegment[],\n needle: number,\n index: number,\n): number {\n for (let i = index + 1; i < haystack.length; index = i++) {\n if (haystack[i][COLUMN] !== needle) break;\n }\n return index;\n}\n\nexport function lowerBound(\n haystack: SourceMapSegment[] | ReverseSegment[],\n needle: number,\n index: number,\n): number {\n for (let i = index - 1; i >= 0; index = i--) {\n if (haystack[i][COLUMN] !== needle) break;\n }\n return index;\n}\n\nexport function memoizedState(): MemoState {\n return {\n lastKey: -1,\n lastNeedle: -1,\n lastIndex: -1,\n };\n}\n\n/**\n * This overly complicated beast is just to record the last tested line/column and the resulting\n * index, allowing us to skip a few tests if mappings are monotonically increasing.\n */\nexport function memoizedBinarySearch(\n haystack: SourceMapSegment[] | ReverseSegment[],\n needle: number,\n state: MemoState,\n key: number,\n): number {\n const { lastKey, lastNeedle, lastIndex } = state;\n\n let low = 0;\n let high = haystack.length - 1;\n if (key === lastKey) {\n if (needle === lastNeedle) {\n found = lastIndex !== -1 && haystack[lastIndex][COLUMN] === needle;\n return lastIndex;\n }\n\n if (needle >= lastNeedle) {\n // lastIndex may be -1 if the previous needle was not found.\n low = lastIndex 
=== -1 ? 0 : lastIndex;\n } else {\n high = lastIndex;\n }\n }\n state.lastKey = key;\n state.lastNeedle = needle;\n\n return (state.lastIndex = binarySearch(haystack, needle, low, high));\n}\n","import { COLUMN, SOURCES_INDEX, SOURCE_LINE, SOURCE_COLUMN } from './sourcemap-segment';\nimport { memoizedBinarySearch, upperBound } from './binary-search';\n\nimport type { ReverseSegment, SourceMapSegment } from './sourcemap-segment';\nimport type { MemoState } from './binary-search';\n\nexport type Source = {\n __proto__: null;\n [line: number]: Exclude[];\n};\n\n// Rebuilds the original source files, with mappings that are ordered by source line/column instead\n// of generated line/column.\nexport default function buildBySources(\n decoded: readonly SourceMapSegment[][],\n memos: MemoState[],\n): Source[] {\n const sources: Source[] = memos.map(buildNullArray);\n\n for (let i = 0; i < decoded.length; i++) {\n const line = decoded[i];\n for (let j = 0; j < line.length; j++) {\n const seg = line[j];\n if (seg.length === 1) continue;\n\n const sourceIndex = seg[SOURCES_INDEX];\n const sourceLine = seg[SOURCE_LINE];\n const sourceColumn = seg[SOURCE_COLUMN];\n const originalSource = sources[sourceIndex];\n const originalLine = (originalSource[sourceLine] ||= []);\n const memo = memos[sourceIndex];\n\n // The binary search either found a match, or it found the left-index just before where the\n // segment should go. Either way, we want to insert after that. And there may be multiple\n // generated segments associated with an original location, so there may need to move several\n // indexes before we find where we need to insert.\n const index = upperBound(\n originalLine,\n sourceColumn,\n memoizedBinarySearch(originalLine, sourceColumn, memo, sourceLine),\n );\n\n insert(originalLine, (memo.lastIndex = index + 1), [sourceColumn, i, seg[COLUMN]]);\n }\n }\n\n return sources;\n}\n\nfunction insert(array: T[], index: number, value: T) {\n for (let i = array.length; i > index; i--) {\n array[i] = array[i - 1];\n }\n array[index] = value;\n}\n\n// Null arrays allow us to use ordered index keys without actually allocating contiguous memory like\n// a real array. We use a null-prototype object to avoid prototype pollution and deoptimizations.\n// Numeric properties on objects are magically sorted in ascending order by the engine regardless of\n// the insertion order. So, by setting any numeric keys, even out of order, we'll get ascending\n// order when iterating with for-in.\nfunction buildNullArray(): T {\n return { __proto__: null } as T;\n}\n","import { TraceMap, presortedDecodedMap, decodedMappings } from './trace-mapping';\nimport {\n COLUMN,\n SOURCES_INDEX,\n SOURCE_LINE,\n SOURCE_COLUMN,\n NAMES_INDEX,\n} from './sourcemap-segment';\n\nimport type {\n Section,\n SectionedSourceMap,\n DecodedSourceMap,\n SectionedSourceMapInput,\n Ro,\n} from './types';\nimport type { SourceMapSegment } from './sourcemap-segment';\n\ntype AnyMap = {\n new (map: SectionedSourceMapInput, mapUrl?: string | null): TraceMap;\n (map: SectionedSourceMapInput, mapUrl?: string | null): TraceMap;\n};\n\nexport const AnyMap: AnyMap = function (map, mapUrl) {\n const parsed =\n typeof map === 'string' ? 
(JSON.parse(map) as Exclude) : map;\n\n if (!('sections' in parsed)) return new TraceMap(parsed, mapUrl);\n\n const mappings: SourceMapSegment[][] = [];\n const sources: string[] = [];\n const sourcesContent: (string | null)[] = [];\n const names: string[] = [];\n\n recurse(parsed, mapUrl, mappings, sources, sourcesContent, names, 0, 0, Infinity, Infinity);\n\n const joined: DecodedSourceMap = {\n version: 3,\n file: parsed.file,\n names,\n sources,\n sourcesContent,\n mappings,\n };\n\n return presortedDecodedMap(joined);\n} as AnyMap;\n\nfunction recurse(\n input: Ro,\n mapUrl: string | null | undefined,\n mappings: SourceMapSegment[][],\n sources: string[],\n sourcesContent: (string | null)[],\n names: string[],\n lineOffset: number,\n columnOffset: number,\n stopLine: number,\n stopColumn: number,\n) {\n const { sections } = input;\n for (let i = 0; i < sections.length; i++) {\n const { map, offset } = sections[i];\n\n let sl = stopLine;\n let sc = stopColumn;\n if (i + 1 < sections.length) {\n const nextOffset = sections[i + 1].offset;\n sl = Math.min(stopLine, lineOffset + nextOffset.line);\n\n if (sl === stopLine) {\n sc = Math.min(stopColumn, columnOffset + nextOffset.column);\n } else if (sl < stopLine) {\n sc = columnOffset + nextOffset.column;\n }\n }\n\n addSection(\n map,\n mapUrl,\n mappings,\n sources,\n sourcesContent,\n names,\n lineOffset + offset.line,\n columnOffset + offset.column,\n sl,\n sc,\n );\n }\n}\n\nfunction addSection(\n input: Ro,\n mapUrl: string | null | undefined,\n mappings: SourceMapSegment[][],\n sources: string[],\n sourcesContent: (string | null)[],\n names: string[],\n lineOffset: number,\n columnOffset: number,\n stopLine: number,\n stopColumn: number,\n) {\n if ('sections' in input) return recurse(...(arguments as unknown as Parameters));\n\n const map = new TraceMap(input, mapUrl);\n const sourcesOffset = sources.length;\n const namesOffset = names.length;\n const decoded = decodedMappings(map);\n const { resolvedSources, sourcesContent: contents } = map;\n\n append(sources, resolvedSources);\n append(names, map.names);\n if (contents) append(sourcesContent, contents);\n else for (let i = 0; i < resolvedSources.length; i++) sourcesContent.push(null);\n\n for (let i = 0; i < decoded.length; i++) {\n const lineI = lineOffset + i;\n\n // We can only add so many lines before we step into the range that the next section's map\n // controls. When we get to the last line, then we'll start checking the segments to see if\n // they've crossed into the column range. But it may not have any columns that overstep, so we\n // still need to check that we don't overstep lines, too.\n if (lineI > stopLine) return;\n\n // The out line may already exist in mappings (if we're continuing the line started by a\n // previous section). Or, we may have jumped ahead several lines to start this section.\n const out = getLine(mappings, lineI);\n // On the 0th loop, the section's column offset shifts us forward. On all other lines (since the\n // map can be multiple lines), it doesn't.\n const cOffset = i === 0 ? 
columnOffset : 0;\n\n const line = decoded[i];\n for (let j = 0; j < line.length; j++) {\n const seg = line[j];\n const column = cOffset + seg[COLUMN];\n\n // If this segment steps into the column range that the next section's map controls, we need\n // to stop early.\n if (lineI === stopLine && column >= stopColumn) return;\n\n if (seg.length === 1) {\n out.push([column]);\n continue;\n }\n\n const sourcesIndex = sourcesOffset + seg[SOURCES_INDEX];\n const sourceLine = seg[SOURCE_LINE];\n const sourceColumn = seg[SOURCE_COLUMN];\n out.push(\n seg.length === 4\n ? [column, sourcesIndex, sourceLine, sourceColumn]\n : [column, sourcesIndex, sourceLine, sourceColumn, namesOffset + seg[NAMES_INDEX]],\n );\n }\n }\n}\n\nfunction append(arr: T[], other: T[]) {\n for (let i = 0; i < other.length; i++) arr.push(other[i]);\n}\n\nfunction getLine(arr: T[][], index: number): T[] {\n for (let i = arr.length; i <= index; i++) arr[i] = [];\n return arr[index];\n}\n","import { encode, decode } from '@jridgewell/sourcemap-codec';\n\nimport resolve from './resolve';\nimport stripFilename from './strip-filename';\nimport maybeSort from './sort';\nimport buildBySources from './by-source';\nimport {\n memoizedState,\n memoizedBinarySearch,\n upperBound,\n lowerBound,\n found as bsFound,\n} from './binary-search';\nimport {\n COLUMN,\n SOURCES_INDEX,\n SOURCE_LINE,\n SOURCE_COLUMN,\n NAMES_INDEX,\n REV_GENERATED_LINE,\n REV_GENERATED_COLUMN,\n} from './sourcemap-segment';\n\nimport type { SourceMapSegment, ReverseSegment } from './sourcemap-segment';\nimport type {\n SourceMapV3,\n DecodedSourceMap,\n EncodedSourceMap,\n InvalidOriginalMapping,\n OriginalMapping,\n InvalidGeneratedMapping,\n GeneratedMapping,\n SourceMapInput,\n Needle,\n SourceNeedle,\n SourceMap,\n EachMapping,\n Bias,\n} from './types';\nimport type { Source } from './by-source';\nimport type { MemoState } from './binary-search';\n\nexport type { SourceMapSegment } from './sourcemap-segment';\nexport type {\n SourceMapInput,\n SectionedSourceMapInput,\n DecodedSourceMap,\n EncodedSourceMap,\n SectionedSourceMap,\n InvalidOriginalMapping,\n OriginalMapping as Mapping,\n OriginalMapping,\n InvalidGeneratedMapping,\n GeneratedMapping,\n EachMapping,\n} from './types';\n\nconst LINE_GTR_ZERO = '`line` must be greater than 0 (lines start at line 1)';\nconst COL_GTR_EQ_ZERO = '`column` must be greater than or equal to 0 (columns start at column 0)';\n\nexport const LEAST_UPPER_BOUND = -1;\nexport const GREATEST_LOWER_BOUND = 1;\n\n/**\n * Returns the encoded (VLQ string) form of the SourceMap's mappings field.\n */\nexport let encodedMappings: (map: TraceMap) => EncodedSourceMap['mappings'];\n\n/**\n * Returns the decoded (array of lines of segments) form of the SourceMap's mappings field.\n */\nexport let decodedMappings: (map: TraceMap) => Readonly;\n\n/**\n * A low-level API to find the segment associated with a generated line/column (think, from a\n * stack trace). Line and column here are 0-based, unlike `originalPositionFor`.\n */\nexport let traceSegment: (\n map: TraceMap,\n line: number,\n column: number,\n) => Readonly | null;\n\n/**\n * A higher-level API to find the source/line/column associated with a generated line/column\n * (think, from a stack trace). 
Line is 1-based, but column is 0-based, due to legacy behavior in\n * `source-map` library.\n */\nexport let originalPositionFor: (\n map: TraceMap,\n needle: Needle,\n) => OriginalMapping | InvalidOriginalMapping;\n\n/**\n * Finds the generated line/column position of the provided source/line/column source position.\n */\nexport let generatedPositionFor: (\n map: TraceMap,\n needle: SourceNeedle,\n) => GeneratedMapping | InvalidGeneratedMapping;\n\n/**\n * Finds all generated line/column positions of the provided source/line/column source position.\n */\nexport let allGeneratedPositionsFor: (map: TraceMap, needle: SourceNeedle) => GeneratedMapping[];\n\n/**\n * Iterates each mapping in generated position order.\n */\nexport let eachMapping: (map: TraceMap, cb: (mapping: EachMapping) => void) => void;\n\n/**\n * Retrieves the source content for a particular source, if its found. Returns null if not.\n */\nexport let sourceContentFor: (map: TraceMap, source: string) => string | null;\n\n/**\n * A helper that skips sorting of the input map's mappings array, which can be expensive for larger\n * maps.\n */\nexport let presortedDecodedMap: (map: DecodedSourceMap, mapUrl?: string) => TraceMap;\n\n/**\n * Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects\n * a sourcemap, or to JSON.stringify.\n */\nexport let decodedMap: (\n map: TraceMap,\n) => Omit & { mappings: readonly SourceMapSegment[][] };\n\n/**\n * Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects\n * a sourcemap, or to JSON.stringify.\n */\nexport let encodedMap: (map: TraceMap) => EncodedSourceMap;\n\nexport { AnyMap } from './any-map';\n\nexport class TraceMap implements SourceMap {\n declare version: SourceMapV3['version'];\n declare file: SourceMapV3['file'];\n declare names: SourceMapV3['names'];\n declare sourceRoot: SourceMapV3['sourceRoot'];\n declare sources: SourceMapV3['sources'];\n declare sourcesContent: SourceMapV3['sourcesContent'];\n\n declare resolvedSources: string[];\n private declare _encoded: string | undefined;\n\n private declare _decoded: SourceMapSegment[][] | undefined;\n private declare _decodedMemo: MemoState;\n\n private declare _bySources: Source[] | undefined;\n private declare _bySourceMemos: MemoState[] | undefined;\n\n constructor(map: SourceMapInput, mapUrl?: string | null) {\n const isString = typeof map === 'string';\n\n if (!isString && (map as unknown as { _decodedMemo: any })._decodedMemo) return map as TraceMap;\n\n const parsed = (isString ? 
JSON.parse(map) : map) as DecodedSourceMap | EncodedSourceMap;\n\n const { version, file, names, sourceRoot, sources, sourcesContent } = parsed;\n this.version = version;\n this.file = file;\n this.names = names;\n this.sourceRoot = sourceRoot;\n this.sources = sources;\n this.sourcesContent = sourcesContent;\n\n const from = resolve(sourceRoot || '', stripFilename(mapUrl));\n this.resolvedSources = sources.map((s) => resolve(s || '', from));\n\n const { mappings } = parsed;\n if (typeof mappings === 'string') {\n this._encoded = mappings;\n this._decoded = undefined;\n } else {\n this._encoded = undefined;\n this._decoded = maybeSort(mappings, isString);\n }\n\n this._decodedMemo = memoizedState();\n this._bySources = undefined;\n this._bySourceMemos = undefined;\n }\n\n static {\n encodedMappings = (map) => {\n return (map._encoded ??= encode(map._decoded!));\n };\n\n decodedMappings = (map) => {\n return (map._decoded ||= decode(map._encoded!));\n };\n\n traceSegment = (map, line, column) => {\n const decoded = decodedMappings(map);\n\n // It's common for parent source maps to have pointers to lines that have no\n // mapping (like a \"//# sourceMappingURL=\") at the end of the child file.\n if (line >= decoded.length) return null;\n\n const segments = decoded[line];\n const index = traceSegmentInternal(\n segments,\n map._decodedMemo,\n line,\n column,\n GREATEST_LOWER_BOUND,\n );\n\n return index === -1 ? null : segments[index];\n };\n\n originalPositionFor = (map, { line, column, bias }) => {\n line--;\n if (line < 0) throw new Error(LINE_GTR_ZERO);\n if (column < 0) throw new Error(COL_GTR_EQ_ZERO);\n\n const decoded = decodedMappings(map);\n\n // It's common for parent source maps to have pointers to lines that have no\n // mapping (like a \"//# sourceMappingURL=\") at the end of the child file.\n if (line >= decoded.length) return OMapping(null, null, null, null);\n\n const segments = decoded[line];\n const index = traceSegmentInternal(\n segments,\n map._decodedMemo,\n line,\n column,\n bias || GREATEST_LOWER_BOUND,\n );\n\n if (index === -1) return OMapping(null, null, null, null);\n\n const segment = segments[index];\n if (segment.length === 1) return OMapping(null, null, null, null);\n\n const { names, resolvedSources } = map;\n return OMapping(\n resolvedSources[segment[SOURCES_INDEX]],\n segment[SOURCE_LINE] + 1,\n segment[SOURCE_COLUMN],\n segment.length === 5 ? 
names[segment[NAMES_INDEX]] : null,\n );\n };\n\n allGeneratedPositionsFor = (map, { source, line, column, bias }) => {\n // SourceMapConsumer uses LEAST_UPPER_BOUND for some reason, so we follow suit.\n return generatedPosition(map, source, line, column, bias || LEAST_UPPER_BOUND, true);\n };\n\n generatedPositionFor = (map, { source, line, column, bias }) => {\n return generatedPosition(map, source, line, column, bias || GREATEST_LOWER_BOUND, false);\n };\n\n eachMapping = (map, cb) => {\n const decoded = decodedMappings(map);\n const { names, resolvedSources } = map;\n\n for (let i = 0; i < decoded.length; i++) {\n const line = decoded[i];\n for (let j = 0; j < line.length; j++) {\n const seg = line[j];\n\n const generatedLine = i + 1;\n const generatedColumn = seg[0];\n let source = null;\n let originalLine = null;\n let originalColumn = null;\n let name = null;\n if (seg.length !== 1) {\n source = resolvedSources[seg[1]];\n originalLine = seg[2] + 1;\n originalColumn = seg[3];\n }\n if (seg.length === 5) name = names[seg[4]];\n\n cb({\n generatedLine,\n generatedColumn,\n source,\n originalLine,\n originalColumn,\n name,\n } as EachMapping);\n }\n }\n };\n\n sourceContentFor = (map, source) => {\n const { sources, resolvedSources, sourcesContent } = map;\n if (sourcesContent == null) return null;\n\n let index = sources.indexOf(source);\n if (index === -1) index = resolvedSources.indexOf(source);\n\n return index === -1 ? null : sourcesContent[index];\n };\n\n presortedDecodedMap = (map, mapUrl) => {\n const tracer = new TraceMap(clone(map, []), mapUrl);\n tracer._decoded = map.mappings;\n return tracer;\n };\n\n decodedMap = (map) => {\n return clone(map, decodedMappings(map));\n };\n\n encodedMap = (map) => {\n return clone(map, encodedMappings(map));\n };\n\n function generatedPosition(\n map: TraceMap,\n source: string,\n line: number,\n column: number,\n bias: Bias,\n all: false,\n ): GeneratedMapping | InvalidGeneratedMapping;\n function generatedPosition(\n map: TraceMap,\n source: string,\n line: number,\n column: number,\n bias: Bias,\n all: true,\n ): GeneratedMapping[];\n function generatedPosition(\n map: TraceMap,\n source: string,\n line: number,\n column: number,\n bias: Bias,\n all: boolean,\n ): GeneratedMapping | InvalidGeneratedMapping | GeneratedMapping[] {\n line--;\n if (line < 0) throw new Error(LINE_GTR_ZERO);\n if (column < 0) throw new Error(COL_GTR_EQ_ZERO);\n\n const { sources, resolvedSources } = map;\n let sourceIndex = sources.indexOf(source);\n if (sourceIndex === -1) sourceIndex = resolvedSources.indexOf(source);\n if (sourceIndex === -1) return all ? [] : GMapping(null, null);\n\n const generated = (map._bySources ||= buildBySources(\n decodedMappings(map),\n (map._bySourceMemos = sources.map(memoizedState)),\n ));\n\n const segments = generated[sourceIndex][line];\n if (segments == null) return all ? [] : GMapping(null, null);\n\n const memo = map._bySourceMemos![sourceIndex];\n\n if (all) return sliceGeneratedPositions(segments, memo, line, column, bias);\n\n const index = traceSegmentInternal(segments, memo, line, column, bias);\n if (index === -1) return GMapping(null, null);\n\n const segment = segments[index];\n return GMapping(segment[REV_GENERATED_LINE] + 1, segment[REV_GENERATED_COLUMN]);\n }\n }\n}\n\nfunction clone(\n map: TraceMap | DecodedSourceMap | EncodedSourceMap,\n mappings: T,\n): T extends string ? 
EncodedSourceMap : DecodedSourceMap {\n return {\n version: map.version,\n file: map.file,\n names: map.names,\n sourceRoot: map.sourceRoot,\n sources: map.sources,\n sourcesContent: map.sourcesContent,\n mappings,\n } as any;\n}\n\nfunction OMapping(source: null, line: null, column: null, name: null): InvalidOriginalMapping;\nfunction OMapping(\n source: string,\n line: number,\n column: number,\n name: string | null,\n): OriginalMapping;\nfunction OMapping(\n source: string | null,\n line: number | null,\n column: number | null,\n name: string | null,\n): OriginalMapping | InvalidOriginalMapping {\n return { source, line, column, name } as any;\n}\n\nfunction GMapping(line: null, column: null): InvalidGeneratedMapping;\nfunction GMapping(line: number, column: number): GeneratedMapping;\nfunction GMapping(\n line: number | null,\n column: number | null,\n): GeneratedMapping | InvalidGeneratedMapping {\n return { line, column } as any;\n}\n\nfunction traceSegmentInternal(\n segments: SourceMapSegment[],\n memo: MemoState,\n line: number,\n column: number,\n bias: Bias,\n): number;\nfunction traceSegmentInternal(\n segments: ReverseSegment[],\n memo: MemoState,\n line: number,\n column: number,\n bias: Bias,\n): number;\nfunction traceSegmentInternal(\n segments: SourceMapSegment[] | ReverseSegment[],\n memo: MemoState,\n line: number,\n column: number,\n bias: Bias,\n): number {\n let index = memoizedBinarySearch(segments, column, memo, line);\n if (bsFound) {\n index = (bias === LEAST_UPPER_BOUND ? upperBound : lowerBound)(segments, column, index);\n } else if (bias === LEAST_UPPER_BOUND) index++;\n\n if (index === -1 || index === segments.length) return -1;\n return index;\n}\n\nfunction sliceGeneratedPositions(\n segments: ReverseSegment[],\n memo: MemoState,\n line: number,\n column: number,\n bias: Bias,\n): GeneratedMapping[] {\n let min = traceSegmentInternal(segments, memo, line, column, GREATEST_LOWER_BOUND);\n\n // We ignored the bias when tracing the segment so that we're guarnateed to find the first (in\n // insertion order) segment that matched. Even if we did respect the bias when tracing, we would\n // still need to call `lowerBound()` to find the first segment, which is slower than just looking\n // for the GREATEST_LOWER_BOUND to begin with. The only difference that matters for us is when the\n // binary search didn't match, in which case GREATEST_LOWER_BOUND just needs to increment to\n // match LEAST_UPPER_BOUND.\n if (!bsFound && bias === LEAST_UPPER_BOUND) min++;\n\n if (min === -1 || min === segments.length) return [];\n\n // We may have found the segment that started at an earlier column. If this is the case, then we\n // need to slice all generated segments that match _that_ column, because all such segments span\n // to our desired column.\n const matchedColumn = bsFound ? 
column : segments[min][COLUMN];\n\n // The binary search is not guaranteed to find the lower bound when a match wasn't found.\n if (!bsFound) min = lowerBound(segments, matchedColumn, min);\n const max = upperBound(segments, matchedColumn, min);\n\n const result = [];\n for (; min <= max; min++) {\n const segment = segments[min];\n result.push(GMapping(segment[REV_GENERATED_LINE] + 1, segment[REV_GENERATED_COLUMN]));\n }\n return result;\n}\n"],"names":["bsFound"],"mappings":";;;AAEc,SAAU,OAAO,CAAC,KAAa,EAAE,IAAwB,EAAA;;;;IAIrE,IAAI,IAAI,IAAI,CAAC,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC;QAAE,IAAI,IAAI,GAAG,CAAC;AAE7C,IAAA,OAAO,UAAU,CAAC,KAAK,EAAE,IAAI,CAAC,CAAC;AACjC;;ACTA;;AAEG;AACqB,SAAA,aAAa,CAAC,IAA+B,EAAA;AACnE,IAAA,IAAI,CAAC,IAAI;AAAE,QAAA,OAAO,EAAE,CAAC;IACrB,MAAM,KAAK,GAAG,IAAI,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC;IACpC,OAAO,IAAI,CAAC,KAAK,CAAC,CAAC,EAAE,KAAK,GAAG,CAAC,CAAC,CAAC;AAClC;;ACQO,MAAM,MAAM,GAAG,CAAC,CAAC;AACjB,MAAM,aAAa,GAAG,CAAC,CAAC;AACxB,MAAM,WAAW,GAAG,CAAC,CAAC;AACtB,MAAM,aAAa,GAAG,CAAC,CAAC;AACxB,MAAM,WAAW,GAAG,CAAC,CAAC;AAEtB,MAAM,kBAAkB,GAAG,CAAC,CAAC;AAC7B,MAAM,oBAAoB,GAAG,CAAC;;AClBvB,SAAU,SAAS,CAC/B,QAA8B,EAC9B,KAAc,EAAA;IAEd,MAAM,aAAa,GAAG,uBAAuB,CAAC,QAAQ,EAAE,CAAC,CAAC,CAAC;AAC3D,IAAA,IAAI,aAAa,KAAK,QAAQ,CAAC,MAAM;AAAE,QAAA,OAAO,QAAQ,CAAC;;;AAIvD,IAAA,IAAI,CAAC,KAAK;AAAE,QAAA,QAAQ,GAAG,QAAQ,CAAC,KAAK,EAAE,CAAC;IAExC,KAAK,IAAI,CAAC,GAAG,aAAa,EAAE,CAAC,GAAG,QAAQ,CAAC,MAAM,EAAE,CAAC,GAAG,uBAAuB,CAAC,QAAQ,EAAE,CAAC,GAAG,CAAC,CAAC,EAAE;AAC7F,QAAA,QAAQ,CAAC,CAAC,CAAC,GAAG,YAAY,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC;AAChD,KAAA;AACD,IAAA,OAAO,QAAQ,CAAC;AAClB,CAAC;AAED,SAAS,uBAAuB,CAAC,QAA8B,EAAE,KAAa,EAAA;AAC5E,IAAA,KAAK,IAAI,CAAC,GAAG,KAAK,EAAE,CAAC,GAAG,QAAQ,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;AAC5C,QAAA,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC;AAAE,YAAA,OAAO,CAAC,CAAC;AACtC,KAAA;IACD,OAAO,QAAQ,CAAC,MAAM,CAAC;AACzB,CAAC;AAED,SAAS,QAAQ,CAAC,IAAwB,EAAA;AACxC,IAAA,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;AACpC,QAAA,IAAI,IAAI,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,IAAI,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,EAAE;AACzC,YAAA,OAAO,KAAK,CAAC;AACd,SAAA;AACF,KAAA;AACD,IAAA,OAAO,IAAI,CAAC;AACd,CAAC;AAED,SAAS,YAAY,CAAC,IAAwB,EAAE,KAAc,EAAA;AAC5D,IAAA,IAAI,CAAC,KAAK;AAAE,QAAA,IAAI,GAAG,IAAI,CAAC,KAAK,EAAE,CAAC;AAChC,IAAA,OAAO,IAAI,CAAC,IAAI,CAAC,cAAc,CAAC,CAAC;AACnC,CAAC;AAED,SAAS,cAAc,CAAC,CAAmB,EAAE,CAAmB,EAAA;IAC9D,OAAO,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,MAAM,CAAC,CAAC;AAC/B;;ACnCO,IAAI,KAAK,GAAG,KAAK,CAAC;AAEzB;;;;;;;;;;;;;;;AAeG;AACG,SAAU,YAAY,CAC1B,QAA+C,EAC/C,MAAc,EACd,GAAW,EACX,IAAY,EAAA;IAEZ,OAAO,GAAG,IAAI,IAAI,EAAE;AAClB,QAAA,MAAM,GAAG,GAAG,GAAG,IAAI,CAAC,IAAI,GAAG,GAAG,KAAK,CAAC,CAAC,CAAC;QACtC,MAAM,GAAG,GAAG,QAAQ,CAAC,GAAG,CAAC,CAAC,MAAM,CAAC,GAAG,MAAM,CAAC;QAE3C,IAAI,GAAG,KAAK,CAAC,EAAE;YACb,KAAK,GAAG,IAAI,CAAC;AACb,YAAA,OAAO,GAAG,CAAC;AACZ,SAAA;QAED,IAAI,GAAG,GAAG,CAAC,EAAE;AACX,YAAA,GAAG,GAAG,GAAG,GAAG,CAAC,CAAC;AACf,SAAA;AAAM,aAAA;AACL,YAAA,IAAI,GAAG,GAAG,GAAG,CAAC,CAAC;AAChB,SAAA;AACF,KAAA;IAED,KAAK,GAAG,KAAK,CAAC;IACd,OAAO,GAAG,GAAG,CAAC,CAAC;AACjB,CAAC;SAEe,UAAU,CACxB,QAA+C,EAC/C,MAAc,EACd,KAAa,EAAA;AAEb,IAAA,KAAK,IAAI,CAAC,GAAG,KAAK,GAAG,CAAC,EAAE,CAAC,GAAG,QAAQ,CAAC,MAAM,EAAE,KAAK,GAAG,CAAC,EAAE,EAAE;QACxD,IAAI,QAAQ,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,MAAM;YAAE,MAAM;AAC3C,KAAA;AACD,IAAA,OAAO,KAAK,CAAC;AACf,CAAC;SAEe,UAAU,CACxB,QAA+C,EAC/C,MAAc,EACd,KAAa,EAAA;AAEb,IAAA,KAAK,IAAI,CAAC,GAAG,KAAK,GAAG,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,KAAK,GAAG,CAAC,EAAE,EAAE;QAC3C,IAAI,QAAQ,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,MAAM;YAAE,MAAM;AAC3C,KAAA;AACD,IAAA,OAA
O,KAAK,CAAC;AACf,CAAC;SAEe,aAAa,GAAA;IAC3B,OAAO;QACL,OAAO,EAAE,CAAC,CAAC;QACX,UAAU,EAAE,CAAC,CAAC;QACd,SAAS,EAAE,CAAC,CAAC;KACd,CAAC;AACJ,CAAC;AAED;;;AAGG;AACG,SAAU,oBAAoB,CAClC,QAA+C,EAC/C,MAAc,EACd,KAAgB,EAChB,GAAW,EAAA;IAEX,MAAM,EAAE,OAAO,EAAE,UAAU,EAAE,SAAS,EAAE,GAAG,KAAK,CAAC;IAEjD,IAAI,GAAG,GAAG,CAAC,CAAC;AACZ,IAAA,IAAI,IAAI,GAAG,QAAQ,CAAC,MAAM,GAAG,CAAC,CAAC;IAC/B,IAAI,GAAG,KAAK,OAAO,EAAE;QACnB,IAAI,MAAM,KAAK,UAAU,EAAE;AACzB,YAAA,KAAK,GAAG,SAAS,KAAK,CAAC,CAAC,IAAI,QAAQ,CAAC,SAAS,CAAC,CAAC,MAAM,CAAC,KAAK,MAAM,CAAC;AACnE,YAAA,OAAO,SAAS,CAAC;AAClB,SAAA;QAED,IAAI,MAAM,IAAI,UAAU,EAAE;;AAExB,YAAA,GAAG,GAAG,SAAS,KAAK,CAAC,CAAC,GAAG,CAAC,GAAG,SAAS,CAAC;AACxC,SAAA;AAAM,aAAA;YACL,IAAI,GAAG,SAAS,CAAC;AAClB,SAAA;AACF,KAAA;AACD,IAAA,KAAK,CAAC,OAAO,GAAG,GAAG,CAAC;AACpB,IAAA,KAAK,CAAC,UAAU,GAAG,MAAM,CAAC;AAE1B,IAAA,QAAQ,KAAK,CAAC,SAAS,GAAG,YAAY,CAAC,QAAQ,EAAE,MAAM,EAAE,GAAG,EAAE,IAAI,CAAC,EAAE;AACvE;;ACvGA;AACA;AACc,SAAU,cAAc,CACpC,OAAsC,EACtC,KAAkB,EAAA;IAElB,MAAM,OAAO,GAAa,KAAK,CAAC,GAAG,CAAC,cAAc,CAAC,CAAC;AAEpD,IAAA,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,OAAO,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;AACvC,QAAA,MAAM,IAAI,GAAG,OAAO,CAAC,CAAC,CAAC,CAAC;AACxB,QAAA,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;AACpC,YAAA,MAAM,GAAG,GAAG,IAAI,CAAC,CAAC,CAAC,CAAC;AACpB,YAAA,IAAI,GAAG,CAAC,MAAM,KAAK,CAAC;gBAAE,SAAS;AAE/B,YAAA,MAAM,WAAW,GAAG,GAAG,CAAC,aAAa,CAAC,CAAC;AACvC,YAAA,MAAM,UAAU,GAAG,GAAG,CAAC,WAAW,CAAC,CAAC;AACpC,YAAA,MAAM,YAAY,GAAG,GAAG,CAAC,aAAa,CAAC,CAAC;AACxC,YAAA,MAAM,cAAc,GAAG,OAAO,CAAC,WAAW,CAAC,CAAC;AAC5C,YAAA,MAAM,YAAY,IAAI,cAAc,CAAC,UAAU,CAAzB,KAAA,cAAc,CAAC,UAAU,CAAM,GAAA,EAAE,EAAC,CAAC;AACzD,YAAA,MAAM,IAAI,GAAG,KAAK,CAAC,WAAW,CAAC,CAAC;;;;;AAMhC,YAAA,MAAM,KAAK,GAAG,UAAU,CACtB,YAAY,EACZ,YAAY,EACZ,oBAAoB,CAAC,YAAY,EAAE,YAAY,EAAE,IAAI,EAAE,UAAU,CAAC,CACnE,CAAC;YAEF,MAAM,CAAC,YAAY,GAAG,IAAI,CAAC,SAAS,GAAG,KAAK,GAAG,CAAC,GAAG,CAAC,YAAY,EAAE,CAAC,EAAE,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC;AACpF,SAAA;AACF,KAAA;AAED,IAAA,OAAO,OAAO,CAAC;AACjB,CAAC;AAED,SAAS,MAAM,CAAI,KAAU,EAAE,KAAa,EAAE,KAAQ,EAAA;AACpD,IAAA,KAAK,IAAI,CAAC,GAAG,KAAK,CAAC,MAAM,EAAE,CAAC,GAAG,KAAK,EAAE,CAAC,EAAE,EAAE;QACzC,KAAK,CAAC,CAAC,CAAC,GAAG,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;AACzB,KAAA;AACD,IAAA,KAAK,CAAC,KAAK,CAAC,GAAG,KAAK,CAAC;AACvB,CAAC;AAED;AACA;AACA;AACA;AACA;AACA,SAAS,cAAc,GAAA;AACrB,IAAA,OAAO,EAAE,SAAS,EAAE,IAAI,EAAO,CAAC;AAClC;;ACxCa,MAAA,MAAM,GAAW,UAAU,GAAG,EAAE,MAAM,EAAA;AACjD,IAAA,MAAM,MAAM,GACV,OAAO,GAAG,KAAK,QAAQ,GAAI,IAAI,CAAC,KAAK,CAAC,GAAG,CAA8C,GAAG,GAAG,CAAC;AAEhG,IAAA,IAAI,EAAE,UAAU,IAAI,MAAM,CAAC;AAAE,QAAA,OAAO,IAAI,QAAQ,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IAEjE,MAAM,QAAQ,GAAyB,EAAE,CAAC;IAC1C,MAAM,OAAO,GAAa,EAAE,CAAC;IAC7B,MAAM,cAAc,GAAsB,EAAE,CAAC;IAC7C,MAAM,KAAK,GAAa,EAAE,CAAC;IAE3B,OAAO,CAAC,MAAM,EAAE,MAAM,EAAE,QAAQ,EAAE,OAAO,EAAE,cAAc,EAAE,KAAK,EAAE,CAAC,EAAE,CAAC,EAAE,QAAQ,EAAE,QAAQ,CAAC,CAAC;AAE5F,IAAA,MAAM,MAAM,GAAqB;AAC/B,QAAA,OAAO,EAAE,CAAC;QACV,IAAI,EAAE,MAAM,CAAC,IAAI;QACjB,KAAK;QACL,OAAO;QACP,cAAc;QACd,QAAQ;KACT,CAAC;AAEF,IAAA,OAAO,mBAAmB,CAAC,MAAM,CAAC,CAAC;AACrC,EAAY;AAEZ,SAAS,OAAO,CACd,KAA6B,EAC7B,MAAiC,EACjC,QAA8B,EAC9B,OAAiB,EACjB,cAAiC,EACjC,KAAe,EACf,UAAkB,EAClB,YAAoB,EACpB,QAAgB,EAChB,UAAkB,EAAA;AAElB,IAAA,MAAM,EAAE,QAAQ,EAAE,GAAG,KAAK,CAAC;AAC3B,IAAA,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,QAAQ,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;QACxC,MAAM,EAAE,GAAG,EAAE,MAAM,EAAE,GAAG,QAAQ,CAAC,CAAC,CAAC,CAAC;QAEpC,IAAI,EAAE,GAAG,QAAQ,CAAC;QAClB,IAAI,EAAE,GAAG,UAAU,CAAC;AACpB,QAAA,IAAI,CAAC,GAAG,CAAC,GAAG,QAAQ,CAAC,MAAM,EAAE;YAC3B,MAAM,UAAU,GAAG,QAAQ,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC;
AAC1C,YAAA,EAAE,GAAG,IAAI,CAAC,GAAG,CAAC,QAAQ,EAAE,UAAU,GAAG,UAAU,CAAC,IAAI,CAAC,CAAC;YAEtD,IAAI,EAAE,KAAK,QAAQ,EAAE;AACnB,gBAAA,EAAE,GAAG,IAAI,CAAC,GAAG,CAAC,UAAU,EAAE,YAAY,GAAG,UAAU,CAAC,MAAM,CAAC,CAAC;AAC7D,aAAA;iBAAM,IAAI,EAAE,GAAG,QAAQ,EAAE;AACxB,gBAAA,EAAE,GAAG,YAAY,GAAG,UAAU,CAAC,MAAM,CAAC;AACvC,aAAA;AACF,SAAA;AAED,QAAA,UAAU,CACR,GAAG,EACH,MAAM,EACN,QAAQ,EACR,OAAO,EACP,cAAc,EACd,KAAK,EACL,UAAU,GAAG,MAAM,CAAC,IAAI,EACxB,YAAY,GAAG,MAAM,CAAC,MAAM,EAC5B,EAAE,EACF,EAAE,CACH,CAAC;AACH,KAAA;AACH,CAAC;AAED,SAAS,UAAU,CACjB,KAAyB,EACzB,MAAiC,EACjC,QAA8B,EAC9B,OAAiB,EACjB,cAAiC,EACjC,KAAe,EACf,UAAkB,EAClB,YAAoB,EACpB,QAAgB,EAChB,UAAkB,EAAA;IAElB,IAAI,UAAU,IAAI,KAAK;AAAE,QAAA,OAAO,OAAO,CAAC,GAAI,SAAmD,CAAC,CAAC;IAEjG,MAAM,GAAG,GAAG,IAAI,QAAQ,CAAC,KAAK,EAAE,MAAM,CAAC,CAAC;AACxC,IAAA,MAAM,aAAa,GAAG,OAAO,CAAC,MAAM,CAAC;AACrC,IAAA,MAAM,WAAW,GAAG,KAAK,CAAC,MAAM,CAAC;AACjC,IAAA,MAAM,OAAO,GAAG,eAAe,CAAC,GAAG,CAAC,CAAC;IACrC,MAAM,EAAE,eAAe,EAAE,cAAc,EAAE,QAAQ,EAAE,GAAG,GAAG,CAAC;AAE1D,IAAA,MAAM,CAAC,OAAO,EAAE,eAAe,CAAC,CAAC;AACjC,IAAA,MAAM,CAAC,KAAK,EAAE,GAAG,CAAC,KAAK,CAAC,CAAC;AACzB,IAAA,IAAI,QAAQ;AAAE,QAAA,MAAM,CAAC,cAAc,EAAE,QAAQ,CAAC,CAAC;;AAC1C,QAAA,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,eAAe,CAAC,MAAM,EAAE,CAAC,EAAE;AAAE,YAAA,cAAc,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AAEhF,IAAA,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,OAAO,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;AACvC,QAAA,MAAM,KAAK,GAAG,UAAU,GAAG,CAAC,CAAC;;;;;QAM7B,IAAI,KAAK,GAAG,QAAQ;YAAE,OAAO;;;QAI7B,MAAM,GAAG,GAAG,OAAO,CAAC,QAAQ,EAAE,KAAK,CAAC,CAAC;;;AAGrC,QAAA,MAAM,OAAO,GAAG,CAAC,KAAK,CAAC,GAAG,YAAY,GAAG,CAAC,CAAC;AAE3C,QAAA,MAAM,IAAI,GAAG,OAAO,CAAC,CAAC,CAAC,CAAC;AACxB,QAAA,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;AACpC,YAAA,MAAM,GAAG,GAAG,IAAI,CAAC,CAAC,CAAC,CAAC;YACpB,MAAM,MAAM,GAAG,OAAO,GAAG,GAAG,CAAC,MAAM,CAAC,CAAC;;;AAIrC,YAAA,IAAI,KAAK,KAAK,QAAQ,IAAI,MAAM,IAAI,UAAU;gBAAE,OAAO;AAEvD,YAAA,IAAI,GAAG,CAAC,MAAM,KAAK,CAAC,EAAE;AACpB,gBAAA,GAAG,CAAC,IAAI,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC;gBACnB,SAAS;AACV,aAAA;YAED,MAAM,YAAY,GAAG,aAAa,GAAG,GAAG,CAAC,aAAa,CAAC,CAAC;AACxD,YAAA,MAAM,UAAU,GAAG,GAAG,CAAC,WAAW,CAAC,CAAC;AACpC,YAAA,MAAM,YAAY,GAAG,GAAG,CAAC,aAAa,CAAC,CAAC;AACxC,YAAA,GAAG,CAAC,IAAI,CACN,GAAG,CAAC,MAAM,KAAK,CAAC;kBACZ,CAAC,MAAM,EAAE,YAAY,EAAE,UAAU,EAAE,YAAY,CAAC;AAClD,kBAAE,CAAC,MAAM,EAAE,YAAY,EAAE,UAAU,EAAE,YAAY,EAAE,WAAW,GAAG,GAAG,CAAC,WAAW,CAAC,CAAC,CACrF,CAAC;AACH,SAAA;AACF,KAAA;AACH,CAAC;AAED,SAAS,MAAM,CAAI,GAAQ,EAAE,KAAU,EAAA;AACrC,IAAA,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,KAAK,CAAC,MAAM,EAAE,CAAC,EAAE;QAAE,GAAG,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC;AAC5D,CAAC;AAED,SAAS,OAAO,CAAI,GAAU,EAAE,KAAa,EAAA;AAC3C,IAAA,KAAK,IAAI,CAAC,GAAG,GAAG,CAAC,MAAM,EAAE,CAAC,IAAI,KAAK,EAAE,CAAC,EAAE;AAAE,QAAA,GAAG,CAAC,CAAC,CAAC,GAAG,EAAE,CAAC;AACtD,IAAA,OAAO,GAAG,CAAC,KAAK,CAAC,CAAC;AACpB;;AC7GA,MAAM,aAAa,GAAG,uDAAuD,CAAC;AAC9E,MAAM,eAAe,GAAG,yEAAyE,CAAC;AAErF,MAAA,iBAAiB,GAAG,CAAC,EAAE;AAC7B,MAAM,oBAAoB,GAAG,EAAE;AAEtC;;AAEG;AACQ,IAAA,gBAAiE;AAE5E;;AAEG;AACQ,IAAA,gBAA2E;AAEtF;;;AAGG;AACQ,IAAA,aAI4B;AAEvC;;;;AAIG;AACQ,IAAA,oBAGmC;AAE9C;;AAEG;AACQ,IAAA,qBAGqC;AAEhD;;AAEG;AACQ,IAAA,yBAAsF;AAEjG;;AAEG;AACQ,IAAA,YAAyE;AAEpF;;AAEG;AACQ,IAAA,iBAAmE;AAE9E;;;AAGG;AACQ,IAAA,oBAA0E;AAErF;;;AAGG;AACQ,IAAA,WAE2E;AAEtF;;;AAGG;AACQ,IAAA,WAAgD;MAI9C,QAAQ,CAAA;IAiBnB,WAAY,CAAA,GAAmB,EAAE,MAAsB,EAAA;AACrD,QAAA,MAAM,QAAQ,GAAG,OAAO,GAAG,KAAK,QAAQ,CAAC;AAEzC,QAAA,IAAI,CAAC,QAAQ,IAAK,GAAwC,CAAC,YAAY;AAAE,YAAA,OAAO,GAAe,CAAC;AAEhG,QAAA,MAAM,MAAM,IAAI,QAAQ,GAAG,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,GAAG,GAAG,CAAwC,CAAC;AAEzF,QAAA,MAAM,EAAE,OAAO,EAAE,IAAI,E
AAE,KAAK,EAAE,UAAU,EAAE,OAAO,EAAE,cAAc,EAAE,GAAG,MAAM,CAAC;AAC7E,QAAA,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC;AACvB,QAAA,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC;AACjB,QAAA,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC;AACnB,QAAA,IAAI,CAAC,UAAU,GAAG,UAAU,CAAC;AAC7B,QAAA,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC;AACvB,QAAA,IAAI,CAAC,cAAc,GAAG,cAAc,CAAC;AAErC,QAAA,MAAM,IAAI,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,EAAE,aAAa,CAAC,MAAM,CAAC,CAAC,CAAC;QAC9D,IAAI,CAAC,eAAe,GAAG,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,OAAO,CAAC,CAAC,IAAI,EAAE,EAAE,IAAI,CAAC,CAAC,CAAC;AAElE,QAAA,MAAM,EAAE,QAAQ,EAAE,GAAG,MAAM,CAAC;AAC5B,QAAA,IAAI,OAAO,QAAQ,KAAK,QAAQ,EAAE;AAChC,YAAA,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;AACzB,YAAA,IAAI,CAAC,QAAQ,GAAG,SAAS,CAAC;AAC3B,SAAA;AAAM,aAAA;AACL,YAAA,IAAI,CAAC,QAAQ,GAAG,SAAS,CAAC;YAC1B,IAAI,CAAC,QAAQ,GAAG,SAAS,CAAC,QAAQ,EAAE,QAAQ,CAAC,CAAC;AAC/C,SAAA;AAED,QAAA,IAAI,CAAC,YAAY,GAAG,aAAa,EAAE,CAAC;AACpC,QAAA,IAAI,CAAC,UAAU,GAAG,SAAS,CAAC;AAC5B,QAAA,IAAI,CAAC,cAAc,GAAG,SAAS,CAAC;KACjC;AAuLF,CAAA;AArLC,CAAA,MAAA;AACE,IAAA,eAAe,GAAG,CAAC,GAAG,KAAI;;AACxB,QAAA,cAAQ,GAAG,CAAC,QAAQ,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,EAAA,IAAZ,GAAG,CAAC,QAAQ,GAAK,MAAM,CAAC,GAAG,CAAC,QAAS,CAAC,GAAE;AAClD,KAAC,CAAC;AAEF,IAAA,eAAe,GAAG,CAAC,GAAG,KAAI;AACxB,QAAA,QAAQ,GAAG,CAAC,QAAQ,KAAZ,GAAG,CAAC,QAAQ,GAAK,MAAM,CAAC,GAAG,CAAC,QAAS,CAAC,GAAE;AAClD,KAAC,CAAC;IAEF,YAAY,GAAG,CAAC,GAAG,EAAE,IAAI,EAAE,MAAM,KAAI;AACnC,QAAA,MAAM,OAAO,GAAG,eAAe,CAAC,GAAG,CAAC,CAAC;;;AAIrC,QAAA,IAAI,IAAI,IAAI,OAAO,CAAC,MAAM;AAAE,YAAA,OAAO,IAAI,CAAC;AAExC,QAAA,MAAM,QAAQ,GAAG,OAAO,CAAC,IAAI,CAAC,CAAC;AAC/B,QAAA,MAAM,KAAK,GAAG,oBAAoB,CAChC,QAAQ,EACR,GAAG,CAAC,YAAY,EAChB,IAAI,EACJ,MAAM,EACN,oBAAoB,CACrB,CAAC;AAEF,QAAA,OAAO,KAAK,KAAK,CAAC,CAAC,GAAG,IAAI,GAAG,QAAQ,CAAC,KAAK,CAAC,CAAC;AAC/C,KAAC,CAAC;AAEF,IAAA,mBAAmB,GAAG,CAAC,GAAG,EAAE,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,KAAI;AACpD,QAAA,IAAI,EAAE,CAAC;QACP,IAAI,IAAI,GAAG,CAAC;AAAE,YAAA,MAAM,IAAI,KAAK,CAAC,aAAa,CAAC,CAAC;QAC7C,IAAI,MAAM,GAAG,CAAC;AAAE,YAAA,MAAM,IAAI,KAAK,CAAC,eAAe,CAAC,CAAC;AAEjD,QAAA,MAAM,OAAO,GAAG,eAAe,CAAC,GAAG,CAAC,CAAC;;;AAIrC,QAAA,IAAI,IAAI,IAAI,OAAO,CAAC,MAAM;YAAE,OAAO,QAAQ,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC;AAEpE,QAAA,MAAM,QAAQ,GAAG,OAAO,CAAC,IAAI,CAAC,CAAC;AAC/B,QAAA,MAAM,KAAK,GAAG,oBAAoB,CAChC,QAAQ,EACR,GAAG,CAAC,YAAY,EAChB,IAAI,EACJ,MAAM,EACN,IAAI,IAAI,oBAAoB,CAC7B,CAAC;QAEF,IAAI,KAAK,KAAK,CAAC,CAAC;YAAE,OAAO,QAAQ,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC;AAE1D,QAAA,MAAM,OAAO,GAAG,QAAQ,CAAC,KAAK,CAAC,CAAC;AAChC,QAAA,IAAI,OAAO,CAAC,MAAM,KAAK,CAAC;YAAE,OAAO,QAAQ,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC;AAElE,QAAA,MAAM,EAAE,KAAK,EAAE,eAAe,EAAE,GAAG,GAAG,CAAC;AACvC,QAAA,OAAO,QAAQ,CACb,eAAe,CAAC,OAAO,CAAC,aAAa,CAAC,CAAC,EACvC,OAAO,CAAC,WAAW,CAAC,GAAG,CAAC,EACxB,OAAO,CAAC,aAAa,CAAC,EACtB,OAAO,CAAC,MAAM,KAAK,CAAC,GAAG,KAAK,CAAC,OAAO,CAAC,WAAW,CAAC,CAAC,GAAG,IAAI,CAC1D,CAAC;AACJ,KAAC,CAAC;AAEF,IAAA,wBAAwB,GAAG,CAAC,GAAG,EAAE,EAAE,MAAM,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,KAAI;;AAEjE,QAAA,OAAO,iBAAiB,CAAC,GAAG,EAAE,MAAM,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,IAAI,iBAAiB,EAAE,IAAI,CAAC,CAAC;AACvF,KAAC,CAAC;AAEF,IAAA,oBAAoB,GAAG,CAAC,GAAG,EAAE,EAAE,MAAM,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,KAAI;AAC7D,QAAA,OAAO,iBAAiB,CAAC,GAAG,EAAE,MAAM,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,IAAI,oBAAoB,EAAE,KAAK,CAAC,CAAC;AAC3F,KAAC,CAAC;AAEF,IAAA,WAAW,GAAG,CAAC,GAAG,EAAE,EAAE,KAAI;AACxB,QAAA,MAAM,OAAO,GAAG,eAAe,CAAC,GAAG,CAAC,CAAC;AACrC,QAAA,MAAM,EAAE,KAAK,EAAE,eAAe,EAAE,GAAG,GAAG,CAAC;AAEvC,QAAA,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,OAAO,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;AACvC,YAAA,MAAM,IAAI,GAAG,OAAO,CAAC,CAAC,CAAC,CAAC;AACxB,YAAA,KAAK,IAAI,CAAC,
GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;AACpC,gBAAA,MAAM,GAAG,GAAG,IAAI,CAAC,CAAC,CAAC,CAAC;AAEpB,gBAAA,MAAM,aAAa,GAAG,CAAC,GAAG,CAAC,CAAC;AAC5B,gBAAA,MAAM,eAAe,GAAG,GAAG,CAAC,CAAC,CAAC,CAAC;gBAC/B,IAAI,MAAM,GAAG,IAAI,CAAC;gBAClB,IAAI,YAAY,GAAG,IAAI,CAAC;gBACxB,IAAI,cAAc,GAAG,IAAI,CAAC;gBAC1B,IAAI,IAAI,GAAG,IAAI,CAAC;AAChB,gBAAA,IAAI,GAAG,CAAC,MAAM,KAAK,CAAC,EAAE;oBACpB,MAAM,GAAG,eAAe,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;AACjC,oBAAA,YAAY,GAAG,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC;AAC1B,oBAAA,cAAc,GAAG,GAAG,CAAC,CAAC,CAAC,CAAC;AACzB,iBAAA;AACD,gBAAA,IAAI,GAAG,CAAC,MAAM,KAAK,CAAC;oBAAE,IAAI,GAAG,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;AAE3C,gBAAA,EAAE,CAAC;oBACD,aAAa;oBACb,eAAe;oBACf,MAAM;oBACN,YAAY;oBACZ,cAAc;oBACd,IAAI;AACU,iBAAA,CAAC,CAAC;AACnB,aAAA;AACF,SAAA;AACH,KAAC,CAAC;AAEF,IAAA,gBAAgB,GAAG,CAAC,GAAG,EAAE,MAAM,KAAI;QACjC,MAAM,EAAE,OAAO,EAAE,eAAe,EAAE,cAAc,EAAE,GAAG,GAAG,CAAC;QACzD,IAAI,cAAc,IAAI,IAAI;AAAE,YAAA,OAAO,IAAI,CAAC;QAExC,IAAI,KAAK,GAAG,OAAO,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACpC,IAAI,KAAK,KAAK,CAAC,CAAC;AAAE,YAAA,KAAK,GAAG,eAAe,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;AAE1D,QAAA,OAAO,KAAK,KAAK,CAAC,CAAC,GAAG,IAAI,GAAG,cAAc,CAAC,KAAK,CAAC,CAAC;AACrD,KAAC,CAAC;AAEF,IAAA,mBAAmB,GAAG,CAAC,GAAG,EAAE,MAAM,KAAI;AACpC,QAAA,MAAM,MAAM,GAAG,IAAI,QAAQ,CAAC,KAAK,CAAC,GAAG,EAAE,EAAE,CAAC,EAAE,MAAM,CAAC,CAAC;AACpD,QAAA,MAAM,CAAC,QAAQ,GAAG,GAAG,CAAC,QAAQ,CAAC;AAC/B,QAAA,OAAO,MAAM,CAAC;AAChB,KAAC,CAAC;AAEF,IAAA,UAAU,GAAG,CAAC,GAAG,KAAI;QACnB,OAAO,KAAK,CAAC,GAAG,EAAE,eAAe,CAAC,GAAG,CAAC,CAAC,CAAC;AAC1C,KAAC,CAAC;AAEF,IAAA,UAAU,GAAG,CAAC,GAAG,KAAI;QACnB,OAAO,KAAK,CAAC,GAAG,EAAE,eAAe,CAAC,GAAG,CAAC,CAAC,CAAC;AAC1C,KAAC,CAAC;AAkBF,IAAA,SAAS,iBAAiB,CACxB,GAAa,EACb,MAAc,EACd,IAAY,EACZ,MAAc,EACd,IAAU,EACV,GAAY,EAAA;AAEZ,QAAA,IAAI,EAAE,CAAC;QACP,IAAI,IAAI,GAAG,CAAC;AAAE,YAAA,MAAM,IAAI,KAAK,CAAC,aAAa,CAAC,CAAC;QAC7C,IAAI,MAAM,GAAG,CAAC;AAAE,YAAA,MAAM,IAAI,KAAK,CAAC,eAAe,CAAC,CAAC;AAEjD,QAAA,MAAM,EAAE,OAAO,EAAE,eAAe,EAAE,GAAG,GAAG,CAAC;QACzC,IAAI,WAAW,GAAG,OAAO,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAC1C,IAAI,WAAW,KAAK,CAAC,CAAC;AAAE,YAAA,WAAW,GAAG,eAAe,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACtE,IAAI,WAAW,KAAK,CAAC,CAAC;AAAE,YAAA,OAAO,GAAG,GAAG,EAAE,GAAG,QAAQ,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC;AAE/D,QAAA,MAAM,SAAS,IAAI,GAAG,CAAC,UAAU,KAAd,GAAG,CAAC,UAAU,GAAK,cAAc,CAClD,eAAe,CAAC,GAAG,CAAC,GACnB,GAAG,CAAC,cAAc,GAAG,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,EACjD,EAAC,CAAC;QAEH,MAAM,QAAQ,GAAG,SAAS,CAAC,WAAW,CAAC,CAAC,IAAI,CAAC,CAAC;QAC9C,IAAI,QAAQ,IAAI,IAAI;AAAE,YAAA,OAAO,GAAG,GAAG,EAAE,GAAG,QAAQ,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC;QAE7D,MAAM,IAAI,GAAG,GAAG,CAAC,cAAe,CAAC,WAAW,CAAC,CAAC;AAE9C,QAAA,IAAI,GAAG;AAAE,YAAA,OAAO,uBAAuB,CAAC,QAAQ,EAAE,IAAI,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,CAAC,CAAC;AAE5E,QAAA,MAAM,KAAK,GAAG,oBAAoB,CAAC,QAAQ,EAAE,IAAI,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,CAAC,CAAC;QACvE,IAAI,KAAK,KAAK,CAAC,CAAC;AAAE,YAAA,OAAO,QAAQ,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC;AAE9C,QAAA,MAAM,OAAO,GAAG,QAAQ,CAAC,KAAK,CAAC,CAAC;AAChC,QAAA,OAAO,QAAQ,CAAC,OAAO,CAAC,kBAAkB,CAAC,GAAG,CAAC,EAAE,OAAO,CAAC,oBAAoB,CAAC,CAAC,CAAC;KACjF;AACH,CAAC,GAAA,CAAA;AAGH,SAAS,KAAK,CACZ,GAAmD,EACnD,QAAW,EAAA;IAEX,OAAO;QACL,OAAO,EAAE,GAAG,CAAC,OAAO;QACpB,IAAI,EAAE,GAAG,CAAC,IAAI;QACd,KAAK,EAAE,GAAG,CAAC,KAAK;QAChB,UAAU,EAAE,GAAG,CAAC,UAAU;QAC1B,OAAO,EAAE,GAAG,CAAC,OAAO;QACpB,cAAc,EAAE,GAAG,CAAC,cAAc;QAClC,QAAQ;KACF,CAAC;AACX,CAAC;AASD,SAAS,QAAQ,CACf,MAAqB,EACrB,IAAmB,EACnB,MAAqB,EACrB,IAAmB,EAAA;IAEnB,OAAO,EAAE,MAAM,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,EAAS,CAAC;AAC/C,CAAC;AAID,SAAS,QAAQ,CACf,IAAmB,EACnB,MAAqB,EAAA;AAErB,IAAA,OAAO,EAAE,IAAI,EAAE,MAAM,EAAS,CAAC;AACjC,CAAC;AAgBD,SAAS,oBAAo
B,CAC3B,QAA+C,EAC/C,IAAe,EACf,IAAY,EACZ,MAAc,EACd,IAAU,EAAA;AAEV,IAAA,IAAI,KAAK,GAAG,oBAAoB,CAAC,QAAQ,EAAE,MAAM,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC;AAC/D,IAAA,IAAIA,KAAO,EAAE;QACX,KAAK,GAAG,CAAC,IAAI,KAAK,iBAAiB,GAAG,UAAU,GAAG,UAAU,EAAE,QAAQ,EAAE,MAAM,EAAE,KAAK,CAAC,CAAC;AACzF,KAAA;SAAM,IAAI,IAAI,KAAK,iBAAiB;AAAE,QAAA,KAAK,EAAE,CAAC;IAE/C,IAAI,KAAK,KAAK,CAAC,CAAC,IAAI,KAAK,KAAK,QAAQ,CAAC,MAAM;QAAE,OAAO,CAAC,CAAC,CAAC;AACzD,IAAA,OAAO,KAAK,CAAC;AACf,CAAC;AAED,SAAS,uBAAuB,CAC9B,QAA0B,EAC1B,IAAe,EACf,IAAY,EACZ,MAAc,EACd,IAAU,EAAA;AAEV,IAAA,IAAI,GAAG,GAAG,oBAAoB,CAAC,QAAQ,EAAE,IAAI,EAAE,IAAI,EAAE,MAAM,EAAE,oBAAoB,CAAC,CAAC;;;;;;;AAQnF,IAAA,IAAI,CAACA,KAAO,IAAI,IAAI,KAAK,iBAAiB;AAAE,QAAA,GAAG,EAAE,CAAC;IAElD,IAAI,GAAG,KAAK,CAAC,CAAC,IAAI,GAAG,KAAK,QAAQ,CAAC,MAAM;AAAE,QAAA,OAAO,EAAE,CAAC;;;;AAKrD,IAAA,MAAM,aAAa,GAAGA,KAAO,GAAG,MAAM,GAAG,QAAQ,CAAC,GAAG,CAAC,CAAC,MAAM,CAAC,CAAC;;AAG/D,IAAA,IAAI,CAACA,KAAO;QAAE,GAAG,GAAG,UAAU,CAAC,QAAQ,EAAE,aAAa,EAAE,GAAG,CAAC,CAAC;IAC7D,MAAM,GAAG,GAAG,UAAU,CAAC,QAAQ,EAAE,aAAa,EAAE,GAAG,CAAC,CAAC;IAErD,MAAM,MAAM,GAAG,EAAE,CAAC;AAClB,IAAA,OAAO,GAAG,IAAI,GAAG,EAAE,GAAG,EAAE,EAAE;AACxB,QAAA,MAAM,OAAO,GAAG,QAAQ,CAAC,GAAG,CAAC,CAAC;AAC9B,QAAA,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,kBAAkB,CAAC,GAAG,CAAC,EAAE,OAAO,CAAC,oBAAoB,CAAC,CAAC,CAAC,CAAC;AACvF,KAAA;AACD,IAAA,OAAO,MAAM,CAAC;AAChB;;;;"} \ No newline at end of file diff --git a/node_modules/@jridgewell/trace-mapping/dist/trace-mapping.umd.js b/node_modules/@jridgewell/trace-mapping/dist/trace-mapping.umd.js new file mode 100644 index 0000000..a3251f1 --- /dev/null +++ b/node_modules/@jridgewell/trace-mapping/dist/trace-mapping.umd.js @@ -0,0 +1,566 @@ +(function (global, factory) { + typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports, require('@jridgewell/sourcemap-codec'), require('@jridgewell/resolve-uri')) : + typeof define === 'function' && define.amd ? define(['exports', '@jridgewell/sourcemap-codec', '@jridgewell/resolve-uri'], factory) : + (global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.traceMapping = {}, global.sourcemapCodec, global.resolveURI)); +})(this, (function (exports, sourcemapCodec, resolveUri) { 'use strict'; + + function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; } + + var resolveUri__default = /*#__PURE__*/_interopDefaultLegacy(resolveUri); + + function resolve(input, base) { + // The base is always treated as a directory, if it's not empty. + // https://github.com/mozilla/source-map/blob/8cb3ee57/lib/util.js#L327 + // https://github.com/chromium/chromium/blob/da4adbb3/third_party/blink/renderer/devtools/front_end/sdk/SourceMap.js#L400-L401 + if (base && !base.endsWith('/')) + base += '/'; + return resolveUri__default["default"](input, base); + } + + /** + * Removes everything after the last "/", but leaves the slash. + */ + function stripFilename(path) { + if (!path) + return ''; + const index = path.lastIndexOf('/'); + return path.slice(0, index + 1); + } + + const COLUMN = 0; + const SOURCES_INDEX = 1; + const SOURCE_LINE = 2; + const SOURCE_COLUMN = 3; + const NAMES_INDEX = 4; + const REV_GENERATED_LINE = 1; + const REV_GENERATED_COLUMN = 2; + + function maybeSort(mappings, owned) { + const unsortedIndex = nextUnsortedSegmentLine(mappings, 0); + if (unsortedIndex === mappings.length) + return mappings; + // If we own the array (meaning we parsed it from JSON), then we're free to directly mutate it. If + // not, we do not want to modify the consumer's input array. 
+ if (!owned) + mappings = mappings.slice(); + for (let i = unsortedIndex; i < mappings.length; i = nextUnsortedSegmentLine(mappings, i + 1)) { + mappings[i] = sortSegments(mappings[i], owned); + } + return mappings; + } + function nextUnsortedSegmentLine(mappings, start) { + for (let i = start; i < mappings.length; i++) { + if (!isSorted(mappings[i])) + return i; + } + return mappings.length; + } + function isSorted(line) { + for (let j = 1; j < line.length; j++) { + if (line[j][COLUMN] < line[j - 1][COLUMN]) { + return false; + } + } + return true; + } + function sortSegments(line, owned) { + if (!owned) + line = line.slice(); + return line.sort(sortComparator); + } + function sortComparator(a, b) { + return a[COLUMN] - b[COLUMN]; + } + + let found = false; + /** + * A binary search implementation that returns the index if a match is found. + * If no match is found, then the left-index (the index associated with the item that comes just + * before the desired index) is returned. To maintain proper sort order, a splice would happen at + * the next index: + * + * ```js + * const array = [1, 3]; + * const needle = 2; + * const index = binarySearch(array, needle, (item, needle) => item - needle); + * + * assert.equal(index, 0); + * array.splice(index + 1, 0, needle); + * assert.deepEqual(array, [1, 2, 3]); + * ``` + */ + function binarySearch(haystack, needle, low, high) { + while (low <= high) { + const mid = low + ((high - low) >> 1); + const cmp = haystack[mid][COLUMN] - needle; + if (cmp === 0) { + found = true; + return mid; + } + if (cmp < 0) { + low = mid + 1; + } + else { + high = mid - 1; + } + } + found = false; + return low - 1; + } + function upperBound(haystack, needle, index) { + for (let i = index + 1; i < haystack.length; index = i++) { + if (haystack[i][COLUMN] !== needle) + break; + } + return index; + } + function lowerBound(haystack, needle, index) { + for (let i = index - 1; i >= 0; index = i--) { + if (haystack[i][COLUMN] !== needle) + break; + } + return index; + } + function memoizedState() { + return { + lastKey: -1, + lastNeedle: -1, + lastIndex: -1, + }; + } + /** + * This overly complicated beast is just to record the last tested line/column and the resulting + * index, allowing us to skip a few tests if mappings are monotonically increasing. + */ + function memoizedBinarySearch(haystack, needle, state, key) { + const { lastKey, lastNeedle, lastIndex } = state; + let low = 0; + let high = haystack.length - 1; + if (key === lastKey) { + if (needle === lastNeedle) { + found = lastIndex !== -1 && haystack[lastIndex][COLUMN] === needle; + return lastIndex; + } + if (needle >= lastNeedle) { + // lastIndex may be -1 if the previous needle was not found. + low = lastIndex === -1 ? 0 : lastIndex; + } + else { + high = lastIndex; + } + } + state.lastKey = key; + state.lastNeedle = needle; + return (state.lastIndex = binarySearch(haystack, needle, low, high)); + } + + // Rebuilds the original source files, with mappings that are ordered by source line/column instead + // of generated line/column. 
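// Editor's note (not part of the diffed file): a standalone sketch of how the search
// helpers above combine. binarySearch returns some index whose column matches (or the
// left neighbour when nothing matches), and lowerBound/upperBound then walk outward to
// the first/last segment sharing that column; buildBySources below and the tracing code
// further down follow the same pattern, with memoizedBinarySearch caching the last probe
// per line. The helper name and data here are invented for illustration.
function sampleFindAllWithColumn(segments, column) {
  // `segments` is sorted by its 0th entry (the column); duplicate columns are allowed.
  let lo = 0;
  let hi = segments.length - 1;
  let hit = -1;
  while (lo <= hi) {
    const mid = lo + ((hi - lo) >> 1);
    const cmp = segments[mid][0] - column;
    if (cmp === 0) { hit = mid; break; }
    if (cmp < 0) lo = mid + 1;
    else hi = mid - 1;
  }
  if (hit === -1) return []; // this sketch only handles exact matches
  // Widen the single hit to the whole run of segments sharing that column.
  let min = hit;
  while (min > 0 && segments[min - 1][0] === column) min--;
  let max = hit;
  while (max + 1 < segments.length && segments[max + 1][0] === column) max++;
  return segments.slice(min, max + 1);
}

// sampleFindAllWithColumn([[1], [4], [4, 0, 0, 0], [9]], 4) -> [[4], [4, 0, 0, 0]]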
+ function buildBySources(decoded, memos) { + const sources = memos.map(buildNullArray); + for (let i = 0; i < decoded.length; i++) { + const line = decoded[i]; + for (let j = 0; j < line.length; j++) { + const seg = line[j]; + if (seg.length === 1) + continue; + const sourceIndex = seg[SOURCES_INDEX]; + const sourceLine = seg[SOURCE_LINE]; + const sourceColumn = seg[SOURCE_COLUMN]; + const originalSource = sources[sourceIndex]; + const originalLine = (originalSource[sourceLine] || (originalSource[sourceLine] = [])); + const memo = memos[sourceIndex]; + // The binary search either found a match, or it found the left-index just before where the + // segment should go. Either way, we want to insert after that. And there may be multiple + // generated segments associated with an original location, so there may need to move several + // indexes before we find where we need to insert. + const index = upperBound(originalLine, sourceColumn, memoizedBinarySearch(originalLine, sourceColumn, memo, sourceLine)); + insert(originalLine, (memo.lastIndex = index + 1), [sourceColumn, i, seg[COLUMN]]); + } + } + return sources; + } + function insert(array, index, value) { + for (let i = array.length; i > index; i--) { + array[i] = array[i - 1]; + } + array[index] = value; + } + // Null arrays allow us to use ordered index keys without actually allocating contiguous memory like + // a real array. We use a null-prototype object to avoid prototype pollution and deoptimizations. + // Numeric properties on objects are magically sorted in ascending order by the engine regardless of + // the insertion order. So, by setting any numeric keys, even out of order, we'll get ascending + // order when iterating with for-in. + function buildNullArray() { + return { __proto__: null }; + } + + const AnyMap = function (map, mapUrl) { + const parsed = typeof map === 'string' ? 
JSON.parse(map) : map; + if (!('sections' in parsed)) + return new TraceMap(parsed, mapUrl); + const mappings = []; + const sources = []; + const sourcesContent = []; + const names = []; + recurse(parsed, mapUrl, mappings, sources, sourcesContent, names, 0, 0, Infinity, Infinity); + const joined = { + version: 3, + file: parsed.file, + names, + sources, + sourcesContent, + mappings, + }; + return exports.presortedDecodedMap(joined); + }; + function recurse(input, mapUrl, mappings, sources, sourcesContent, names, lineOffset, columnOffset, stopLine, stopColumn) { + const { sections } = input; + for (let i = 0; i < sections.length; i++) { + const { map, offset } = sections[i]; + let sl = stopLine; + let sc = stopColumn; + if (i + 1 < sections.length) { + const nextOffset = sections[i + 1].offset; + sl = Math.min(stopLine, lineOffset + nextOffset.line); + if (sl === stopLine) { + sc = Math.min(stopColumn, columnOffset + nextOffset.column); + } + else if (sl < stopLine) { + sc = columnOffset + nextOffset.column; + } + } + addSection(map, mapUrl, mappings, sources, sourcesContent, names, lineOffset + offset.line, columnOffset + offset.column, sl, sc); + } + } + function addSection(input, mapUrl, mappings, sources, sourcesContent, names, lineOffset, columnOffset, stopLine, stopColumn) { + if ('sections' in input) + return recurse(...arguments); + const map = new TraceMap(input, mapUrl); + const sourcesOffset = sources.length; + const namesOffset = names.length; + const decoded = exports.decodedMappings(map); + const { resolvedSources, sourcesContent: contents } = map; + append(sources, resolvedSources); + append(names, map.names); + if (contents) + append(sourcesContent, contents); + else + for (let i = 0; i < resolvedSources.length; i++) + sourcesContent.push(null); + for (let i = 0; i < decoded.length; i++) { + const lineI = lineOffset + i; + // We can only add so many lines before we step into the range that the next section's map + // controls. When we get to the last line, then we'll start checking the segments to see if + // they've crossed into the column range. But it may not have any columns that overstep, so we + // still need to check that we don't overstep lines, too. + if (lineI > stopLine) + return; + // The out line may already exist in mappings (if we're continuing the line started by a + // previous section). Or, we may have jumped ahead several lines to start this section. + const out = getLine(mappings, lineI); + // On the 0th loop, the section's column offset shifts us forward. On all other lines (since the + // map can be multiple lines), it doesn't. + const cOffset = i === 0 ? columnOffset : 0; + const line = decoded[i]; + for (let j = 0; j < line.length; j++) { + const seg = line[j]; + const column = cOffset + seg[COLUMN]; + // If this segment steps into the column range that the next section's map controls, we need + // to stop early. + if (lineI === stopLine && column >= stopColumn) + return; + if (seg.length === 1) { + out.push([column]); + continue; + } + const sourcesIndex = sourcesOffset + seg[SOURCES_INDEX]; + const sourceLine = seg[SOURCE_LINE]; + const sourceColumn = seg[SOURCE_COLUMN]; + out.push(seg.length === 4 + ? 
[column, sourcesIndex, sourceLine, sourceColumn] + : [column, sourcesIndex, sourceLine, sourceColumn, namesOffset + seg[NAMES_INDEX]]); + } + } + } + function append(arr, other) { + for (let i = 0; i < other.length; i++) + arr.push(other[i]); + } + function getLine(arr, index) { + for (let i = arr.length; i <= index; i++) + arr[i] = []; + return arr[index]; + } + + const LINE_GTR_ZERO = '`line` must be greater than 0 (lines start at line 1)'; + const COL_GTR_EQ_ZERO = '`column` must be greater than or equal to 0 (columns start at column 0)'; + const LEAST_UPPER_BOUND = -1; + const GREATEST_LOWER_BOUND = 1; + /** + * Returns the encoded (VLQ string) form of the SourceMap's mappings field. + */ + exports.encodedMappings = void 0; + /** + * Returns the decoded (array of lines of segments) form of the SourceMap's mappings field. + */ + exports.decodedMappings = void 0; + /** + * A low-level API to find the segment associated with a generated line/column (think, from a + * stack trace). Line and column here are 0-based, unlike `originalPositionFor`. + */ + exports.traceSegment = void 0; + /** + * A higher-level API to find the source/line/column associated with a generated line/column + * (think, from a stack trace). Line is 1-based, but column is 0-based, due to legacy behavior in + * `source-map` library. + */ + exports.originalPositionFor = void 0; + /** + * Finds the generated line/column position of the provided source/line/column source position. + */ + exports.generatedPositionFor = void 0; + /** + * Finds all generated line/column positions of the provided source/line/column source position. + */ + exports.allGeneratedPositionsFor = void 0; + /** + * Iterates each mapping in generated position order. + */ + exports.eachMapping = void 0; + /** + * Retrieves the source content for a particular source, if its found. Returns null if not. + */ + exports.sourceContentFor = void 0; + /** + * A helper that skips sorting of the input map's mappings array, which can be expensive for larger + * maps. + */ + exports.presortedDecodedMap = void 0; + /** + * Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects + * a sourcemap, or to JSON.stringify. + */ + exports.decodedMap = void 0; + /** + * Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects + * a sourcemap, or to JSON.stringify. + */ + exports.encodedMap = void 0; + class TraceMap { + constructor(map, mapUrl) { + const isString = typeof map === 'string'; + if (!isString && map._decodedMemo) + return map; + const parsed = (isString ? JSON.parse(map) : map); + const { version, file, names, sourceRoot, sources, sourcesContent } = parsed; + this.version = version; + this.file = file; + this.names = names; + this.sourceRoot = sourceRoot; + this.sources = sources; + this.sourcesContent = sourcesContent; + const from = resolve(sourceRoot || '', stripFilename(mapUrl)); + this.resolvedSources = sources.map((s) => resolve(s || '', from)); + const { mappings } = parsed; + if (typeof mappings === 'string') { + this._encoded = mappings; + this._decoded = undefined; + } + else { + this._encoded = undefined; + this._decoded = maybeSort(mappings, isString); + } + this._decodedMemo = memoizedState(); + this._bySources = undefined; + this._bySourceMemos = undefined; + } + } + (() => { + exports.encodedMappings = (map) => { + var _a; + return ((_a = map._encoded) !== null && _a !== void 0 ? 
_a : (map._encoded = sourcemapCodec.encode(map._decoded))); + }; + exports.decodedMappings = (map) => { + return (map._decoded || (map._decoded = sourcemapCodec.decode(map._encoded))); + }; + exports.traceSegment = (map, line, column) => { + const decoded = exports.decodedMappings(map); + // It's common for parent source maps to have pointers to lines that have no + // mapping (like a "//# sourceMappingURL=") at the end of the child file. + if (line >= decoded.length) + return null; + const segments = decoded[line]; + const index = traceSegmentInternal(segments, map._decodedMemo, line, column, GREATEST_LOWER_BOUND); + return index === -1 ? null : segments[index]; + }; + exports.originalPositionFor = (map, { line, column, bias }) => { + line--; + if (line < 0) + throw new Error(LINE_GTR_ZERO); + if (column < 0) + throw new Error(COL_GTR_EQ_ZERO); + const decoded = exports.decodedMappings(map); + // It's common for parent source maps to have pointers to lines that have no + // mapping (like a "//# sourceMappingURL=") at the end of the child file. + if (line >= decoded.length) + return OMapping(null, null, null, null); + const segments = decoded[line]; + const index = traceSegmentInternal(segments, map._decodedMemo, line, column, bias || GREATEST_LOWER_BOUND); + if (index === -1) + return OMapping(null, null, null, null); + const segment = segments[index]; + if (segment.length === 1) + return OMapping(null, null, null, null); + const { names, resolvedSources } = map; + return OMapping(resolvedSources[segment[SOURCES_INDEX]], segment[SOURCE_LINE] + 1, segment[SOURCE_COLUMN], segment.length === 5 ? names[segment[NAMES_INDEX]] : null); + }; + exports.allGeneratedPositionsFor = (map, { source, line, column, bias }) => { + // SourceMapConsumer uses LEAST_UPPER_BOUND for some reason, so we follow suit. + return generatedPosition(map, source, line, column, bias || LEAST_UPPER_BOUND, true); + }; + exports.generatedPositionFor = (map, { source, line, column, bias }) => { + return generatedPosition(map, source, line, column, bias || GREATEST_LOWER_BOUND, false); + }; + exports.eachMapping = (map, cb) => { + const decoded = exports.decodedMappings(map); + const { names, resolvedSources } = map; + for (let i = 0; i < decoded.length; i++) { + const line = decoded[i]; + for (let j = 0; j < line.length; j++) { + const seg = line[j]; + const generatedLine = i + 1; + const generatedColumn = seg[0]; + let source = null; + let originalLine = null; + let originalColumn = null; + let name = null; + if (seg.length !== 1) { + source = resolvedSources[seg[1]]; + originalLine = seg[2] + 1; + originalColumn = seg[3]; + } + if (seg.length === 5) + name = names[seg[4]]; + cb({ + generatedLine, + generatedColumn, + source, + originalLine, + originalColumn, + name, + }); + } + } + }; + exports.sourceContentFor = (map, source) => { + const { sources, resolvedSources, sourcesContent } = map; + if (sourcesContent == null) + return null; + let index = sources.indexOf(source); + if (index === -1) + index = resolvedSources.indexOf(source); + return index === -1 ? 
null : sourcesContent[index]; + }; + exports.presortedDecodedMap = (map, mapUrl) => { + const tracer = new TraceMap(clone(map, []), mapUrl); + tracer._decoded = map.mappings; + return tracer; + }; + exports.decodedMap = (map) => { + return clone(map, exports.decodedMappings(map)); + }; + exports.encodedMap = (map) => { + return clone(map, exports.encodedMappings(map)); + }; + function generatedPosition(map, source, line, column, bias, all) { + line--; + if (line < 0) + throw new Error(LINE_GTR_ZERO); + if (column < 0) + throw new Error(COL_GTR_EQ_ZERO); + const { sources, resolvedSources } = map; + let sourceIndex = sources.indexOf(source); + if (sourceIndex === -1) + sourceIndex = resolvedSources.indexOf(source); + if (sourceIndex === -1) + return all ? [] : GMapping(null, null); + const generated = (map._bySources || (map._bySources = buildBySources(exports.decodedMappings(map), (map._bySourceMemos = sources.map(memoizedState))))); + const segments = generated[sourceIndex][line]; + if (segments == null) + return all ? [] : GMapping(null, null); + const memo = map._bySourceMemos[sourceIndex]; + if (all) + return sliceGeneratedPositions(segments, memo, line, column, bias); + const index = traceSegmentInternal(segments, memo, line, column, bias); + if (index === -1) + return GMapping(null, null); + const segment = segments[index]; + return GMapping(segment[REV_GENERATED_LINE] + 1, segment[REV_GENERATED_COLUMN]); + } + })(); + function clone(map, mappings) { + return { + version: map.version, + file: map.file, + names: map.names, + sourceRoot: map.sourceRoot, + sources: map.sources, + sourcesContent: map.sourcesContent, + mappings, + }; + } + function OMapping(source, line, column, name) { + return { source, line, column, name }; + } + function GMapping(line, column) { + return { line, column }; + } + function traceSegmentInternal(segments, memo, line, column, bias) { + let index = memoizedBinarySearch(segments, column, memo, line); + if (found) { + index = (bias === LEAST_UPPER_BOUND ? upperBound : lowerBound)(segments, column, index); + } + else if (bias === LEAST_UPPER_BOUND) + index++; + if (index === -1 || index === segments.length) + return -1; + return index; + } + function sliceGeneratedPositions(segments, memo, line, column, bias) { + let min = traceSegmentInternal(segments, memo, line, column, GREATEST_LOWER_BOUND); + // We ignored the bias when tracing the segment so that we're guarnateed to find the first (in + // insertion order) segment that matched. Even if we did respect the bias when tracing, we would + // still need to call `lowerBound()` to find the first segment, which is slower than just looking + // for the GREATEST_LOWER_BOUND to begin with. The only difference that matters for us is when the + // binary search didn't match, in which case GREATEST_LOWER_BOUND just needs to increment to + // match LEAST_UPPER_BOUND. + if (!found && bias === LEAST_UPPER_BOUND) + min++; + if (min === -1 || min === segments.length) + return []; + // We may have found the segment that started at an earlier column. If this is the case, then we + // need to slice all generated segments that match _that_ column, because all such segments span + // to our desired column. + const matchedColumn = found ? column : segments[min][COLUMN]; + // The binary search is not guaranteed to find the lower bound when a match wasn't found. 
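// Editor's note (not part of the diffed file): stepping back from the helper above, an
// editorial usage sketch of the public API this module implements. The sourcemap literal
// is invented; TraceMap, originalPositionFor, generatedPositionFor, allGeneratedPositionsFor
// and sourceContentFor are the exports shown in this file. As documented earlier, `line`
// is 1-based and `column` is 0-based.
import {
  TraceMap,
  originalPositionFor,
  generatedPositionFor,
  allGeneratedPositionsFor,
  sourceContentFor,
} from '@jridgewell/trace-mapping';

const tracer = new TraceMap({
  version: 3,
  sources: ['input.js'],
  sourcesContent: ['/* original text of input.js */'],
  names: ['foo'],
  // One segment: generated col 5 -> sources[0], original line 41 (0-based), col 4, names[0].
  mappings: 'KAyCIA',
});

// Generated line 1, column 5 -> { source: 'input.js', line: 42, column: 4, name: 'foo' }.
console.log(originalPositionFor(tracer, { line: 1, column: 5 }));

// Reverse lookup: original line 42, column 4 -> { line: 1, column: 5 }.
console.log(generatedPositionFor(tracer, { source: 'input.js', line: 42, column: 4 }));

// Every generated position for that original position (here just the one).
console.log(allGeneratedPositionsFor(tracer, { source: 'input.js', line: 42, column: 4 }));

// Embedded source text, or null when the map doesn't carry sourcesContent.
console.log(sourceContentFor(tracer, 'input.js'));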
+ if (!found) + min = lowerBound(segments, matchedColumn, min); + const max = upperBound(segments, matchedColumn, min); + const result = []; + for (; min <= max; min++) { + const segment = segments[min]; + result.push(GMapping(segment[REV_GENERATED_LINE] + 1, segment[REV_GENERATED_COLUMN])); + } + return result; + } + + exports.AnyMap = AnyMap; + exports.GREATEST_LOWER_BOUND = GREATEST_LOWER_BOUND; + exports.LEAST_UPPER_BOUND = LEAST_UPPER_BOUND; + exports.TraceMap = TraceMap; + + Object.defineProperty(exports, '__esModule', { value: true }); + +})); +//# sourceMappingURL=trace-mapping.umd.js.map diff --git a/node_modules/@jridgewell/trace-mapping/dist/trace-mapping.umd.js.map b/node_modules/@jridgewell/trace-mapping/dist/trace-mapping.umd.js.map new file mode 100644 index 0000000..fee1219 --- /dev/null +++ b/node_modules/@jridgewell/trace-mapping/dist/trace-mapping.umd.js.map @@ -0,0 +1 @@ +{"version":3,"file":"trace-mapping.umd.js","sources":["../src/resolve.ts","../src/strip-filename.ts","../src/sourcemap-segment.ts","../src/sort.ts","../src/binary-search.ts","../src/by-source.ts","../src/any-map.ts","../src/trace-mapping.ts"],"sourcesContent":["import resolveUri from '@jridgewell/resolve-uri';\n\nexport default function resolve(input: string, base: string | undefined): string {\n // The base is always treated as a directory, if it's not empty.\n // https://github.com/mozilla/source-map/blob/8cb3ee57/lib/util.js#L327\n // https://github.com/chromium/chromium/blob/da4adbb3/third_party/blink/renderer/devtools/front_end/sdk/SourceMap.js#L400-L401\n if (base && !base.endsWith('/')) base += '/';\n\n return resolveUri(input, base);\n}\n","/**\n * Removes everything after the last \"/\", but leaves the slash.\n */\nexport default function stripFilename(path: string | undefined | null): string {\n if (!path) return '';\n const index = path.lastIndexOf('/');\n return path.slice(0, index + 1);\n}\n","type GeneratedColumn = number;\ntype SourcesIndex = number;\ntype SourceLine = number;\ntype SourceColumn = number;\ntype NamesIndex = number;\n\ntype GeneratedLine = number;\n\nexport type SourceMapSegment =\n | [GeneratedColumn]\n | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn]\n | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn, NamesIndex];\n\nexport type ReverseSegment = [SourceColumn, GeneratedLine, GeneratedColumn];\n\nexport const COLUMN = 0;\nexport const SOURCES_INDEX = 1;\nexport const SOURCE_LINE = 2;\nexport const SOURCE_COLUMN = 3;\nexport const NAMES_INDEX = 4;\n\nexport const REV_GENERATED_LINE = 1;\nexport const REV_GENERATED_COLUMN = 2;\n","import { COLUMN } from './sourcemap-segment';\n\nimport type { SourceMapSegment } from './sourcemap-segment';\n\nexport default function maybeSort(\n mappings: SourceMapSegment[][],\n owned: boolean,\n): SourceMapSegment[][] {\n const unsortedIndex = nextUnsortedSegmentLine(mappings, 0);\n if (unsortedIndex === mappings.length) return mappings;\n\n // If we own the array (meaning we parsed it from JSON), then we're free to directly mutate it. 
If\n // not, we do not want to modify the consumer's input array.\n if (!owned) mappings = mappings.slice();\n\n for (let i = unsortedIndex; i < mappings.length; i = nextUnsortedSegmentLine(mappings, i + 1)) {\n mappings[i] = sortSegments(mappings[i], owned);\n }\n return mappings;\n}\n\nfunction nextUnsortedSegmentLine(mappings: SourceMapSegment[][], start: number): number {\n for (let i = start; i < mappings.length; i++) {\n if (!isSorted(mappings[i])) return i;\n }\n return mappings.length;\n}\n\nfunction isSorted(line: SourceMapSegment[]): boolean {\n for (let j = 1; j < line.length; j++) {\n if (line[j][COLUMN] < line[j - 1][COLUMN]) {\n return false;\n }\n }\n return true;\n}\n\nfunction sortSegments(line: SourceMapSegment[], owned: boolean): SourceMapSegment[] {\n if (!owned) line = line.slice();\n return line.sort(sortComparator);\n}\n\nfunction sortComparator(a: SourceMapSegment, b: SourceMapSegment): number {\n return a[COLUMN] - b[COLUMN];\n}\n","import type { SourceMapSegment, ReverseSegment } from './sourcemap-segment';\nimport { COLUMN } from './sourcemap-segment';\n\nexport type MemoState = {\n lastKey: number;\n lastNeedle: number;\n lastIndex: number;\n};\n\nexport let found = false;\n\n/**\n * A binary search implementation that returns the index if a match is found.\n * If no match is found, then the left-index (the index associated with the item that comes just\n * before the desired index) is returned. To maintain proper sort order, a splice would happen at\n * the next index:\n *\n * ```js\n * const array = [1, 3];\n * const needle = 2;\n * const index = binarySearch(array, needle, (item, needle) => item - needle);\n *\n * assert.equal(index, 0);\n * array.splice(index + 1, 0, needle);\n * assert.deepEqual(array, [1, 2, 3]);\n * ```\n */\nexport function binarySearch(\n haystack: SourceMapSegment[] | ReverseSegment[],\n needle: number,\n low: number,\n high: number,\n): number {\n while (low <= high) {\n const mid = low + ((high - low) >> 1);\n const cmp = haystack[mid][COLUMN] - needle;\n\n if (cmp === 0) {\n found = true;\n return mid;\n }\n\n if (cmp < 0) {\n low = mid + 1;\n } else {\n high = mid - 1;\n }\n }\n\n found = false;\n return low - 1;\n}\n\nexport function upperBound(\n haystack: SourceMapSegment[] | ReverseSegment[],\n needle: number,\n index: number,\n): number {\n for (let i = index + 1; i < haystack.length; index = i++) {\n if (haystack[i][COLUMN] !== needle) break;\n }\n return index;\n}\n\nexport function lowerBound(\n haystack: SourceMapSegment[] | ReverseSegment[],\n needle: number,\n index: number,\n): number {\n for (let i = index - 1; i >= 0; index = i--) {\n if (haystack[i][COLUMN] !== needle) break;\n }\n return index;\n}\n\nexport function memoizedState(): MemoState {\n return {\n lastKey: -1,\n lastNeedle: -1,\n lastIndex: -1,\n };\n}\n\n/**\n * This overly complicated beast is just to record the last tested line/column and the resulting\n * index, allowing us to skip a few tests if mappings are monotonically increasing.\n */\nexport function memoizedBinarySearch(\n haystack: SourceMapSegment[] | ReverseSegment[],\n needle: number,\n state: MemoState,\n key: number,\n): number {\n const { lastKey, lastNeedle, lastIndex } = state;\n\n let low = 0;\n let high = haystack.length - 1;\n if (key === lastKey) {\n if (needle === lastNeedle) {\n found = lastIndex !== -1 && haystack[lastIndex][COLUMN] === needle;\n return lastIndex;\n }\n\n if (needle >= lastNeedle) {\n // lastIndex may be -1 if the previous needle was not found.\n low = lastIndex 
=== -1 ? 0 : lastIndex;\n } else {\n high = lastIndex;\n }\n }\n state.lastKey = key;\n state.lastNeedle = needle;\n\n return (state.lastIndex = binarySearch(haystack, needle, low, high));\n}\n","import { COLUMN, SOURCES_INDEX, SOURCE_LINE, SOURCE_COLUMN } from './sourcemap-segment';\nimport { memoizedBinarySearch, upperBound } from './binary-search';\n\nimport type { ReverseSegment, SourceMapSegment } from './sourcemap-segment';\nimport type { MemoState } from './binary-search';\n\nexport type Source = {\n __proto__: null;\n [line: number]: Exclude[];\n};\n\n// Rebuilds the original source files, with mappings that are ordered by source line/column instead\n// of generated line/column.\nexport default function buildBySources(\n decoded: readonly SourceMapSegment[][],\n memos: MemoState[],\n): Source[] {\n const sources: Source[] = memos.map(buildNullArray);\n\n for (let i = 0; i < decoded.length; i++) {\n const line = decoded[i];\n for (let j = 0; j < line.length; j++) {\n const seg = line[j];\n if (seg.length === 1) continue;\n\n const sourceIndex = seg[SOURCES_INDEX];\n const sourceLine = seg[SOURCE_LINE];\n const sourceColumn = seg[SOURCE_COLUMN];\n const originalSource = sources[sourceIndex];\n const originalLine = (originalSource[sourceLine] ||= []);\n const memo = memos[sourceIndex];\n\n // The binary search either found a match, or it found the left-index just before where the\n // segment should go. Either way, we want to insert after that. And there may be multiple\n // generated segments associated with an original location, so there may need to move several\n // indexes before we find where we need to insert.\n const index = upperBound(\n originalLine,\n sourceColumn,\n memoizedBinarySearch(originalLine, sourceColumn, memo, sourceLine),\n );\n\n insert(originalLine, (memo.lastIndex = index + 1), [sourceColumn, i, seg[COLUMN]]);\n }\n }\n\n return sources;\n}\n\nfunction insert(array: T[], index: number, value: T) {\n for (let i = array.length; i > index; i--) {\n array[i] = array[i - 1];\n }\n array[index] = value;\n}\n\n// Null arrays allow us to use ordered index keys without actually allocating contiguous memory like\n// a real array. We use a null-prototype object to avoid prototype pollution and deoptimizations.\n// Numeric properties on objects are magically sorted in ascending order by the engine regardless of\n// the insertion order. So, by setting any numeric keys, even out of order, we'll get ascending\n// order when iterating with for-in.\nfunction buildNullArray(): T {\n return { __proto__: null } as T;\n}\n","import { TraceMap, presortedDecodedMap, decodedMappings } from './trace-mapping';\nimport {\n COLUMN,\n SOURCES_INDEX,\n SOURCE_LINE,\n SOURCE_COLUMN,\n NAMES_INDEX,\n} from './sourcemap-segment';\n\nimport type {\n Section,\n SectionedSourceMap,\n DecodedSourceMap,\n SectionedSourceMapInput,\n Ro,\n} from './types';\nimport type { SourceMapSegment } from './sourcemap-segment';\n\ntype AnyMap = {\n new (map: SectionedSourceMapInput, mapUrl?: string | null): TraceMap;\n (map: SectionedSourceMapInput, mapUrl?: string | null): TraceMap;\n};\n\nexport const AnyMap: AnyMap = function (map, mapUrl) {\n const parsed =\n typeof map === 'string' ? 
(JSON.parse(map) as Exclude) : map;\n\n if (!('sections' in parsed)) return new TraceMap(parsed, mapUrl);\n\n const mappings: SourceMapSegment[][] = [];\n const sources: string[] = [];\n const sourcesContent: (string | null)[] = [];\n const names: string[] = [];\n\n recurse(parsed, mapUrl, mappings, sources, sourcesContent, names, 0, 0, Infinity, Infinity);\n\n const joined: DecodedSourceMap = {\n version: 3,\n file: parsed.file,\n names,\n sources,\n sourcesContent,\n mappings,\n };\n\n return presortedDecodedMap(joined);\n} as AnyMap;\n\nfunction recurse(\n input: Ro,\n mapUrl: string | null | undefined,\n mappings: SourceMapSegment[][],\n sources: string[],\n sourcesContent: (string | null)[],\n names: string[],\n lineOffset: number,\n columnOffset: number,\n stopLine: number,\n stopColumn: number,\n) {\n const { sections } = input;\n for (let i = 0; i < sections.length; i++) {\n const { map, offset } = sections[i];\n\n let sl = stopLine;\n let sc = stopColumn;\n if (i + 1 < sections.length) {\n const nextOffset = sections[i + 1].offset;\n sl = Math.min(stopLine, lineOffset + nextOffset.line);\n\n if (sl === stopLine) {\n sc = Math.min(stopColumn, columnOffset + nextOffset.column);\n } else if (sl < stopLine) {\n sc = columnOffset + nextOffset.column;\n }\n }\n\n addSection(\n map,\n mapUrl,\n mappings,\n sources,\n sourcesContent,\n names,\n lineOffset + offset.line,\n columnOffset + offset.column,\n sl,\n sc,\n );\n }\n}\n\nfunction addSection(\n input: Ro,\n mapUrl: string | null | undefined,\n mappings: SourceMapSegment[][],\n sources: string[],\n sourcesContent: (string | null)[],\n names: string[],\n lineOffset: number,\n columnOffset: number,\n stopLine: number,\n stopColumn: number,\n) {\n if ('sections' in input) return recurse(...(arguments as unknown as Parameters));\n\n const map = new TraceMap(input, mapUrl);\n const sourcesOffset = sources.length;\n const namesOffset = names.length;\n const decoded = decodedMappings(map);\n const { resolvedSources, sourcesContent: contents } = map;\n\n append(sources, resolvedSources);\n append(names, map.names);\n if (contents) append(sourcesContent, contents);\n else for (let i = 0; i < resolvedSources.length; i++) sourcesContent.push(null);\n\n for (let i = 0; i < decoded.length; i++) {\n const lineI = lineOffset + i;\n\n // We can only add so many lines before we step into the range that the next section's map\n // controls. When we get to the last line, then we'll start checking the segments to see if\n // they've crossed into the column range. But it may not have any columns that overstep, so we\n // still need to check that we don't overstep lines, too.\n if (lineI > stopLine) return;\n\n // The out line may already exist in mappings (if we're continuing the line started by a\n // previous section). Or, we may have jumped ahead several lines to start this section.\n const out = getLine(mappings, lineI);\n // On the 0th loop, the section's column offset shifts us forward. On all other lines (since the\n // map can be multiple lines), it doesn't.\n const cOffset = i === 0 ? 
columnOffset : 0;\n\n const line = decoded[i];\n for (let j = 0; j < line.length; j++) {\n const seg = line[j];\n const column = cOffset + seg[COLUMN];\n\n // If this segment steps into the column range that the next section's map controls, we need\n // to stop early.\n if (lineI === stopLine && column >= stopColumn) return;\n\n if (seg.length === 1) {\n out.push([column]);\n continue;\n }\n\n const sourcesIndex = sourcesOffset + seg[SOURCES_INDEX];\n const sourceLine = seg[SOURCE_LINE];\n const sourceColumn = seg[SOURCE_COLUMN];\n out.push(\n seg.length === 4\n ? [column, sourcesIndex, sourceLine, sourceColumn]\n : [column, sourcesIndex, sourceLine, sourceColumn, namesOffset + seg[NAMES_INDEX]],\n );\n }\n }\n}\n\nfunction append(arr: T[], other: T[]) {\n for (let i = 0; i < other.length; i++) arr.push(other[i]);\n}\n\nfunction getLine(arr: T[][], index: number): T[] {\n for (let i = arr.length; i <= index; i++) arr[i] = [];\n return arr[index];\n}\n","import { encode, decode } from '@jridgewell/sourcemap-codec';\n\nimport resolve from './resolve';\nimport stripFilename from './strip-filename';\nimport maybeSort from './sort';\nimport buildBySources from './by-source';\nimport {\n memoizedState,\n memoizedBinarySearch,\n upperBound,\n lowerBound,\n found as bsFound,\n} from './binary-search';\nimport {\n COLUMN,\n SOURCES_INDEX,\n SOURCE_LINE,\n SOURCE_COLUMN,\n NAMES_INDEX,\n REV_GENERATED_LINE,\n REV_GENERATED_COLUMN,\n} from './sourcemap-segment';\n\nimport type { SourceMapSegment, ReverseSegment } from './sourcemap-segment';\nimport type {\n SourceMapV3,\n DecodedSourceMap,\n EncodedSourceMap,\n InvalidOriginalMapping,\n OriginalMapping,\n InvalidGeneratedMapping,\n GeneratedMapping,\n SourceMapInput,\n Needle,\n SourceNeedle,\n SourceMap,\n EachMapping,\n Bias,\n} from './types';\nimport type { Source } from './by-source';\nimport type { MemoState } from './binary-search';\n\nexport type { SourceMapSegment } from './sourcemap-segment';\nexport type {\n SourceMapInput,\n SectionedSourceMapInput,\n DecodedSourceMap,\n EncodedSourceMap,\n SectionedSourceMap,\n InvalidOriginalMapping,\n OriginalMapping as Mapping,\n OriginalMapping,\n InvalidGeneratedMapping,\n GeneratedMapping,\n EachMapping,\n} from './types';\n\nconst LINE_GTR_ZERO = '`line` must be greater than 0 (lines start at line 1)';\nconst COL_GTR_EQ_ZERO = '`column` must be greater than or equal to 0 (columns start at column 0)';\n\nexport const LEAST_UPPER_BOUND = -1;\nexport const GREATEST_LOWER_BOUND = 1;\n\n/**\n * Returns the encoded (VLQ string) form of the SourceMap's mappings field.\n */\nexport let encodedMappings: (map: TraceMap) => EncodedSourceMap['mappings'];\n\n/**\n * Returns the decoded (array of lines of segments) form of the SourceMap's mappings field.\n */\nexport let decodedMappings: (map: TraceMap) => Readonly;\n\n/**\n * A low-level API to find the segment associated with a generated line/column (think, from a\n * stack trace). Line and column here are 0-based, unlike `originalPositionFor`.\n */\nexport let traceSegment: (\n map: TraceMap,\n line: number,\n column: number,\n) => Readonly | null;\n\n/**\n * A higher-level API to find the source/line/column associated with a generated line/column\n * (think, from a stack trace). 
Line is 1-based, but column is 0-based, due to legacy behavior in\n * `source-map` library.\n */\nexport let originalPositionFor: (\n map: TraceMap,\n needle: Needle,\n) => OriginalMapping | InvalidOriginalMapping;\n\n/**\n * Finds the generated line/column position of the provided source/line/column source position.\n */\nexport let generatedPositionFor: (\n map: TraceMap,\n needle: SourceNeedle,\n) => GeneratedMapping | InvalidGeneratedMapping;\n\n/**\n * Finds all generated line/column positions of the provided source/line/column source position.\n */\nexport let allGeneratedPositionsFor: (map: TraceMap, needle: SourceNeedle) => GeneratedMapping[];\n\n/**\n * Iterates each mapping in generated position order.\n */\nexport let eachMapping: (map: TraceMap, cb: (mapping: EachMapping) => void) => void;\n\n/**\n * Retrieves the source content for a particular source, if its found. Returns null if not.\n */\nexport let sourceContentFor: (map: TraceMap, source: string) => string | null;\n\n/**\n * A helper that skips sorting of the input map's mappings array, which can be expensive for larger\n * maps.\n */\nexport let presortedDecodedMap: (map: DecodedSourceMap, mapUrl?: string) => TraceMap;\n\n/**\n * Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects\n * a sourcemap, or to JSON.stringify.\n */\nexport let decodedMap: (\n map: TraceMap,\n) => Omit & { mappings: readonly SourceMapSegment[][] };\n\n/**\n * Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects\n * a sourcemap, or to JSON.stringify.\n */\nexport let encodedMap: (map: TraceMap) => EncodedSourceMap;\n\nexport { AnyMap } from './any-map';\n\nexport class TraceMap implements SourceMap {\n declare version: SourceMapV3['version'];\n declare file: SourceMapV3['file'];\n declare names: SourceMapV3['names'];\n declare sourceRoot: SourceMapV3['sourceRoot'];\n declare sources: SourceMapV3['sources'];\n declare sourcesContent: SourceMapV3['sourcesContent'];\n\n declare resolvedSources: string[];\n private declare _encoded: string | undefined;\n\n private declare _decoded: SourceMapSegment[][] | undefined;\n private declare _decodedMemo: MemoState;\n\n private declare _bySources: Source[] | undefined;\n private declare _bySourceMemos: MemoState[] | undefined;\n\n constructor(map: SourceMapInput, mapUrl?: string | null) {\n const isString = typeof map === 'string';\n\n if (!isString && (map as unknown as { _decodedMemo: any })._decodedMemo) return map as TraceMap;\n\n const parsed = (isString ? 
JSON.parse(map) : map) as DecodedSourceMap | EncodedSourceMap;\n\n const { version, file, names, sourceRoot, sources, sourcesContent } = parsed;\n this.version = version;\n this.file = file;\n this.names = names;\n this.sourceRoot = sourceRoot;\n this.sources = sources;\n this.sourcesContent = sourcesContent;\n\n const from = resolve(sourceRoot || '', stripFilename(mapUrl));\n this.resolvedSources = sources.map((s) => resolve(s || '', from));\n\n const { mappings } = parsed;\n if (typeof mappings === 'string') {\n this._encoded = mappings;\n this._decoded = undefined;\n } else {\n this._encoded = undefined;\n this._decoded = maybeSort(mappings, isString);\n }\n\n this._decodedMemo = memoizedState();\n this._bySources = undefined;\n this._bySourceMemos = undefined;\n }\n\n static {\n encodedMappings = (map) => {\n return (map._encoded ??= encode(map._decoded!));\n };\n\n decodedMappings = (map) => {\n return (map._decoded ||= decode(map._encoded!));\n };\n\n traceSegment = (map, line, column) => {\n const decoded = decodedMappings(map);\n\n // It's common for parent source maps to have pointers to lines that have no\n // mapping (like a \"//# sourceMappingURL=\") at the end of the child file.\n if (line >= decoded.length) return null;\n\n const segments = decoded[line];\n const index = traceSegmentInternal(\n segments,\n map._decodedMemo,\n line,\n column,\n GREATEST_LOWER_BOUND,\n );\n\n return index === -1 ? null : segments[index];\n };\n\n originalPositionFor = (map, { line, column, bias }) => {\n line--;\n if (line < 0) throw new Error(LINE_GTR_ZERO);\n if (column < 0) throw new Error(COL_GTR_EQ_ZERO);\n\n const decoded = decodedMappings(map);\n\n // It's common for parent source maps to have pointers to lines that have no\n // mapping (like a \"//# sourceMappingURL=\") at the end of the child file.\n if (line >= decoded.length) return OMapping(null, null, null, null);\n\n const segments = decoded[line];\n const index = traceSegmentInternal(\n segments,\n map._decodedMemo,\n line,\n column,\n bias || GREATEST_LOWER_BOUND,\n );\n\n if (index === -1) return OMapping(null, null, null, null);\n\n const segment = segments[index];\n if (segment.length === 1) return OMapping(null, null, null, null);\n\n const { names, resolvedSources } = map;\n return OMapping(\n resolvedSources[segment[SOURCES_INDEX]],\n segment[SOURCE_LINE] + 1,\n segment[SOURCE_COLUMN],\n segment.length === 5 ? 
names[segment[NAMES_INDEX]] : null,\n );\n };\n\n allGeneratedPositionsFor = (map, { source, line, column, bias }) => {\n // SourceMapConsumer uses LEAST_UPPER_BOUND for some reason, so we follow suit.\n return generatedPosition(map, source, line, column, bias || LEAST_UPPER_BOUND, true);\n };\n\n generatedPositionFor = (map, { source, line, column, bias }) => {\n return generatedPosition(map, source, line, column, bias || GREATEST_LOWER_BOUND, false);\n };\n\n eachMapping = (map, cb) => {\n const decoded = decodedMappings(map);\n const { names, resolvedSources } = map;\n\n for (let i = 0; i < decoded.length; i++) {\n const line = decoded[i];\n for (let j = 0; j < line.length; j++) {\n const seg = line[j];\n\n const generatedLine = i + 1;\n const generatedColumn = seg[0];\n let source = null;\n let originalLine = null;\n let originalColumn = null;\n let name = null;\n if (seg.length !== 1) {\n source = resolvedSources[seg[1]];\n originalLine = seg[2] + 1;\n originalColumn = seg[3];\n }\n if (seg.length === 5) name = names[seg[4]];\n\n cb({\n generatedLine,\n generatedColumn,\n source,\n originalLine,\n originalColumn,\n name,\n } as EachMapping);\n }\n }\n };\n\n sourceContentFor = (map, source) => {\n const { sources, resolvedSources, sourcesContent } = map;\n if (sourcesContent == null) return null;\n\n let index = sources.indexOf(source);\n if (index === -1) index = resolvedSources.indexOf(source);\n\n return index === -1 ? null : sourcesContent[index];\n };\n\n presortedDecodedMap = (map, mapUrl) => {\n const tracer = new TraceMap(clone(map, []), mapUrl);\n tracer._decoded = map.mappings;\n return tracer;\n };\n\n decodedMap = (map) => {\n return clone(map, decodedMappings(map));\n };\n\n encodedMap = (map) => {\n return clone(map, encodedMappings(map));\n };\n\n function generatedPosition(\n map: TraceMap,\n source: string,\n line: number,\n column: number,\n bias: Bias,\n all: false,\n ): GeneratedMapping | InvalidGeneratedMapping;\n function generatedPosition(\n map: TraceMap,\n source: string,\n line: number,\n column: number,\n bias: Bias,\n all: true,\n ): GeneratedMapping[];\n function generatedPosition(\n map: TraceMap,\n source: string,\n line: number,\n column: number,\n bias: Bias,\n all: boolean,\n ): GeneratedMapping | InvalidGeneratedMapping | GeneratedMapping[] {\n line--;\n if (line < 0) throw new Error(LINE_GTR_ZERO);\n if (column < 0) throw new Error(COL_GTR_EQ_ZERO);\n\n const { sources, resolvedSources } = map;\n let sourceIndex = sources.indexOf(source);\n if (sourceIndex === -1) sourceIndex = resolvedSources.indexOf(source);\n if (sourceIndex === -1) return all ? [] : GMapping(null, null);\n\n const generated = (map._bySources ||= buildBySources(\n decodedMappings(map),\n (map._bySourceMemos = sources.map(memoizedState)),\n ));\n\n const segments = generated[sourceIndex][line];\n if (segments == null) return all ? [] : GMapping(null, null);\n\n const memo = map._bySourceMemos![sourceIndex];\n\n if (all) return sliceGeneratedPositions(segments, memo, line, column, bias);\n\n const index = traceSegmentInternal(segments, memo, line, column, bias);\n if (index === -1) return GMapping(null, null);\n\n const segment = segments[index];\n return GMapping(segment[REV_GENERATED_LINE] + 1, segment[REV_GENERATED_COLUMN]);\n }\n }\n}\n\nfunction clone(\n map: TraceMap | DecodedSourceMap | EncodedSourceMap,\n mappings: T,\n): T extends string ? 
EncodedSourceMap : DecodedSourceMap {\n return {\n version: map.version,\n file: map.file,\n names: map.names,\n sourceRoot: map.sourceRoot,\n sources: map.sources,\n sourcesContent: map.sourcesContent,\n mappings,\n } as any;\n}\n\nfunction OMapping(source: null, line: null, column: null, name: null): InvalidOriginalMapping;\nfunction OMapping(\n source: string,\n line: number,\n column: number,\n name: string | null,\n): OriginalMapping;\nfunction OMapping(\n source: string | null,\n line: number | null,\n column: number | null,\n name: string | null,\n): OriginalMapping | InvalidOriginalMapping {\n return { source, line, column, name } as any;\n}\n\nfunction GMapping(line: null, column: null): InvalidGeneratedMapping;\nfunction GMapping(line: number, column: number): GeneratedMapping;\nfunction GMapping(\n line: number | null,\n column: number | null,\n): GeneratedMapping | InvalidGeneratedMapping {\n return { line, column } as any;\n}\n\nfunction traceSegmentInternal(\n segments: SourceMapSegment[],\n memo: MemoState,\n line: number,\n column: number,\n bias: Bias,\n): number;\nfunction traceSegmentInternal(\n segments: ReverseSegment[],\n memo: MemoState,\n line: number,\n column: number,\n bias: Bias,\n): number;\nfunction traceSegmentInternal(\n segments: SourceMapSegment[] | ReverseSegment[],\n memo: MemoState,\n line: number,\n column: number,\n bias: Bias,\n): number {\n let index = memoizedBinarySearch(segments, column, memo, line);\n if (bsFound) {\n index = (bias === LEAST_UPPER_BOUND ? upperBound : lowerBound)(segments, column, index);\n } else if (bias === LEAST_UPPER_BOUND) index++;\n\n if (index === -1 || index === segments.length) return -1;\n return index;\n}\n\nfunction sliceGeneratedPositions(\n segments: ReverseSegment[],\n memo: MemoState,\n line: number,\n column: number,\n bias: Bias,\n): GeneratedMapping[] {\n let min = traceSegmentInternal(segments, memo, line, column, GREATEST_LOWER_BOUND);\n\n // We ignored the bias when tracing the segment so that we're guarnateed to find the first (in\n // insertion order) segment that matched. Even if we did respect the bias when tracing, we would\n // still need to call `lowerBound()` to find the first segment, which is slower than just looking\n // for the GREATEST_LOWER_BOUND to begin with. The only difference that matters for us is when the\n // binary search didn't match, in which case GREATEST_LOWER_BOUND just needs to increment to\n // match LEAST_UPPER_BOUND.\n if (!bsFound && bias === LEAST_UPPER_BOUND) min++;\n\n if (min === -1 || min === segments.length) return [];\n\n // We may have found the segment that started at an earlier column. If this is the case, then we\n // need to slice all generated segments that match _that_ column, because all such segments span\n // to our desired column.\n const matchedColumn = bsFound ? 
column : segments[min][COLUMN];\n\n // The binary search is not guaranteed to find the lower bound when a match wasn't found.\n if (!bsFound) min = lowerBound(segments, matchedColumn, min);\n const max = upperBound(segments, matchedColumn, min);\n\n const result = [];\n for (; min <= max; min++) {\n const segment = segments[min];\n result.push(GMapping(segment[REV_GENERATED_LINE] + 1, segment[REV_GENERATED_COLUMN]));\n }\n return result;\n}\n"],"names":["resolveUri","presortedDecodedMap","decodedMappings","encodedMappings","traceSegment","originalPositionFor","generatedPositionFor","allGeneratedPositionsFor","eachMapping","sourceContentFor","decodedMap","encodedMap","encode","decode","bsFound"],"mappings":";;;;;;;;;;IAEc,SAAU,OAAO,CAAC,KAAa,EAAE,IAAwB,EAAA;;;;QAIrE,IAAI,IAAI,IAAI,CAAC,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC;YAAE,IAAI,IAAI,GAAG,CAAC;IAE7C,IAAA,OAAOA,8BAAU,CAAC,KAAK,EAAE,IAAI,CAAC,CAAC;IACjC;;ICTA;;IAEG;IACqB,SAAA,aAAa,CAAC,IAA+B,EAAA;IACnE,IAAA,IAAI,CAAC,IAAI;IAAE,QAAA,OAAO,EAAE,CAAC;QACrB,MAAM,KAAK,GAAG,IAAI,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC;QACpC,OAAO,IAAI,CAAC,KAAK,CAAC,CAAC,EAAE,KAAK,GAAG,CAAC,CAAC,CAAC;IAClC;;ICQO,MAAM,MAAM,GAAG,CAAC,CAAC;IACjB,MAAM,aAAa,GAAG,CAAC,CAAC;IACxB,MAAM,WAAW,GAAG,CAAC,CAAC;IACtB,MAAM,aAAa,GAAG,CAAC,CAAC;IACxB,MAAM,WAAW,GAAG,CAAC,CAAC;IAEtB,MAAM,kBAAkB,GAAG,CAAC,CAAC;IAC7B,MAAM,oBAAoB,GAAG,CAAC;;IClBvB,SAAU,SAAS,CAC/B,QAA8B,EAC9B,KAAc,EAAA;QAEd,MAAM,aAAa,GAAG,uBAAuB,CAAC,QAAQ,EAAE,CAAC,CAAC,CAAC;IAC3D,IAAA,IAAI,aAAa,KAAK,QAAQ,CAAC,MAAM;IAAE,QAAA,OAAO,QAAQ,CAAC;;;IAIvD,IAAA,IAAI,CAAC,KAAK;IAAE,QAAA,QAAQ,GAAG,QAAQ,CAAC,KAAK,EAAE,CAAC;QAExC,KAAK,IAAI,CAAC,GAAG,aAAa,EAAE,CAAC,GAAG,QAAQ,CAAC,MAAM,EAAE,CAAC,GAAG,uBAAuB,CAAC,QAAQ,EAAE,CAAC,GAAG,CAAC,CAAC,EAAE;IAC7F,QAAA,QAAQ,CAAC,CAAC,CAAC,GAAG,YAAY,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC;IAChD,KAAA;IACD,IAAA,OAAO,QAAQ,CAAC;IAClB,CAAC;IAED,SAAS,uBAAuB,CAAC,QAA8B,EAAE,KAAa,EAAA;IAC5E,IAAA,KAAK,IAAI,CAAC,GAAG,KAAK,EAAE,CAAC,GAAG,QAAQ,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;IAC5C,QAAA,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC;IAAE,YAAA,OAAO,CAAC,CAAC;IACtC,KAAA;QACD,OAAO,QAAQ,CAAC,MAAM,CAAC;IACzB,CAAC;IAED,SAAS,QAAQ,CAAC,IAAwB,EAAA;IACxC,IAAA,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;IACpC,QAAA,IAAI,IAAI,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,GAAG,IAAI,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,EAAE;IACzC,YAAA,OAAO,KAAK,CAAC;IACd,SAAA;IACF,KAAA;IACD,IAAA,OAAO,IAAI,CAAC;IACd,CAAC;IAED,SAAS,YAAY,CAAC,IAAwB,EAAE,KAAc,EAAA;IAC5D,IAAA,IAAI,CAAC,KAAK;IAAE,QAAA,IAAI,GAAG,IAAI,CAAC,KAAK,EAAE,CAAC;IAChC,IAAA,OAAO,IAAI,CAAC,IAAI,CAAC,cAAc,CAAC,CAAC;IACnC,CAAC;IAED,SAAS,cAAc,CAAC,CAAmB,EAAE,CAAmB,EAAA;QAC9D,OAAO,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,MAAM,CAAC,CAAC;IAC/B;;ICnCO,IAAI,KAAK,GAAG,KAAK,CAAC;IAEzB;;;;;;;;;;;;;;;IAeG;IACG,SAAU,YAAY,CAC1B,QAA+C,EAC/C,MAAc,EACd,GAAW,EACX,IAAY,EAAA;QAEZ,OAAO,GAAG,IAAI,IAAI,EAAE;IAClB,QAAA,MAAM,GAAG,GAAG,GAAG,IAAI,CAAC,IAAI,GAAG,GAAG,KAAK,CAAC,CAAC,CAAC;YACtC,MAAM,GAAG,GAAG,QAAQ,CAAC,GAAG,CAAC,CAAC,MAAM,CAAC,GAAG,MAAM,CAAC;YAE3C,IAAI,GAAG,KAAK,CAAC,EAAE;gBACb,KAAK,GAAG,IAAI,CAAC;IACb,YAAA,OAAO,GAAG,CAAC;IACZ,SAAA;YAED,IAAI,GAAG,GAAG,CAAC,EAAE;IACX,YAAA,GAAG,GAAG,GAAG,GAAG,CAAC,CAAC;IACf,SAAA;IAAM,aAAA;IACL,YAAA,IAAI,GAAG,GAAG,GAAG,CAAC,CAAC;IAChB,SAAA;IACF,KAAA;QAED,KAAK,GAAG,KAAK,CAAC;QACd,OAAO,GAAG,GAAG,CAAC,CAAC;IACjB,CAAC;aAEe,UAAU,CACxB,QAA+C,EAC/C,MAAc,EACd,KAAa,EAAA;IAEb,IAAA,KAAK,IAAI,CAAC,GAAG,KAAK,GAAG,CAAC,EAAE,CAAC,GAAG,QAAQ,CAAC,MAAM,EAAE,KAAK,GAAG,CAAC,EAAE,EAAE;YACxD,IAAI,QAAQ,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,MAAM;gBAAE,MAAM;IAC3C,KAAA;IACD,IAAA,OAAO,KAAK,CA
AC;IACf,CAAC;aAEe,UAAU,CACxB,QAA+C,EAC/C,MAAc,EACd,KAAa,EAAA;IAEb,IAAA,KAAK,IAAI,CAAC,GAAG,KAAK,GAAG,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,KAAK,GAAG,CAAC,EAAE,EAAE;YAC3C,IAAI,QAAQ,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,MAAM;gBAAE,MAAM;IAC3C,KAAA;IACD,IAAA,OAAO,KAAK,CAAC;IACf,CAAC;aAEe,aAAa,GAAA;QAC3B,OAAO;YACL,OAAO,EAAE,CAAC,CAAC;YACX,UAAU,EAAE,CAAC,CAAC;YACd,SAAS,EAAE,CAAC,CAAC;SACd,CAAC;IACJ,CAAC;IAED;;;IAGG;IACG,SAAU,oBAAoB,CAClC,QAA+C,EAC/C,MAAc,EACd,KAAgB,EAChB,GAAW,EAAA;QAEX,MAAM,EAAE,OAAO,EAAE,UAAU,EAAE,SAAS,EAAE,GAAG,KAAK,CAAC;QAEjD,IAAI,GAAG,GAAG,CAAC,CAAC;IACZ,IAAA,IAAI,IAAI,GAAG,QAAQ,CAAC,MAAM,GAAG,CAAC,CAAC;QAC/B,IAAI,GAAG,KAAK,OAAO,EAAE;YACnB,IAAI,MAAM,KAAK,UAAU,EAAE;IACzB,YAAA,KAAK,GAAG,SAAS,KAAK,CAAC,CAAC,IAAI,QAAQ,CAAC,SAAS,CAAC,CAAC,MAAM,CAAC,KAAK,MAAM,CAAC;IACnE,YAAA,OAAO,SAAS,CAAC;IAClB,SAAA;YAED,IAAI,MAAM,IAAI,UAAU,EAAE;;IAExB,YAAA,GAAG,GAAG,SAAS,KAAK,CAAC,CAAC,GAAG,CAAC,GAAG,SAAS,CAAC;IACxC,SAAA;IAAM,aAAA;gBACL,IAAI,GAAG,SAAS,CAAC;IAClB,SAAA;IACF,KAAA;IACD,IAAA,KAAK,CAAC,OAAO,GAAG,GAAG,CAAC;IACpB,IAAA,KAAK,CAAC,UAAU,GAAG,MAAM,CAAC;IAE1B,IAAA,QAAQ,KAAK,CAAC,SAAS,GAAG,YAAY,CAAC,QAAQ,EAAE,MAAM,EAAE,GAAG,EAAE,IAAI,CAAC,EAAE;IACvE;;ICvGA;IACA;IACc,SAAU,cAAc,CACpC,OAAsC,EACtC,KAAkB,EAAA;QAElB,MAAM,OAAO,GAAa,KAAK,CAAC,GAAG,CAAC,cAAc,CAAC,CAAC;IAEpD,IAAA,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,OAAO,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;IACvC,QAAA,MAAM,IAAI,GAAG,OAAO,CAAC,CAAC,CAAC,CAAC;IACxB,QAAA,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;IACpC,YAAA,MAAM,GAAG,GAAG,IAAI,CAAC,CAAC,CAAC,CAAC;IACpB,YAAA,IAAI,GAAG,CAAC,MAAM,KAAK,CAAC;oBAAE,SAAS;IAE/B,YAAA,MAAM,WAAW,GAAG,GAAG,CAAC,aAAa,CAAC,CAAC;IACvC,YAAA,MAAM,UAAU,GAAG,GAAG,CAAC,WAAW,CAAC,CAAC;IACpC,YAAA,MAAM,YAAY,GAAG,GAAG,CAAC,aAAa,CAAC,CAAC;IACxC,YAAA,MAAM,cAAc,GAAG,OAAO,CAAC,WAAW,CAAC,CAAC;IAC5C,YAAA,MAAM,YAAY,IAAI,cAAc,CAAC,UAAU,CAAzB,KAAA,cAAc,CAAC,UAAU,CAAM,GAAA,EAAE,EAAC,CAAC;IACzD,YAAA,MAAM,IAAI,GAAG,KAAK,CAAC,WAAW,CAAC,CAAC;;;;;IAMhC,YAAA,MAAM,KAAK,GAAG,UAAU,CACtB,YAAY,EACZ,YAAY,EACZ,oBAAoB,CAAC,YAAY,EAAE,YAAY,EAAE,IAAI,EAAE,UAAU,CAAC,CACnE,CAAC;gBAEF,MAAM,CAAC,YAAY,GAAG,IAAI,CAAC,SAAS,GAAG,KAAK,GAAG,CAAC,GAAG,CAAC,YAAY,EAAE,CAAC,EAAE,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC;IACpF,SAAA;IACF,KAAA;IAED,IAAA,OAAO,OAAO,CAAC;IACjB,CAAC;IAED,SAAS,MAAM,CAAI,KAAU,EAAE,KAAa,EAAE,KAAQ,EAAA;IACpD,IAAA,KAAK,IAAI,CAAC,GAAG,KAAK,CAAC,MAAM,EAAE,CAAC,GAAG,KAAK,EAAE,CAAC,EAAE,EAAE;YACzC,KAAK,CAAC,CAAC,CAAC,GAAG,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;IACzB,KAAA;IACD,IAAA,KAAK,CAAC,KAAK,CAAC,GAAG,KAAK,CAAC;IACvB,CAAC;IAED;IACA;IACA;IACA;IACA;IACA,SAAS,cAAc,GAAA;IACrB,IAAA,OAAO,EAAE,SAAS,EAAE,IAAI,EAAO,CAAC;IAClC;;ACxCa,UAAA,MAAM,GAAW,UAAU,GAAG,EAAE,MAAM,EAAA;IACjD,IAAA,MAAM,MAAM,GACV,OAAO,GAAG,KAAK,QAAQ,GAAI,IAAI,CAAC,KAAK,CAAC,GAAG,CAA8C,GAAG,GAAG,CAAC;IAEhG,IAAA,IAAI,EAAE,UAAU,IAAI,MAAM,CAAC;IAAE,QAAA,OAAO,IAAI,QAAQ,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QAEjE,MAAM,QAAQ,GAAyB,EAAE,CAAC;QAC1C,MAAM,OAAO,GAAa,EAAE,CAAC;QAC7B,MAAM,cAAc,GAAsB,EAAE,CAAC;QAC7C,MAAM,KAAK,GAAa,EAAE,CAAC;QAE3B,OAAO,CAAC,MAAM,EAAE,MAAM,EAAE,QAAQ,EAAE,OAAO,EAAE,cAAc,EAAE,KAAK,EAAE,CAAC,EAAE,CAAC,EAAE,QAAQ,EAAE,QAAQ,CAAC,CAAC;IAE5F,IAAA,MAAM,MAAM,GAAqB;IAC/B,QAAA,OAAO,EAAE,CAAC;YACV,IAAI,EAAE,MAAM,CAAC,IAAI;YACjB,KAAK;YACL,OAAO;YACP,cAAc;YACd,QAAQ;SACT,CAAC;IAEF,IAAA,OAAOC,2BAAmB,CAAC,MAAM,CAAC,CAAC;IACrC,EAAY;IAEZ,SAAS,OAAO,CACd,KAA6B,EAC7B,MAAiC,EACjC,QAA8B,EAC9B,OAAiB,EACjB,cAAiC,EACjC,KAAe,EACf,UAAkB,EAClB,YAAoB,EACpB,QAAgB,EAChB,UAAkB,EAAA;IAElB,IAAA,MAAM,EAAE,QAAQ,EAAE,GAAG,KAAK,CAAC;IAC3B,IAAA,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,QAAQ,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;
YACxC,MAAM,EAAE,GAAG,EAAE,MAAM,EAAE,GAAG,QAAQ,CAAC,CAAC,CAAC,CAAC;YAEpC,IAAI,EAAE,GAAG,QAAQ,CAAC;YAClB,IAAI,EAAE,GAAG,UAAU,CAAC;IACpB,QAAA,IAAI,CAAC,GAAG,CAAC,GAAG,QAAQ,CAAC,MAAM,EAAE;gBAC3B,MAAM,UAAU,GAAG,QAAQ,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC;IAC1C,YAAA,EAAE,GAAG,IAAI,CAAC,GAAG,CAAC,QAAQ,EAAE,UAAU,GAAG,UAAU,CAAC,IAAI,CAAC,CAAC;gBAEtD,IAAI,EAAE,KAAK,QAAQ,EAAE;IACnB,gBAAA,EAAE,GAAG,IAAI,CAAC,GAAG,CAAC,UAAU,EAAE,YAAY,GAAG,UAAU,CAAC,MAAM,CAAC,CAAC;IAC7D,aAAA;qBAAM,IAAI,EAAE,GAAG,QAAQ,EAAE;IACxB,gBAAA,EAAE,GAAG,YAAY,GAAG,UAAU,CAAC,MAAM,CAAC;IACvC,aAAA;IACF,SAAA;IAED,QAAA,UAAU,CACR,GAAG,EACH,MAAM,EACN,QAAQ,EACR,OAAO,EACP,cAAc,EACd,KAAK,EACL,UAAU,GAAG,MAAM,CAAC,IAAI,EACxB,YAAY,GAAG,MAAM,CAAC,MAAM,EAC5B,EAAE,EACF,EAAE,CACH,CAAC;IACH,KAAA;IACH,CAAC;IAED,SAAS,UAAU,CACjB,KAAyB,EACzB,MAAiC,EACjC,QAA8B,EAC9B,OAAiB,EACjB,cAAiC,EACjC,KAAe,EACf,UAAkB,EAClB,YAAoB,EACpB,QAAgB,EAChB,UAAkB,EAAA;QAElB,IAAI,UAAU,IAAI,KAAK;IAAE,QAAA,OAAO,OAAO,CAAC,GAAI,SAAmD,CAAC,CAAC;QAEjG,MAAM,GAAG,GAAG,IAAI,QAAQ,CAAC,KAAK,EAAE,MAAM,CAAC,CAAC;IACxC,IAAA,MAAM,aAAa,GAAG,OAAO,CAAC,MAAM,CAAC;IACrC,IAAA,MAAM,WAAW,GAAG,KAAK,CAAC,MAAM,CAAC;IACjC,IAAA,MAAM,OAAO,GAAGC,uBAAe,CAAC,GAAG,CAAC,CAAC;QACrC,MAAM,EAAE,eAAe,EAAE,cAAc,EAAE,QAAQ,EAAE,GAAG,GAAG,CAAC;IAE1D,IAAA,MAAM,CAAC,OAAO,EAAE,eAAe,CAAC,CAAC;IACjC,IAAA,MAAM,CAAC,KAAK,EAAE,GAAG,CAAC,KAAK,CAAC,CAAC;IACzB,IAAA,IAAI,QAAQ;IAAE,QAAA,MAAM,CAAC,cAAc,EAAE,QAAQ,CAAC,CAAC;;IAC1C,QAAA,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,eAAe,CAAC,MAAM,EAAE,CAAC,EAAE;IAAE,YAAA,cAAc,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IAEhF,IAAA,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,OAAO,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;IACvC,QAAA,MAAM,KAAK,GAAG,UAAU,GAAG,CAAC,CAAC;;;;;YAM7B,IAAI,KAAK,GAAG,QAAQ;gBAAE,OAAO;;;YAI7B,MAAM,GAAG,GAAG,OAAO,CAAC,QAAQ,EAAE,KAAK,CAAC,CAAC;;;IAGrC,QAAA,MAAM,OAAO,GAAG,CAAC,KAAK,CAAC,GAAG,YAAY,GAAG,CAAC,CAAC;IAE3C,QAAA,MAAM,IAAI,GAAG,OAAO,CAAC,CAAC,CAAC,CAAC;IACxB,QAAA,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;IACpC,YAAA,MAAM,GAAG,GAAG,IAAI,CAAC,CAAC,CAAC,CAAC;gBACpB,MAAM,MAAM,GAAG,OAAO,GAAG,GAAG,CAAC,MAAM,CAAC,CAAC;;;IAIrC,YAAA,IAAI,KAAK,KAAK,QAAQ,IAAI,MAAM,IAAI,UAAU;oBAAE,OAAO;IAEvD,YAAA,IAAI,GAAG,CAAC,MAAM,KAAK,CAAC,EAAE;IACpB,gBAAA,GAAG,CAAC,IAAI,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC;oBACnB,SAAS;IACV,aAAA;gBAED,MAAM,YAAY,GAAG,aAAa,GAAG,GAAG,CAAC,aAAa,CAAC,CAAC;IACxD,YAAA,MAAM,UAAU,GAAG,GAAG,CAAC,WAAW,CAAC,CAAC;IACpC,YAAA,MAAM,YAAY,GAAG,GAAG,CAAC,aAAa,CAAC,CAAC;IACxC,YAAA,GAAG,CAAC,IAAI,CACN,GAAG,CAAC,MAAM,KAAK,CAAC;sBACZ,CAAC,MAAM,EAAE,YAAY,EAAE,UAAU,EAAE,YAAY,CAAC;IAClD,kBAAE,CAAC,MAAM,EAAE,YAAY,EAAE,UAAU,EAAE,YAAY,EAAE,WAAW,GAAG,GAAG,CAAC,WAAW,CAAC,CAAC,CACrF,CAAC;IACH,SAAA;IACF,KAAA;IACH,CAAC;IAED,SAAS,MAAM,CAAI,GAAQ,EAAE,KAAU,EAAA;IACrC,IAAA,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,KAAK,CAAC,MAAM,EAAE,CAAC,EAAE;YAAE,GAAG,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC;IAC5D,CAAC;IAED,SAAS,OAAO,CAAI,GAAU,EAAE,KAAa,EAAA;IAC3C,IAAA,KAAK,IAAI,CAAC,GAAG,GAAG,CAAC,MAAM,EAAE,CAAC,IAAI,KAAK,EAAE,CAAC,EAAE;IAAE,QAAA,GAAG,CAAC,CAAC,CAAC,GAAG,EAAE,CAAC;IACtD,IAAA,OAAO,GAAG,CAAC,KAAK,CAAC,CAAC;IACpB;;IC7GA,MAAM,aAAa,GAAG,uDAAuD,CAAC;IAC9E,MAAM,eAAe,GAAG,yEAAyE,CAAC;AAErF,UAAA,iBAAiB,GAAG,CAAC,EAAE;AAC7B,UAAM,oBAAoB,GAAG,EAAE;IAEtC;;IAEG;AACQC,qCAAiE;IAE5E;;IAEG;AACQD,qCAA2E;IAEtF;;;IAGG;AACQE,kCAI4B;IAEvC;;;;IAIG;AACQC,yCAGmC;IAE9C;;IAEG;AACQC,0CAGqC;IAEhD;;IAEG;AACQC,8CAAsF;IAEjG;;IAEG;AACQC,iCAAyE;IAEpF;;IAEG;AACQC,sCAAmE;IAE9E;;;IAGG;AACQR,yCAA0E;IAErF;;;IAGG;AACQS,gCAE2E;IAEtF;;;IAGG;AACQC,gCAAgD;UAI9C,QAAQ,CAAA;QAiBnB,WAAY,CAAA,GAAmB,EAAE,MAAsB,EAAA;IACrD,QAAA,MAAM,QAAQ,GAAG,OAAO
,GAAG,KAAK,QAAQ,CAAC;IAEzC,QAAA,IAAI,CAAC,QAAQ,IAAK,GAAwC,CAAC,YAAY;IAAE,YAAA,OAAO,GAAe,CAAC;IAEhG,QAAA,MAAM,MAAM,IAAI,QAAQ,GAAG,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,GAAG,GAAG,CAAwC,CAAC;IAEzF,QAAA,MAAM,EAAE,OAAO,EAAE,IAAI,EAAE,KAAK,EAAE,UAAU,EAAE,OAAO,EAAE,cAAc,EAAE,GAAG,MAAM,CAAC;IAC7E,QAAA,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC;IACvB,QAAA,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC;IACjB,QAAA,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC;IACnB,QAAA,IAAI,CAAC,UAAU,GAAG,UAAU,CAAC;IAC7B,QAAA,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC;IACvB,QAAA,IAAI,CAAC,cAAc,GAAG,cAAc,CAAC;IAErC,QAAA,MAAM,IAAI,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,EAAE,aAAa,CAAC,MAAM,CAAC,CAAC,CAAC;YAC9D,IAAI,CAAC,eAAe,GAAG,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,OAAO,CAAC,CAAC,IAAI,EAAE,EAAE,IAAI,CAAC,CAAC,CAAC;IAElE,QAAA,MAAM,EAAE,QAAQ,EAAE,GAAG,MAAM,CAAC;IAC5B,QAAA,IAAI,OAAO,QAAQ,KAAK,QAAQ,EAAE;IAChC,YAAA,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;IACzB,YAAA,IAAI,CAAC,QAAQ,GAAG,SAAS,CAAC;IAC3B,SAAA;IAAM,aAAA;IACL,YAAA,IAAI,CAAC,QAAQ,GAAG,SAAS,CAAC;gBAC1B,IAAI,CAAC,QAAQ,GAAG,SAAS,CAAC,QAAQ,EAAE,QAAQ,CAAC,CAAC;IAC/C,SAAA;IAED,QAAA,IAAI,CAAC,YAAY,GAAG,aAAa,EAAE,CAAC;IACpC,QAAA,IAAI,CAAC,UAAU,GAAG,SAAS,CAAC;IAC5B,QAAA,IAAI,CAAC,cAAc,GAAG,SAAS,CAAC;SACjC;IAuLF,CAAA;IArLC,CAAA,MAAA;IACE,IAAAR,uBAAe,GAAG,CAAC,GAAG,KAAI;;IACxB,QAAA,cAAQ,GAAG,CAAC,QAAQ,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,EAAA,IAAZ,GAAG,CAAC,QAAQ,GAAKS,qBAAM,CAAC,GAAG,CAAC,QAAS,CAAC,GAAE;IAClD,KAAC,CAAC;IAEF,IAAAV,uBAAe,GAAG,CAAC,GAAG,KAAI;IACxB,QAAA,QAAQ,GAAG,CAAC,QAAQ,KAAZ,GAAG,CAAC,QAAQ,GAAKW,qBAAM,CAAC,GAAG,CAAC,QAAS,CAAC,GAAE;IAClD,KAAC,CAAC;QAEFT,oBAAY,GAAG,CAAC,GAAG,EAAE,IAAI,EAAE,MAAM,KAAI;IACnC,QAAA,MAAM,OAAO,GAAGF,uBAAe,CAAC,GAAG,CAAC,CAAC;;;IAIrC,QAAA,IAAI,IAAI,IAAI,OAAO,CAAC,MAAM;IAAE,YAAA,OAAO,IAAI,CAAC;IAExC,QAAA,MAAM,QAAQ,GAAG,OAAO,CAAC,IAAI,CAAC,CAAC;IAC/B,QAAA,MAAM,KAAK,GAAG,oBAAoB,CAChC,QAAQ,EACR,GAAG,CAAC,YAAY,EAChB,IAAI,EACJ,MAAM,EACN,oBAAoB,CACrB,CAAC;IAEF,QAAA,OAAO,KAAK,KAAK,CAAC,CAAC,GAAG,IAAI,GAAG,QAAQ,CAAC,KAAK,CAAC,CAAC;IAC/C,KAAC,CAAC;IAEF,IAAAG,2BAAmB,GAAG,CAAC,GAAG,EAAE,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,KAAI;IACpD,QAAA,IAAI,EAAE,CAAC;YACP,IAAI,IAAI,GAAG,CAAC;IAAE,YAAA,MAAM,IAAI,KAAK,CAAC,aAAa,CAAC,CAAC;YAC7C,IAAI,MAAM,GAAG,CAAC;IAAE,YAAA,MAAM,IAAI,KAAK,CAAC,eAAe,CAAC,CAAC;IAEjD,QAAA,MAAM,OAAO,GAAGH,uBAAe,CAAC,GAAG,CAAC,CAAC;;;IAIrC,QAAA,IAAI,IAAI,IAAI,OAAO,CAAC,MAAM;gBAAE,OAAO,QAAQ,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC;IAEpE,QAAA,MAAM,QAAQ,GAAG,OAAO,CAAC,IAAI,CAAC,CAAC;IAC/B,QAAA,MAAM,KAAK,GAAG,oBAAoB,CAChC,QAAQ,EACR,GAAG,CAAC,YAAY,EAChB,IAAI,EACJ,MAAM,EACN,IAAI,IAAI,oBAAoB,CAC7B,CAAC;YAEF,IAAI,KAAK,KAAK,CAAC,CAAC;gBAAE,OAAO,QAAQ,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC;IAE1D,QAAA,MAAM,OAAO,GAAG,QAAQ,CAAC,KAAK,CAAC,CAAC;IAChC,QAAA,IAAI,OAAO,CAAC,MAAM,KAAK,CAAC;gBAAE,OAAO,QAAQ,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC;IAElE,QAAA,MAAM,EAAE,KAAK,EAAE,eAAe,EAAE,GAAG,GAAG,CAAC;IACvC,QAAA,OAAO,QAAQ,CACb,eAAe,CAAC,OAAO,CAAC,aAAa,CAAC,CAAC,EACvC,OAAO,CAAC,WAAW,CAAC,GAAG,CAAC,EACxB,OAAO,CAAC,aAAa,CAAC,EACtB,OAAO,CAAC,MAAM,KAAK,CAAC,GAAG,KAAK,CAAC,OAAO,CAAC,WAAW,CAAC,CAAC,GAAG,IAAI,CAC1D,CAAC;IACJ,KAAC,CAAC;IAEF,IAAAK,gCAAwB,GAAG,CAAC,GAAG,EAAE,EAAE,MAAM,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,KAAI;;IAEjE,QAAA,OAAO,iBAAiB,CAAC,GAAG,EAAE,MAAM,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,IAAI,iBAAiB,EAAE,IAAI,CAAC,CAAC;IACvF,KAAC,CAAC;IAEF,IAAAD,4BAAoB,GAAG,CAAC,GAAG,EAAE,EAAE,MAAM,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,KAAI;IAC7D,QAAA,OAAO,iBAAiB,CAAC,GAAG,EAAE,MAAM,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,IAAI,oBAAoB,EAAE,KAAK,CAAC,CAAC;IAC3F,KAAC,CAAC;IAEF,IAAAE,mBAAW,GAAG,CAAC,GAAG,EAAE,EAAE,KAAI;IACxB,QAAA,MAAM,OAAO,GAAGN,uBA
Ae,CAAC,GAAG,CAAC,CAAC;IACrC,QAAA,MAAM,EAAE,KAAK,EAAE,eAAe,EAAE,GAAG,GAAG,CAAC;IAEvC,QAAA,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,OAAO,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;IACvC,YAAA,MAAM,IAAI,GAAG,OAAO,CAAC,CAAC,CAAC,CAAC;IACxB,YAAA,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;IACpC,gBAAA,MAAM,GAAG,GAAG,IAAI,CAAC,CAAC,CAAC,CAAC;IAEpB,gBAAA,MAAM,aAAa,GAAG,CAAC,GAAG,CAAC,CAAC;IAC5B,gBAAA,MAAM,eAAe,GAAG,GAAG,CAAC,CAAC,CAAC,CAAC;oBAC/B,IAAI,MAAM,GAAG,IAAI,CAAC;oBAClB,IAAI,YAAY,GAAG,IAAI,CAAC;oBACxB,IAAI,cAAc,GAAG,IAAI,CAAC;oBAC1B,IAAI,IAAI,GAAG,IAAI,CAAC;IAChB,gBAAA,IAAI,GAAG,CAAC,MAAM,KAAK,CAAC,EAAE;wBACpB,MAAM,GAAG,eAAe,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;IACjC,oBAAA,YAAY,GAAG,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC;IAC1B,oBAAA,cAAc,GAAG,GAAG,CAAC,CAAC,CAAC,CAAC;IACzB,iBAAA;IACD,gBAAA,IAAI,GAAG,CAAC,MAAM,KAAK,CAAC;wBAAE,IAAI,GAAG,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;IAE3C,gBAAA,EAAE,CAAC;wBACD,aAAa;wBACb,eAAe;wBACf,MAAM;wBACN,YAAY;wBACZ,cAAc;wBACd,IAAI;IACU,iBAAA,CAAC,CAAC;IACnB,aAAA;IACF,SAAA;IACH,KAAC,CAAC;IAEF,IAAAO,wBAAgB,GAAG,CAAC,GAAG,EAAE,MAAM,KAAI;YACjC,MAAM,EAAE,OAAO,EAAE,eAAe,EAAE,cAAc,EAAE,GAAG,GAAG,CAAC;YACzD,IAAI,cAAc,IAAI,IAAI;IAAE,YAAA,OAAO,IAAI,CAAC;YAExC,IAAI,KAAK,GAAG,OAAO,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;YACpC,IAAI,KAAK,KAAK,CAAC,CAAC;IAAE,YAAA,KAAK,GAAG,eAAe,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;IAE1D,QAAA,OAAO,KAAK,KAAK,CAAC,CAAC,GAAG,IAAI,GAAG,cAAc,CAAC,KAAK,CAAC,CAAC;IACrD,KAAC,CAAC;IAEF,IAAAR,2BAAmB,GAAG,CAAC,GAAG,EAAE,MAAM,KAAI;IACpC,QAAA,MAAM,MAAM,GAAG,IAAI,QAAQ,CAAC,KAAK,CAAC,GAAG,EAAE,EAAE,CAAC,EAAE,MAAM,CAAC,CAAC;IACpD,QAAA,MAAM,CAAC,QAAQ,GAAG,GAAG,CAAC,QAAQ,CAAC;IAC/B,QAAA,OAAO,MAAM,CAAC;IAChB,KAAC,CAAC;IAEF,IAAAS,kBAAU,GAAG,CAAC,GAAG,KAAI;YACnB,OAAO,KAAK,CAAC,GAAG,EAAER,uBAAe,CAAC,GAAG,CAAC,CAAC,CAAC;IAC1C,KAAC,CAAC;IAEF,IAAAS,kBAAU,GAAG,CAAC,GAAG,KAAI;YACnB,OAAO,KAAK,CAAC,GAAG,EAAER,uBAAe,CAAC,GAAG,CAAC,CAAC,CAAC;IAC1C,KAAC,CAAC;IAkBF,IAAA,SAAS,iBAAiB,CACxB,GAAa,EACb,MAAc,EACd,IAAY,EACZ,MAAc,EACd,IAAU,EACV,GAAY,EAAA;IAEZ,QAAA,IAAI,EAAE,CAAC;YACP,IAAI,IAAI,GAAG,CAAC;IAAE,YAAA,MAAM,IAAI,KAAK,CAAC,aAAa,CAAC,CAAC;YAC7C,IAAI,MAAM,GAAG,CAAC;IAAE,YAAA,MAAM,IAAI,KAAK,CAAC,eAAe,CAAC,CAAC;IAEjD,QAAA,MAAM,EAAE,OAAO,EAAE,eAAe,EAAE,GAAG,GAAG,CAAC;YACzC,IAAI,WAAW,GAAG,OAAO,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;YAC1C,IAAI,WAAW,KAAK,CAAC,CAAC;IAAE,YAAA,WAAW,GAAG,eAAe,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;YACtE,IAAI,WAAW,KAAK,CAAC,CAAC;IAAE,YAAA,OAAO,GAAG,GAAG,EAAE,GAAG,QAAQ,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC;IAE/D,QAAA,MAAM,SAAS,IAAI,GAAG,CAAC,UAAU,KAAd,GAAG,CAAC,UAAU,GAAK,cAAc,CAClDD,uBAAe,CAAC,GAAG,CAAC,GACnB,GAAG,CAAC,cAAc,GAAG,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,EACjD,EAAC,CAAC;YAEH,MAAM,QAAQ,GAAG,SAAS,CAAC,WAAW,CAAC,CAAC,IAAI,CAAC,CAAC;YAC9C,IAAI,QAAQ,IAAI,IAAI;IAAE,YAAA,OAAO,GAAG,GAAG,EAAE,GAAG,QAAQ,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC;YAE7D,MAAM,IAAI,GAAG,GAAG,CAAC,cAAe,CAAC,WAAW,CAAC,CAAC;IAE9C,QAAA,IAAI,GAAG;IAAE,YAAA,OAAO,uBAAuB,CAAC,QAAQ,EAAE,IAAI,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,CAAC,CAAC;IAE5E,QAAA,MAAM,KAAK,GAAG,oBAAoB,CAAC,QAAQ,EAAE,IAAI,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,CAAC,CAAC;YACvE,IAAI,KAAK,KAAK,CAAC,CAAC;IAAE,YAAA,OAAO,QAAQ,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC;IAE9C,QAAA,MAAM,OAAO,GAAG,QAAQ,CAAC,KAAK,CAAC,CAAC;IAChC,QAAA,OAAO,QAAQ,CAAC,OAAO,CAAC,kBAAkB,CAAC,GAAG,CAAC,EAAE,OAAO,CAAC,oBAAoB,CAAC,CAAC,CAAC;SACjF;IACH,CAAC,GAAA,CAAA;IAGH,SAAS,KAAK,CACZ,GAAmD,EACnD,QAAW,EAAA;QAEX,OAAO;YACL,OAAO,EAAE,GAAG,CAAC,OAAO;YACpB,IAAI,EAAE,GAAG,CAAC,IAAI;YACd,KAAK,EAAE,GAAG,CAAC,KAAK;YAChB,UAAU,EAAE,GAAG,CAAC,UAAU;YAC1B,OAAO,EAAE,GAAG,CAAC,OAAO;YACpB,cAAc,EAAE,GAAG,CAAC,cAAc;YAClC,QAAQ;SACF,CAAC;IACX,CAAC;I
ASD,SAAS,QAAQ,CACf,MAAqB,EACrB,IAAmB,EACnB,MAAqB,EACrB,IAAmB,EAAA;QAEnB,OAAO,EAAE,MAAM,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,EAAS,CAAC;IAC/C,CAAC;IAID,SAAS,QAAQ,CACf,IAAmB,EACnB,MAAqB,EAAA;IAErB,IAAA,OAAO,EAAE,IAAI,EAAE,MAAM,EAAS,CAAC;IACjC,CAAC;IAgBD,SAAS,oBAAoB,CAC3B,QAA+C,EAC/C,IAAe,EACf,IAAY,EACZ,MAAc,EACd,IAAU,EAAA;IAEV,IAAA,IAAI,KAAK,GAAG,oBAAoB,CAAC,QAAQ,EAAE,MAAM,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC;IAC/D,IAAA,IAAIY,KAAO,EAAE;YACX,KAAK,GAAG,CAAC,IAAI,KAAK,iBAAiB,GAAG,UAAU,GAAG,UAAU,EAAE,QAAQ,EAAE,MAAM,EAAE,KAAK,CAAC,CAAC;IACzF,KAAA;aAAM,IAAI,IAAI,KAAK,iBAAiB;IAAE,QAAA,KAAK,EAAE,CAAC;QAE/C,IAAI,KAAK,KAAK,CAAC,CAAC,IAAI,KAAK,KAAK,QAAQ,CAAC,MAAM;YAAE,OAAO,CAAC,CAAC,CAAC;IACzD,IAAA,OAAO,KAAK,CAAC;IACf,CAAC;IAED,SAAS,uBAAuB,CAC9B,QAA0B,EAC1B,IAAe,EACf,IAAY,EACZ,MAAc,EACd,IAAU,EAAA;IAEV,IAAA,IAAI,GAAG,GAAG,oBAAoB,CAAC,QAAQ,EAAE,IAAI,EAAE,IAAI,EAAE,MAAM,EAAE,oBAAoB,CAAC,CAAC;;;;;;;IAQnF,IAAA,IAAI,CAACA,KAAO,IAAI,IAAI,KAAK,iBAAiB;IAAE,QAAA,GAAG,EAAE,CAAC;QAElD,IAAI,GAAG,KAAK,CAAC,CAAC,IAAI,GAAG,KAAK,QAAQ,CAAC,MAAM;IAAE,QAAA,OAAO,EAAE,CAAC;;;;IAKrD,IAAA,MAAM,aAAa,GAAGA,KAAO,GAAG,MAAM,GAAG,QAAQ,CAAC,GAAG,CAAC,CAAC,MAAM,CAAC,CAAC;;IAG/D,IAAA,IAAI,CAACA,KAAO;YAAE,GAAG,GAAG,UAAU,CAAC,QAAQ,EAAE,aAAa,EAAE,GAAG,CAAC,CAAC;QAC7D,MAAM,GAAG,GAAG,UAAU,CAAC,QAAQ,EAAE,aAAa,EAAE,GAAG,CAAC,CAAC;QAErD,MAAM,MAAM,GAAG,EAAE,CAAC;IAClB,IAAA,OAAO,GAAG,IAAI,GAAG,EAAE,GAAG,EAAE,EAAE;IACxB,QAAA,MAAM,OAAO,GAAG,QAAQ,CAAC,GAAG,CAAC,CAAC;IAC9B,QAAA,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,kBAAkB,CAAC,GAAG,CAAC,EAAE,OAAO,CAAC,oBAAoB,CAAC,CAAC,CAAC,CAAC;IACvF,KAAA;IACD,IAAA,OAAO,MAAM,CAAC;IAChB;;;;;;;;;;;;;"} \ No newline at end of file diff --git a/node_modules/@jridgewell/trace-mapping/dist/types/any-map.d.ts b/node_modules/@jridgewell/trace-mapping/dist/types/any-map.d.ts new file mode 100644 index 0000000..08bca6b --- /dev/null +++ b/node_modules/@jridgewell/trace-mapping/dist/types/any-map.d.ts @@ -0,0 +1,8 @@ +import { TraceMap } from './trace-mapping'; +import type { SectionedSourceMapInput } from './types'; +declare type AnyMap = { + new (map: SectionedSourceMapInput, mapUrl?: string | null): TraceMap; + (map: SectionedSourceMapInput, mapUrl?: string | null): TraceMap; +}; +export declare const AnyMap: AnyMap; +export {}; diff --git a/node_modules/@jridgewell/trace-mapping/dist/types/binary-search.d.ts b/node_modules/@jridgewell/trace-mapping/dist/types/binary-search.d.ts new file mode 100644 index 0000000..88820e5 --- /dev/null +++ b/node_modules/@jridgewell/trace-mapping/dist/types/binary-search.d.ts @@ -0,0 +1,32 @@ +import type { SourceMapSegment, ReverseSegment } from './sourcemap-segment'; +export declare type MemoState = { + lastKey: number; + lastNeedle: number; + lastIndex: number; +}; +export declare let found: boolean; +/** + * A binary search implementation that returns the index if a match is found. + * If no match is found, then the left-index (the index associated with the item that comes just + * before the desired index) is returned. 
To maintain proper sort order, a splice would happen at + * the next index: + * + * ```js + * const array = [1, 3]; + * const needle = 2; + * const index = binarySearch(array, needle, (item, needle) => item - needle); + * + * assert.equal(index, 0); + * array.splice(index + 1, 0, needle); + * assert.deepEqual(array, [1, 2, 3]); + * ``` + */ +export declare function binarySearch(haystack: SourceMapSegment[] | ReverseSegment[], needle: number, low: number, high: number): number; +export declare function upperBound(haystack: SourceMapSegment[] | ReverseSegment[], needle: number, index: number): number; +export declare function lowerBound(haystack: SourceMapSegment[] | ReverseSegment[], needle: number, index: number): number; +export declare function memoizedState(): MemoState; +/** + * This overly complicated beast is just to record the last tested line/column and the resulting + * index, allowing us to skip a few tests if mappings are monotonically increasing. + */ +export declare function memoizedBinarySearch(haystack: SourceMapSegment[] | ReverseSegment[], needle: number, state: MemoState, key: number): number; diff --git a/node_modules/@jridgewell/trace-mapping/dist/types/by-source.d.ts b/node_modules/@jridgewell/trace-mapping/dist/types/by-source.d.ts new file mode 100644 index 0000000..8d1e538 --- /dev/null +++ b/node_modules/@jridgewell/trace-mapping/dist/types/by-source.d.ts @@ -0,0 +1,7 @@ +import type { ReverseSegment, SourceMapSegment } from './sourcemap-segment'; +import type { MemoState } from './binary-search'; +export declare type Source = { + __proto__: null; + [line: number]: Exclude[]; +}; +export default function buildBySources(decoded: readonly SourceMapSegment[][], memos: MemoState[]): Source[]; diff --git a/node_modules/@jridgewell/trace-mapping/dist/types/resolve.d.ts b/node_modules/@jridgewell/trace-mapping/dist/types/resolve.d.ts new file mode 100644 index 0000000..cf7d4f8 --- /dev/null +++ b/node_modules/@jridgewell/trace-mapping/dist/types/resolve.d.ts @@ -0,0 +1 @@ +export default function resolve(input: string, base: string | undefined): string; diff --git a/node_modules/@jridgewell/trace-mapping/dist/types/sort.d.ts b/node_modules/@jridgewell/trace-mapping/dist/types/sort.d.ts new file mode 100644 index 0000000..2bfb5dc --- /dev/null +++ b/node_modules/@jridgewell/trace-mapping/dist/types/sort.d.ts @@ -0,0 +1,2 @@ +import type { SourceMapSegment } from './sourcemap-segment'; +export default function maybeSort(mappings: SourceMapSegment[][], owned: boolean): SourceMapSegment[][]; diff --git a/node_modules/@jridgewell/trace-mapping/dist/types/sourcemap-segment.d.ts b/node_modules/@jridgewell/trace-mapping/dist/types/sourcemap-segment.d.ts new file mode 100644 index 0000000..6d70924 --- /dev/null +++ b/node_modules/@jridgewell/trace-mapping/dist/types/sourcemap-segment.d.ts @@ -0,0 +1,16 @@ +declare type GeneratedColumn = number; +declare type SourcesIndex = number; +declare type SourceLine = number; +declare type SourceColumn = number; +declare type NamesIndex = number; +declare type GeneratedLine = number; +export declare type SourceMapSegment = [GeneratedColumn] | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn] | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn, NamesIndex]; +export declare type ReverseSegment = [SourceColumn, GeneratedLine, GeneratedColumn]; +export declare const COLUMN = 0; +export declare const SOURCES_INDEX = 1; +export declare const SOURCE_LINE = 2; +export declare const SOURCE_COLUMN = 3; +export declare const NAMES_INDEX 
= 4; +export declare const REV_GENERATED_LINE = 1; +export declare const REV_GENERATED_COLUMN = 2; +export {}; diff --git a/node_modules/@jridgewell/trace-mapping/dist/types/strip-filename.d.ts b/node_modules/@jridgewell/trace-mapping/dist/types/strip-filename.d.ts new file mode 100644 index 0000000..bead5c1 --- /dev/null +++ b/node_modules/@jridgewell/trace-mapping/dist/types/strip-filename.d.ts @@ -0,0 +1,4 @@ +/** + * Removes everything after the last "/", but leaves the slash. + */ +export default function stripFilename(path: string | undefined | null): string; diff --git a/node_modules/@jridgewell/trace-mapping/dist/types/trace-mapping.d.ts b/node_modules/@jridgewell/trace-mapping/dist/types/trace-mapping.d.ts new file mode 100644 index 0000000..c125ead --- /dev/null +++ b/node_modules/@jridgewell/trace-mapping/dist/types/trace-mapping.d.ts @@ -0,0 +1,74 @@ +import type { SourceMapSegment } from './sourcemap-segment'; +import type { SourceMapV3, DecodedSourceMap, EncodedSourceMap, InvalidOriginalMapping, OriginalMapping, InvalidGeneratedMapping, GeneratedMapping, SourceMapInput, Needle, SourceNeedle, SourceMap, EachMapping } from './types'; +export type { SourceMapSegment } from './sourcemap-segment'; +export type { SourceMapInput, SectionedSourceMapInput, DecodedSourceMap, EncodedSourceMap, SectionedSourceMap, InvalidOriginalMapping, OriginalMapping as Mapping, OriginalMapping, InvalidGeneratedMapping, GeneratedMapping, EachMapping, } from './types'; +export declare const LEAST_UPPER_BOUND = -1; +export declare const GREATEST_LOWER_BOUND = 1; +/** + * Returns the encoded (VLQ string) form of the SourceMap's mappings field. + */ +export declare let encodedMappings: (map: TraceMap) => EncodedSourceMap['mappings']; +/** + * Returns the decoded (array of lines of segments) form of the SourceMap's mappings field. + */ +export declare let decodedMappings: (map: TraceMap) => Readonly; +/** + * A low-level API to find the segment associated with a generated line/column (think, from a + * stack trace). Line and column here are 0-based, unlike `originalPositionFor`. + */ +export declare let traceSegment: (map: TraceMap, line: number, column: number) => Readonly | null; +/** + * A higher-level API to find the source/line/column associated with a generated line/column + * (think, from a stack trace). Line is 1-based, but column is 0-based, due to legacy behavior in + * `source-map` library. + */ +export declare let originalPositionFor: (map: TraceMap, needle: Needle) => OriginalMapping | InvalidOriginalMapping; +/** + * Finds the generated line/column position of the provided source/line/column source position. + */ +export declare let generatedPositionFor: (map: TraceMap, needle: SourceNeedle) => GeneratedMapping | InvalidGeneratedMapping; +/** + * Finds all generated line/column positions of the provided source/line/column source position. + */ +export declare let allGeneratedPositionsFor: (map: TraceMap, needle: SourceNeedle) => GeneratedMapping[]; +/** + * Iterates each mapping in generated position order. + */ +export declare let eachMapping: (map: TraceMap, cb: (mapping: EachMapping) => void) => void; +/** + * Retrieves the source content for a particular source, if its found. Returns null if not. + */ +export declare let sourceContentFor: (map: TraceMap, source: string) => string | null; +/** + * A helper that skips sorting of the input map's mappings array, which can be expensive for larger + * maps. 
+ */ +export declare let presortedDecodedMap: (map: DecodedSourceMap, mapUrl?: string) => TraceMap; +/** + * Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects + * a sourcemap, or to JSON.stringify. + */ +export declare let decodedMap: (map: TraceMap) => Omit & { + mappings: readonly SourceMapSegment[][]; +}; +/** + * Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects + * a sourcemap, or to JSON.stringify. + */ +export declare let encodedMap: (map: TraceMap) => EncodedSourceMap; +export { AnyMap } from './any-map'; +export declare class TraceMap implements SourceMap { + version: SourceMapV3['version']; + file: SourceMapV3['file']; + names: SourceMapV3['names']; + sourceRoot: SourceMapV3['sourceRoot']; + sources: SourceMapV3['sources']; + sourcesContent: SourceMapV3['sourcesContent']; + resolvedSources: string[]; + private _encoded; + private _decoded; + private _decodedMemo; + private _bySources; + private _bySourceMemos; + constructor(map: SourceMapInput, mapUrl?: string | null); +} diff --git a/node_modules/@jridgewell/trace-mapping/dist/types/types.d.ts b/node_modules/@jridgewell/trace-mapping/dist/types/types.d.ts new file mode 100644 index 0000000..2f4fd45 --- /dev/null +++ b/node_modules/@jridgewell/trace-mapping/dist/types/types.d.ts @@ -0,0 +1,92 @@ +import type { SourceMapSegment } from './sourcemap-segment'; +import type { GREATEST_LOWER_BOUND, LEAST_UPPER_BOUND, TraceMap } from './trace-mapping'; +export interface SourceMapV3 { + file?: string | null; + names: string[]; + sourceRoot?: string; + sources: (string | null)[]; + sourcesContent?: (string | null)[]; + version: 3; +} +export interface EncodedSourceMap extends SourceMapV3 { + mappings: string; +} +export interface DecodedSourceMap extends SourceMapV3 { + mappings: SourceMapSegment[][]; +} +export interface Section { + offset: { + line: number; + column: number; + }; + map: EncodedSourceMap | DecodedSourceMap | SectionedSourceMap; +} +export interface SectionedSourceMap { + file?: string | null; + sections: Section[]; + version: 3; +} +export declare type OriginalMapping = { + source: string | null; + line: number; + column: number; + name: string | null; +}; +export declare type InvalidOriginalMapping = { + source: null; + line: null; + column: null; + name: null; +}; +export declare type GeneratedMapping = { + line: number; + column: number; +}; +export declare type InvalidGeneratedMapping = { + line: null; + column: null; +}; +export declare type Bias = typeof GREATEST_LOWER_BOUND | typeof LEAST_UPPER_BOUND; +export declare type SourceMapInput = string | Ro | Ro | TraceMap; +export declare type SectionedSourceMapInput = SourceMapInput | Ro; +export declare type Needle = { + line: number; + column: number; + bias?: Bias; +}; +export declare type SourceNeedle = { + source: string; + line: number; + column: number; + bias?: Bias; +}; +export declare type EachMapping = { + generatedLine: number; + generatedColumn: number; + source: null; + originalLine: null; + originalColumn: null; + name: null; +} | { + generatedLine: number; + generatedColumn: number; + source: string | null; + originalLine: number; + originalColumn: number; + name: string | null; +}; +export declare abstract class SourceMap { + version: SourceMapV3['version']; + file: SourceMapV3['file']; + names: SourceMapV3['names']; + sourceRoot: SourceMapV3['sourceRoot']; + sources: SourceMapV3['sources']; + sourcesContent: SourceMapV3['sourcesContent']; + resolvedSources: 
SourceMapV3['sources']; +} +export declare type Ro = T extends Array ? V[] | Readonly | RoArray | Readonly> : T extends object ? T | Readonly | RoObject | Readonly> : T; +declare type RoArray = Ro[]; +declare type RoObject = { + [K in keyof T]: T[K] | Ro; +}; +export {}; diff --git a/node_modules/@jridgewell/trace-mapping/node_modules/@jridgewell/sourcemap-codec/LICENSE b/node_modules/@jridgewell/trace-mapping/node_modules/@jridgewell/sourcemap-codec/LICENSE new file mode 100644 index 0000000..a331065 --- /dev/null +++ b/node_modules/@jridgewell/trace-mapping/node_modules/@jridgewell/sourcemap-codec/LICENSE @@ -0,0 +1,21 @@ +The MIT License + +Copyright (c) 2015 Rich Harris + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/@jridgewell/trace-mapping/node_modules/@jridgewell/sourcemap-codec/README.md b/node_modules/@jridgewell/trace-mapping/node_modules/@jridgewell/sourcemap-codec/README.md new file mode 100644 index 0000000..2b9e397 --- /dev/null +++ b/node_modules/@jridgewell/trace-mapping/node_modules/@jridgewell/sourcemap-codec/README.md @@ -0,0 +1,200 @@ +# sourcemap-codec + +Encode/decode the `mappings` property of a [sourcemap](https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit). + + +## Why? + +Sourcemaps are difficult to generate and manipulate, because the `mappings` property – the part that actually links the generated code back to the original source – is encoded using an obscure method called [Variable-length quantity](https://en.wikipedia.org/wiki/Variable-length_quantity). On top of that, each segment in the mapping contains offsets rather than absolute indices, which means that you can't look at a segment in isolation – you have to understand the whole sourcemap. + +This package makes the process slightly easier. + + +## Installation + +```bash +npm install sourcemap-codec +``` + + +## Usage + +```js +import { encode, decode } from 'sourcemap-codec'; + +var decoded = decode( ';EAEEA,EAAE,EAAC,CAAE;ECQY,UACC' ); + +assert.deepEqual( decoded, [ + // the first line (of the generated code) has no mappings, + // as shown by the starting semi-colon (which separates lines) + [], + + // the second line contains four (comma-separated) segments + [ + // segments are encoded as you'd expect: + // [ generatedCodeColumn, sourceIndex, sourceCodeLine, sourceCodeColumn, nameIndex ] + + // i.e. 
the first segment begins at column 2, and maps back to the second column + // of the second line (both zero-based) of the 0th source, and uses the 0th + // name in the `map.names` array + [ 2, 0, 2, 2, 0 ], + + // the remaining segments are 4-length rather than 5-length, + // because they don't map a name + [ 4, 0, 2, 4 ], + [ 6, 0, 2, 5 ], + [ 7, 0, 2, 7 ] + ], + + // the final line contains two segments + [ + [ 2, 1, 10, 19 ], + [ 12, 1, 11, 20 ] + ] +]); + +var encoded = encode( decoded ); +assert.equal( encoded, ';EAEEA,EAAE,EAAC,CAAE;ECQY,UACC' ); +``` + +## Benchmarks + +``` +node v18.0.0 + +amp.js.map - 45120 segments + +Decode Memory Usage: +@jridgewell/sourcemap-codec 5479160 bytes +sourcemap-codec 5659336 bytes +source-map-0.6.1 17144440 bytes +source-map-0.8.0 6867424 bytes +Smallest memory usage is @jridgewell/sourcemap-codec + +Decode speed: +decode: @jridgewell/sourcemap-codec x 502 ops/sec ±1.03% (90 runs sampled) +decode: sourcemap-codec x 445 ops/sec ±0.97% (92 runs sampled) +decode: source-map-0.6.1 x 36.01 ops/sec ±1.64% (49 runs sampled) +decode: source-map-0.8.0 x 367 ops/sec ±0.04% (95 runs sampled) +Fastest is decode: @jridgewell/sourcemap-codec + +Encode Memory Usage: +@jridgewell/sourcemap-codec 1261620 bytes +sourcemap-codec 9119248 bytes +source-map-0.6.1 8968560 bytes +source-map-0.8.0 8952952 bytes +Smallest memory usage is @jridgewell/sourcemap-codec + +Encode speed: +encode: @jridgewell/sourcemap-codec x 738 ops/sec ±0.42% (98 runs sampled) +encode: sourcemap-codec x 238 ops/sec ±0.73% (88 runs sampled) +encode: source-map-0.6.1 x 162 ops/sec ±0.43% (84 runs sampled) +encode: source-map-0.8.0 x 191 ops/sec ±0.34% (90 runs sampled) +Fastest is encode: @jridgewell/sourcemap-codec + + +*** + + +babel.min.js.map - 347793 segments + +Decode Memory Usage: +@jridgewell/sourcemap-codec 35338184 bytes +sourcemap-codec 35922736 bytes +source-map-0.6.1 62366360 bytes +source-map-0.8.0 44337416 bytes +Smallest memory usage is @jridgewell/sourcemap-codec + +Decode speed: +decode: @jridgewell/sourcemap-codec x 40.35 ops/sec ±4.47% (54 runs sampled) +decode: sourcemap-codec x 36.76 ops/sec ±3.67% (51 runs sampled) +decode: source-map-0.6.1 x 4.44 ops/sec ±2.15% (16 runs sampled) +decode: source-map-0.8.0 x 59.35 ops/sec ±0.05% (78 runs sampled) +Fastest is decode: source-map-0.8.0 + +Encode Memory Usage: +@jridgewell/sourcemap-codec 7212604 bytes +sourcemap-codec 21421456 bytes +source-map-0.6.1 25286888 bytes +source-map-0.8.0 25498744 bytes +Smallest memory usage is @jridgewell/sourcemap-codec + +Encode speed: +encode: @jridgewell/sourcemap-codec x 112 ops/sec ±0.13% (84 runs sampled) +encode: sourcemap-codec x 30.23 ops/sec ±2.76% (53 runs sampled) +encode: source-map-0.6.1 x 19.43 ops/sec ±3.70% (37 runs sampled) +encode: source-map-0.8.0 x 19.40 ops/sec ±3.26% (37 runs sampled) +Fastest is encode: @jridgewell/sourcemap-codec + + +*** + + +preact.js.map - 1992 segments + +Decode Memory Usage: +@jridgewell/sourcemap-codec 500272 bytes +sourcemap-codec 516864 bytes +source-map-0.6.1 1596672 bytes +source-map-0.8.0 517272 bytes +Smallest memory usage is @jridgewell/sourcemap-codec + +Decode speed: +decode: @jridgewell/sourcemap-codec x 16,137 ops/sec ±0.17% (99 runs sampled) +decode: sourcemap-codec x 12,139 ops/sec ±0.13% (99 runs sampled) +decode: source-map-0.6.1 x 1,264 ops/sec ±0.12% (100 runs sampled) +decode: source-map-0.8.0 x 9,894 ops/sec ±0.08% (101 runs sampled) +Fastest is decode: @jridgewell/sourcemap-codec + +Encode Memory Usage: +@jridgewell/sourcemap-codec 
321026 bytes +sourcemap-codec 830832 bytes +source-map-0.6.1 586608 bytes +source-map-0.8.0 586680 bytes +Smallest memory usage is @jridgewell/sourcemap-codec + +Encode speed: +encode: @jridgewell/sourcemap-codec x 19,876 ops/sec ±0.78% (95 runs sampled) +encode: sourcemap-codec x 6,983 ops/sec ±0.15% (100 runs sampled) +encode: source-map-0.6.1 x 5,070 ops/sec ±0.12% (102 runs sampled) +encode: source-map-0.8.0 x 5,641 ops/sec ±0.17% (100 runs sampled) +Fastest is encode: @jridgewell/sourcemap-codec + + +*** + + +react.js.map - 5726 segments + +Decode Memory Usage: +@jridgewell/sourcemap-codec 734848 bytes +sourcemap-codec 954200 bytes +source-map-0.6.1 2276432 bytes +source-map-0.8.0 955488 bytes +Smallest memory usage is @jridgewell/sourcemap-codec + +Decode speed: +decode: @jridgewell/sourcemap-codec x 5,723 ops/sec ±0.12% (98 runs sampled) +decode: sourcemap-codec x 4,555 ops/sec ±0.09% (101 runs sampled) +decode: source-map-0.6.1 x 437 ops/sec ±0.11% (93 runs sampled) +decode: source-map-0.8.0 x 3,441 ops/sec ±0.15% (100 runs sampled) +Fastest is decode: @jridgewell/sourcemap-codec + +Encode Memory Usage: +@jridgewell/sourcemap-codec 638672 bytes +sourcemap-codec 1109840 bytes +source-map-0.6.1 1321224 bytes +source-map-0.8.0 1324448 bytes +Smallest memory usage is @jridgewell/sourcemap-codec + +Encode speed: +encode: @jridgewell/sourcemap-codec x 6,801 ops/sec ±0.48% (98 runs sampled) +encode: sourcemap-codec x 2,533 ops/sec ±0.13% (101 runs sampled) +encode: source-map-0.6.1 x 2,248 ops/sec ±0.08% (100 runs sampled) +encode: source-map-0.8.0 x 2,303 ops/sec ±0.15% (100 runs sampled) +Fastest is encode: @jridgewell/sourcemap-codec +``` + +# License + +MIT diff --git a/node_modules/@jridgewell/trace-mapping/node_modules/@jridgewell/sourcemap-codec/dist/sourcemap-codec.mjs b/node_modules/@jridgewell/trace-mapping/node_modules/@jridgewell/sourcemap-codec/dist/sourcemap-codec.mjs new file mode 100644 index 0000000..3dff372 --- /dev/null +++ b/node_modules/@jridgewell/trace-mapping/node_modules/@jridgewell/sourcemap-codec/dist/sourcemap-codec.mjs @@ -0,0 +1,164 @@ +const comma = ','.charCodeAt(0); +const semicolon = ';'.charCodeAt(0); +const chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'; +const intToChar = new Uint8Array(64); // 64 possible chars. +const charToInt = new Uint8Array(128); // z is 122 in ASCII +for (let i = 0; i < chars.length; i++) { + const c = chars.charCodeAt(i); + intToChar[i] = c; + charToInt[c] = i; +} +// Provide a fallback for older environments. +const td = typeof TextDecoder !== 'undefined' + ? /* #__PURE__ */ new TextDecoder() + : typeof Buffer !== 'undefined' + ? 
{ + decode(buf) { + const out = Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength); + return out.toString(); + }, + } + : { + decode(buf) { + let out = ''; + for (let i = 0; i < buf.length; i++) { + out += String.fromCharCode(buf[i]); + } + return out; + }, + }; +function decode(mappings) { + const state = new Int32Array(5); + const decoded = []; + let index = 0; + do { + const semi = indexOf(mappings, index); + const line = []; + let sorted = true; + let lastCol = 0; + state[0] = 0; + for (let i = index; i < semi; i++) { + let seg; + i = decodeInteger(mappings, i, state, 0); // genColumn + const col = state[0]; + if (col < lastCol) + sorted = false; + lastCol = col; + if (hasMoreVlq(mappings, i, semi)) { + i = decodeInteger(mappings, i, state, 1); // sourcesIndex + i = decodeInteger(mappings, i, state, 2); // sourceLine + i = decodeInteger(mappings, i, state, 3); // sourceColumn + if (hasMoreVlq(mappings, i, semi)) { + i = decodeInteger(mappings, i, state, 4); // namesIndex + seg = [col, state[1], state[2], state[3], state[4]]; + } + else { + seg = [col, state[1], state[2], state[3]]; + } + } + else { + seg = [col]; + } + line.push(seg); + } + if (!sorted) + sort(line); + decoded.push(line); + index = semi + 1; + } while (index <= mappings.length); + return decoded; +} +function indexOf(mappings, index) { + const idx = mappings.indexOf(';', index); + return idx === -1 ? mappings.length : idx; +} +function decodeInteger(mappings, pos, state, j) { + let value = 0; + let shift = 0; + let integer = 0; + do { + const c = mappings.charCodeAt(pos++); + integer = charToInt[c]; + value |= (integer & 31) << shift; + shift += 5; + } while (integer & 32); + const shouldNegate = value & 1; + value >>>= 1; + if (shouldNegate) { + value = -0x80000000 | -value; + } + state[j] += value; + return pos; +} +function hasMoreVlq(mappings, i, length) { + if (i >= length) + return false; + return mappings.charCodeAt(i) !== comma; +} +function sort(line) { + line.sort(sortComparator); +} +function sortComparator(a, b) { + return a[0] - b[0]; +} +function encode(decoded) { + const state = new Int32Array(5); + const bufLength = 1024 * 16; + const subLength = bufLength - 36; + const buf = new Uint8Array(bufLength); + const sub = buf.subarray(0, subLength); + let pos = 0; + let out = ''; + for (let i = 0; i < decoded.length; i++) { + const line = decoded[i]; + if (i > 0) { + if (pos === bufLength) { + out += td.decode(buf); + pos = 0; + } + buf[pos++] = semicolon; + } + if (line.length === 0) + continue; + state[0] = 0; + for (let j = 0; j < line.length; j++) { + const segment = line[j]; + // We can push up to 5 ints, each int can take at most 7 chars, and we + // may push a comma. + if (pos > subLength) { + out += td.decode(sub); + buf.copyWithin(0, subLength, pos); + pos -= subLength; + } + if (j > 0) + buf[pos++] = comma; + pos = encodeInteger(buf, pos, state, segment, 0); // genColumn + if (segment.length === 1) + continue; + pos = encodeInteger(buf, pos, state, segment, 1); // sourcesIndex + pos = encodeInteger(buf, pos, state, segment, 2); // sourceLine + pos = encodeInteger(buf, pos, state, segment, 3); // sourceColumn + if (segment.length === 4) + continue; + pos = encodeInteger(buf, pos, state, segment, 4); // namesIndex + } + } + return out + td.decode(buf.subarray(0, pos)); +} +function encodeInteger(buf, pos, state, segment, j) { + const next = segment[j]; + let num = next - state[j]; + state[j] = next; + num = num < 0 ? 
(-num << 1) | 1 : num << 1; + do { + let clamped = num & 0b011111; + num >>>= 5; + if (num > 0) + clamped |= 0b100000; + buf[pos++] = intToChar[clamped]; + } while (num > 0); + return pos; +} + +export { decode, encode }; +//# sourceMappingURL=sourcemap-codec.mjs.map diff --git a/node_modules/@jridgewell/trace-mapping/node_modules/@jridgewell/sourcemap-codec/dist/sourcemap-codec.mjs.map b/node_modules/@jridgewell/trace-mapping/node_modules/@jridgewell/sourcemap-codec/dist/sourcemap-codec.mjs.map new file mode 100644 index 0000000..36d7249 --- /dev/null +++ b/node_modules/@jridgewell/trace-mapping/node_modules/@jridgewell/sourcemap-codec/dist/sourcemap-codec.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"sourcemap-codec.mjs","sources":["../src/sourcemap-codec.ts"],"sourcesContent":[null],"names":[],"mappings":"AAOA,MAAM,KAAK,GAAG,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;AAChC,MAAM,SAAS,GAAG,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;AACpC,MAAM,KAAK,GAAG,kEAAkE,CAAC;AACjF,MAAM,SAAS,GAAG,IAAI,UAAU,CAAC,EAAE,CAAC,CAAC;AACrC,MAAM,SAAS,GAAG,IAAI,UAAU,CAAC,GAAG,CAAC,CAAC;AAEtC,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,KAAK,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;IACrC,MAAM,CAAC,GAAG,KAAK,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;IAC9B,SAAS,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC;IACjB,SAAS,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC;CAClB;AAED;AACA,MAAM,EAAE,GACN,OAAO,WAAW,KAAK,WAAW;sBACd,IAAI,WAAW,EAAE;MACjC,OAAO,MAAM,KAAK,WAAW;UAC7B;YACE,MAAM,CAAC,GAAe;gBACpB,MAAM,GAAG,GAAG,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,MAAM,EAAE,GAAG,CAAC,UAAU,EAAE,GAAG,CAAC,UAAU,CAAC,CAAC;gBACpE,OAAO,GAAG,CAAC,QAAQ,EAAE,CAAC;aACvB;SACF;UACD;YACE,MAAM,CAAC,GAAe;gBACpB,IAAI,GAAG,GAAG,EAAE,CAAC;gBACb,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,GAAG,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;oBACnC,GAAG,IAAI,MAAM,CAAC,YAAY,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;iBACpC;gBACD,OAAO,GAAG,CAAC;aACZ;SACF,CAAC;SAEQ,MAAM,CAAC,QAAgB;IACrC,MAAM,KAAK,GAA6C,IAAI,UAAU,CAAC,CAAC,CAAQ,CAAC;IACjF,MAAM,OAAO,GAAsB,EAAE,CAAC;IAEtC,IAAI,KAAK,GAAG,CAAC,CAAC;IACd,GAAG;QACD,MAAM,IAAI,GAAG,OAAO,CAAC,QAAQ,EAAE,KAAK,CAAC,CAAC;QACtC,MAAM,IAAI,GAAkB,EAAE,CAAC;QAC/B,IAAI,MAAM,GAAG,IAAI,CAAC;QAClB,IAAI,OAAO,GAAG,CAAC,CAAC;QAChB,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC;QAEb,KAAK,IAAI,CAAC,GAAG,KAAK,EAAE,CAAC,GAAG,IAAI,EAAE,CAAC,EAAE,EAAE;YACjC,IAAI,GAAqB,CAAC;YAE1B,CAAC,GAAG,aAAa,CAAC,QAAQ,EAAE,CAAC,EAAE,KAAK,EAAE,CAAC,CAAC,CAAC;YACzC,MAAM,GAAG,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;YACrB,IAAI,GAAG,GAAG,OAAO;gBAAE,MAAM,GAAG,KAAK,CAAC;YAClC,OAAO,GAAG,GAAG,CAAC;YAEd,IAAI,UAAU,CAAC,QAAQ,EAAE,CAAC,EAAE,IAAI,CAAC,EAAE;gBACjC,CAAC,GAAG,aAAa,CAAC,QAAQ,EAAE,CAAC,EAAE,KAAK,EAAE,CAAC,CAAC,CAAC;gBACzC,CAAC,GAAG,aAAa,CAAC,QAAQ,EAAE,CAAC,EAAE,KAAK,EAAE,CAAC,CAAC,CAAC;gBACzC,CAAC,GAAG,aAAa,CAAC,QAAQ,EAAE,CAAC,EAAE,KAAK,EAAE,CAAC,CAAC,CAAC;gBAEzC,IAAI,UAAU,CAAC,QAAQ,EAAE,CAAC,EAAE,IAAI,CAAC,EAAE;oBACjC,CAAC,GAAG,aAAa,CAAC,QAAQ,EAAE,CAAC,EAAE,KAAK,EAAE,CAAC,CAAC,CAAC;oBACzC,GAAG,GAAG,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC;iBACrD;qBAAM;oBACL,GAAG,GAAG,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC;iBAC3C;aACF;iBAAM;gBACL,GAAG,GAAG,CAAC,GAAG,CAAC,CAAC;aACb;YAED,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SAChB;QAED,IAAI,CAAC,MAAM;YAAE,IAAI,CAAC,IAAI,CAAC,CAAC;QACxB,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;QACnB,KAAK,GAAG,IAAI,GAAG,CAAC,CAAC;KAClB,QAAQ,KAAK,IAAI,QAAQ,CAAC,MAAM,EAAE;IAEnC,OAAO,OAAO,CAAC;AACjB,CAAC;AAED,SAAS,OAAO,CAAC,QAAgB,EAAE,KAAa;IAC9C,MAAM,GAAG,GAAG,QAAQ,CAAC,OAAO,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC;IACzC,OAAO,GAAG,KAAK,CAAC,CAAC,GAAG,QAAQ,CAAC,MAAM,GAA
G,GAAG,CAAC;AAC5C,CAAC;AAED,SAAS,aAAa,CAAC,QAAgB,EAAE,GAAW,EAAE,KAAuB,EAAE,CAAS;IACtF,IAAI,KAAK,GAAG,CAAC,CAAC;IACd,IAAI,KAAK,GAAG,CAAC,CAAC;IACd,IAAI,OAAO,GAAG,CAAC,CAAC;IAEhB,GAAG;QACD,MAAM,CAAC,GAAG,QAAQ,CAAC,UAAU,CAAC,GAAG,EAAE,CAAC,CAAC;QACrC,OAAO,GAAG,SAAS,CAAC,CAAC,CAAC,CAAC;QACvB,KAAK,IAAI,CAAC,OAAO,GAAG,EAAE,KAAK,KAAK,CAAC;QACjC,KAAK,IAAI,CAAC,CAAC;KACZ,QAAQ,OAAO,GAAG,EAAE,EAAE;IAEvB,MAAM,YAAY,GAAG,KAAK,GAAG,CAAC,CAAC;IAC/B,KAAK,MAAM,CAAC,CAAC;IAEb,IAAI,YAAY,EAAE;QAChB,KAAK,GAAG,CAAC,UAAU,GAAG,CAAC,KAAK,CAAC;KAC9B;IAED,KAAK,CAAC,CAAC,CAAC,IAAI,KAAK,CAAC;IAClB,OAAO,GAAG,CAAC;AACb,CAAC;AAED,SAAS,UAAU,CAAC,QAAgB,EAAE,CAAS,EAAE,MAAc;IAC7D,IAAI,CAAC,IAAI,MAAM;QAAE,OAAO,KAAK,CAAC;IAC9B,OAAO,QAAQ,CAAC,UAAU,CAAC,CAAC,CAAC,KAAK,KAAK,CAAC;AAC1C,CAAC;AAED,SAAS,IAAI,CAAC,IAAwB;IACpC,IAAI,CAAC,IAAI,CAAC,cAAc,CAAC,CAAC;AAC5B,CAAC;AAED,SAAS,cAAc,CAAC,CAAmB,EAAE,CAAmB;IAC9D,OAAO,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;AACrB,CAAC;SAIe,MAAM,CAAC,OAAoC;IACzD,MAAM,KAAK,GAA6C,IAAI,UAAU,CAAC,CAAC,CAAQ,CAAC;IACjF,MAAM,SAAS,GAAG,IAAI,GAAG,EAAE,CAAC;IAC5B,MAAM,SAAS,GAAG,SAAS,GAAG,EAAE,CAAC;IACjC,MAAM,GAAG,GAAG,IAAI,UAAU,CAAC,SAAS,CAAC,CAAC;IACtC,MAAM,GAAG,GAAG,GAAG,CAAC,QAAQ,CAAC,CAAC,EAAE,SAAS,CAAC,CAAC;IACvC,IAAI,GAAG,GAAG,CAAC,CAAC;IACZ,IAAI,GAAG,GAAG,EAAE,CAAC;IAEb,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,OAAO,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;QACvC,MAAM,IAAI,GAAG,OAAO,CAAC,CAAC,CAAC,CAAC;QACxB,IAAI,CAAC,GAAG,CAAC,EAAE;YACT,IAAI,GAAG,KAAK,SAAS,EAAE;gBACrB,GAAG,IAAI,EAAE,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;gBACtB,GAAG,GAAG,CAAC,CAAC;aACT;YACD,GAAG,CAAC,GAAG,EAAE,CAAC,GAAG,SAAS,CAAC;SACxB;QACD,IAAI,IAAI,CAAC,MAAM,KAAK,CAAC;YAAE,SAAS;QAEhC,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC;QAEb,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YACpC,MAAM,OAAO,GAAG,IAAI,CAAC,CAAC,CAAC,CAAC;;;YAGxB,IAAI,GAAG,GAAG,SAAS,EAAE;gBACnB,GAAG,IAAI,EAAE,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;gBACtB,GAAG,CAAC,UAAU,CAAC,CAAC,EAAE,SAAS,EAAE,GAAG,CAAC,CAAC;gBAClC,GAAG,IAAI,SAAS,CAAC;aAClB;YACD,IAAI,CAAC,GAAG,CAAC;gBAAE,GAAG,CAAC,GAAG,EAAE,CAAC,GAAG,KAAK,CAAC;YAE9B,GAAG,GAAG,aAAa,CAAC,GAAG,EAAE,GAAG,EAAE,KAAK,EAAE,OAAO,EAAE,CAAC,CAAC,CAAC;YAEjD,IAAI,OAAO,CAAC,MAAM,KAAK,CAAC;gBAAE,SAAS;YACnC,GAAG,GAAG,aAAa,CAAC,GAAG,EAAE,GAAG,EAAE,KAAK,EAAE,OAAO,EAAE,CAAC,CAAC,CAAC;YACjD,GAAG,GAAG,aAAa,CAAC,GAAG,EAAE,GAAG,EAAE,KAAK,EAAE,OAAO,EAAE,CAAC,CAAC,CAAC;YACjD,GAAG,GAAG,aAAa,CAAC,GAAG,EAAE,GAAG,EAAE,KAAK,EAAE,OAAO,EAAE,CAAC,CAAC,CAAC;YAEjD,IAAI,OAAO,CAAC,MAAM,KAAK,CAAC;gBAAE,SAAS;YACnC,GAAG,GAAG,aAAa,CAAC,GAAG,EAAE,GAAG,EAAE,KAAK,EAAE,OAAO,EAAE,CAAC,CAAC,CAAC;SAClD;KACF;IAED,OAAO,GAAG,GAAG,EAAE,CAAC,MAAM,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC,CAAC;AAC/C,CAAC;AAED,SAAS,aAAa,CACpB,GAAe,EACf,GAAW,EACX,KAAuB,EACvB,OAAyB,EACzB,CAAS;IAET,MAAM,IAAI,GAAG,OAAO,CAAC,CAAC,CAAC,CAAC;IACxB,IAAI,GAAG,GAAG,IAAI,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;IAC1B,KAAK,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC;IAEhB,GAAG,GAAG,GAAG,GAAG,CAAC,GAAG,CAAC,CAAC,GAAG,IAAI,CAAC,IAAI,CAAC,GAAG,GAAG,IAAI,CAAC,CAAC;IAC3C,GAAG;QACD,IAAI,OAAO,GAAG,GAAG,GAAG,QAAQ,CAAC;QAC7B,GAAG,MAAM,CAAC,CAAC;QACX,IAAI,GAAG,GAAG,CAAC;YAAE,OAAO,IAAI,QAAQ,CAAC;QACjC,GAAG,CAAC,GAAG,EAAE,CAAC,GAAG,SAAS,CAAC,OAAO,CAAC,CAAC;KACjC,QAAQ,GAAG,GAAG,CAAC,EAAE;IAElB,OAAO,GAAG,CAAC;AACb;;;;"} \ No newline at end of file diff --git a/node_modules/@jridgewell/trace-mapping/node_modules/@jridgewell/sourcemap-codec/dist/sourcemap-codec.umd.js b/node_modules/@jridgewell/trace-mapping/node_modules/@jridgewell/sourcemap-codec/dist/sourcemap-codec.umd.js new file mode 100644 index 0000000..bec92a9 --- /dev/null +++ 
b/node_modules/@jridgewell/trace-mapping/node_modules/@jridgewell/sourcemap-codec/dist/sourcemap-codec.umd.js @@ -0,0 +1,175 @@ +(function (global, factory) { + typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports) : + typeof define === 'function' && define.amd ? define(['exports'], factory) : + (global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.sourcemapCodec = {})); +})(this, (function (exports) { 'use strict'; + + const comma = ','.charCodeAt(0); + const semicolon = ';'.charCodeAt(0); + const chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'; + const intToChar = new Uint8Array(64); // 64 possible chars. + const charToInt = new Uint8Array(128); // z is 122 in ASCII + for (let i = 0; i < chars.length; i++) { + const c = chars.charCodeAt(i); + intToChar[i] = c; + charToInt[c] = i; + } + // Provide a fallback for older environments. + const td = typeof TextDecoder !== 'undefined' + ? /* #__PURE__ */ new TextDecoder() + : typeof Buffer !== 'undefined' + ? { + decode(buf) { + const out = Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength); + return out.toString(); + }, + } + : { + decode(buf) { + let out = ''; + for (let i = 0; i < buf.length; i++) { + out += String.fromCharCode(buf[i]); + } + return out; + }, + }; + function decode(mappings) { + const state = new Int32Array(5); + const decoded = []; + let index = 0; + do { + const semi = indexOf(mappings, index); + const line = []; + let sorted = true; + let lastCol = 0; + state[0] = 0; + for (let i = index; i < semi; i++) { + let seg; + i = decodeInteger(mappings, i, state, 0); // genColumn + const col = state[0]; + if (col < lastCol) + sorted = false; + lastCol = col; + if (hasMoreVlq(mappings, i, semi)) { + i = decodeInteger(mappings, i, state, 1); // sourcesIndex + i = decodeInteger(mappings, i, state, 2); // sourceLine + i = decodeInteger(mappings, i, state, 3); // sourceColumn + if (hasMoreVlq(mappings, i, semi)) { + i = decodeInteger(mappings, i, state, 4); // namesIndex + seg = [col, state[1], state[2], state[3], state[4]]; + } + else { + seg = [col, state[1], state[2], state[3]]; + } + } + else { + seg = [col]; + } + line.push(seg); + } + if (!sorted) + sort(line); + decoded.push(line); + index = semi + 1; + } while (index <= mappings.length); + return decoded; + } + function indexOf(mappings, index) { + const idx = mappings.indexOf(';', index); + return idx === -1 ? 
mappings.length : idx; + } + function decodeInteger(mappings, pos, state, j) { + let value = 0; + let shift = 0; + let integer = 0; + do { + const c = mappings.charCodeAt(pos++); + integer = charToInt[c]; + value |= (integer & 31) << shift; + shift += 5; + } while (integer & 32); + const shouldNegate = value & 1; + value >>>= 1; + if (shouldNegate) { + value = -0x80000000 | -value; + } + state[j] += value; + return pos; + } + function hasMoreVlq(mappings, i, length) { + if (i >= length) + return false; + return mappings.charCodeAt(i) !== comma; + } + function sort(line) { + line.sort(sortComparator); + } + function sortComparator(a, b) { + return a[0] - b[0]; + } + function encode(decoded) { + const state = new Int32Array(5); + const bufLength = 1024 * 16; + const subLength = bufLength - 36; + const buf = new Uint8Array(bufLength); + const sub = buf.subarray(0, subLength); + let pos = 0; + let out = ''; + for (let i = 0; i < decoded.length; i++) { + const line = decoded[i]; + if (i > 0) { + if (pos === bufLength) { + out += td.decode(buf); + pos = 0; + } + buf[pos++] = semicolon; + } + if (line.length === 0) + continue; + state[0] = 0; + for (let j = 0; j < line.length; j++) { + const segment = line[j]; + // We can push up to 5 ints, each int can take at most 7 chars, and we + // may push a comma. + if (pos > subLength) { + out += td.decode(sub); + buf.copyWithin(0, subLength, pos); + pos -= subLength; + } + if (j > 0) + buf[pos++] = comma; + pos = encodeInteger(buf, pos, state, segment, 0); // genColumn + if (segment.length === 1) + continue; + pos = encodeInteger(buf, pos, state, segment, 1); // sourcesIndex + pos = encodeInteger(buf, pos, state, segment, 2); // sourceLine + pos = encodeInteger(buf, pos, state, segment, 3); // sourceColumn + if (segment.length === 4) + continue; + pos = encodeInteger(buf, pos, state, segment, 4); // namesIndex + } + } + return out + td.decode(buf.subarray(0, pos)); + } + function encodeInteger(buf, pos, state, segment, j) { + const next = segment[j]; + let num = next - state[j]; + state[j] = next; + num = num < 0 ? 
(-num << 1) | 1 : num << 1; + do { + let clamped = num & 0b011111; + num >>>= 5; + if (num > 0) + clamped |= 0b100000; + buf[pos++] = intToChar[clamped]; + } while (num > 0); + return pos; + } + + exports.decode = decode; + exports.encode = encode; + + Object.defineProperty(exports, '__esModule', { value: true }); + +})); +//# sourceMappingURL=sourcemap-codec.umd.js.map diff --git a/node_modules/@jridgewell/trace-mapping/node_modules/@jridgewell/sourcemap-codec/dist/sourcemap-codec.umd.js.map b/node_modules/@jridgewell/trace-mapping/node_modules/@jridgewell/sourcemap-codec/dist/sourcemap-codec.umd.js.map new file mode 100644 index 0000000..a7a4628 --- /dev/null +++ b/node_modules/@jridgewell/trace-mapping/node_modules/@jridgewell/sourcemap-codec/dist/sourcemap-codec.umd.js.map @@ -0,0 +1 @@ +{"version":3,"file":"sourcemap-codec.umd.js","sources":["../src/sourcemap-codec.ts"],"sourcesContent":[null],"names":[],"mappings":";;;;;;IAOA,MAAM,KAAK,GAAG,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;IAChC,MAAM,SAAS,GAAG,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;IACpC,MAAM,KAAK,GAAG,kEAAkE,CAAC;IACjF,MAAM,SAAS,GAAG,IAAI,UAAU,CAAC,EAAE,CAAC,CAAC;IACrC,MAAM,SAAS,GAAG,IAAI,UAAU,CAAC,GAAG,CAAC,CAAC;IAEtC,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,KAAK,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;QACrC,MAAM,CAAC,GAAG,KAAK,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;QAC9B,SAAS,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC;QACjB,SAAS,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC;KAClB;IAED;IACA,MAAM,EAAE,GACN,OAAO,WAAW,KAAK,WAAW;0BACd,IAAI,WAAW,EAAE;UACjC,OAAO,MAAM,KAAK,WAAW;cAC7B;gBACE,MAAM,CAAC,GAAe;oBACpB,MAAM,GAAG,GAAG,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,MAAM,EAAE,GAAG,CAAC,UAAU,EAAE,GAAG,CAAC,UAAU,CAAC,CAAC;oBACpE,OAAO,GAAG,CAAC,QAAQ,EAAE,CAAC;iBACvB;aACF;cACD;gBACE,MAAM,CAAC,GAAe;oBACpB,IAAI,GAAG,GAAG,EAAE,CAAC;oBACb,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,GAAG,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;wBACnC,GAAG,IAAI,MAAM,CAAC,YAAY,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;qBACpC;oBACD,OAAO,GAAG,CAAC;iBACZ;aACF,CAAC;aAEQ,MAAM,CAAC,QAAgB;QACrC,MAAM,KAAK,GAA6C,IAAI,UAAU,CAAC,CAAC,CAAQ,CAAC;QACjF,MAAM,OAAO,GAAsB,EAAE,CAAC;QAEtC,IAAI,KAAK,GAAG,CAAC,CAAC;QACd,GAAG;YACD,MAAM,IAAI,GAAG,OAAO,CAAC,QAAQ,EAAE,KAAK,CAAC,CAAC;YACtC,MAAM,IAAI,GAAkB,EAAE,CAAC;YAC/B,IAAI,MAAM,GAAG,IAAI,CAAC;YAClB,IAAI,OAAO,GAAG,CAAC,CAAC;YAChB,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC;YAEb,KAAK,IAAI,CAAC,GAAG,KAAK,EAAE,CAAC,GAAG,IAAI,EAAE,CAAC,EAAE,EAAE;gBACjC,IAAI,GAAqB,CAAC;gBAE1B,CAAC,GAAG,aAAa,CAAC,QAAQ,EAAE,CAAC,EAAE,KAAK,EAAE,CAAC,CAAC,CAAC;gBACzC,MAAM,GAAG,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;gBACrB,IAAI,GAAG,GAAG,OAAO;oBAAE,MAAM,GAAG,KAAK,CAAC;gBAClC,OAAO,GAAG,GAAG,CAAC;gBAEd,IAAI,UAAU,CAAC,QAAQ,EAAE,CAAC,EAAE,IAAI,CAAC,EAAE;oBACjC,CAAC,GAAG,aAAa,CAAC,QAAQ,EAAE,CAAC,EAAE,KAAK,EAAE,CAAC,CAAC,CAAC;oBACzC,CAAC,GAAG,aAAa,CAAC,QAAQ,EAAE,CAAC,EAAE,KAAK,EAAE,CAAC,CAAC,CAAC;oBACzC,CAAC,GAAG,aAAa,CAAC,QAAQ,EAAE,CAAC,EAAE,KAAK,EAAE,CAAC,CAAC,CAAC;oBAEzC,IAAI,UAAU,CAAC,QAAQ,EAAE,CAAC,EAAE,IAAI,CAAC,EAAE;wBACjC,CAAC,GAAG,aAAa,CAAC,QAAQ,EAAE,CAAC,EAAE,KAAK,EAAE,CAAC,CAAC,CAAC;wBACzC,GAAG,GAAG,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC;qBACrD;yBAAM;wBACL,GAAG,GAAG,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC;qBAC3C;iBACF;qBAAM;oBACL,GAAG,GAAG,CAAC,GAAG,CAAC,CAAC;iBACb;gBAED,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;aAChB;YAED,IAAI,CAAC,MAAM;gBAAE,IAAI,CAAC,IAAI,CAAC,CAAC;YACxB,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;YACnB,KAAK,GAAG,IAAI,GAAG,CAAC,CAAC;SAClB,QAAQ,KAAK,IAAI,QAAQ,CAAC,MAAM,EAAE;QAEnC,OAAO,OAAO,CAAC;IACjB,CAAC;IAED,SAAS,OAAO,CAAC
,QAAgB,EAAE,KAAa;QAC9C,MAAM,GAAG,GAAG,QAAQ,CAAC,OAAO,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC;QACzC,OAAO,GAAG,KAAK,CAAC,CAAC,GAAG,QAAQ,CAAC,MAAM,GAAG,GAAG,CAAC;IAC5C,CAAC;IAED,SAAS,aAAa,CAAC,QAAgB,EAAE,GAAW,EAAE,KAAuB,EAAE,CAAS;QACtF,IAAI,KAAK,GAAG,CAAC,CAAC;QACd,IAAI,KAAK,GAAG,CAAC,CAAC;QACd,IAAI,OAAO,GAAG,CAAC,CAAC;QAEhB,GAAG;YACD,MAAM,CAAC,GAAG,QAAQ,CAAC,UAAU,CAAC,GAAG,EAAE,CAAC,CAAC;YACrC,OAAO,GAAG,SAAS,CAAC,CAAC,CAAC,CAAC;YACvB,KAAK,IAAI,CAAC,OAAO,GAAG,EAAE,KAAK,KAAK,CAAC;YACjC,KAAK,IAAI,CAAC,CAAC;SACZ,QAAQ,OAAO,GAAG,EAAE,EAAE;QAEvB,MAAM,YAAY,GAAG,KAAK,GAAG,CAAC,CAAC;QAC/B,KAAK,MAAM,CAAC,CAAC;QAEb,IAAI,YAAY,EAAE;YAChB,KAAK,GAAG,CAAC,UAAU,GAAG,CAAC,KAAK,CAAC;SAC9B;QAED,KAAK,CAAC,CAAC,CAAC,IAAI,KAAK,CAAC;QAClB,OAAO,GAAG,CAAC;IACb,CAAC;IAED,SAAS,UAAU,CAAC,QAAgB,EAAE,CAAS,EAAE,MAAc;QAC7D,IAAI,CAAC,IAAI,MAAM;YAAE,OAAO,KAAK,CAAC;QAC9B,OAAO,QAAQ,CAAC,UAAU,CAAC,CAAC,CAAC,KAAK,KAAK,CAAC;IAC1C,CAAC;IAED,SAAS,IAAI,CAAC,IAAwB;QACpC,IAAI,CAAC,IAAI,CAAC,cAAc,CAAC,CAAC;IAC5B,CAAC;IAED,SAAS,cAAc,CAAC,CAAmB,EAAE,CAAmB;QAC9D,OAAO,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;IACrB,CAAC;aAIe,MAAM,CAAC,OAAoC;QACzD,MAAM,KAAK,GAA6C,IAAI,UAAU,CAAC,CAAC,CAAQ,CAAC;QACjF,MAAM,SAAS,GAAG,IAAI,GAAG,EAAE,CAAC;QAC5B,MAAM,SAAS,GAAG,SAAS,GAAG,EAAE,CAAC;QACjC,MAAM,GAAG,GAAG,IAAI,UAAU,CAAC,SAAS,CAAC,CAAC;QACtC,MAAM,GAAG,GAAG,GAAG,CAAC,QAAQ,CAAC,CAAC,EAAE,SAAS,CAAC,CAAC;QACvC,IAAI,GAAG,GAAG,CAAC,CAAC;QACZ,IAAI,GAAG,GAAG,EAAE,CAAC;QAEb,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,OAAO,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YACvC,MAAM,IAAI,GAAG,OAAO,CAAC,CAAC,CAAC,CAAC;YACxB,IAAI,CAAC,GAAG,CAAC,EAAE;gBACT,IAAI,GAAG,KAAK,SAAS,EAAE;oBACrB,GAAG,IAAI,EAAE,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;oBACtB,GAAG,GAAG,CAAC,CAAC;iBACT;gBACD,GAAG,CAAC,GAAG,EAAE,CAAC,GAAG,SAAS,CAAC;aACxB;YACD,IAAI,IAAI,CAAC,MAAM,KAAK,CAAC;gBAAE,SAAS;YAEhC,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC;YAEb,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;gBACpC,MAAM,OAAO,GAAG,IAAI,CAAC,CAAC,CAAC,CAAC;;;gBAGxB,IAAI,GAAG,GAAG,SAAS,EAAE;oBACnB,GAAG,IAAI,EAAE,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;oBACtB,GAAG,CAAC,UAAU,CAAC,CAAC,EAAE,SAAS,EAAE,GAAG,CAAC,CAAC;oBAClC,GAAG,IAAI,SAAS,CAAC;iBAClB;gBACD,IAAI,CAAC,GAAG,CAAC;oBAAE,GAAG,CAAC,GAAG,EAAE,CAAC,GAAG,KAAK,CAAC;gBAE9B,GAAG,GAAG,aAAa,CAAC,GAAG,EAAE,GAAG,EAAE,KAAK,EAAE,OAAO,EAAE,CAAC,CAAC,CAAC;gBAEjD,IAAI,OAAO,CAAC,MAAM,KAAK,CAAC;oBAAE,SAAS;gBACnC,GAAG,GAAG,aAAa,CAAC,GAAG,EAAE,GAAG,EAAE,KAAK,EAAE,OAAO,EAAE,CAAC,CAAC,CAAC;gBACjD,GAAG,GAAG,aAAa,CAAC,GAAG,EAAE,GAAG,EAAE,KAAK,EAAE,OAAO,EAAE,CAAC,CAAC,CAAC;gBACjD,GAAG,GAAG,aAAa,CAAC,GAAG,EAAE,GAAG,EAAE,KAAK,EAAE,OAAO,EAAE,CAAC,CAAC,CAAC;gBAEjD,IAAI,OAAO,CAAC,MAAM,KAAK,CAAC;oBAAE,SAAS;gBACnC,GAAG,GAAG,aAAa,CAAC,GAAG,EAAE,GAAG,EAAE,KAAK,EAAE,OAAO,EAAE,CAAC,CAAC,CAAC;aAClD;SACF;QAED,OAAO,GAAG,GAAG,EAAE,CAAC,MAAM,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC,CAAC;IAC/C,CAAC;IAED,SAAS,aAAa,CACpB,GAAe,EACf,GAAW,EACX,KAAuB,EACvB,OAAyB,EACzB,CAAS;QAET,MAAM,IAAI,GAAG,OAAO,CAAC,CAAC,CAAC,CAAC;QACxB,IAAI,GAAG,GAAG,IAAI,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;QAC1B,KAAK,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC;QAEhB,GAAG,GAAG,GAAG,GAAG,CAAC,GAAG,CAAC,CAAC,GAAG,IAAI,CAAC,IAAI,CAAC,GAAG,GAAG,IAAI,CAAC,CAAC;QAC3C,GAAG;YACD,IAAI,OAAO,GAAG,GAAG,GAAG,QAAQ,CAAC;YAC7B,GAAG,MAAM,CAAC,CAAC;YACX,IAAI,GAAG,GAAG,CAAC;gBAAE,OAAO,IAAI,QAAQ,CAAC;YACjC,GAAG,CAAC,GAAG,EAAE,CAAC,GAAG,SAAS,CAAC,OAAO,CAAC,CAAC;SACjC,QAAQ,GAAG,GAAG,CAAC,EAAE;QAElB,OAAO,GAAG,CAAC;IACb;;;;;;;;;;;"} \ No newline at end of file diff --git a/node_modules/@jridgewell/trace-mapping/node_modules/@jridgewell/sourcemap-codec/dist/types/sourcemap-codec.d.ts 
b/node_modules/@jridgewell/trace-mapping/node_modules/@jridgewell/sourcemap-codec/dist/types/sourcemap-codec.d.ts new file mode 100644 index 0000000..410d320 --- /dev/null +++ b/node_modules/@jridgewell/trace-mapping/node_modules/@jridgewell/sourcemap-codec/dist/types/sourcemap-codec.d.ts @@ -0,0 +1,6 @@ +export declare type SourceMapSegment = [number] | [number, number, number, number] | [number, number, number, number, number]; +export declare type SourceMapLine = SourceMapSegment[]; +export declare type SourceMapMappings = SourceMapLine[]; +export declare function decode(mappings: string): SourceMapMappings; +export declare function encode(decoded: SourceMapMappings): string; +export declare function encode(decoded: Readonly): string; diff --git a/node_modules/@jridgewell/trace-mapping/node_modules/@jridgewell/sourcemap-codec/package.json b/node_modules/@jridgewell/trace-mapping/node_modules/@jridgewell/sourcemap-codec/package.json new file mode 100644 index 0000000..5945072 --- /dev/null +++ b/node_modules/@jridgewell/trace-mapping/node_modules/@jridgewell/sourcemap-codec/package.json @@ -0,0 +1,75 @@ +{ + "name": "@jridgewell/sourcemap-codec", + "version": "1.4.14", + "description": "Encode/decode sourcemap mappings", + "keywords": [ + "sourcemap", + "vlq" + ], + "main": "dist/sourcemap-codec.umd.js", + "module": "dist/sourcemap-codec.mjs", + "typings": "dist/types/sourcemap-codec.d.ts", + "files": [ + "dist", + "src" + ], + "exports": { + ".": [ + { + "types": "./dist/types/sourcemap-codec.d.ts", + "browser": "./dist/sourcemap-codec.umd.js", + "import": "./dist/sourcemap-codec.mjs", + "require": "./dist/sourcemap-codec.umd.js" + }, + "./dist/sourcemap-codec.umd.js" + ], + "./package.json": "./package.json" + }, + "scripts": { + "benchmark": "run-s build:rollup benchmark:*", + "benchmark:install": "cd benchmark && npm install", + "benchmark:only": "node --expose-gc benchmark/index.js", + "build": "run-s -n build:*", + "build:rollup": "rollup -c rollup.config.js", + "build:ts": "tsc --project tsconfig.build.json", + "lint": "run-s -n lint:*", + "lint:prettier": "npm run test:lint:prettier -- --write", + "lint:ts": "npm run test:lint:ts -- --fix", + "prebuild": "rm -rf dist", + "prepublishOnly": "npm run preversion", + "preversion": "run-s test build", + "pretest": "run-s build:rollup", + "test": "run-s -n test:lint test:only", + "test:debug": "mocha --inspect-brk", + "test:lint": "run-s -n test:lint:*", + "test:lint:prettier": "prettier --check '{src,test}/**/*.ts'", + "test:lint:ts": "eslint '{src,test}/**/*.ts'", + "test:only": "mocha", + "test:coverage": "c8 mocha", + "test:watch": "mocha --watch" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/jridgewell/sourcemap-codec.git" + }, + "author": "Rich Harris", + "license": "MIT", + "devDependencies": { + "@rollup/plugin-typescript": "8.3.0", + "@types/node": "17.0.15", + "@typescript-eslint/eslint-plugin": "5.10.0", + "@typescript-eslint/parser": "5.10.0", + "benchmark": "2.1.4", + "c8": "7.11.2", + "eslint": "8.7.0", + "eslint-config-prettier": "8.3.0", + "mocha": "9.2.0", + "npm-run-all": "4.1.5", + "prettier": "2.5.1", + "rollup": "2.64.0", + "source-map": "0.6.1", + "source-map-js": "1.0.2", + "sourcemap-codec": "1.4.8", + "typescript": "4.5.4" + } +} diff --git a/node_modules/@jridgewell/trace-mapping/node_modules/@jridgewell/sourcemap-codec/src/sourcemap-codec.ts b/node_modules/@jridgewell/trace-mapping/node_modules/@jridgewell/sourcemap-codec/src/sourcemap-codec.ts new file mode 100644 index 
0000000..cafd90e --- /dev/null +++ b/node_modules/@jridgewell/trace-mapping/node_modules/@jridgewell/sourcemap-codec/src/sourcemap-codec.ts @@ -0,0 +1,198 @@ +export type SourceMapSegment = + | [number] + | [number, number, number, number] + | [number, number, number, number, number]; +export type SourceMapLine = SourceMapSegment[]; +export type SourceMapMappings = SourceMapLine[]; + +const comma = ','.charCodeAt(0); +const semicolon = ';'.charCodeAt(0); +const chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'; +const intToChar = new Uint8Array(64); // 64 possible chars. +const charToInt = new Uint8Array(128); // z is 122 in ASCII + +for (let i = 0; i < chars.length; i++) { + const c = chars.charCodeAt(i); + intToChar[i] = c; + charToInt[c] = i; +} + +// Provide a fallback for older environments. +const td = + typeof TextDecoder !== 'undefined' + ? /* #__PURE__ */ new TextDecoder() + : typeof Buffer !== 'undefined' + ? { + decode(buf: Uint8Array) { + const out = Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength); + return out.toString(); + }, + } + : { + decode(buf: Uint8Array) { + let out = ''; + for (let i = 0; i < buf.length; i++) { + out += String.fromCharCode(buf[i]); + } + return out; + }, + }; + +export function decode(mappings: string): SourceMapMappings { + const state: [number, number, number, number, number] = new Int32Array(5) as any; + const decoded: SourceMapMappings = []; + + let index = 0; + do { + const semi = indexOf(mappings, index); + const line: SourceMapLine = []; + let sorted = true; + let lastCol = 0; + state[0] = 0; + + for (let i = index; i < semi; i++) { + let seg: SourceMapSegment; + + i = decodeInteger(mappings, i, state, 0); // genColumn + const col = state[0]; + if (col < lastCol) sorted = false; + lastCol = col; + + if (hasMoreVlq(mappings, i, semi)) { + i = decodeInteger(mappings, i, state, 1); // sourcesIndex + i = decodeInteger(mappings, i, state, 2); // sourceLine + i = decodeInteger(mappings, i, state, 3); // sourceColumn + + if (hasMoreVlq(mappings, i, semi)) { + i = decodeInteger(mappings, i, state, 4); // namesIndex + seg = [col, state[1], state[2], state[3], state[4]]; + } else { + seg = [col, state[1], state[2], state[3]]; + } + } else { + seg = [col]; + } + + line.push(seg); + } + + if (!sorted) sort(line); + decoded.push(line); + index = semi + 1; + } while (index <= mappings.length); + + return decoded; +} + +function indexOf(mappings: string, index: number): number { + const idx = mappings.indexOf(';', index); + return idx === -1 ? 
mappings.length : idx; +} + +function decodeInteger(mappings: string, pos: number, state: SourceMapSegment, j: number): number { + let value = 0; + let shift = 0; + let integer = 0; + + do { + const c = mappings.charCodeAt(pos++); + integer = charToInt[c]; + value |= (integer & 31) << shift; + shift += 5; + } while (integer & 32); + + const shouldNegate = value & 1; + value >>>= 1; + + if (shouldNegate) { + value = -0x80000000 | -value; + } + + state[j] += value; + return pos; +} + +function hasMoreVlq(mappings: string, i: number, length: number): boolean { + if (i >= length) return false; + return mappings.charCodeAt(i) !== comma; +} + +function sort(line: SourceMapSegment[]) { + line.sort(sortComparator); +} + +function sortComparator(a: SourceMapSegment, b: SourceMapSegment): number { + return a[0] - b[0]; +} + +export function encode(decoded: SourceMapMappings): string; +export function encode(decoded: Readonly): string; +export function encode(decoded: Readonly): string { + const state: [number, number, number, number, number] = new Int32Array(5) as any; + const bufLength = 1024 * 16; + const subLength = bufLength - 36; + const buf = new Uint8Array(bufLength); + const sub = buf.subarray(0, subLength); + let pos = 0; + let out = ''; + + for (let i = 0; i < decoded.length; i++) { + const line = decoded[i]; + if (i > 0) { + if (pos === bufLength) { + out += td.decode(buf); + pos = 0; + } + buf[pos++] = semicolon; + } + if (line.length === 0) continue; + + state[0] = 0; + + for (let j = 0; j < line.length; j++) { + const segment = line[j]; + // We can push up to 5 ints, each int can take at most 7 chars, and we + // may push a comma. + if (pos > subLength) { + out += td.decode(sub); + buf.copyWithin(0, subLength, pos); + pos -= subLength; + } + if (j > 0) buf[pos++] = comma; + + pos = encodeInteger(buf, pos, state, segment, 0); // genColumn + + if (segment.length === 1) continue; + pos = encodeInteger(buf, pos, state, segment, 1); // sourcesIndex + pos = encodeInteger(buf, pos, state, segment, 2); // sourceLine + pos = encodeInteger(buf, pos, state, segment, 3); // sourceColumn + + if (segment.length === 4) continue; + pos = encodeInteger(buf, pos, state, segment, 4); // namesIndex + } + } + + return out + td.decode(buf.subarray(0, pos)); +} + +function encodeInteger( + buf: Uint8Array, + pos: number, + state: SourceMapSegment, + segment: SourceMapSegment, + j: number, +): number { + const next = segment[j]; + let num = next - state[j]; + state[j] = next; + + num = num < 0 ? 
(-num << 1) | 1 : num << 1; + do { + let clamped = num & 0b011111; + num >>>= 5; + if (num > 0) clamped |= 0b100000; + buf[pos++] = intToChar[clamped]; + } while (num > 0); + + return pos; +} diff --git a/node_modules/@jridgewell/trace-mapping/package.json b/node_modules/@jridgewell/trace-mapping/package.json new file mode 100644 index 0000000..9fcc07f --- /dev/null +++ b/node_modules/@jridgewell/trace-mapping/package.json @@ -0,0 +1,75 @@ +{ + "name": "@jridgewell/trace-mapping", + "version": "0.3.18", + "description": "Trace the original position through a source map", + "keywords": [ + "source", + "map" + ], + "main": "dist/trace-mapping.umd.js", + "module": "dist/trace-mapping.mjs", + "types": "dist/types/trace-mapping.d.ts", + "files": [ + "dist" + ], + "exports": { + ".": [ + { + "types": "./dist/types/trace-mapping.d.ts", + "browser": "./dist/trace-mapping.umd.js", + "require": "./dist/trace-mapping.umd.js", + "import": "./dist/trace-mapping.mjs" + }, + "./dist/trace-mapping.umd.js" + ], + "./package.json": "./package.json" + }, + "author": "Justin Ridgewell ", + "repository": { + "type": "git", + "url": "git+https://github.com/jridgewell/trace-mapping.git" + }, + "license": "MIT", + "scripts": { + "benchmark": "run-s build:rollup benchmark:*", + "benchmark:install": "cd benchmark && npm install", + "benchmark:only": "node --expose-gc benchmark/index.mjs", + "build": "run-s -n build:*", + "build:rollup": "rollup -c rollup.config.js", + "build:ts": "tsc --project tsconfig.build.json", + "lint": "run-s -n lint:*", + "lint:prettier": "npm run test:lint:prettier -- --write", + "lint:ts": "npm run test:lint:ts -- --fix", + "prebuild": "rm -rf dist", + "prepublishOnly": "npm run preversion", + "preversion": "run-s test build", + "test": "run-s -n test:lint test:only", + "test:debug": "ava debug", + "test:lint": "run-s -n test:lint:*", + "test:lint:prettier": "prettier --check '{src,test}/**/*.ts' '**/*.md'", + "test:lint:ts": "eslint '{src,test}/**/*.ts'", + "test:only": "c8 ava", + "test:watch": "ava --watch" + }, + "devDependencies": { + "@rollup/plugin-typescript": "8.5.0", + "@typescript-eslint/eslint-plugin": "5.39.0", + "@typescript-eslint/parser": "5.39.0", + "ava": "4.3.3", + "benchmark": "2.1.4", + "c8": "7.12.0", + "esbuild": "0.15.10", + "eslint": "8.25.0", + "eslint-config-prettier": "8.5.0", + "eslint-plugin-no-only-tests": "3.0.0", + "npm-run-all": "4.1.5", + "prettier": "2.7.1", + "rollup": "2.79.1", + "tsx": "3.10.1", + "typescript": "4.8.4" + }, + "dependencies": { + "@jridgewell/resolve-uri": "3.1.0", + "@jridgewell/sourcemap-codec": "1.4.14" + } +} diff --git a/node_modules/@nodelib/fs.scandir/LICENSE b/node_modules/@nodelib/fs.scandir/LICENSE new file mode 100644 index 0000000..65a9994 --- /dev/null +++ b/node_modules/@nodelib/fs.scandir/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Denis Malinochkin + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/@nodelib/fs.scandir/README.md b/node_modules/@nodelib/fs.scandir/README.md new file mode 100644 index 0000000..e0b218b --- /dev/null +++ b/node_modules/@nodelib/fs.scandir/README.md @@ -0,0 +1,171 @@ +# @nodelib/fs.scandir + +> List files and directories inside the specified directory. + +## :bulb: Highlights + +The package is aimed at obtaining information about entries in the directory. + +* :moneybag: Returns useful information: `name`, `path`, `dirent` and `stats` (optional). +* :gear: On Node.js 10.10+ uses the mechanism without additional calls to determine the entry type. See [`old` and `modern` mode](#old-and-modern-mode). +* :link: Can safely work with broken symbolic links. + +## Install + +```console +npm install @nodelib/fs.scandir +``` + +## Usage + +```ts +import * as fsScandir from '@nodelib/fs.scandir'; + +fsScandir.scandir('path', (error, stats) => { /* … */ }); +``` + +## API + +### .scandir(path, [optionsOrSettings], callback) + +Returns an array of plain objects ([`Entry`](#entry)) with information about entry for provided path with standard callback-style. + +```ts +fsScandir.scandir('path', (error, entries) => { /* … */ }); +fsScandir.scandir('path', {}, (error, entries) => { /* … */ }); +fsScandir.scandir('path', new fsScandir.Settings(), (error, entries) => { /* … */ }); +``` + +### .scandirSync(path, [optionsOrSettings]) + +Returns an array of plain objects ([`Entry`](#entry)) with information about entry for provided path. + +```ts +const entries = fsScandir.scandirSync('path'); +const entries = fsScandir.scandirSync('path', {}); +const entries = fsScandir.scandirSync(('path', new fsScandir.Settings()); +``` + +#### path + +* Required: `true` +* Type: `string | Buffer | URL` + +A path to a file. If a URL is provided, it must use the `file:` protocol. + +#### optionsOrSettings + +* Required: `false` +* Type: `Options | Settings` +* Default: An instance of `Settings` class + +An [`Options`](#options) object or an instance of [`Settings`](#settingsoptions) class. + +> :book: When you pass a plain object, an instance of the `Settings` class will be created automatically. If you plan to call the method frequently, use a pre-created instance of the `Settings` class. + +### Settings([options]) + +A class of full settings of the package. + +```ts +const settings = new fsScandir.Settings({ followSymbolicLinks: false }); + +const entries = fsScandir.scandirSync('path', settings); +``` + +## Entry + +* `name` — The name of the entry (`unknown.txt`). +* `path` — The path of the entry relative to call directory (`root/unknown.txt`). +* `dirent` — An instance of [`fs.Dirent`](./src/types/index.ts) class. On Node.js below 10.10 will be emulated by [`DirentFromStats`](./src/utils/fs.ts) class. +* `stats` (optional) — An instance of `fs.Stats` class. 
+ +For example, the `scandir` call for `tools` directory with one directory inside: + +```ts +{ + dirent: Dirent { name: 'typedoc', /* … */ }, + name: 'typedoc', + path: 'tools/typedoc' +} +``` + +## Options + +### stats + +* Type: `boolean` +* Default: `false` + +Adds an instance of `fs.Stats` class to the [`Entry`](#entry). + +> :book: Always use `fs.readdir` without the `withFileTypes` option. ??TODO?? + +### followSymbolicLinks + +* Type: `boolean` +* Default: `false` + +Follow symbolic links or not. Call `fs.stat` on symbolic link if `true`. + +### `throwErrorOnBrokenSymbolicLink` + +* Type: `boolean` +* Default: `true` + +Throw an error when symbolic link is broken if `true` or safely use `lstat` call if `false`. + +### `pathSegmentSeparator` + +* Type: `string` +* Default: `path.sep` + +By default, this package uses the correct path separator for your OS (`\` on Windows, `/` on Unix-like systems). But you can set this option to any separator character(s) that you want to use instead. + +### `fs` + +* Type: [`FileSystemAdapter`](./src/adapters/fs.ts) +* Default: A default FS methods + +By default, the built-in Node.js module (`fs`) is used to work with the file system. You can replace any method with your own. + +```ts +interface FileSystemAdapter { + lstat?: typeof fs.lstat; + stat?: typeof fs.stat; + lstatSync?: typeof fs.lstatSync; + statSync?: typeof fs.statSync; + readdir?: typeof fs.readdir; + readdirSync?: typeof fs.readdirSync; +} + +const settings = new fsScandir.Settings({ + fs: { lstat: fakeLstat } +}); +``` + +## `old` and `modern` mode + +This package has two modes that are used depending on the environment and parameters of use. + +### old + +* Node.js below `10.10` or when the `stats` option is enabled + +When working in the old mode, the directory is read first (`fs.readdir`), then the type of entries is determined (`fs.lstat` and/or `fs.stat` for symbolic links). + +### modern + +* Node.js 10.10+ and the `stats` option is disabled + +In the modern mode, reading the directory (`fs.readdir` with the `withFileTypes` option) is combined with obtaining information about its entries. An additional call for symbolic links (`fs.stat`) is still present. + +This mode makes fewer calls to the file system. It's faster. + +## Changelog + +See the [Releases section of our GitHub project](https://github.com/nodelib/nodelib/releases) for changelog for each release version. + +## License + +This software is released under the terms of the MIT license. 
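
The options documented above compose: one pre-created `Settings` instance can drive both the callback-style and synchronous calls. A minimal sketch, using only the API described in this README and a hypothetical `./src` directory:

```ts
import * as fsScandir from '@nodelib/fs.scandir';

// Pre-create Settings once if the methods are called frequently.
const settings = new fsScandir.Settings({
  stats: true,               // attach an `fs.Stats` instance to each Entry
  followSymbolicLinks: true, // call `fs.stat` for symbolic links
});

// Callback style.
fsScandir.scandir('./src', settings, (error, entries) => {
  if (error !== null) {
    console.error(error);
    return;
  }
  for (const entry of entries) {
    // Every Entry exposes `name`, `path` and `dirent`; `stats` is present
    // here only because the `stats` option is enabled.
    console.log(entry.path, entry.dirent.isDirectory(), entry.stats?.size);
  }
});

// Synchronous style.
const entries = fsScandir.scandirSync('./src', settings);
console.log(entries.map((entry) => entry.name));
```
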
diff --git a/node_modules/@nodelib/fs.scandir/out/adapters/fs.d.ts b/node_modules/@nodelib/fs.scandir/out/adapters/fs.d.ts new file mode 100644 index 0000000..827f1db --- /dev/null +++ b/node_modules/@nodelib/fs.scandir/out/adapters/fs.d.ts @@ -0,0 +1,20 @@ +import type * as fsStat from '@nodelib/fs.stat'; +import type { Dirent, ErrnoException } from '../types'; +export interface ReaddirAsynchronousMethod { + (filepath: string, options: { + withFileTypes: true; + }, callback: (error: ErrnoException | null, files: Dirent[]) => void): void; + (filepath: string, callback: (error: ErrnoException | null, files: string[]) => void): void; +} +export interface ReaddirSynchronousMethod { + (filepath: string, options: { + withFileTypes: true; + }): Dirent[]; + (filepath: string): string[]; +} +export declare type FileSystemAdapter = fsStat.FileSystemAdapter & { + readdir: ReaddirAsynchronousMethod; + readdirSync: ReaddirSynchronousMethod; +}; +export declare const FILE_SYSTEM_ADAPTER: FileSystemAdapter; +export declare function createFileSystemAdapter(fsMethods?: Partial): FileSystemAdapter; diff --git a/node_modules/@nodelib/fs.scandir/out/adapters/fs.js b/node_modules/@nodelib/fs.scandir/out/adapters/fs.js new file mode 100644 index 0000000..f0fe022 --- /dev/null +++ b/node_modules/@nodelib/fs.scandir/out/adapters/fs.js @@ -0,0 +1,19 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.createFileSystemAdapter = exports.FILE_SYSTEM_ADAPTER = void 0; +const fs = require("fs"); +exports.FILE_SYSTEM_ADAPTER = { + lstat: fs.lstat, + stat: fs.stat, + lstatSync: fs.lstatSync, + statSync: fs.statSync, + readdir: fs.readdir, + readdirSync: fs.readdirSync +}; +function createFileSystemAdapter(fsMethods) { + if (fsMethods === undefined) { + return exports.FILE_SYSTEM_ADAPTER; + } + return Object.assign(Object.assign({}, exports.FILE_SYSTEM_ADAPTER), fsMethods); +} +exports.createFileSystemAdapter = createFileSystemAdapter; diff --git a/node_modules/@nodelib/fs.scandir/out/constants.d.ts b/node_modules/@nodelib/fs.scandir/out/constants.d.ts new file mode 100644 index 0000000..33f1749 --- /dev/null +++ b/node_modules/@nodelib/fs.scandir/out/constants.d.ts @@ -0,0 +1,4 @@ +/** + * IS `true` for Node.js 10.10 and greater. + */ +export declare const IS_SUPPORT_READDIR_WITH_FILE_TYPES: boolean; diff --git a/node_modules/@nodelib/fs.scandir/out/constants.js b/node_modules/@nodelib/fs.scandir/out/constants.js new file mode 100644 index 0000000..7e3d441 --- /dev/null +++ b/node_modules/@nodelib/fs.scandir/out/constants.js @@ -0,0 +1,17 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.IS_SUPPORT_READDIR_WITH_FILE_TYPES = void 0; +const NODE_PROCESS_VERSION_PARTS = process.versions.node.split('.'); +if (NODE_PROCESS_VERSION_PARTS[0] === undefined || NODE_PROCESS_VERSION_PARTS[1] === undefined) { + throw new Error(`Unexpected behavior. The 'process.versions.node' variable has invalid value: ${process.versions.node}`); +} +const MAJOR_VERSION = Number.parseInt(NODE_PROCESS_VERSION_PARTS[0], 10); +const MINOR_VERSION = Number.parseInt(NODE_PROCESS_VERSION_PARTS[1], 10); +const SUPPORTED_MAJOR_VERSION = 10; +const SUPPORTED_MINOR_VERSION = 10; +const IS_MATCHED_BY_MAJOR = MAJOR_VERSION > SUPPORTED_MAJOR_VERSION; +const IS_MATCHED_BY_MAJOR_AND_MINOR = MAJOR_VERSION === SUPPORTED_MAJOR_VERSION && MINOR_VERSION >= SUPPORTED_MINOR_VERSION; +/** + * IS `true` for Node.js 10.10 and greater. 
+ */ +exports.IS_SUPPORT_READDIR_WITH_FILE_TYPES = IS_MATCHED_BY_MAJOR || IS_MATCHED_BY_MAJOR_AND_MINOR; diff --git a/node_modules/@nodelib/fs.scandir/out/index.d.ts b/node_modules/@nodelib/fs.scandir/out/index.d.ts new file mode 100644 index 0000000..b9da83e --- /dev/null +++ b/node_modules/@nodelib/fs.scandir/out/index.d.ts @@ -0,0 +1,12 @@ +import type { FileSystemAdapter, ReaddirAsynchronousMethod, ReaddirSynchronousMethod } from './adapters/fs'; +import * as async from './providers/async'; +import Settings, { Options } from './settings'; +import type { Dirent, Entry } from './types'; +declare type AsyncCallback = async.AsyncCallback; +declare function scandir(path: string, callback: AsyncCallback): void; +declare function scandir(path: string, optionsOrSettings: Options | Settings, callback: AsyncCallback): void; +declare namespace scandir { + function __promisify__(path: string, optionsOrSettings?: Options | Settings): Promise; +} +declare function scandirSync(path: string, optionsOrSettings?: Options | Settings): Entry[]; +export { scandir, scandirSync, Settings, AsyncCallback, Dirent, Entry, FileSystemAdapter, ReaddirAsynchronousMethod, ReaddirSynchronousMethod, Options }; diff --git a/node_modules/@nodelib/fs.scandir/out/index.js b/node_modules/@nodelib/fs.scandir/out/index.js new file mode 100644 index 0000000..99c70d3 --- /dev/null +++ b/node_modules/@nodelib/fs.scandir/out/index.js @@ -0,0 +1,26 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Settings = exports.scandirSync = exports.scandir = void 0; +const async = require("./providers/async"); +const sync = require("./providers/sync"); +const settings_1 = require("./settings"); +exports.Settings = settings_1.default; +function scandir(path, optionsOrSettingsOrCallback, callback) { + if (typeof optionsOrSettingsOrCallback === 'function') { + async.read(path, getSettings(), optionsOrSettingsOrCallback); + return; + } + async.read(path, getSettings(optionsOrSettingsOrCallback), callback); +} +exports.scandir = scandir; +function scandirSync(path, optionsOrSettings) { + const settings = getSettings(optionsOrSettings); + return sync.read(path, settings); +} +exports.scandirSync = scandirSync; +function getSettings(settingsOrOptions = {}) { + if (settingsOrOptions instanceof settings_1.default) { + return settingsOrOptions; + } + return new settings_1.default(settingsOrOptions); +} diff --git a/node_modules/@nodelib/fs.scandir/out/providers/async.d.ts b/node_modules/@nodelib/fs.scandir/out/providers/async.d.ts new file mode 100644 index 0000000..5829676 --- /dev/null +++ b/node_modules/@nodelib/fs.scandir/out/providers/async.d.ts @@ -0,0 +1,7 @@ +/// +import type Settings from '../settings'; +import type { Entry } from '../types'; +export declare type AsyncCallback = (error: NodeJS.ErrnoException, entries: Entry[]) => void; +export declare function read(directory: string, settings: Settings, callback: AsyncCallback): void; +export declare function readdirWithFileTypes(directory: string, settings: Settings, callback: AsyncCallback): void; +export declare function readdir(directory: string, settings: Settings, callback: AsyncCallback): void; diff --git a/node_modules/@nodelib/fs.scandir/out/providers/async.js b/node_modules/@nodelib/fs.scandir/out/providers/async.js new file mode 100644 index 0000000..e8e2f0a --- /dev/null +++ b/node_modules/@nodelib/fs.scandir/out/providers/async.js @@ -0,0 +1,104 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); 
+exports.readdir = exports.readdirWithFileTypes = exports.read = void 0; +const fsStat = require("@nodelib/fs.stat"); +const rpl = require("run-parallel"); +const constants_1 = require("../constants"); +const utils = require("../utils"); +const common = require("./common"); +function read(directory, settings, callback) { + if (!settings.stats && constants_1.IS_SUPPORT_READDIR_WITH_FILE_TYPES) { + readdirWithFileTypes(directory, settings, callback); + return; + } + readdir(directory, settings, callback); +} +exports.read = read; +function readdirWithFileTypes(directory, settings, callback) { + settings.fs.readdir(directory, { withFileTypes: true }, (readdirError, dirents) => { + if (readdirError !== null) { + callFailureCallback(callback, readdirError); + return; + } + const entries = dirents.map((dirent) => ({ + dirent, + name: dirent.name, + path: common.joinPathSegments(directory, dirent.name, settings.pathSegmentSeparator) + })); + if (!settings.followSymbolicLinks) { + callSuccessCallback(callback, entries); + return; + } + const tasks = entries.map((entry) => makeRplTaskEntry(entry, settings)); + rpl(tasks, (rplError, rplEntries) => { + if (rplError !== null) { + callFailureCallback(callback, rplError); + return; + } + callSuccessCallback(callback, rplEntries); + }); + }); +} +exports.readdirWithFileTypes = readdirWithFileTypes; +function makeRplTaskEntry(entry, settings) { + return (done) => { + if (!entry.dirent.isSymbolicLink()) { + done(null, entry); + return; + } + settings.fs.stat(entry.path, (statError, stats) => { + if (statError !== null) { + if (settings.throwErrorOnBrokenSymbolicLink) { + done(statError); + return; + } + done(null, entry); + return; + } + entry.dirent = utils.fs.createDirentFromStats(entry.name, stats); + done(null, entry); + }); + }; +} +function readdir(directory, settings, callback) { + settings.fs.readdir(directory, (readdirError, names) => { + if (readdirError !== null) { + callFailureCallback(callback, readdirError); + return; + } + const tasks = names.map((name) => { + const path = common.joinPathSegments(directory, name, settings.pathSegmentSeparator); + return (done) => { + fsStat.stat(path, settings.fsStatSettings, (error, stats) => { + if (error !== null) { + done(error); + return; + } + const entry = { + name, + path, + dirent: utils.fs.createDirentFromStats(name, stats) + }; + if (settings.stats) { + entry.stats = stats; + } + done(null, entry); + }); + }; + }); + rpl(tasks, (rplError, entries) => { + if (rplError !== null) { + callFailureCallback(callback, rplError); + return; + } + callSuccessCallback(callback, entries); + }); + }); +} +exports.readdir = readdir; +function callFailureCallback(callback, error) { + callback(error); +} +function callSuccessCallback(callback, result) { + callback(null, result); +} diff --git a/node_modules/@nodelib/fs.scandir/out/providers/common.d.ts b/node_modules/@nodelib/fs.scandir/out/providers/common.d.ts new file mode 100644 index 0000000..2b4d08b --- /dev/null +++ b/node_modules/@nodelib/fs.scandir/out/providers/common.d.ts @@ -0,0 +1 @@ +export declare function joinPathSegments(a: string, b: string, separator: string): string; diff --git a/node_modules/@nodelib/fs.scandir/out/providers/common.js b/node_modules/@nodelib/fs.scandir/out/providers/common.js new file mode 100644 index 0000000..8724cb5 --- /dev/null +++ b/node_modules/@nodelib/fs.scandir/out/providers/common.js @@ -0,0 +1,13 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.joinPathSegments = void 0; 
+function joinPathSegments(a, b, separator) { + /** + * The correct handling of cases when the first segment is a root (`/`, `C:/`) or UNC path (`//?/C:/`). + */ + if (a.endsWith(separator)) { + return a + b; + } + return a + separator + b; +} +exports.joinPathSegments = joinPathSegments; diff --git a/node_modules/@nodelib/fs.scandir/out/providers/sync.d.ts b/node_modules/@nodelib/fs.scandir/out/providers/sync.d.ts new file mode 100644 index 0000000..e05c8f0 --- /dev/null +++ b/node_modules/@nodelib/fs.scandir/out/providers/sync.d.ts @@ -0,0 +1,5 @@ +import type Settings from '../settings'; +import type { Entry } from '../types'; +export declare function read(directory: string, settings: Settings): Entry[]; +export declare function readdirWithFileTypes(directory: string, settings: Settings): Entry[]; +export declare function readdir(directory: string, settings: Settings): Entry[]; diff --git a/node_modules/@nodelib/fs.scandir/out/providers/sync.js b/node_modules/@nodelib/fs.scandir/out/providers/sync.js new file mode 100644 index 0000000..146db34 --- /dev/null +++ b/node_modules/@nodelib/fs.scandir/out/providers/sync.js @@ -0,0 +1,54 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.readdir = exports.readdirWithFileTypes = exports.read = void 0; +const fsStat = require("@nodelib/fs.stat"); +const constants_1 = require("../constants"); +const utils = require("../utils"); +const common = require("./common"); +function read(directory, settings) { + if (!settings.stats && constants_1.IS_SUPPORT_READDIR_WITH_FILE_TYPES) { + return readdirWithFileTypes(directory, settings); + } + return readdir(directory, settings); +} +exports.read = read; +function readdirWithFileTypes(directory, settings) { + const dirents = settings.fs.readdirSync(directory, { withFileTypes: true }); + return dirents.map((dirent) => { + const entry = { + dirent, + name: dirent.name, + path: common.joinPathSegments(directory, dirent.name, settings.pathSegmentSeparator) + }; + if (entry.dirent.isSymbolicLink() && settings.followSymbolicLinks) { + try { + const stats = settings.fs.statSync(entry.path); + entry.dirent = utils.fs.createDirentFromStats(entry.name, stats); + } + catch (error) { + if (settings.throwErrorOnBrokenSymbolicLink) { + throw error; + } + } + } + return entry; + }); +} +exports.readdirWithFileTypes = readdirWithFileTypes; +function readdir(directory, settings) { + const names = settings.fs.readdirSync(directory); + return names.map((name) => { + const entryPath = common.joinPathSegments(directory, name, settings.pathSegmentSeparator); + const stats = fsStat.statSync(entryPath, settings.fsStatSettings); + const entry = { + name, + path: entryPath, + dirent: utils.fs.createDirentFromStats(name, stats) + }; + if (settings.stats) { + entry.stats = stats; + } + return entry; + }); +} +exports.readdir = readdir; diff --git a/node_modules/@nodelib/fs.scandir/out/settings.d.ts b/node_modules/@nodelib/fs.scandir/out/settings.d.ts new file mode 100644 index 0000000..a0db115 --- /dev/null +++ b/node_modules/@nodelib/fs.scandir/out/settings.d.ts @@ -0,0 +1,20 @@ +import * as fsStat from '@nodelib/fs.stat'; +import * as fs from './adapters/fs'; +export interface Options { + followSymbolicLinks?: boolean; + fs?: Partial; + pathSegmentSeparator?: string; + stats?: boolean; + throwErrorOnBrokenSymbolicLink?: boolean; +} +export default class Settings { + private readonly _options; + readonly followSymbolicLinks: boolean; + readonly fs: fs.FileSystemAdapter; + readonly 
pathSegmentSeparator: string; + readonly stats: boolean; + readonly throwErrorOnBrokenSymbolicLink: boolean; + readonly fsStatSettings: fsStat.Settings; + constructor(_options?: Options); + private _getValue; +} diff --git a/node_modules/@nodelib/fs.scandir/out/settings.js b/node_modules/@nodelib/fs.scandir/out/settings.js new file mode 100644 index 0000000..15a3e8c --- /dev/null +++ b/node_modules/@nodelib/fs.scandir/out/settings.js @@ -0,0 +1,24 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const path = require("path"); +const fsStat = require("@nodelib/fs.stat"); +const fs = require("./adapters/fs"); +class Settings { + constructor(_options = {}) { + this._options = _options; + this.followSymbolicLinks = this._getValue(this._options.followSymbolicLinks, false); + this.fs = fs.createFileSystemAdapter(this._options.fs); + this.pathSegmentSeparator = this._getValue(this._options.pathSegmentSeparator, path.sep); + this.stats = this._getValue(this._options.stats, false); + this.throwErrorOnBrokenSymbolicLink = this._getValue(this._options.throwErrorOnBrokenSymbolicLink, true); + this.fsStatSettings = new fsStat.Settings({ + followSymbolicLink: this.followSymbolicLinks, + fs: this.fs, + throwErrorOnBrokenSymbolicLink: this.throwErrorOnBrokenSymbolicLink + }); + } + _getValue(option, value) { + return option !== null && option !== void 0 ? option : value; + } +} +exports.default = Settings; diff --git a/node_modules/@nodelib/fs.scandir/out/types/index.d.ts b/node_modules/@nodelib/fs.scandir/out/types/index.d.ts new file mode 100644 index 0000000..f326c5e --- /dev/null +++ b/node_modules/@nodelib/fs.scandir/out/types/index.d.ts @@ -0,0 +1,20 @@ +/// +import type * as fs from 'fs'; +export interface Entry { + dirent: Dirent; + name: string; + path: string; + stats?: Stats; +} +export declare type Stats = fs.Stats; +export declare type ErrnoException = NodeJS.ErrnoException; +export interface Dirent { + isBlockDevice: () => boolean; + isCharacterDevice: () => boolean; + isDirectory: () => boolean; + isFIFO: () => boolean; + isFile: () => boolean; + isSocket: () => boolean; + isSymbolicLink: () => boolean; + name: string; +} diff --git a/node_modules/@nodelib/fs.scandir/out/types/index.js b/node_modules/@nodelib/fs.scandir/out/types/index.js new file mode 100644 index 0000000..c8ad2e5 --- /dev/null +++ b/node_modules/@nodelib/fs.scandir/out/types/index.js @@ -0,0 +1,2 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/node_modules/@nodelib/fs.scandir/out/utils/fs.d.ts b/node_modules/@nodelib/fs.scandir/out/utils/fs.d.ts new file mode 100644 index 0000000..bb863f1 --- /dev/null +++ b/node_modules/@nodelib/fs.scandir/out/utils/fs.d.ts @@ -0,0 +1,2 @@ +import type { Dirent, Stats } from '../types'; +export declare function createDirentFromStats(name: string, stats: Stats): Dirent; diff --git a/node_modules/@nodelib/fs.scandir/out/utils/fs.js b/node_modules/@nodelib/fs.scandir/out/utils/fs.js new file mode 100644 index 0000000..ace7c74 --- /dev/null +++ b/node_modules/@nodelib/fs.scandir/out/utils/fs.js @@ -0,0 +1,19 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.createDirentFromStats = void 0; +class DirentFromStats { + constructor(name, stats) { + this.name = name; + this.isBlockDevice = stats.isBlockDevice.bind(stats); + this.isCharacterDevice = stats.isCharacterDevice.bind(stats); + this.isDirectory = stats.isDirectory.bind(stats); + this.isFIFO = stats.isFIFO.bind(stats); + 
this.isFile = stats.isFile.bind(stats); + this.isSocket = stats.isSocket.bind(stats); + this.isSymbolicLink = stats.isSymbolicLink.bind(stats); + } +} +function createDirentFromStats(name, stats) { + return new DirentFromStats(name, stats); +} +exports.createDirentFromStats = createDirentFromStats; diff --git a/node_modules/@nodelib/fs.scandir/out/utils/index.d.ts b/node_modules/@nodelib/fs.scandir/out/utils/index.d.ts new file mode 100644 index 0000000..1b41954 --- /dev/null +++ b/node_modules/@nodelib/fs.scandir/out/utils/index.d.ts @@ -0,0 +1,2 @@ +import * as fs from './fs'; +export { fs }; diff --git a/node_modules/@nodelib/fs.scandir/out/utils/index.js b/node_modules/@nodelib/fs.scandir/out/utils/index.js new file mode 100644 index 0000000..f5de129 --- /dev/null +++ b/node_modules/@nodelib/fs.scandir/out/utils/index.js @@ -0,0 +1,5 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.fs = void 0; +const fs = require("./fs"); +exports.fs = fs; diff --git a/node_modules/@nodelib/fs.scandir/package.json b/node_modules/@nodelib/fs.scandir/package.json new file mode 100644 index 0000000..d3a8924 --- /dev/null +++ b/node_modules/@nodelib/fs.scandir/package.json @@ -0,0 +1,44 @@ +{ + "name": "@nodelib/fs.scandir", + "version": "2.1.5", + "description": "List files and directories inside the specified directory", + "license": "MIT", + "repository": "https://github.com/nodelib/nodelib/tree/master/packages/fs/fs.scandir", + "keywords": [ + "NodeLib", + "fs", + "FileSystem", + "file system", + "scandir", + "readdir", + "dirent" + ], + "engines": { + "node": ">= 8" + }, + "files": [ + "out/**", + "!out/**/*.map", + "!out/**/*.spec.*" + ], + "main": "out/index.js", + "typings": "out/index.d.ts", + "scripts": { + "clean": "rimraf {tsconfig.tsbuildinfo,out}", + "lint": "eslint \"src/**/*.ts\" --cache", + "compile": "tsc -b .", + "compile:watch": "tsc -p . --watch --sourceMap", + "test": "mocha \"out/**/*.spec.js\" -s 0", + "build": "npm run clean && npm run compile && npm run lint && npm test", + "watch": "npm run clean && npm run compile:watch" + }, + "dependencies": { + "@nodelib/fs.stat": "2.0.5", + "run-parallel": "^1.1.9" + }, + "devDependencies": { + "@nodelib/fs.macchiato": "1.0.4", + "@types/run-parallel": "^1.1.0" + }, + "gitHead": "d6a7960d5281d3dd5f8e2efba49bb552d090f562" +} diff --git a/node_modules/@nodelib/fs.stat/LICENSE b/node_modules/@nodelib/fs.stat/LICENSE new file mode 100644 index 0000000..65a9994 --- /dev/null +++ b/node_modules/@nodelib/fs.stat/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Denis Malinochkin + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/@nodelib/fs.stat/README.md b/node_modules/@nodelib/fs.stat/README.md new file mode 100644 index 0000000..686f047 --- /dev/null +++ b/node_modules/@nodelib/fs.stat/README.md @@ -0,0 +1,126 @@ +# @nodelib/fs.stat + +> Get the status of a file with some features. + +## :bulb: Highlights + +A wrapper around the standard `fs.lstat` and `fs.stat` methods with some extra features. + +* :beginner: Normally follows symbolic link. +* :gear: Can safely work with broken symbolic link. + +## Install + +```console +npm install @nodelib/fs.stat +``` + +## Usage + +```ts +import * as fsStat from '@nodelib/fs.stat'; + +fsStat.stat('path', (error, stats) => { /* … */ }); +``` + +## API + +### .stat(path, [optionsOrSettings], callback) + +Returns an instance of the `fs.Stats` class for the provided path, using the standard callback style. + +```ts +fsStat.stat('path', (error, stats) => { /* … */ }); +fsStat.stat('path', {}, (error, stats) => { /* … */ }); +fsStat.stat('path', new fsStat.Settings(), (error, stats) => { /* … */ }); +``` + +### .statSync(path, [optionsOrSettings]) + +Returns an instance of the `fs.Stats` class for the provided path. + +```ts +const stats = fsStat.statSync('path'); +const stats = fsStat.statSync('path', {}); +const stats = fsStat.statSync('path', new fsStat.Settings()); +``` + +#### path + +* Required: `true` +* Type: `string | Buffer | URL` + +A path to a file. If a URL is provided, it must use the `file:` protocol. + +#### optionsOrSettings + +* Required: `false` +* Type: `Options | Settings` +* Default: An instance of `Settings` class + +An [`Options`](#options) object or an instance of [`Settings`](#settings) class. + +> :book: When you pass a plain object, an instance of the `Settings` class will be created automatically. If you plan to call the method frequently, use a pre-created instance of the `Settings` class. + +### Settings([options]) + +A class of full settings of the package. + +```ts +const settings = new fsStat.Settings({ followSymbolicLink: false }); + +const stats = fsStat.statSync('path', settings); +``` + +## Options + +### `followSymbolicLink` + +* Type: `boolean` +* Default: `true` + +Follow symbolic link or not. Call `fs.stat` on symbolic link if `true`. + +### `markSymbolicLink` + +* Type: `boolean` +* Default: `false` + +Mark symbolic link by setting the return value of `isSymbolicLink` function to always `true` (even after `fs.stat`). + +> :book: Can be used if you want to know what is hidden behind a symbolic link, but still continue to know that it is a symbolic link. + +### `throwErrorOnBrokenSymbolicLink` + +* Type: `boolean` +* Default: `true` + +Throw an error when symbolic link is broken if `true` or safely return `lstat` call if `false`. + +### `fs` + +* Type: [`FileSystemAdapter`](./src/adapters/fs.ts) +* Default: The built-in `fs` methods + +By default, the built-in Node.js module (`fs`) is used to work with the file system. You can replace any method with your own.
+ +```ts +interface FileSystemAdapter { + lstat?: typeof fs.lstat; + stat?: typeof fs.stat; + lstatSync?: typeof fs.lstatSync; + statSync?: typeof fs.statSync; +} + +const settings = new fsStat.Settings({ + fs: { lstat: fakeLstat } +}); +``` + +## Changelog + +See the [Releases section of our GitHub project](https://github.com/nodelib/nodelib/releases) for changelog for each release version. + +## License + +This software is released under the terms of the MIT license. diff --git a/node_modules/@nodelib/fs.stat/out/adapters/fs.d.ts b/node_modules/@nodelib/fs.stat/out/adapters/fs.d.ts new file mode 100644 index 0000000..3af759c --- /dev/null +++ b/node_modules/@nodelib/fs.stat/out/adapters/fs.d.ts @@ -0,0 +1,13 @@ +/// +import * as fs from 'fs'; +import type { ErrnoException } from '../types'; +export declare type StatAsynchronousMethod = (path: string, callback: (error: ErrnoException | null, stats: fs.Stats) => void) => void; +export declare type StatSynchronousMethod = (path: string) => fs.Stats; +export interface FileSystemAdapter { + lstat: StatAsynchronousMethod; + stat: StatAsynchronousMethod; + lstatSync: StatSynchronousMethod; + statSync: StatSynchronousMethod; +} +export declare const FILE_SYSTEM_ADAPTER: FileSystemAdapter; +export declare function createFileSystemAdapter(fsMethods?: Partial): FileSystemAdapter; diff --git a/node_modules/@nodelib/fs.stat/out/adapters/fs.js b/node_modules/@nodelib/fs.stat/out/adapters/fs.js new file mode 100644 index 0000000..8dc08c8 --- /dev/null +++ b/node_modules/@nodelib/fs.stat/out/adapters/fs.js @@ -0,0 +1,17 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.createFileSystemAdapter = exports.FILE_SYSTEM_ADAPTER = void 0; +const fs = require("fs"); +exports.FILE_SYSTEM_ADAPTER = { + lstat: fs.lstat, + stat: fs.stat, + lstatSync: fs.lstatSync, + statSync: fs.statSync +}; +function createFileSystemAdapter(fsMethods) { + if (fsMethods === undefined) { + return exports.FILE_SYSTEM_ADAPTER; + } + return Object.assign(Object.assign({}, exports.FILE_SYSTEM_ADAPTER), fsMethods); +} +exports.createFileSystemAdapter = createFileSystemAdapter; diff --git a/node_modules/@nodelib/fs.stat/out/index.d.ts b/node_modules/@nodelib/fs.stat/out/index.d.ts new file mode 100644 index 0000000..f95db99 --- /dev/null +++ b/node_modules/@nodelib/fs.stat/out/index.d.ts @@ -0,0 +1,12 @@ +import type { FileSystemAdapter, StatAsynchronousMethod, StatSynchronousMethod } from './adapters/fs'; +import * as async from './providers/async'; +import Settings, { Options } from './settings'; +import type { Stats } from './types'; +declare type AsyncCallback = async.AsyncCallback; +declare function stat(path: string, callback: AsyncCallback): void; +declare function stat(path: string, optionsOrSettings: Options | Settings, callback: AsyncCallback): void; +declare namespace stat { + function __promisify__(path: string, optionsOrSettings?: Options | Settings): Promise; +} +declare function statSync(path: string, optionsOrSettings?: Options | Settings): Stats; +export { Settings, stat, statSync, AsyncCallback, FileSystemAdapter, StatAsynchronousMethod, StatSynchronousMethod, Options, Stats }; diff --git a/node_modules/@nodelib/fs.stat/out/index.js b/node_modules/@nodelib/fs.stat/out/index.js new file mode 100644 index 0000000..b23f751 --- /dev/null +++ b/node_modules/@nodelib/fs.stat/out/index.js @@ -0,0 +1,26 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.statSync = exports.stat = exports.Settings = 
void 0; +const async = require("./providers/async"); +const sync = require("./providers/sync"); +const settings_1 = require("./settings"); +exports.Settings = settings_1.default; +function stat(path, optionsOrSettingsOrCallback, callback) { + if (typeof optionsOrSettingsOrCallback === 'function') { + async.read(path, getSettings(), optionsOrSettingsOrCallback); + return; + } + async.read(path, getSettings(optionsOrSettingsOrCallback), callback); +} +exports.stat = stat; +function statSync(path, optionsOrSettings) { + const settings = getSettings(optionsOrSettings); + return sync.read(path, settings); +} +exports.statSync = statSync; +function getSettings(settingsOrOptions = {}) { + if (settingsOrOptions instanceof settings_1.default) { + return settingsOrOptions; + } + return new settings_1.default(settingsOrOptions); +} diff --git a/node_modules/@nodelib/fs.stat/out/providers/async.d.ts b/node_modules/@nodelib/fs.stat/out/providers/async.d.ts new file mode 100644 index 0000000..85423ce --- /dev/null +++ b/node_modules/@nodelib/fs.stat/out/providers/async.d.ts @@ -0,0 +1,4 @@ +import type Settings from '../settings'; +import type { ErrnoException, Stats } from '../types'; +export declare type AsyncCallback = (error: ErrnoException, stats: Stats) => void; +export declare function read(path: string, settings: Settings, callback: AsyncCallback): void; diff --git a/node_modules/@nodelib/fs.stat/out/providers/async.js b/node_modules/@nodelib/fs.stat/out/providers/async.js new file mode 100644 index 0000000..983ff0e --- /dev/null +++ b/node_modules/@nodelib/fs.stat/out/providers/async.js @@ -0,0 +1,36 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.read = void 0; +function read(path, settings, callback) { + settings.fs.lstat(path, (lstatError, lstat) => { + if (lstatError !== null) { + callFailureCallback(callback, lstatError); + return; + } + if (!lstat.isSymbolicLink() || !settings.followSymbolicLink) { + callSuccessCallback(callback, lstat); + return; + } + settings.fs.stat(path, (statError, stat) => { + if (statError !== null) { + if (settings.throwErrorOnBrokenSymbolicLink) { + callFailureCallback(callback, statError); + return; + } + callSuccessCallback(callback, lstat); + return; + } + if (settings.markSymbolicLink) { + stat.isSymbolicLink = () => true; + } + callSuccessCallback(callback, stat); + }); + }); +} +exports.read = read; +function callFailureCallback(callback, error) { + callback(error); +} +function callSuccessCallback(callback, result) { + callback(null, result); +} diff --git a/node_modules/@nodelib/fs.stat/out/providers/sync.d.ts b/node_modules/@nodelib/fs.stat/out/providers/sync.d.ts new file mode 100644 index 0000000..428c3d7 --- /dev/null +++ b/node_modules/@nodelib/fs.stat/out/providers/sync.d.ts @@ -0,0 +1,3 @@ +import type Settings from '../settings'; +import type { Stats } from '../types'; +export declare function read(path: string, settings: Settings): Stats; diff --git a/node_modules/@nodelib/fs.stat/out/providers/sync.js b/node_modules/@nodelib/fs.stat/out/providers/sync.js new file mode 100644 index 0000000..1521c36 --- /dev/null +++ b/node_modules/@nodelib/fs.stat/out/providers/sync.js @@ -0,0 +1,23 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.read = void 0; +function read(path, settings) { + const lstat = settings.fs.lstatSync(path); + if (!lstat.isSymbolicLink() || !settings.followSymbolicLink) { + return lstat; + } + try { + const stat = settings.fs.statSync(path); + if 
(settings.markSymbolicLink) { + stat.isSymbolicLink = () => true; + } + return stat; + } + catch (error) { + if (!settings.throwErrorOnBrokenSymbolicLink) { + return lstat; + } + throw error; + } +} +exports.read = read; diff --git a/node_modules/@nodelib/fs.stat/out/settings.d.ts b/node_modules/@nodelib/fs.stat/out/settings.d.ts new file mode 100644 index 0000000..f4b3d44 --- /dev/null +++ b/node_modules/@nodelib/fs.stat/out/settings.d.ts @@ -0,0 +1,16 @@ +import * as fs from './adapters/fs'; +export interface Options { + followSymbolicLink?: boolean; + fs?: Partial; + markSymbolicLink?: boolean; + throwErrorOnBrokenSymbolicLink?: boolean; +} +export default class Settings { + private readonly _options; + readonly followSymbolicLink: boolean; + readonly fs: fs.FileSystemAdapter; + readonly markSymbolicLink: boolean; + readonly throwErrorOnBrokenSymbolicLink: boolean; + constructor(_options?: Options); + private _getValue; +} diff --git a/node_modules/@nodelib/fs.stat/out/settings.js b/node_modules/@nodelib/fs.stat/out/settings.js new file mode 100644 index 0000000..111ec09 --- /dev/null +++ b/node_modules/@nodelib/fs.stat/out/settings.js @@ -0,0 +1,16 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const fs = require("./adapters/fs"); +class Settings { + constructor(_options = {}) { + this._options = _options; + this.followSymbolicLink = this._getValue(this._options.followSymbolicLink, true); + this.fs = fs.createFileSystemAdapter(this._options.fs); + this.markSymbolicLink = this._getValue(this._options.markSymbolicLink, false); + this.throwErrorOnBrokenSymbolicLink = this._getValue(this._options.throwErrorOnBrokenSymbolicLink, true); + } + _getValue(option, value) { + return option !== null && option !== void 0 ? option : value; + } +} +exports.default = Settings; diff --git a/node_modules/@nodelib/fs.stat/out/types/index.d.ts b/node_modules/@nodelib/fs.stat/out/types/index.d.ts new file mode 100644 index 0000000..74c08ed --- /dev/null +++ b/node_modules/@nodelib/fs.stat/out/types/index.d.ts @@ -0,0 +1,4 @@ +/// +import type * as fs from 'fs'; +export declare type Stats = fs.Stats; +export declare type ErrnoException = NodeJS.ErrnoException; diff --git a/node_modules/@nodelib/fs.stat/out/types/index.js b/node_modules/@nodelib/fs.stat/out/types/index.js new file mode 100644 index 0000000..c8ad2e5 --- /dev/null +++ b/node_modules/@nodelib/fs.stat/out/types/index.js @@ -0,0 +1,2 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/node_modules/@nodelib/fs.stat/package.json b/node_modules/@nodelib/fs.stat/package.json new file mode 100644 index 0000000..f2540c2 --- /dev/null +++ b/node_modules/@nodelib/fs.stat/package.json @@ -0,0 +1,37 @@ +{ + "name": "@nodelib/fs.stat", + "version": "2.0.5", + "description": "Get the status of a file with some features", + "license": "MIT", + "repository": "https://github.com/nodelib/nodelib/tree/master/packages/fs/fs.stat", + "keywords": [ + "NodeLib", + "fs", + "FileSystem", + "file system", + "stat" + ], + "engines": { + "node": ">= 8" + }, + "files": [ + "out/**", + "!out/**/*.map", + "!out/**/*.spec.*" + ], + "main": "out/index.js", + "typings": "out/index.d.ts", + "scripts": { + "clean": "rimraf {tsconfig.tsbuildinfo,out}", + "lint": "eslint \"src/**/*.ts\" --cache", + "compile": "tsc -b .", + "compile:watch": "tsc -p . 
--watch --sourceMap", + "test": "mocha \"out/**/*.spec.js\" -s 0", + "build": "npm run clean && npm run compile && npm run lint && npm test", + "watch": "npm run clean && npm run compile:watch" + }, + "devDependencies": { + "@nodelib/fs.macchiato": "1.0.4" + }, + "gitHead": "d6a7960d5281d3dd5f8e2efba49bb552d090f562" +} diff --git a/node_modules/@nodelib/fs.walk/LICENSE b/node_modules/@nodelib/fs.walk/LICENSE new file mode 100644 index 0000000..65a9994 --- /dev/null +++ b/node_modules/@nodelib/fs.walk/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Denis Malinochkin + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/@nodelib/fs.walk/README.md b/node_modules/@nodelib/fs.walk/README.md new file mode 100644 index 0000000..6ccc08d --- /dev/null +++ b/node_modules/@nodelib/fs.walk/README.md @@ -0,0 +1,215 @@ +# @nodelib/fs.walk + +> A library for efficiently walking a directory recursively. + +## :bulb: Highlights + +* :moneybag: Returns useful information: `name`, `path`, `dirent` and `stats` (optional). +* :rocket: On Node.js 10.10+ uses the mechanism without additional calls to determine the entry type for performance reasons. See [`old` and `modern` mode](https://github.com/nodelib/nodelib/blob/master/packages/fs/fs.scandir/README.md#old-and-modern-mode). +* :gear: Built-in directories/files and error filtering system. +* :link: Can safely work with broken symbolic links. + +## Install + +```console +npm install @nodelib/fs.walk +``` + +## Usage + +```ts +import * as fsWalk from '@nodelib/fs.walk'; + +fsWalk.walk('path', (error, entries) => { /* … */ }); +``` + +## API + +### .walk(path, [optionsOrSettings], callback) + +Reads the directory recursively and asynchronously. Requires a callback function. + +> :book: If you want to use the Promise API, use `util.promisify`. + +```ts +fsWalk.walk('path', (error, entries) => { /* … */ }); +fsWalk.walk('path', {}, (error, entries) => { /* … */ }); +fsWalk.walk('path', new fsWalk.Settings(), (error, entries) => { /* … */ }); +``` + +### .walkStream(path, [optionsOrSettings]) + +Reads the directory recursively and asynchronously. [Readable Stream](https://nodejs.org/dist/latest-v12.x/docs/api/stream.html#stream_readable_streams) is used as a provider. + +```ts +const stream = fsWalk.walkStream('path'); +const stream = fsWalk.walkStream('path', {}); +const stream = fsWalk.walkStream('path', new fsWalk.Settings()); +``` + +### .walkSync(path, [optionsOrSettings]) + +Reads the directory recursively and synchronously. 
Returns an array of entries. + +```ts +const entries = fsWalk.walkSync('path'); +const entries = fsWalk.walkSync('path', {}); +const entries = fsWalk.walkSync('path', new fsWalk.Settings()); +``` + +#### path + +* Required: `true` +* Type: `string | Buffer | URL` + +A path to a file. If a URL is provided, it must use the `file:` protocol. + +#### optionsOrSettings + +* Required: `false` +* Type: `Options | Settings` +* Default: An instance of `Settings` class + +An [`Options`](#options) object or an instance of [`Settings`](#settings) class. + +> :book: When you pass a plain object, an instance of the `Settings` class will be created automatically. If you plan to call the method frequently, use a pre-created instance of the `Settings` class. + +### Settings([options]) + +A class of full settings of the package. + +```ts +const settings = new fsWalk.Settings({ followSymbolicLinks: true }); + +const entries = fsWalk.walkSync('path', settings); +``` + +## Entry + +* `name` — The name of the entry (`unknown.txt`). +* `path` — The path of the entry relative to call directory (`root/unknown.txt`). +* `dirent` — An instance of [`fs.Dirent`](./src/types/index.ts) class. +* [`stats`] — An instance of `fs.Stats` class. + +## Options + +### basePath + +* Type: `string` +* Default: `undefined` + +By default, all paths are built relative to the root path. You can use this option to set custom root path. + +In the example below we read the files from the `root` directory, but in the results the root path will be `custom`. + +```ts +fsWalk.walkSync('root'); // → ['root/file.txt'] +fsWalk.walkSync('root', { basePath: 'custom' }); // → ['custom/file.txt'] +``` + +### concurrency + +* Type: `number` +* Default: `Infinity` + +The maximum number of concurrent calls to `fs.readdir`. + +> :book: The higher the number, the higher performance and the load on the File System. If you want to read in quiet mode, set the value to `4 * os.cpus().length` (4 is default size of [thread pool work scheduling](http://docs.libuv.org/en/v1.x/threadpool.html#thread-pool-work-scheduling)). + +### deepFilter + +* Type: [`DeepFilterFunction`](./src/settings.ts) +* Default: `undefined` + +A function that indicates whether the directory will be read deep or not. + +```ts +// Skip all directories that starts with `node_modules` +const filter: DeepFilterFunction = (entry) => !entry.path.startsWith('node_modules'); +``` + +### entryFilter + +* Type: [`EntryFilterFunction`](./src/settings.ts) +* Default: `undefined` + +A function that indicates whether the entry will be included to results or not. + +```ts +// Exclude all `.js` files from results +const filter: EntryFilterFunction = (entry) => !entry.name.endsWith('.js'); +``` + +### errorFilter + +* Type: [`ErrorFilterFunction`](./src/settings.ts) +* Default: `undefined` + +A function that allows you to skip errors that occur when reading directories. + +For example, you can skip `ENOENT` errors if required: + +```ts +// Skip all ENOENT errors +const filter: ErrorFilterFunction = (error) => error.code == 'ENOENT'; +``` + +### stats + +* Type: `boolean` +* Default: `false` + +Adds an instance of `fs.Stats` class to the [`Entry`](#entry). + +> :book: Always use `fs.readdir` with additional `fs.lstat/fs.stat` calls to determine the entry type. + +### followSymbolicLinks + +* Type: `boolean` +* Default: `false` + +Follow symbolic links or not. Call `fs.stat` on symbolic link if `true`. 
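+
+> :book: The snippet below is an editorial illustration and is not part of the upstream README. It simply combines several of the documented options; the `src` directory name and the `.js` suffix are only examples.
+
+```ts
+import * as fsWalk from '@nodelib/fs.walk';
+
+// Walk `src`, skip `node_modules`, keep only `.js` files (with stats), ignore ENOENT errors.
+const settings = new fsWalk.Settings({
+  stats: true,
+  deepFilter: (entry) => !entry.path.includes('node_modules'),
+  entryFilter: (entry) => entry.name.endsWith('.js'),
+  errorFilter: (error) => error.code === 'ENOENT'
+});
+
+for (const entry of fsWalk.walkSync('src', settings)) {
+  console.log(entry.path, entry.stats?.size);
+}
+```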
+ +### `throwErrorOnBrokenSymbolicLink` + +* Type: `boolean` +* Default: `true` + +Throw an error when symbolic link is broken if `true` or safely return `lstat` call if `false`. + +### `pathSegmentSeparator` + +* Type: `string` +* Default: `path.sep` + +By default, this package uses the correct path separator for your OS (`\` on Windows, `/` on Unix-like systems). But you can set this option to any separator character(s) that you want to use instead. + +### `fs` + +* Type: `FileSystemAdapter` +* Default: A default FS methods + +By default, the built-in Node.js module (`fs`) is used to work with the file system. You can replace any method with your own. + +```ts +interface FileSystemAdapter { + lstat: typeof fs.lstat; + stat: typeof fs.stat; + lstatSync: typeof fs.lstatSync; + statSync: typeof fs.statSync; + readdir: typeof fs.readdir; + readdirSync: typeof fs.readdirSync; +} + +const settings = new fsWalk.Settings({ + fs: { lstat: fakeLstat } +}); +``` + +## Changelog + +See the [Releases section of our GitHub project](https://github.com/nodelib/nodelib/releases) for changelog for each release version. + +## License + +This software is released under the terms of the MIT license. diff --git a/node_modules/@nodelib/fs.walk/out/index.d.ts b/node_modules/@nodelib/fs.walk/out/index.d.ts new file mode 100644 index 0000000..8864c7b --- /dev/null +++ b/node_modules/@nodelib/fs.walk/out/index.d.ts @@ -0,0 +1,14 @@ +/// +import type { Readable } from 'stream'; +import type { Dirent, FileSystemAdapter } from '@nodelib/fs.scandir'; +import { AsyncCallback } from './providers/async'; +import Settings, { DeepFilterFunction, EntryFilterFunction, ErrorFilterFunction, Options } from './settings'; +import type { Entry } from './types'; +declare function walk(directory: string, callback: AsyncCallback): void; +declare function walk(directory: string, optionsOrSettings: Options | Settings, callback: AsyncCallback): void; +declare namespace walk { + function __promisify__(directory: string, optionsOrSettings?: Options | Settings): Promise; +} +declare function walkSync(directory: string, optionsOrSettings?: Options | Settings): Entry[]; +declare function walkStream(directory: string, optionsOrSettings?: Options | Settings): Readable; +export { walk, walkSync, walkStream, Settings, AsyncCallback, Dirent, Entry, FileSystemAdapter, Options, DeepFilterFunction, EntryFilterFunction, ErrorFilterFunction }; diff --git a/node_modules/@nodelib/fs.walk/out/index.js b/node_modules/@nodelib/fs.walk/out/index.js new file mode 100644 index 0000000..1520787 --- /dev/null +++ b/node_modules/@nodelib/fs.walk/out/index.js @@ -0,0 +1,34 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Settings = exports.walkStream = exports.walkSync = exports.walk = void 0; +const async_1 = require("./providers/async"); +const stream_1 = require("./providers/stream"); +const sync_1 = require("./providers/sync"); +const settings_1 = require("./settings"); +exports.Settings = settings_1.default; +function walk(directory, optionsOrSettingsOrCallback, callback) { + if (typeof optionsOrSettingsOrCallback === 'function') { + new async_1.default(directory, getSettings()).read(optionsOrSettingsOrCallback); + return; + } + new async_1.default(directory, getSettings(optionsOrSettingsOrCallback)).read(callback); +} +exports.walk = walk; +function walkSync(directory, optionsOrSettings) { + const settings = getSettings(optionsOrSettings); + const provider = new sync_1.default(directory, settings); + return 
provider.read(); +} +exports.walkSync = walkSync; +function walkStream(directory, optionsOrSettings) { + const settings = getSettings(optionsOrSettings); + const provider = new stream_1.default(directory, settings); + return provider.read(); +} +exports.walkStream = walkStream; +function getSettings(settingsOrOptions = {}) { + if (settingsOrOptions instanceof settings_1.default) { + return settingsOrOptions; + } + return new settings_1.default(settingsOrOptions); +} diff --git a/node_modules/@nodelib/fs.walk/out/providers/async.d.ts b/node_modules/@nodelib/fs.walk/out/providers/async.d.ts new file mode 100644 index 0000000..0f6717d --- /dev/null +++ b/node_modules/@nodelib/fs.walk/out/providers/async.d.ts @@ -0,0 +1,12 @@ +import AsyncReader from '../readers/async'; +import type Settings from '../settings'; +import type { Entry, Errno } from '../types'; +export declare type AsyncCallback = (error: Errno, entries: Entry[]) => void; +export default class AsyncProvider { + private readonly _root; + private readonly _settings; + protected readonly _reader: AsyncReader; + private readonly _storage; + constructor(_root: string, _settings: Settings); + read(callback: AsyncCallback): void; +} diff --git a/node_modules/@nodelib/fs.walk/out/providers/async.js b/node_modules/@nodelib/fs.walk/out/providers/async.js new file mode 100644 index 0000000..51d3be5 --- /dev/null +++ b/node_modules/@nodelib/fs.walk/out/providers/async.js @@ -0,0 +1,30 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const async_1 = require("../readers/async"); +class AsyncProvider { + constructor(_root, _settings) { + this._root = _root; + this._settings = _settings; + this._reader = new async_1.default(this._root, this._settings); + this._storage = []; + } + read(callback) { + this._reader.onError((error) => { + callFailureCallback(callback, error); + }); + this._reader.onEntry((entry) => { + this._storage.push(entry); + }); + this._reader.onEnd(() => { + callSuccessCallback(callback, this._storage); + }); + this._reader.read(); + } +} +exports.default = AsyncProvider; +function callFailureCallback(callback, error) { + callback(error); +} +function callSuccessCallback(callback, entries) { + callback(null, entries); +} diff --git a/node_modules/@nodelib/fs.walk/out/providers/index.d.ts b/node_modules/@nodelib/fs.walk/out/providers/index.d.ts new file mode 100644 index 0000000..874f60c --- /dev/null +++ b/node_modules/@nodelib/fs.walk/out/providers/index.d.ts @@ -0,0 +1,4 @@ +import AsyncProvider from './async'; +import StreamProvider from './stream'; +import SyncProvider from './sync'; +export { AsyncProvider, StreamProvider, SyncProvider }; diff --git a/node_modules/@nodelib/fs.walk/out/providers/index.js b/node_modules/@nodelib/fs.walk/out/providers/index.js new file mode 100644 index 0000000..4c2529c --- /dev/null +++ b/node_modules/@nodelib/fs.walk/out/providers/index.js @@ -0,0 +1,9 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.SyncProvider = exports.StreamProvider = exports.AsyncProvider = void 0; +const async_1 = require("./async"); +exports.AsyncProvider = async_1.default; +const stream_1 = require("./stream"); +exports.StreamProvider = stream_1.default; +const sync_1 = require("./sync"); +exports.SyncProvider = sync_1.default; diff --git a/node_modules/@nodelib/fs.walk/out/providers/stream.d.ts b/node_modules/@nodelib/fs.walk/out/providers/stream.d.ts new file mode 100644 index 0000000..294185f --- /dev/null +++ 
b/node_modules/@nodelib/fs.walk/out/providers/stream.d.ts @@ -0,0 +1,12 @@ +/// +import { Readable } from 'stream'; +import AsyncReader from '../readers/async'; +import type Settings from '../settings'; +export default class StreamProvider { + private readonly _root; + private readonly _settings; + protected readonly _reader: AsyncReader; + protected readonly _stream: Readable; + constructor(_root: string, _settings: Settings); + read(): Readable; +} diff --git a/node_modules/@nodelib/fs.walk/out/providers/stream.js b/node_modules/@nodelib/fs.walk/out/providers/stream.js new file mode 100644 index 0000000..51298b0 --- /dev/null +++ b/node_modules/@nodelib/fs.walk/out/providers/stream.js @@ -0,0 +1,34 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const stream_1 = require("stream"); +const async_1 = require("../readers/async"); +class StreamProvider { + constructor(_root, _settings) { + this._root = _root; + this._settings = _settings; + this._reader = new async_1.default(this._root, this._settings); + this._stream = new stream_1.Readable({ + objectMode: true, + read: () => { }, + destroy: () => { + if (!this._reader.isDestroyed) { + this._reader.destroy(); + } + } + }); + } + read() { + this._reader.onError((error) => { + this._stream.emit('error', error); + }); + this._reader.onEntry((entry) => { + this._stream.push(entry); + }); + this._reader.onEnd(() => { + this._stream.push(null); + }); + this._reader.read(); + return this._stream; + } +} +exports.default = StreamProvider; diff --git a/node_modules/@nodelib/fs.walk/out/providers/sync.d.ts b/node_modules/@nodelib/fs.walk/out/providers/sync.d.ts new file mode 100644 index 0000000..551c42e --- /dev/null +++ b/node_modules/@nodelib/fs.walk/out/providers/sync.d.ts @@ -0,0 +1,10 @@ +import SyncReader from '../readers/sync'; +import type Settings from '../settings'; +import type { Entry } from '../types'; +export default class SyncProvider { + private readonly _root; + private readonly _settings; + protected readonly _reader: SyncReader; + constructor(_root: string, _settings: Settings); + read(): Entry[]; +} diff --git a/node_modules/@nodelib/fs.walk/out/providers/sync.js b/node_modules/@nodelib/fs.walk/out/providers/sync.js new file mode 100644 index 0000000..faab6ca --- /dev/null +++ b/node_modules/@nodelib/fs.walk/out/providers/sync.js @@ -0,0 +1,14 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const sync_1 = require("../readers/sync"); +class SyncProvider { + constructor(_root, _settings) { + this._root = _root; + this._settings = _settings; + this._reader = new sync_1.default(this._root, this._settings); + } + read() { + return this._reader.read(); + } +} +exports.default = SyncProvider; diff --git a/node_modules/@nodelib/fs.walk/out/readers/async.d.ts b/node_modules/@nodelib/fs.walk/out/readers/async.d.ts new file mode 100644 index 0000000..9acf4e6 --- /dev/null +++ b/node_modules/@nodelib/fs.walk/out/readers/async.d.ts @@ -0,0 +1,30 @@ +/// +import { EventEmitter } from 'events'; +import * as fsScandir from '@nodelib/fs.scandir'; +import type Settings from '../settings'; +import type { Entry, Errno } from '../types'; +import Reader from './reader'; +declare type EntryEventCallback = (entry: Entry) => void; +declare type ErrorEventCallback = (error: Errno) => void; +declare type EndEventCallback = () => void; +export default class AsyncReader extends Reader { + protected readonly _settings: Settings; + protected readonly _scandir: typeof fsScandir.scandir; + 
protected readonly _emitter: EventEmitter; + private readonly _queue; + private _isFatalError; + private _isDestroyed; + constructor(_root: string, _settings: Settings); + read(): EventEmitter; + get isDestroyed(): boolean; + destroy(): void; + onEntry(callback: EntryEventCallback): void; + onError(callback: ErrorEventCallback): void; + onEnd(callback: EndEventCallback): void; + private _pushToQueue; + private _worker; + private _handleError; + private _handleEntry; + private _emitEntry; +} +export {}; diff --git a/node_modules/@nodelib/fs.walk/out/readers/async.js b/node_modules/@nodelib/fs.walk/out/readers/async.js new file mode 100644 index 0000000..ebe8dd5 --- /dev/null +++ b/node_modules/@nodelib/fs.walk/out/readers/async.js @@ -0,0 +1,97 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const events_1 = require("events"); +const fsScandir = require("@nodelib/fs.scandir"); +const fastq = require("fastq"); +const common = require("./common"); +const reader_1 = require("./reader"); +class AsyncReader extends reader_1.default { + constructor(_root, _settings) { + super(_root, _settings); + this._settings = _settings; + this._scandir = fsScandir.scandir; + this._emitter = new events_1.EventEmitter(); + this._queue = fastq(this._worker.bind(this), this._settings.concurrency); + this._isFatalError = false; + this._isDestroyed = false; + this._queue.drain = () => { + if (!this._isFatalError) { + this._emitter.emit('end'); + } + }; + } + read() { + this._isFatalError = false; + this._isDestroyed = false; + setImmediate(() => { + this._pushToQueue(this._root, this._settings.basePath); + }); + return this._emitter; + } + get isDestroyed() { + return this._isDestroyed; + } + destroy() { + if (this._isDestroyed) { + throw new Error('The reader is already destroyed'); + } + this._isDestroyed = true; + this._queue.killAndDrain(); + } + onEntry(callback) { + this._emitter.on('entry', callback); + } + onError(callback) { + this._emitter.once('error', callback); + } + onEnd(callback) { + this._emitter.once('end', callback); + } + _pushToQueue(directory, base) { + const queueItem = { directory, base }; + this._queue.push(queueItem, (error) => { + if (error !== null) { + this._handleError(error); + } + }); + } + _worker(item, done) { + this._scandir(item.directory, this._settings.fsScandirSettings, (error, entries) => { + if (error !== null) { + done(error, undefined); + return; + } + for (const entry of entries) { + this._handleEntry(entry, item.base); + } + done(null, undefined); + }); + } + _handleError(error) { + if (this._isDestroyed || !common.isFatalError(this._settings, error)) { + return; + } + this._isFatalError = true; + this._isDestroyed = true; + this._emitter.emit('error', error); + } + _handleEntry(entry, base) { + if (this._isDestroyed || this._isFatalError) { + return; + } + const fullpath = entry.path; + if (base !== undefined) { + entry.path = common.joinPathSegments(base, entry.name, this._settings.pathSegmentSeparator); + } + if (common.isAppliedFilter(this._settings.entryFilter, entry)) { + this._emitEntry(entry); + } + if (entry.dirent.isDirectory() && common.isAppliedFilter(this._settings.deepFilter, entry)) { + this._pushToQueue(fullpath, base === undefined ? 
undefined : entry.path); + } + } + _emitEntry(entry) { + this._emitter.emit('entry', entry); + } +} +exports.default = AsyncReader; diff --git a/node_modules/@nodelib/fs.walk/out/readers/common.d.ts b/node_modules/@nodelib/fs.walk/out/readers/common.d.ts new file mode 100644 index 0000000..5985f97 --- /dev/null +++ b/node_modules/@nodelib/fs.walk/out/readers/common.d.ts @@ -0,0 +1,7 @@ +import type { FilterFunction } from '../settings'; +import type Settings from '../settings'; +import type { Errno } from '../types'; +export declare function isFatalError(settings: Settings, error: Errno): boolean; +export declare function isAppliedFilter<T>(filter: FilterFunction<T> | null, value: T): boolean; +export declare function replacePathSegmentSeparator(filepath: string, separator: string): string; +export declare function joinPathSegments(a: string, b: string, separator: string): string; diff --git a/node_modules/@nodelib/fs.walk/out/readers/common.js b/node_modules/@nodelib/fs.walk/out/readers/common.js new file mode 100644 index 0000000..a93572f --- /dev/null +++ b/node_modules/@nodelib/fs.walk/out/readers/common.js @@ -0,0 +1,31 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.joinPathSegments = exports.replacePathSegmentSeparator = exports.isAppliedFilter = exports.isFatalError = void 0; +function isFatalError(settings, error) { + if (settings.errorFilter === null) { + return true; + } + return !settings.errorFilter(error); +} +exports.isFatalError = isFatalError; +function isAppliedFilter(filter, value) { + return filter === null || filter(value); +} +exports.isAppliedFilter = isAppliedFilter; +function replacePathSegmentSeparator(filepath, separator) { + return filepath.split(/[/\\]/).join(separator); +} +exports.replacePathSegmentSeparator = replacePathSegmentSeparator; +function joinPathSegments(a, b, separator) { + if (a === '') { + return b; + } + /** + * The correct handling of cases when the first segment is a root (`/`, `C:/`) or UNC path (`//?/C:/`).
+ */ + if (a.endsWith(separator)) { + return a + b; + } + return a + separator + b; +} +exports.joinPathSegments = joinPathSegments; diff --git a/node_modules/@nodelib/fs.walk/out/readers/reader.d.ts b/node_modules/@nodelib/fs.walk/out/readers/reader.d.ts new file mode 100644 index 0000000..e1f383b --- /dev/null +++ b/node_modules/@nodelib/fs.walk/out/readers/reader.d.ts @@ -0,0 +1,6 @@ +import type Settings from '../settings'; +export default class Reader { + protected readonly _root: string; + protected readonly _settings: Settings; + constructor(_root: string, _settings: Settings); +} diff --git a/node_modules/@nodelib/fs.walk/out/readers/reader.js b/node_modules/@nodelib/fs.walk/out/readers/reader.js new file mode 100644 index 0000000..782f07c --- /dev/null +++ b/node_modules/@nodelib/fs.walk/out/readers/reader.js @@ -0,0 +1,11 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const common = require("./common"); +class Reader { + constructor(_root, _settings) { + this._root = _root; + this._settings = _settings; + this._root = common.replacePathSegmentSeparator(_root, _settings.pathSegmentSeparator); + } +} +exports.default = Reader; diff --git a/node_modules/@nodelib/fs.walk/out/readers/sync.d.ts b/node_modules/@nodelib/fs.walk/out/readers/sync.d.ts new file mode 100644 index 0000000..af41033 --- /dev/null +++ b/node_modules/@nodelib/fs.walk/out/readers/sync.d.ts @@ -0,0 +1,15 @@ +import * as fsScandir from '@nodelib/fs.scandir'; +import type { Entry } from '../types'; +import Reader from './reader'; +export default class SyncReader extends Reader { + protected readonly _scandir: typeof fsScandir.scandirSync; + private readonly _storage; + private readonly _queue; + read(): Entry[]; + private _pushToQueue; + private _handleQueue; + private _handleDirectory; + private _handleError; + private _handleEntry; + private _pushToStorage; +} diff --git a/node_modules/@nodelib/fs.walk/out/readers/sync.js b/node_modules/@nodelib/fs.walk/out/readers/sync.js new file mode 100644 index 0000000..9a8d5a6 --- /dev/null +++ b/node_modules/@nodelib/fs.walk/out/readers/sync.js @@ -0,0 +1,59 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const fsScandir = require("@nodelib/fs.scandir"); +const common = require("./common"); +const reader_1 = require("./reader"); +class SyncReader extends reader_1.default { + constructor() { + super(...arguments); + this._scandir = fsScandir.scandirSync; + this._storage = []; + this._queue = new Set(); + } + read() { + this._pushToQueue(this._root, this._settings.basePath); + this._handleQueue(); + return this._storage; + } + _pushToQueue(directory, base) { + this._queue.add({ directory, base }); + } + _handleQueue() { + for (const item of this._queue.values()) { + this._handleDirectory(item.directory, item.base); + } + } + _handleDirectory(directory, base) { + try { + const entries = this._scandir(directory, this._settings.fsScandirSettings); + for (const entry of entries) { + this._handleEntry(entry, base); + } + } + catch (error) { + this._handleError(error); + } + } + _handleError(error) { + if (!common.isFatalError(this._settings, error)) { + return; + } + throw error; + } + _handleEntry(entry, base) { + const fullpath = entry.path; + if (base !== undefined) { + entry.path = common.joinPathSegments(base, entry.name, this._settings.pathSegmentSeparator); + } + if (common.isAppliedFilter(this._settings.entryFilter, entry)) { + this._pushToStorage(entry); + } + if (entry.dirent.isDirectory() && 
common.isAppliedFilter(this._settings.deepFilter, entry)) { + this._pushToQueue(fullpath, base === undefined ? undefined : entry.path); + } + } + _pushToStorage(entry) { + this._storage.push(entry); + } +} +exports.default = SyncReader; diff --git a/node_modules/@nodelib/fs.walk/out/settings.d.ts b/node_modules/@nodelib/fs.walk/out/settings.d.ts new file mode 100644 index 0000000..d1c4b45 --- /dev/null +++ b/node_modules/@nodelib/fs.walk/out/settings.d.ts @@ -0,0 +1,30 @@ +import * as fsScandir from '@nodelib/fs.scandir'; +import type { Entry, Errno } from './types'; +export declare type FilterFunction<T> = (value: T) => boolean; +export declare type DeepFilterFunction = FilterFunction<Entry>; +export declare type EntryFilterFunction = FilterFunction<Entry>; +export declare type ErrorFilterFunction = FilterFunction<Errno>; +export interface Options { + basePath?: string; + concurrency?: number; + deepFilter?: DeepFilterFunction; + entryFilter?: EntryFilterFunction; + errorFilter?: ErrorFilterFunction; + followSymbolicLinks?: boolean; + fs?: Partial<fsScandir.FileSystemAdapter>; + pathSegmentSeparator?: string; + stats?: boolean; + throwErrorOnBrokenSymbolicLink?: boolean; +} +export default class Settings { + private readonly _options; + readonly basePath?: string; + readonly concurrency: number; + readonly deepFilter: DeepFilterFunction | null; + readonly entryFilter: EntryFilterFunction | null; + readonly errorFilter: ErrorFilterFunction | null; + readonly pathSegmentSeparator: string; + readonly fsScandirSettings: fsScandir.Settings; + constructor(_options?: Options); + private _getValue; +} diff --git a/node_modules/@nodelib/fs.walk/out/settings.js b/node_modules/@nodelib/fs.walk/out/settings.js new file mode 100644 index 0000000..d7a85c8 --- /dev/null +++ b/node_modules/@nodelib/fs.walk/out/settings.js @@ -0,0 +1,26 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const path = require("path"); +const fsScandir = require("@nodelib/fs.scandir"); +class Settings { + constructor(_options = {}) { + this._options = _options; + this.basePath = this._getValue(this._options.basePath, undefined); + this.concurrency = this._getValue(this._options.concurrency, Number.POSITIVE_INFINITY); + this.deepFilter = this._getValue(this._options.deepFilter, null); + this.entryFilter = this._getValue(this._options.entryFilter, null); + this.errorFilter = this._getValue(this._options.errorFilter, null); + this.pathSegmentSeparator = this._getValue(this._options.pathSegmentSeparator, path.sep); + this.fsScandirSettings = new fsScandir.Settings({ + followSymbolicLinks: this._options.followSymbolicLinks, + fs: this._options.fs, + pathSegmentSeparator: this._options.pathSegmentSeparator, + stats: this._options.stats, + throwErrorOnBrokenSymbolicLink: this._options.throwErrorOnBrokenSymbolicLink + }); + } + _getValue(option, value) { + return option !== null && option !== void 0 ?
option : value; + } +} +exports.default = Settings; diff --git a/node_modules/@nodelib/fs.walk/out/types/index.d.ts b/node_modules/@nodelib/fs.walk/out/types/index.d.ts new file mode 100644 index 0000000..6ee9bd3 --- /dev/null +++ b/node_modules/@nodelib/fs.walk/out/types/index.d.ts @@ -0,0 +1,8 @@ +/// +import type * as scandir from '@nodelib/fs.scandir'; +export declare type Entry = scandir.Entry; +export declare type Errno = NodeJS.ErrnoException; +export interface QueueItem { + directory: string; + base?: string; +} diff --git a/node_modules/@nodelib/fs.walk/out/types/index.js b/node_modules/@nodelib/fs.walk/out/types/index.js new file mode 100644 index 0000000..c8ad2e5 --- /dev/null +++ b/node_modules/@nodelib/fs.walk/out/types/index.js @@ -0,0 +1,2 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/node_modules/@nodelib/fs.walk/package.json b/node_modules/@nodelib/fs.walk/package.json new file mode 100644 index 0000000..86bfce4 --- /dev/null +++ b/node_modules/@nodelib/fs.walk/package.json @@ -0,0 +1,44 @@ +{ + "name": "@nodelib/fs.walk", + "version": "1.2.8", + "description": "A library for efficiently walking a directory recursively", + "license": "MIT", + "repository": "https://github.com/nodelib/nodelib/tree/master/packages/fs/fs.walk", + "keywords": [ + "NodeLib", + "fs", + "FileSystem", + "file system", + "walk", + "scanner", + "crawler" + ], + "engines": { + "node": ">= 8" + }, + "files": [ + "out/**", + "!out/**/*.map", + "!out/**/*.spec.*", + "!out/**/tests/**" + ], + "main": "out/index.js", + "typings": "out/index.d.ts", + "scripts": { + "clean": "rimraf {tsconfig.tsbuildinfo,out}", + "lint": "eslint \"src/**/*.ts\" --cache", + "compile": "tsc -b .", + "compile:watch": "tsc -p . --watch --sourceMap", + "test": "mocha \"out/**/*.spec.js\" -s 0", + "build": "npm run clean && npm run compile && npm run lint && npm test", + "watch": "npm run clean && npm run compile:watch" + }, + "dependencies": { + "@nodelib/fs.scandir": "2.1.5", + "fastq": "^1.6.0" + }, + "devDependencies": { + "@nodelib/fs.macchiato": "1.0.4" + }, + "gitHead": "1e5bad48565da2b06b8600e744324ea240bf49d8" +} diff --git a/node_modules/any-promise/.jshintrc b/node_modules/any-promise/.jshintrc new file mode 100644 index 0000000..979105e --- /dev/null +++ b/node_modules/any-promise/.jshintrc @@ -0,0 +1,4 @@ +{ + "node":true, + "strict":true +} diff --git a/node_modules/any-promise/.npmignore b/node_modules/any-promise/.npmignore new file mode 100644 index 0000000..1354abc --- /dev/null +++ b/node_modules/any-promise/.npmignore @@ -0,0 +1,7 @@ +.git* +test/ +test-browser/ +build/ +.travis.yml +*.swp +Makefile diff --git a/node_modules/any-promise/LICENSE b/node_modules/any-promise/LICENSE new file mode 100644 index 0000000..9187fe5 --- /dev/null +++ b/node_modules/any-promise/LICENSE @@ -0,0 +1,19 @@ +Copyright (C) 2014-2016 Kevin Beaty + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/any-promise/README.md b/node_modules/any-promise/README.md new file mode 100644 index 0000000..174bea4 --- /dev/null +++ b/node_modules/any-promise/README.md @@ -0,0 +1,161 @@ +## Any Promise + +[![Build Status](https://secure.travis-ci.org/kevinbeaty/any-promise.svg)](http://travis-ci.org/kevinbeaty/any-promise) + +Let your library support any ES 2015 (ES6) compatible `Promise` and leave the choice to application authors. The application can *optionally* register its preferred `Promise` implementation and it will be exported when requiring `any-promise` from library code. + +If no preference is registered, defaults to the global `Promise` for newer Node.js versions. The browser version defaults to the window `Promise`, so polyfill or register as necessary. + +### Usage with global Promise: + +Assuming the global `Promise` is the desired implementation: + +```bash +# Install any libraries depending on any-promise +$ npm install mz +``` + +The installed libraries will use global Promise by default. + +```js +// in library +var Promise = require('any-promise') // the global Promise + +function promiseReturningFunction(){ + return new Promise(function(resolve, reject){...}) +} +``` + +### Usage with registration: + +Assuming `bluebird` is the desired Promise implementation: + +```bash +# Install preferred promise library +$ npm install bluebird +# Install any-promise to allow registration +$ npm install any-promise +# Install any libraries you would like to use depending on any-promise +$ npm install mz +``` + +Register your preference in the application entry point before any other `require` of packages that load `any-promise`: + +```javascript +// top of application index.js or other entry point +require('any-promise/register/bluebird') + +// -or- Equivalent to above, but allows customization of Promise library +require('any-promise/register')('bluebird', {Promise: require('bluebird')}) +``` + +Now that the implementation is registered, you can use any package depending on `any-promise`: + + +```javascript +var fsp = require('mz/fs') // mz/fs will use registered bluebird promises +var Promise = require('any-promise') // the registered bluebird promise +``` + +It is safe to call `register` multiple times, but it must always be with the same implementation. + +Again, registration is *optional*. It should only be called by the application user if overriding the global `Promise` implementation is desired. + +### Optional Application Registration + +As an application author, you can *optionally* register a preferred `Promise` implementation on application startup (before any call to `require('any-promise')`: + +You must register your preference before any call to `require('any-promise')` (by you or required packages), and only one implementation can be registered. Typically, this registration would occur at the top of the application entry point. 
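+
+> :book: The snippet below is an editorial sketch and is not part of the upstream README. It assumes `bluebird` is installed and only illustrates the "register once, with a single implementation" rule described above.
+
+```javascript
+var register = require('any-promise/register')
+
+register('bluebird')   // the first registration wins
+register('bluebird')   // registering the same implementation again is safe
+// register('q')       // would throw, because a different implementation is already registered
+```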
+ + +#### Registration shortcuts + +If you are using a known `Promise` implementation, you can register your preference with a shortcut: + + +```js +require('any-promise/register/bluebird') +// -or- +import 'any-promise/register/q'; +``` + +Shortcut registration is the preferred registration method as it works in the browser and Node.js. It is also convenient for using with `import` and many test runners, that offer a `--require` flag: + +``` +$ ava --require=any-promise/register/bluebird test.js +``` + +Current known implementations include `bluebird`, `q`, `when`, `rsvp`, `es6-promise`, `promise`, `native-promise-only`, `pinkie`, `vow` and `lie`. If you are not using a known implementation, you can use another registration method described below. + + +#### Basic Registration + +As an alternative to registration shortcuts, you can call the `register` function with the preferred `Promise` implementation. The benefit of this approach is that a `Promise` library can be required by name without being a known implementation. This approach does NOT work in the browser. To use `any-promise` in the browser use either registration shortcuts or specify the `Promise` constructor using advanced registration (see below). + +```javascript +require('any-promise/register')('when') +// -or- require('any-promise/register')('any other ES6 compatible library (known or otherwise)') +``` + +This registration method will try to detect the `Promise` constructor from requiring the specified implementation. If you would like to specify your own constructor, see advanced registration. + + +#### Advanced Registration + +To use the browser version, you should either install a polyfill or explicitly register the `Promise` constructor: + +```javascript +require('any-promise/register')('bluebird', {Promise: require('bluebird')}) +``` + +This could also be used for registering a custom `Promise` implementation or subclass. + +Your preference will be registered globally, allowing a single registration even if multiple versions of `any-promise` are installed in the NPM dependency tree or are using multiple bundled JavaScript files in the browser. You can bypass this global registration in options: + + +```javascript +require('../register')('es6-promise', {Promise: require('es6-promise').Promise, global: false}) +``` + +### Library Usage + +To use any `Promise` constructor, simply require it: + +```javascript +var Promise = require('any-promise'); + +return Promise + .all([xf, f, init, coll]) + .then(fn); + + +return new Promise(function(resolve, reject){ + try { + resolve(item); + } catch(e){ + reject(e); + } +}); + +``` + +Except noted below, libraries using `any-promise` should only use [documented](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise) functions as there is no guarantee which implementation will be chosen by the application author. Libraries should never call `register`, only the application user should call if desired. + + +#### Advanced Library Usage + +If your library needs to branch code based on the registered implementation, you can retrieve it using `var impl = require('any-promise/implementation')`, where `impl` will be the package name (`"bluebird"`, `"when"`, etc.) if registered, `"global.Promise"` if using the global version on Node.js, or `"window.Promise"` if using the browser version. You should always include a default case, as there is no guarantee what package may be registered. 
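+
+> :book: For instance (an editorial sketch, not from the upstream README), a library might branch on the registered implementation while keeping a standards-only default path:
+
+```javascript
+var impl = require('any-promise/implementation')
+var Promise = require('any-promise')
+
+if (impl === 'bluebird') {
+  // Safe to rely on implementation-specific behaviour here.
+} else {
+  // Unknown, global or window implementation: use only standard Promise methods.
+}
+```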
+ + +### Support for old Node.js versions + +Node.js versions prior to `v0.12` may have contained buggy versions of the global `Promise`. For this reason, the global `Promise` is not loaded automatically for these old versions. If using `any-promise` in Node.js versions versions `<= v0.12`, the user should register a desired implementation. + +If an implementation is not registered, `any-promise` will attempt to discover an installed `Promise` implementation. If no implementation can be found, an error will be thrown on `require('any-promise')`. While the auto-discovery usually avoids errors, it is non-deterministic. It is recommended that the user always register a preferred implementation for older Node.js versions. + +This auto-discovery is only available for Node.jS versions prior to `v0.12`. Any newer versions will always default to the global `Promise` implementation. + +### Related + +- [any-observable](https://github.com/sindresorhus/any-observable) - `any-promise` for Observables. + diff --git a/node_modules/any-promise/implementation.d.ts b/node_modules/any-promise/implementation.d.ts new file mode 100644 index 0000000..c331a56 --- /dev/null +++ b/node_modules/any-promise/implementation.d.ts @@ -0,0 +1,3 @@ +declare var implementation: string; + +export = implementation; diff --git a/node_modules/any-promise/implementation.js b/node_modules/any-promise/implementation.js new file mode 100644 index 0000000..a45ae94 --- /dev/null +++ b/node_modules/any-promise/implementation.js @@ -0,0 +1 @@ +module.exports = require('./register')().implementation diff --git a/node_modules/any-promise/index.d.ts b/node_modules/any-promise/index.d.ts new file mode 100644 index 0000000..9f646c5 --- /dev/null +++ b/node_modules/any-promise/index.d.ts @@ -0,0 +1,73 @@ +declare class Promise implements Promise.Thenable { + /** + * If you call resolve in the body of the callback passed to the constructor, + * your promise is fulfilled with result object passed to resolve. + * If you call reject your promise is rejected with the object passed to resolve. + * For consistency and debugging (eg stack traces), obj should be an instanceof Error. + * Any errors thrown in the constructor callback will be implicitly passed to reject(). + */ + constructor (callback: (resolve : (value?: R | Promise.Thenable) => void, reject: (error?: any) => void) => void); + + /** + * onFulfilled is called when/if "promise" resolves. onRejected is called when/if "promise" rejects. + * Both are optional, if either/both are omitted the next onFulfilled/onRejected in the chain is called. + * Both callbacks have a single parameter , the fulfillment value or rejection reason. + * "then" returns a new promise equivalent to the value you return from onFulfilled/onRejected after being passed through Promise.resolve. + * If an error is thrown in the callback, the returned promise rejects with that error. + * + * @param onFulfilled called when/if "promise" resolves + * @param onRejected called when/if "promise" rejects + */ + then (onFulfilled?: (value: R) => U | Promise.Thenable, onRejected?: (error: any) => U | Promise.Thenable): Promise; + then (onFulfilled?: (value: R) => U | Promise.Thenable, onRejected?: (error: any) => void): Promise; + + /** + * Sugar for promise.then(undefined, onRejected) + * + * @param onRejected called when/if "promise" rejects + */ + catch (onRejected?: (error: any) => U | Promise.Thenable): Promise; + + /** + * Make a new promise from the thenable. 
+ * A thenable is promise-like in as far as it has a "then" method. + */ + static resolve (): Promise; + static resolve (value: R | Promise.Thenable): Promise; + + /** + * Make a promise that rejects to obj. For consistency and debugging (eg stack traces), obj should be an instanceof Error + */ + static reject (error: any): Promise; + + /** + * Make a promise that fulfills when every item in the array fulfills, and rejects if (and when) any item rejects. + * the array passed to all can be a mixture of promise-like objects and other objects. + * The fulfillment value is an array (in order) of fulfillment values. The rejection value is the first rejection value. + */ + static all (values: [T1 | Promise.Thenable, T2 | Promise.Thenable, T3 | Promise.Thenable, T4 | Promise.Thenable , T5 | Promise.Thenable, T6 | Promise.Thenable, T7 | Promise.Thenable, T8 | Promise.Thenable, T9 | Promise.Thenable, T10 | Promise.Thenable]): Promise<[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10]>; + static all (values: [T1 | Promise.Thenable, T2 | Promise.Thenable, T3 | Promise.Thenable, T4 | Promise.Thenable , T5 | Promise.Thenable, T6 | Promise.Thenable, T7 | Promise.Thenable, T8 | Promise.Thenable, T9 | Promise.Thenable]): Promise<[T1, T2, T3, T4, T5, T6, T7, T8, T9]>; + static all (values: [T1 | Promise.Thenable, T2 | Promise.Thenable, T3 | Promise.Thenable, T4 | Promise.Thenable , T5 | Promise.Thenable, T6 | Promise.Thenable, T7 | Promise.Thenable, T8 | Promise.Thenable]): Promise<[T1, T2, T3, T4, T5, T6, T7, T8]>; + static all (values: [T1 | Promise.Thenable, T2 | Promise.Thenable, T3 | Promise.Thenable, T4 | Promise.Thenable , T5 | Promise.Thenable, T6 | Promise.Thenable, T7 | Promise.Thenable]): Promise<[T1, T2, T3, T4, T5, T6, T7]>; + static all (values: [T1 | Promise.Thenable, T2 | Promise.Thenable, T3 | Promise.Thenable, T4 | Promise.Thenable , T5 | Promise.Thenable, T6 | Promise.Thenable]): Promise<[T1, T2, T3, T4, T5, T6]>; + static all (values: [T1 | Promise.Thenable, T2 | Promise.Thenable, T3 | Promise.Thenable, T4 | Promise.Thenable , T5 | Promise.Thenable]): Promise<[T1, T2, T3, T4, T5]>; + static all (values: [T1 | Promise.Thenable, T2 | Promise.Thenable, T3 | Promise.Thenable, T4 | Promise.Thenable ]): Promise<[T1, T2, T3, T4]>; + static all (values: [T1 | Promise.Thenable, T2 | Promise.Thenable, T3 | Promise.Thenable]): Promise<[T1, T2, T3]>; + static all (values: [T1 | Promise.Thenable, T2 | Promise.Thenable]): Promise<[T1, T2]>; + static all (values: [T1 | Promise.Thenable]): Promise<[T1]>; + static all (values: Array>): Promise; + + /** + * Make a Promise that fulfills when any item fulfills, and rejects if any item rejects. 
+ */ + static race (promises: (R | Promise.Thenable)[]): Promise; +} + +declare namespace Promise { + export interface Thenable { + then (onFulfilled?: (value: R) => U | Thenable, onRejected?: (error: any) => U | Thenable): Thenable; + then (onFulfilled?: (value: R) => U | Thenable, onRejected?: (error: any) => void): Thenable; + } +} + +export = Promise; diff --git a/node_modules/any-promise/index.js b/node_modules/any-promise/index.js new file mode 100644 index 0000000..74b8548 --- /dev/null +++ b/node_modules/any-promise/index.js @@ -0,0 +1 @@ +module.exports = require('./register')().Promise diff --git a/node_modules/any-promise/loader.js b/node_modules/any-promise/loader.js new file mode 100644 index 0000000..e164914 --- /dev/null +++ b/node_modules/any-promise/loader.js @@ -0,0 +1,78 @@ +"use strict" + // global key for user preferred registration +var REGISTRATION_KEY = '@@any-promise/REGISTRATION', + // Prior registration (preferred or detected) + registered = null + +/** + * Registers the given implementation. An implementation must + * be registered prior to any call to `require("any-promise")`, + * typically on application load. + * + * If called with no arguments, will return registration in + * following priority: + * + * For Node.js: + * + * 1. Previous registration + * 2. global.Promise if node.js version >= 0.12 + * 3. Auto detected promise based on first sucessful require of + * known promise libraries. Note this is a last resort, as the + * loaded library is non-deterministic. node.js >= 0.12 will + * always use global.Promise over this priority list. + * 4. Throws error. + * + * For Browser: + * + * 1. Previous registration + * 2. window.Promise + * 3. Throws error. + * + * Options: + * + * Promise: Desired Promise constructor + * global: Boolean - Should the registration be cached in a global variable to + * allow cross dependency/bundle registration? (default true) + */ +module.exports = function(root, loadImplementation){ + return function register(implementation, opts){ + implementation = implementation || null + opts = opts || {} + // global registration unless explicitly {global: false} in options (default true) + var registerGlobal = opts.global !== false; + + // load any previous global registration + if(registered === null && registerGlobal){ + registered = root[REGISTRATION_KEY] || null + } + + if(registered !== null + && implementation !== null + && registered.implementation !== implementation){ + // Throw error if attempting to redefine implementation + throw new Error('any-promise already defined as "'+registered.implementation+ + '". 
You can only register an implementation before the first '+ + ' call to require("any-promise") and an implementation cannot be changed') + } + + if(registered === null){ + // use provided implementation + if(implementation !== null && typeof opts.Promise !== 'undefined'){ + registered = { + Promise: opts.Promise, + implementation: implementation + } + } else { + // require implementation if implementation is specified but not provided + registered = loadImplementation(implementation) + } + + if(registerGlobal){ + // register preference globally in case multiple installations + root[REGISTRATION_KEY] = registered + } + } + + return registered + } +} diff --git a/node_modules/any-promise/optional.js b/node_modules/any-promise/optional.js new file mode 100644 index 0000000..f388942 --- /dev/null +++ b/node_modules/any-promise/optional.js @@ -0,0 +1,6 @@ +"use strict"; +try { + module.exports = require('./register')().Promise || null +} catch(e) { + module.exports = null +} diff --git a/node_modules/any-promise/package.json b/node_modules/any-promise/package.json new file mode 100644 index 0000000..5baf14c --- /dev/null +++ b/node_modules/any-promise/package.json @@ -0,0 +1,45 @@ +{ + "name": "any-promise", + "version": "1.3.0", + "description": "Resolve any installed ES6 compatible promise", + "main": "index.js", + "typings": "index.d.ts", + "browser": { + "./register.js": "./register-shim.js" + }, + "scripts": { + "test": "ava" + }, + "repository": { + "type": "git", + "url": "https://github.com/kevinbeaty/any-promise" + }, + "keywords": [ + "promise", + "es6" + ], + "author": "Kevin Beaty", + "license": "MIT", + "bugs": { + "url": "https://github.com/kevinbeaty/any-promise/issues" + }, + "homepage": "http://github.com/kevinbeaty/any-promise", + "dependencies": {}, + "devDependencies": { + "ava": "^0.14.0", + "bluebird": "^3.0.0", + "es6-promise": "^3.0.0", + "is-promise": "^2.0.0", + "lie": "^3.0.0", + "mocha": "^2.0.0", + "native-promise-only": "^0.8.0", + "phantomjs-prebuilt": "^2.0.0", + "pinkie": "^2.0.0", + "promise": "^7.0.0", + "q": "^1.0.0", + "rsvp": "^3.0.0", + "vow": "^0.4.0", + "when": "^3.0.0", + "zuul": "^3.0.0" + } +} diff --git a/node_modules/any-promise/register-shim.js b/node_modules/any-promise/register-shim.js new file mode 100644 index 0000000..9049405 --- /dev/null +++ b/node_modules/any-promise/register-shim.js @@ -0,0 +1,18 @@ +"use strict"; +module.exports = require('./loader')(window, loadImplementation) + +/** + * Browser specific loadImplementation. Always uses `window.Promise` + * + * To register a custom implementation, must register with `Promise` option. 
+ */ +function loadImplementation(){ + if(typeof window.Promise === 'undefined'){ + throw new Error("any-promise browser requires a polyfill or explicit registration"+ + " e.g: require('any-promise/register/bluebird')") + } + return { + Promise: window.Promise, + implementation: 'window.Promise' + } +} diff --git a/node_modules/any-promise/register.d.ts b/node_modules/any-promise/register.d.ts new file mode 100644 index 0000000..97f2fc0 --- /dev/null +++ b/node_modules/any-promise/register.d.ts @@ -0,0 +1,17 @@ +import Promise = require('./index'); + +declare function register (module?: string, options?: register.Options): register.Register; + +declare namespace register { + export interface Register { + Promise: typeof Promise; + implementation: string; + } + + export interface Options { + Promise?: typeof Promise; + global?: boolean + } +} + +export = register; diff --git a/node_modules/any-promise/register.js b/node_modules/any-promise/register.js new file mode 100644 index 0000000..255c6e2 --- /dev/null +++ b/node_modules/any-promise/register.js @@ -0,0 +1,94 @@ +"use strict" +module.exports = require('./loader')(global, loadImplementation); + +/** + * Node.js version of loadImplementation. + * + * Requires the given implementation and returns the registration + * containing {Promise, implementation} + * + * If implementation is undefined or global.Promise, loads it + * Otherwise uses require + */ +function loadImplementation(implementation){ + var impl = null + + if(shouldPreferGlobalPromise(implementation)){ + // if no implementation or env specified use global.Promise + impl = { + Promise: global.Promise, + implementation: 'global.Promise' + } + } else if(implementation){ + // if implementation specified, require it + var lib = require(implementation) + impl = { + Promise: lib.Promise || lib, + implementation: implementation + } + } else { + // try to auto detect implementation. This is non-deterministic + // and should prefer other branches, but this is our last chance + // to load something without throwing error + impl = tryAutoDetect() + } + + if(impl === null){ + throw new Error('Cannot find any-promise implementation nor'+ + ' global.Promise. You must install polyfill or call'+ + ' require("any-promise/register") with your preferred'+ + ' implementation, e.g. require("any-promise/register/bluebird")'+ + ' on application load prior to any require("any-promise").') + } + + return impl +} + +/** + * Determines if the global.Promise should be preferred if an implementation + * has not been registered. + */ +function shouldPreferGlobalPromise(implementation){ + if(implementation){ + return implementation === 'global.Promise' + } else if(typeof global.Promise !== 'undefined'){ + // Load global promise if implementation not specified + // Versions < 0.11 did not have global Promise + // Do not use for version < 0.12 as version 0.11 contained buggy versions + var version = (/v(\d+)\.(\d+)\.(\d+)/).exec(process.version) + return !(version && +version[1] == 0 && +version[2] < 12) + } + + // do not have global.Promise or another implementation was specified + return false +} + +/** + * Look for common libs as last resort there is no guarantee that + * this will return a desired implementation or even be deterministic. + * The priority is also nearly arbitrary. We are only doing this + * for older versions of Node.js <0.12 that do not have a reasonable + * global.Promise implementation and we the user has not registered + * the preference. 
This preserves the behavior of any-promise <= 0.1 + * and may be deprecated or removed in the future + */ +function tryAutoDetect(){ + var libs = [ + "es6-promise", + "promise", + "native-promise-only", + "bluebird", + "rsvp", + "when", + "q", + "pinkie", + "lie", + "vow"] + var i = 0, len = libs.length + for(; i < len; i++){ + try { + return loadImplementation(libs[i]) + } catch(e){} + } + return null +} diff --git a/node_modules/any-promise/register/bluebird.d.ts b/node_modules/any-promise/register/bluebird.d.ts new file mode 100644 index 0000000..336ce12 --- /dev/null +++ b/node_modules/any-promise/register/bluebird.d.ts @@ -0,0 +1 @@ +export {} diff --git a/node_modules/any-promise/register/bluebird.js b/node_modules/any-promise/register/bluebird.js new file mode 100644 index 0000000..de0f87e --- /dev/null +++ b/node_modules/any-promise/register/bluebird.js @@ -0,0 +1,2 @@ +'use strict'; +require('../register')('bluebird', {Promise: require('bluebird')}) diff --git a/node_modules/any-promise/register/es6-promise.d.ts b/node_modules/any-promise/register/es6-promise.d.ts new file mode 100644 index 0000000..336ce12 --- /dev/null +++ b/node_modules/any-promise/register/es6-promise.d.ts @@ -0,0 +1 @@ +export {} diff --git a/node_modules/any-promise/register/es6-promise.js b/node_modules/any-promise/register/es6-promise.js new file mode 100644 index 0000000..59bd55b --- /dev/null +++ b/node_modules/any-promise/register/es6-promise.js @@ -0,0 +1,2 @@ +'use strict'; +require('../register')('es6-promise', {Promise: require('es6-promise').Promise}) diff --git a/node_modules/any-promise/register/lie.d.ts b/node_modules/any-promise/register/lie.d.ts new file mode 100644 index 0000000..336ce12 --- /dev/null +++ b/node_modules/any-promise/register/lie.d.ts @@ -0,0 +1 @@ +export {} diff --git a/node_modules/any-promise/register/lie.js b/node_modules/any-promise/register/lie.js new file mode 100644 index 0000000..7d305ca --- /dev/null +++ b/node_modules/any-promise/register/lie.js @@ -0,0 +1,2 @@ +'use strict'; +require('../register')('lie', {Promise: require('lie')}) diff --git a/node_modules/any-promise/register/native-promise-only.d.ts b/node_modules/any-promise/register/native-promise-only.d.ts new file mode 100644 index 0000000..336ce12 --- /dev/null +++ b/node_modules/any-promise/register/native-promise-only.d.ts @@ -0,0 +1 @@ +export {} diff --git a/node_modules/any-promise/register/native-promise-only.js b/node_modules/any-promise/register/native-promise-only.js new file mode 100644 index 0000000..70a5a5e --- /dev/null +++ b/node_modules/any-promise/register/native-promise-only.js @@ -0,0 +1,2 @@ +'use strict'; +require('../register')('native-promise-only', {Promise: require('native-promise-only')}) diff --git a/node_modules/any-promise/register/pinkie.d.ts b/node_modules/any-promise/register/pinkie.d.ts new file mode 100644 index 0000000..336ce12 --- /dev/null +++ b/node_modules/any-promise/register/pinkie.d.ts @@ -0,0 +1 @@ +export {} diff --git a/node_modules/any-promise/register/pinkie.js b/node_modules/any-promise/register/pinkie.js new file mode 100644 index 0000000..caaf98a --- /dev/null +++ b/node_modules/any-promise/register/pinkie.js @@ -0,0 +1,2 @@ +'use strict'; +require('../register')('pinkie', {Promise: require('pinkie')}) diff --git a/node_modules/any-promise/register/promise.d.ts b/node_modules/any-promise/register/promise.d.ts new file mode 100644 index 0000000..336ce12 --- /dev/null +++ b/node_modules/any-promise/register/promise.d.ts @@ -0,0 +1 @@ +export {} diff --git 
a/node_modules/any-promise/register/promise.js b/node_modules/any-promise/register/promise.js new file mode 100644 index 0000000..746620d --- /dev/null +++ b/node_modules/any-promise/register/promise.js @@ -0,0 +1,2 @@ +'use strict'; +require('../register')('promise', {Promise: require('promise')}) diff --git a/node_modules/any-promise/register/q.d.ts b/node_modules/any-promise/register/q.d.ts new file mode 100644 index 0000000..336ce12 --- /dev/null +++ b/node_modules/any-promise/register/q.d.ts @@ -0,0 +1 @@ +export {} diff --git a/node_modules/any-promise/register/q.js b/node_modules/any-promise/register/q.js new file mode 100644 index 0000000..0fc633a --- /dev/null +++ b/node_modules/any-promise/register/q.js @@ -0,0 +1,2 @@ +'use strict'; +require('../register')('q', {Promise: require('q').Promise}) diff --git a/node_modules/any-promise/register/rsvp.d.ts b/node_modules/any-promise/register/rsvp.d.ts new file mode 100644 index 0000000..336ce12 --- /dev/null +++ b/node_modules/any-promise/register/rsvp.d.ts @@ -0,0 +1 @@ +export {} diff --git a/node_modules/any-promise/register/rsvp.js b/node_modules/any-promise/register/rsvp.js new file mode 100644 index 0000000..02b1318 --- /dev/null +++ b/node_modules/any-promise/register/rsvp.js @@ -0,0 +1,2 @@ +'use strict'; +require('../register')('rsvp', {Promise: require('rsvp').Promise}) diff --git a/node_modules/any-promise/register/vow.d.ts b/node_modules/any-promise/register/vow.d.ts new file mode 100644 index 0000000..336ce12 --- /dev/null +++ b/node_modules/any-promise/register/vow.d.ts @@ -0,0 +1 @@ +export {} diff --git a/node_modules/any-promise/register/vow.js b/node_modules/any-promise/register/vow.js new file mode 100644 index 0000000..5b6868c --- /dev/null +++ b/node_modules/any-promise/register/vow.js @@ -0,0 +1,2 @@ +'use strict'; +require('../register')('vow', {Promise: require('vow').Promise}) diff --git a/node_modules/any-promise/register/when.d.ts b/node_modules/any-promise/register/when.d.ts new file mode 100644 index 0000000..336ce12 --- /dev/null +++ b/node_modules/any-promise/register/when.d.ts @@ -0,0 +1 @@ +export {} diff --git a/node_modules/any-promise/register/when.js b/node_modules/any-promise/register/when.js new file mode 100644 index 0000000..d91c13d --- /dev/null +++ b/node_modules/any-promise/register/when.js @@ -0,0 +1,2 @@ +'use strict'; +require('../register')('when', {Promise: require('when').Promise}) diff --git a/node_modules/anymatch/LICENSE b/node_modules/anymatch/LICENSE new file mode 100644 index 0000000..491766c --- /dev/null +++ b/node_modules/anymatch/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) 2019 Elan Shanker, Paul Miller (https://paulmillr.com) + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
diff --git a/node_modules/anymatch/README.md b/node_modules/anymatch/README.md new file mode 100644 index 0000000..1dd67f5 --- /dev/null +++ b/node_modules/anymatch/README.md @@ -0,0 +1,87 @@ +anymatch [![Build Status](https://travis-ci.org/micromatch/anymatch.svg?branch=master)](https://travis-ci.org/micromatch/anymatch) [![Coverage Status](https://img.shields.io/coveralls/micromatch/anymatch.svg?branch=master)](https://coveralls.io/r/micromatch/anymatch?branch=master) +====== +JavaScript module to match a string against a regular expression, glob, string, +or function that takes the string as an argument and returns a truthy or falsy +value. The matcher can also be an array of any or all of these. Useful for +allowing a very flexible user-defined config to define things like file paths. + +__Note: This module has Bash parity; please be aware that Windows-style backslashes are not supported as separators. See https://github.com/micromatch/micromatch#backslashes for more information.__ + + +Usage +----- +```sh +npm install anymatch +``` + +#### anymatch(matchers, testString, [returnIndex], [options]) +* __matchers__: (_Array|String|RegExp|Function_) +String to be directly matched, string with glob patterns, regular expression +test, function that takes the testString as an argument and returns a truthy +value if it should be matched, or an array of any number and mix of these types. +* __testString__: (_String|Array_) The string to test against the matchers. If +passed as an array, the first element of the array will be used as the +`testString` for non-function matchers, while the entire array will be applied +as the arguments for function matchers. +* __options__: (_Object_ [optional]) Any of the [picomatch](https://github.com/micromatch/picomatch#options) options. + * __returnIndex__: (_Boolean_ [optional]) If true, return the array index of +the first matcher that the testString matched, or -1 if no match, instead of a +boolean result. + +```js +const anymatch = require('anymatch'); + +const matchers = [ 'path/to/file.js', 'path/anyjs/**/*.js', /foo.js$/, string => string.includes('bar') && string.length > 10 ]; + +anymatch(matchers, 'path/to/file.js'); // true +anymatch(matchers, 'path/anyjs/baz.js'); // true +anymatch(matchers, 'path/to/foo.js'); // true +anymatch(matchers, 'path/to/bar.js'); // true +anymatch(matchers, 'bar.js'); // false + +// returnIndex = true +anymatch(matchers, 'foo.js', {returnIndex: true}); // 2 +anymatch(matchers, 'path/anyjs/foo.js', {returnIndex: true}); // 1 + +// any picomatch options + +// using globs to match directories and their children +anymatch('node_modules', 'node_modules'); // true +anymatch('node_modules', 'node_modules/somelib/index.js'); // false +anymatch('node_modules/**', 'node_modules/somelib/index.js'); // true +anymatch('node_modules/**', '/absolute/path/to/node_modules/somelib/index.js'); // false +anymatch('**/node_modules/**', '/absolute/path/to/node_modules/somelib/index.js'); // true + +const matcher = anymatch(matchers); +['foo.js', 'bar.js'].filter(matcher); // [ 'foo.js' ] + +``` + +#### anymatch(matchers) +You can also pass in only your matcher(s) to get a curried function that has +already been bound to the provided matching criteria. This can be used as an +`Array#filter` callback.
+ +```js +var matcher = anymatch(matchers); + +matcher('path/to/file.js'); // true +matcher('path/anyjs/baz.js', true); // 1 + +['foo.js', 'bar.js'].filter(matcher); // ['foo.js'] +``` + +Changelog +---------- +[See release notes page on GitHub](https://github.com/micromatch/anymatch/releases) + +- **v3.0:** Removed `startIndex` and `endIndex` arguments. Node 8.x-only. +- **v2.0:** [micromatch](https://github.com/jonschlinkert/micromatch) moves away from minimatch-parity and inline with Bash. This includes handling backslashes differently (see https://github.com/micromatch/micromatch#backslashes for more information). +- **v1.2:** anymatch uses [micromatch](https://github.com/jonschlinkert/micromatch) +for glob pattern matching. Issues with glob pattern matching should be +reported directly to the [micromatch issue tracker](https://github.com/jonschlinkert/micromatch/issues). + +License +------- +[ISC](https://raw.github.com/micromatch/anymatch/master/LICENSE) diff --git a/node_modules/anymatch/index.d.ts b/node_modules/anymatch/index.d.ts new file mode 100644 index 0000000..3ef7eaa --- /dev/null +++ b/node_modules/anymatch/index.d.ts @@ -0,0 +1,20 @@ +type AnymatchFn = (testString: string) => boolean; +type AnymatchPattern = string|RegExp|AnymatchFn; +type AnymatchMatcher = AnymatchPattern|AnymatchPattern[] +type AnymatchTester = { + (testString: string|any[], returnIndex: true): number; + (testString: string|any[]): boolean; +} + +type PicomatchOptions = {dot: boolean}; + +declare const anymatch: { + (matchers: AnymatchMatcher): AnymatchTester; + (matchers: AnymatchMatcher, testString: null, returnIndex: true | PicomatchOptions): AnymatchTester; + (matchers: AnymatchMatcher, testString: string|any[], returnIndex: true | PicomatchOptions): number; + (matchers: AnymatchMatcher, testString: string|any[]): boolean; +} + +export {AnymatchMatcher as Matcher} +export {AnymatchTester as Tester} +export default anymatch diff --git a/node_modules/anymatch/index.js b/node_modules/anymatch/index.js new file mode 100644 index 0000000..8eb73e9 --- /dev/null +++ b/node_modules/anymatch/index.js @@ -0,0 +1,104 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { value: true }); + +const picomatch = require('picomatch'); +const normalizePath = require('normalize-path'); + +/** + * @typedef {(testString: string) => boolean} AnymatchFn + * @typedef {string|RegExp|AnymatchFn} AnymatchPattern + * @typedef {AnymatchPattern|AnymatchPattern[]} AnymatchMatcher + */ +const BANG = '!'; +const DEFAULT_OPTIONS = {returnIndex: false}; +const arrify = (item) => Array.isArray(item) ? item : [item]; + +/** + * @param {AnymatchPattern} matcher + * @param {object} options + * @returns {AnymatchFn} + */ +const createPattern = (matcher, options) => { + if (typeof matcher === 'function') { + return matcher; + } + if (typeof matcher === 'string') { + const glob = picomatch(matcher, options); + return (string) => matcher === string || glob(string); + } + if (matcher instanceof RegExp) { + return (string) => matcher.test(string); + } + return (string) => false; +}; + +/** + * @param {Array} patterns + * @param {Array} negPatterns + * @param {String|Array} args + * @param {Boolean} returnIndex + * @returns {boolean|number} + */ +const matchPatterns = (patterns, negPatterns, args, returnIndex) => { + const isList = Array.isArray(args); + const _path = isList ? 
args[0] : args; + if (!isList && typeof _path !== 'string') { + throw new TypeError('anymatch: second argument must be a string: got ' + + Object.prototype.toString.call(_path)) + } + const path = normalizePath(_path, false); + + for (let index = 0; index < negPatterns.length; index++) { + const nglob = negPatterns[index]; + if (nglob(path)) { + return returnIndex ? -1 : false; + } + } + + const applied = isList && [path].concat(args.slice(1)); + for (let index = 0; index < patterns.length; index++) { + const pattern = patterns[index]; + if (isList ? pattern(...applied) : pattern(path)) { + return returnIndex ? index : true; + } + } + + return returnIndex ? -1 : false; +}; + +/** + * @param {AnymatchMatcher} matchers + * @param {Array|string} testString + * @param {object} options + * @returns {boolean|number|Function} + */ +const anymatch = (matchers, testString, options = DEFAULT_OPTIONS) => { + if (matchers == null) { + throw new TypeError('anymatch: specify first argument'); + } + const opts = typeof options === 'boolean' ? {returnIndex: options} : options; + const returnIndex = opts.returnIndex || false; + + // Early cache for matchers. + const mtchers = arrify(matchers); + const negatedGlobs = mtchers + .filter(item => typeof item === 'string' && item.charAt(0) === BANG) + .map(item => item.slice(1)) + .map(item => picomatch(item, opts)); + const patterns = mtchers + .filter(item => typeof item !== 'string' || (typeof item === 'string' && item.charAt(0) !== BANG)) + .map(matcher => createPattern(matcher, opts)); + + if (testString == null) { + return (testString, ri = false) => { + const returnIndex = typeof ri === 'boolean' ? ri : false; + return matchPatterns(patterns, negatedGlobs, testString, returnIndex); + } + } + + return matchPatterns(patterns, negatedGlobs, testString, returnIndex); +}; + +anymatch.default = anymatch; +module.exports = anymatch; diff --git a/node_modules/anymatch/package.json b/node_modules/anymatch/package.json new file mode 100644 index 0000000..2cb2307 --- /dev/null +++ b/node_modules/anymatch/package.json @@ -0,0 +1,48 @@ +{ + "name": "anymatch", + "version": "3.1.3", + "description": "Matches strings against configurable strings, globs, regular expressions, and/or functions", + "files": [ + "index.js", + "index.d.ts" + ], + "dependencies": { + "normalize-path": "^3.0.0", + "picomatch": "^2.0.4" + }, + "author": { + "name": "Elan Shanker", + "url": "https://github.com/es128" + }, + "license": "ISC", + "homepage": "https://github.com/micromatch/anymatch", + "repository": { + "type": "git", + "url": "https://github.com/micromatch/anymatch" + }, + "keywords": [ + "match", + "any", + "string", + "file", + "fs", + "list", + "glob", + "regex", + "regexp", + "regular", + "expression", + "function" + ], + "scripts": { + "test": "nyc mocha", + "mocha": "mocha" + }, + "devDependencies": { + "mocha": "^6.1.3", + "nyc": "^14.0.0" + }, + "engines": { + "node": ">= 8" + } +} diff --git a/node_modules/arg/LICENSE.md b/node_modules/arg/LICENSE.md new file mode 100644 index 0000000..b708f87 --- /dev/null +++ b/node_modules/arg/LICENSE.md @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2021 Vercel, Inc. 
+ +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/arg/README.md b/node_modules/arg/README.md new file mode 100644 index 0000000..6501df5 --- /dev/null +++ b/node_modules/arg/README.md @@ -0,0 +1,317 @@ +# Arg + +`arg` is an unopinionated, no-frills CLI argument parser. + +## Installation + +```bash +npm install arg +``` + +## Usage + +`arg()` takes either 1 or 2 arguments: + +1. Command line specification object (see below) +2. Parse options (_Optional_, defaults to `{permissive: false, argv: process.argv.slice(2), stopAtPositional: false}`) + +It returns an object with any values present on the command-line (missing options are thus +missing from the resulting object). Arg performs no validation/requirement checking - we +leave that up to the application. + +All parameters that aren't consumed by options (commonly referred to as "extra" parameters) +are added to `result._`, which is _always_ an array (even if no extra parameters are passed, +in which case an empty array is returned). + +```javascript +const arg = require('arg'); + +// `options` is an optional parameter +const args = arg( + spec, + (options = { permissive: false, argv: process.argv.slice(2) }) +); +``` + +For example: + +```console +$ node ./hello.js --verbose -vvv --port=1234 -n 'My name' foo bar --tag qux --tag=qix -- --foobar +``` + +```javascript +// hello.js +const arg = require('arg'); + +const args = arg({ + // Types + '--help': Boolean, + '--version': Boolean, + '--verbose': arg.COUNT, // Counts the number of times --verbose is passed + '--port': Number, // --port or --port= + '--name': String, // --name or --name= + '--tag': [String], // --tag or --tag= + + // Aliases + '-v': '--verbose', + '-n': '--name', // -n ; result is stored in --name + '--label': '--name' // --label or --label=; + // result is stored in --name +}); + +console.log(args); +/* +{ + _: ["foo", "bar", "--foobar"], + '--port': 1234, + '--verbose': 4, + '--name': "My name", + '--tag': ["qux", "qix"] +} +*/ +``` + +The values for each key=>value pair is either a type (function or [function]) or a string (indicating an alias). + +- In the case of a function, the string value of the argument's value is passed to it, + and the return value is used as the ultimate value. + +- In the case of an array, the only element _must_ be a type function. Array types indicate + that the argument may be passed multiple times, and as such the resulting value in the returned + object is an array with all of the values that were passed using the specified flag. 
+ +- In the case of a string, an alias is established. If a flag is passed that matches the _key_, + then the _value_ is substituted in its place. + +Type functions are passed three arguments: + +1. The parameter value (always a string) +2. The parameter name (e.g. `--label`) +3. The previous value for the destination (useful for reduce-like operations or for supporting `-v` multiple times, etc.) + +This means the built-in `String`, `Number`, and `Boolean` type constructors "just work" as type functions. + +Note that `Boolean` and `[Boolean]` have special treatment - an option argument is _not_ consumed or passed, but instead `true` is +returned. These options are called "flags". + +For custom handlers that wish to behave as flags, you may pass the function through `arg.flag()`: + +```javascript +const arg = require('arg'); + +const argv = [ + '--foo', + 'bar', + '-ff', + 'baz', + '--foo', + '--foo', + 'qux', + '-fff', + 'qix' +]; + +function myHandler(value, argName, previousValue) { + /* `value` is always `true` */ + return 'na ' + (previousValue || 'batman!'); +} + +const args = arg( + { + '--foo': arg.flag(myHandler), + '-f': '--foo' + }, + { + argv + } +); + +console.log(args); +/* +{ + _: ['bar', 'baz', 'qux', 'qix'], + '--foo': 'na na na na na na na na batman!' +} +*/ +``` + +As well, `arg` supplies a helper argument handler called `arg.COUNT`, which equivalent to a `[Boolean]` argument's `.length` +property - effectively counting the number of times the boolean flag, denoted by the key, is passed on the command line.. +For example, this is how you could implement `ssh`'s multiple levels of verbosity (`-vvvv` being the most verbose). + +```javascript +const arg = require('arg'); + +const argv = ['-AAAA', '-BBBB']; + +const args = arg( + { + '-A': arg.COUNT, + '-B': [Boolean] + }, + { + argv + } +); + +console.log(args); +/* +{ + _: [], + '-A': 4, + '-B': [true, true, true, true] +} +*/ +``` + +### Options + +If a second parameter is specified and is an object, it specifies parsing options to modify the behavior of `arg()`. + +#### `argv` + +If you have already sliced or generated a number of raw arguments to be parsed (as opposed to letting `arg` +slice them from `process.argv`) you may specify them in the `argv` option. + +For example: + +```javascript +const args = arg( + { + '--foo': String + }, + { + argv: ['hello', '--foo', 'world'] + } +); +``` + +results in: + +```javascript +const args = { + _: ['hello'], + '--foo': 'world' +}; +``` + +#### `permissive` + +When `permissive` set to `true`, `arg` will push any unknown arguments +onto the "extra" argument array (`result._`) instead of throwing an error about +an unknown flag. + +For example: + +```javascript +const arg = require('arg'); + +const argv = [ + '--foo', + 'hello', + '--qux', + 'qix', + '--bar', + '12345', + 'hello again' +]; + +const args = arg( + { + '--foo': String, + '--bar': Number + }, + { + argv, + permissive: true + } +); +``` + +results in: + +```javascript +const args = { + _: ['--qux', 'qix', 'hello again'], + '--foo': 'hello', + '--bar': 12345 +}; +``` + +#### `stopAtPositional` + +When `stopAtPositional` is set to `true`, `arg` will halt parsing at the first +positional argument. 
+ +For example: + +```javascript +const arg = require('arg'); + +const argv = ['--foo', 'hello', '--bar']; + +const args = arg( + { + '--foo': Boolean, + '--bar': Boolean + }, + { + argv, + stopAtPositional: true + } +); +``` + +results in: + +```javascript +const args = { + _: ['hello', '--bar'], + '--foo': true +}; +``` + +### Errors + +Some errors that `arg` throws provide a `.code` property in order to aid in recovering from user error, or to +differentiate between user error and developer error (bug). + +##### ARG_UNKNOWN_OPTION + +If an unknown option (not defined in the spec object) is passed, an error with code `ARG_UNKNOWN_OPTION` will be thrown: + +```js +// cli.js +try { + require('arg')({ '--hi': String }); +} catch (err) { + if (err.code === 'ARG_UNKNOWN_OPTION') { + console.log(err.message); + } else { + throw err; + } +} +``` + +```shell +node cli.js --extraneous true +Unknown or unexpected option: --extraneous +``` + +# FAQ + +A few questions and answers that have been asked before: + +### How do I require an argument with `arg`? + +Do the assertion yourself, such as: + +```javascript +const args = arg({ '--name': String }); + +if (!args['--name']) throw new Error('missing required argument: --name'); +``` + +# License + +Released under the [MIT License](LICENSE.md). diff --git a/node_modules/arg/index.d.ts b/node_modules/arg/index.d.ts new file mode 100644 index 0000000..44f9f35 --- /dev/null +++ b/node_modules/arg/index.d.ts @@ -0,0 +1,44 @@ +declare function arg( + spec: T, + options?: arg.Options +): arg.Result; + +declare namespace arg { + export const flagSymbol: unique symbol; + + export function flag(fn: T): T & { [arg.flagSymbol]: true }; + + export const COUNT: Handler & { [arg.flagSymbol]: true }; + + export type Handler = ( + value: string, + name: string, + previousValue?: T + ) => T; + + export class ArgError extends Error { + constructor(message: string, code: string); + + code: string; + } + + export interface Spec { + [key: string]: string | Handler | [Handler]; + } + + export type Result = { _: string[] } & { + [K in keyof T]?: T[K] extends Handler + ? ReturnType + : T[K] extends [Handler] + ? 
Array> + : never; + }; + + export interface Options { + argv?: string[]; + permissive?: boolean; + stopAtPositional?: boolean; + } +} + +export = arg; diff --git a/node_modules/arg/index.js b/node_modules/arg/index.js new file mode 100644 index 0000000..3f60f4c --- /dev/null +++ b/node_modules/arg/index.js @@ -0,0 +1,195 @@ +const flagSymbol = Symbol('arg flag'); + +class ArgError extends Error { + constructor(msg, code) { + super(msg); + this.name = 'ArgError'; + this.code = code; + + Object.setPrototypeOf(this, ArgError.prototype); + } +} + +function arg( + opts, + { + argv = process.argv.slice(2), + permissive = false, + stopAtPositional = false + } = {} +) { + if (!opts) { + throw new ArgError( + 'argument specification object is required', + 'ARG_CONFIG_NO_SPEC' + ); + } + + const result = { _: [] }; + + const aliases = {}; + const handlers = {}; + + for (const key of Object.keys(opts)) { + if (!key) { + throw new ArgError( + 'argument key cannot be an empty string', + 'ARG_CONFIG_EMPTY_KEY' + ); + } + + if (key[0] !== '-') { + throw new ArgError( + `argument key must start with '-' but found: '${key}'`, + 'ARG_CONFIG_NONOPT_KEY' + ); + } + + if (key.length === 1) { + throw new ArgError( + `argument key must have a name; singular '-' keys are not allowed: ${key}`, + 'ARG_CONFIG_NONAME_KEY' + ); + } + + if (typeof opts[key] === 'string') { + aliases[key] = opts[key]; + continue; + } + + let type = opts[key]; + let isFlag = false; + + if ( + Array.isArray(type) && + type.length === 1 && + typeof type[0] === 'function' + ) { + const [fn] = type; + type = (value, name, prev = []) => { + prev.push(fn(value, name, prev[prev.length - 1])); + return prev; + }; + isFlag = fn === Boolean || fn[flagSymbol] === true; + } else if (typeof type === 'function') { + isFlag = type === Boolean || type[flagSymbol] === true; + } else { + throw new ArgError( + `type missing or not a function or valid array type: ${key}`, + 'ARG_CONFIG_VAD_TYPE' + ); + } + + if (key[1] !== '-' && key.length > 2) { + throw new ArgError( + `short argument keys (with a single hyphen) must have only one character: ${key}`, + 'ARG_CONFIG_SHORTOPT_TOOLONG' + ); + } + + handlers[key] = [type, isFlag]; + } + + for (let i = 0, len = argv.length; i < len; i++) { + const wholeArg = argv[i]; + + if (stopAtPositional && result._.length > 0) { + result._ = result._.concat(argv.slice(i)); + break; + } + + if (wholeArg === '--') { + result._ = result._.concat(argv.slice(i + 1)); + break; + } + + if (wholeArg.length > 1 && wholeArg[0] === '-') { + /* eslint-disable operator-linebreak */ + const separatedArguments = + wholeArg[1] === '-' || wholeArg.length === 2 + ? [wholeArg] + : wholeArg + .slice(1) + .split('') + .map((a) => `-${a}`); + /* eslint-enable operator-linebreak */ + + for (let j = 0; j < separatedArguments.length; j++) { + const arg = separatedArguments[j]; + const [originalArgName, argStr] = + arg[1] === '-' ? 
arg.split(/=(.*)/, 2) : [arg, undefined]; + + let argName = originalArgName; + while (argName in aliases) { + argName = aliases[argName]; + } + + if (!(argName in handlers)) { + if (permissive) { + result._.push(arg); + continue; + } else { + throw new ArgError( + `unknown or unexpected option: ${originalArgName}`, + 'ARG_UNKNOWN_OPTION' + ); + } + } + + const [type, isFlag] = handlers[argName]; + + if (!isFlag && j + 1 < separatedArguments.length) { + throw new ArgError( + `option requires argument (but was followed by another short argument): ${originalArgName}`, + 'ARG_MISSING_REQUIRED_SHORTARG' + ); + } + + if (isFlag) { + result[argName] = type(true, argName, result[argName]); + } else if (argStr === undefined) { + if ( + argv.length < i + 2 || + (argv[i + 1].length > 1 && + argv[i + 1][0] === '-' && + !( + argv[i + 1].match(/^-?\d*(\.(?=\d))?\d*$/) && + (type === Number || + // eslint-disable-next-line no-undef + (typeof BigInt !== 'undefined' && type === BigInt)) + )) + ) { + const extended = + originalArgName === argName ? '' : ` (alias for ${argName})`; + throw new ArgError( + `option requires argument: ${originalArgName}${extended}`, + 'ARG_MISSING_REQUIRED_LONGARG' + ); + } + + result[argName] = type(argv[i + 1], argName, result[argName]); + ++i; + } else { + result[argName] = type(argStr, argName, result[argName]); + } + } + } else { + result._.push(wholeArg); + } + } + + return result; +} + +arg.flag = (fn) => { + fn[flagSymbol] = true; + return fn; +}; + +// Utility types +arg.COUNT = arg.flag((v, name, existingCount) => (existingCount || 0) + 1); + +// Expose error class +arg.ArgError = ArgError; + +module.exports = arg; diff --git a/node_modules/arg/package.json b/node_modules/arg/package.json new file mode 100644 index 0000000..47368d7 --- /dev/null +++ b/node_modules/arg/package.json @@ -0,0 +1,28 @@ +{ + "name": "arg", + "version": "5.0.2", + "description": "Unopinionated, no-frills CLI argument parser", + "main": "index.js", + "types": "index.d.ts", + "repository": "vercel/arg", + "author": "Josh Junon ", + "license": "MIT", + "files": [ + "index.js", + "index.d.ts" + ], + "scripts": { + "test": "WARN_EXIT=1 jest --coverage -w 2" + }, + "devDependencies": { + "chai": "^4.1.1", + "jest": "^27.0.6", + "prettier": "^2.3.2" + }, + "prettier": { + "arrowParens": "always", + "singleQuote": true, + "tabWidth": 2, + "trailingComma": "none" + } +} diff --git a/node_modules/binary-extensions/binary-extensions.json b/node_modules/binary-extensions/binary-extensions.json new file mode 100644 index 0000000..4aab383 --- /dev/null +++ b/node_modules/binary-extensions/binary-extensions.json @@ -0,0 +1,260 @@ +[ + "3dm", + "3ds", + "3g2", + "3gp", + "7z", + "a", + "aac", + "adp", + "ai", + "aif", + "aiff", + "alz", + "ape", + "apk", + "appimage", + "ar", + "arj", + "asf", + "au", + "avi", + "bak", + "baml", + "bh", + "bin", + "bk", + "bmp", + "btif", + "bz2", + "bzip2", + "cab", + "caf", + "cgm", + "class", + "cmx", + "cpio", + "cr2", + "cur", + "dat", + "dcm", + "deb", + "dex", + "djvu", + "dll", + "dmg", + "dng", + "doc", + "docm", + "docx", + "dot", + "dotm", + "dra", + "DS_Store", + "dsk", + "dts", + "dtshd", + "dvb", + "dwg", + "dxf", + "ecelp4800", + "ecelp7470", + "ecelp9600", + "egg", + "eol", + "eot", + "epub", + "exe", + "f4v", + "fbs", + "fh", + "fla", + "flac", + "flatpak", + "fli", + "flv", + "fpx", + "fst", + "fvt", + "g3", + "gh", + "gif", + "graffle", + "gz", + "gzip", + "h261", + "h263", + "h264", + "icns", + "ico", + "ief", + "img", + "ipa", + "iso", + "jar", + "jpeg", 
+ "jpg", + "jpgv", + "jpm", + "jxr", + "key", + "ktx", + "lha", + "lib", + "lvp", + "lz", + "lzh", + "lzma", + "lzo", + "m3u", + "m4a", + "m4v", + "mar", + "mdi", + "mht", + "mid", + "midi", + "mj2", + "mka", + "mkv", + "mmr", + "mng", + "mobi", + "mov", + "movie", + "mp3", + "mp4", + "mp4a", + "mpeg", + "mpg", + "mpga", + "mxu", + "nef", + "npx", + "numbers", + "nupkg", + "o", + "odp", + "ods", + "odt", + "oga", + "ogg", + "ogv", + "otf", + "ott", + "pages", + "pbm", + "pcx", + "pdb", + "pdf", + "pea", + "pgm", + "pic", + "png", + "pnm", + "pot", + "potm", + "potx", + "ppa", + "ppam", + "ppm", + "pps", + "ppsm", + "ppsx", + "ppt", + "pptm", + "pptx", + "psd", + "pya", + "pyc", + "pyo", + "pyv", + "qt", + "rar", + "ras", + "raw", + "resources", + "rgb", + "rip", + "rlc", + "rmf", + "rmvb", + "rpm", + "rtf", + "rz", + "s3m", + "s7z", + "scpt", + "sgi", + "shar", + "snap", + "sil", + "sketch", + "slk", + "smv", + "snk", + "so", + "stl", + "suo", + "sub", + "swf", + "tar", + "tbz", + "tbz2", + "tga", + "tgz", + "thmx", + "tif", + "tiff", + "tlz", + "ttc", + "ttf", + "txz", + "udf", + "uvh", + "uvi", + "uvm", + "uvp", + "uvs", + "uvu", + "viv", + "vob", + "war", + "wav", + "wax", + "wbmp", + "wdp", + "weba", + "webm", + "webp", + "whl", + "wim", + "wm", + "wma", + "wmv", + "wmx", + "woff", + "woff2", + "wrm", + "wvx", + "xbm", + "xif", + "xla", + "xlam", + "xls", + "xlsb", + "xlsm", + "xlsx", + "xlt", + "xltm", + "xltx", + "xm", + "xmind", + "xpi", + "xpm", + "xwd", + "xz", + "z", + "zip", + "zipx" +] diff --git a/node_modules/binary-extensions/binary-extensions.json.d.ts b/node_modules/binary-extensions/binary-extensions.json.d.ts new file mode 100644 index 0000000..94a248c --- /dev/null +++ b/node_modules/binary-extensions/binary-extensions.json.d.ts @@ -0,0 +1,3 @@ +declare const binaryExtensionsJson: readonly string[]; + +export = binaryExtensionsJson; diff --git a/node_modules/binary-extensions/index.d.ts b/node_modules/binary-extensions/index.d.ts new file mode 100644 index 0000000..f469ac5 --- /dev/null +++ b/node_modules/binary-extensions/index.d.ts @@ -0,0 +1,14 @@ +/** +List of binary file extensions. + +@example +``` +import binaryExtensions = require('binary-extensions'); + +console.log(binaryExtensions); +//=> ['3ds', '3g2', …] +``` +*/ +declare const binaryExtensions: readonly string[]; + +export = binaryExtensions; diff --git a/node_modules/binary-extensions/index.js b/node_modules/binary-extensions/index.js new file mode 100644 index 0000000..d46e468 --- /dev/null +++ b/node_modules/binary-extensions/index.js @@ -0,0 +1 @@ +module.exports = require('./binary-extensions.json'); diff --git a/node_modules/binary-extensions/license b/node_modules/binary-extensions/license new file mode 100644 index 0000000..401b1c7 --- /dev/null +++ b/node_modules/binary-extensions/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) 2019 Sindre Sorhus (https://sindresorhus.com), Paul Miller (https://paulmillr.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/binary-extensions/package.json b/node_modules/binary-extensions/package.json new file mode 100644 index 0000000..c4d3641 --- /dev/null +++ b/node_modules/binary-extensions/package.json @@ -0,0 +1,38 @@ +{ + "name": "binary-extensions", + "version": "2.2.0", + "description": "List of binary file extensions", + "license": "MIT", + "repository": "sindresorhus/binary-extensions", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=8" + }, + "scripts": { + "test": "xo && ava && tsd" + }, + "files": [ + "index.js", + "index.d.ts", + "binary-extensions.json", + "binary-extensions.json.d.ts" + ], + "keywords": [ + "binary", + "extensions", + "extension", + "file", + "json", + "list", + "array" + ], + "devDependencies": { + "ava": "^1.4.1", + "tsd": "^0.7.2", + "xo": "^0.24.0" + } +} diff --git a/node_modules/binary-extensions/readme.md b/node_modules/binary-extensions/readme.md new file mode 100644 index 0000000..3e25dd8 --- /dev/null +++ b/node_modules/binary-extensions/readme.md @@ -0,0 +1,41 @@ +# binary-extensions + +> List of binary file extensions + +The list is just a [JSON file](binary-extensions.json) and can be used anywhere. + + +## Install + +``` +$ npm install binary-extensions +``` + + +## Usage + +```js +const binaryExtensions = require('binary-extensions'); + +console.log(binaryExtensions); +//=> ['3ds', '3g2', …] +``` + + +## Related + +- [is-binary-path](https://github.com/sindresorhus/is-binary-path) - Check if a filepath is a binary file +- [text-extensions](https://github.com/sindresorhus/text-extensions) - List of text file extensions + + +--- + +
+ + Get professional support for this package with a Tidelift subscription. + + Tidelift helps make open source sustainable for maintainers while giving companies assurances about security, maintenance, and licensing for their dependencies. +
diff --git a/node_modules/braces/CHANGELOG.md b/node_modules/braces/CHANGELOG.md new file mode 100644 index 0000000..36f798b --- /dev/null +++ b/node_modules/braces/CHANGELOG.md @@ -0,0 +1,184 @@ +# Release history + +All notable changes to this project will be documented in this file. + +The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/) +and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html). + +
+ Guiding Principles + +- Changelogs are for humans, not machines. +- There should be an entry for every single version. +- The same types of changes should be grouped. +- Versions and sections should be linkable. +- The latest version comes first. +- The release date of each versions is displayed. +- Mention whether you follow Semantic Versioning. + +
+ +
+ Types of changes + +Changelog entries are classified using the following labels _(from [keep-a-changelog](http://keepachangelog.com/)_): + +- `Added` for new features. +- `Changed` for changes in existing functionality. +- `Deprecated` for soon-to-be removed features. +- `Removed` for now removed features. +- `Fixed` for any bug fixes. +- `Security` in case of vulnerabilities. + +
+ +## [3.0.0] - 2018-04-08 + +v3.0 is a complete refactor, resulting in a faster, smaller codebase, with fewer deps, and a more accurate parser and compiler. + +**Breaking Changes** + +- The undocumented `.makeRe` method was removed + +**Non-breaking changes** + +- Caching was removed + +## [2.3.2] - 2018-04-08 + +- start refactoring +- cover sets +- better range handling + +## [2.3.1] - 2018-02-17 + +- Remove unnecessary escape in Regex. (#14) + +## [2.3.0] - 2017-10-19 + +- minor code reorganization +- optimize regex +- expose `maxLength` option + +## [2.2.1] - 2017-05-30 + +- don't condense when braces contain extglobs + +## [2.2.0] - 2017-05-28 + +- ensure word boundaries are preserved +- fixes edge case where extglob characters precede a brace pattern + +## [2.1.1] - 2017-04-27 + +- use snapdragon-node +- handle edge case +- optimizations, lint + +## [2.0.4] - 2017-04-11 + +- pass opts to compiler +- minor optimization in create method +- re-write parser handlers to remove negation regex + +## [2.0.3] - 2016-12-10 + +- use split-string +- clear queue at the end +- adds sequences example +- add unit tests + +## [2.0.2] - 2016-10-21 + +- fix comma handling in nested extglobs + +## [2.0.1] - 2016-10-20 + +- add comments +- more tests, ensure quotes are stripped + +## [2.0.0] - 2016-10-19 + +- don't expand braces inside character classes +- add quantifier pattern + +## [1.8.5] - 2016-05-21 + +- Refactor (#10) + +## [1.8.4] - 2016-04-20 + +- fixes https://github.com/jonschlinkert/micromatch/issues/66 + +## [1.8.0] - 2015-03-18 + +- adds exponent examples, tests +- fixes the first example in https://github.com/jonschlinkert/micromatch/issues/38 + +## [1.6.0] - 2015-01-30 + +- optimizations, `bash` mode: +- improve path escaping + +## [1.5.0] - 2015-01-28 + +- Merge pull request #5 from eush77/lib-files + +## [1.4.0] - 2015-01-24 + +- add extglob tests +- externalize exponent function +- better whitespace handling + +## [1.3.0] - 2015-01-24 + +- make regex patterns explicity + +## [1.1.0] - 2015-01-11 + +- don't create a match group with `makeRe` + +## [1.0.0] - 2014-12-23 + +- Merge commit '97b05f5544f8348736a8efaecf5c32bbe3e2ad6e' +- support empty brace syntax +- better bash coverage +- better support for regex strings + +## [0.1.4] - 2014-11-14 + +- improve recognition of bad args, recognize mismatched argument types +- support escaping +- remove pathname-expansion +- support whitespace in patterns + +## [0.1.0] + +- first commit + +[2.3.2]: https://github.com/micromatch/braces/compare/2.3.1...2.3.2 +[2.3.1]: https://github.com/micromatch/braces/compare/2.3.0...2.3.1 +[2.3.0]: https://github.com/micromatch/braces/compare/2.2.1...2.3.0 +[2.2.1]: https://github.com/micromatch/braces/compare/2.2.0...2.2.1 +[2.2.0]: https://github.com/micromatch/braces/compare/2.1.1...2.2.0 +[2.1.1]: https://github.com/micromatch/braces/compare/2.1.0...2.1.1 +[2.1.0]: https://github.com/micromatch/braces/compare/2.0.4...2.1.0 +[2.0.4]: https://github.com/micromatch/braces/compare/2.0.3...2.0.4 +[2.0.3]: https://github.com/micromatch/braces/compare/2.0.2...2.0.3 +[2.0.2]: https://github.com/micromatch/braces/compare/2.0.1...2.0.2 +[2.0.1]: https://github.com/micromatch/braces/compare/2.0.0...2.0.1 +[2.0.0]: https://github.com/micromatch/braces/compare/1.8.5...2.0.0 +[1.8.5]: https://github.com/micromatch/braces/compare/1.8.4...1.8.5 +[1.8.4]: https://github.com/micromatch/braces/compare/1.8.0...1.8.4 +[1.8.0]: https://github.com/micromatch/braces/compare/1.6.0...1.8.0 +[1.6.0]: 
https://github.com/micromatch/braces/compare/1.5.0...1.6.0 +[1.5.0]: https://github.com/micromatch/braces/compare/1.4.0...1.5.0 +[1.4.0]: https://github.com/micromatch/braces/compare/1.3.0...1.4.0 +[1.3.0]: https://github.com/micromatch/braces/compare/1.2.0...1.3.0 +[1.2.0]: https://github.com/micromatch/braces/compare/1.1.0...1.2.0 +[1.1.0]: https://github.com/micromatch/braces/compare/1.0.0...1.1.0 +[1.0.0]: https://github.com/micromatch/braces/compare/0.1.4...1.0.0 +[0.1.4]: https://github.com/micromatch/braces/compare/0.1.0...0.1.4 + +[Unreleased]: https://github.com/micromatch/braces/compare/0.1.0...HEAD +[keep-a-changelog]: https://github.com/olivierlacan/keep-a-changelog \ No newline at end of file diff --git a/node_modules/braces/LICENSE b/node_modules/braces/LICENSE new file mode 100644 index 0000000..d32ab44 --- /dev/null +++ b/node_modules/braces/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014-2018, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/braces/README.md b/node_modules/braces/README.md new file mode 100644 index 0000000..cba2f60 --- /dev/null +++ b/node_modules/braces/README.md @@ -0,0 +1,593 @@ +# braces [![Donate](https://img.shields.io/badge/Donate-PayPal-green.svg)](https://www.paypal.com/cgi-bin/webscr?cmd=_s-xclick&hosted_button_id=W8YFZ425KND68) [![NPM version](https://img.shields.io/npm/v/braces.svg?style=flat)](https://www.npmjs.com/package/braces) [![NPM monthly downloads](https://img.shields.io/npm/dm/braces.svg?style=flat)](https://npmjs.org/package/braces) [![NPM total downloads](https://img.shields.io/npm/dt/braces.svg?style=flat)](https://npmjs.org/package/braces) [![Linux Build Status](https://img.shields.io/travis/micromatch/braces.svg?style=flat&label=Travis)](https://travis-ci.org/micromatch/braces) + +> Bash-like brace expansion, implemented in JavaScript. Safer than other brace expansion libs, with complete support for the Bash 4.3 braces specification, without sacrificing speed. + +Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and consider starring the project to show your :heart: and support. + +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install --save braces +``` + +## v3.0.0 Released!! + +See the [changelog](CHANGELOG.md) for details. + +## Why use braces? + +Brace patterns make globs more powerful by adding the ability to match specific ranges and sequences of characters. 
+ +* **Accurate** - complete support for the [Bash 4.3 Brace Expansion](www.gnu.org/software/bash/) specification (passes all of the Bash braces tests) +* **[fast and performant](#benchmarks)** - Starts fast, runs fast and [scales well](#performance) as patterns increase in complexity. +* **Organized code base** - The parser and compiler are easy to maintain and update when edge cases crop up. +* **Well-tested** - Thousands of test assertions, and passes all of the Bash, minimatch, and [brace-expansion](https://github.com/juliangruber/brace-expansion) unit tests (as of the date this was written). +* **Safer** - You shouldn't have to worry about users defining aggressive or malicious brace patterns that can break your application. Braces takes measures to prevent malicious regex that can be used for DDoS attacks (see [catastrophic backtracking](https://www.regular-expressions.info/catastrophic.html)). +* [Supports lists](#lists) - (aka "sets") `a/{b,c}/d` => `['a/b/d', 'a/c/d']` +* [Supports sequences](#sequences) - (aka "ranges") `{01..03}` => `['01', '02', '03']` +* [Supports steps](#steps) - (aka "increments") `{2..10..2}` => `['2', '4', '6', '8', '10']` +* [Supports escaping](#escaping) - To prevent evaluation of special characters. + +## Usage + +The main export is a function that takes one or more brace `patterns` and `options`. + +```js +const braces = require('braces'); +// braces(patterns[, options]); + +console.log(braces(['{01..05}', '{a..e}'])); +//=> ['(0[1-5])', '([a-e])'] + +console.log(braces(['{01..05}', '{a..e}'], { expand: true })); +//=> ['01', '02', '03', '04', '05', 'a', 'b', 'c', 'd', 'e'] +``` + +### Brace Expansion vs. Compilation + +By default, brace patterns are compiled into strings that are optimized for creating regular expressions and matching. + +**Compiled** + +```js +console.log(braces('a/{x,y,z}/b')); +//=> ['a/(x|y|z)/b'] +console.log(braces(['a/{01..20}/b', 'a/{1..5}/b'])); +//=> [ 'a/(0[1-9]|1[0-9]|20)/b', 'a/([1-5])/b' ] +``` + +**Expanded** + +Enable brace expansion by setting the `expand` option to true, or by using [braces.expand()](#expand) (returns an array similar to what you'd expect from Bash, or `echo {1..5}`, or [minimatch](https://github.com/isaacs/minimatch)): + +```js +console.log(braces('a/{x,y,z}/b', { expand: true })); +//=> ['a/x/b', 'a/y/b', 'a/z/b'] + +console.log(braces.expand('{01..10}')); +//=> ['01','02','03','04','05','06','07','08','09','10'] +``` + +### Lists + +Expand lists (like Bash "sets"): + +```js +console.log(braces('a/{foo,bar,baz}/*.js')); +//=> ['a/(foo|bar|baz)/*.js'] + +console.log(braces.expand('a/{foo,bar,baz}/*.js')); +//=> ['a/foo/*.js', 'a/bar/*.js', 'a/baz/*.js'] +``` + +### Sequences + +Expand ranges of characters (like Bash "sequences"): + +```js +console.log(braces.expand('{1..3}')); // ['1', '2', '3'] +console.log(braces.expand('a/{1..3}/b')); // ['a/1/b', 'a/2/b', 'a/3/b'] +console.log(braces('{a..c}', { expand: true })); // ['a', 'b', 'c'] +console.log(braces('foo/{a..c}', { expand: true })); // ['foo/a', 'foo/b', 'foo/c'] + +// supports zero-padded ranges +console.log(braces('a/{01..03}/b')); //=> ['a/(0[1-3])/b'] +console.log(braces('a/{001..300}/b')); //=> ['a/(0{2}[1-9]|0[1-9][0-9]|[12][0-9]{2}|300)/b'] +``` + +See [fill-range](https://github.com/jonschlinkert/fill-range) for all available range-expansion options. 
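+Descending sequences work the same way; the following is a small sketch added here for clarity (not from the original readme — the expected output follows the Bash semantics illustrated later in this document):
+
+```js
+console.log(braces.expand('{3..1}'));
+//=> ['3', '2', '1']
+
+console.log(braces.expand('file-{d..b}.txt'));
+//=> ['file-d.txt', 'file-c.txt', 'file-b.txt']
+```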
+ +### Stepped ranges + +Steps, or increments, may be used with ranges: + +```js +console.log(braces.expand('{2..10..2}')); +//=> ['2', '4', '6', '8', '10'] + +console.log(braces('{2..10..2}')); +//=> ['(2|4|6|8|10)'] +``` + +When the [.optimize](#optimize) method is used, or [options.optimize](#optionsoptimize) is set to true, sequences are passed to [to-regex-range](https://github.com/jonschlinkert/to-regex-range) for expansion. + +### Nesting + +Brace patterns may be nested. The results of each expanded string are not sorted, and left to right order is preserved. + +**"Expanded" braces** + +```js +console.log(braces.expand('a{b,c,/{x,y}}/e')); +//=> ['ab/e', 'ac/e', 'a/x/e', 'a/y/e'] + +console.log(braces.expand('a/{x,{1..5},y}/c')); +//=> ['a/x/c', 'a/1/c', 'a/2/c', 'a/3/c', 'a/4/c', 'a/5/c', 'a/y/c'] +``` + +**"Optimized" braces** + +```js +console.log(braces('a{b,c,/{x,y}}/e')); +//=> ['a(b|c|/(x|y))/e'] + +console.log(braces('a/{x,{1..5},y}/c')); +//=> ['a/(x|([1-5])|y)/c'] +``` + +### Escaping + +**Escaping braces** + +A brace pattern will not be expanded or evaluated if _either the opening or closing brace is escaped_: + +```js +console.log(braces.expand('a\\{d,c,b}e')); +//=> ['a{d,c,b}e'] + +console.log(braces.expand('a{d,c,b\\}e')); +//=> ['a{d,c,b}e'] +``` + +**Escaping commas** + +Commas inside braces may also be escaped: + +```js +console.log(braces.expand('a{b\\,c}d')); +//=> ['a{b,c}d'] + +console.log(braces.expand('a{d\\,c,b}e')); +//=> ['ad,ce', 'abe'] +``` + +**Single items** + +Following bash conventions, a brace pattern is also not expanded when it contains a single character: + +```js +console.log(braces.expand('a{b}c')); +//=> ['a{b}c'] +``` + +## Options + +### options.maxLength + +**Type**: `Number` + +**Default**: `65,536` + +**Description**: Limit the length of the input string. Useful when the input string is generated or your application allows users to pass a string, et cetera. + +```js +console.log(braces('a/{b,c}/d', { maxLength: 3 })); //=> throws an error +``` + +### options.expand + +**Type**: `Boolean` + +**Default**: `undefined` + +**Description**: Generate an "expanded" brace pattern (alternatively you can use the `braces.expand()` method, which does the same thing). + +```js +console.log(braces('a/{b,c}/d', { expand: true })); +//=> [ 'a/b/d', 'a/c/d' ] +``` + +### options.nodupes + +**Type**: `Boolean` + +**Default**: `undefined` + +**Description**: Remove duplicates from the returned array. + +### options.rangeLimit + +**Type**: `Number` + +**Default**: `1000` + +**Description**: To prevent malicious patterns from being passed by users, an error is thrown when `braces.expand()` is used or `options.expand` is true and the generated range will exceed the `rangeLimit`. + +You can customize `options.rangeLimit` or set it to `Infinity` to disable this altogether.
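+For instance, the following sketch (added for illustration, not from the original docs) assumes the input pattern is trusted and disables the limit entirely:
+
+```js
+// this range would exceed the default rangeLimit of 1000 and throw a RangeError,
+// so the limit is disabled here with Infinity
+console.log(braces.expand('{1..5000}', { rangeLimit: Infinity }).length);
+//=> 5000
+```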
+ +**Examples** + +```js +// pattern exceeds the "rangeLimit", so it's optimized automatically +console.log(braces.expand('{1..1000}')); +//=> ['([1-9]|[1-9][0-9]{1,2}|1000)'] + +// pattern does not exceed "rangeLimit", so it's NOT optimized +console.log(braces.expand('{1..100}')); +//=> ['1', '2', '3', '4', '5', '6', '7', '8', '9', '10', '11', '12', '13', '14', '15', '16', '17', '18', '19', '20', '21', '22', '23', '24', '25', '26', '27', '28', '29', '30', '31', '32', '33', '34', '35', '36', '37', '38', '39', '40', '41', '42', '43', '44', '45', '46', '47', '48', '49', '50', '51', '52', '53', '54', '55', '56', '57', '58', '59', '60', '61', '62', '63', '64', '65', '66', '67', '68', '69', '70', '71', '72', '73', '74', '75', '76', '77', '78', '79', '80', '81', '82', '83', '84', '85', '86', '87', '88', '89', '90', '91', '92', '93', '94', '95', '96', '97', '98', '99', '100'] +``` + +### options.transform + +**Type**: `Function` + +**Default**: `undefined` + +**Description**: Customize range expansion. + +**Example: Transforming non-numeric values** + +```js +const alpha = braces.expand('x/{a..e}/y', { + transform(value, index) { + // When non-numeric values are passed, "value" is a character code. + return 'foo/' + String.fromCharCode(value) + '-' + index; + } +}); +console.log(alpha); +//=> [ 'x/foo/a-0/y', 'x/foo/b-1/y', 'x/foo/c-2/y', 'x/foo/d-3/y', 'x/foo/e-4/y' ] +``` + +**Example: Transforming numeric values** + +```js +const numeric = braces.expand('{1..5}', { + transform(value) { + // when numeric values are passed, "value" is a number + return 'foo/' + value * 2; + } +}); +console.log(numeric); +//=> [ 'foo/2', 'foo/4', 'foo/6', 'foo/8', 'foo/10' ] +``` + +### options.quantifiers + +**Type**: `Boolean` + +**Default**: `undefined` + +**Description**: In regular expressions, quanitifiers can be used to specify how many times a token can be repeated. For example, `a{1,3}` will match the letter `a` one to three times. + +Unfortunately, regex quantifiers happen to share the same syntax as [Bash lists](#lists) + +The `quantifiers` option tells braces to detect when [regex quantifiers](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/RegExp#quantifiers) are defined in the given pattern, and not to try to expand them as lists. + +**Examples** + +```js +const braces = require('braces'); +console.log(braces('a/b{1,3}/{x,y,z}')); +//=> [ 'a/b(1|3)/(x|y|z)' ] +console.log(braces('a/b{1,3}/{x,y,z}', {quantifiers: true})); +//=> [ 'a/b{1,3}/(x|y|z)' ] +console.log(braces('a/b{1,3}/{x,y,z}', {quantifiers: true, expand: true})); +//=> [ 'a/b{1,3}/x', 'a/b{1,3}/y', 'a/b{1,3}/z' ] +``` + +### options.unescape + +**Type**: `Boolean` + +**Default**: `undefined` + +**Description**: Strip backslashes that were used for escaping from the result. + +## What is "brace expansion"? + +Brace expansion is a type of parameter expansion that was made popular by unix shells for generating lists of strings, as well as regex-like matching when used alongside wildcards (globs). + +In addition to "expansion", braces are also used for matching. In other words: + +* [brace expansion](#brace-expansion) is for generating new lists +* [brace matching](#brace-matching) is for filtering existing lists + +
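+To make the distinction concrete, here is a brief sketch added for illustration (not part of the original readme; the exact compiled source may vary by version):
+
+```js
+const braces = require('braces');
+
+// "expansion" generates a new list of strings
+console.log(braces.expand('img-{1..3}.png'));
+//=> ['img-1.png', 'img-2.png', 'img-3.png']
+
+// "matching" uses the compiled, regex-compatible form instead
+const [source] = braces('img-{1..3}.png');
+const re = new RegExp(`^${source}$`);
+console.log(re.test('img-2.png')); //=> true
+console.log(re.test('img-9.png')); //=> false
+```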
+More about brace expansion (click to expand) + +There are two main types of brace expansion: + +1. **lists**: which are defined using comma-separated values inside curly braces: `{a,b,c}` +2. **sequences**: which are defined using a starting value and an ending value, separated by two dots: `a{1..3}b`. Optionally, a third argument may be passed to define a "step" or increment to use: `a{1..100..10}b`. These are also sometimes referred to as "ranges". + +Here are some example brace patterns to illustrate how they work: + +**Sets** + +``` +{a,b,c} => a b c +{a,b,c}{1,2} => a1 a2 b1 b2 c1 c2 +``` + +**Sequences** + +``` +{1..9} => 1 2 3 4 5 6 7 8 9 +{4..-4} => 4 3 2 1 0 -1 -2 -3 -4 +{1..20..3} => 1 4 7 10 13 16 19 +{a..j} => a b c d e f g h i j +{j..a} => j i h g f e d c b a +{a..z..3} => a d g j m p s v y +``` + +**Combination** + +Sets and sequences can be mixed together or used along with any other strings. + +``` +{a,b,c}{1..3} => a1 a2 a3 b1 b2 b3 c1 c2 c3 +foo/{a,b,c}/bar => foo/a/bar foo/b/bar foo/c/bar +``` + +The fact that braces can be "expanded" from relatively simple patterns makes them ideal for quickly generating test fixtures, file paths, and similar use cases. + +## Brace matching + +In addition to _expansion_, brace patterns are also useful for performing regular-expression-like matching. + +For example, the pattern `foo/{1..3}/bar` would match any of the following strings: + +``` +foo/1/bar +foo/2/bar +foo/3/bar +``` + +But not: + +``` +baz/1/qux +baz/2/qux +baz/3/qux +``` + +Braces can also be combined with [glob patterns](https://github.com/jonschlinkert/micromatch) to perform more advanced wildcard matching. For example, the pattern `*/{1..3}/*` would match any of the following strings: + +``` +foo/1/bar +foo/2/bar +foo/3/bar +baz/1/qux +baz/2/qux +baz/3/qux +``` + +## Brace matching pitfalls + +Although brace patterns offer a user-friendly way of matching ranges or sets of strings, there are also some major disadvantages and potential risks you should be aware of. + +### tldr + +**"brace bombs"** + +* brace expansion can eat up a huge amount of processing resources +* as brace patterns increase _linearly in size_, the system resources required to expand the pattern increase exponentially +* users can accidentally (or intentionally) exhaust your system's resources resulting in the equivalent of a DoS attack (bonus: no programming knowledge is required!) + +For a more detailed explanation with examples, see the [geometric complexity](#geometric-complexity) section. + +### The solution + +Jump to the [performance section](#performance) to see how Braces solves this problem in comparison to other libraries. + +### Geometric complexity + +At minimum, brace patterns with sets limited to two elements have quadratic or `O(n^2)` complexity. But the complexity of the algorithm increases exponentially as the number of sets, _and elements per set_, increases, which is `O(n^c)`.
+ +For example, the following sets demonstrate quadratic (`O(n^2)`) complexity: + +``` +{1,2}{3,4} => (2X2) => 13 14 23 24 +{1,2}{3,4}{5,6} => (2X2X2) => 135 136 145 146 235 236 245 246 +``` + +But add an element to a set, and we get a n-fold Cartesian product with `O(n^c)` complexity: + +``` +{1,2,3}{4,5,6}{7,8,9} => (3X3X3) => 147 148 149 157 158 159 167 168 169 247 248 + 249 257 258 259 267 268 269 347 348 349 357 + 358 359 367 368 369 +``` + +Now, imagine how this complexity grows given that each element is a n-tuple: + +``` +{1..100}{1..100} => (100X100) => 10,000 elements (38.4 kB) +{1..100}{1..100}{1..100} => (100X100X100) => 1,000,000 elements (5.76 MB) +``` + +Although these examples are clearly contrived, they demonstrate how brace patterns can quickly grow out of control. + +**More information** + +Interested in learning more about brace expansion? + +* [linuxjournal/bash-brace-expansion](http://www.linuxjournal.com/content/bash-brace-expansion) +* [rosettacode/Brace_expansion](https://rosettacode.org/wiki/Brace_expansion) +* [cartesian product](https://en.wikipedia.org/wiki/Cartesian_product) + +
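+A practical corollary of the numbers above: the size of an expansion is simply the product of the sizes of its sets and ranges, so it can be estimated before expanding anything. The helper below is hypothetical (it is not part of braces) and deliberately naive — it ignores nesting, escaping, and alphabetic ranges — but it shows the idea:
+
+```js
+// Hypothetical helper (illustration only): estimate how many strings a flat
+// pattern would expand to by multiplying the sizes of its sets and ranges.
+const estimateExpansionSize = pattern => {
+  let total = 1;
+  for (const [, body] of pattern.matchAll(/\{([^{}]+)\}/g)) {
+    const range = body.match(/^(-?\d+)\.\.(-?\d+)(?:\.\.(-?\d+))?$/);
+    if (range) {
+      const start = Number(range[1]);
+      const end = Number(range[2]);
+      const step = range[3] ? Math.abs(Number(range[3])) : 1;
+      total *= Math.floor(Math.abs(end - start) / step) + 1;
+    } else {
+      // comma-separated list: one result per element
+      total *= body.split(',').length;
+    }
+  }
+  return total;
+};
+
+console.log(estimateExpansionSize('{1..100}{1..100}')); //=> 10000
+console.log(estimateExpansionSize('{a,b,c}{1,2}'));     //=> 6
+```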
+ +## Performance + +Braces is not only screaming fast, it's also more accurate the other brace expansion libraries. + +### Better algorithms + +Fortunately there is a solution to the ["brace bomb" problem](#brace-matching-pitfalls): _don't expand brace patterns into an array when they're used for matching_. + +Instead, convert the pattern into an optimized regular expression. This is easier said than done, and braces is the only library that does this currently. + +**The proof is in the numbers** + +Minimatch gets exponentially slower as patterns increase in complexity, braces does not. The following results were generated using `braces()` and `minimatch.braceExpand()`, respectively. + +| **Pattern** | **braces** | **[minimatch][]** | +| --- | --- | --- | +| `{1..9007199254740991}`[^1] | `298 B` (5ms 459μs)| N/A (freezes) | +| `{1..1000000000000000}` | `41 B` (1ms 15μs) | N/A (freezes) | +| `{1..100000000000000}` | `40 B` (890μs) | N/A (freezes) | +| `{1..10000000000000}` | `39 B` (2ms 49μs) | N/A (freezes) | +| `{1..1000000000000}` | `38 B` (608μs) | N/A (freezes) | +| `{1..100000000000}` | `37 B` (397μs) | N/A (freezes) | +| `{1..10000000000}` | `35 B` (983μs) | N/A (freezes) | +| `{1..1000000000}` | `34 B` (798μs) | N/A (freezes) | +| `{1..100000000}` | `33 B` (733μs) | N/A (freezes) | +| `{1..10000000}` | `32 B` (5ms 632μs) | `78.89 MB` (16s 388ms 569μs) | +| `{1..1000000}` | `31 B` (1ms 381μs) | `6.89 MB` (1s 496ms 887μs) | +| `{1..100000}` | `30 B` (950μs) | `588.89 kB` (146ms 921μs) | +| `{1..10000}` | `29 B` (1ms 114μs) | `48.89 kB` (14ms 187μs) | +| `{1..1000}` | `28 B` (760μs) | `3.89 kB` (1ms 453μs) | +| `{1..100}` | `22 B` (345μs) | `291 B` (196μs) | +| `{1..10}` | `10 B` (533μs) | `20 B` (37μs) | +| `{1..3}` | `7 B` (190μs) | `5 B` (27μs) | + +### Faster algorithms + +When you need expansion, braces is still much faster. + +_(the following results were generated using `braces.expand()` and `minimatch.braceExpand()`, respectively)_ + +| **Pattern** | **braces** | **[minimatch][]** | +| --- | --- | --- | +| `{1..10000000}` | `78.89 MB` (2s 698ms 642μs) | `78.89 MB` (18s 601ms 974μs) | +| `{1..1000000}` | `6.89 MB` (458ms 576μs) | `6.89 MB` (1s 491ms 621μs) | +| `{1..100000}` | `588.89 kB` (20ms 728μs) | `588.89 kB` (156ms 919μs) | +| `{1..10000}` | `48.89 kB` (2ms 202μs) | `48.89 kB` (13ms 641μs) | +| `{1..1000}` | `3.89 kB` (1ms 796μs) | `3.89 kB` (1ms 958μs) | +| `{1..100}` | `291 B` (424μs) | `291 B` (211μs) | +| `{1..10}` | `20 B` (487μs) | `20 B` (72μs) | +| `{1..3}` | `5 B` (166μs) | `5 B` (27μs) | + +If you'd like to run these comparisons yourself, see [test/support/generate.js](test/support/generate.js). + +## Benchmarks + +### Running benchmarks + +Install dev dependencies: + +```bash +npm i -d && npm benchmark +``` + +### Latest results + +Braces is more accurate, without sacrificing performance. 
+ +```bash +# range (expanded) + braces x 29,040 ops/sec ±3.69% (91 runs sampled) + minimatch x 4,735 ops/sec ±1.28% (90 runs sampled) + +# range (optimized for regex) + braces x 382,878 ops/sec ±0.56% (94 runs sampled) + minimatch x 1,040 ops/sec ±0.44% (93 runs sampled) + +# nested ranges (expanded) + braces x 19,744 ops/sec ±2.27% (92 runs sampled) + minimatch x 4,579 ops/sec ±0.50% (93 runs sampled) + +# nested ranges (optimized for regex) + braces x 246,019 ops/sec ±2.02% (93 runs sampled) + minimatch x 1,028 ops/sec ±0.39% (94 runs sampled) + +# set (expanded) + braces x 138,641 ops/sec ±0.53% (95 runs sampled) + minimatch x 219,582 ops/sec ±0.98% (94 runs sampled) + +# set (optimized for regex) + braces x 388,408 ops/sec ±0.41% (95 runs sampled) + minimatch x 44,724 ops/sec ±0.91% (89 runs sampled) + +# nested sets (expanded) + braces x 84,966 ops/sec ±0.48% (94 runs sampled) + minimatch x 140,720 ops/sec ±0.37% (95 runs sampled) + +# nested sets (optimized for regex) + braces x 263,340 ops/sec ±2.06% (92 runs sampled) + minimatch x 28,714 ops/sec ±0.40% (90 runs sampled) +``` + +## About + +
+Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). + +
+ +
+Running Tests + +Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command: + +```sh +$ npm install && npm test +``` + +
+ +
+Building docs + +_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_ + +To generate the readme, run the following command: + +```sh +$ npm install -g verbose/verb#dev verb-generate-readme && verb +``` + +
+ +### Contributors + +| **Commits** | **Contributor** | +| --- | --- | +| 197 | [jonschlinkert](https://github.com/jonschlinkert) | +| 4 | [doowb](https://github.com/doowb) | +| 1 | [es128](https://github.com/es128) | +| 1 | [eush77](https://github.com/eush77) | +| 1 | [hemanth](https://github.com/hemanth) | +| 1 | [wtgtybhertgeghgtwtg](https://github.com/wtgtybhertgeghgtwtg) | + +### Author + +**Jon Schlinkert** + +* [GitHub Profile](https://github.com/jonschlinkert) +* [Twitter Profile](https://twitter.com/jonschlinkert) +* [LinkedIn Profile](https://linkedin.com/in/jonschlinkert) + +### License + +Copyright © 2019, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT License](LICENSE). + +*** + +_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.8.0, on April 08, 2019._ \ No newline at end of file diff --git a/node_modules/braces/index.js b/node_modules/braces/index.js new file mode 100644 index 0000000..0eee0f5 --- /dev/null +++ b/node_modules/braces/index.js @@ -0,0 +1,170 @@ +'use strict'; + +const stringify = require('./lib/stringify'); +const compile = require('./lib/compile'); +const expand = require('./lib/expand'); +const parse = require('./lib/parse'); + +/** + * Expand the given pattern or create a regex-compatible string. + * + * ```js + * const braces = require('braces'); + * console.log(braces('{a,b,c}', { compile: true })); //=> ['(a|b|c)'] + * console.log(braces('{a,b,c}')); //=> ['a', 'b', 'c'] + * ``` + * @param {String} `str` + * @param {Object} `options` + * @return {String} + * @api public + */ + +const braces = (input, options = {}) => { + let output = []; + + if (Array.isArray(input)) { + for (let pattern of input) { + let result = braces.create(pattern, options); + if (Array.isArray(result)) { + output.push(...result); + } else { + output.push(result); + } + } + } else { + output = [].concat(braces.create(input, options)); + } + + if (options && options.expand === true && options.nodupes === true) { + output = [...new Set(output)]; + } + return output; +}; + +/** + * Parse the given `str` with the given `options`. + * + * ```js + * // braces.parse(pattern, [, options]); + * const ast = braces.parse('a/{b,c}/d'); + * console.log(ast); + * ``` + * @param {String} pattern Brace pattern to parse + * @param {Object} options + * @return {Object} Returns an AST + * @api public + */ + +braces.parse = (input, options = {}) => parse(input, options); + +/** + * Creates a braces string from an AST, or an AST node. + * + * ```js + * const braces = require('braces'); + * let ast = braces.parse('foo/{a,b}/bar'); + * console.log(stringify(ast.nodes[2])); //=> '{a,b}' + * ``` + * @param {String} `input` Brace pattern or AST. + * @param {Object} `options` + * @return {Array} Returns an array of expanded values. + * @api public + */ + +braces.stringify = (input, options = {}) => { + if (typeof input === 'string') { + return stringify(braces.parse(input, options), options); + } + return stringify(input, options); +}; + +/** + * Compiles a brace pattern into a regex-compatible, optimized string. + * This method is called by the main [braces](#braces) function by default. + * + * ```js + * const braces = require('braces'); + * console.log(braces.compile('a/{b,c}/d')); + * //=> ['a/(b|c)/d'] + * ``` + * @param {String} `input` Brace pattern or AST. + * @param {Object} `options` + * @return {Array} Returns an array of expanded values. 
+ * @api public + */ + +braces.compile = (input, options = {}) => { + if (typeof input === 'string') { + input = braces.parse(input, options); + } + return compile(input, options); +}; + +/** + * Expands a brace pattern into an array. This method is called by the + * main [braces](#braces) function when `options.expand` is true. Before + * using this method it's recommended that you read the [performance notes](#performance)) + * and advantages of using [.compile](#compile) instead. + * + * ```js + * const braces = require('braces'); + * console.log(braces.expand('a/{b,c}/d')); + * //=> ['a/b/d', 'a/c/d']; + * ``` + * @param {String} `pattern` Brace pattern + * @param {Object} `options` + * @return {Array} Returns an array of expanded values. + * @api public + */ + +braces.expand = (input, options = {}) => { + if (typeof input === 'string') { + input = braces.parse(input, options); + } + + let result = expand(input, options); + + // filter out empty strings if specified + if (options.noempty === true) { + result = result.filter(Boolean); + } + + // filter out duplicates if specified + if (options.nodupes === true) { + result = [...new Set(result)]; + } + + return result; +}; + +/** + * Processes a brace pattern and returns either an expanded array + * (if `options.expand` is true), a highly optimized regex-compatible string. + * This method is called by the main [braces](#braces) function. + * + * ```js + * const braces = require('braces'); + * console.log(braces.create('user-{200..300}/project-{a,b,c}-{1..10}')) + * //=> 'user-(20[0-9]|2[1-9][0-9]|300)/project-(a|b|c)-([1-9]|10)' + * ``` + * @param {String} `pattern` Brace pattern + * @param {Object} `options` + * @return {Array} Returns an array of expanded values. + * @api public + */ + +braces.create = (input, options = {}) => { + if (input === '' || input.length < 3) { + return [input]; + } + + return options.expand !== true + ? braces.compile(input, options) + : braces.expand(input, options); +}; + +/** + * Expose "braces" + */ + +module.exports = braces; diff --git a/node_modules/braces/lib/compile.js b/node_modules/braces/lib/compile.js new file mode 100644 index 0000000..3e984a4 --- /dev/null +++ b/node_modules/braces/lib/compile.js @@ -0,0 +1,57 @@ +'use strict'; + +const fill = require('fill-range'); +const utils = require('./utils'); + +const compile = (ast, options = {}) => { + let walk = (node, parent = {}) => { + let invalidBlock = utils.isInvalidBrace(parent); + let invalidNode = node.invalid === true && options.escapeInvalid === true; + let invalid = invalidBlock === true || invalidNode === true; + let prefix = options.escapeInvalid === true ? '\\' : ''; + let output = ''; + + if (node.isOpen === true) { + return prefix + node.value; + } + if (node.isClose === true) { + return prefix + node.value; + } + + if (node.type === 'open') { + return invalid ? (prefix + node.value) : '('; + } + + if (node.type === 'close') { + return invalid ? (prefix + node.value) : ')'; + } + + if (node.type === 'comma') { + return node.prev.type === 'comma' ? '' : (invalid ? node.value : '|'); + } + + if (node.value) { + return node.value; + } + + if (node.nodes && node.ranges > 0) { + let args = utils.reduce(node.nodes); + let range = fill(...args, { ...options, wrap: false, toRegex: true }); + + if (range.length !== 0) { + return args.length > 1 && range.length > 1 ? 
`(${range})` : range; + } + } + + if (node.nodes) { + for (let child of node.nodes) { + output += walk(child, node); + } + } + return output; + }; + + return walk(ast); +}; + +module.exports = compile; diff --git a/node_modules/braces/lib/constants.js b/node_modules/braces/lib/constants.js new file mode 100644 index 0000000..a937943 --- /dev/null +++ b/node_modules/braces/lib/constants.js @@ -0,0 +1,57 @@ +'use strict'; + +module.exports = { + MAX_LENGTH: 1024 * 64, + + // Digits + CHAR_0: '0', /* 0 */ + CHAR_9: '9', /* 9 */ + + // Alphabet chars. + CHAR_UPPERCASE_A: 'A', /* A */ + CHAR_LOWERCASE_A: 'a', /* a */ + CHAR_UPPERCASE_Z: 'Z', /* Z */ + CHAR_LOWERCASE_Z: 'z', /* z */ + + CHAR_LEFT_PARENTHESES: '(', /* ( */ + CHAR_RIGHT_PARENTHESES: ')', /* ) */ + + CHAR_ASTERISK: '*', /* * */ + + // Non-alphabetic chars. + CHAR_AMPERSAND: '&', /* & */ + CHAR_AT: '@', /* @ */ + CHAR_BACKSLASH: '\\', /* \ */ + CHAR_BACKTICK: '`', /* ` */ + CHAR_CARRIAGE_RETURN: '\r', /* \r */ + CHAR_CIRCUMFLEX_ACCENT: '^', /* ^ */ + CHAR_COLON: ':', /* : */ + CHAR_COMMA: ',', /* , */ + CHAR_DOLLAR: '$', /* . */ + CHAR_DOT: '.', /* . */ + CHAR_DOUBLE_QUOTE: '"', /* " */ + CHAR_EQUAL: '=', /* = */ + CHAR_EXCLAMATION_MARK: '!', /* ! */ + CHAR_FORM_FEED: '\f', /* \f */ + CHAR_FORWARD_SLASH: '/', /* / */ + CHAR_HASH: '#', /* # */ + CHAR_HYPHEN_MINUS: '-', /* - */ + CHAR_LEFT_ANGLE_BRACKET: '<', /* < */ + CHAR_LEFT_CURLY_BRACE: '{', /* { */ + CHAR_LEFT_SQUARE_BRACKET: '[', /* [ */ + CHAR_LINE_FEED: '\n', /* \n */ + CHAR_NO_BREAK_SPACE: '\u00A0', /* \u00A0 */ + CHAR_PERCENT: '%', /* % */ + CHAR_PLUS: '+', /* + */ + CHAR_QUESTION_MARK: '?', /* ? */ + CHAR_RIGHT_ANGLE_BRACKET: '>', /* > */ + CHAR_RIGHT_CURLY_BRACE: '}', /* } */ + CHAR_RIGHT_SQUARE_BRACKET: ']', /* ] */ + CHAR_SEMICOLON: ';', /* ; */ + CHAR_SINGLE_QUOTE: '\'', /* ' */ + CHAR_SPACE: ' ', /* */ + CHAR_TAB: '\t', /* \t */ + CHAR_UNDERSCORE: '_', /* _ */ + CHAR_VERTICAL_LINE: '|', /* | */ + CHAR_ZERO_WIDTH_NOBREAK_SPACE: '\uFEFF' /* \uFEFF */ +}; diff --git a/node_modules/braces/lib/expand.js b/node_modules/braces/lib/expand.js new file mode 100644 index 0000000..376c748 --- /dev/null +++ b/node_modules/braces/lib/expand.js @@ -0,0 +1,113 @@ +'use strict'; + +const fill = require('fill-range'); +const stringify = require('./stringify'); +const utils = require('./utils'); + +const append = (queue = '', stash = '', enclose = false) => { + let result = []; + + queue = [].concat(queue); + stash = [].concat(stash); + + if (!stash.length) return queue; + if (!queue.length) { + return enclose ? utils.flatten(stash).map(ele => `{${ele}}`) : stash; + } + + for (let item of queue) { + if (Array.isArray(item)) { + for (let value of item) { + result.push(append(value, stash, enclose)); + } + } else { + for (let ele of stash) { + if (enclose === true && typeof ele === 'string') ele = `{${ele}}`; + result.push(Array.isArray(ele) ? append(item, ele, enclose) : (item + ele)); + } + } + } + return utils.flatten(result); +}; + +const expand = (ast, options = {}) => { + let rangeLimit = options.rangeLimit === void 0 ? 
1000 : options.rangeLimit; + + let walk = (node, parent = {}) => { + node.queue = []; + + let p = parent; + let q = parent.queue; + + while (p.type !== 'brace' && p.type !== 'root' && p.parent) { + p = p.parent; + q = p.queue; + } + + if (node.invalid || node.dollar) { + q.push(append(q.pop(), stringify(node, options))); + return; + } + + if (node.type === 'brace' && node.invalid !== true && node.nodes.length === 2) { + q.push(append(q.pop(), ['{}'])); + return; + } + + if (node.nodes && node.ranges > 0) { + let args = utils.reduce(node.nodes); + + if (utils.exceedsLimit(...args, options.step, rangeLimit)) { + throw new RangeError('expanded array length exceeds range limit. Use options.rangeLimit to increase or disable the limit.'); + } + + let range = fill(...args, options); + if (range.length === 0) { + range = stringify(node, options); + } + + q.push(append(q.pop(), range)); + node.nodes = []; + return; + } + + let enclose = utils.encloseBrace(node); + let queue = node.queue; + let block = node; + + while (block.type !== 'brace' && block.type !== 'root' && block.parent) { + block = block.parent; + queue = block.queue; + } + + for (let i = 0; i < node.nodes.length; i++) { + let child = node.nodes[i]; + + if (child.type === 'comma' && node.type === 'brace') { + if (i === 1) queue.push(''); + queue.push(''); + continue; + } + + if (child.type === 'close') { + q.push(append(q.pop(), queue, enclose)); + continue; + } + + if (child.value && child.type !== 'open') { + queue.push(append(queue.pop(), child.value)); + continue; + } + + if (child.nodes) { + walk(child, node); + } + } + + return queue; + }; + + return utils.flatten(walk(ast)); +}; + +module.exports = expand; diff --git a/node_modules/braces/lib/parse.js b/node_modules/braces/lib/parse.js new file mode 100644 index 0000000..145ea26 --- /dev/null +++ b/node_modules/braces/lib/parse.js @@ -0,0 +1,333 @@ +'use strict'; + +const stringify = require('./stringify'); + +/** + * Constants + */ + +const { + MAX_LENGTH, + CHAR_BACKSLASH, /* \ */ + CHAR_BACKTICK, /* ` */ + CHAR_COMMA, /* , */ + CHAR_DOT, /* . */ + CHAR_LEFT_PARENTHESES, /* ( */ + CHAR_RIGHT_PARENTHESES, /* ) */ + CHAR_LEFT_CURLY_BRACE, /* { */ + CHAR_RIGHT_CURLY_BRACE, /* } */ + CHAR_LEFT_SQUARE_BRACKET, /* [ */ + CHAR_RIGHT_SQUARE_BRACKET, /* ] */ + CHAR_DOUBLE_QUOTE, /* " */ + CHAR_SINGLE_QUOTE, /* ' */ + CHAR_NO_BREAK_SPACE, + CHAR_ZERO_WIDTH_NOBREAK_SPACE +} = require('./constants'); + +/** + * parse + */ + +const parse = (input, options = {}) => { + if (typeof input !== 'string') { + throw new TypeError('Expected a string'); + } + + let opts = options || {}; + let max = typeof opts.maxLength === 'number' ? 
Math.min(MAX_LENGTH, opts.maxLength) : MAX_LENGTH; + if (input.length > max) { + throw new SyntaxError(`Input length (${input.length}), exceeds max characters (${max})`); + } + + let ast = { type: 'root', input, nodes: [] }; + let stack = [ast]; + let block = ast; + let prev = ast; + let brackets = 0; + let length = input.length; + let index = 0; + let depth = 0; + let value; + let memo = {}; + + /** + * Helpers + */ + + const advance = () => input[index++]; + const push = node => { + if (node.type === 'text' && prev.type === 'dot') { + prev.type = 'text'; + } + + if (prev && prev.type === 'text' && node.type === 'text') { + prev.value += node.value; + return; + } + + block.nodes.push(node); + node.parent = block; + node.prev = prev; + prev = node; + return node; + }; + + push({ type: 'bos' }); + + while (index < length) { + block = stack[stack.length - 1]; + value = advance(); + + /** + * Invalid chars + */ + + if (value === CHAR_ZERO_WIDTH_NOBREAK_SPACE || value === CHAR_NO_BREAK_SPACE) { + continue; + } + + /** + * Escaped chars + */ + + if (value === CHAR_BACKSLASH) { + push({ type: 'text', value: (options.keepEscaping ? value : '') + advance() }); + continue; + } + + /** + * Right square bracket (literal): ']' + */ + + if (value === CHAR_RIGHT_SQUARE_BRACKET) { + push({ type: 'text', value: '\\' + value }); + continue; + } + + /** + * Left square bracket: '[' + */ + + if (value === CHAR_LEFT_SQUARE_BRACKET) { + brackets++; + + let closed = true; + let next; + + while (index < length && (next = advance())) { + value += next; + + if (next === CHAR_LEFT_SQUARE_BRACKET) { + brackets++; + continue; + } + + if (next === CHAR_BACKSLASH) { + value += advance(); + continue; + } + + if (next === CHAR_RIGHT_SQUARE_BRACKET) { + brackets--; + + if (brackets === 0) { + break; + } + } + } + + push({ type: 'text', value }); + continue; + } + + /** + * Parentheses + */ + + if (value === CHAR_LEFT_PARENTHESES) { + block = push({ type: 'paren', nodes: [] }); + stack.push(block); + push({ type: 'text', value }); + continue; + } + + if (value === CHAR_RIGHT_PARENTHESES) { + if (block.type !== 'paren') { + push({ type: 'text', value }); + continue; + } + block = stack.pop(); + push({ type: 'text', value }); + block = stack[stack.length - 1]; + continue; + } + + /** + * Quotes: '|"|` + */ + + if (value === CHAR_DOUBLE_QUOTE || value === CHAR_SINGLE_QUOTE || value === CHAR_BACKTICK) { + let open = value; + let next; + + if (options.keepQuotes !== true) { + value = ''; + } + + while (index < length && (next = advance())) { + if (next === CHAR_BACKSLASH) { + value += next + advance(); + continue; + } + + if (next === open) { + if (options.keepQuotes === true) value += next; + break; + } + + value += next; + } + + push({ type: 'text', value }); + continue; + } + + /** + * Left curly brace: '{' + */ + + if (value === CHAR_LEFT_CURLY_BRACE) { + depth++; + + let dollar = prev.value && prev.value.slice(-1) === '$' || block.dollar === true; + let brace = { + type: 'brace', + open: true, + close: false, + dollar, + depth, + commas: 0, + ranges: 0, + nodes: [] + }; + + block = push(brace); + stack.push(block); + push({ type: 'open', value }); + continue; + } + + /** + * Right curly brace: '}' + */ + + if (value === CHAR_RIGHT_CURLY_BRACE) { + if (block.type !== 'brace') { + push({ type: 'text', value }); + continue; + } + + let type = 'close'; + block = stack.pop(); + block.close = true; + + push({ type, value }); + depth--; + + block = stack[stack.length - 1]; + continue; + } + + /** + * Comma: ',' + */ + + if (value 
=== CHAR_COMMA && depth > 0) { + if (block.ranges > 0) { + block.ranges = 0; + let open = block.nodes.shift(); + block.nodes = [open, { type: 'text', value: stringify(block) }]; + } + + push({ type: 'comma', value }); + block.commas++; + continue; + } + + /** + * Dot: '.' + */ + + if (value === CHAR_DOT && depth > 0 && block.commas === 0) { + let siblings = block.nodes; + + if (depth === 0 || siblings.length === 0) { + push({ type: 'text', value }); + continue; + } + + if (prev.type === 'dot') { + block.range = []; + prev.value += value; + prev.type = 'range'; + + if (block.nodes.length !== 3 && block.nodes.length !== 5) { + block.invalid = true; + block.ranges = 0; + prev.type = 'text'; + continue; + } + + block.ranges++; + block.args = []; + continue; + } + + if (prev.type === 'range') { + siblings.pop(); + + let before = siblings[siblings.length - 1]; + before.value += prev.value + value; + prev = before; + block.ranges--; + continue; + } + + push({ type: 'dot', value }); + continue; + } + + /** + * Text + */ + + push({ type: 'text', value }); + } + + // Mark imbalanced braces and brackets as invalid + do { + block = stack.pop(); + + if (block.type !== 'root') { + block.nodes.forEach(node => { + if (!node.nodes) { + if (node.type === 'open') node.isOpen = true; + if (node.type === 'close') node.isClose = true; + if (!node.nodes) node.type = 'text'; + node.invalid = true; + } + }); + + // get the location of the block on parent.nodes (block's siblings) + let parent = stack[stack.length - 1]; + let index = parent.nodes.indexOf(block); + // replace the (invalid) block with it's nodes + parent.nodes.splice(index, 1, ...block.nodes); + } + } while (stack.length > 0); + + push({ type: 'eos' }); + return ast; +}; + +module.exports = parse; diff --git a/node_modules/braces/lib/stringify.js b/node_modules/braces/lib/stringify.js new file mode 100644 index 0000000..414b7bc --- /dev/null +++ b/node_modules/braces/lib/stringify.js @@ -0,0 +1,32 @@ +'use strict'; + +const utils = require('./utils'); + +module.exports = (ast, options = {}) => { + let stringify = (node, parent = {}) => { + let invalidBlock = options.escapeInvalid && utils.isInvalidBrace(parent); + let invalidNode = node.invalid === true && options.escapeInvalid === true; + let output = ''; + + if (node.value) { + if ((invalidBlock || invalidNode) && utils.isOpenOrClose(node)) { + return '\\' + node.value; + } + return node.value; + } + + if (node.value) { + return node.value; + } + + if (node.nodes) { + for (let child of node.nodes) { + output += stringify(child); + } + } + return output; + }; + + return stringify(ast); +}; + diff --git a/node_modules/braces/lib/utils.js b/node_modules/braces/lib/utils.js new file mode 100644 index 0000000..e3551a6 --- /dev/null +++ b/node_modules/braces/lib/utils.js @@ -0,0 +1,112 @@ +'use strict'; + +exports.isInteger = num => { + if (typeof num === 'number') { + return Number.isInteger(num); + } + if (typeof num === 'string' && num.trim() !== '') { + return Number.isInteger(Number(num)); + } + return false; +}; + +/** + * Find a node of the given type + */ + +exports.find = (node, type) => node.nodes.find(node => node.type === type); + +/** + * Find a node of the given type + */ + +exports.exceedsLimit = (min, max, step = 1, limit) => { + if (limit === false) return false; + if (!exports.isInteger(min) || !exports.isInteger(max)) return false; + return ((Number(max) - Number(min)) / Number(step)) >= limit; +}; + +/** + * Escape the given node with '\\' before node.value + */ + +exports.escapeNode = 
(block, n = 0, type) => { + let node = block.nodes[n]; + if (!node) return; + + if ((type && node.type === type) || node.type === 'open' || node.type === 'close') { + if (node.escaped !== true) { + node.value = '\\' + node.value; + node.escaped = true; + } + } +}; + +/** + * Returns true if the given brace node should be enclosed in literal braces + */ + +exports.encloseBrace = node => { + if (node.type !== 'brace') return false; + if ((node.commas >> 0 + node.ranges >> 0) === 0) { + node.invalid = true; + return true; + } + return false; +}; + +/** + * Returns true if a brace node is invalid. + */ + +exports.isInvalidBrace = block => { + if (block.type !== 'brace') return false; + if (block.invalid === true || block.dollar) return true; + if ((block.commas >> 0 + block.ranges >> 0) === 0) { + block.invalid = true; + return true; + } + if (block.open !== true || block.close !== true) { + block.invalid = true; + return true; + } + return false; +}; + +/** + * Returns true if a node is an open or close node + */ + +exports.isOpenOrClose = node => { + if (node.type === 'open' || node.type === 'close') { + return true; + } + return node.open === true || node.close === true; +}; + +/** + * Reduce an array of text nodes. + */ + +exports.reduce = nodes => nodes.reduce((acc, node) => { + if (node.type === 'text') acc.push(node.value); + if (node.type === 'range') node.type = 'text'; + return acc; +}, []); + +/** + * Flatten an array + */ + +exports.flatten = (...args) => { + const result = []; + const flat = arr => { + for (let i = 0; i < arr.length; i++) { + let ele = arr[i]; + Array.isArray(ele) ? flat(ele, result) : ele !== void 0 && result.push(ele); + } + return result; + }; + flat(args); + return result; +}; diff --git a/node_modules/braces/package.json b/node_modules/braces/package.json new file mode 100644 index 0000000..3f52e34 --- /dev/null +++ b/node_modules/braces/package.json @@ -0,0 +1,77 @@ +{ + "name": "braces", + "description": "Bash-like brace expansion, implemented in JavaScript. 
Safer than other brace expansion libs, with complete support for the Bash 4.3 braces specification, without sacrificing speed.", + "version": "3.0.2", + "homepage": "https://github.com/micromatch/braces", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "contributors": [ + "Brian Woodward (https://twitter.com/doowb)", + "Elan Shanker (https://github.com/es128)", + "Eugene Sharygin (https://github.com/eush77)", + "hemanth.hm (http://h3manth.com)", + "Jon Schlinkert (http://twitter.com/jonschlinkert)" + ], + "repository": "micromatch/braces", + "bugs": { + "url": "https://github.com/micromatch/braces/issues" + }, + "license": "MIT", + "files": [ + "index.js", + "lib" + ], + "main": "index.js", + "engines": { + "node": ">=8" + }, + "scripts": { + "test": "mocha", + "benchmark": "node benchmark" + }, + "dependencies": { + "fill-range": "^7.0.1" + }, + "devDependencies": { + "ansi-colors": "^3.2.4", + "bash-path": "^2.0.1", + "gulp-format-md": "^2.0.0", + "mocha": "^6.1.1" + }, + "keywords": [ + "alpha", + "alphabetical", + "bash", + "brace", + "braces", + "expand", + "expansion", + "filepath", + "fill", + "fs", + "glob", + "globbing", + "letter", + "match", + "matches", + "matching", + "number", + "numerical", + "path", + "range", + "ranges", + "sh" + ], + "verb": { + "toc": false, + "layout": "default", + "tasks": [ + "readme" + ], + "lint": { + "reflinks": true + }, + "plugins": [ + "gulp-format-md" + ] + } +} diff --git a/node_modules/camelcase-css/README.md b/node_modules/camelcase-css/README.md new file mode 100644 index 0000000..caaa817 --- /dev/null +++ b/node_modules/camelcase-css/README.md @@ -0,0 +1,27 @@ +# camelcase-css [![NPM Version][npm-image]][npm-url] [![Build Status][travis-image]][travis-url] + +> Convert a kebab-cased CSS property into a camelCased DOM property. + + +## Installation +[Node.js](http://nodejs.org/) `>= 6` is required. 
Type this at the command line: +```shell +npm install camelcase-css +``` + + +## Usage +```js +const camelCaseCSS = require('camelcase-css'); + +camelCaseCSS('-webkit-border-radius'); //-> WebkitBorderRadius +camelCaseCSS('-moz-border-radius'); //-> MozBorderRadius +camelCaseCSS('-ms-border-radius'); //-> msBorderRadius +camelCaseCSS('border-radius'); //-> borderRadius +``` + + +[npm-image]: https://img.shields.io/npm/v/camelcase-css.svg +[npm-url]: https://npmjs.org/package/camelcase-css +[travis-image]: https://img.shields.io/travis/stevenvachon/camelcase-css.svg +[travis-url]: https://travis-ci.org/stevenvachon/camelcase-css diff --git a/node_modules/camelcase-css/index-es5.js b/node_modules/camelcase-css/index-es5.js new file mode 100644 index 0000000..48ef078 --- /dev/null +++ b/node_modules/camelcase-css/index-es5.js @@ -0,0 +1,24 @@ +"use strict"; + +var pattern = /-(\w|$)/g; + +var callback = function callback(dashChar, char) { + return char.toUpperCase(); +}; + +var camelCaseCSS = function camelCaseCSS(property) { + property = property.toLowerCase(); + + // NOTE :: IE8's "styleFloat" is intentionally not supported + if (property === "float") { + return "cssFloat"; + } + // Microsoft vendor-prefixes are uniquely cased + else if (property.charCodeAt(0) === 45&& property.charCodeAt(1) === 109&& property.charCodeAt(2) === 115&& property.charCodeAt(3) === 45) { + return property.substr(1).replace(pattern, callback); + } else { + return property.replace(pattern, callback); + } +}; + +module.exports = camelCaseCSS; diff --git a/node_modules/camelcase-css/index.js b/node_modules/camelcase-css/index.js new file mode 100644 index 0000000..c9d1f1b --- /dev/null +++ b/node_modules/camelcase-css/index.js @@ -0,0 +1,30 @@ +"use strict"; +const pattern = /-(\w|$)/g; + +const callback = (dashChar, char) => char.toUpperCase(); + + + +const camelCaseCSS = property => +{ + property = property.toLowerCase(); + + // NOTE :: IE8's "styleFloat" is intentionally not supported + if (property === "float") + { + return "cssFloat"; + } + // Microsoft vendor-prefixes are uniquely cased + else if (property.startsWith("-ms-")) + { + return property.substr(1).replace(pattern, callback); + } + else + { + return property.replace(pattern, callback); + } +}; + + + +module.exports = camelCaseCSS; diff --git a/node_modules/camelcase-css/license b/node_modules/camelcase-css/license new file mode 100644 index 0000000..b760007 --- /dev/null +++ b/node_modules/camelcase-css/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Steven Vachon (svachon.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/camelcase-css/package.json b/node_modules/camelcase-css/package.json new file mode 100644 index 0000000..61dc51a --- /dev/null +++ b/node_modules/camelcase-css/package.json @@ -0,0 +1,34 @@ +{ + "name": "camelcase-css", + "description": "Convert a kebab-cased CSS property into a camelCased DOM property.", + "version": "2.0.1", + "license": "MIT", + "author": "Steven Vachon (https://www.svachon.com/)", + "repository": "stevenvachon/camelcase-css", + "browser": "index-es5.js", + "devDependencies": { + "babel-cli": "^6.26.0", + "babel-core": "^6.26.3", + "babel-plugin-optimize-starts-with": "^1.0.1", + "babel-preset-env": "^1.7.0", + "chai": "^4.1.2", + "mocha": "^5.2.0" + }, + "engines": { + "node": ">= 6" + }, + "scripts": { + "pretest": "babel index.js --out-file=index-es5.js --presets=env --plugins=optimize-starts-with", + "test": "mocha test.js --check-leaks --bail" + }, + "files": [ + "index.js", + "index-es5.js" + ], + "keywords": [ + "camelcase", + "case", + "css", + "dom" + ] +} diff --git a/node_modules/chokidar/LICENSE b/node_modules/chokidar/LICENSE new file mode 100644 index 0000000..fa9162b --- /dev/null +++ b/node_modules/chokidar/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2012-2019 Paul Miller (https://paulmillr.com), Elan Shanker + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the “Software”), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/chokidar/README.md b/node_modules/chokidar/README.md new file mode 100644 index 0000000..d6a57fd --- /dev/null +++ b/node_modules/chokidar/README.md @@ -0,0 +1,308 @@ +# Chokidar [![Weekly downloads](https://img.shields.io/npm/dw/chokidar.svg)](https://github.com/paulmillr/chokidar) [![Yearly downloads](https://img.shields.io/npm/dy/chokidar.svg)](https://github.com/paulmillr/chokidar) + +> Minimal and efficient cross-platform file watching library + +[![NPM](https://nodei.co/npm/chokidar.png)](https://www.npmjs.com/package/chokidar) + +## Why? + +Node.js `fs.watch`: + +* Doesn't report filenames on MacOS. +* Doesn't report events at all when using editors like Sublime on MacOS. +* Often reports events twice. +* Emits most changes as `rename`. +* Does not provide an easy way to recursively watch file trees. +* Does not support recursive watching on Linux. 
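+For contrast, here is a minimal sketch (added for illustration) of watching with the bare `fs.watch` API described above; it is subject to the limitations in this list, which chokidar normalizes away:
+
+```js
+const fs = require('fs');
+
+// raw events: often duplicated, frequently reported as 'rename',
+// and not recursive on Linux
+fs.watch('.', (eventType, filename) => {
+  console.log(eventType, filename); // filename may be null on some platforms
+});
+```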
+ +Node.js `fs.watchFile`: + +* Almost as bad at event handling. +* Also does not provide any recursive watching. +* Results in high CPU utilization. + +Chokidar resolves these problems. + +Initially made for **[Brunch](https://brunch.io/)** (an ultra-swift web app build tool), it is now used in +[Microsoft's Visual Studio Code](https://github.com/microsoft/vscode), +[gulp](https://github.com/gulpjs/gulp/), +[karma](https://karma-runner.github.io/), +[PM2](https://github.com/Unitech/PM2), +[browserify](http://browserify.org/), +[webpack](https://webpack.github.io/), +[BrowserSync](https://www.browsersync.io/), +and [many others](https://www.npmjs.com/browse/depended/chokidar). +It has proven itself in production environments. + +Version 3 is out! Check out our blog post about it: [Chokidar 3: How to save 32TB of traffic every week](https://paulmillr.com/posts/chokidar-3-save-32tb-of-traffic/) + +## How? + +Chokidar does still rely on the Node.js core `fs` module, but when using +`fs.watch` and `fs.watchFile` for watching, it normalizes the events it +receives, often checking for truth by getting file stats and/or dir contents. + +On MacOS, chokidar by default uses a native extension exposing the Darwin +`FSEvents` API. This provides very efficient recursive watching compared with +implementations like `kqueue` available on most \*nix platforms. Chokidar still +does have to do some work to normalize the events received that way as well. + +On most other platforms, the `fs.watch`-based implementation is the default, which +avoids polling and keeps CPU usage down. Be advised that chokidar will initiate +watchers recursively for everything within scope of the paths that have been +specified, so be judicious about not wasting system resources by watching much +more than needed. + +## Getting started + +Install with npm: + +```sh +npm install chokidar +``` + +Then `require` and use it in your code: + +```javascript +const chokidar = require('chokidar'); + +// One-liner for current directory +chokidar.watch('.').on('all', (event, path) => { + console.log(event, path); +}); +``` + +## API + +```javascript +// Example of a more typical implementation structure + +// Initialize watcher. +const watcher = chokidar.watch('file, dir, glob, or array', { + ignored: /(^|[\/\\])\../, // ignore dotfiles + persistent: true +}); + +// Something to use when events are received. +const log = console.log.bind(console); +// Add event listeners. +watcher + .on('add', path => log(`File ${path} has been added`)) + .on('change', path => log(`File ${path} has been changed`)) + .on('unlink', path => log(`File ${path} has been removed`)); + +// More possible events. +watcher + .on('addDir', path => log(`Directory ${path} has been added`)) + .on('unlinkDir', path => log(`Directory ${path} has been removed`)) + .on('error', error => log(`Watcher error: ${error}`)) + .on('ready', () => log('Initial scan complete. Ready for changes')) + .on('raw', (event, path, details) => { // internal + log('Raw event info:', event, path, details); + }); + +// 'add', 'addDir' and 'change' events also receive stat() results as second +// argument when available: https://nodejs.org/api/fs.html#fs_class_fs_stats +watcher.on('change', (path, stats) => { + if (stats) console.log(`File ${path} changed size to ${stats.size}`); +}); + +// Watch new files. 
+watcher.add('new-file'); +watcher.add(['new-file-2', 'new-file-3', '**/other-file*']); + +// Get list of actual paths being watched on the filesystem +var watchedPaths = watcher.getWatched(); + +// Un-watch some files. +await watcher.unwatch('new-file*'); + +// Stop watching. +// The method is async! +watcher.close().then(() => console.log('closed')); + +// Full list of options. See below for descriptions. +// Do not use this example! +chokidar.watch('file', { + persistent: true, + + ignored: '*.txt', + ignoreInitial: false, + followSymlinks: true, + cwd: '.', + disableGlobbing: false, + + usePolling: false, + interval: 100, + binaryInterval: 300, + alwaysStat: false, + depth: 99, + awaitWriteFinish: { + stabilityThreshold: 2000, + pollInterval: 100 + }, + + ignorePermissionErrors: false, + atomic: true // or a custom 'atomicity delay', in milliseconds (default 100) +}); + +``` + +`chokidar.watch(paths, [options])` + +* `paths` (string or array of strings). Paths to files, dirs to be watched +recursively, or glob patterns. + - Note: globs must not contain windows separators (`\`), + because that's how they work by the standard — + you'll need to replace them with forward slashes (`/`). + - Note 2: for additional glob documentation, check out low-level + library: [picomatch](https://github.com/micromatch/picomatch). +* `options` (object) Options object as defined below: + +#### Persistence + +* `persistent` (default: `true`). Indicates whether the process +should continue to run as long as files are being watched. If set to +`false` when using `fsevents` to watch, no more events will be emitted +after `ready`, even if the process continues to run. + +#### Path filtering + +* `ignored` ([anymatch](https://github.com/es128/anymatch)-compatible definition) +Defines files/paths to be ignored. The whole relative or absolute path is +tested, not just filename. If a function with two arguments is provided, it +gets called twice per path - once with a single argument (the path), second +time with two arguments (the path and the +[`fs.Stats`](https://nodejs.org/api/fs.html#fs_class_fs_stats) +object of that path). +* `ignoreInitial` (default: `false`). If set to `false` then `add`/`addDir` events are also emitted for matching paths while +instantiating the watching as chokidar discovers these file paths (before the `ready` event). +* `followSymlinks` (default: `true`). When `false`, only the +symlinks themselves will be watched for changes instead of following +the link references and bubbling events through the link's path. +* `cwd` (no default). The base directory from which watch `paths` are to be +derived. Paths emitted with events will be relative to this. +* `disableGlobbing` (default: `false`). If set to `true` then the strings passed to `.watch()` and `.add()` are treated as +literal path names, even if they look like globs. + +#### Performance + +* `usePolling` (default: `false`). +Whether to use fs.watchFile (backed by polling), or fs.watch. If polling +leads to high CPU utilization, consider setting this to `false`. It is +typically necessary to **set this to `true` to successfully watch files over +a network**, and it may be necessary to successfully watch files in other +non-standard situations. Setting to `true` explicitly on MacOS overrides the +`useFsEvents` default. You may also set the CHOKIDAR_USEPOLLING env variable +to true (1) or false (0) in order to override this option. +* _Polling-specific settings_ (effective when `usePolling: true`) + * `interval` (default: `100`). 
Interval of file system polling, in milliseconds. You may also + set the CHOKIDAR_INTERVAL env variable to override this option. + * `binaryInterval` (default: `300`). Interval of file system + polling for binary files. + ([see list of binary extensions](https://github.com/sindresorhus/binary-extensions/blob/master/binary-extensions.json)) +* `useFsEvents` (default: `true` on MacOS). Whether to use the +`fsevents` watching interface if available. When set to `true` explicitly +and `fsevents` is available this supercedes the `usePolling` setting. When +set to `false` on MacOS, `usePolling: true` becomes the default. +* `alwaysStat` (default: `false`). If relying upon the +[`fs.Stats`](https://nodejs.org/api/fs.html#fs_class_fs_stats) +object that may get passed with `add`, `addDir`, and `change` events, set +this to `true` to ensure it is provided even in cases where it wasn't +already available from the underlying watch events. +* `depth` (default: `undefined`). If set, limits how many levels of +subdirectories will be traversed. +* `awaitWriteFinish` (default: `false`). +By default, the `add` event will fire when a file first appears on disk, before +the entire file has been written. Furthermore, in some cases some `change` +events will be emitted while the file is being written. In some cases, +especially when watching for large files there will be a need to wait for the +write operation to finish before responding to a file creation or modification. +Setting `awaitWriteFinish` to `true` (or a truthy value) will poll file size, +holding its `add` and `change` events until the size does not change for a +configurable amount of time. The appropriate duration setting is heavily +dependent on the OS and hardware. For accurate detection this parameter should +be relatively high, making file watching much less responsive. +Use with caution. + * *`options.awaitWriteFinish` can be set to an object in order to adjust + timing params:* + * `awaitWriteFinish.stabilityThreshold` (default: 2000). Amount of time in + milliseconds for a file size to remain constant before emitting its event. + * `awaitWriteFinish.pollInterval` (default: 100). File size polling interval, in milliseconds. + +#### Errors + +* `ignorePermissionErrors` (default: `false`). Indicates whether to watch files +that don't have read permissions if possible. If watching fails due to `EPERM` +or `EACCES` with this set to `true`, the errors will be suppressed silently. +* `atomic` (default: `true` if `useFsEvents` and `usePolling` are `false`). +Automatically filters out artifacts that occur when using editors that use +"atomic writes" instead of writing directly to the source file. If a file is +re-added within 100 ms of being deleted, Chokidar emits a `change` event +rather than `unlink` then `add`. If the default of 100 ms does not work well +for you, you can override it by setting `atomic` to a custom value, in +milliseconds. + +### Methods & Events + +`chokidar.watch()` produces an instance of `FSWatcher`. Methods of `FSWatcher`: + +* `.add(path / paths)`: Add files, directories, or glob patterns for tracking. +Takes an array of strings or just one string. +* `.on(event, callback)`: Listen for an FS event. +Available events: `add`, `addDir`, `change`, `unlink`, `unlinkDir`, `ready`, +`raw`, `error`. +Additionally `all` is available which gets emitted with the underlying event +name and path for every event other than `ready`, `raw`, and `error`. `raw` is internal, use it carefully. 
+* `.unwatch(path / paths)`: Stop watching files, directories, or glob patterns. +Takes an array of strings or just one string. +* `.close()`: **async** Removes all listeners from watched files and closes the underlying watchers. Returns a Promise; use with `await` to ensure everything has been released before continuing. +* `.getWatched()`: Returns an object representing all the paths on the file +system being watched by this `FSWatcher` instance. The object's keys are all the +directories (using absolute paths unless the `cwd` option was used), and the +values are arrays of the names of the items contained in each directory. + +## CLI + +If you need a CLI interface for your file watching, check out +[chokidar-cli](https://github.com/open-cli-tools/chokidar-cli), allowing you to +execute a command on each change, or get a stdio stream of change events. + +## Install Troubleshooting + +* `npm WARN optional dep failed, continuing fsevents@n.n.n` + * This message is a normal part of how `npm` handles optional dependencies and is + not indicative of a problem. Even if accompanied by other related error messages, + Chokidar should function properly. + +* `TypeError: fsevents is not a constructor` + * Update chokidar by doing `rm -rf node_modules package-lock.json yarn.lock && npm install`, or update your dependency that uses chokidar. + +* Chokidar is producing an `ENOSPC` error on Linux, like this: + * `bash: cannot set terminal process group (-1): Inappropriate ioctl for device bash: no job control in this shell` + `Error: watch /home/ ENOSPC` + * This means Chokidar ran out of file handles and you'll need to increase their count by executing the following command in a terminal: + `echo fs.inotify.max_user_watches=524288 | sudo tee -a /etc/sysctl.conf && sudo sysctl -p` + +## Changelog + +For a more detailed changelog, see [`full_changelog.md`](.github/full_changelog.md). +- **v3.5 (Jan 6, 2021):** Support for ARM Macs with Apple Silicon. Fixes for deleted symlinks. +- **v3.4 (Apr 26, 2020):** Support for directory-based symlinks. Fixes for macos file replacement. +- **v3.3 (Nov 2, 2019):** `FSWatcher#close()` method became async. That fixes IO race conditions related to the close method. +- **v3.2 (Oct 1, 2019):** Improve Linux RAM usage by 50%. Race condition fixes. Windows glob fixes. Improve stability by using tight range of dependency versions. +- **v3.1 (Sep 16, 2019):** dotfiles are no longer filtered out by default. Use `ignored` option if needed. Improve initial Linux scan time by 50%. +- **v3 (Apr 30, 2019):** massive CPU & RAM consumption improvements; reduces deps / package size by a factor of 17x and bumps Node.js requirement to v8.16 and higher. +- **v2 (Dec 29, 2017):** Globs are now posix-style-only, without windows support. Tons of bugfixes. +- **v1 (Apr 7, 2015):** Glob support, symlink support, tons of bugfixes. Node 0.8+ is supported. +- **v0.1 (Apr 20, 2012):** Initial release, extracted from [Brunch](https://github.com/brunch/brunch/blob/9847a065aea300da99bd0753f90354cde9de1261/src/helpers.coffee#L66). + +## Also + +Why was chokidar named this way? What's the meaning behind it? + +>Chowkidar is a transliteration of the Hindi word चौकीदार, meaning 'watchman, gatekeeper'. It ultimately comes from the Sanskrit _चतुष्क_ (crossway, quadrangle, consisting-of-four). + +## License + +MIT (c) Paul Miller (), see [LICENSE](LICENSE) file.
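+
+## Usage example
+
+A minimal sketch that combines several of the options and methods documented
+above (`ignored`, `awaitWriteFinish`, `getWatched()`, and the awaited
+`close()`); the `uploads` directory, the 500 ms threshold, and the handler
+bodies are placeholders, not recommendations:
+
+```js
+const chokidar = require('chokidar');
+
+// Hold `add`/`change` events until the file size has been stable for 500 ms,
+// and skip dotfiles (they are no longer filtered out by default since v3.1).
+const watcher = chokidar.watch('uploads', {
+  ignored: /(^|[\/\\])\../,
+  awaitWriteFinish: {stabilityThreshold: 500, pollInterval: 100}
+});
+
+watcher
+  .on('add', path => console.log(`added: ${path}`))
+  .on('change', path => console.log(`changed: ${path}`))
+  .on('unlink', path => console.log(`removed: ${path}`))
+  .on('error', error => console.error(`watcher error: ${error}`))
+  .on('ready', () => {
+    // Keys are watched directories; values are arrays of entries within them.
+    console.log('Initial scan complete', watcher.getWatched());
+  });
+
+// Later: release all underlying watchers before exiting.
+process.on('SIGINT', async () => {
+  await watcher.close();
+  process.exit(0);
+});
+```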
diff --git a/node_modules/chokidar/index.js b/node_modules/chokidar/index.js new file mode 100644 index 0000000..ed4b6d5 --- /dev/null +++ b/node_modules/chokidar/index.js @@ -0,0 +1,973 @@ +'use strict'; + +const { EventEmitter } = require('events'); +const fs = require('fs'); +const sysPath = require('path'); +const { promisify } = require('util'); +const readdirp = require('readdirp'); +const anymatch = require('anymatch').default; +const globParent = require('glob-parent'); +const isGlob = require('is-glob'); +const braces = require('braces'); +const normalizePath = require('normalize-path'); + +const NodeFsHandler = require('./lib/nodefs-handler'); +const FsEventsHandler = require('./lib/fsevents-handler'); +const { + EV_ALL, + EV_READY, + EV_ADD, + EV_CHANGE, + EV_UNLINK, + EV_ADD_DIR, + EV_UNLINK_DIR, + EV_RAW, + EV_ERROR, + + STR_CLOSE, + STR_END, + + BACK_SLASH_RE, + DOUBLE_SLASH_RE, + SLASH_OR_BACK_SLASH_RE, + DOT_RE, + REPLACER_RE, + + SLASH, + SLASH_SLASH, + BRACE_START, + BANG, + ONE_DOT, + TWO_DOTS, + GLOBSTAR, + SLASH_GLOBSTAR, + ANYMATCH_OPTS, + STRING_TYPE, + FUNCTION_TYPE, + EMPTY_STR, + EMPTY_FN, + + isWindows, + isMacos, + isIBMi +} = require('./lib/constants'); + +const stat = promisify(fs.stat); +const readdir = promisify(fs.readdir); + +/** + * @typedef {String} Path + * @typedef {'all'|'add'|'addDir'|'change'|'unlink'|'unlinkDir'|'raw'|'error'|'ready'} EventName + * @typedef {'readdir'|'watch'|'add'|'remove'|'change'} ThrottleType + */ + +/** + * + * @typedef {Object} WatchHelpers + * @property {Boolean} followSymlinks + * @property {'stat'|'lstat'} statMethod + * @property {Path} path + * @property {Path} watchPath + * @property {Function} entryPath + * @property {Boolean} hasGlob + * @property {Object} globFilter + * @property {Function} filterPath + * @property {Function} filterDir + */ + +const arrify = (value = []) => Array.isArray(value) ? value : [value]; +const flatten = (list, result = []) => { + list.forEach(item => { + if (Array.isArray(item)) { + flatten(item, result); + } else { + result.push(item); + } + }); + return result; +}; + +const unifyPaths = (paths_) => { + /** + * @type {Array} + */ + const paths = flatten(arrify(paths_)); + if (!paths.every(p => typeof p === STRING_TYPE)) { + throw new TypeError(`Non-string provided as watch path: ${paths}`); + } + return paths.map(normalizePathToUnix); +}; + +// If SLASH_SLASH occurs at the beginning of path, it is not replaced +// because "//StoragePC/DrivePool/Movies" is a valid network path +const toUnix = (string) => { + let str = string.replace(BACK_SLASH_RE, SLASH); + let prepend = false; + if (str.startsWith(SLASH_SLASH)) { + prepend = true; + } + while (str.match(DOUBLE_SLASH_RE)) { + str = str.replace(DOUBLE_SLASH_RE, SLASH); + } + if (prepend) { + str = SLASH + str; + } + return str; +}; + +// Our version of upath.normalize +// TODO: this is not equal to path-normalize module - investigate why +const normalizePathToUnix = (path) => toUnix(sysPath.normalize(toUnix(path))); + +const normalizeIgnored = (cwd = EMPTY_STR) => (path) => { + if (typeof path !== STRING_TYPE) return path; + return normalizePathToUnix(sysPath.isAbsolute(path) ? path : sysPath.join(cwd, path)); +}; + +const getAbsolutePath = (path, cwd) => { + if (sysPath.isAbsolute(path)) { + return path; + } + if (path.startsWith(BANG)) { + return BANG + sysPath.join(cwd, path.slice(1)); + } + return sysPath.join(cwd, path); +}; + +const undef = (opts, key) => opts[key] === undefined; + +/** + * Directory entry. 
+ * @property {Path} path + * @property {Set} items + */ +class DirEntry { + /** + * @param {Path} dir + * @param {Function} removeWatcher + */ + constructor(dir, removeWatcher) { + this.path = dir; + this._removeWatcher = removeWatcher; + /** @type {Set} */ + this.items = new Set(); + } + + add(item) { + const {items} = this; + if (!items) return; + if (item !== ONE_DOT && item !== TWO_DOTS) items.add(item); + } + + async remove(item) { + const {items} = this; + if (!items) return; + items.delete(item); + if (items.size > 0) return; + + const dir = this.path; + try { + await readdir(dir); + } catch (err) { + if (this._removeWatcher) { + this._removeWatcher(sysPath.dirname(dir), sysPath.basename(dir)); + } + } + } + + has(item) { + const {items} = this; + if (!items) return; + return items.has(item); + } + + /** + * @returns {Array} + */ + getChildren() { + const {items} = this; + if (!items) return; + return [...items.values()]; + } + + dispose() { + this.items.clear(); + delete this.path; + delete this._removeWatcher; + delete this.items; + Object.freeze(this); + } +} + +const STAT_METHOD_F = 'stat'; +const STAT_METHOD_L = 'lstat'; +class WatchHelper { + constructor(path, watchPath, follow, fsw) { + this.fsw = fsw; + this.path = path = path.replace(REPLACER_RE, EMPTY_STR); + this.watchPath = watchPath; + this.fullWatchPath = sysPath.resolve(watchPath); + this.hasGlob = watchPath !== path; + /** @type {object|boolean} */ + if (path === EMPTY_STR) this.hasGlob = false; + this.globSymlink = this.hasGlob && follow ? undefined : false; + this.globFilter = this.hasGlob ? anymatch(path, undefined, ANYMATCH_OPTS) : false; + this.dirParts = this.getDirParts(path); + this.dirParts.forEach((parts) => { + if (parts.length > 1) parts.pop(); + }); + this.followSymlinks = follow; + this.statMethod = follow ? STAT_METHOD_F : STAT_METHOD_L; + } + + checkGlobSymlink(entry) { + // only need to resolve once + // first entry should always have entry.parentDir === EMPTY_STR + if (this.globSymlink === undefined) { + this.globSymlink = entry.fullParentDir === this.fullWatchPath ? + false : {realPath: entry.fullParentDir, linkPath: this.fullWatchPath}; + } + + if (this.globSymlink) { + return entry.fullPath.replace(this.globSymlink.realPath, this.globSymlink.linkPath); + } + + return entry.fullPath; + } + + entryPath(entry) { + return sysPath.join(this.watchPath, + sysPath.relative(this.watchPath, this.checkGlobSymlink(entry)) + ); + } + + filterPath(entry) { + const {stats} = entry; + if (stats && stats.isSymbolicLink()) return this.filterDir(entry); + const resolvedPath = this.entryPath(entry); + const matchesGlob = this.hasGlob && typeof this.globFilter === FUNCTION_TYPE ? + this.globFilter(resolvedPath) : true; + return matchesGlob && + this.fsw._isntIgnored(resolvedPath, stats) && + this.fsw._hasReadPermissions(stats); + } + + getDirParts(path) { + if (!this.hasGlob) return []; + const parts = []; + const expandedPath = path.includes(BRACE_START) ? 
braces.expand(path) : [path]; + expandedPath.forEach((path) => { + parts.push(sysPath.relative(this.watchPath, path).split(SLASH_OR_BACK_SLASH_RE)); + }); + return parts; + } + + filterDir(entry) { + if (this.hasGlob) { + const entryParts = this.getDirParts(this.checkGlobSymlink(entry)); + let globstar = false; + this.unmatchedGlob = !this.dirParts.some((parts) => { + return parts.every((part, i) => { + if (part === GLOBSTAR) globstar = true; + return globstar || !entryParts[0][i] || anymatch(part, entryParts[0][i], ANYMATCH_OPTS); + }); + }); + } + return !this.unmatchedGlob && this.fsw._isntIgnored(this.entryPath(entry), entry.stats); + } +} + +/** + * Watches files & directories for changes. Emitted events: + * `add`, `addDir`, `change`, `unlink`, `unlinkDir`, `all`, `error` + * + * new FSWatcher() + * .add(directories) + * .on('add', path => log('File', path, 'was added')) + */ +class FSWatcher extends EventEmitter { +// Not indenting methods for history sake; for now. +constructor(_opts) { + super(); + + const opts = {}; + if (_opts) Object.assign(opts, _opts); // for frozen objects + + /** @type {Map} */ + this._watched = new Map(); + /** @type {Map} */ + this._closers = new Map(); + /** @type {Set} */ + this._ignoredPaths = new Set(); + + /** @type {Map} */ + this._throttled = new Map(); + + /** @type {Map} */ + this._symlinkPaths = new Map(); + + this._streams = new Set(); + this.closed = false; + + // Set up default options. + if (undef(opts, 'persistent')) opts.persistent = true; + if (undef(opts, 'ignoreInitial')) opts.ignoreInitial = false; + if (undef(opts, 'ignorePermissionErrors')) opts.ignorePermissionErrors = false; + if (undef(opts, 'interval')) opts.interval = 100; + if (undef(opts, 'binaryInterval')) opts.binaryInterval = 300; + if (undef(opts, 'disableGlobbing')) opts.disableGlobbing = false; + opts.enableBinaryInterval = opts.binaryInterval !== opts.interval; + + // Enable fsevents on OS X when polling isn't explicitly enabled. + if (undef(opts, 'useFsEvents')) opts.useFsEvents = !opts.usePolling; + + // If we can't use fsevents, ensure the options reflect it's disabled. + const canUseFsEvents = FsEventsHandler.canUse(); + if (!canUseFsEvents) opts.useFsEvents = false; + + // Use polling on Mac if not using fsevents. + // Other platforms use non-polling fs_watch. + if (undef(opts, 'usePolling') && !opts.useFsEvents) { + opts.usePolling = isMacos; + } + + // Always default to polling on IBM i because fs.watch() is not available on IBM i. 
+ if(isIBMi) { + opts.usePolling = true; + } + + // Global override (useful for end-developers that need to force polling for all + // instances of chokidar, regardless of usage/dependency depth) + const envPoll = process.env.CHOKIDAR_USEPOLLING; + if (envPoll !== undefined) { + const envLower = envPoll.toLowerCase(); + + if (envLower === 'false' || envLower === '0') { + opts.usePolling = false; + } else if (envLower === 'true' || envLower === '1') { + opts.usePolling = true; + } else { + opts.usePolling = !!envLower; + } + } + const envInterval = process.env.CHOKIDAR_INTERVAL; + if (envInterval) { + opts.interval = Number.parseInt(envInterval, 10); + } + + // Editor atomic write normalization enabled by default with fs.watch + if (undef(opts, 'atomic')) opts.atomic = !opts.usePolling && !opts.useFsEvents; + if (opts.atomic) this._pendingUnlinks = new Map(); + + if (undef(opts, 'followSymlinks')) opts.followSymlinks = true; + + if (undef(opts, 'awaitWriteFinish')) opts.awaitWriteFinish = false; + if (opts.awaitWriteFinish === true) opts.awaitWriteFinish = {}; + const awf = opts.awaitWriteFinish; + if (awf) { + if (!awf.stabilityThreshold) awf.stabilityThreshold = 2000; + if (!awf.pollInterval) awf.pollInterval = 100; + this._pendingWrites = new Map(); + } + if (opts.ignored) opts.ignored = arrify(opts.ignored); + + let readyCalls = 0; + this._emitReady = () => { + readyCalls++; + if (readyCalls >= this._readyCount) { + this._emitReady = EMPTY_FN; + this._readyEmitted = true; + // use process.nextTick to allow time for listener to be bound + process.nextTick(() => this.emit(EV_READY)); + } + }; + this._emitRaw = (...args) => this.emit(EV_RAW, ...args); + this._readyEmitted = false; + this.options = opts; + + // Initialize with proper watcher. + if (opts.useFsEvents) { + this._fsEventsHandler = new FsEventsHandler(this); + } else { + this._nodeFsHandler = new NodeFsHandler(this); + } + + // You’re frozen when your heart’s not open. 
+ Object.freeze(opts); +} + +// Public methods + +/** + * Adds paths to be watched on an existing FSWatcher instance + * @param {Path|Array} paths_ + * @param {String=} _origAdd private; for handling non-existent paths to be watched + * @param {Boolean=} _internal private; indicates a non-user add + * @returns {FSWatcher} for chaining + */ +add(paths_, _origAdd, _internal) { + const {cwd, disableGlobbing} = this.options; + this.closed = false; + let paths = unifyPaths(paths_); + if (cwd) { + paths = paths.map((path) => { + const absPath = getAbsolutePath(path, cwd); + + // Check `path` instead of `absPath` because the cwd portion can't be a glob + if (disableGlobbing || !isGlob(path)) { + return absPath; + } + return normalizePath(absPath); + }); + } + + // set aside negated glob strings + paths = paths.filter((path) => { + if (path.startsWith(BANG)) { + this._ignoredPaths.add(path.slice(1)); + return false; + } + + // if a path is being added that was previously ignored, stop ignoring it + this._ignoredPaths.delete(path); + this._ignoredPaths.delete(path + SLASH_GLOBSTAR); + + // reset the cached userIgnored anymatch fn + // to make ignoredPaths changes effective + this._userIgnored = undefined; + + return true; + }); + + if (this.options.useFsEvents && this._fsEventsHandler) { + if (!this._readyCount) this._readyCount = paths.length; + if (this.options.persistent) this._readyCount *= 2; + paths.forEach((path) => this._fsEventsHandler._addToFsEvents(path)); + } else { + if (!this._readyCount) this._readyCount = 0; + this._readyCount += paths.length; + Promise.all( + paths.map(async path => { + const res = await this._nodeFsHandler._addToNodeFs(path, !_internal, 0, 0, _origAdd); + if (res) this._emitReady(); + return res; + }) + ).then(results => { + if (this.closed) return; + results.filter(item => item).forEach(item => { + this.add(sysPath.dirname(item), sysPath.basename(_origAdd || item)); + }); + }); + } + + return this; +} + +/** + * Close watchers or start ignoring events from specified paths. + * @param {Path|Array} paths_ - string or array of strings, file/directory paths and/or globs + * @returns {FSWatcher} for chaining +*/ +unwatch(paths_) { + if (this.closed) return this; + const paths = unifyPaths(paths_); + const {cwd} = this.options; + + paths.forEach((path) => { + // convert to absolute path unless relative path already matches + if (!sysPath.isAbsolute(path) && !this._closers.has(path)) { + if (cwd) path = sysPath.join(cwd, path); + path = sysPath.resolve(path); + } + + this._closePath(path); + + this._ignoredPaths.add(path); + if (this._watched.has(path)) { + this._ignoredPaths.add(path + SLASH_GLOBSTAR); + } + + // reset the cached userIgnored anymatch fn + // to make ignoredPaths changes effective + this._userIgnored = undefined; + }); + + return this; +} + +/** + * Close watchers and remove all listeners from watched paths. + * @returns {Promise}. +*/ +close() { + if (this.closed) return this._closePromise; + this.closed = true; + + // Memory management. 
+ this.removeAllListeners(); + const closers = []; + this._closers.forEach(closerList => closerList.forEach(closer => { + const promise = closer(); + if (promise instanceof Promise) closers.push(promise); + })); + this._streams.forEach(stream => stream.destroy()); + this._userIgnored = undefined; + this._readyCount = 0; + this._readyEmitted = false; + this._watched.forEach(dirent => dirent.dispose()); + ['closers', 'watched', 'streams', 'symlinkPaths', 'throttled'].forEach(key => { + this[`_${key}`].clear(); + }); + + this._closePromise = closers.length ? Promise.all(closers).then(() => undefined) : Promise.resolve(); + return this._closePromise; +} + +/** + * Expose list of watched paths + * @returns {Object} for chaining +*/ +getWatched() { + const watchList = {}; + this._watched.forEach((entry, dir) => { + const key = this.options.cwd ? sysPath.relative(this.options.cwd, dir) : dir; + watchList[key || ONE_DOT] = entry.getChildren().sort(); + }); + return watchList; +} + +emitWithAll(event, args) { + this.emit(...args); + if (event !== EV_ERROR) this.emit(EV_ALL, ...args); +} + +// Common helpers +// -------------- + +/** + * Normalize and emit events. + * Calling _emit DOES NOT MEAN emit() would be called! + * @param {EventName} event Type of event + * @param {Path} path File or directory path + * @param {*=} val1 arguments to be passed with event + * @param {*=} val2 + * @param {*=} val3 + * @returns the error if defined, otherwise the value of the FSWatcher instance's `closed` flag + */ +async _emit(event, path, val1, val2, val3) { + if (this.closed) return; + + const opts = this.options; + if (isWindows) path = sysPath.normalize(path); + if (opts.cwd) path = sysPath.relative(opts.cwd, path); + /** @type Array */ + const args = [event, path]; + if (val3 !== undefined) args.push(val1, val2, val3); + else if (val2 !== undefined) args.push(val1, val2); + else if (val1 !== undefined) args.push(val1); + + const awf = opts.awaitWriteFinish; + let pw; + if (awf && (pw = this._pendingWrites.get(path))) { + pw.lastChange = new Date(); + return this; + } + + if (opts.atomic) { + if (event === EV_UNLINK) { + this._pendingUnlinks.set(path, args); + setTimeout(() => { + this._pendingUnlinks.forEach((entry, path) => { + this.emit(...entry); + this.emit(EV_ALL, ...entry); + this._pendingUnlinks.delete(path); + }); + }, typeof opts.atomic === 'number' ? opts.atomic : 100); + return this; + } + if (event === EV_ADD && this._pendingUnlinks.has(path)) { + event = args[0] = EV_CHANGE; + this._pendingUnlinks.delete(path); + } + } + + if (awf && (event === EV_ADD || event === EV_CHANGE) && this._readyEmitted) { + const awfEmit = (err, stats) => { + if (err) { + event = args[0] = EV_ERROR; + args[1] = err; + this.emitWithAll(event, args); + } else if (stats) { + // if stats doesn't exist the file must have been deleted + if (args.length > 2) { + args[2] = stats; + } else { + args.push(stats); + } + this.emitWithAll(event, args); + } + }; + + this._awaitWriteFinish(path, awf.stabilityThreshold, event, awfEmit); + return this; + } + + if (event === EV_CHANGE) { + const isThrottled = !this._throttle(EV_CHANGE, path, 50); + if (isThrottled) return this; + } + + if (opts.alwaysStat && val1 === undefined && + (event === EV_ADD || event === EV_ADD_DIR || event === EV_CHANGE) + ) { + const fullPath = opts.cwd ? 
sysPath.join(opts.cwd, path) : path; + let stats; + try { + stats = await stat(fullPath); + } catch (err) {} + // Suppress event when fs_stat fails, to avoid sending undefined 'stat' + if (!stats || this.closed) return; + args.push(stats); + } + this.emitWithAll(event, args); + + return this; +} + +/** + * Common handler for errors + * @param {Error} error + * @returns {Error|Boolean} The error if defined, otherwise the value of the FSWatcher instance's `closed` flag + */ +_handleError(error) { + const code = error && error.code; + if (error && code !== 'ENOENT' && code !== 'ENOTDIR' && + (!this.options.ignorePermissionErrors || (code !== 'EPERM' && code !== 'EACCES')) + ) { + this.emit(EV_ERROR, error); + } + return error || this.closed; +} + +/** + * Helper utility for throttling + * @param {ThrottleType} actionType type being throttled + * @param {Path} path being acted upon + * @param {Number} timeout duration of time to suppress duplicate actions + * @returns {Object|false} tracking object or false if action should be suppressed + */ +_throttle(actionType, path, timeout) { + if (!this._throttled.has(actionType)) { + this._throttled.set(actionType, new Map()); + } + + /** @type {Map} */ + const action = this._throttled.get(actionType); + /** @type {Object} */ + const actionPath = action.get(path); + + if (actionPath) { + actionPath.count++; + return false; + } + + let timeoutObject; + const clear = () => { + const item = action.get(path); + const count = item ? item.count : 0; + action.delete(path); + clearTimeout(timeoutObject); + if (item) clearTimeout(item.timeoutObject); + return count; + }; + timeoutObject = setTimeout(clear, timeout); + const thr = {timeoutObject, clear, count: 0}; + action.set(path, thr); + return thr; +} + +_incrReadyCount() { + return this._readyCount++; +} + +/** + * Awaits write operation to finish. + * Polls a newly created file for size variations. When files size does not change for 'threshold' milliseconds calls callback. + * @param {Path} path being acted upon + * @param {Number} threshold Time in milliseconds a file size must be fixed before acknowledging write OP is finished + * @param {EventName} event + * @param {Function} awfEmit Callback to be called when ready for event to be emitted. 
+ */ +_awaitWriteFinish(path, threshold, event, awfEmit) { + let timeoutHandler; + + let fullPath = path; + if (this.options.cwd && !sysPath.isAbsolute(path)) { + fullPath = sysPath.join(this.options.cwd, path); + } + + const now = new Date(); + + const awaitWriteFinish = (prevStat) => { + fs.stat(fullPath, (err, curStat) => { + if (err || !this._pendingWrites.has(path)) { + if (err && err.code !== 'ENOENT') awfEmit(err); + return; + } + + const now = Number(new Date()); + + if (prevStat && curStat.size !== prevStat.size) { + this._pendingWrites.get(path).lastChange = now; + } + const pw = this._pendingWrites.get(path); + const df = now - pw.lastChange; + + if (df >= threshold) { + this._pendingWrites.delete(path); + awfEmit(undefined, curStat); + } else { + timeoutHandler = setTimeout( + awaitWriteFinish, + this.options.awaitWriteFinish.pollInterval, + curStat + ); + } + }); + }; + + if (!this._pendingWrites.has(path)) { + this._pendingWrites.set(path, { + lastChange: now, + cancelWait: () => { + this._pendingWrites.delete(path); + clearTimeout(timeoutHandler); + return event; + } + }); + timeoutHandler = setTimeout( + awaitWriteFinish, + this.options.awaitWriteFinish.pollInterval + ); + } +} + +_getGlobIgnored() { + return [...this._ignoredPaths.values()]; +} + +/** + * Determines whether user has asked to ignore this path. + * @param {Path} path filepath or dir + * @param {fs.Stats=} stats result of fs.stat + * @returns {Boolean} + */ +_isIgnored(path, stats) { + if (this.options.atomic && DOT_RE.test(path)) return true; + if (!this._userIgnored) { + const {cwd} = this.options; + const ign = this.options.ignored; + + const ignored = ign && ign.map(normalizeIgnored(cwd)); + const paths = arrify(ignored) + .filter((path) => typeof path === STRING_TYPE && !isGlob(path)) + .map((path) => path + SLASH_GLOBSTAR); + const list = this._getGlobIgnored().map(normalizeIgnored(cwd)).concat(ignored, paths); + this._userIgnored = anymatch(list, undefined, ANYMATCH_OPTS); + } + + return this._userIgnored([path, stats]); +} + +_isntIgnored(path, stat) { + return !this._isIgnored(path, stat); +} + +/** + * Provides a set of common helpers and properties relating to symlink and glob handling. + * @param {Path} path file, directory, or glob pattern being watched + * @param {Number=} depth at any depth > 0, this isn't a glob + * @returns {WatchHelper} object containing helpers for this path + */ +_getWatchHelpers(path, depth) { + const watchPath = depth || this.options.disableGlobbing || !isGlob(path) ? path : globParent(path); + const follow = this.options.followSymlinks; + + return new WatchHelper(path, watchPath, follow, this); +} + +// Directory helpers +// ----------------- + +/** + * Provides directory tracking objects + * @param {String} directory path of the directory + * @returns {DirEntry} the directory's tracking object + */ +_getWatchedDir(directory) { + if (!this._boundRemove) this._boundRemove = this._remove.bind(this); + const dir = sysPath.resolve(directory); + if (!this._watched.has(dir)) this._watched.set(dir, new DirEntry(dir, this._boundRemove)); + return this._watched.get(dir); +} + +// File helpers +// ------------ + +/** + * Check for read permissions. 
+ * Based on this answer on SO: https://stackoverflow.com/a/11781404/1358405 + * @param {fs.Stats} stats - object, result of fs_stat + * @returns {Boolean} indicates whether the file can be read +*/ +_hasReadPermissions(stats) { + if (this.options.ignorePermissionErrors) return true; + + // stats.mode may be bigint + const md = stats && Number.parseInt(stats.mode, 10); + const st = md & 0o777; + const it = Number.parseInt(st.toString(8)[0], 10); + return Boolean(4 & it); +} + +/** + * Handles emitting unlink events for + * files and directories, and via recursion, for + * files and directories within directories that are unlinked + * @param {String} directory within which the following item is located + * @param {String} item base path of item/directory + * @returns {void} +*/ +_remove(directory, item, isDirectory) { + // if what is being deleted is a directory, get that directory's paths + // for recursive deleting and cleaning of watched object + // if it is not a directory, nestedDirectoryChildren will be empty array + const path = sysPath.join(directory, item); + const fullPath = sysPath.resolve(path); + isDirectory = isDirectory != null + ? isDirectory + : this._watched.has(path) || this._watched.has(fullPath); + + // prevent duplicate handling in case of arriving here nearly simultaneously + // via multiple paths (such as _handleFile and _handleDir) + if (!this._throttle('remove', path, 100)) return; + + // if the only watched file is removed, watch for its return + if (!isDirectory && !this.options.useFsEvents && this._watched.size === 1) { + this.add(directory, item, true); + } + + // This will create a new entry in the watched object in either case + // so we got to do the directory check beforehand + const wp = this._getWatchedDir(path); + const nestedDirectoryChildren = wp.getChildren(); + + // Recursively remove children directories / files. + nestedDirectoryChildren.forEach(nested => this._remove(path, nested)); + + // Check if item was on the watched list and remove it + const parent = this._getWatchedDir(directory); + const wasTracked = parent.has(item); + parent.remove(item); + + // Fixes issue #1042 -> Relative paths were detected and added as symlinks + // (https://github.com/paulmillr/chokidar/blob/e1753ddbc9571bdc33b4a4af172d52cb6e611c10/lib/nodefs-handler.js#L612), + // but never removed from the map in case the path was deleted. + // This leads to an incorrect state if the path was recreated: + // https://github.com/paulmillr/chokidar/blob/e1753ddbc9571bdc33b4a4af172d52cb6e611c10/lib/nodefs-handler.js#L553 + if (this._symlinkPaths.has(fullPath)) { + this._symlinkPaths.delete(fullPath); + } + + // If we wait for this file to be fully written, cancel the wait. + let relPath = path; + if (this.options.cwd) relPath = sysPath.relative(this.options.cwd, path); + if (this.options.awaitWriteFinish && this._pendingWrites.has(relPath)) { + const event = this._pendingWrites.get(relPath).cancelWait(); + if (event === EV_ADD) return; + } + + // The Entry will either be a directory that just got removed + // or a bogus entry to a file, in either case we have to remove it + this._watched.delete(path); + this._watched.delete(fullPath); + const eventName = isDirectory ? 
EV_UNLINK_DIR : EV_UNLINK; + if (wasTracked && !this._isIgnored(path)) this._emit(eventName, path); + + // Avoid conflicts if we later create another file with the same name + if (!this.options.useFsEvents) { + this._closePath(path); + } +} + +/** + * Closes all watchers for a path + * @param {Path} path + */ +_closePath(path) { + this._closeFile(path) + const dir = sysPath.dirname(path); + this._getWatchedDir(dir).remove(sysPath.basename(path)); +} + +/** + * Closes only file-specific watchers + * @param {Path} path + */ +_closeFile(path) { + const closers = this._closers.get(path); + if (!closers) return; + closers.forEach(closer => closer()); + this._closers.delete(path); +} + +/** + * + * @param {Path} path + * @param {Function} closer + */ +_addPathCloser(path, closer) { + if (!closer) return; + let list = this._closers.get(path); + if (!list) { + list = []; + this._closers.set(path, list); + } + list.push(closer); +} + +_readdirp(root, opts) { + if (this.closed) return; + const options = {type: EV_ALL, alwaysStat: true, lstat: true, ...opts}; + let stream = readdirp(root, options); + this._streams.add(stream); + stream.once(STR_CLOSE, () => { + stream = undefined; + }); + stream.once(STR_END, () => { + if (stream) { + this._streams.delete(stream); + stream = undefined; + } + }); + return stream; +} + +} + +// Export FSWatcher class +exports.FSWatcher = FSWatcher; + +/** + * Instantiates watcher with paths to be tracked. + * @param {String|Array} paths file/directory paths and/or globs + * @param {Object=} options chokidar opts + * @returns an instance of FSWatcher for chaining. + */ +const watch = (paths, options) => { + const watcher = new FSWatcher(options); + watcher.add(paths); + return watcher; +}; + +exports.watch = watch; diff --git a/node_modules/chokidar/lib/constants.js b/node_modules/chokidar/lib/constants.js new file mode 100644 index 0000000..1454f85 --- /dev/null +++ b/node_modules/chokidar/lib/constants.js @@ -0,0 +1,65 @@ +'use strict'; + +const {sep} = require('path'); +const {platform} = process; +const os = require('os'); + +exports.EV_ALL = 'all'; +exports.EV_READY = 'ready'; +exports.EV_ADD = 'add'; +exports.EV_CHANGE = 'change'; +exports.EV_ADD_DIR = 'addDir'; +exports.EV_UNLINK = 'unlink'; +exports.EV_UNLINK_DIR = 'unlinkDir'; +exports.EV_RAW = 'raw'; +exports.EV_ERROR = 'error'; + +exports.STR_DATA = 'data'; +exports.STR_END = 'end'; +exports.STR_CLOSE = 'close'; + +exports.FSEVENT_CREATED = 'created'; +exports.FSEVENT_MODIFIED = 'modified'; +exports.FSEVENT_DELETED = 'deleted'; +exports.FSEVENT_MOVED = 'moved'; +exports.FSEVENT_CLONED = 'cloned'; +exports.FSEVENT_UNKNOWN = 'unknown'; +exports.FSEVENT_TYPE_FILE = 'file'; +exports.FSEVENT_TYPE_DIRECTORY = 'directory'; +exports.FSEVENT_TYPE_SYMLINK = 'symlink'; + +exports.KEY_LISTENERS = 'listeners'; +exports.KEY_ERR = 'errHandlers'; +exports.KEY_RAW = 'rawEmitters'; +exports.HANDLER_KEYS = [exports.KEY_LISTENERS, exports.KEY_ERR, exports.KEY_RAW]; + +exports.DOT_SLASH = `.${sep}`; + +exports.BACK_SLASH_RE = /\\/g; +exports.DOUBLE_SLASH_RE = /\/\//; +exports.SLASH_OR_BACK_SLASH_RE = /[/\\]/; +exports.DOT_RE = /\..*\.(sw[px])$|~$|\.subl.*\.tmp/; +exports.REPLACER_RE = /^\.[/\\]/; + +exports.SLASH = '/'; +exports.SLASH_SLASH = '//'; +exports.BRACE_START = '{'; +exports.BANG = '!'; +exports.ONE_DOT = '.'; +exports.TWO_DOTS = '..'; +exports.STAR = '*'; +exports.GLOBSTAR = '**'; +exports.ROOT_GLOBSTAR = '/**/*'; +exports.SLASH_GLOBSTAR = '/**'; +exports.DIR_SUFFIX = 'Dir'; +exports.ANYMATCH_OPTS = {dot: true}; 
+exports.STRING_TYPE = 'string'; +exports.FUNCTION_TYPE = 'function'; +exports.EMPTY_STR = ''; +exports.EMPTY_FN = () => {}; +exports.IDENTITY_FN = val => val; + +exports.isWindows = platform === 'win32'; +exports.isMacos = platform === 'darwin'; +exports.isLinux = platform === 'linux'; +exports.isIBMi = os.type() === 'OS400'; diff --git a/node_modules/chokidar/lib/fsevents-handler.js b/node_modules/chokidar/lib/fsevents-handler.js new file mode 100644 index 0000000..0f7f2cb --- /dev/null +++ b/node_modules/chokidar/lib/fsevents-handler.js @@ -0,0 +1,524 @@ +'use strict'; + +const fs = require('fs'); +const sysPath = require('path'); +const { promisify } = require('util'); + +let fsevents; +try { + fsevents = require('fsevents'); +} catch (error) { + if (process.env.CHOKIDAR_PRINT_FSEVENTS_REQUIRE_ERROR) console.error(error); +} + +if (fsevents) { + // TODO: real check + const mtch = process.version.match(/v(\d+)\.(\d+)/); + if (mtch && mtch[1] && mtch[2]) { + const maj = Number.parseInt(mtch[1], 10); + const min = Number.parseInt(mtch[2], 10); + if (maj === 8 && min < 16) { + fsevents = undefined; + } + } +} + +const { + EV_ADD, + EV_CHANGE, + EV_ADD_DIR, + EV_UNLINK, + EV_ERROR, + STR_DATA, + STR_END, + FSEVENT_CREATED, + FSEVENT_MODIFIED, + FSEVENT_DELETED, + FSEVENT_MOVED, + // FSEVENT_CLONED, + FSEVENT_UNKNOWN, + FSEVENT_TYPE_FILE, + FSEVENT_TYPE_DIRECTORY, + FSEVENT_TYPE_SYMLINK, + + ROOT_GLOBSTAR, + DIR_SUFFIX, + DOT_SLASH, + FUNCTION_TYPE, + EMPTY_FN, + IDENTITY_FN +} = require('./constants'); + +const Depth = (value) => isNaN(value) ? {} : {depth: value}; + +const stat = promisify(fs.stat); +const lstat = promisify(fs.lstat); +const realpath = promisify(fs.realpath); + +const statMethods = { stat, lstat }; + +/** + * @typedef {String} Path + */ + +/** + * @typedef {Object} FsEventsWatchContainer + * @property {Set} listeners + * @property {Function} rawEmitter + * @property {{stop: Function}} watcher + */ + +// fsevents instance helper functions +/** + * Object to hold per-process fsevents instances (may be shared across chokidar FSWatcher instances) + * @type {Map} + */ +const FSEventsWatchers = new Map(); + +// Threshold of duplicate path prefixes at which to start +// consolidating going forward +const consolidateThreshhold = 10; + +const wrongEventFlags = new Set([ + 69888, 70400, 71424, 72704, 73472, 131328, 131840, 262912 +]); + +/** + * Instantiates the fsevents interface + * @param {Path} path path to be watched + * @param {Function} callback called when fsevents is bound and ready + * @returns {{stop: Function}} new fsevents instance + */ +const createFSEventsInstance = (path, callback) => { + const stop = fsevents.watch(path, callback); + return {stop}; +}; + +/** + * Instantiates the fsevents interface or binds listeners to an existing one covering + * the same file tree. + * @param {Path} path - to be watched + * @param {Path} realPath - real path for symlinks + * @param {Function} listener - called when fsevents emits events + * @param {Function} rawEmitter - passes data to listeners of the 'raw' event + * @returns {Function} closer + */ +function setFSEventsListener(path, realPath, listener, rawEmitter) { + let watchPath = sysPath.extname(realPath) ? 
sysPath.dirname(realPath) : realPath; + + const parentPath = sysPath.dirname(watchPath); + let cont = FSEventsWatchers.get(watchPath); + + // If we've accumulated a substantial number of paths that + // could have been consolidated by watching one directory + // above the current one, create a watcher on the parent + // path instead, so that we do consolidate going forward. + if (couldConsolidate(parentPath)) { + watchPath = parentPath; + } + + const resolvedPath = sysPath.resolve(path); + const hasSymlink = resolvedPath !== realPath; + + const filteredListener = (fullPath, flags, info) => { + if (hasSymlink) fullPath = fullPath.replace(realPath, resolvedPath); + if ( + fullPath === resolvedPath || + !fullPath.indexOf(resolvedPath + sysPath.sep) + ) listener(fullPath, flags, info); + }; + + // check if there is already a watcher on a parent path + // modifies `watchPath` to the parent path when it finds a match + let watchedParent = false; + for (const watchedPath of FSEventsWatchers.keys()) { + if (realPath.indexOf(sysPath.resolve(watchedPath) + sysPath.sep) === 0) { + watchPath = watchedPath; + cont = FSEventsWatchers.get(watchPath); + watchedParent = true; + break; + } + } + + if (cont || watchedParent) { + cont.listeners.add(filteredListener); + } else { + cont = { + listeners: new Set([filteredListener]), + rawEmitter, + watcher: createFSEventsInstance(watchPath, (fullPath, flags) => { + if (!cont.listeners.size) return; + const info = fsevents.getInfo(fullPath, flags); + cont.listeners.forEach(list => { + list(fullPath, flags, info); + }); + + cont.rawEmitter(info.event, fullPath, info); + }) + }; + FSEventsWatchers.set(watchPath, cont); + } + + // removes this instance's listeners and closes the underlying fsevents + // instance if there are no more listeners left + return () => { + const lst = cont.listeners; + + lst.delete(filteredListener); + if (!lst.size) { + FSEventsWatchers.delete(watchPath); + if (cont.watcher) return cont.watcher.stop().then(() => { + cont.rawEmitter = cont.watcher = undefined; + Object.freeze(cont); + }); + } + }; +} + +// Decide whether or not we should start a new higher-level +// parent watcher +const couldConsolidate = (path) => { + let count = 0; + for (const watchPath of FSEventsWatchers.keys()) { + if (watchPath.indexOf(path) === 0) { + count++; + if (count >= consolidateThreshhold) { + return true; + } + } + } + + return false; +}; + +// returns boolean indicating whether fsevents can be used +const canUse = () => fsevents && FSEventsWatchers.size < 128; + +// determines subdirectory traversal levels from root to path +const calcDepth = (path, root) => { + let i = 0; + while (!path.indexOf(root) && (path = sysPath.dirname(path)) !== root) i++; + return i; +}; + +// returns boolean indicating whether the fsevents' event info has the same type +// as the one returned by fs.stat +const sameTypes = (info, stats) => ( + info.type === FSEVENT_TYPE_DIRECTORY && stats.isDirectory() || + info.type === FSEVENT_TYPE_SYMLINK && stats.isSymbolicLink() || + info.type === FSEVENT_TYPE_FILE && stats.isFile() +) + +/** + * @mixin + */ +class FsEventsHandler { + +/** + * @param {import('../index').FSWatcher} fsw + */ +constructor(fsw) { + this.fsw = fsw; +} +checkIgnored(path, stats) { + const ipaths = this.fsw._ignoredPaths; + if (this.fsw._isIgnored(path, stats)) { + ipaths.add(path); + if (stats && stats.isDirectory()) { + ipaths.add(path + ROOT_GLOBSTAR); + } + return true; + } + + ipaths.delete(path); + ipaths.delete(path + ROOT_GLOBSTAR); +} + 
+addOrChange(path, fullPath, realPath, parent, watchedDir, item, info, opts) { + const event = watchedDir.has(item) ? EV_CHANGE : EV_ADD; + this.handleEvent(event, path, fullPath, realPath, parent, watchedDir, item, info, opts); +} + +async checkExists(path, fullPath, realPath, parent, watchedDir, item, info, opts) { + try { + const stats = await stat(path) + if (this.fsw.closed) return; + if (sameTypes(info, stats)) { + this.addOrChange(path, fullPath, realPath, parent, watchedDir, item, info, opts); + } else { + this.handleEvent(EV_UNLINK, path, fullPath, realPath, parent, watchedDir, item, info, opts); + } + } catch (error) { + if (error.code === 'EACCES') { + this.addOrChange(path, fullPath, realPath, parent, watchedDir, item, info, opts); + } else { + this.handleEvent(EV_UNLINK, path, fullPath, realPath, parent, watchedDir, item, info, opts); + } + } +} + +handleEvent(event, path, fullPath, realPath, parent, watchedDir, item, info, opts) { + if (this.fsw.closed || this.checkIgnored(path)) return; + + if (event === EV_UNLINK) { + const isDirectory = info.type === FSEVENT_TYPE_DIRECTORY + // suppress unlink events on never before seen files + if (isDirectory || watchedDir.has(item)) { + this.fsw._remove(parent, item, isDirectory); + } + } else { + if (event === EV_ADD) { + // track new directories + if (info.type === FSEVENT_TYPE_DIRECTORY) this.fsw._getWatchedDir(path); + + if (info.type === FSEVENT_TYPE_SYMLINK && opts.followSymlinks) { + // push symlinks back to the top of the stack to get handled + const curDepth = opts.depth === undefined ? + undefined : calcDepth(fullPath, realPath) + 1; + return this._addToFsEvents(path, false, true, curDepth); + } + + // track new paths + // (other than symlinks being followed, which will be tracked soon) + this.fsw._getWatchedDir(parent).add(item); + } + /** + * @type {'add'|'addDir'|'unlink'|'unlinkDir'} + */ + const eventName = info.type === FSEVENT_TYPE_DIRECTORY ? event + DIR_SUFFIX : event; + this.fsw._emit(eventName, path); + if (eventName === EV_ADD_DIR) this._addToFsEvents(path, false, true); + } +} + +/** + * Handle symlinks encountered during directory scan + * @param {String} watchPath - file/dir path to be watched with fsevents + * @param {String} realPath - real path (in case of symlinks) + * @param {Function} transform - path transformer + * @param {Function} globFilter - path filter in case a glob pattern was provided + * @returns {Function} closer for the watcher instance +*/ +_watchWithFsEvents(watchPath, realPath, transform, globFilter) { + if (this.fsw.closed || this.fsw._isIgnored(watchPath)) return; + const opts = this.fsw.options; + const watchCallback = async (fullPath, flags, info) => { + if (this.fsw.closed) return; + if ( + opts.depth !== undefined && + calcDepth(fullPath, realPath) > opts.depth + ) return; + const path = transform(sysPath.join( + watchPath, sysPath.relative(watchPath, fullPath) + )); + if (globFilter && !globFilter(path)) return; + // ensure directories are tracked + const parent = sysPath.dirname(path); + const item = sysPath.basename(path); + const watchedDir = this.fsw._getWatchedDir( + info.type === FSEVENT_TYPE_DIRECTORY ? 
path : parent + ); + + // correct for wrong events emitted + if (wrongEventFlags.has(flags) || info.event === FSEVENT_UNKNOWN) { + if (typeof opts.ignored === FUNCTION_TYPE) { + let stats; + try { + stats = await stat(path); + } catch (error) {} + if (this.fsw.closed) return; + if (this.checkIgnored(path, stats)) return; + if (sameTypes(info, stats)) { + this.addOrChange(path, fullPath, realPath, parent, watchedDir, item, info, opts); + } else { + this.handleEvent(EV_UNLINK, path, fullPath, realPath, parent, watchedDir, item, info, opts); + } + } else { + this.checkExists(path, fullPath, realPath, parent, watchedDir, item, info, opts); + } + } else { + switch (info.event) { + case FSEVENT_CREATED: + case FSEVENT_MODIFIED: + return this.addOrChange(path, fullPath, realPath, parent, watchedDir, item, info, opts); + case FSEVENT_DELETED: + case FSEVENT_MOVED: + return this.checkExists(path, fullPath, realPath, parent, watchedDir, item, info, opts); + } + } + }; + + const closer = setFSEventsListener( + watchPath, + realPath, + watchCallback, + this.fsw._emitRaw + ); + + this.fsw._emitReady(); + return closer; +} + +/** + * Handle symlinks encountered during directory scan + * @param {String} linkPath path to symlink + * @param {String} fullPath absolute path to the symlink + * @param {Function} transform pre-existing path transformer + * @param {Number} curDepth level of subdirectories traversed to where symlink is + * @returns {Promise} + */ +async _handleFsEventsSymlink(linkPath, fullPath, transform, curDepth) { + // don't follow the same symlink more than once + if (this.fsw.closed || this.fsw._symlinkPaths.has(fullPath)) return; + + this.fsw._symlinkPaths.set(fullPath, true); + this.fsw._incrReadyCount(); + + try { + const linkTarget = await realpath(linkPath); + if (this.fsw.closed) return; + if (this.fsw._isIgnored(linkTarget)) { + return this.fsw._emitReady(); + } + + this.fsw._incrReadyCount(); + + // add the linkTarget for watching with a wrapper for transform + // that causes emitted paths to incorporate the link's path + this._addToFsEvents(linkTarget || linkPath, (path) => { + let aliasedPath = linkPath; + if (linkTarget && linkTarget !== DOT_SLASH) { + aliasedPath = path.replace(linkTarget, linkPath); + } else if (path !== DOT_SLASH) { + aliasedPath = sysPath.join(linkPath, path); + } + return transform(aliasedPath); + }, false, curDepth); + } catch(error) { + if (this.fsw._handleError(error)) { + return this.fsw._emitReady(); + } + } +} + +/** + * + * @param {Path} newPath + * @param {fs.Stats} stats + */ +emitAdd(newPath, stats, processPath, opts, forceAdd) { + const pp = processPath(newPath); + const isDir = stats.isDirectory(); + const dirObj = this.fsw._getWatchedDir(sysPath.dirname(pp)); + const base = sysPath.basename(pp); + + // ensure empty dirs get tracked + if (isDir) this.fsw._getWatchedDir(pp); + if (dirObj.has(base)) return; + dirObj.add(base); + + if (!opts.ignoreInitial || forceAdd === true) { + this.fsw._emit(isDir ? 
EV_ADD_DIR : EV_ADD, pp, stats); + } +} + +initWatch(realPath, path, wh, processPath) { + if (this.fsw.closed) return; + const closer = this._watchWithFsEvents( + wh.watchPath, + sysPath.resolve(realPath || wh.watchPath), + processPath, + wh.globFilter + ); + this.fsw._addPathCloser(path, closer); +} + +/** + * Handle added path with fsevents + * @param {String} path file/dir path or glob pattern + * @param {Function|Boolean=} transform converts working path to what the user expects + * @param {Boolean=} forceAdd ensure add is emitted + * @param {Number=} priorDepth Level of subdirectories already traversed. + * @returns {Promise} + */ +async _addToFsEvents(path, transform, forceAdd, priorDepth) { + if (this.fsw.closed) { + return; + } + const opts = this.fsw.options; + const processPath = typeof transform === FUNCTION_TYPE ? transform : IDENTITY_FN; + + const wh = this.fsw._getWatchHelpers(path); + + // evaluate what is at the path we're being asked to watch + try { + const stats = await statMethods[wh.statMethod](wh.watchPath); + if (this.fsw.closed) return; + if (this.fsw._isIgnored(wh.watchPath, stats)) { + throw null; + } + if (stats.isDirectory()) { + // emit addDir unless this is a glob parent + if (!wh.globFilter) this.emitAdd(processPath(path), stats, processPath, opts, forceAdd); + + // don't recurse further if it would exceed depth setting + if (priorDepth && priorDepth > opts.depth) return; + + // scan the contents of the dir + this.fsw._readdirp(wh.watchPath, { + fileFilter: entry => wh.filterPath(entry), + directoryFilter: entry => wh.filterDir(entry), + ...Depth(opts.depth - (priorDepth || 0)) + }).on(STR_DATA, (entry) => { + // need to check filterPath on dirs b/c filterDir is less restrictive + if (this.fsw.closed) { + return; + } + if (entry.stats.isDirectory() && !wh.filterPath(entry)) return; + + const joinedPath = sysPath.join(wh.watchPath, entry.path); + const {fullPath} = entry; + + if (wh.followSymlinks && entry.stats.isSymbolicLink()) { + // preserve the current depth here since it can't be derived from + // real paths past the symlink + const curDepth = opts.depth === undefined ? 
+ undefined : calcDepth(joinedPath, sysPath.resolve(wh.watchPath)) + 1; + + this._handleFsEventsSymlink(joinedPath, fullPath, processPath, curDepth); + } else { + this.emitAdd(joinedPath, entry.stats, processPath, opts, forceAdd); + } + }).on(EV_ERROR, EMPTY_FN).on(STR_END, () => { + this.fsw._emitReady(); + }); + } else { + this.emitAdd(wh.watchPath, stats, processPath, opts, forceAdd); + this.fsw._emitReady(); + } + } catch (error) { + if (!error || this.fsw._handleError(error)) { + // TODO: Strange thing: "should not choke on an ignored watch path" will be failed without 2 ready calls -__- + this.fsw._emitReady(); + this.fsw._emitReady(); + } + } + + if (opts.persistent && forceAdd !== true) { + if (typeof transform === FUNCTION_TYPE) { + // realpath has already been resolved + this.initWatch(undefined, path, wh, processPath); + } else { + let realPath; + try { + realPath = await realpath(wh.watchPath); + } catch (e) {} + this.initWatch(realPath, path, wh, processPath); + } + } +} + +} + +module.exports = FsEventsHandler; +module.exports.canUse = canUse; diff --git a/node_modules/chokidar/lib/nodefs-handler.js b/node_modules/chokidar/lib/nodefs-handler.js new file mode 100644 index 0000000..199cfe9 --- /dev/null +++ b/node_modules/chokidar/lib/nodefs-handler.js @@ -0,0 +1,654 @@ +'use strict'; + +const fs = require('fs'); +const sysPath = require('path'); +const { promisify } = require('util'); +const isBinaryPath = require('is-binary-path'); +const { + isWindows, + isLinux, + EMPTY_FN, + EMPTY_STR, + KEY_LISTENERS, + KEY_ERR, + KEY_RAW, + HANDLER_KEYS, + EV_CHANGE, + EV_ADD, + EV_ADD_DIR, + EV_ERROR, + STR_DATA, + STR_END, + BRACE_START, + STAR +} = require('./constants'); + +const THROTTLE_MODE_WATCH = 'watch'; + +const open = promisify(fs.open); +const stat = promisify(fs.stat); +const lstat = promisify(fs.lstat); +const close = promisify(fs.close); +const fsrealpath = promisify(fs.realpath); + +const statMethods = { lstat, stat }; + +// TODO: emit errors properly. Example: EMFILE on Macos. +const foreach = (val, fn) => { + if (val instanceof Set) { + val.forEach(fn); + } else { + fn(val); + } +}; + +const addAndConvert = (main, prop, item) => { + let container = main[prop]; + if (!(container instanceof Set)) { + main[prop] = container = new Set([container]); + } + container.add(item); +}; + +const clearItem = cont => key => { + const set = cont[key]; + if (set instanceof Set) { + set.clear(); + } else { + delete cont[key]; + } +}; + +const delFromSet = (main, prop, item) => { + const container = main[prop]; + if (container instanceof Set) { + container.delete(item); + } else if (container === item) { + delete main[prop]; + } +}; + +const isEmptySet = (val) => val instanceof Set ? 
val.size === 0 : !val; + +/** + * @typedef {String} Path + */ + +// fs_watch helpers + +// object to hold per-process fs_watch instances +// (may be shared across chokidar FSWatcher instances) + +/** + * @typedef {Object} FsWatchContainer + * @property {Set} listeners + * @property {Set} errHandlers + * @property {Set} rawEmitters + * @property {fs.FSWatcher=} watcher + * @property {Boolean=} watcherUnusable + */ + +/** + * @type {Map} + */ +const FsWatchInstances = new Map(); + +/** + * Instantiates the fs_watch interface + * @param {String} path to be watched + * @param {Object} options to be passed to fs_watch + * @param {Function} listener main event handler + * @param {Function} errHandler emits info about errors + * @param {Function} emitRaw emits raw event data + * @returns {fs.FSWatcher} new fsevents instance + */ +function createFsWatchInstance(path, options, listener, errHandler, emitRaw) { + const handleEvent = (rawEvent, evPath) => { + listener(path); + emitRaw(rawEvent, evPath, {watchedPath: path}); + + // emit based on events occurring for files from a directory's watcher in + // case the file's watcher misses it (and rely on throttling to de-dupe) + if (evPath && path !== evPath) { + fsWatchBroadcast( + sysPath.resolve(path, evPath), KEY_LISTENERS, sysPath.join(path, evPath) + ); + } + }; + try { + return fs.watch(path, options, handleEvent); + } catch (error) { + errHandler(error); + } +} + +/** + * Helper for passing fs_watch event data to a collection of listeners + * @param {Path} fullPath absolute path bound to fs_watch instance + * @param {String} type listener type + * @param {*=} val1 arguments to be passed to listeners + * @param {*=} val2 + * @param {*=} val3 + */ +const fsWatchBroadcast = (fullPath, type, val1, val2, val3) => { + const cont = FsWatchInstances.get(fullPath); + if (!cont) return; + foreach(cont[type], (listener) => { + listener(val1, val2, val3); + }); +}; + +/** + * Instantiates the fs_watch interface or binds listeners + * to an existing one covering the same file system entry + * @param {String} path + * @param {String} fullPath absolute path + * @param {Object} options to be passed to fs_watch + * @param {Object} handlers container for event listener functions + */ +const setFsWatchListener = (path, fullPath, options, handlers) => { + const {listener, errHandler, rawEmitter} = handlers; + let cont = FsWatchInstances.get(fullPath); + + /** @type {fs.FSWatcher=} */ + let watcher; + if (!options.persistent) { + watcher = createFsWatchInstance( + path, options, listener, errHandler, rawEmitter + ); + return watcher.close.bind(watcher); + } + if (cont) { + addAndConvert(cont, KEY_LISTENERS, listener); + addAndConvert(cont, KEY_ERR, errHandler); + addAndConvert(cont, KEY_RAW, rawEmitter); + } else { + watcher = createFsWatchInstance( + path, + options, + fsWatchBroadcast.bind(null, fullPath, KEY_LISTENERS), + errHandler, // no need to use broadcast here + fsWatchBroadcast.bind(null, fullPath, KEY_RAW) + ); + if (!watcher) return; + watcher.on(EV_ERROR, async (error) => { + const broadcastErr = fsWatchBroadcast.bind(null, fullPath, KEY_ERR); + cont.watcherUnusable = true; // documented since Node 10.4.1 + // Workaround for https://github.com/joyent/node/issues/4337 + if (isWindows && error.code === 'EPERM') { + try { + const fd = await open(path, 'r'); + await close(fd); + broadcastErr(error); + } catch (err) {} + } else { + broadcastErr(error); + } + }); + cont = { + listeners: listener, + errHandlers: errHandler, + rawEmitters: rawEmitter, + watcher 
+ }; + FsWatchInstances.set(fullPath, cont); + } + // const index = cont.listeners.indexOf(listener); + + // removes this instance's listeners and closes the underlying fs_watch + // instance if there are no more listeners left + return () => { + delFromSet(cont, KEY_LISTENERS, listener); + delFromSet(cont, KEY_ERR, errHandler); + delFromSet(cont, KEY_RAW, rawEmitter); + if (isEmptySet(cont.listeners)) { + // Check to protect against issue gh-730. + // if (cont.watcherUnusable) { + cont.watcher.close(); + // } + FsWatchInstances.delete(fullPath); + HANDLER_KEYS.forEach(clearItem(cont)); + cont.watcher = undefined; + Object.freeze(cont); + } + }; +}; + +// fs_watchFile helpers + +// object to hold per-process fs_watchFile instances +// (may be shared across chokidar FSWatcher instances) +const FsWatchFileInstances = new Map(); + +/** + * Instantiates the fs_watchFile interface or binds listeners + * to an existing one covering the same file system entry + * @param {String} path to be watched + * @param {String} fullPath absolute path + * @param {Object} options options to be passed to fs_watchFile + * @param {Object} handlers container for event listener functions + * @returns {Function} closer + */ +const setFsWatchFileListener = (path, fullPath, options, handlers) => { + const {listener, rawEmitter} = handlers; + let cont = FsWatchFileInstances.get(fullPath); + + /* eslint-disable no-unused-vars, prefer-destructuring */ + let listeners = new Set(); + let rawEmitters = new Set(); + + const copts = cont && cont.options; + if (copts && (copts.persistent < options.persistent || copts.interval > options.interval)) { + // "Upgrade" the watcher to persistence or a quicker interval. + // This creates some unlikely edge case issues if the user mixes + // settings in a very weird way, but solving for those cases + // doesn't seem worthwhile for the added complexity. + listeners = cont.listeners; + rawEmitters = cont.rawEmitters; + fs.unwatchFile(fullPath); + cont = undefined; + } + + /* eslint-enable no-unused-vars, prefer-destructuring */ + + if (cont) { + addAndConvert(cont, KEY_LISTENERS, listener); + addAndConvert(cont, KEY_RAW, rawEmitter); + } else { + // TODO + // listeners.add(listener); + // rawEmitters.add(rawEmitter); + cont = { + listeners: listener, + rawEmitters: rawEmitter, + options, + watcher: fs.watchFile(fullPath, options, (curr, prev) => { + foreach(cont.rawEmitters, (rawEmitter) => { + rawEmitter(EV_CHANGE, fullPath, {curr, prev}); + }); + const currmtime = curr.mtimeMs; + if (curr.size !== prev.size || currmtime > prev.mtimeMs || currmtime === 0) { + foreach(cont.listeners, (listener) => listener(path, curr)); + } + }) + }; + FsWatchFileInstances.set(fullPath, cont); + } + // const index = cont.listeners.indexOf(listener); + + // Removes this instance's listeners and closes the underlying fs_watchFile + // instance if there are no more listeners left. + return () => { + delFromSet(cont, KEY_LISTENERS, listener); + delFromSet(cont, KEY_RAW, rawEmitter); + if (isEmptySet(cont.listeners)) { + FsWatchFileInstances.delete(fullPath); + fs.unwatchFile(fullPath); + cont.options = cont.watcher = undefined; + Object.freeze(cont); + } + }; +}; + +/** + * @mixin + */ +class NodeFsHandler { + +/** + * @param {import("../index").FSWatcher} fsW + */ +constructor(fsW) { + this.fsw = fsW; + this._boundHandleError = (error) => fsW._handleError(error); +} + +/** + * Watch file for changes with fs_watchFile or fs_watch. 
+ * @param {String} path to file or dir + * @param {Function} listener on fs change + * @returns {Function} closer for the watcher instance + */ +_watchWithNodeFs(path, listener) { + const opts = this.fsw.options; + const directory = sysPath.dirname(path); + const basename = sysPath.basename(path); + const parent = this.fsw._getWatchedDir(directory); + parent.add(basename); + const absolutePath = sysPath.resolve(path); + const options = {persistent: opts.persistent}; + if (!listener) listener = EMPTY_FN; + + let closer; + if (opts.usePolling) { + options.interval = opts.enableBinaryInterval && isBinaryPath(basename) ? + opts.binaryInterval : opts.interval; + closer = setFsWatchFileListener(path, absolutePath, options, { + listener, + rawEmitter: this.fsw._emitRaw + }); + } else { + closer = setFsWatchListener(path, absolutePath, options, { + listener, + errHandler: this._boundHandleError, + rawEmitter: this.fsw._emitRaw + }); + } + return closer; +} + +/** + * Watch a file and emit add event if warranted. + * @param {Path} file Path + * @param {fs.Stats} stats result of fs_stat + * @param {Boolean} initialAdd was the file added at watch instantiation? + * @returns {Function} closer for the watcher instance + */ +_handleFile(file, stats, initialAdd) { + if (this.fsw.closed) { + return; + } + const dirname = sysPath.dirname(file); + const basename = sysPath.basename(file); + const parent = this.fsw._getWatchedDir(dirname); + // stats is always present + let prevStats = stats; + + // if the file is already being watched, do nothing + if (parent.has(basename)) return; + + const listener = async (path, newStats) => { + if (!this.fsw._throttle(THROTTLE_MODE_WATCH, file, 5)) return; + if (!newStats || newStats.mtimeMs === 0) { + try { + const newStats = await stat(file); + if (this.fsw.closed) return; + // Check that change event was not fired because of changed only accessTime. + const at = newStats.atimeMs; + const mt = newStats.mtimeMs; + if (!at || at <= mt || mt !== prevStats.mtimeMs) { + this.fsw._emit(EV_CHANGE, file, newStats); + } + if (isLinux && prevStats.ino !== newStats.ino) { + this.fsw._closeFile(path) + prevStats = newStats; + this.fsw._addPathCloser(path, this._watchWithNodeFs(file, listener)); + } else { + prevStats = newStats; + } + } catch (error) { + // Fix issues where mtime is null but file is still present + this.fsw._remove(dirname, basename); + } + // add is about to be emitted if file not already tracked in parent + } else if (parent.has(basename)) { + // Check that change event was not fired because of changed only accessTime. + const at = newStats.atimeMs; + const mt = newStats.mtimeMs; + if (!at || at <= mt || mt !== prevStats.mtimeMs) { + this.fsw._emit(EV_CHANGE, file, newStats); + } + prevStats = newStats; + } + } + // kick off the watcher + const closer = this._watchWithNodeFs(file, listener); + + // emit an add event if we're supposed to + if (!(initialAdd && this.fsw.options.ignoreInitial) && this.fsw._isntIgnored(file)) { + if (!this.fsw._throttle(EV_ADD, file, 0)) return; + this.fsw._emit(EV_ADD, file, stats); + } + + return closer; +} + +/** + * Handle symlinks encountered while reading a dir. + * @param {Object} entry returned by readdirp + * @param {String} directory path of dir being read + * @param {String} path of this item + * @param {String} item basename of this item + * @returns {Promise} true if no more processing is needed for this entry. 
+ */ +async _handleSymlink(entry, directory, path, item) { + if (this.fsw.closed) { + return; + } + const full = entry.fullPath; + const dir = this.fsw._getWatchedDir(directory); + + if (!this.fsw.options.followSymlinks) { + // watch symlink directly (don't follow) and detect changes + this.fsw._incrReadyCount(); + + let linkPath; + try { + linkPath = await fsrealpath(path); + } catch (e) { + this.fsw._emitReady(); + return true; + } + + if (this.fsw.closed) return; + if (dir.has(item)) { + if (this.fsw._symlinkPaths.get(full) !== linkPath) { + this.fsw._symlinkPaths.set(full, linkPath); + this.fsw._emit(EV_CHANGE, path, entry.stats); + } + } else { + dir.add(item); + this.fsw._symlinkPaths.set(full, linkPath); + this.fsw._emit(EV_ADD, path, entry.stats); + } + this.fsw._emitReady(); + return true; + } + + // don't follow the same symlink more than once + if (this.fsw._symlinkPaths.has(full)) { + return true; + } + + this.fsw._symlinkPaths.set(full, true); +} + +_handleRead(directory, initialAdd, wh, target, dir, depth, throttler) { + // Normalize the directory name on Windows + directory = sysPath.join(directory, EMPTY_STR); + + if (!wh.hasGlob) { + throttler = this.fsw._throttle('readdir', directory, 1000); + if (!throttler) return; + } + + const previous = this.fsw._getWatchedDir(wh.path); + const current = new Set(); + + let stream = this.fsw._readdirp(directory, { + fileFilter: entry => wh.filterPath(entry), + directoryFilter: entry => wh.filterDir(entry), + depth: 0 + }).on(STR_DATA, async (entry) => { + if (this.fsw.closed) { + stream = undefined; + return; + } + const item = entry.path; + let path = sysPath.join(directory, item); + current.add(item); + + if (entry.stats.isSymbolicLink() && await this._handleSymlink(entry, directory, path, item)) { + return; + } + + if (this.fsw.closed) { + stream = undefined; + return; + } + // Files that present in current directory snapshot + // but absent in previous are added to watch list and + // emit `add` event. + if (item === target || !target && !previous.has(item)) { + this.fsw._incrReadyCount(); + + // ensure relativeness of path is preserved in case of watcher reuse + path = sysPath.join(dir, sysPath.relative(dir, path)); + + this._addToNodeFs(path, initialAdd, wh, depth + 1); + } + }).on(EV_ERROR, this._boundHandleError); + + return new Promise(resolve => + stream.once(STR_END, () => { + if (this.fsw.closed) { + stream = undefined; + return; + } + const wasThrottled = throttler ? throttler.clear() : false; + + resolve(); + + // Files that absent in current directory snapshot + // but present in previous emit `remove` event + // and are removed from @watched[directory]. + previous.getChildren().filter((item) => { + return item !== directory && + !current.has(item) && + // in case of intersecting globs; + // a path may have been filtered out of this readdir, but + // shouldn't be removed because it matches a different glob + (!wh.hasGlob || wh.filterPath({ + fullPath: sysPath.resolve(directory, item) + })); + }).forEach((item) => { + this.fsw._remove(directory, item); + }); + + stream = undefined; + + // one more time for any missed in case changes came in extremely quickly + if (wasThrottled) this._handleRead(directory, false, wh, target, dir, depth, throttler); + }) + ); +} + +/** + * Read directory to add / remove files from `@watched` list and re-read it on change. 
+ * @param {String} dir fs path + * @param {fs.Stats} stats + * @param {Boolean} initialAdd + * @param {Number} depth relative to user-supplied path + * @param {String} target child path targeted for watch + * @param {Object} wh Common watch helpers for this path + * @param {String} realpath + * @returns {Promise} closer for the watcher instance. + */ +async _handleDir(dir, stats, initialAdd, depth, target, wh, realpath) { + const parentDir = this.fsw._getWatchedDir(sysPath.dirname(dir)); + const tracked = parentDir.has(sysPath.basename(dir)); + if (!(initialAdd && this.fsw.options.ignoreInitial) && !target && !tracked) { + if (!wh.hasGlob || wh.globFilter(dir)) this.fsw._emit(EV_ADD_DIR, dir, stats); + } + + // ensure dir is tracked (harmless if redundant) + parentDir.add(sysPath.basename(dir)); + this.fsw._getWatchedDir(dir); + let throttler; + let closer; + + const oDepth = this.fsw.options.depth; + if ((oDepth == null || depth <= oDepth) && !this.fsw._symlinkPaths.has(realpath)) { + if (!target) { + await this._handleRead(dir, initialAdd, wh, target, dir, depth, throttler); + if (this.fsw.closed) return; + } + + closer = this._watchWithNodeFs(dir, (dirPath, stats) => { + // if current directory is removed, do nothing + if (stats && stats.mtimeMs === 0) return; + + this._handleRead(dirPath, false, wh, target, dir, depth, throttler); + }); + } + return closer; +} + +/** + * Handle added file, directory, or glob pattern. + * Delegates call to _handleFile / _handleDir after checks. + * @param {String} path to file or ir + * @param {Boolean} initialAdd was the file added at watch instantiation? + * @param {Object} priorWh depth relative to user-supplied path + * @param {Number} depth Child path actually targeted for watch + * @param {String=} target Child path actually targeted for watch + * @returns {Promise} + */ +async _addToNodeFs(path, initialAdd, priorWh, depth, target) { + const ready = this.fsw._emitReady; + if (this.fsw._isIgnored(path) || this.fsw.closed) { + ready(); + return false; + } + + const wh = this.fsw._getWatchHelpers(path, depth); + if (!wh.hasGlob && priorWh) { + wh.hasGlob = priorWh.hasGlob; + wh.globFilter = priorWh.globFilter; + wh.filterPath = entry => priorWh.filterPath(entry); + wh.filterDir = entry => priorWh.filterDir(entry); + } + + // evaluate what is at the path we're being asked to watch + try { + const stats = await statMethods[wh.statMethod](wh.watchPath); + if (this.fsw.closed) return; + if (this.fsw._isIgnored(wh.watchPath, stats)) { + ready(); + return false; + } + + const follow = this.fsw.options.followSymlinks && !path.includes(STAR) && !path.includes(BRACE_START); + let closer; + if (stats.isDirectory()) { + const absPath = sysPath.resolve(path); + const targetPath = follow ? await fsrealpath(path) : path; + if (this.fsw.closed) return; + closer = await this._handleDir(wh.watchPath, stats, initialAdd, depth, target, wh, targetPath); + if (this.fsw.closed) return; + // preserve this symlink's target path + if (absPath !== targetPath && targetPath !== undefined) { + this.fsw._symlinkPaths.set(absPath, targetPath); + } + } else if (stats.isSymbolicLink()) { + const targetPath = follow ? 
await fsrealpath(path) : path; + if (this.fsw.closed) return; + const parent = sysPath.dirname(wh.watchPath); + this.fsw._getWatchedDir(parent).add(wh.watchPath); + this.fsw._emit(EV_ADD, wh.watchPath, stats); + closer = await this._handleDir(parent, stats, initialAdd, depth, path, wh, targetPath); + if (this.fsw.closed) return; + + // preserve this symlink's target path + if (targetPath !== undefined) { + this.fsw._symlinkPaths.set(sysPath.resolve(path), targetPath); + } + } else { + closer = this._handleFile(wh.watchPath, stats, initialAdd); + } + ready(); + + this.fsw._addPathCloser(path, closer); + return false; + + } catch (error) { + if (this.fsw._handleError(error)) { + ready(); + return path; + } + } +} + +} + +module.exports = NodeFsHandler; diff --git a/node_modules/chokidar/node_modules/glob-parent/CHANGELOG.md b/node_modules/chokidar/node_modules/glob-parent/CHANGELOG.md new file mode 100644 index 0000000..fb9de96 --- /dev/null +++ b/node_modules/chokidar/node_modules/glob-parent/CHANGELOG.md @@ -0,0 +1,110 @@ +### [5.1.2](https://github.com/gulpjs/glob-parent/compare/v5.1.1...v5.1.2) (2021-03-06) + + +### Bug Fixes + +* eliminate ReDoS ([#36](https://github.com/gulpjs/glob-parent/issues/36)) ([f923116](https://github.com/gulpjs/glob-parent/commit/f9231168b0041fea3f8f954b3cceb56269fc6366)) + +### [5.1.1](https://github.com/gulpjs/glob-parent/compare/v5.1.0...v5.1.1) (2021-01-27) + + +### Bug Fixes + +* unescape exclamation mark ([#26](https://github.com/gulpjs/glob-parent/issues/26)) ([a98874f](https://github.com/gulpjs/glob-parent/commit/a98874f1a59e407f4fb1beb0db4efa8392da60bb)) + +## [5.1.0](https://github.com/gulpjs/glob-parent/compare/v5.0.0...v5.1.0) (2021-01-27) + + +### Features + +* add `flipBackslashes` option to disable auto conversion of slashes (closes [#24](https://github.com/gulpjs/glob-parent/issues/24)) ([#25](https://github.com/gulpjs/glob-parent/issues/25)) ([eecf91d](https://github.com/gulpjs/glob-parent/commit/eecf91d5e3834ed78aee39c4eaaae654d76b87b3)) + +## [5.0.0](https://github.com/gulpjs/glob-parent/compare/v4.0.0...v5.0.0) (2021-01-27) + + +### ⚠ BREAKING CHANGES + +* Drop support for node <6 & bump dependencies + +### Miscellaneous Chores + +* Drop support for node <6 & bump dependencies ([896c0c0](https://github.com/gulpjs/glob-parent/commit/896c0c00b4e7362f60b96e7fc295ae929245255a)) + +## [4.0.0](https://github.com/gulpjs/glob-parent/compare/v3.1.0...v4.0.0) (2021-01-27) + + +### ⚠ BREAKING CHANGES + +* question marks are valid path characters on Windows so avoid flagging as a glob when alone +* Update is-glob dependency + +### Features + +* hoist regexps and strings for performance gains ([4a80667](https://github.com/gulpjs/glob-parent/commit/4a80667c69355c76a572a5892b0f133c8e1f457e)) +* question marks are valid path characters on Windows so avoid flagging as a glob when alone ([2a551dd](https://github.com/gulpjs/glob-parent/commit/2a551dd0dc3235e78bf3c94843d4107072d17841)) +* Update is-glob dependency ([e41fcd8](https://github.com/gulpjs/glob-parent/commit/e41fcd895d1f7bc617dba45c9d935a7949b9c281)) + +## [3.1.0](https://github.com/gulpjs/glob-parent/compare/v3.0.1...v3.1.0) (2021-01-27) + + +### Features + +* allow basic win32 backslash use ([272afa5](https://github.com/gulpjs/glob-parent/commit/272afa5fd070fc0f796386a5993d4ee4a846988b)) +* handle extglobs (parentheses) containing separators ([7db1bdb](https://github.com/gulpjs/glob-parent/commit/7db1bdb0756e55fd14619e8ce31aa31b17b117fd)) +* new approach to braces/brackets handling 
([8269bd8](https://github.com/gulpjs/glob-parent/commit/8269bd89290d99fac9395a354fb56fdcdb80f0be)) +* pre-process braces/brackets sections ([9ef8a87](https://github.com/gulpjs/glob-parent/commit/9ef8a87f66b1a43d0591e7a8e4fc5a18415ee388)) +* preserve escaped brace/bracket at end of string ([8cfb0ba](https://github.com/gulpjs/glob-parent/commit/8cfb0ba84202d51571340dcbaf61b79d16a26c76)) + + +### Bug Fixes + +* trailing escaped square brackets ([99ec9fe](https://github.com/gulpjs/glob-parent/commit/99ec9fecc60ee488ded20a94dd4f18b4f55c4ccf)) + +### [3.0.1](https://github.com/gulpjs/glob-parent/compare/v3.0.0...v3.0.1) (2021-01-27) + + +### Features + +* use path-dirname ponyfill ([cdbea5f](https://github.com/gulpjs/glob-parent/commit/cdbea5f32a58a54e001a75ddd7c0fccd4776aacc)) + + +### Bug Fixes + +* unescape glob-escaped dirnames on output ([598c533](https://github.com/gulpjs/glob-parent/commit/598c533bdf49c1428bc063aa9b8db40c5a86b030)) + +## [3.0.0](https://github.com/gulpjs/glob-parent/compare/v2.0.0...v3.0.0) (2021-01-27) + + +### ⚠ BREAKING CHANGES + +* update is-glob dependency + +### Features + +* update is-glob dependency ([5c5f8ef](https://github.com/gulpjs/glob-parent/commit/5c5f8efcee362a8e7638cf8220666acd8784f6bd)) + +## [2.0.0](https://github.com/gulpjs/glob-parent/compare/v1.3.0...v2.0.0) (2021-01-27) + + +### Features + +* move up to dirname regardless of glob characters ([f97fb83](https://github.com/gulpjs/glob-parent/commit/f97fb83be2e0a9fc8d3b760e789d2ecadd6aa0c2)) + +## [1.3.0](https://github.com/gulpjs/glob-parent/compare/v1.2.0...v1.3.0) (2021-01-27) + +## [1.2.0](https://github.com/gulpjs/glob-parent/compare/v1.1.0...v1.2.0) (2021-01-27) + + +### Reverts + +* feat: make regex test strings smaller ([dc80fa9](https://github.com/gulpjs/glob-parent/commit/dc80fa9658dca20549cfeba44bbd37d5246fcce0)) + +## [1.1.0](https://github.com/gulpjs/glob-parent/compare/v1.0.0...v1.1.0) (2021-01-27) + + +### Features + +* make regex test strings smaller ([cd83220](https://github.com/gulpjs/glob-parent/commit/cd832208638f45169f986d80fcf66e401f35d233)) + +## 1.0.0 (2021-01-27) + diff --git a/node_modules/chokidar/node_modules/glob-parent/LICENSE b/node_modules/chokidar/node_modules/glob-parent/LICENSE new file mode 100644 index 0000000..63222d7 --- /dev/null +++ b/node_modules/chokidar/node_modules/glob-parent/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) 2015, 2019 Elan Shanker + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/chokidar/node_modules/glob-parent/README.md b/node_modules/chokidar/node_modules/glob-parent/README.md new file mode 100644 index 0000000..36a2793 --- /dev/null +++ b/node_modules/chokidar/node_modules/glob-parent/README.md @@ -0,0 +1,137 @@ +


+ +# glob-parent + +[![NPM version][npm-image]][npm-url] [![Downloads][downloads-image]][npm-url] [![Azure Pipelines Build Status][azure-pipelines-image]][azure-pipelines-url] [![Travis Build Status][travis-image]][travis-url] [![AppVeyor Build Status][appveyor-image]][appveyor-url] [![Coveralls Status][coveralls-image]][coveralls-url] [![Gitter chat][gitter-image]][gitter-url] + +Extract the non-magic parent path from a glob string. + +## Usage + +```js +var globParent = require('glob-parent'); + +globParent('path/to/*.js'); // 'path/to' +globParent('/root/path/to/*.js'); // '/root/path/to' +globParent('/*.js'); // '/' +globParent('*.js'); // '.' +globParent('**/*.js'); // '.' +globParent('path/{to,from}'); // 'path' +globParent('path/!(to|from)'); // 'path' +globParent('path/?(to|from)'); // 'path' +globParent('path/+(to|from)'); // 'path' +globParent('path/*(to|from)'); // 'path' +globParent('path/@(to|from)'); // 'path' +globParent('path/**/*'); // 'path' + +// if provided a non-glob path, returns the nearest dir +globParent('path/foo/bar.js'); // 'path/foo' +globParent('path/foo/'); // 'path/foo' +globParent('path/foo'); // 'path' (see issue #3 for details) +``` + +## API + +### `globParent(maybeGlobString, [options])` + +Takes a string and returns the part of the path before the glob begins. Be aware of Escaping rules and Limitations below. + +#### options + +```js +{ + // Disables the automatic conversion of slashes for Windows + flipBackslashes: true +} +``` + +## Escaping + +The following characters have special significance in glob patterns and must be escaped if you want them to be treated as regular path characters: + +- `?` (question mark) unless used as a path segment alone +- `*` (asterisk) +- `|` (pipe) +- `(` (opening parenthesis) +- `)` (closing parenthesis) +- `{` (opening curly brace) +- `}` (closing curly brace) +- `[` (opening bracket) +- `]` (closing bracket) + +**Example** + +```js +globParent('foo/[bar]/') // 'foo' +globParent('foo/\\[bar]/') // 'foo/[bar]' +``` + +## Limitations + +### Braces & Brackets +This library attempts a quick and imperfect method of determining which path +parts have glob magic without fully parsing/lexing the pattern. There are some +advanced use cases that can trip it up, such as nested braces where the outer +pair is escaped and the inner one contains a path separator. If you find +yourself in the unlikely circumstance of being affected by this or need to +ensure higher-fidelity glob handling in your library, it is recommended that you +pre-process your input with [expand-braces] and/or [expand-brackets]. + +### Windows +Backslashes are not valid path separators for globs. If a path with backslashes +is provided anyway, for simple cases, glob-parent will replace the path +separator for you and return the non-glob parent path (now with +forward-slashes, which are still valid as Windows path separators). + +This cannot be used in conjunction with escape characters. + +```js +// BAD +globParent('C:\\Program Files \\(x86\\)\\*.ext') // 'C:/Program Files /(x86/)' + +// GOOD +globParent('C:/Program Files\\(x86\\)/*.ext') // 'C:/Program Files (x86)' +``` + +If you are using escape characters for a pattern without path parts (i.e. +relative to `cwd`), prefix with `./` to avoid confusing glob-parent. + +```js +// BAD +globParent('foo \\[bar]') // 'foo ' +globParent('foo \\[bar]*') // 'foo ' + +// GOOD +globParent('./foo \\[bar]') // 'foo [bar]' +globParent('./foo \\[bar]*') // '.' 
+``` + +## License + +ISC + +[expand-braces]: https://github.com/jonschlinkert/expand-braces +[expand-brackets]: https://github.com/jonschlinkert/expand-brackets + +[downloads-image]: https://img.shields.io/npm/dm/glob-parent.svg +[npm-url]: https://www.npmjs.com/package/glob-parent +[npm-image]: https://img.shields.io/npm/v/glob-parent.svg + +[azure-pipelines-url]: https://dev.azure.com/gulpjs/gulp/_build/latest?definitionId=2&branchName=master +[azure-pipelines-image]: https://dev.azure.com/gulpjs/gulp/_apis/build/status/glob-parent?branchName=master + +[travis-url]: https://travis-ci.org/gulpjs/glob-parent +[travis-image]: https://img.shields.io/travis/gulpjs/glob-parent.svg?label=travis-ci + +[appveyor-url]: https://ci.appveyor.com/project/gulpjs/glob-parent +[appveyor-image]: https://img.shields.io/appveyor/ci/gulpjs/glob-parent.svg?label=appveyor + +[coveralls-url]: https://coveralls.io/r/gulpjs/glob-parent +[coveralls-image]: https://img.shields.io/coveralls/gulpjs/glob-parent/master.svg + +[gitter-url]: https://gitter.im/gulpjs/gulp +[gitter-image]: https://badges.gitter.im/gulpjs/gulp.svg diff --git a/node_modules/chokidar/node_modules/glob-parent/index.js b/node_modules/chokidar/node_modules/glob-parent/index.js new file mode 100644 index 0000000..09e257e --- /dev/null +++ b/node_modules/chokidar/node_modules/glob-parent/index.js @@ -0,0 +1,42 @@ +'use strict'; + +var isGlob = require('is-glob'); +var pathPosixDirname = require('path').posix.dirname; +var isWin32 = require('os').platform() === 'win32'; + +var slash = '/'; +var backslash = /\\/g; +var enclosure = /[\{\[].*[\}\]]$/; +var globby = /(^|[^\\])([\{\[]|\([^\)]+$)/; +var escaped = /\\([\!\*\?\|\[\]\(\)\{\}])/g; + +/** + * @param {string} str + * @param {Object} opts + * @param {boolean} [opts.flipBackslashes=true] + * @returns {string} + */ +module.exports = function globParent(str, opts) { + var options = Object.assign({ flipBackslashes: true }, opts); + + // flip windows path separators + if (options.flipBackslashes && isWin32 && str.indexOf(slash) < 0) { + str = str.replace(backslash, slash); + } + + // special case for strings ending in enclosure containing path separator + if (enclosure.test(str)) { + str += slash; + } + + // preserves full path in case of trailing path separator + str += 'a'; + + // remove path parts that are globby + do { + str = pathPosixDirname(str); + } while (isGlob(str) || globby.test(str)); + + // remove escape chars and return result + return str.replace(escaped, '$1'); +}; diff --git a/node_modules/chokidar/node_modules/glob-parent/package.json b/node_modules/chokidar/node_modules/glob-parent/package.json new file mode 100644 index 0000000..125c971 --- /dev/null +++ b/node_modules/chokidar/node_modules/glob-parent/package.json @@ -0,0 +1,48 @@ +{ + "name": "glob-parent", + "version": "5.1.2", + "description": "Extract the non-magic parent path from a glob string.", + "author": "Gulp Team (https://gulpjs.com/)", + "contributors": [ + "Elan Shanker (https://github.com/es128)", + "Blaine Bublitz " + ], + "repository": "gulpjs/glob-parent", + "license": "ISC", + "engines": { + "node": ">= 6" + }, + "main": "index.js", + "files": [ + "LICENSE", + "index.js" + ], + "scripts": { + "lint": "eslint .", + "pretest": "npm run lint", + "test": "nyc mocha --async-only", + "azure-pipelines": "nyc mocha --async-only --reporter xunit -O output=test.xunit", + "coveralls": "nyc report --reporter=text-lcov | coveralls" + }, + "dependencies": { + "is-glob": "^4.0.1" + }, + "devDependencies": { + "coveralls": 
"^3.0.11", + "eslint": "^2.13.1", + "eslint-config-gulp": "^3.0.1", + "expect": "^1.20.2", + "mocha": "^6.0.2", + "nyc": "^13.3.0" + }, + "keywords": [ + "glob", + "parent", + "strip", + "path", + "dirname", + "directory", + "base", + "wildcard" + ] +} diff --git a/node_modules/chokidar/package.json b/node_modules/chokidar/package.json new file mode 100644 index 0000000..6c3cd27 --- /dev/null +++ b/node_modules/chokidar/package.json @@ -0,0 +1,85 @@ +{ + "name": "chokidar", + "description": "Minimal and efficient cross-platform file watching library", + "version": "3.5.3", + "homepage": "https://github.com/paulmillr/chokidar", + "author": "Paul Miller (https://paulmillr.com)", + "contributors": [ + "Paul Miller (https://paulmillr.com)", + "Elan Shanker" + ], + "engines": { + "node": ">= 8.10.0" + }, + "main": "index.js", + "dependencies": { + "anymatch": "~3.1.2", + "braces": "~3.0.2", + "glob-parent": "~5.1.2", + "is-binary-path": "~2.1.0", + "is-glob": "~4.0.1", + "normalize-path": "~3.0.0", + "readdirp": "~3.6.0" + }, + "optionalDependencies": { + "fsevents": "~2.3.2" + }, + "devDependencies": { + "@types/node": "^14", + "chai": "^4.3", + "dtslint": "^3.3.0", + "eslint": "^7.0.0", + "mocha": "^7.0.0", + "nyc": "^15.0.0", + "rimraf": "^3.0.0", + "sinon": "^9.0.1", + "sinon-chai": "^3.3.0", + "typescript": "~4.4.3", + "upath": "^1.2.0" + }, + "files": [ + "index.js", + "lib/*.js", + "types/index.d.ts" + ], + "repository": { + "type": "git", + "url": "git+https://github.com/paulmillr/chokidar.git" + }, + "bugs": { + "url": "https://github.com/paulmillr/chokidar/issues" + }, + "license": "MIT", + "scripts": { + "dtslint": "dtslint types", + "lint": "eslint --report-unused-disable-directives --ignore-path .gitignore .", + "mocha": "mocha --exit --timeout 90000", + "test": "npm run lint && npm run mocha" + }, + "keywords": [ + "fs", + "watch", + "watchFile", + "watcher", + "watching", + "file", + "fsevents" + ], + "types": "./types/index.d.ts", + "nyc": { + "include": [ + "index.js", + "lib/*.js" + ], + "reporter": [ + "html", + "text" + ] + }, + "funding": [ + { + "type": "individual", + "url": "https://paulmillr.com/funding/" + } + ] +} diff --git a/node_modules/chokidar/types/index.d.ts b/node_modules/chokidar/types/index.d.ts new file mode 100644 index 0000000..aab8e33 --- /dev/null +++ b/node_modules/chokidar/types/index.d.ts @@ -0,0 +1,188 @@ +// TypeScript Version: 3.0 + +/// + +import * as fs from "fs"; +import { EventEmitter } from "events"; +import { Matcher } from 'anymatch'; + +export class FSWatcher extends EventEmitter implements fs.FSWatcher { + options: WatchOptions; + + /** + * Constructs a new FSWatcher instance with optional WatchOptions parameter. + */ + constructor(options?: WatchOptions); + + /** + * Add files, directories, or glob patterns for tracking. Takes an array of strings or just one + * string. + */ + add(paths: string | ReadonlyArray): this; + + /** + * Stop watching files, directories, or glob patterns. Takes an array of strings or just one + * string. + */ + unwatch(paths: string | ReadonlyArray): this; + + /** + * Returns an object representing all the paths on the file system being watched by this + * `FSWatcher` instance. The object's keys are all the directories (using absolute paths unless + * the `cwd` option was used), and the values are arrays of the names of the items contained in + * each directory. + */ + getWatched(): { + [directory: string]: string[]; + }; + + /** + * Removes all listeners from watched files. 
+ */ + close(): Promise; + + on(event: 'add'|'addDir'|'change', listener: (path: string, stats?: fs.Stats) => void): this; + + on(event: 'all', listener: (eventName: 'add'|'addDir'|'change'|'unlink'|'unlinkDir', path: string, stats?: fs.Stats) => void): this; + + /** + * Error occurred + */ + on(event: 'error', listener: (error: Error) => void): this; + + /** + * Exposes the native Node `fs.FSWatcher events` + */ + on(event: 'raw', listener: (eventName: string, path: string, details: any) => void): this; + + /** + * Fires when the initial scan is complete + */ + on(event: 'ready', listener: () => void): this; + + on(event: 'unlink'|'unlinkDir', listener: (path: string) => void): this; + + on(event: string, listener: (...args: any[]) => void): this; +} + +export interface WatchOptions { + /** + * Indicates whether the process should continue to run as long as files are being watched. If + * set to `false` when using `fsevents` to watch, no more events will be emitted after `ready`, + * even if the process continues to run. + */ + persistent?: boolean; + + /** + * ([anymatch](https://github.com/micromatch/anymatch)-compatible definition) Defines files/paths to + * be ignored. The whole relative or absolute path is tested, not just filename. If a function + * with two arguments is provided, it gets called twice per path - once with a single argument + * (the path), second time with two arguments (the path and the + * [`fs.Stats`](https://nodejs.org/api/fs.html#fs_class_fs_stats) object of that path). + */ + ignored?: Matcher; + + /** + * If set to `false` then `add`/`addDir` events are also emitted for matching paths while + * instantiating the watching as chokidar discovers these file paths (before the `ready` event). + */ + ignoreInitial?: boolean; + + /** + * When `false`, only the symlinks themselves will be watched for changes instead of following + * the link references and bubbling events through the link's path. + */ + followSymlinks?: boolean; + + /** + * The base directory from which watch `paths` are to be derived. Paths emitted with events will + * be relative to this. + */ + cwd?: string; + + /** + * If set to true then the strings passed to .watch() and .add() are treated as literal path + * names, even if they look like globs. Default: false. + */ + disableGlobbing?: boolean; + + /** + * Whether to use fs.watchFile (backed by polling), or fs.watch. If polling leads to high CPU + * utilization, consider setting this to `false`. It is typically necessary to **set this to + * `true` to successfully watch files over a network**, and it may be necessary to successfully + * watch files in other non-standard situations. Setting to `true` explicitly on OS X overrides + * the `useFsEvents` default. + */ + usePolling?: boolean; + + /** + * Whether to use the `fsevents` watching interface if available. When set to `true` explicitly + * and `fsevents` is available this supercedes the `usePolling` setting. When set to `false` on + * OS X, `usePolling: true` becomes the default. + */ + useFsEvents?: boolean; + + /** + * If relying upon the [`fs.Stats`](https://nodejs.org/api/fs.html#fs_class_fs_stats) object that + * may get passed with `add`, `addDir`, and `change` events, set this to `true` to ensure it is + * provided even in cases where it wasn't already available from the underlying watch events. + */ + alwaysStat?: boolean; + + /** + * If set, limits how many levels of subdirectories will be traversed. + */ + depth?: number; + + /** + * Interval of file system polling. 
+ */ + interval?: number; + + /** + * Interval of file system polling for binary files. ([see list of binary extensions](https://gi + * thub.com/sindresorhus/binary-extensions/blob/master/binary-extensions.json)) + */ + binaryInterval?: number; + + /** + * Indicates whether to watch files that don't have read permissions if possible. If watching + * fails due to `EPERM` or `EACCES` with this set to `true`, the errors will be suppressed + * silently. + */ + ignorePermissionErrors?: boolean; + + /** + * `true` if `useFsEvents` and `usePolling` are `false`). Automatically filters out artifacts + * that occur when using editors that use "atomic writes" instead of writing directly to the + * source file. If a file is re-added within 100 ms of being deleted, Chokidar emits a `change` + * event rather than `unlink` then `add`. If the default of 100 ms does not work well for you, + * you can override it by setting `atomic` to a custom value, in milliseconds. + */ + atomic?: boolean | number; + + /** + * can be set to an object in order to adjust timing params: + */ + awaitWriteFinish?: AwaitWriteFinishOptions | boolean; +} + +export interface AwaitWriteFinishOptions { + /** + * Amount of time in milliseconds for a file size to remain constant before emitting its event. + */ + stabilityThreshold?: number; + + /** + * File size polling interval. + */ + pollInterval?: number; +} + +/** + * produces an instance of `FSWatcher`. + */ +export function watch( + paths: string | ReadonlyArray, + options?: WatchOptions +): FSWatcher; diff --git a/node_modules/commander/CHANGELOG.md b/node_modules/commander/CHANGELOG.md new file mode 100644 index 0000000..f00cb2b --- /dev/null +++ b/node_modules/commander/CHANGELOG.md @@ -0,0 +1,436 @@ +# Changelog + +All notable changes to this project will be documented in this file. + +The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/) +and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html). (Format adopted after v3.0.0.) 
+ + + +## [4.1.1] (2020-02-02) + +### Fixed + +* TypeScript definition for `.action()` should include Promise for async ([#1157]) + +## [4.1.0] (2020-01-06) + +### Added + +* two routines to change how option values are handled, and eliminate name clashes with command properties ([#933] [#1102]) + * see storeOptionsAsProperties and passCommandToAction in README +* `.parseAsync` to use instead of `.parse` if supply async action handlers ([#806] [#1118]) + +### Fixed + +* Remove trailing blanks from wrapped help text ([#1096]) + +### Changed + +* update dependencies +* extend security coverage for Commander 2.x to 2020-02-03 +* improvements to README +* improvements to TypeScript definition documentation +* move old versions out of main CHANGELOG +* removed explicit use of `ts-node` in tests + +## [4.0.1] (2019-11-12) + +### Fixed + +* display help when requested, even if there are missing required options ([#1091]) + +## [4.0.0] (2019-11-02) + +### Added + +* automatically wrap and indent help descriptions for options and commands ([#1051]) +* `.exitOverride()` allows override of calls to `process.exit` for additional error handling and to keep program running ([#1040]) +* support for declaring required options with `.requiredOptions()` ([#1071]) +* GitHub Actions support ([#1027]) +* translation links in README + +### Changed + +* dev: switch tests from Sinon+Should to Jest with major rewrite of tests ([#1035]) +* call default subcommand even when there are unknown options ([#1047]) +* *Breaking* Commander is only officially supported on Node 8 and above, and requires Node 6 ([#1053]) + +### Fixed + +* *Breaking* keep command object out of program.args when action handler called ([#1048]) + * also, action handler now passed array of unknown arguments +* complain about unknown options when program argument supplied and action handler ([#1049]) + * this changes parameters to `command:*` event to include unknown arguments +* removed deprecated `customFds` option from call to `child_process.spawn` ([#1052]) +* rework TypeScript declarations to bring all types into imported namespace ([#1081]) + +### Migration Tips + +#### Testing for no arguments + +If you were previously using code like: + +```js +if (!program.args.length) ... +``` + +a partial replacement is: + +```js +if (program.rawArgs.length < 3) ... +``` + +## [4.0.0-1] Prerelease (2019-10-08) + +(Released in 4.0.0) + +## [4.0.0-0] Prerelease (2019-10-01) + +(Released in 4.0.0) + +## [2.20.1] (2019-09-29) + +### Fixed + +* Improve tracking of executable subcommands. + +### Changed + +* update development dependencies + +## [3.0.2] (2019-09-27) + +### Fixed + +* Improve tracking of executable subcommands. + +### Changed + +* update development dependencies + +## [3.0.1] (2019-08-30) + +### Added + +* .name and .usage to README ([#1010]) +* Table of Contents to README ([#1010]) +* TypeScript definition for `executableFile` in CommandOptions ([#1028]) + +### Changed + +* consistently use `const` rather than `var` in README ([#1026]) + +### Fixed + +* help for sub commands with custom executableFile ([#1018]) + +## [3.0.0] / 2019-08-08 + +* Add option to specify executable file name ([#999]) + * e.g. `.command('clone', 'clone description', { executableFile: 'myClone' })` +* Change docs for `.command` to contrast action handler vs git-style executable. ([#938] [#990]) +* **Breaking** Change TypeScript to use overloaded function for `.command`. 
([#938] [#990]) +* Change to use straight quotes around strings in error messages (like 'this' instead of `this') ([#915]) +* Add TypeScript "reference types" for node ([#974]) +* Add support for hyphen as an option argument in subcommands ([#697]) +* Add support for a short option flag and its value to be concatenated for action handler subcommands ([#599]) + * e.g. `-p 80` can also be supplied as `-p80` +* Add executable arguments to spawn in win32, for git-style executables ([#611]) + * e.g. `node --harmony myCommand.js clone` +* Add parent command as prefix of subcommand in help ([#980]) +* Add optional custom description to `.version` ([#963]) + * e.g. `program.version('0.0.1', '-v, --vers', 'output the current version')` +* Add `.helpOption(flags, description)` routine to customise help flags and description ([#963]) + * e.g. `.helpOption('-e, --HELP', 'read more information')` +* Fix behavior of --no-* options ([#795]) + * can now define both `--foo` and `--no-foo` + * **Breaking** custom event listeners: `--no-foo` on cli now emits `option:no-foo` (previously `option:foo`) + * **Breaking** default value: defining `--no-foo` after defining `--foo` leaves the default value unchanged (previously set it to false) + * allow boolean default value, such as from environment ([#987]) +* Increment inspector port for spawned subcommands ([#991]) + * e.g. `node --inspect myCommand.js clone` + +### Migration Tips + +The custom event for a negated option like `--no-foo` is `option:no-foo` (previously `option:foo`). + +```js +program + .option('--no-foo') + .on('option:no-foo', () => { + console.log('removing foo'); + }); +``` + +When using TypeScript, adding a command does not allow an explicit `undefined` for an unwanted executable description (e.g +for a command with an action handler). + +```js +program + .command('action1', undefined, { noHelp: true }) // No longer valid + .command('action2', { noHelp: true }) // Correct +``` + +## 3.0.0-0 Prerelease / 2019-07-28 + +(Released as 3.0.0) + +## 2.20.0 / 2019-04-02 + +* fix: resolve symbolic links completely when hunting for subcommands (#935) +* Update index.d.ts (#930) +* Update Readme.md (#924) +* Remove --save option as it isn't required anymore (#918) +* Add link to the license file (#900) +* Added example of receiving args from options (#858) +* Added missing semicolon (#882) +* Add extension to .eslintrc (#876) + +## 2.19.0 / 2018-10-02 + +* Removed newline after Options and Commands headers (#864) +* Bugfix - Error output (#862) +* Fix to change default value to string (#856) + +## 2.18.0 / 2018-09-07 + +* Standardize help output (#853) +* chmod 644 travis.yml (#851) +* add support for execute typescript subcommand via ts-node (#849) + +## 2.17.1 / 2018-08-07 + +* Fix bug in command emit (#844) + +## 2.17.0 / 2018-08-03 + +* fixed newline output after help information (#833) +* Fix to emit the action even without command (#778) +* npm update (#823) + +## 2.16.0 / 2018-06-29 + +* Remove Makefile and `test/run` (#821) +* Make 'npm test' run on Windows (#820) +* Add badge to display install size (#807) +* chore: cache node_modules (#814) +* chore: remove Node.js 4 (EOL), add Node.js 10 (#813) +* fixed typo in readme (#812) +* Fix types (#804) +* Update eslint to resolve vulnerabilities in lodash (#799) +* updated readme with custom event listeners. 
(#791) +* fix tests (#794) + +## 2.15.0 / 2018-03-07 + +* Update downloads badge to point to graph of downloads over time instead of duplicating link to npm +* Arguments description + +## 2.14.1 / 2018-02-07 + +* Fix typing of help function + +## 2.14.0 / 2018-02-05 + +* only register the option:version event once +* Fixes issue #727: Passing empty string for option on command is set to undefined +* enable eqeqeq rule +* resolves #754 add linter configuration to project +* resolves #560 respect custom name for version option +* document how to override the version flag +* document using options per command + +## 2.13.0 / 2018-01-09 + +* Do not print default for --no- +* remove trailing spaces in command help +* Update CI's Node.js to LTS and latest version +* typedefs: Command and Option types added to commander namespace + +## 2.12.2 / 2017-11-28 + +* fix: typings are not shipped + +## 2.12.1 / 2017-11-23 + +* Move @types/node to dev dependency + +## 2.12.0 / 2017-11-22 + +* add attributeName() method to Option objects +* Documentation updated for options with --no prefix +* typings: `outputHelp` takes a string as the first parameter +* typings: use overloads +* feat(typings): update to match js api +* Print default value in option help +* Fix translation error +* Fail when using same command and alias (#491) +* feat(typings): add help callback +* fix bug when description is add after command with options (#662) +* Format js code +* Rename History.md to CHANGELOG.md (#668) +* feat(typings): add typings to support TypeScript (#646) +* use current node + +## 2.11.0 / 2017-07-03 + +* Fix help section order and padding (#652) +* feature: support for signals to subcommands (#632) +* Fixed #37, --help should not display first (#447) +* Fix translation errors. (#570) +* Add package-lock.json +* Remove engines +* Upgrade package version +* Prefix events to prevent conflicts between commands and options (#494) +* Removing dependency on graceful-readlink +* Support setting name in #name function and make it chainable +* Add .vscode directory to .gitignore (Visual Studio Code metadata) +* Updated link to ruby commander in readme files + +## 2.10.0 / 2017-06-19 + +* Update .travis.yml. drop support for older node.js versions. +* Fix require arguments in README.md +* On SemVer you do not start from 0.0.1 +* Add missing semi colon in readme +* Add save param to npm install +* node v6 travis test +* Update Readme_zh-CN.md +* Allow literal '--' to be passed-through as an argument +* Test subcommand alias help +* link build badge to master branch +* Support the alias of Git style sub-command +* added keyword commander for better search result on npm +* Fix Sub-Subcommands +* test node.js stable +* Fixes TypeError when a command has an option called `--description` +* Update README.md to make it beginner friendly and elaborate on the difference between angled and square brackets. 
+* Add chinese Readme file + +## 2.9.0 / 2015-10-13 + +* Add option `isDefault` to set default subcommand #415 @Qix- +* Add callback to allow filtering or post-processing of help text #434 @djulien +* Fix `undefined` text in help information close #414 #416 @zhiyelee + +## 2.8.1 / 2015-04-22 + +* Back out `support multiline description` Close #396 #397 + +## 2.8.0 / 2015-04-07 + +* Add `process.execArg` support, execution args like `--harmony` will be passed to sub-commands #387 @DigitalIO @zhiyelee +* Fix bug in Git-style sub-commands #372 @zhiyelee +* Allow commands to be hidden from help #383 @tonylukasavage +* When git-style sub-commands are in use, yet none are called, display help #382 @claylo +* Add ability to specify arguments syntax for top-level command #258 @rrthomas +* Support multiline descriptions #208 @zxqfox + +## 2.7.1 / 2015-03-11 + +* Revert #347 (fix collisions when option and first arg have same name) which causes a bug in #367. + +## 2.7.0 / 2015-03-09 + +* Fix git-style bug when installed globally. Close #335 #349 @zhiyelee +* Fix collisions when option and first arg have same name. Close #346 #347 @tonylukasavage +* Add support for camelCase on `opts()`. Close #353 @nkzawa +* Add node.js 0.12 and io.js to travis.yml +* Allow RegEx options. #337 @palanik +* Fixes exit code when sub-command failing. Close #260 #332 @pirelenito +* git-style `bin` files in $PATH make sense. Close #196 #327 @zhiyelee + +## 2.6.0 / 2014-12-30 + +* added `Command#allowUnknownOption` method. Close #138 #318 @doozr @zhiyelee +* Add application description to the help msg. Close #112 @dalssoft + +## 2.5.1 / 2014-12-15 + +* fixed two bugs incurred by variadic arguments. Close #291 @Quentin01 #302 @zhiyelee + +## 2.5.0 / 2014-10-24 + +* add support for variadic arguments. Closes #277 @whitlockjc + +## 2.4.0 / 2014-10-17 + +* fixed a bug on executing the coercion function of subcommands option. Closes #270 +* added `Command.prototype.name` to retrieve command name. Closes #264 #266 @tonylukasavage +* added `Command.prototype.opts` to retrieve all the options as a simple object of key-value pairs. Closes #262 @tonylukasavage +* fixed a bug on subcommand name. Closes #248 @jonathandelgado +* fixed function normalize doesn’t honor option terminator. Closes #216 @abbr + +## 2.3.0 / 2014-07-16 + +* add command alias'. Closes PR #210 +* fix: Typos. Closes #99 +* fix: Unused fs module. Closes #217 + +## 2.2.0 / 2014-03-29 + +* add passing of previous option value +* fix: support subcommands on windows. Closes #142 +* Now the defaultValue passed as the second argument of the coercion function. 
+ +## 2.1.0 / 2013-11-21 + +* add: allow cflag style option params, unit test, fixes #174 + +## 2.0.0 / 2013-07-18 + +* remove input methods (.prompt, .confirm, etc) + +## Older versions + +* [1.x](./changelogs/CHANGELOG-1.md) +* [0.x](./changelogs/CHANGELOG-0.md) + +[#599]: https://github.com/tj/commander.js/issues/599 +[#611]: https://github.com/tj/commander.js/issues/611 +[#697]: https://github.com/tj/commander.js/issues/697 +[#795]: https://github.com/tj/commander.js/issues/795 +[#806]: https://github.com/tj/commander.js/issues/806 +[#915]: https://github.com/tj/commander.js/issues/915 +[#938]: https://github.com/tj/commander.js/issues/938 +[#963]: https://github.com/tj/commander.js/issues/963 +[#974]: https://github.com/tj/commander.js/issues/974 +[#980]: https://github.com/tj/commander.js/issues/980 +[#987]: https://github.com/tj/commander.js/issues/987 +[#990]: https://github.com/tj/commander.js/issues/990 +[#991]: https://github.com/tj/commander.js/issues/991 +[#993]: https://github.com/tj/commander.js/issues/993 +[#999]: https://github.com/tj/commander.js/issues/999 +[#1010]: https://github.com/tj/commander.js/pull/1010 +[#1018]: https://github.com/tj/commander.js/pull/1018 +[#1026]: https://github.com/tj/commander.js/pull/1026 +[#1027]: https://github.com/tj/commander.js/pull/1027 +[#1028]: https://github.com/tj/commander.js/pull/1028 +[#1035]: https://github.com/tj/commander.js/pull/1035 +[#1040]: https://github.com/tj/commander.js/pull/1040 +[#1047]: https://github.com/tj/commander.js/pull/1047 +[#1048]: https://github.com/tj/commander.js/pull/1048 +[#1049]: https://github.com/tj/commander.js/pull/1049 +[#1051]: https://github.com/tj/commander.js/pull/1051 +[#1052]: https://github.com/tj/commander.js/pull/1052 +[#1053]: https://github.com/tj/commander.js/pull/1053 +[#1071]: https://github.com/tj/commander.js/pull/1071 +[#1081]: https://github.com/tj/commander.js/pull/1081 +[#1091]: https://github.com/tj/commander.js/pull/1091 +[#1096]: https://github.com/tj/commander.js/pull/1096 +[#1102]: https://github.com/tj/commander.js/pull/1102 +[#1118]: https://github.com/tj/commander.js/pull/1118 +[#1157]: https://github.com/tj/commander.js/pull/1157 + +[Unreleased]: https://github.com/tj/commander.js/compare/master...develop +[4.1.1]: https://github.com/tj/commander.js/compare/v4.0.0..v4.1.1 +[4.1.0]: https://github.com/tj/commander.js/compare/v4.0.1..v4.1.0 +[4.0.1]: https://github.com/tj/commander.js/compare/v4.0.0..v4.0.1 +[4.0.0]: https://github.com/tj/commander.js/compare/v3.0.2..v4.0.0 +[4.0.0-1]: https://github.com/tj/commander.js/compare/v4.0.0-0..v4.0.0-1 +[4.0.0-0]: https://github.com/tj/commander.js/compare/v3.0.2...v4.0.0-0 +[3.0.2]: https://github.com/tj/commander.js/compare/v3.0.1...v3.0.2 +[3.0.1]: https://github.com/tj/commander.js/compare/v3.0.0...v3.0.1 +[3.0.0]: https://github.com/tj/commander.js/compare/v2.20.1...v3.0.0 +[2.20.1]: https://github.com/tj/commander.js/compare/v2.20.0...v2.20.1 diff --git a/node_modules/commander/LICENSE b/node_modules/commander/LICENSE new file mode 100644 index 0000000..10f997a --- /dev/null +++ b/node_modules/commander/LICENSE @@ -0,0 +1,22 @@ +(The MIT License) + +Copyright (c) 2011 TJ Holowaychuk + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and 
to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/commander/Readme.md b/node_modules/commander/Readme.md new file mode 100644 index 0000000..aa4f42b --- /dev/null +++ b/node_modules/commander/Readme.md @@ -0,0 +1,713 @@ +# Commander.js + +[![Build Status](https://api.travis-ci.org/tj/commander.js.svg?branch=master)](http://travis-ci.org/tj/commander.js) +[![NPM Version](http://img.shields.io/npm/v/commander.svg?style=flat)](https://www.npmjs.org/package/commander) +[![NPM Downloads](https://img.shields.io/npm/dm/commander.svg?style=flat)](https://npmcharts.com/compare/commander?minimal=true) +[![Install Size](https://packagephobia.now.sh/badge?p=commander)](https://packagephobia.now.sh/result?p=commander) + +The complete solution for [node.js](http://nodejs.org) command-line interfaces, inspired by Ruby's [commander](https://github.com/commander-rb/commander). + +Read this in other languages: English | [简体中文](./Readme_zh-CN.md) + +- [Commander.js](#commanderjs) + - [Installation](#installation) + - [Declaring program variable](#declaring-program-variable) + - [Options](#options) + - [Common option types, boolean and value](#common-option-types-boolean-and-value) + - [Default option value](#default-option-value) + - [Other option types, negatable boolean and flag|value](#other-option-types-negatable-boolean-and-flagvalue) + - [Custom option processing](#custom-option-processing) + - [Required option](#required-option) + - [Version option](#version-option) + - [Commands](#commands) + - [Specify the argument syntax](#specify-the-argument-syntax) + - [Action handler (sub)commands](#action-handler-subcommands) + - [Git-style executable (sub)commands](#git-style-executable-subcommands) + - [Automated --help](#automated---help) + - [Custom help](#custom-help) + - [.usage and .name](#usage-and-name) + - [.outputHelp(cb)](#outputhelpcb) + - [.helpOption(flags, description)](#helpoptionflags-description) + - [.help(cb)](#helpcb) + - [Custom event listeners](#custom-event-listeners) + - [Bits and pieces](#bits-and-pieces) + - [Avoiding option name clashes](#avoiding-option-name-clashes) + - [TypeScript](#typescript) + - [Node options such as --harmony](#node-options-such-as---harmony) + - [Node debugging](#node-debugging) + - [Override exit handling](#override-exit-handling) + - [Examples](#examples) + - [License](#license) + - [Support](#support) + - [Commander for enterprise](#commander-for-enterprise) + +## Installation + +```bash +npm install commander +``` + +## Declaring _program_ variable + +Commander exports a global object which is convenient for quick programs. +This is used in the examples in this README for brevity. 
+ +```js +const program = require('commander'); +program.version('0.0.1'); +``` + +For larger programs which may use commander in multiple ways, including unit testing, it is better to create a local Command object to use. + + ```js + const commander = require('commander'); + const program = new commander.Command(); + program.version('0.0.1'); + ``` + +## Options + +Options are defined with the `.option()` method, also serving as documentation for the options. Each option can have a short flag (single character) and a long name, separated by a comma or space. + +The options can be accessed as properties on the Command object. Multi-word options such as "--template-engine" are camel-cased, becoming `program.templateEngine` etc. Multiple short flags may be combined as a single arg, for example `-abc` is equivalent to `-a -b -c`. + +See also optional new behaviour to [avoid name clashes](#avoiding-option-name-clashes). + +### Common option types, boolean and value + +The two most used option types are a boolean flag, and an option which takes a value (declared using angle brackets). Both are `undefined` unless specified on command line. + +```js +const program = require('commander'); + +program + .option('-d, --debug', 'output extra debugging') + .option('-s, --small', 'small pizza size') + .option('-p, --pizza-type ', 'flavour of pizza'); + +program.parse(process.argv); + +if (program.debug) console.log(program.opts()); +console.log('pizza details:'); +if (program.small) console.log('- small pizza size'); +if (program.pizzaType) console.log(`- ${program.pizzaType}`); +``` + +```bash +$ pizza-options -d +{ debug: true, small: undefined, pizzaType: undefined } +pizza details: +$ pizza-options -p +error: option '-p, --pizza-type ' argument missing +$ pizza-options -ds -p vegetarian +{ debug: true, small: true, pizzaType: 'vegetarian' } +pizza details: +- small pizza size +- vegetarian +$ pizza-options --pizza-type=cheese +pizza details: +- cheese +``` + +`program.parse(arguments)` processes the arguments, leaving any args not consumed by the options as the `program.args` array. + +### Default option value + +You can specify a default value for an option which takes a value. + +```js +const program = require('commander'); + +program + .option('-c, --cheese ', 'add the specified type of cheese', 'blue'); + +program.parse(process.argv); + +console.log(`cheese: ${program.cheese}`); +``` + +```bash +$ pizza-options +cheese: blue +$ pizza-options --cheese stilton +cheese: stilton +``` + +### Other option types, negatable boolean and flag|value + +You can specify a boolean option long name with a leading `no-` to set the option value to false when used. +Defined alone this also makes the option true by default. + +If you define `--foo` first, adding `--no-foo` does not change the default value from what it would +otherwise be. You can specify a default boolean value for a boolean flag and it can be overridden on command line. + +```js +const program = require('commander'); + +program + .option('--no-sauce', 'Remove sauce') + .option('--cheese ', 'cheese flavour', 'mozzarella') + .option('--no-cheese', 'plain with no cheese') + .parse(process.argv); + +const sauceStr = program.sauce ? 'sauce' : 'no sauce'; +const cheeseStr = (program.cheese === false) ? 
'no cheese' : `${program.cheese} cheese`; +console.log(`You ordered a pizza with ${sauceStr} and ${cheeseStr}`); +``` + +```bash +$ pizza-options +You ordered a pizza with sauce and mozzarella cheese +$ pizza-options --sauce +error: unknown option '--sauce' +$ pizza-options --cheese=blue +You ordered a pizza with sauce and blue cheese +$ pizza-options --no-sauce --no-cheese +You ordered a pizza with no sauce and no cheese +``` + +You can specify an option which functions as a flag but may also take a value (declared using square brackets). + +```js +const program = require('commander'); + +program + .option('-c, --cheese [type]', 'Add cheese with optional type'); + +program.parse(process.argv); + +if (program.cheese === undefined) console.log('no cheese'); +else if (program.cheese === true) console.log('add cheese'); +else console.log(`add cheese type ${program.cheese}`); +``` + +```bash +$ pizza-options +no cheese +$ pizza-options --cheese +add cheese +$ pizza-options --cheese mozzarella +add cheese type mozzarella +``` + +### Custom option processing + +You may specify a function to do custom processing of option values. The callback function receives two parameters, the user specified value and the +previous value for the option. It returns the new value for the option. + +This allows you to coerce the option value to the desired type, or accumulate values, or do entirely custom processing. + +You can optionally specify the default/starting value for the option after the function. + +```js +const program = require('commander'); + +function myParseInt(value, dummyPrevious) { + // parseInt takes a string and an optional radix + return parseInt(value); +} + +function increaseVerbosity(dummyValue, previous) { + return previous + 1; +} + +function collect(value, previous) { + return previous.concat([value]); +} + +function commaSeparatedList(value, dummyPrevious) { + return value.split(','); +} + +program + .option('-f, --float ', 'float argument', parseFloat) + .option('-i, --integer ', 'integer argument', myParseInt) + .option('-v, --verbose', 'verbosity that can be increased', increaseVerbosity, 0) + .option('-c, --collect ', 'repeatable value', collect, []) + .option('-l, --list ', 'comma separated list', commaSeparatedList) +; + +program.parse(process.argv); + +if (program.float !== undefined) console.log(`float: ${program.float}`); +if (program.integer !== undefined) console.log(`integer: ${program.integer}`); +if (program.verbose > 0) console.log(`verbosity: ${program.verbose}`); +if (program.collect.length > 0) console.log(program.collect); +if (program.list !== undefined) console.log(program.list); +``` + +```bash +$ custom -f 1e2 +float: 100 +$ custom --integer 2 +integer: 2 +$ custom -v -v -v +verbose: 3 +$ custom -c a -c b -c c +[ 'a', 'b', 'c' ] +$ custom --list x,y,z +[ 'x', 'y', 'z' ] +``` + +### Required option + +You may specify a required (mandatory) option using `.requiredOption`. The option must be specified on the command line, or by having a default value. The method is otherwise the same as `.option` in format, taking flags and description, and optional default value or custom processing. + +```js +const program = require('commander'); + +program + .requiredOption('-c, --cheese ', 'pizza must have cheese'); + +program.parse(process.argv); +``` + +``` +$ pizza +error: required option '-c, --cheese ' not specified +``` + +### Version option + +The optional `version` method adds handling for displaying the command version. 
The default option flags are `-V` and `--version`, and when present the command prints the version number and exits.

```js
program.version('0.0.1');
```

```bash
$ ./examples/pizza -V
0.0.1
```

You may change the flags and description by passing additional parameters to the `version` method, using
the same syntax for flags as the `option` method. The version flags can be named anything, but a long name is required.

```js
program.version('0.0.1', '-v, --vers', 'output the current version');
```

## Commands

You can specify (sub)commands for your top-level command using `.command`. There are two ways these can be implemented: using an action handler attached to the command, or as a separate executable file (described in more detail later). In the first parameter to `.command` you specify the command name and any command arguments. The arguments may be `<required>` or `[optional]`, and the last argument may also be `variadic...`.

For example:

```js
// Command implemented using action handler (description is supplied separately to `.command`)
// Returns new command for configuring.
program
  .command('clone <source> [destination]')
  .description('clone a repository into a newly created directory')
  .action((source, destination) => {
    console.log('clone command called');
  });

// Command implemented using separate executable file (description is second parameter to `.command`)
// Returns top-level command for adding more commands.
program
  .command('start <service>', 'start named service')
  .command('stop [service]', 'stop named service, or all if no name supplied');
```

### Specify the argument syntax

You use `.arguments` to specify the arguments for the top-level command, and for subcommands they are included in the `.command` call. Angled brackets (e.g. `<required>`) indicate required input. Square brackets (e.g. `[optional]`) indicate optional input.

```js
const program = require('commander');

program
  .version('0.1.0')
  .arguments('<cmd> [env]')
  .action(function (cmd, env) {
    cmdValue = cmd;
    envValue = env;
  });

program.parse(process.argv);

if (typeof cmdValue === 'undefined') {
  console.error('no command given!');
  process.exit(1);
}
console.log('command:', cmdValue);
console.log('environment:', envValue || "no environment given");
```

 The last argument of a command can be variadic, and only the last argument. To make an argument variadic you
 append `...` to the argument name. For example:

```js
const program = require('commander');

program
  .version('0.1.0')
  .command('rmdir <dir> [otherDirs...]')
  .action(function (dir, otherDirs) {
    console.log('rmdir %s', dir);
    if (otherDirs) {
      otherDirs.forEach(function (oDir) {
        console.log('rmdir %s', oDir);
      });
    }
  });

program.parse(process.argv);
```

The variadic argument is passed to the action handler as an array. (And this also applies to `program.args`.)

### Action handler (sub)commands

You can add options to a command that uses an action handler.
The action handler gets passed a parameter for each argument you declared, and one additional argument which is the
command object itself. This command argument has the values for the command-specific options added as properties.

```js
const program = require('commander');

program
  .command('rm <dir>')
  .option('-r, --recursive', 'Remove recursively')
  .action(function (dir, cmdObj) {
    console.log('remove ' + dir + (cmdObj.recursive ?
' recursively' : '')) + }) + +program.parse(process.argv) +``` + +You may supply an `async` action handler, in which case you call `.parseAsync` rather than `.parse`. + +```js +async function run() { /* code goes here */ } + +async function main() { + program + .command('run') + .action(run); + await program.parseAsync(process.argv); +} +``` + +A command's options on the command line are validated when the command is used. Any unknown options will be reported as an error. However, if an action-based command does not define an action, then the options are not validated. + +Configuration options can be passed with the call to `.command()`. Specifying `true` for `opts.noHelp` will remove the command from the generated help output. + +### Git-style executable (sub)commands + +When `.command()` is invoked with a description argument, this tells commander that you're going to use separate executables for sub-commands, much like `git(1)` and other popular tools. +Commander will search the executables in the directory of the entry script (like `./examples/pm`) with the name `program-subcommand`, like `pm-install`, `pm-search`. +You can specify a custom name with the `executableFile` configuration option. + +You handle the options for an executable (sub)command in the executable, and don't declare them at the top-level. + +```js +// file: ./examples/pm +const program = require('commander'); + +program + .version('0.1.0') + .command('install [name]', 'install one or more packages') + .command('search [query]', 'search with optional query') + .command('update', 'update installed packages', {executableFile: 'myUpdateSubCommand'}) + .command('list', 'list packages installed', {isDefault: true}) + .parse(process.argv); +``` + +Configuration options can be passed with the call to `.command()`. Specifying `true` for `opts.noHelp` will remove the command from the generated help output. Specifying `true` for `opts.isDefault` will run the subcommand if no other subcommand is specified. +Specifying a name with `executableFile` will override the default constructed name. + +If the program is designed to be installed globally, make sure the executables have proper modes, like `755`. + +## Automated --help + + The help information is auto-generated based on the information commander already knows about your program, so the following `--help` info is for free: + +```bash +$ ./examples/pizza --help +Usage: pizza [options] + +An application for pizzas ordering + +Options: + -V, --version output the version number + -p, --peppers Add peppers + -P, --pineapple Add pineapple + -b, --bbq Add bbq sauce + -c, --cheese Add the specified type of cheese (default: "marble") + -C, --no-cheese You do not want any cheese + -h, --help output usage information +``` + +### Custom help + + You can display arbitrary `-h, --help` information + by listening for "--help". Commander will automatically + exit once you are done so that the remainder of your program + does not execute causing undesired behaviors, for example + in the following executable "stuff" will not output when + `--help` is used. 
+ +```js +#!/usr/bin/env node + +const program = require('commander'); + +program + .version('0.1.0') + .option('-f, --foo', 'enable some foo') + .option('-b, --bar', 'enable some bar') + .option('-B, --baz', 'enable some baz'); + +// must be before .parse() since +// node's emit() is immediate + +program.on('--help', function(){ + console.log('') + console.log('Examples:'); + console.log(' $ custom-help --help'); + console.log(' $ custom-help -h'); +}); + +program.parse(process.argv); + +console.log('stuff'); +``` + +Yields the following help output when `node script-name.js -h` or `node script-name.js --help` are run: + +```Text +Usage: custom-help [options] + +Options: + -h, --help output usage information + -V, --version output the version number + -f, --foo enable some foo + -b, --bar enable some bar + -B, --baz enable some baz + +Examples: + $ custom-help --help + $ custom-help -h +``` + +### .usage and .name + +These allow you to customise the usage description in the first line of the help. The name is otherwise +deduced from the (full) program arguments. Given: + +```js +program + .name("my-command") + .usage("[global options] command") +``` + +The help will start with: + +```Text +Usage: my-command [global options] command +``` + +### .outputHelp(cb) + +Output help information without exiting. +Optional callback cb allows post-processing of help text before it is displayed. + +If you want to display help by default (e.g. if no command was provided), you can use something like: + +```js +const program = require('commander'); +const colors = require('colors'); + +program + .version('0.1.0') + .command('getstream [url]', 'get stream URL') + .parse(process.argv); + +if (!process.argv.slice(2).length) { + program.outputHelp(make_red); +} + +function make_red(txt) { + return colors.red(txt); //display the help text in red on the console +} +``` + +### .helpOption(flags, description) + + Override the default help flags and description. + +```js +program + .helpOption('-e, --HELP', 'read more information'); +``` + +### .help(cb) + + Output help information and exit immediately. + Optional callback cb allows post-processing of help text before it is displayed. + +## Custom event listeners + + You can execute custom actions by listening to command and option events. + +```js +program.on('option:verbose', function () { + process.env.VERBOSE = this.verbose; +}); + +// error on unknown commands +program.on('command:*', function () { + console.error('Invalid command: %s\nSee --help for a list of available commands.', program.args.join(' ')); + process.exit(1); +}); +``` + +## Bits and pieces + +### Avoiding option name clashes + +The original and default behaviour is that the option values are stored +as properties on the program, and the action handler is passed a +command object with the options values stored as properties. +This is very convenient to code, but the downside is possible clashes with +existing properties of Command. 
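As a minimal sketch of how such a clash can surface (the `--name` option here is illustrative and not part of the original examples): an option whose camel-cased attribute name matches an existing `Command` member ends up shadowing it once the parsed value is stored as a property.

```js
const commander = require('commander');
const program = new commander.Command();

// `name` is already a method on Command, so storing this option value
// as a property will shadow the method after parsing.
program.option('-n, --name <name>', 'option that clashes with Command#name()');

console.log(typeof program.name); // 'function' (the built-in method)

program.parse(['node', 'demo', '--name', 'pizza']);

console.log(program.name);        // 'pizza' (the method is now shadowed)
console.log(program.opts().name); // 'pizza' (same value via .opts())
```

Storing the option values separately, as described next, keeps them out of the command's own property space and avoids this kind of collision.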
+ +There are two new routines to change the behaviour, and the default behaviour may change in the future: + +- `storeOptionsAsProperties`: whether to store option values as properties on command object, or store separately (specify false) and access using `.opts()` +- `passCommandToAction`: whether to pass command to action handler, +or just the options (specify false) + +```js +// file: ./examples/storeOptionsAsProperties.action.js +program + .storeOptionsAsProperties(false) + .passCommandToAction(false); + +program + .name('my-program-name') + .option('-n,--name '); + +program + .command('show') + .option('-a,--action ') + .action((options) => { + console.log(options.action); + }); + +program.parse(process.argv); + +const programOptions = program.opts(); +console.log(programOptions.name); +``` + +### TypeScript + +The Commander package includes its TypeScript Definition file, but also requires the node types which you need to install yourself. e.g. + +```bash +npm install commander +npm install --save-dev @types/node +``` + +If you use `ts-node` and git-style sub-commands written as `.ts` files, you need to call your program through node to get the sub-commands called correctly. e.g. + +```bash +node -r ts-node/register pm.ts +``` + +### Node options such as `--harmony` + +You can enable `--harmony` option in two ways: + +- Use `#! /usr/bin/env node --harmony` in the sub-commands scripts. (Note Windows does not support this pattern.) +- Use the `--harmony` option when call the command, like `node --harmony examples/pm publish`. The `--harmony` option will be preserved when spawning sub-command process. + +### Node debugging + +If you are using the node inspector for [debugging](https://nodejs.org/en/docs/guides/debugging-getting-started/) git-style executable (sub)commands using `node --inspect` et al, +the inspector port is incremented by 1 for the spawned subcommand. + +### Override exit handling + +By default Commander calls `process.exit` when it detects errors, or after displaying the help or version. You can override +this behaviour and optionally supply a callback. The default override throws a `CommanderError`. + +The override callback is passed a `CommanderError` with properties `exitCode` number, `code` string, and `message`. The default override behaviour is to throw the error, except for async handling of executable subcommand completion which carries on. The normal display of error messages or version or help +is not affected by the override which is called after the display. + +``` js +program.exitOverride(); + +try { + program.parse(process.argv); +} catch (err) { + // custom processing... +} +``` + +## Examples + +```js +const program = require('commander'); + +program + .version('0.1.0') + .option('-C, --chdir ', 'change the working directory') + .option('-c, --config ', 'set config path. 
defaults to ./deploy.conf') + .option('-T, --no-tests', 'ignore test hook'); + +program + .command('setup [env]') + .description('run setup commands for all envs') + .option("-s, --setup_mode [mode]", "Which setup mode to use") + .action(function(env, options){ + const mode = options.setup_mode || "normal"; + env = env || 'all'; + console.log('setup for %s env(s) with %s mode', env, mode); + }); + +program + .command('exec ') + .alias('ex') + .description('execute the given remote cmd') + .option("-e, --exec_mode ", "Which exec mode to use") + .action(function(cmd, options){ + console.log('exec "%s" using %s mode', cmd, options.exec_mode); + }).on('--help', function() { + console.log(''); + console.log('Examples:'); + console.log(''); + console.log(' $ deploy exec sequential'); + console.log(' $ deploy exec async'); + }); + +program + .command('*') + .action(function(env){ + console.log('deploying "%s"', env); + }); + +program.parse(process.argv); +``` + +More Demos can be found in the [examples](https://github.com/tj/commander.js/tree/master/examples) directory. + +## License + +[MIT](https://github.com/tj/commander.js/blob/master/LICENSE) + +## Support + +Commander 4.x is supported on Node 8 and above, and is likely to work with Node 6 but not tested. +(For versions of Node below Node 6, use Commander 3.x or 2.x.) + +The main forum for free and community support is the project [Issues](https://github.com/tj/commander.js/issues) on GitHub. + +### Commander for enterprise + +Available as part of the Tidelift Subscription + +The maintainers of Commander and thousands of other packages are working with Tidelift to deliver commercial support and maintenance for the open source dependencies you use to build your applications. Save time, reduce risk, and improve code health, while paying the maintainers of the exact dependencies you use. [Learn more.](https://tidelift.com/subscription/pkg/npm-commander?utm_source=npm-commander&utm_medium=referral&utm_campaign=enterprise&utm_term=repo) diff --git a/node_modules/commander/index.js b/node_modules/commander/index.js new file mode 100644 index 0000000..37d20cc --- /dev/null +++ b/node_modules/commander/index.js @@ -0,0 +1,1649 @@ +/** + * Module dependencies. + */ + +var EventEmitter = require('events').EventEmitter; +var spawn = require('child_process').spawn; +var path = require('path'); +var dirname = path.dirname; +var basename = path.basename; +var fs = require('fs'); + +/** + * Inherit `Command` from `EventEmitter.prototype`. + */ + +require('util').inherits(Command, EventEmitter); + +/** + * Expose the root command. + */ + +exports = module.exports = new Command(); + +/** + * Expose `Command`. + */ + +exports.Command = Command; + +/** + * Expose `Option`. + */ + +exports.Option = Option; + +/** + * Initialize a new `Option` with the given `flags` and `description`. + * + * @param {String} flags + * @param {String} description + * @api public + */ + +function Option(flags, description) { + this.flags = flags; + this.required = flags.indexOf('<') >= 0; // A value must be supplied when the option is specified. + this.optional = flags.indexOf('[') >= 0; // A value is optional when the option is specified. + this.mandatory = false; // The option must have a value after parsing, which usually means it must be specified on command line. 
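+  // The flags string is parsed below: `negate` is set for a `no-` prefix (e.g. `--no-cheese`), then the short and long flags are split out.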
+ this.negate = flags.indexOf('-no-') !== -1; + flags = flags.split(/[ ,|]+/); + if (flags.length > 1 && !/^[[<]/.test(flags[1])) this.short = flags.shift(); + this.long = flags.shift(); + this.description = description || ''; +} + +/** + * Return option name. + * + * @return {String} + * @api private + */ + +Option.prototype.name = function() { + return this.long.replace(/^--/, ''); +}; + +/** + * Return option name, in a camelcase format that can be used + * as a object attribute key. + * + * @return {String} + * @api private + */ + +Option.prototype.attributeName = function() { + return camelcase(this.name().replace(/^no-/, '')); +}; + +/** + * Check if `arg` matches the short or long flag. + * + * @param {String} arg + * @return {Boolean} + * @api private + */ + +Option.prototype.is = function(arg) { + return this.short === arg || this.long === arg; +}; + +/** + * CommanderError class + * @class + */ +class CommanderError extends Error { + /** + * Constructs the CommanderError class + * @param {Number} exitCode suggested exit code which could be used with process.exit + * @param {String} code an id string representing the error + * @param {String} message human-readable description of the error + * @constructor + */ + constructor(exitCode, code, message) { + super(message); + // properly capture stack trace in Node.js + Error.captureStackTrace(this, this.constructor); + this.name = this.constructor.name; + this.code = code; + this.exitCode = exitCode; + } +} + +exports.CommanderError = CommanderError; + +/** + * Initialize a new `Command`. + * + * @param {String} [name] + * @api public + */ + +function Command(name) { + this.commands = []; + this.options = []; + this._execs = new Set(); + this._allowUnknownOption = false; + this._args = []; + this._name = name || ''; + this._optionValues = {}; + this._storeOptionsAsProperties = true; // backwards compatible by default + this._passCommandToAction = true; // backwards compatible by default + this._actionResults = []; + + this._helpFlags = '-h, --help'; + this._helpDescription = 'output usage information'; + this._helpShortFlag = '-h'; + this._helpLongFlag = '--help'; +} + +/** + * Define a command. + * + * There are two styles of command: pay attention to where to put the description. 
+ * + * Examples: + * + * // Command implemented using action handler (description is supplied separately to `.command`) + * program + * .command('clone [destination]') + * .description('clone a repository into a newly created directory') + * .action((source, destination) => { + * console.log('clone command called'); + * }); + * + * // Command implemented using separate executable file (description is second parameter to `.command`) + * program + * .command('start ', 'start named service') + * .command('stop [service]', 'stop named service, or all if no name supplied'); + * + * @param {string} nameAndArgs - command name and arguments, args are `` or `[optional]` and last may also be `variadic...` + * @param {Object|string} [actionOptsOrExecDesc] - configuration options (for action), or description (for executable) + * @param {Object} [execOpts] - configuration options (for executable) + * @return {Command} returns new command for action handler, or top-level command for executable command + * @api public + */ + +Command.prototype.command = function(nameAndArgs, actionOptsOrExecDesc, execOpts) { + var desc = actionOptsOrExecDesc; + var opts = execOpts; + if (typeof desc === 'object' && desc !== null) { + opts = desc; + desc = null; + } + opts = opts || {}; + var args = nameAndArgs.split(/ +/); + var cmd = new Command(args.shift()); + + if (desc) { + cmd.description(desc); + this.executables = true; + this._execs.add(cmd._name); + if (opts.isDefault) this.defaultExecutable = cmd._name; + } + cmd._noHelp = !!opts.noHelp; + cmd._helpFlags = this._helpFlags; + cmd._helpDescription = this._helpDescription; + cmd._helpShortFlag = this._helpShortFlag; + cmd._helpLongFlag = this._helpLongFlag; + cmd._exitCallback = this._exitCallback; + cmd._storeOptionsAsProperties = this._storeOptionsAsProperties; + cmd._passCommandToAction = this._passCommandToAction; + + cmd._executableFile = opts.executableFile; // Custom name for executable file + this.commands.push(cmd); + cmd.parseExpectedArgs(args); + cmd.parent = this; + + if (desc) return this; + return cmd; +}; + +/** + * Define argument syntax for the top-level command. + * + * @api public + */ + +Command.prototype.arguments = function(desc) { + return this.parseExpectedArgs(desc.split(/ +/)); +}; + +/** + * Add an implicit `help [cmd]` subcommand + * which invokes `--help` for the given command. + * + * @api private + */ + +Command.prototype.addImplicitHelpCommand = function() { + this.command('help [cmd]', 'display help for [cmd]'); +}; + +/** + * Parse expected `args`. + * + * For example `["[type]"]` becomes `[{ required: false, name: 'type' }]`. + * + * @param {Array} args + * @return {Command} for chaining + * @api public + */ + +Command.prototype.parseExpectedArgs = function(args) { + if (!args.length) return; + var self = this; + args.forEach(function(arg) { + var argDetails = { + required: false, + name: '', + variadic: false + }; + + switch (arg[0]) { + case '<': + argDetails.required = true; + argDetails.name = arg.slice(1, -1); + break; + case '[': + argDetails.name = arg.slice(1, -1); + break; + } + + if (argDetails.name.length > 3 && argDetails.name.slice(-3) === '...') { + argDetails.variadic = true; + argDetails.name = argDetails.name.slice(0, -3); + } + if (argDetails.name) { + self._args.push(argDetails); + } + }); + return this; +}; + +/** + * Register callback to use as replacement for calling process.exit. 
+ * + * @param {Function} [fn] optional callback which will be passed a CommanderError, defaults to throwing + * @return {Command} for chaining + * @api public + */ + +Command.prototype.exitOverride = function(fn) { + if (fn) { + this._exitCallback = fn; + } else { + this._exitCallback = function(err) { + if (err.code !== 'commander.executeSubCommandAsync') { + throw err; + } else { + // Async callback from spawn events, not useful to throw. + } + }; + } + return this; +}; + +/** + * Call process.exit, and _exitCallback if defined. + * + * @param {Number} exitCode exit code for using with process.exit + * @param {String} code an id string representing the error + * @param {String} message human-readable description of the error + * @return never + * @api private + */ + +Command.prototype._exit = function(exitCode, code, message) { + if (this._exitCallback) { + this._exitCallback(new CommanderError(exitCode, code, message)); + // Expecting this line is not reached. + } + process.exit(exitCode); +}; + +/** + * Register callback `fn` for the command. + * + * Examples: + * + * program + * .command('help') + * .description('display verbose help') + * .action(function() { + * // output help here + * }); + * + * @param {Function} fn + * @return {Command} for chaining + * @api public + */ + +Command.prototype.action = function(fn) { + var self = this; + var listener = function(args, unknown) { + // Parse any so-far unknown options + args = args || []; + unknown = unknown || []; + + var parsed = self.parseOptions(unknown); + + // Output help if necessary + outputHelpIfRequested(self, parsed.unknown); + self._checkForMissingMandatoryOptions(); + + // If there are still any unknown options, then we simply + // die, unless someone asked for help, in which case we give it + // to them, and then we die. + if (parsed.unknown.length > 0) { + self.unknownOption(parsed.unknown[0]); + } + + // Leftover arguments need to be pushed back. Fixes issue #56 + if (parsed.args.length) args = parsed.args.concat(args); + + self._args.forEach(function(arg, i) { + if (arg.required && args[i] == null) { + self.missingArgument(arg.name); + } else if (arg.variadic) { + if (i !== self._args.length - 1) { + self.variadicArgNotLast(arg.name); + } + + args[i] = args.splice(i); + } + }); + + // The .action callback takes an extra parameter which is the command itself. + var expectedArgsCount = self._args.length; + var actionArgs = args.slice(0, expectedArgsCount); + if (self._passCommandToAction) { + actionArgs[expectedArgsCount] = self; + } else { + actionArgs[expectedArgsCount] = self.opts(); + } + // Add the extra arguments so available too. + if (args.length > expectedArgsCount) { + actionArgs.push(args.slice(expectedArgsCount)); + } + + const actionResult = fn.apply(self, actionArgs); + // Remember result in case it is async. Assume parseAsync getting called on root. + let rootCommand = self; + while (rootCommand.parent) { + rootCommand = rootCommand.parent; + } + rootCommand._actionResults.push(actionResult); + }; + var parent = this.parent || this; + var name = parent === this ? 
'*' : this._name; + parent.on('command:' + name, listener); + if (this._alias) parent.on('command:' + this._alias, listener); + return this; +}; + +/** + * Internal implementation shared by .option() and .requiredOption() + * + * @param {Object} config + * @param {String} flags + * @param {String} description + * @param {Function|*} [fn] - custom option processing function or default vaue + * @param {*} [defaultValue] + * @return {Command} for chaining + * @api private + */ + +Command.prototype._optionEx = function(config, flags, description, fn, defaultValue) { + var self = this, + option = new Option(flags, description), + oname = option.name(), + name = option.attributeName(); + option.mandatory = !!config.mandatory; + + // default as 3rd arg + if (typeof fn !== 'function') { + if (fn instanceof RegExp) { + // This is a bit simplistic (especially no error messages), and probably better handled by caller using custom option processing. + // No longer documented in README, but still present for backwards compatibility. + var regex = fn; + fn = function(val, def) { + var m = regex.exec(val); + return m ? m[0] : def; + }; + } else { + defaultValue = fn; + fn = null; + } + } + + // preassign default value for --no-*, [optional], , or plain flag if boolean value + if (option.negate || option.optional || option.required || typeof defaultValue === 'boolean') { + // when --no-foo we make sure default is true, unless a --foo option is already defined + if (option.negate) { + const positiveLongFlag = option.long.replace(/^--no-/, '--'); + defaultValue = self.optionFor(positiveLongFlag) ? self._getOptionValue(name) : true; + } + // preassign only if we have a default + if (defaultValue !== undefined) { + self._setOptionValue(name, defaultValue); + option.defaultValue = defaultValue; + } + } + + // register the option + this.options.push(option); + + // when it's passed assign the value + // and conditionally invoke the callback + this.on('option:' + oname, function(val) { + // coercion + if (val !== null && fn) { + val = fn(val, self._getOptionValue(name) === undefined ? defaultValue : self._getOptionValue(name)); + } + + // unassigned or boolean value + if (typeof self._getOptionValue(name) === 'boolean' || typeof self._getOptionValue(name) === 'undefined') { + // if no value, negate false, and we have a default, then use it! + if (val == null) { + self._setOptionValue(name, option.negate + ? false + : defaultValue || true); + } else { + self._setOptionValue(name, val); + } + } else if (val !== null) { + // reassign + self._setOptionValue(name, option.negate ? false : val); + } + }); + + return this; +}; + +/** + * Define option with `flags`, `description` and optional + * coercion `fn`. + * + * The `flags` string should contain both the short and long flags, + * separated by comma, a pipe or space. The following are all valid + * all will output this way when `--help` is used. 
+ * + * "-p, --pepper" + * "-p|--pepper" + * "-p --pepper" + * + * Examples: + * + * // simple boolean defaulting to undefined + * program.option('-p, --pepper', 'add pepper'); + * + * program.pepper + * // => undefined + * + * --pepper + * program.pepper + * // => true + * + * // simple boolean defaulting to true (unless non-negated option is also defined) + * program.option('-C, --no-cheese', 'remove cheese'); + * + * program.cheese + * // => true + * + * --no-cheese + * program.cheese + * // => false + * + * // required argument + * program.option('-C, --chdir ', 'change the working directory'); + * + * --chdir /tmp + * program.chdir + * // => "/tmp" + * + * // optional argument + * program.option('-c, --cheese [type]', 'add cheese [marble]'); + * + * @param {String} flags + * @param {String} description + * @param {Function|*} [fn] - custom option processing function or default vaue + * @param {*} [defaultValue] + * @return {Command} for chaining + * @api public + */ + +Command.prototype.option = function(flags, description, fn, defaultValue) { + return this._optionEx({}, flags, description, fn, defaultValue); +}; + +/* + * Add a required option which must have a value after parsing. This usually means + * the option must be specified on the command line. (Otherwise the same as .option().) + * + * The `flags` string should contain both the short and long flags, separated by comma, a pipe or space. + * + * @param {String} flags + * @param {String} description + * @param {Function|*} [fn] - custom option processing function or default vaue + * @param {*} [defaultValue] + * @return {Command} for chaining + * @api public + */ + +Command.prototype.requiredOption = function(flags, description, fn, defaultValue) { + return this._optionEx({ mandatory: true }, flags, description, fn, defaultValue); +}; + +/** + * Allow unknown options on the command line. + * + * @param {Boolean} arg if `true` or omitted, no error will be thrown + * for unknown options. + * @api public + */ +Command.prototype.allowUnknownOption = function(arg) { + this._allowUnknownOption = arguments.length === 0 || arg; + return this; +}; + +/** + * Whether to store option values as properties on command object, + * or store separately (specify false). In both cases the option values can be accessed using .opts(). + * + * @param {boolean} value + * @return {Command} Command for chaining + * @api public + */ + +Command.prototype.storeOptionsAsProperties = function(value) { + this._storeOptionsAsProperties = (value === undefined) || value; + if (this.options.length) { + // This is for programmer, not end user. + console.error('Commander usage error: call storeOptionsAsProperties before adding options'); + } + return this; +}; + +/** + * Whether to pass command to action handler, + * or just the options (specify false). 
+ * + * @param {boolean} value + * @return {Command} Command for chaining + * @api public + */ + +Command.prototype.passCommandToAction = function(value) { + this._passCommandToAction = (value === undefined) || value; + return this; +}; + +/** + * Store option value + * + * @param {String} key + * @param {Object} value + * @api private + */ + +Command.prototype._setOptionValue = function(key, value) { + if (this._storeOptionsAsProperties) { + this[key] = value; + } else { + this._optionValues[key] = value; + } +}; + +/** + * Retrieve option value + * + * @param {String} key + * @return {Object} value + * @api private + */ + +Command.prototype._getOptionValue = function(key) { + if (this._storeOptionsAsProperties) { + return this[key]; + } + return this._optionValues[key]; +}; + +/** + * Parse `argv`, setting options and invoking commands when defined. + * + * @param {Array} argv + * @return {Command} for chaining + * @api public + */ + +Command.prototype.parse = function(argv) { + // implicit help + if (this.executables) this.addImplicitHelpCommand(); + + // store raw args + this.rawArgs = argv; + + // guess name + this._name = this._name || basename(argv[1], '.js'); + + // github-style sub-commands with no sub-command + if (this.executables && argv.length < 3 && !this.defaultExecutable) { + // this user needs help + argv.push(this._helpLongFlag); + } + + // process argv + var normalized = this.normalize(argv.slice(2)); + var parsed = this.parseOptions(normalized); + var args = this.args = parsed.args; + + var result = this.parseArgs(this.args, parsed.unknown); + + if (args[0] === 'help' && args.length === 1) this.help(); + + // Note for future: we could return early if we found an action handler in parseArgs, as none of following code needed? + + // --help + if (args[0] === 'help') { + args[0] = args[1]; + args[1] = this._helpLongFlag; + } else { + // If calling through to executable subcommand we could check for help flags before failing, + // but a somewhat unlikely case since program options not passed to executable subcommands. + // Wait for reports to see if check needed and what usage pattern is. + this._checkForMissingMandatoryOptions(); + } + + // executable sub-commands + // (Debugging note for future: args[0] is not right if an action has been called) + var name = result.args[0]; + var subCommand = null; + + // Look for subcommand + if (name) { + subCommand = this.commands.find(function(command) { + return command._name === name; + }); + } + + // Look for alias + if (!subCommand && name) { + subCommand = this.commands.find(function(command) { + return command.alias() === name; + }); + if (subCommand) { + name = subCommand._name; + args[0] = name; + } + } + + // Look for default subcommand + if (!subCommand && this.defaultExecutable) { + name = this.defaultExecutable; + args.unshift(name); + subCommand = this.commands.find(function(command) { + return command._name === name; + }); + } + + if (this._execs.has(name)) { + return this.executeSubCommand(argv, args, parsed.unknown, subCommand ? subCommand._executableFile : undefined); + } + + return result; +}; + +/** + * Parse `argv`, setting options and invoking commands when defined. + * + * Use parseAsync instead of parse if any of your action handlers are async. Returns a Promise. + * + * @param {Array} argv + * @return {Promise} + * @api public + */ +Command.prototype.parseAsync = function(argv) { + this.parse(argv); + return Promise.all(this._actionResults); +}; + +/** + * Execute a sub-command executable. 
+ * + * @param {Array} argv + * @param {Array} args + * @param {Array} unknown + * @param {String} executableFile + * @api private + */ + +Command.prototype.executeSubCommand = function(argv, args, unknown, executableFile) { + args = args.concat(unknown); + + if (!args.length) this.help(); + + var isExplicitJS = false; // Whether to use node to launch "executable" + + // executable + var pm = argv[1]; + // name of the subcommand, like `pm-install` + var bin = basename(pm, path.extname(pm)) + '-' + args[0]; + if (executableFile != null) { + bin = executableFile; + // Check for same extensions as we scan for below so get consistent launch behaviour. + var executableExt = path.extname(executableFile); + isExplicitJS = executableExt === '.js' || executableExt === '.ts' || executableExt === '.mjs'; + } + + // In case of globally installed, get the base dir where executable + // subcommand file should be located at + var baseDir; + + var resolvedLink = fs.realpathSync(pm); + + baseDir = dirname(resolvedLink); + + // prefer local `./` to bin in the $PATH + var localBin = path.join(baseDir, bin); + + // whether bin file is a js script with explicit `.js` or `.ts` extension + if (exists(localBin + '.js')) { + bin = localBin + '.js'; + isExplicitJS = true; + } else if (exists(localBin + '.ts')) { + bin = localBin + '.ts'; + isExplicitJS = true; + } else if (exists(localBin + '.mjs')) { + bin = localBin + '.mjs'; + isExplicitJS = true; + } else if (exists(localBin)) { + bin = localBin; + } + + args = args.slice(1); + + var proc; + if (process.platform !== 'win32') { + if (isExplicitJS) { + args.unshift(bin); + // add executable arguments to spawn + args = incrementNodeInspectorPort(process.execArgv).concat(args); + + proc = spawn(process.argv[0], args, { stdio: 'inherit' }); + } else { + proc = spawn(bin, args, { stdio: 'inherit' }); + } + } else { + args.unshift(bin); + // add executable arguments to spawn + args = incrementNodeInspectorPort(process.execArgv).concat(args); + proc = spawn(process.execPath, args, { stdio: 'inherit' }); + } + + var signals = ['SIGUSR1', 'SIGUSR2', 'SIGTERM', 'SIGINT', 'SIGHUP']; + signals.forEach(function(signal) { + process.on(signal, function() { + if (proc.killed === false && proc.exitCode === null) { + proc.kill(signal); + } + }); + }); + + // By default terminate process when spawned process terminates. + // Suppressing the exit if exitCallback defined is a bit messy and of limited use, but does allow process to stay running! + const exitCallback = this._exitCallback; + if (!exitCallback) { + proc.on('close', process.exit.bind(process)); + } else { + proc.on('close', () => { + exitCallback(new CommanderError(process.exitCode || 0, 'commander.executeSubCommandAsync', '(close)')); + }); + } + proc.on('error', function(err) { + if (err.code === 'ENOENT') { + console.error('error: %s(1) does not exist, try --help', bin); + } else if (err.code === 'EACCES') { + console.error('error: %s(1) not executable. try chmod or run with root', bin); + } + if (!exitCallback) { + process.exit(1); + } else { + const wrappedError = new CommanderError(1, 'commander.executeSubCommandAsync', '(error)'); + wrappedError.nestedError = err; + exitCallback(wrappedError); + } + }); + + // Store the reference to the child process + this.runningCommand = proc; +}; + +/** + * Normalize `args`, splitting joined short flags. For example + * the arg "-abc" is equivalent to "-a -b -c". + * This also normalizes equal sign and splits "--abc=def" into "--abc def". 
+ * + * @param {Array} args + * @return {Array} + * @api private + */ + +Command.prototype.normalize = function(args) { + var ret = [], + arg, + lastOpt, + index, + short, + opt; + + for (var i = 0, len = args.length; i < len; ++i) { + arg = args[i]; + if (i > 0) { + lastOpt = this.optionFor(args[i - 1]); + } + + if (arg === '--') { + // Honor option terminator + ret = ret.concat(args.slice(i)); + break; + } else if (lastOpt && lastOpt.required) { + ret.push(arg); + } else if (arg.length > 2 && arg[0] === '-' && arg[1] !== '-') { + short = arg.slice(0, 2); + opt = this.optionFor(short); + if (opt && (opt.required || opt.optional)) { + ret.push(short); + ret.push(arg.slice(2)); + } else { + arg.slice(1).split('').forEach(function(c) { + ret.push('-' + c); + }); + } + } else if (/^--/.test(arg) && ~(index = arg.indexOf('='))) { + ret.push(arg.slice(0, index), arg.slice(index + 1)); + } else { + ret.push(arg); + } + } + + return ret; +}; + +/** + * Parse command `args`. + * + * When listener(s) are available those + * callbacks are invoked, otherwise the "*" + * event is emitted and those actions are invoked. + * + * @param {Array} args + * @return {Command} for chaining + * @api private + */ + +Command.prototype.parseArgs = function(args, unknown) { + var name; + + if (args.length) { + name = args[0]; + if (this.listeners('command:' + name).length) { + this.emit('command:' + args.shift(), args, unknown); + } else { + this.emit('command:*', args, unknown); + } + } else { + outputHelpIfRequested(this, unknown); + + // If there were no args and we have unknown options, + // then they are extraneous and we need to error. + if (unknown.length > 0 && !this.defaultExecutable) { + this.unknownOption(unknown[0]); + } + if (this.commands.length === 0 && + this._args.filter(function(a) { return a.required; }).length === 0) { + this.emit('command:*'); + } + } + + return this; +}; + +/** + * Return an option matching `arg` if any. + * + * @param {String} arg + * @return {Option} + * @api private + */ + +Command.prototype.optionFor = function(arg) { + for (var i = 0, len = this.options.length; i < len; ++i) { + if (this.options[i].is(arg)) { + return this.options[i]; + } + } +}; + +/** + * Display an error message if a mandatory option does not have a value. + * + * @api private + */ + +Command.prototype._checkForMissingMandatoryOptions = function() { + // Walk up hierarchy so can call from action handler after checking for displaying help. + for (var cmd = this; cmd; cmd = cmd.parent) { + cmd.options.forEach((anOption) => { + if (anOption.mandatory && (cmd._getOptionValue(anOption.attributeName()) === undefined)) { + cmd.missingMandatoryOptionValue(anOption); + } + }); + } +}; + +/** + * Parse options from `argv` returning `argv` + * void of these options. 
+ * + * @param {Array} argv + * @return {{args: Array, unknown: Array}} + * @api public + */ + +Command.prototype.parseOptions = function(argv) { + var args = [], + len = argv.length, + literal, + option, + arg; + + var unknownOptions = []; + + // parse options + for (var i = 0; i < len; ++i) { + arg = argv[i]; + + // literal args after -- + if (literal) { + args.push(arg); + continue; + } + + if (arg === '--') { + literal = true; + continue; + } + + // find matching Option + option = this.optionFor(arg); + + // option is defined + if (option) { + // requires arg + if (option.required) { + arg = argv[++i]; + if (arg == null) return this.optionMissingArgument(option); + this.emit('option:' + option.name(), arg); + // optional arg + } else if (option.optional) { + arg = argv[i + 1]; + if (arg == null || (arg[0] === '-' && arg !== '-')) { + arg = null; + } else { + ++i; + } + this.emit('option:' + option.name(), arg); + // flag + } else { + this.emit('option:' + option.name()); + } + continue; + } + + // looks like an option + if (arg.length > 1 && arg[0] === '-') { + unknownOptions.push(arg); + + // If the next argument looks like it might be + // an argument for this option, we pass it on. + // If it isn't, then it'll simply be ignored + if ((i + 1) < argv.length && (argv[i + 1][0] !== '-' || argv[i + 1] === '-')) { + unknownOptions.push(argv[++i]); + } + continue; + } + + // arg + args.push(arg); + } + + return { args: args, unknown: unknownOptions }; +}; + +/** + * Return an object containing options as key-value pairs + * + * @return {Object} + * @api public + */ +Command.prototype.opts = function() { + if (this._storeOptionsAsProperties) { + // Preserve original behaviour so backwards compatible when still using properties + var result = {}, + len = this.options.length; + + for (var i = 0; i < len; i++) { + var key = this.options[i].attributeName(); + result[key] = key === this._versionOptionName ? this._version : this[key]; + } + return result; + } + + return this._optionValues; +}; + +/** + * Argument `name` is missing. + * + * @param {String} name + * @api private + */ + +Command.prototype.missingArgument = function(name) { + const message = `error: missing required argument '${name}'`; + console.error(message); + this._exit(1, 'commander.missingArgument', message); +}; + +/** + * `Option` is missing an argument, but received `flag` or nothing. + * + * @param {Option} option + * @param {String} [flag] + * @api private + */ + +Command.prototype.optionMissingArgument = function(option, flag) { + let message; + if (flag) { + message = `error: option '${option.flags}' argument missing, got '${flag}'`; + } else { + message = `error: option '${option.flags}' argument missing`; + } + console.error(message); + this._exit(1, 'commander.optionMissingArgument', message); +}; + +/** + * `Option` does not have a value, and is a mandatory option. + * + * @param {Option} option + * @api private + */ + +Command.prototype.missingMandatoryOptionValue = function(option) { + const message = `error: required option '${option.flags}' not specified`; + console.error(message); + this._exit(1, 'commander.missingMandatoryOptionValue', message); +}; + +/** + * Unknown option `flag`. 
+ * + * @param {String} flag + * @api private + */ + +Command.prototype.unknownOption = function(flag) { + if (this._allowUnknownOption) return; + const message = `error: unknown option '${flag}'`; + console.error(message); + this._exit(1, 'commander.unknownOption', message); +}; + +/** + * Variadic argument with `name` is not the last argument as required. + * + * @param {String} name + * @api private + */ + +Command.prototype.variadicArgNotLast = function(name) { + const message = `error: variadic arguments must be last '${name}'`; + console.error(message); + this._exit(1, 'commander.variadicArgNotLast', message); +}; + +/** + * Set the program version to `str`. + * + * This method auto-registers the "-V, --version" flag + * which will print the version number when passed. + * + * You can optionally supply the flags and description to override the defaults. + * + * @param {String} str + * @param {String} [flags] + * @param {String} [description] + * @return {Command} for chaining + * @api public + */ + +Command.prototype.version = function(str, flags, description) { + if (arguments.length === 0) return this._version; + this._version = str; + flags = flags || '-V, --version'; + description = description || 'output the version number'; + var versionOption = new Option(flags, description); + this._versionOptionName = versionOption.long.substr(2) || 'version'; + this.options.push(versionOption); + var self = this; + this.on('option:' + this._versionOptionName, function() { + process.stdout.write(str + '\n'); + self._exit(0, 'commander.version', str); + }); + return this; +}; + +/** + * Set the description to `str`. + * + * @param {String} str + * @param {Object} [argsDescription] + * @return {String|Command} + * @api public + */ + +Command.prototype.description = function(str, argsDescription) { + if (arguments.length === 0) return this._description; + this._description = str; + this._argsDescription = argsDescription; + return this; +}; + +/** + * Set an alias for the command + * + * @param {String} alias + * @return {String|Command} + * @api public + */ + +Command.prototype.alias = function(alias) { + var command = this; + if (this.commands.length !== 0) { + command = this.commands[this.commands.length - 1]; + } + + if (arguments.length === 0) return command._alias; + + if (alias === command._name) throw new Error('Command alias can\'t be the same as its name'); + + command._alias = alias; + return this; +}; + +/** + * Set / get the command usage `str`. + * + * @param {String} [str] + * @return {String|Command} + * @api public + */ + +Command.prototype.usage = function(str) { + var args = this._args.map(function(arg) { + return humanReadableArgName(arg); + }); + + var usage = '[options]' + + (this.commands.length ? ' [command]' : '') + + (this._args.length ? ' ' + args.join(' ') : ''); + + if (arguments.length === 0) return this._usage || usage; + this._usage = str; + + return this; +}; + +/** + * Get or set the name of the command + * + * @param {String} [str] + * @return {String|Command} + * @api public + */ + +Command.prototype.name = function(str) { + if (arguments.length === 0) return this._name; + this._name = str; + return this; +}; + +/** + * Return prepared commands. 
+ * + * @return {Array} + * @api private + */ + +Command.prototype.prepareCommands = function() { + return this.commands.filter(function(cmd) { + return !cmd._noHelp; + }).map(function(cmd) { + var args = cmd._args.map(function(arg) { + return humanReadableArgName(arg); + }).join(' '); + + return [ + cmd._name + + (cmd._alias ? '|' + cmd._alias : '') + + (cmd.options.length ? ' [options]' : '') + + (args ? ' ' + args : ''), + cmd._description + ]; + }); +}; + +/** + * Return the largest command length. + * + * @return {Number} + * @api private + */ + +Command.prototype.largestCommandLength = function() { + var commands = this.prepareCommands(); + return commands.reduce(function(max, command) { + return Math.max(max, command[0].length); + }, 0); +}; + +/** + * Return the largest option length. + * + * @return {Number} + * @api private + */ + +Command.prototype.largestOptionLength = function() { + var options = [].slice.call(this.options); + options.push({ + flags: this._helpFlags + }); + + return options.reduce(function(max, option) { + return Math.max(max, option.flags.length); + }, 0); +}; + +/** + * Return the largest arg length. + * + * @return {Number} + * @api private + */ + +Command.prototype.largestArgLength = function() { + return this._args.reduce(function(max, arg) { + return Math.max(max, arg.name.length); + }, 0); +}; + +/** + * Return the pad width. + * + * @return {Number} + * @api private + */ + +Command.prototype.padWidth = function() { + var width = this.largestOptionLength(); + if (this._argsDescription && this._args.length) { + if (this.largestArgLength() > width) { + width = this.largestArgLength(); + } + } + + if (this.commands && this.commands.length) { + if (this.largestCommandLength() > width) { + width = this.largestCommandLength(); + } + } + + return width; +}; + +/** + * Return help for options. + * + * @return {String} + * @api private + */ + +Command.prototype.optionHelp = function() { + var width = this.padWidth(); + + var columns = process.stdout.columns || 80; + var descriptionWidth = columns - width - 4; + + // Append the help information + return this.options.map(function(option) { + const fullDesc = option.description + + ((!option.negate && option.defaultValue !== undefined) ? ' (default: ' + JSON.stringify(option.defaultValue) + ')' : ''); + return pad(option.flags, width) + ' ' + optionalWrap(fullDesc, descriptionWidth, width + 2); + }).concat([pad(this._helpFlags, width) + ' ' + optionalWrap(this._helpDescription, descriptionWidth, width + 2)]) + .join('\n'); +}; + +/** + * Return command help documentation. + * + * @return {String} + * @api private + */ + +Command.prototype.commandHelp = function() { + if (!this.commands.length) return ''; + + var commands = this.prepareCommands(); + var width = this.padWidth(); + + var columns = process.stdout.columns || 80; + var descriptionWidth = columns - width - 4; + + return [ + 'Commands:', + commands.map(function(cmd) { + var desc = cmd[1] ? ' ' + cmd[1] : ''; + return (desc ? pad(cmd[0], width) : cmd[0]) + optionalWrap(desc, descriptionWidth, width + 2); + }).join('\n').replace(/^/gm, ' '), + '' + ].join('\n'); +}; + +/** + * Return program help documentation. 
+ * + * @return {String} + * @api private + */ + +Command.prototype.helpInformation = function() { + var desc = []; + if (this._description) { + desc = [ + this._description, + '' + ]; + + var argsDescription = this._argsDescription; + if (argsDescription && this._args.length) { + var width = this.padWidth(); + var columns = process.stdout.columns || 80; + var descriptionWidth = columns - width - 5; + desc.push('Arguments:'); + desc.push(''); + this._args.forEach(function(arg) { + desc.push(' ' + pad(arg.name, width) + ' ' + wrap(argsDescription[arg.name], descriptionWidth, width + 4)); + }); + desc.push(''); + } + } + + var cmdName = this._name; + if (this._alias) { + cmdName = cmdName + '|' + this._alias; + } + var parentCmdNames = ''; + for (var parentCmd = this.parent; parentCmd; parentCmd = parentCmd.parent) { + parentCmdNames = parentCmd.name() + ' ' + parentCmdNames; + } + var usage = [ + 'Usage: ' + parentCmdNames + cmdName + ' ' + this.usage(), + '' + ]; + + var cmds = []; + var commandHelp = this.commandHelp(); + if (commandHelp) cmds = [commandHelp]; + + var options = [ + 'Options:', + '' + this.optionHelp().replace(/^/gm, ' '), + '' + ]; + + return usage + .concat(desc) + .concat(options) + .concat(cmds) + .join('\n'); +}; + +/** + * Output help information for this command. + * + * When listener(s) are available for the helpLongFlag + * those callbacks are invoked. + * + * @api public + */ + +Command.prototype.outputHelp = function(cb) { + if (!cb) { + cb = function(passthru) { + return passthru; + }; + } + const cbOutput = cb(this.helpInformation()); + if (typeof cbOutput !== 'string' && !Buffer.isBuffer(cbOutput)) { + throw new Error('outputHelp callback must return a string or a Buffer'); + } + process.stdout.write(cbOutput); + this.emit(this._helpLongFlag); +}; + +/** + * You can pass in flags and a description to override the help + * flags and help description for your command. + * + * @param {String} [flags] + * @param {String} [description] + * @return {Command} + * @api public + */ + +Command.prototype.helpOption = function(flags, description) { + this._helpFlags = flags || this._helpFlags; + this._helpDescription = description || this._helpDescription; + + var splitFlags = this._helpFlags.split(/[ ,|]+/); + + if (splitFlags.length > 1) this._helpShortFlag = splitFlags.shift(); + + this._helpLongFlag = splitFlags.shift(); + + return this; +}; + +/** + * Output help information and exit. + * + * @param {Function} [cb] + * @api public + */ + +Command.prototype.help = function(cb) { + this.outputHelp(cb); + // exitCode: preserving original behaviour which was calling process.exit() + // message: do not have all displayed text available so only passing placeholder. + this._exit(process.exitCode || 0, 'commander.help', '(outputHelp)'); +}; + +/** + * Camel-case the given `flag` + * + * @param {String} flag + * @return {String} + * @api private + */ + +function camelcase(flag) { + return flag.split('-').reduce(function(str, word) { + return str + word[0].toUpperCase() + word.slice(1); + }); +} + +/** + * Pad `str` to `width`. + * + * @param {String} str + * @param {Number} width + * @return {String} + * @api private + */ + +function pad(str, width) { + var len = Math.max(0, width - str.length); + return str + Array(len + 1).join(' '); +} + +/** + * Wraps the given string with line breaks at the specified width while breaking + * words and indenting every but the first line on the left. 
+ * + * @param {String} str + * @param {Number} width + * @param {Number} indent + * @return {String} + * @api private + */ +function wrap(str, width, indent) { + var regex = new RegExp('.{1,' + (width - 1) + '}([\\s\u200B]|$)|[^\\s\u200B]+?([\\s\u200B]|$)', 'g'); + var lines = str.match(regex) || []; + return lines.map(function(line, i) { + if (line.slice(-1) === '\n') { + line = line.slice(0, line.length - 1); + } + return ((i > 0 && indent) ? Array(indent + 1).join(' ') : '') + line.trimRight(); + }).join('\n'); +} + +/** + * Optionally wrap the given str to a max width of width characters per line + * while indenting with indent spaces. Do not wrap if insufficient width or + * string is manually formatted. + * + * @param {String} str + * @param {Number} width + * @param {Number} indent + * @return {String} + * @api private + */ +function optionalWrap(str, width, indent) { + // Detect manually wrapped and indented strings by searching for line breaks + // followed by multiple spaces/tabs. + if (str.match(/[\n]\s+/)) return str; + // Do not wrap to narrow columns (or can end up with a word per line). + const minWidth = 40; + if (width < minWidth) return str; + + return wrap(str, width, indent); +} + +/** + * Output help information if help flags specified + * + * @param {Command} cmd - command to output help for + * @param {Array} options - array of options to search for -h or --help + * @api private + */ + +function outputHelpIfRequested(cmd, options) { + options = options || []; + + for (var i = 0; i < options.length; i++) { + if (options[i] === cmd._helpLongFlag || options[i] === cmd._helpShortFlag) { + cmd.outputHelp(); + // (Do not have all displayed text available so only passing placeholder.) + cmd._exit(0, 'commander.helpDisplayed', '(outputHelp)'); + } + } +} + +/** + * Takes an argument and returns its human readable equivalent for help usage. + * + * @param {Object} arg + * @return {String} + * @api private + */ + +function humanReadableArgName(arg) { + var nameOutput = arg.name + (arg.variadic === true ? '...' : ''); + + return arg.required + ? '<' + nameOutput + '>' + : '[' + nameOutput + ']'; +} + +// for versions before node v0.8 when there weren't `fs.existsSync` +function exists(file) { + try { + if (fs.statSync(file).isFile()) { + return true; + } + } catch (e) { + return false; + } +} + +/** + * Scan arguments and increment port number for inspect calls (to avoid conflicts when spawning new command). + * + * @param {string[]} args - array of arguments from node.execArgv + * @returns {string[]} + * @api private + */ + +function incrementNodeInspectorPort(args) { + // Testing for these options: + // --inspect[=[host:]port] + // --inspect-brk[=[host:]port] + // --inspect-port=[host:]port + return args.map((arg) => { + var result = arg; + if (arg.indexOf('--inspect') === 0) { + var debugOption; + var debugHost = '127.0.0.1'; + var debugPort = '9229'; + var match; + if ((match = arg.match(/^(--inspect(-brk)?)$/)) !== null) { + // e.g. --inspect + debugOption = match[1]; + } else if ((match = arg.match(/^(--inspect(-brk|-port)?)=([^:]+)$/)) !== null) { + debugOption = match[1]; + if (/^\d+$/.test(match[3])) { + // e.g. --inspect=1234 + debugPort = match[3]; + } else { + // e.g. --inspect=localhost + debugHost = match[3]; + } + } else if ((match = arg.match(/^(--inspect(-brk|-port)?)=([^:]+):(\d+)$/)) !== null) { + // e.g. 
--inspect=localhost:1234 + debugOption = match[1]; + debugHost = match[3]; + debugPort = match[4]; + } + + if (debugOption && debugPort !== '0') { + result = `${debugOption}=${debugHost}:${parseInt(debugPort) + 1}`; + } + } + return result; + }); +} diff --git a/node_modules/commander/package.json b/node_modules/commander/package.json new file mode 100644 index 0000000..e4781e5 --- /dev/null +++ b/node_modules/commander/package.json @@ -0,0 +1,41 @@ +{ + "name": "commander", + "version": "4.1.1", + "description": "the complete solution for node.js command-line programs", + "keywords": [ + "commander", + "command", + "option", + "parser" + ], + "author": "TJ Holowaychuk ", + "license": "MIT", + "repository": { + "type": "git", + "url": "https://github.com/tj/commander.js.git" + }, + "scripts": { + "lint": "eslint index.js \"tests/**/*.js\"", + "test": "jest && npm run test-typings", + "test-typings": "tsc -p tsconfig.json" + }, + "main": "index", + "files": [ + "index.js", + "typings/index.d.ts" + ], + "dependencies": {}, + "devDependencies": { + "@types/jest": "^24.0.23", + "@types/node": "^12.12.11", + "eslint": "^6.7.0", + "eslint-plugin-jest": "^22.21.0", + "jest": "^24.8.0", + "standard": "^14.3.1", + "typescript": "^3.7.2" + }, + "typings": "typings/index.d.ts", + "engines": { + "node": ">= 6" + } +} diff --git a/node_modules/commander/typings/index.d.ts b/node_modules/commander/typings/index.d.ts new file mode 100644 index 0000000..082a3a3 --- /dev/null +++ b/node_modules/commander/typings/index.d.ts @@ -0,0 +1,311 @@ +// Type definitions for commander +// Original definitions by: Alan Agius , Marcelo Dezem , vvakame , Jules Randolph + +/// + +declare namespace commander { + + interface CommanderError extends Error { + code: string; + exitCode: number; + message: string; + nestedError?: string; + } + type CommanderErrorConstructor = { new (exitCode: number, code: string, message: string): CommanderError }; + + interface Option { + flags: string; + required: boolean; // A value must be supplied when the option is specified. + optional: boolean; // A value is optional when the option is specified. + mandatory: boolean; // The option must have a value after parsing, which usually means it must be specified on command line. + bool: boolean; + short?: string; + long: string; + description: string; + } + type OptionConstructor = { new (flags: string, description?: string): Option }; + + interface Command extends NodeJS.EventEmitter { + [key: string]: any; // options as properties + + args: string[]; + + /** + * Set the program version to `str`. + * + * This method auto-registers the "-V, --version" flag + * which will print the version number when passed. + * + * You can optionally supply the flags and description to override the defaults. + */ + version(str: string, flags?: string, description?: string): Command; + + /** + * Define a command, implemented using an action handler. + * + * @remarks + * The command description is supplied using `.description`, not as a parameter to `.command`. 
+ * + * @example + * ```ts + * program + * .command('clone [destination]') + * .description('clone a repository into a newly created directory') + * .action((source, destination) => { + * console.log('clone command called'); + * }); + * ``` + * + * @param nameAndArgs - command name and arguments, args are `` or `[optional]` and last may also be `variadic...` + * @param opts - configuration options + * @returns new command + */ + command(nameAndArgs: string, opts?: CommandOptions): Command; + /** + * Define a command, implemented in a separate executable file. + * + * @remarks + * The command description is supplied as the second parameter to `.command`. + * + * @example + * ```ts + * program + * .command('start ', 'start named service') + * .command('stop [service]', 'stop named serice, or all if no name supplied'); + * ``` + * + * @param nameAndArgs - command name and arguments, args are `` or `[optional]` and last may also be `variadic...` + * @param description - description of executable command + * @param opts - configuration options + * @returns top level command for chaining more command definitions + */ + command(nameAndArgs: string, description: string, opts?: commander.CommandOptions): Command; + + /** + * Define argument syntax for the top-level command. + * + * @returns Command for chaining + */ + arguments(desc: string): Command; + + /** + * Parse expected `args`. + * + * For example `["[type]"]` becomes `[{ required: false, name: 'type' }]`. + * + * @returns Command for chaining + */ + parseExpectedArgs(args: string[]): Command; + + /** + * Register callback to use as replacement for calling process.exit. + */ + exitOverride(callback?: (err: CommanderError) => never|void): Command; + + /** + * Register callback `fn` for the command. + * + * @example + * program + * .command('help') + * .description('display verbose help') + * .action(function() { + * // output help here + * }); + * + * @returns Command for chaining + */ + action(fn: (...args: any[]) => void | Promise): Command; + + /** + * Define option with `flags`, `description` and optional + * coercion `fn`. + * + * The `flags` string should contain both the short and long flags, + * separated by comma, a pipe or space. The following are all valid + * all will output this way when `--help` is used. + * + * "-p, --pepper" + * "-p|--pepper" + * "-p --pepper" + * + * @example + * // simple boolean defaulting to false + * program.option('-p, --pepper', 'add pepper'); + * + * --pepper + * program.pepper + * // => Boolean + * + * // simple boolean defaulting to true + * program.option('-C, --no-cheese', 'remove cheese'); + * + * program.cheese + * // => true + * + * --no-cheese + * program.cheese + * // => false + * + * // required argument + * program.option('-C, --chdir ', 'change the working directory'); + * + * --chdir /tmp + * program.chdir + * // => "/tmp" + * + * // optional argument + * program.option('-c, --cheese [type]', 'add cheese [marble]'); + * + * @returns Command for chaining + */ + option(flags: string, description?: string, fn?: ((arg1: any, arg2: any) => void) | RegExp, defaultValue?: any): Command; + option(flags: string, description?: string, defaultValue?: any): Command; + + /** + * Define a required option, which must have a value after parsing. This usually means + * the option must be specified on the command line. (Otherwise the same as .option().) + * + * The `flags` string should contain both the short and long flags, separated by comma, a pipe or space. 
+ */ + requiredOption(flags: string, description?: string, fn?: ((arg1: any, arg2: any) => void) | RegExp, defaultValue?: any): Command; + requiredOption(flags: string, description?: string, defaultValue?: any): Command; + + + /** + * Whether to store option values as properties on command object, + * or store separately (specify false). In both cases the option values can be accessed using .opts(). + * + * @return Command for chaining + */ + storeOptionsAsProperties(value?: boolean): Command; + + /** + * Whether to pass command to action handler, + * or just the options (specify false). + * + * @return Command for chaining + */ + passCommandToAction(value?: boolean): Command; + + /** + * Allow unknown options on the command line. + * + * @param [arg] if `true` or omitted, no error will be thrown for unknown options. + * @returns Command for chaining + */ + allowUnknownOption(arg?: boolean): Command; + + /** + * Parse `argv`, setting options and invoking commands when defined. + * + * @returns Command for chaining + */ + parse(argv: string[]): Command; + + /** + * Parse `argv`, setting options and invoking commands when defined. + * + * Use parseAsync instead of parse if any of your action handlers are async. Returns a Promise. + * + * @returns Promise + */ + parseAsync(argv: string[]): Promise; + + /** + * Parse options from `argv` returning `argv` void of these options. + */ + parseOptions(argv: string[]): commander.ParseOptionsResult; + + /** + * Return an object containing options as key-value pairs + */ + opts(): { [key: string]: any }; + + /** + * Set the description. + * + * @returns Command for chaining + */ + description(str: string, argsDescription?: {[argName: string]: string}): Command; + /** + * Get the description. + */ + description(): string; + + /** + * Set an alias for the command. + * + * @returns Command for chaining + */ + alias(alias: string): Command; + /** + * Get alias for the command. + */ + alias(): string; + + /** + * Set the command usage. + * + * @returns Command for chaining + */ + usage(str: string): Command; + /** + * Get the command usage. + */ + usage(): string; + + /** + * Set the name of the command. + * + * @returns Command for chaining + */ + name(str: string): Command; + /** + * Get the name of the command. + */ + name(): string; + + /** + * Output help information for this command. + * + * When listener(s) are available for the helpLongFlag + * those callbacks are invoked. + */ + outputHelp(cb?: (str: string) => string): void; + + /** + * You can pass in flags and a description to override the help + * flags and help description for your command. + */ + helpOption(flags?: string, description?: string): Command; + + /** + * Output help information and exit. 
+ */ + help(cb?: (str: string) => string): never; + } + type CommandConstructor = { new (name?: string): Command }; + + + interface CommandOptions { + noHelp?: boolean; + isDefault?: boolean; + executableFile?: string; + } + + interface ParseOptionsResult { + args: string[]; + unknown: string[]; + } + + interface CommanderStatic extends Command { + Command: CommandConstructor; + Option: OptionConstructor; + CommanderError:CommanderErrorConstructor; + } + +} + +declare const commander: commander.CommanderStatic; +export = commander; diff --git a/node_modules/cssesc/LICENSE-MIT.txt b/node_modules/cssesc/LICENSE-MIT.txt new file mode 100644 index 0000000..a41e0a7 --- /dev/null +++ b/node_modules/cssesc/LICENSE-MIT.txt @@ -0,0 +1,20 @@ +Copyright Mathias Bynens + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/cssesc/README.md b/node_modules/cssesc/README.md new file mode 100644 index 0000000..58fb8fe --- /dev/null +++ b/node_modules/cssesc/README.md @@ -0,0 +1,201 @@ +# cssesc [![Build status](https://travis-ci.org/mathiasbynens/cssesc.svg?branch=master)](https://travis-ci.org/mathiasbynens/cssesc) [![Code coverage status](https://img.shields.io/codecov/c/github/mathiasbynens/cssesc.svg)](https://codecov.io/gh/mathiasbynens/cssesc) + +A JavaScript library for escaping CSS strings and identifiers while generating the shortest possible ASCII-only output. + +This is a JavaScript library for [escaping text for use in CSS strings or identifiers](https://mathiasbynens.be/notes/css-escapes) while generating the shortest possible valid ASCII-only output. [Here’s an online demo.](https://mothereff.in/css-escapes) + +[A polyfill for the CSSOM `CSS.escape()` method is available in a separate repository.](https://mths.be/cssescape) (In comparison, _cssesc_ is much more powerful.) + +Feel free to fork if you see possible improvements! 
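
As a quick taste of the API documented below (a minimal, illustrative sketch; the exact escape sequences follow the rules described in the API section):

```js
const cssesc = require('cssesc');

// Escape a value for use inside a CSS string literal and wrap it in quotes
// (single quotes by default):
const content = cssesc('I ♥ "CSS"', { wrap: true });

// Escape a value for use as a CSS identifier (e.g. a class name in a selector):
const selector = '.' + cssesc('1st item', { isIdentifier: true });
```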
+ +## Installation + +Via [npm](https://www.npmjs.com/): + +```bash +npm install cssesc +``` + +In a browser: + +```html + +``` + +In [Node.js](https://nodejs.org/): + +```js +const cssesc = require('cssesc'); +``` + +In Ruby using [the `ruby-cssesc` wrapper gem](https://github.com/borodean/ruby-cssesc): + +```bash +gem install ruby-cssesc +``` + +```ruby +require 'ruby-cssesc' +CSSEsc.escape('I ♥ Ruby', is_identifier: true) +``` + +In Sass using [`sassy-escape`](https://github.com/borodean/sassy-escape): + +```bash +gem install sassy-escape +``` + +```scss +body { + content: escape('I ♥ Sass', $is-identifier: true); +} +``` + +## API + +### `cssesc(value, options)` + +This function takes a value and returns an escaped version of the value where any characters that are not printable ASCII symbols are escaped using the shortest possible (but valid) [escape sequences for use in CSS strings or identifiers](https://mathiasbynens.be/notes/css-escapes). + +```js +cssesc('Ich ♥ Bücher'); +// → 'Ich \\2665 B\\FC cher' + +cssesc('foo 𝌆 bar'); +// → 'foo \\1D306 bar' +``` + +By default, `cssesc` returns a string that can be used as part of a CSS string. If the target is a CSS identifier rather than a CSS string, use the `isIdentifier: true` setting (see below). + +The optional `options` argument accepts an object with the following options: + +#### `isIdentifier` + +The default value for the `isIdentifier` option is `false`. This means that the input text will be escaped for use in a CSS string literal. If you want to use the result as a CSS identifier instead (in a selector, for example), set this option to `true`. + +```js +cssesc('123a2b'); +// → '123a2b' + +cssesc('123a2b', { + 'isIdentifier': true +}); +// → '\\31 23a2b' +``` + +#### `quotes` + +The default value for the `quotes` option is `'single'`. This means that any occurences of `'` in the input text will be escaped as `\'`, so that the output can be used in a CSS string literal wrapped in single quotes. + +```js +cssesc('Lorem ipsum "dolor" sit \'amet\' etc.'); +// → 'Lorem ipsum "dolor" sit \\\'amet\\\' etc.' +// → "Lorem ipsum \"dolor\" sit \\'amet\\' etc." + +cssesc('Lorem ipsum "dolor" sit \'amet\' etc.', { + 'quotes': 'single' +}); +// → 'Lorem ipsum "dolor" sit \\\'amet\\\' etc.' +// → "Lorem ipsum \"dolor\" sit \\'amet\\' etc." +``` + +If you want to use the output as part of a CSS string literal wrapped in double quotes, set the `quotes` option to `'double'`. + +```js +cssesc('Lorem ipsum "dolor" sit \'amet\' etc.', { + 'quotes': 'double' +}); +// → 'Lorem ipsum \\"dolor\\" sit \'amet\' etc.' +// → "Lorem ipsum \\\"dolor\\\" sit 'amet' etc." +``` + +#### `wrap` + +The `wrap` option takes a boolean value (`true` or `false`), and defaults to `false` (disabled). When enabled, the output will be a valid CSS string literal wrapped in quotes. The type of quotes can be specified through the `quotes` setting. + +```js +cssesc('Lorem ipsum "dolor" sit \'amet\' etc.', { + 'quotes': 'single', + 'wrap': true +}); +// → '\'Lorem ipsum "dolor" sit \\\'amet\\\' etc.\'' +// → "\'Lorem ipsum \"dolor\" sit \\\'amet\\\' etc.\'" + +cssesc('Lorem ipsum "dolor" sit \'amet\' etc.', { + 'quotes': 'double', + 'wrap': true +}); +// → '"Lorem ipsum \\"dolor\\" sit \'amet\' etc."' +// → "\"Lorem ipsum \\\"dolor\\\" sit \'amet\' etc.\"" +``` + +#### `escapeEverything` + +The `escapeEverything` option takes a boolean value (`true` or `false`), and defaults to `false` (disabled). 
When enabled, all the symbols in the output will be escaped, even printable ASCII symbols. + +```js +cssesc('lolwat"foo\'bar', { + 'escapeEverything': true +}); +// → '\\6C\\6F\\6C\\77\\61\\74\\"\\66\\6F\\6F\\\'\\62\\61\\72' +// → "\\6C\\6F\\6C\\77\\61\\74\\\"\\66\\6F\\6F\\'\\62\\61\\72" +``` + +#### Overriding the default options globally + +The global default settings can be overridden by modifying the `css.options` object. This saves you from passing in an `options` object for every call to `encode` if you want to use the non-default setting. + +```js +// Read the global default setting for `escapeEverything`: +cssesc.options.escapeEverything; +// → `false` by default + +// Override the global default setting for `escapeEverything`: +cssesc.options.escapeEverything = true; + +// Using the global default setting for `escapeEverything`, which is now `true`: +cssesc('foo © bar ≠ baz 𝌆 qux'); +// → '\\66\\6F\\6F\\ \\A9\\ \\62\\61\\72\\ \\2260\\ \\62\\61\\7A\\ \\1D306\\ \\71\\75\\78' +``` + +### `cssesc.version` + +A string representing the semantic version number. + +### Using the `cssesc` binary + +To use the `cssesc` binary in your shell, simply install cssesc globally using npm: + +```bash +npm install -g cssesc +``` + +After that you will be able to escape text for use in CSS strings or identifiers from the command line: + +```bash +$ cssesc 'föo ♥ bår 𝌆 baz' +f\F6o \2665 b\E5r \1D306 baz +``` + +If the output needs to be a CSS identifier rather than part of a string literal, use the `-i`/`--identifier` option: + +```bash +$ cssesc --identifier 'föo ♥ bår 𝌆 baz' +f\F6o\ \2665\ b\E5r\ \1D306\ baz +``` + +See `cssesc --help` for the full list of options. + +## Support + +This library supports the Node.js and browser versions mentioned in [`.babelrc`](https://github.com/mathiasbynens/cssesc/blob/master/.babelrc). For a version that supports a wider variety of legacy browsers and environments out-of-the-box, [see v0.1.0](https://github.com/mathiasbynens/cssesc/releases/tag/v0.1.0). + +## Author + +| [![twitter/mathias](https://gravatar.com/avatar/24e08a9ea84deb17ae121074d0f17125?s=70)](https://twitter.com/mathias "Follow @mathias on Twitter") | +|---| +| [Mathias Bynens](https://mathiasbynens.be/) | + +## License + +This library is available under the [MIT](https://mths.be/mit) license. 
diff --git a/node_modules/cssesc/bin/cssesc b/node_modules/cssesc/bin/cssesc new file mode 100755 index 0000000..188c034 --- /dev/null +++ b/node_modules/cssesc/bin/cssesc @@ -0,0 +1,116 @@ +#!/usr/bin/env node +const fs = require('fs'); +const cssesc = require('../cssesc.js'); +const strings = process.argv.splice(2); +const stdin = process.stdin; +const options = {}; +const log = console.log; + +const main = function() { + const option = strings[0]; + + if (/^(?:-h|--help|undefined)$/.test(option)) { + log( + 'cssesc v%s - https://mths.be/cssesc', + cssesc.version + ); + log([ + '\nUsage:\n', + '\tcssesc [string]', + '\tcssesc [-i | --identifier] [string]', + '\tcssesc [-s | --single-quotes] [string]', + '\tcssesc [-d | --double-quotes] [string]', + '\tcssesc [-w | --wrap] [string]', + '\tcssesc [-e | --escape-everything] [string]', + '\tcssesc [-v | --version]', + '\tcssesc [-h | --help]', + '\nExamples:\n', + '\tcssesc \'f\xF6o \u2665 b\xE5r \uD834\uDF06 baz\'', + '\tcssesc --identifier \'f\xF6o \u2665 b\xE5r \uD834\uDF06 baz\'', + '\tcssesc --escape-everything \'f\xF6o \u2665 b\xE5r \uD834\uDF06 baz\'', + '\tcssesc --double-quotes --wrap \'f\xF6o \u2665 b\xE5r \uD834\uDF06 baz\'', + '\techo \'f\xF6o \u2665 b\xE5r \uD834\uDF06 baz\' | cssesc' + ].join('\n')); + return process.exit(1); + } + + if (/^(?:-v|--version)$/.test(option)) { + log('v%s', cssesc.version); + return process.exit(1); + } + + strings.forEach(function(string) { + // Process options + if (/^(?:-i|--identifier)$/.test(string)) { + options.isIdentifier = true; + return; + } + if (/^(?:-s|--single-quotes)$/.test(string)) { + options.quotes = 'single'; + return; + } + if (/^(?:-d|--double-quotes)$/.test(string)) { + options.quotes = 'double'; + return; + } + if (/^(?:-w|--wrap)$/.test(string)) { + options.wrap = true; + return; + } + if (/^(?:-e|--escape-everything)$/.test(string)) { + options.escapeEverything = true; + return; + } + + // Process string(s) + let result; + try { + result = cssesc(string, options); + log(result); + } catch (exception) { + log(exception.message + '\n'); + log('Error: failed to escape.'); + log('If you think this is a bug in cssesc, please report it:'); + log('https://github.com/mathiasbynens/cssesc/issues/new'); + log( + '\nStack trace using cssesc@%s:\n', + cssesc.version + ); + log(exception.stack); + return process.exit(1); + } + }); + // Return with exit status 0 outside of the `forEach` loop, in case + // multiple strings were passed in. + return process.exit(0); + +}; + +if (stdin.isTTY) { + // handle shell arguments + main(); +} else { + let timeout; + // Either the script is called from within a non-TTY context, or `stdin` + // content is being piped in. + if (!process.stdout.isTTY) { + // The script was called from a non-TTY context. This is a rather uncommon + // use case we don’t actively support. However, we don’t want the script + // to wait forever in such cases, so… + timeout = setTimeout(function() { + // …if no piped data arrived after a whole minute, handle shell + // arguments instead. + main(); + }, 60000); + } + let data = ''; + stdin.on('data', function(chunk) { + clearTimeout(timeout); + data += chunk; + }); + stdin.on('end', function() { + strings.push(data.trim()); + main(); + }); + stdin.resume(); +} diff --git a/node_modules/cssesc/cssesc.js b/node_modules/cssesc/cssesc.js new file mode 100644 index 0000000..1c0928e --- /dev/null +++ b/node_modules/cssesc/cssesc.js @@ -0,0 +1,110 @@ +/*! 
https://mths.be/cssesc v3.0.0 by @mathias */ +'use strict'; + +var object = {}; +var hasOwnProperty = object.hasOwnProperty; +var merge = function merge(options, defaults) { + if (!options) { + return defaults; + } + var result = {}; + for (var key in defaults) { + // `if (defaults.hasOwnProperty(key) { … }` is not needed here, since + // only recognized option names are used. + result[key] = hasOwnProperty.call(options, key) ? options[key] : defaults[key]; + } + return result; +}; + +var regexAnySingleEscape = /[ -,\.\/:-@\[-\^`\{-~]/; +var regexSingleEscape = /[ -,\.\/:-@\[\]\^`\{-~]/; +var regexAlwaysEscape = /['"\\]/; +var regexExcessiveSpaces = /(^|\\+)?(\\[A-F0-9]{1,6})\x20(?![a-fA-F0-9\x20])/g; + +// https://mathiasbynens.be/notes/css-escapes#css +var cssesc = function cssesc(string, options) { + options = merge(options, cssesc.options); + if (options.quotes != 'single' && options.quotes != 'double') { + options.quotes = 'single'; + } + var quote = options.quotes == 'double' ? '"' : '\''; + var isIdentifier = options.isIdentifier; + + var firstChar = string.charAt(0); + var output = ''; + var counter = 0; + var length = string.length; + while (counter < length) { + var character = string.charAt(counter++); + var codePoint = character.charCodeAt(); + var value = void 0; + // If it’s not a printable ASCII character… + if (codePoint < 0x20 || codePoint > 0x7E) { + if (codePoint >= 0xD800 && codePoint <= 0xDBFF && counter < length) { + // It’s a high surrogate, and there is a next character. + var extra = string.charCodeAt(counter++); + if ((extra & 0xFC00) == 0xDC00) { + // next character is low surrogate + codePoint = ((codePoint & 0x3FF) << 10) + (extra & 0x3FF) + 0x10000; + } else { + // It’s an unmatched surrogate; only append this code unit, in case + // the next code unit is the high surrogate of a surrogate pair. + counter--; + } + } + value = '\\' + codePoint.toString(16).toUpperCase() + ' '; + } else { + if (options.escapeEverything) { + if (regexAnySingleEscape.test(character)) { + value = '\\' + character; + } else { + value = '\\' + codePoint.toString(16).toUpperCase() + ' '; + } + } else if (/[\t\n\f\r\x0B]/.test(character)) { + value = '\\' + codePoint.toString(16).toUpperCase() + ' '; + } else if (character == '\\' || !isIdentifier && (character == '"' && quote == character || character == '\'' && quote == character) || isIdentifier && regexSingleEscape.test(character)) { + value = '\\' + character; + } else { + value = character; + } + } + output += value; + } + + if (isIdentifier) { + if (/^-[-\d]/.test(output)) { + output = '\\-' + output.slice(1); + } else if (/\d/.test(firstChar)) { + output = '\\3' + firstChar + ' ' + output.slice(1); + } + } + + // Remove spaces after `\HEX` escapes that are not followed by a hex digit, + // since they’re redundant. Note that this is only possible if the escape + // sequence isn’t preceded by an odd number of backslashes. + output = output.replace(regexExcessiveSpaces, function ($0, $1, $2) { + if ($1 && $1.length % 2) { + // It’s not safe to remove the space, so don’t. + return $0; + } + // Strip the space. + return ($1 || '') + $2; + }); + + if (!isIdentifier && options.wrap) { + return quote + output + quote; + } + return output; +}; + +// Expose default options (so they can be overridden globally). 
+cssesc.options = { + 'escapeEverything': false, + 'isIdentifier': false, + 'quotes': 'single', + 'wrap': false +}; + +cssesc.version = '3.0.0'; + +module.exports = cssesc; diff --git a/node_modules/cssesc/man/cssesc.1 b/node_modules/cssesc/man/cssesc.1 new file mode 100644 index 0000000..eee4996 --- /dev/null +++ b/node_modules/cssesc/man/cssesc.1 @@ -0,0 +1,70 @@ +.Dd August 9, 2013 +.Dt cssesc 1 +.Sh NAME +.Nm cssesc +.Nd escape text for use in CSS string literals or identifiers +.Sh SYNOPSIS +.Nm +.Op Fl i | -identifier Ar string +.br +.Op Fl s | -single-quotes Ar string +.br +.Op Fl d | -double-quotes Ar string +.br +.Op Fl w | -wrap Ar string +.br +.Op Fl e | -escape-everything Ar string +.br +.Op Fl v | -version +.br +.Op Fl h | -help +.Sh DESCRIPTION +.Nm +escapes strings for use in CSS string literals or identifiers while generating the shortest possible valid ASCII-only output. +.Sh OPTIONS +.Bl -ohang -offset +.It Sy "-s, --single-quotes" +Escape any occurences of ' in the input string as \\', so that the output can be used in a CSS string literal wrapped in single quotes. +.It Sy "-d, --double-quotes" +Escape any occurences of " in the input string as \\", so that the output can be used in a CSS string literal wrapped in double quotes. +.It Sy "-w, --wrap" +Make sure the output is a valid CSS string literal wrapped in quotes. The type of quotes can be specified using the +.Ar -s | --single-quotes +or +.Ar -d | --double-quotes +settings. +.It Sy "-e, --escape-everything" +Escape all the symbols in the output, even printable ASCII symbols. +.It Sy "-v, --version" +Print cssesc's version. +.It Sy "-h, --help" +Show the help screen. +.El +.Sh EXIT STATUS +The +.Nm cssesc +utility exits with one of the following values: +.Pp +.Bl -tag -width flag -compact +.It Li 0 +.Nm +successfully escaped the given text and printed the result. +.It Li 1 +.Nm +wasn't instructed to escape anything (for example, the +.Ar --help +flag was set); or, an error occurred. +.El +.Sh EXAMPLES +.Bl -ohang -offset +.It Sy "cssesc 'foo bar baz'" +Print an escaped version of the given text. +.It Sy echo\ 'foo bar baz'\ |\ cssesc +Print an escaped version of the text that gets piped in. +.El +.Sh BUGS +cssesc's bug tracker is located at . 
+.Sh AUTHOR +Mathias Bynens +.Sh WWW + diff --git a/node_modules/cssesc/package.json b/node_modules/cssesc/package.json new file mode 100644 index 0000000..076c84d --- /dev/null +++ b/node_modules/cssesc/package.json @@ -0,0 +1,51 @@ +{ + "name": "cssesc", + "version": "3.0.0", + "description": "A JavaScript library for escaping CSS strings and identifiers while generating the shortest possible ASCII-only output.", + "homepage": "https://mths.be/cssesc", + "engines": { + "node": ">=4" + }, + "main": "cssesc.js", + "bin": "bin/cssesc", + "man": "man/cssesc.1", + "keywords": [ + "css", + "escape", + "identifier", + "string", + "tool" + ], + "license": "MIT", + "author": { + "name": "Mathias Bynens", + "url": "https://mathiasbynens.be/" + }, + "repository": { + "type": "git", + "url": "https://github.com/mathiasbynens/cssesc.git" + }, + "bugs": "https://github.com/mathiasbynens/cssesc/issues", + "files": [ + "LICENSE-MIT.txt", + "cssesc.js", + "bin/", + "man/" + ], + "scripts": { + "build": "grunt template && babel cssesc.js -o cssesc.js", + "test": "mocha tests", + "cover": "istanbul cover --report html node_modules/.bin/_mocha tests -- -u exports -R spec" + }, + "devDependencies": { + "babel-cli": "^6.26.0", + "babel-preset-env": "^1.6.1", + "codecov": "^1.0.1", + "grunt": "^1.0.1", + "grunt-template": "^1.0.0", + "istanbul": "^0.4.4", + "mocha": "^2.5.3", + "regenerate": "^1.2.1", + "requirejs": "^2.1.16" + } +} diff --git a/node_modules/didyoumean/LICENSE b/node_modules/didyoumean/LICENSE new file mode 100644 index 0000000..32c23db --- /dev/null +++ b/node_modules/didyoumean/LICENSE @@ -0,0 +1,14 @@ +## License + +didYouMean.js copyright (c) 2013 Dave Porter. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License +[here](http://www.apache.org/licenses/LICENSE-2.0). + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff --git a/node_modules/didyoumean/README.md b/node_modules/didyoumean/README.md new file mode 100644 index 0000000..cd16698 --- /dev/null +++ b/node_modules/didyoumean/README.md @@ -0,0 +1,134 @@ +didYouMean.js - A simple JavaScript matching engine +=================================================== + +[Available on GitHub](https://github.com/dcporter/didyoumean.js). + +A super-simple, highly optimized JS library for matching human-quality input to a list of potential +matches. You can use it to suggest a misspelled command-line utility option to a user, or to offer +links to nearby valid URLs on your 404 page. (The examples below are taken from a personal project, +my [HTML5 business card](http://dcporter.aws.af.cm/me), which uses didYouMean.js to suggest correct +URLs from misspelled ones, such as [dcporter.aws.af.cm/me/instagarm](http://dcporter.aws.af.cm/me/instagarm).) +Uses the [Levenshtein distance algorithm](https://en.wikipedia.org/wiki/Levenshtein_distance). + +didYouMean.js works in the browser as well as in node.js. 
To install it for use in node: + +``` +npm install didyoumean +``` + + +Examples +-------- + +Matching against a list of strings: +``` +var input = 'insargrm' +var list = ['facebook', 'twitter', 'instagram', 'linkedin']; +console.log(didYouMean(input, list)); +> 'instagram' +// The method matches 'insargrm' to 'instagram'. + +input = 'google plus'; +console.log(didYouMean(input, list)); +> null +// The method was unable to find 'google plus' in the list of options. +``` + +Matching against a list of objects: +``` +var input = 'insargrm'; +var list = [ { id: 'facebook' }, { id: 'twitter' }, { id: 'instagram' }, { id: 'linkedin' } ]; +var key = 'id'; +console.log(didYouMean(input, list, key)); +> 'instagram' +// The method returns the matching value. + +didYouMean.returnWinningObject = true; +console.log(didYouMean(input, list, key)); +> { id: 'instagram' } +// The method returns the matching object. +``` + + +didYouMean(str, list, [key]) +---------------------------- + +- str: The string input to match. +- list: An array of strings or objects to match against. +- key (OPTIONAL): If your list array contains objects, you must specify the key which contains the string + to match against. + +Returns: the closest matching string, or null if no strings exceed the threshold. + + +Options +------- + +Options are set on the didYouMean function object. You may change them at any time. + +### threshold + + By default, the method will only return strings whose edit distance is less than 40% (0.4x) of their length. + For example, if a ten-letter string is five edits away from its nearest match, the method will return null. + + You can control this by setting the "threshold" value on the didYouMean function. For example, to set the + edit distance threshold to 50% of the input string's length: + + ``` + didYouMean.threshold = 0.5; + ``` + + To return the nearest match no matter the threshold, set this value to null. + +### thresholdAbsolute + + This option behaves the same as threshold, but instead takes an integer number of edit steps. For example, + if thresholdAbsolute is set to 20 (the default), then the method will only return strings whose edit distance + is less than 20. Both options apply. + +### caseSensitive + + By default, the method will perform case-insensitive comparisons. If you wish to force case sensitivity, set + the "caseSensitive" value to true: + + ``` + didYouMean.caseSensitive = true; + ``` + +### nullResultValue + + By default, the method will return null if there is no sufficiently close match. You can change this value here. + +### returnWinningObject + + By default, the method will return the winning string value (if any). If your list contains objects rather + than strings, you may set returnWinningObject to true. + + ``` + didYouMean.returnWinningObject = true; + ``` + + This option has no effect on lists of strings. + +### returnFirstMatch + + By default, the method will search all values and return the closest match. If you're simply looking for a "good- + enough" match, you can set your thresholds appropriately and set returnFirstMatch to true to substantially speed + things up. + + +License +------- + +didYouMean copyright (c) 2013-2014 Dave Porter. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License +[here](http://www.apache.org/licenses/LICENSE-2.0). 
+ +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff --git a/node_modules/didyoumean/didYouMean-1.2.1.js b/node_modules/didyoumean/didYouMean-1.2.1.js new file mode 100644 index 0000000..febb30e --- /dev/null +++ b/node_modules/didyoumean/didYouMean-1.2.1.js @@ -0,0 +1,274 @@ +/* + +didYouMean.js - A simple JavaScript matching engine +=================================================== + +[Available on GitHub](https://github.com/dcporter/didyoumean.js). + +A super-simple, highly optimized JS library for matching human-quality input to a list of potential +matches. You can use it to suggest a misspelled command-line utility option to a user, or to offer +links to nearby valid URLs on your 404 page. (The examples below are taken from a personal project, +my [HTML5 business card](http://dcporter.aws.af.cm/me), which uses didYouMean.js to suggest correct +URLs from misspelled ones, such as [dcporter.aws.af.cm/me/instagarm](http://dcporter.aws.af.cm/me/instagarm).) +Uses the [Levenshtein distance algorithm](https://en.wikipedia.org/wiki/Levenshtein_distance). + +didYouMean.js works in the browser as well as in node.js. To install it for use in node: + +``` +npm install didyoumean +``` + + +Examples +-------- + +Matching against a list of strings: +``` +var input = 'insargrm' +var list = ['facebook', 'twitter', 'instagram', 'linkedin']; +console.log(didYouMean(input, list)); +> 'instagram' +// The method matches 'insargrm' to 'instagram'. + +input = 'google plus'; +console.log(didYouMean(input, list)); +> null +// The method was unable to find 'google plus' in the list of options. +``` + +Matching against a list of objects: +``` +var input = 'insargrm'; +var list = [ { id: 'facebook' }, { id: 'twitter' }, { id: 'instagram' }, { id: 'linkedin' } ]; +var key = 'id'; +console.log(didYouMean(input, list, key)); +> 'instagram' +// The method returns the matching value. + +didYouMean.returnWinningObject = true; +console.log(didYouMean(input, list, key)); +> { id: 'instagram' } +// The method returns the matching object. +``` + + +didYouMean(str, list, [key]) +---------------------------- + +- str: The string input to match. +- list: An array of strings or objects to match against. +- key (OPTIONAL): If your list array contains objects, you must specify the key which contains the string + to match against. + +Returns: the closest matching string, or null if no strings exceed the threshold. + + +Options +------- + +Options are set on the didYouMean function object. You may change them at any time. + +### threshold + + By default, the method will only return strings whose edit distance is less than 40% (0.4x) of their length. + For example, if a ten-letter string is five edits away from its nearest match, the method will return null. + + You can control this by setting the "threshold" value on the didYouMean function. For example, to set the + edit distance threshold to 50% of the input string's length: + + ``` + didYouMean.threshold = 0.5; + ``` + + To return the nearest match no matter the threshold, set this value to null. + +### thresholdAbsolute + + This option behaves the same as threshold, but instead takes an integer number of edit steps. 
For example, + if thresholdAbsolute is set to 20 (the default), then the method will only return strings whose edit distance + is less than 20. Both options apply. + +### caseSensitive + + By default, the method will perform case-insensitive comparisons. If you wish to force case sensitivity, set + the "caseSensitive" value to true: + + ``` + didYouMean.caseSensitive = true; + ``` + +### nullResultValue + + By default, the method will return null if there is no sufficiently close match. You can change this value here. + +### returnWinningObject + + By default, the method will return the winning string value (if any). If your list contains objects rather + than strings, you may set returnWinningObject to true. + + ``` + didYouMean.returnWinningObject = true; + ``` + + This option has no effect on lists of strings. + +### returnFirstMatch + + By default, the method will search all values and return the closest match. If you're simply looking for a "good- + enough" match, you can set your thresholds appropriately and set returnFirstMatch to true to substantially speed + things up. + + +License +------- + +didYouMean copyright (c) 2013-2014 Dave Porter. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License +[here](http://www.apache.org/licenses/LICENSE-2.0). + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + +*/ +(function() { + "use strict"; + + // The didYouMean method. + function didYouMean(str, list, key) { + if (!str) return null; + + // If we're running a case-insensitive search, smallify str. + if (!didYouMean.caseSensitive) { str = str.toLowerCase(); } + + // Calculate the initial value (the threshold) if present. + var thresholdRelative = didYouMean.threshold === null ? null : didYouMean.threshold * str.length, + thresholdAbsolute = didYouMean.thresholdAbsolute, + winningVal; + if (thresholdRelative !== null && thresholdAbsolute !== null) winningVal = Math.min(thresholdRelative, thresholdAbsolute); + else if (thresholdRelative !== null) winningVal = thresholdRelative; + else if (thresholdAbsolute !== null) winningVal = thresholdAbsolute; + else winningVal = null; + + // Get the edit distance to each option. If the closest one is less than 40% (by default) of str's length, + // then return it. + var winner, candidate, testCandidate, val, + i, len = list.length; + for (i = 0; i < len; i++) { + // Get item. + candidate = list[i]; + // If there's a key, get the candidate value out of the object. + if (key) { candidate = candidate[key]; } + // Gatekeep. + if (!candidate) { continue; } + // If we're running a case-insensitive search, smallify the candidate. + if (!didYouMean.caseSensitive) { testCandidate = candidate.toLowerCase(); } + else { testCandidate = candidate; } + // Get and compare edit distance. + val = getEditDistance(str, testCandidate, winningVal); + // If this value is smaller than our current winning value, OR if we have no winning val yet (i.e. the + // threshold option is set to null, meaning the caller wants a match back no matter how bad it is), then + // this is our new winner. 
+ if (winningVal === null || val < winningVal) { + winningVal = val; + // Set the winner to either the value or its object, depending on the returnWinningObject option. + if (key && didYouMean.returnWinningObject) winner = list[i]; + else winner = candidate; + // If we're returning the first match, return it now. + if (didYouMean.returnFirstMatch) return winner; + } + } + + // If we have a winner, return it. + return winner || didYouMean.nullResultValue; + } + + // Set default options. + didYouMean.threshold = 0.4; + didYouMean.thresholdAbsolute = 20; + didYouMean.caseSensitive = false; + didYouMean.nullResultValue = null; + didYouMean.returnWinningObject = null; + didYouMean.returnFirstMatch = false; + + // Expose. + // In node... + if (typeof module !== 'undefined' && module.exports) { + module.exports = didYouMean; + } + // Otherwise... + else { + window.didYouMean = didYouMean; + } + + var MAX_INT = Math.pow(2,32) - 1; // We could probably go higher than this, but for practical reasons let's not. + function getEditDistance(a, b, max) { + // Handle null or undefined max. + max = max || max === 0 ? max : MAX_INT; + + var lena = a.length; + var lenb = b.length; + + // Fast path - no A or B. + if (lena === 0) return Math.min(max + 1, lenb); + if (lenb === 0) return Math.min(max + 1, lena); + + // Fast path - length diff larger than max. + if (Math.abs(lena - lenb) > max) return max + 1; + + // Slow path. + var matrix = [], + i, j, colMin, minJ, maxJ; + + // Set up the first row ([0, 1, 2, 3, etc]). + for (i = 0; i <= lenb; i++) { matrix[i] = [i]; } + + // Set up the first column (same). + for (j = 0; j <= lena; j++) { matrix[0][j] = j; } + + // Loop over the rest of the columns. + for (i = 1; i <= lenb; i++) { + colMin = MAX_INT; + minJ = 1; + if (i > max) minJ = i - max; + maxJ = lenb + 1; + if (maxJ > max + i) maxJ = max + i; + // Loop over the rest of the rows. + for (j = 1; j <= lena; j++) { + // If j is out of bounds, just put a large value in the slot. + if (j < minJ || j > maxJ) { + matrix[i][j] = max + 1; + } + + // Otherwise do the normal Levenshtein thing. + else { + // If the characters are the same, there's no change in edit distance. + if (b.charAt(i - 1) === a.charAt(j - 1)) { + matrix[i][j] = matrix[i - 1][j - 1]; + } + // Otherwise, see if we're substituting, inserting or deleting. + else { + matrix[i][j] = Math.min(matrix[i - 1][j - 1] + 1, // Substitute + Math.min(matrix[i][j - 1] + 1, // Insert + matrix[i - 1][j] + 1)); // Delete + } + } + + // Either way, update colMin. + if (matrix[i][j] < colMin) colMin = matrix[i][j]; + } + + // If this column's minimum is greater than the allowed maximum, there's no point + // in going on with life. + if (colMin > max) return max + 1; + } + // If we made it this far without running into the max, then return the final matrix value. + return matrix[lenb][lena]; + } + +})(); diff --git a/node_modules/didyoumean/didYouMean-1.2.1.min.js b/node_modules/didyoumean/didYouMean-1.2.1.min.js new file mode 100644 index 0000000..c41abd8 --- /dev/null +++ b/node_modules/didyoumean/didYouMean-1.2.1.min.js @@ -0,0 +1,17 @@ +/* + didYouMean.js copyright (c) 2013-2014 Dave Porter. + + [Available on GitHub](https://github.com/dcporter/didyoumean.js). + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License + [here](http://www.apache.org/licenses/LICENSE-2.0). 
+ + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ +(function(){"use strict";function e(t,r,i){if(!t)return null;if(!e.caseSensitive){t=t.toLowerCase()}var s=e.threshold===null?null:e.threshold*t.length,o=e.thresholdAbsolute,u;if(s!==null&&o!==null)u=Math.min(s,o);else if(s!==null)u=s;else if(o!==null)u=o;else u=null;var a,f,l,c,h,p=r.length;for(h=0;hr)return r+1;var o=[],u,a,f,l,c;for(u=0;u<=s;u++){o[u]=[u]}for(a=0;a<=i;a++){o[0][a]=a}for(u=1;u<=s;u++){f=t;l=1;if(u>r)l=u-r;c=s+1;if(c>r+u)c=r+u;for(a=1;a<=i;a++){if(ac){o[u][a]=r+1}else{if(n.charAt(u-1)===e.charAt(a-1)){o[u][a]=o[u-1][a-1]}else{o[u][a]=Math.min(o[u-1][a-1]+1,Math.min(o[u][a-1]+1,o[u-1][a]+1))}}if(o[u][a]r)return r+1}return o[s][i]}e.threshold=.4;e.thresholdAbsolute=20;e.caseSensitive=false;e.nullResultValue=null;e.returnWinningObject=null;e.returnFirstMatch=false;if(typeof module!=="undefined"&&module.exports){module.exports=e}else{window.didYouMean=e}var t=Math.pow(2,32)-1})(); \ No newline at end of file diff --git a/node_modules/didyoumean/package.json b/node_modules/didyoumean/package.json new file mode 100755 index 0000000..1301d03 --- /dev/null +++ b/node_modules/didyoumean/package.json @@ -0,0 +1,27 @@ +{ + "name": "didyoumean", + "version": "1.2.2", + "description": "Match human-quality input to potential matches by edit distance.", + "homepage": "https://github.com/dcporter/didyoumean.js", + "author": { + "name": "Dave Porter", + "email": "dcporter@gmail.com", + "url": "http://dcporter.net/" + }, + "keywords": [ + "didyoumean", + "mean", + "edit", + "distance", + "levenshtein" + ], + "main": "./didYouMean-1.2.1.js", + "repository": { + "type": "git", + "url": "https://github.com/dcporter/didyoumean.js.git" + }, + "bugs": { + "url": "https://github.com/dcporter/didyoumean.js/issues" + }, + "license": "Apache-2.0" +} diff --git a/node_modules/dlv/README.md b/node_modules/dlv/README.md new file mode 100644 index 0000000..6a8429d --- /dev/null +++ b/node_modules/dlv/README.md @@ -0,0 +1,76 @@ +# `dlv(obj, keypath)` [![NPM](https://img.shields.io/npm/v/dlv.svg)](https://npmjs.com/package/dlv) [![Build](https://travis-ci.org/developit/dlv.svg?branch=master)](https://travis-ci.org/developit/dlv) + +> Safely get a dot-notated path within a nested object, with ability to return a default if the full key path does not exist or the value is undefined + + +### Why? + +Smallest possible implementation: only **130 bytes.** + +You could write this yourself, but then you'd have to write [tests]. + +Supports ES Modules, CommonJS and globals. 
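
For intuition, the core of `dlv` is just a loop over a dot-separated (or array) key path. Below is a readable, illustrative sketch of that idea; the published `dist/` builds are equivalent but minified, and the names here are not the library's own:

```js
// Illustrative sketch of the dot-path lookup idea behind dlv (not the shipped source):
function delve(obj, keypath, defaultValue) {
  // Accept either 'a.b.c' or ['a', 'b', 'c'] as the key path.
  const keys = keypath.split ? keypath.split('.') : keypath;
  let current = obj;
  for (let i = 0; i < keys.length; i++) {
    // Stop descending gracefully once a nullish value is reached.
    current = current ? current[keys[i]] : undefined;
  }
  // Fall back to the default only when the resolved value is undefined.
  return current === undefined ? defaultValue : current;
}
```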
+ + +### Installation + +`npm install --save dlv` + + +### Usage + +`delve(object, keypath, [default])` + +```js +import delve from 'dlv'; + +let obj = { + a: { + b: { + c: 1, + d: undefined, + e: null + } + } +}; + +//use string dot notation for keys +delve(obj, 'a.b.c') === 1; + +//or use an array key +delve(obj, ['a', 'b', 'c']) === 1; + +delve(obj, 'a.b') === obj.a.b; + +//returns undefined if the full key path does not exist and no default is specified +delve(obj, 'a.b.f') === undefined; + +//optional third parameter for default if the full key in path is missing +delve(obj, 'a.b.f', 'foo') === 'foo'; + +//or if the key exists but the value is undefined +delve(obj, 'a.b.d', 'foo') === 'foo'; + +//Non-truthy defined values are still returned if they exist at the full keypath +delve(obj, 'a.b.e', 'foo') === null; + +//undefined obj or key returns undefined, unless a default is supplied +delve(undefined, 'a.b.c') === undefined; +delve(undefined, 'a.b.c', 'foo') === 'foo'; +delve(obj, undefined, 'foo') === 'foo'; +``` + + +### Setter Counterparts + +- [dset](https://github.com/lukeed/dset) by [@lukeed](https://github.com/lukeed) is the spiritual "set" counterpart of `dlv` and very fast. +- [bury](https://github.com/kalmbach/bury) by [@kalmbach](https://github.com/kalmbach) does the opposite of `dlv` and is implemented in a very similar manner. + + +### License + +[MIT](https://oss.ninja/mit/developit/) + + +[preact]: https://github.com/developit/preact +[tests]: https://github.com/developit/dlv/blob/master/test.js diff --git a/node_modules/dlv/dist/dlv.es.js b/node_modules/dlv/dist/dlv.es.js new file mode 100644 index 0000000..06b981b --- /dev/null +++ b/node_modules/dlv/dist/dlv.es.js @@ -0,0 +1,2 @@ +export default function(t,e,l,n,r){for(e=e.split?e.split("."):e,n=0;n (http://jasonformat.com)", + "repository": "developit/dlv", + "license": "MIT", + "devDependencies": { + "microbundle": "^0.11.0" + } +} diff --git a/node_modules/fast-glob/LICENSE b/node_modules/fast-glob/LICENSE new file mode 100644 index 0000000..65a9994 --- /dev/null +++ b/node_modules/fast-glob/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Denis Malinochkin + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/fast-glob/README.md b/node_modules/fast-glob/README.md new file mode 100644 index 0000000..393f4fe --- /dev/null +++ b/node_modules/fast-glob/README.md @@ -0,0 +1,795 @@ +# fast-glob + +> It's a very fast and efficient [glob][glob_definition] library for [Node.js][node_js]. 
+ +This package provides methods for traversing the file system and returning pathnames that matched a defined set of a specified pattern according to the rules used by the Unix Bash shell with some simplifications, meanwhile results are returned in **arbitrary order**. Quick, simple, effective. + +## Table of Contents + +
+Details + +* [Highlights](#highlights) +* [Donation](#donation) +* [Old and modern mode](#old-and-modern-mode) +* [Pattern syntax](#pattern-syntax) + * [Basic syntax](#basic-syntax) + * [Advanced syntax](#advanced-syntax) +* [Installation](#installation) +* [API](#api) + * [Asynchronous](#asynchronous) + * [Synchronous](#synchronous) + * [Stream](#stream) + * [patterns](#patterns) + * [[options]](#options) + * [Helpers](#helpers) + * [generateTasks](#generatetaskspatterns-options) + * [isDynamicPattern](#isdynamicpatternpattern-options) + * [escapePath](#escapepathpattern) +* [Options](#options-3) + * [Common](#common) + * [concurrency](#concurrency) + * [cwd](#cwd) + * [deep](#deep) + * [followSymbolicLinks](#followsymboliclinks) + * [fs](#fs) + * [ignore](#ignore) + * [suppressErrors](#suppresserrors) + * [throwErrorOnBrokenSymbolicLink](#throwerroronbrokensymboliclink) + * [Output control](#output-control) + * [absolute](#absolute) + * [markDirectories](#markdirectories) + * [objectMode](#objectmode) + * [onlyDirectories](#onlydirectories) + * [onlyFiles](#onlyfiles) + * [stats](#stats) + * [unique](#unique) + * [Matching control](#matching-control) + * [braceExpansion](#braceexpansion) + * [caseSensitiveMatch](#casesensitivematch) + * [dot](#dot) + * [extglob](#extglob) + * [globstar](#globstar) + * [baseNameMatch](#basenamematch) +* [FAQ](#faq) + * [What is a static or dynamic pattern?](#what-is-a-static-or-dynamic-pattern) + * [How to write patterns on Windows?](#how-to-write-patterns-on-windows) + * [Why are parentheses match wrong?](#why-are-parentheses-match-wrong) + * [How to exclude directory from reading?](#how-to-exclude-directory-from-reading) + * [How to use UNC path?](#how-to-use-unc-path) + * [Compatible with `node-glob`?](#compatible-with-node-glob) +* [Benchmarks](#benchmarks) + * [Server](#server) + * [Nettop](#nettop) +* [Changelog](#changelog) +* [License](#license) + +
+ +## Highlights + +* Fast. Probably the fastest. +* Supports multiple and negative patterns. +* Synchronous, Promise and Stream API. +* Object mode. Can return more than just strings. +* Error-tolerant. + +## Donation + +Do you like this project? Support it by donating, creating an issue or pull request. + +[![Donate](https://img.shields.io/badge/Donate-PayPal-green.svg)][paypal_mrmlnc] + +## Old and modern mode + +This package works in two modes, depending on the environment in which it is used. + +* **Old mode**. Node.js below 10.10 or when the [`stats`](#stats) option is *enabled*. +* **Modern mode**. Node.js 10.10+ and the [`stats`](#stats) option is *disabled*. + +The modern mode is faster. Learn more about the [internal mechanism][nodelib_fs_scandir_old_and_modern_modern]. + +## Pattern syntax + +> :warning: Always use forward-slashes in glob expressions (patterns and [`ignore`](#ignore) option). Use backslashes for escaping characters. + +There is more than one form of syntax: basic and advanced. Below is a brief overview of the supported features. Also pay attention to our [FAQ](#faq). + +> :book: This package uses a [`micromatch`][micromatch] as a library for pattern matching. + +### Basic syntax + +* An asterisk (`*`) — matches everything except slashes (path separators), hidden files (names starting with `.`). +* A double star or globstar (`**`) — matches zero or more directories. +* Question mark (`?`) – matches any single character except slashes (path separators). +* Sequence (`[seq]`) — matches any character in sequence. + +> :book: A few additional words about the [basic matching behavior][picomatch_matching_behavior]. + +Some examples: + +* `src/**/*.js` — matches all files in the `src` directory (any level of nesting) that have the `.js` extension. +* `src/*.??` — matches all files in the `src` directory (only first level of nesting) that have a two-character extension. +* `file-[01].js` — matches files: `file-0.js`, `file-1.js`. + +### Advanced syntax + +* [Escapes characters][micromatch_backslashes] (`\\`) — matching special characters (`$^*+?()[]`) as literals. +* [POSIX character classes][picomatch_posix_brackets] (`[[:digit:]]`). +* [Extended globs][micromatch_extglobs] (`?(pattern-list)`). +* [Bash style brace expansions][micromatch_braces] (`{}`). +* [Regexp character classes][micromatch_regex_character_classes] (`[1-5]`). +* [Regex groups][regular_expressions_brackets] (`(a|b)`). + +> :book: A few additional words about the [advanced matching behavior][micromatch_extended_globbing]. + +Some examples: + +* `src/**/*.{css,scss}` — matches all files in the `src` directory (any level of nesting) that have the `.css` or `.scss` extension. +* `file-[[:digit:]].js` — matches files: `file-0.js`, `file-1.js`, …, `file-9.js`. +* `file-{1..3}.js` — matches files: `file-1.js`, `file-2.js`, `file-3.js`. +* `file-(1|2)` — matches files: `file-1.js`, `file-2.js`. + +## Installation + +```console +npm install fast-glob +``` + +## API + +### Asynchronous + +```js +fg(patterns, [options]) +``` + +Returns a `Promise` with an array of matching entries. + +```js +const fg = require('fast-glob'); + +const entries = await fg(['.editorconfig', '**/index.js'], { dot: true }); + +// ['.editorconfig', 'services/index.js'] +``` + +### Synchronous + +```js +fg.sync(patterns, [options]) +``` + +Returns an array of matching entries. 
+ +```js +const fg = require('fast-glob'); + +const entries = fg.sync(['.editorconfig', '**/index.js'], { dot: true }); + +// ['.editorconfig', 'services/index.js'] +``` + +### Stream + +```js +fg.stream(patterns, [options]) +``` + +Returns a [`ReadableStream`][node_js_stream_readable_streams] when the `data` event will be emitted with matching entry. + +```js +const fg = require('fast-glob'); + +const stream = fg.stream(['.editorconfig', '**/index.js'], { dot: true }); + +for await (const entry of stream) { + // .editorconfig + // services/index.js +} +``` + +#### patterns + +* Required: `true` +* Type: `string | string[]` + +Any correct pattern(s). + +> :1234: [Pattern syntax](#pattern-syntax) +> +> :warning: This package does not respect the order of patterns. First, all the negative patterns are applied, and only then the positive patterns. If you want to get a certain order of records, use sorting or split calls. + +#### [options] + +* Required: `false` +* Type: [`Options`](#options-3) + +See [Options](#options-3) section. + +### Helpers + +#### `generateTasks(patterns, [options])` + +Returns the internal representation of patterns ([`Task`](./src/managers/tasks.ts) is a combining patterns by base directory). + +```js +fg.generateTasks('*'); + +[{ + base: '.', // Parent directory for all patterns inside this task + dynamic: true, // Dynamic or static patterns are in this task + patterns: ['*'], + positive: ['*'], + negative: [] +}] +``` + +##### patterns + +* Required: `true` +* Type: `string | string[]` + +Any correct pattern(s). + +##### [options] + +* Required: `false` +* Type: [`Options`](#options-3) + +See [Options](#options-3) section. + +#### `isDynamicPattern(pattern, [options])` + +Returns `true` if the passed pattern is a dynamic pattern. + +> :1234: [What is a static or dynamic pattern?](#what-is-a-static-or-dynamic-pattern) + +```js +fg.isDynamicPattern('*'); // true +fg.isDynamicPattern('abc'); // false +``` + +##### pattern + +* Required: `true` +* Type: `string` + +Any correct pattern. + +##### [options] + +* Required: `false` +* Type: [`Options`](#options-3) + +See [Options](#options-3) section. + +#### `escapePath(pattern)` + +Returns a path with escaped special characters (`*?|(){}[]`, `!` at the beginning of line, `@+!` before the opening parenthesis). + +```js +fg.escapePath('!abc'); // \\!abc +fg.escapePath('C:/Program Files (x86)'); // C:/Program Files \\(x86\\) +``` + +##### pattern + +* Required: `true` +* Type: `string` + +Any string, for example, a path to a file. + +## Options + +### Common options + +#### concurrency + +* Type: `number` +* Default: `os.cpus().length` + +Specifies the maximum number of concurrent requests from a reader to read directories. + +> :book: The higher the number, the higher the performance and load on the file system. If you want to read in quiet mode, set the value to a comfortable number or `1`. + +#### cwd + +* Type: `string` +* Default: `process.cwd()` + +The current working directory in which to search. + +#### deep + +* Type: `number` +* Default: `Infinity` + +Specifies the maximum depth of a read directory relative to the start directory. 
+ +For example, you have the following tree: + +```js +dir/ +└── one/ // 1 + └── two/ // 2 + └── file.js // 3 +``` + +```js +// With base directory +fg.sync('dir/**', { onlyFiles: false, deep: 1 }); // ['dir/one'] +fg.sync('dir/**', { onlyFiles: false, deep: 2 }); // ['dir/one', 'dir/one/two'] + +// With cwd option +fg.sync('**', { onlyFiles: false, cwd: 'dir', deep: 1 }); // ['one'] +fg.sync('**', { onlyFiles: false, cwd: 'dir', deep: 2 }); // ['one', 'one/two'] +``` + +> :book: If you specify a pattern with some base directory, this directory will not participate in the calculation of the depth of the found directories. Think of it as a [`cwd`](#cwd) option. + +#### followSymbolicLinks + +* Type: `boolean` +* Default: `true` + +Indicates whether to traverse descendants of symbolic link directories when expanding `**` patterns. + +> :book: Note that this option does not affect the base directory of the pattern. For example, if `./a` is a symlink to directory `./b` and you specified `['./a**', './b/**']` patterns, then directory `./a` will still be read. + +> :book: If the [`stats`](#stats) option is specified, the information about the symbolic link (`fs.lstat`) will be replaced with information about the entry (`fs.stat`) behind it. + +#### fs + +* Type: `FileSystemAdapter` +* Default: `fs.*` + +Custom implementation of methods for working with the file system. + +```ts +export interface FileSystemAdapter { + lstat?: typeof fs.lstat; + stat?: typeof fs.stat; + lstatSync?: typeof fs.lstatSync; + statSync?: typeof fs.statSync; + readdir?: typeof fs.readdir; + readdirSync?: typeof fs.readdirSync; +} +``` + +#### ignore + +* Type: `string[]` +* Default: `[]` + +An array of glob patterns to exclude matches. This is an alternative way to use negative patterns. + +```js +dir/ +├── package-lock.json +└── package.json +``` + +```js +fg.sync(['*.json', '!package-lock.json']); // ['package.json'] +fg.sync('*.json', { ignore: ['package-lock.json'] }); // ['package.json'] +``` + +#### suppressErrors + +* Type: `boolean` +* Default: `false` + +By default this package suppress only `ENOENT` errors. Set to `true` to suppress any error. + +> :book: Can be useful when the directory has entries with a special level of access. + +#### throwErrorOnBrokenSymbolicLink + +* Type: `boolean` +* Default: `false` + +Throw an error when symbolic link is broken if `true` or safely return `lstat` call if `false`. + +> :book: This option has no effect on errors when reading the symbolic link directory. + +### Output control + +#### absolute + +* Type: `boolean` +* Default: `false` + +Return the absolute path for entries. + +```js +fg.sync('*.js', { absolute: false }); // ['index.js'] +fg.sync('*.js', { absolute: true }); // ['/home/user/index.js'] +``` + +> :book: This option is required if you want to use negative patterns with absolute path, for example, `!${__dirname}/*.js`. + +#### markDirectories + +* Type: `boolean` +* Default: `false` + +Mark the directory path with the final slash. + +```js +fg.sync('*', { onlyFiles: false, markDirectories: false }); // ['index.js', 'controllers'] +fg.sync('*', { onlyFiles: false, markDirectories: true }); // ['index.js', 'controllers/'] +``` + +#### objectMode + +* Type: `boolean` +* Default: `false` + +Returns objects (instead of strings) describing entries. 
+
+```js
+fg.sync('*', { objectMode: false }); // ['src/index.js']
+fg.sync('*', { objectMode: true }); // [{ name: 'index.js', path: 'src/index.js', dirent: <fs.Dirent> }]
+```
+
+The object has the following fields:
+
+* name (`string`) — the last part of the path (basename)
+* path (`string`) — full path relative to the pattern base directory
+* dirent ([`fs.Dirent`][node_js_fs_class_fs_dirent]) — instance of `fs.Dirent`
+
+> :book: An object is an internal representation of an entry, so getting it does not affect performance.
+
+#### onlyDirectories
+
+* Type: `boolean`
+* Default: `false`
+
+Return only directories.
+
+```js
+fg.sync('*', { onlyDirectories: false }); // ['index.js', 'src']
+fg.sync('*', { onlyDirectories: true }); // ['src']
+```
+
+> :book: If `true`, the [`onlyFiles`](#onlyfiles) option is automatically `false`.
+
+#### onlyFiles
+
+* Type: `boolean`
+* Default: `true`
+
+Return only files.
+
+```js
+fg.sync('*', { onlyFiles: false }); // ['index.js', 'src']
+fg.sync('*', { onlyFiles: true }); // ['index.js']
+```
+
+#### stats
+
+* Type: `boolean`
+* Default: `false`
+
+Enables an [object mode](#objectmode) with an additional field:
+
+* stats ([`fs.Stats`][node_js_fs_class_fs_stats]) — instance of `fs.Stats`
+
+```js
+fg.sync('*', { stats: false }); // ['src/index.js']
+fg.sync('*', { stats: true }); // [{ name: 'index.js', path: 'src/index.js', dirent: <fs.Dirent>, stats: <fs.Stats> }]
+```
+
+> :book: Returns `fs.stat` instead of `fs.lstat` for symbolic links when the [`followSymbolicLinks`](#followsymboliclinks) option is specified.
+>
+> :warning: Unlike [object mode](#objectmode) this mode requires additional calls to the file system. On average, this mode is at least twice as slow. See [old and modern mode](#old-and-modern-mode) for more details.
+
+#### unique
+
+* Type: `boolean`
+* Default: `true`
+
+Ensures that the returned entries are unique.
+
+```js
+fg.sync(['*.json', 'package.json'], { unique: false }); // ['package.json', 'package.json']
+fg.sync(['*.json', 'package.json'], { unique: true }); // ['package.json']
+```
+
+If `true` and duplicate entries are found, only the first occurrence is returned.
+
+### Matching control
+
+#### braceExpansion
+
+* Type: `boolean`
+* Default: `true`
+
+Enables Bash-like brace expansion.
+
+> :1234: [Syntax description][bash_hackers_syntax_expansion_brace] or a more [detailed description][micromatch_braces].
+
+```js
+dir/
+├── abd
+├── acd
+└── a{b,c}d
+```
+
+```js
+fg.sync('a{b,c}d', { braceExpansion: false }); // ['a{b,c}d']
+fg.sync('a{b,c}d', { braceExpansion: true }); // ['abd', 'acd']
+```
+
+#### caseSensitiveMatch
+
+* Type: `boolean`
+* Default: `true`
+
+Enables a [case-sensitive][wikipedia_case_sensitivity] mode for matching files.
+
+```js
+dir/
+├── file.txt
+└── File.txt
+```
+
+```js
+fg.sync('file.txt', { caseSensitiveMatch: false }); // ['file.txt', 'File.txt']
+fg.sync('file.txt', { caseSensitiveMatch: true }); // ['file.txt']
+```
+
+#### dot
+
+* Type: `boolean`
+* Default: `false`
+
+Allow patterns to match entries that begin with a period (`.`).
+
+> :book: Note that an explicit dot in a portion of the pattern will always match dot files.
+
+```js
+dir/
+├── .editorconfig
+└── package.json
+```
+
+```js
+fg.sync('*', { dot: false }); // ['package.json']
+fg.sync('*', { dot: true }); // ['.editorconfig', 'package.json']
+```
+
+#### extglob
+
+* Type: `boolean`
+* Default: `true`
+
+Enables Bash-like `extglob` functionality.
+
+> :1234: [Syntax description][micromatch_extglobs].
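+
+As a hedged aside (the `src` layout and extensions are assumptions, and the README's own `+(json|md)` example follows below), extglob groups are handy for collecting several extensions in one pass:
+
+```js
+const fg = require('fast-glob');
+
+// `@(…)` matches exactly one of the listed alternatives.
+const sources = fg.sync('src/**/*.@(js|ts)');
+
+// With extglob disabled the same pattern is no longer treated as a group
+// and will most likely match nothing.
+const literal = fg.sync('src/**/*.@(js|ts)', { extglob: false });
+```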
+
+```js
+dir/
+├── README.md
+└── package.json
+```
+
+```js
+fg.sync('*.+(json|md)', { extglob: false }); // []
+fg.sync('*.+(json|md)', { extglob: true }); // ['README.md', 'package.json']
+```
+
+#### globstar
+
+* Type: `boolean`
+* Default: `true`
+
+Enables recursive matching for patterns that contain `**`. If `false`, `**` behaves exactly like `*`.
+
+```js
+dir/
+└── a
+    └── b
+```
+
+```js
+fg.sync('**', { onlyFiles: false, globstar: false }); // ['a']
+fg.sync('**', { onlyFiles: false, globstar: true }); // ['a', 'a/b']
+```
+
+#### baseNameMatch
+
+* Type: `boolean`
+* Default: `false`
+
+If set to `true`, then patterns without slashes will be matched against the basename of the path if it contains slashes.
+
+```js
+dir/
+└── one/
+    └── file.md
+```
+
+```js
+fg.sync('*.md', { baseNameMatch: false }); // []
+fg.sync('*.md', { baseNameMatch: true }); // ['one/file.md']
+```
+
+## FAQ
+
+## What is a static or dynamic pattern?
+
+All patterns can be divided into two types:
+
+* **static**. A pattern is considered static if it can be used to get an entry on the file system without using matching mechanisms. For example, the `file.js` pattern is a static pattern because we can just verify that it exists on the file system.
+* **dynamic**. A pattern is considered dynamic if it cannot be used directly to find occurrences without using a matching mechanism. For example, the `*` pattern is a dynamic pattern because we cannot use this pattern directly.
+
+A pattern is considered dynamic if it contains the following characters (`…` — any characters or their absence) or options:
+
+* The [`caseSensitiveMatch`](#casesensitivematch) option is disabled
+* `\\` (the escape character)
+* `*`, `?`, `!` (at the beginning of the pattern)
+* `[…]`
+* `(…|…)`
+* `@(…)`, `!(…)`, `*(…)`, `?(…)`, `+(…)` (respects the [`extglob`](#extglob) option)
+* `{…,…}`, `{…..…}` (respects the [`braceExpansion`](#braceexpansion) option)
+
+## How to write patterns on Windows?
+
+Always use forward-slashes in glob expressions (patterns and the [`ignore`](#ignore) option). Use backslashes only for escaping characters. With the [`cwd`](#cwd) option you can use whichever format is convenient.
+
+**Bad**
+
+```ts
+[
+    'directory\\*',
+    path.join(process.cwd(), '**')
+]
+```
+
+**Good**
+
+```ts
+[
+    'directory/*',
+    path.join(process.cwd(), '**').replace(/\\/g, '/')
+]
+```
+
+> :book: Use the [`normalize-path`][npm_normalize_path] or the [`unixify`][npm_unixify] package to convert a Windows-style path to a Unix-style path.
+
+Read more about [matching with backslashes][micromatch_backslashes].
+
+## Why do parentheses match incorrectly?
+
+```js
+dir/
+└── (special-*file).txt
+```
+
+```js
+fg.sync(['(special-*file).txt']) // []
+```
+
+This behavior comes from Bash. You need to escape special characters:
+
+```js
+fg.sync(['\\(special-*file\\).txt']) // ['(special-*file).txt']
+```
+
+Read more about [matching special characters as literals][picomatch_matching_special_characters_as_literals].
+
+## How to exclude a directory from reading?
+
+You can use a negative pattern like `!**/node_modules` or `!**/node_modules/**`. You can also use the [`ignore`](#ignore) option. Just look at the example below.
+
+```js
+first/
+├── file.md
+└── second/
+    └── file.txt
+```
+
+If you don't want to read the `second` directory, you must write the following pattern: `!**/second` or `!**/second/**`.
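+
+A hedged sketch of the same exclusion through the promise-based API (the README's synchronous example for this tree follows right below):
+
+```js
+const fg = require('fast-glob');
+
+(async () => {
+    // `!**/second` keeps fast-glob from descending into `second` at all.
+    const entries = await fg(['**/*.md', '!**/second']);
+    console.log(entries); // expected for the tree above: ['first/file.md']
+})();
+```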
+
+```js
+fg.sync(['**/*.md', '!**/second']); // ['first/file.md']
+fg.sync(['**/*.md'], { ignore: ['**/second/**'] }); // ['first/file.md']
+```
+
+> :warning: When you write `!**/second/**/*` it means that the directory will be **read**, but none of its entries will be included in the results.
+
+Keep in mind that if you write the pattern so that it excludes the directory itself (for example, `!**/second`), the directory will not be read under any circumstances.
+
+## How to use UNC paths?
+
+You cannot use [Uniform Naming Convention (UNC)][unc_path] paths as patterns (due to syntax), but you can use them as the [`cwd`](#cwd) directory.
+
+```ts
+fg.sync('*', { cwd: '\\\\?\\C:\\Python27' /* or //?/C:/Python27 */ });
+fg.sync('Python27/*', { cwd: '\\\\?\\C:\\' /* or //?/C:/ */ });
+```
+
+## Compatible with `node-glob`?
+
+| node-glob    | fast-glob |
+| :----------: | :-------: |
+| `cwd`        | [`cwd`](#cwd) |
+| `root`       | – |
+| `dot`        | [`dot`](#dot) |
+| `nomount`    | – |
+| `mark`       | [`markDirectories`](#markdirectories) |
+| `nosort`     | – |
+| `nounique`   | [`unique`](#unique) |
+| `nobrace`    | [`braceExpansion`](#braceexpansion) |
+| `noglobstar` | [`globstar`](#globstar) |
+| `noext`      | [`extglob`](#extglob) |
+| `nocase`     | [`caseSensitiveMatch`](#casesensitivematch) |
+| `matchBase`  | [`baseNameMatch`](#basenamematch) |
+| `nodir`      | [`onlyFiles`](#onlyfiles) |
+| `ignore`     | [`ignore`](#ignore) |
+| `follow`     | [`followSymbolicLinks`](#followsymboliclinks) |
+| `realpath`   | – |
+| `absolute`   | [`absolute`](#absolute) |
+
+## Benchmarks
+
+### Server
+
+Link: [Vultr Bare Metal][vultr_pricing_baremetal]
+
+* Processor: E3-1270v6 (8 CPU)
+* RAM: 32GB
+* Disk: SSD ([Intel DC S3520 SSDSC2BB240G7][intel_ssd])
+
+You can see the results for the latest release [here][github_gist_benchmark_server].
+
+### Nettop
+
+Link: [Zotac bi323][zotac_bi323]
+
+* Processor: Intel N3150 (4 CPU)
+* RAM: 8GB
+* Disk: SSD ([Silicon Power SP060GBSS3S55S25][silicon_power_ssd])
+
+You can see the results for the latest release [here][github_gist_benchmark_nettop].
+
+## Changelog
+
+See the [Releases section of our GitHub project][github_releases] for the changelog of each release version.
+
+## License
+
+This software is released under the terms of the MIT license.
+ +[bash_hackers_syntax_expansion_brace]: https://wiki.bash-hackers.org/syntax/expansion/brace +[github_gist_benchmark_nettop]: https://gist.github.com/mrmlnc/f06246b197f53c356895fa35355a367c#file-fg-benchmark-nettop-product-txt +[github_gist_benchmark_server]: https://gist.github.com/mrmlnc/f06246b197f53c356895fa35355a367c#file-fg-benchmark-server-product-txt +[github_releases]: https://github.com/mrmlnc/fast-glob/releases +[glob_definition]: https://en.wikipedia.org/wiki/Glob_(programming) +[glob_linux_man]: http://man7.org/linux/man-pages/man3/glob.3.html +[intel_ssd]: https://ark.intel.com/content/www/us/en/ark/products/93012/intel-ssd-dc-s3520-series-240gb-2-5in-sata-6gb-s-3d1-mlc.html +[micromatch_backslashes]: https://github.com/micromatch/micromatch#backslashes +[micromatch_braces]: https://github.com/micromatch/braces +[micromatch_extended_globbing]: https://github.com/micromatch/micromatch#extended-globbing +[micromatch_extglobs]: https://github.com/micromatch/micromatch#extglobs +[micromatch_regex_character_classes]: https://github.com/micromatch/micromatch#regex-character-classes +[micromatch]: https://github.com/micromatch/micromatch +[node_js_fs_class_fs_dirent]: https://nodejs.org/api/fs.html#fs_class_fs_dirent +[node_js_fs_class_fs_stats]: https://nodejs.org/api/fs.html#fs_class_fs_stats +[node_js_stream_readable_streams]: https://nodejs.org/api/stream.html#stream_readable_streams +[node_js]: https://nodejs.org/en +[nodelib_fs_scandir_old_and_modern_modern]: https://github.com/nodelib/nodelib/blob/master/packages/fs/fs.scandir/README.md#old-and-modern-mode +[npm_normalize_path]: https://www.npmjs.com/package/normalize-path +[npm_unixify]: https://www.npmjs.com/package/unixify +[paypal_mrmlnc]:https://paypal.me/mrmlnc +[picomatch_matching_behavior]: https://github.com/micromatch/picomatch#matching-behavior-vs-bash +[picomatch_matching_special_characters_as_literals]: https://github.com/micromatch/picomatch#matching-special-characters-as-literals +[picomatch_posix_brackets]: https://github.com/micromatch/picomatch#posix-brackets +[regular_expressions_brackets]: https://www.regular-expressions.info/brackets.html +[silicon_power_ssd]: https://www.silicon-power.com/web/product-1 +[unc_path]: https://docs.microsoft.com/en-us/openspecs/windows_protocols/ms-dtyp/62e862f4-2a51-452e-8eeb-dc4ff5ee33cc +[vultr_pricing_baremetal]: https://www.vultr.com/pricing/baremetal +[wikipedia_case_sensitivity]: https://en.wikipedia.org/wiki/Case_sensitivity +[zotac_bi323]: https://www.zotac.com/ee/product/mini_pcs/zbox-bi323 diff --git a/node_modules/fast-glob/node_modules/glob-parent/CHANGELOG.md b/node_modules/fast-glob/node_modules/glob-parent/CHANGELOG.md new file mode 100644 index 0000000..fb9de96 --- /dev/null +++ b/node_modules/fast-glob/node_modules/glob-parent/CHANGELOG.md @@ -0,0 +1,110 @@ +### [5.1.2](https://github.com/gulpjs/glob-parent/compare/v5.1.1...v5.1.2) (2021-03-06) + + +### Bug Fixes + +* eliminate ReDoS ([#36](https://github.com/gulpjs/glob-parent/issues/36)) ([f923116](https://github.com/gulpjs/glob-parent/commit/f9231168b0041fea3f8f954b3cceb56269fc6366)) + +### [5.1.1](https://github.com/gulpjs/glob-parent/compare/v5.1.0...v5.1.1) (2021-01-27) + + +### Bug Fixes + +* unescape exclamation mark ([#26](https://github.com/gulpjs/glob-parent/issues/26)) ([a98874f](https://github.com/gulpjs/glob-parent/commit/a98874f1a59e407f4fb1beb0db4efa8392da60bb)) + +## [5.1.0](https://github.com/gulpjs/glob-parent/compare/v5.0.0...v5.1.0) (2021-01-27) + + +### Features + +* add 
`flipBackslashes` option to disable auto conversion of slashes (closes [#24](https://github.com/gulpjs/glob-parent/issues/24)) ([#25](https://github.com/gulpjs/glob-parent/issues/25)) ([eecf91d](https://github.com/gulpjs/glob-parent/commit/eecf91d5e3834ed78aee39c4eaaae654d76b87b3)) + +## [5.0.0](https://github.com/gulpjs/glob-parent/compare/v4.0.0...v5.0.0) (2021-01-27) + + +### ⚠ BREAKING CHANGES + +* Drop support for node <6 & bump dependencies + +### Miscellaneous Chores + +* Drop support for node <6 & bump dependencies ([896c0c0](https://github.com/gulpjs/glob-parent/commit/896c0c00b4e7362f60b96e7fc295ae929245255a)) + +## [4.0.0](https://github.com/gulpjs/glob-parent/compare/v3.1.0...v4.0.0) (2021-01-27) + + +### ⚠ BREAKING CHANGES + +* question marks are valid path characters on Windows so avoid flagging as a glob when alone +* Update is-glob dependency + +### Features + +* hoist regexps and strings for performance gains ([4a80667](https://github.com/gulpjs/glob-parent/commit/4a80667c69355c76a572a5892b0f133c8e1f457e)) +* question marks are valid path characters on Windows so avoid flagging as a glob when alone ([2a551dd](https://github.com/gulpjs/glob-parent/commit/2a551dd0dc3235e78bf3c94843d4107072d17841)) +* Update is-glob dependency ([e41fcd8](https://github.com/gulpjs/glob-parent/commit/e41fcd895d1f7bc617dba45c9d935a7949b9c281)) + +## [3.1.0](https://github.com/gulpjs/glob-parent/compare/v3.0.1...v3.1.0) (2021-01-27) + + +### Features + +* allow basic win32 backslash use ([272afa5](https://github.com/gulpjs/glob-parent/commit/272afa5fd070fc0f796386a5993d4ee4a846988b)) +* handle extglobs (parentheses) containing separators ([7db1bdb](https://github.com/gulpjs/glob-parent/commit/7db1bdb0756e55fd14619e8ce31aa31b17b117fd)) +* new approach to braces/brackets handling ([8269bd8](https://github.com/gulpjs/glob-parent/commit/8269bd89290d99fac9395a354fb56fdcdb80f0be)) +* pre-process braces/brackets sections ([9ef8a87](https://github.com/gulpjs/glob-parent/commit/9ef8a87f66b1a43d0591e7a8e4fc5a18415ee388)) +* preserve escaped brace/bracket at end of string ([8cfb0ba](https://github.com/gulpjs/glob-parent/commit/8cfb0ba84202d51571340dcbaf61b79d16a26c76)) + + +### Bug Fixes + +* trailing escaped square brackets ([99ec9fe](https://github.com/gulpjs/glob-parent/commit/99ec9fecc60ee488ded20a94dd4f18b4f55c4ccf)) + +### [3.0.1](https://github.com/gulpjs/glob-parent/compare/v3.0.0...v3.0.1) (2021-01-27) + + +### Features + +* use path-dirname ponyfill ([cdbea5f](https://github.com/gulpjs/glob-parent/commit/cdbea5f32a58a54e001a75ddd7c0fccd4776aacc)) + + +### Bug Fixes + +* unescape glob-escaped dirnames on output ([598c533](https://github.com/gulpjs/glob-parent/commit/598c533bdf49c1428bc063aa9b8db40c5a86b030)) + +## [3.0.0](https://github.com/gulpjs/glob-parent/compare/v2.0.0...v3.0.0) (2021-01-27) + + +### ⚠ BREAKING CHANGES + +* update is-glob dependency + +### Features + +* update is-glob dependency ([5c5f8ef](https://github.com/gulpjs/glob-parent/commit/5c5f8efcee362a8e7638cf8220666acd8784f6bd)) + +## [2.0.0](https://github.com/gulpjs/glob-parent/compare/v1.3.0...v2.0.0) (2021-01-27) + + +### Features + +* move up to dirname regardless of glob characters ([f97fb83](https://github.com/gulpjs/glob-parent/commit/f97fb83be2e0a9fc8d3b760e789d2ecadd6aa0c2)) + +## [1.3.0](https://github.com/gulpjs/glob-parent/compare/v1.2.0...v1.3.0) (2021-01-27) + +## [1.2.0](https://github.com/gulpjs/glob-parent/compare/v1.1.0...v1.2.0) (2021-01-27) + + +### Reverts + +* feat: make regex test strings smaller 
([dc80fa9](https://github.com/gulpjs/glob-parent/commit/dc80fa9658dca20549cfeba44bbd37d5246fcce0)) + +## [1.1.0](https://github.com/gulpjs/glob-parent/compare/v1.0.0...v1.1.0) (2021-01-27) + + +### Features + +* make regex test strings smaller ([cd83220](https://github.com/gulpjs/glob-parent/commit/cd832208638f45169f986d80fcf66e401f35d233)) + +## 1.0.0 (2021-01-27) + diff --git a/node_modules/fast-glob/node_modules/glob-parent/LICENSE b/node_modules/fast-glob/node_modules/glob-parent/LICENSE new file mode 100644 index 0000000..63222d7 --- /dev/null +++ b/node_modules/fast-glob/node_modules/glob-parent/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) 2015, 2019 Elan Shanker + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/fast-glob/node_modules/glob-parent/README.md b/node_modules/fast-glob/node_modules/glob-parent/README.md new file mode 100644 index 0000000..36a2793 --- /dev/null +++ b/node_modules/fast-glob/node_modules/glob-parent/README.md @@ -0,0 +1,137 @@ +

+ + + +

+ +# glob-parent + +[![NPM version][npm-image]][npm-url] [![Downloads][downloads-image]][npm-url] [![Azure Pipelines Build Status][azure-pipelines-image]][azure-pipelines-url] [![Travis Build Status][travis-image]][travis-url] [![AppVeyor Build Status][appveyor-image]][appveyor-url] [![Coveralls Status][coveralls-image]][coveralls-url] [![Gitter chat][gitter-image]][gitter-url] + +Extract the non-magic parent path from a glob string. + +## Usage + +```js +var globParent = require('glob-parent'); + +globParent('path/to/*.js'); // 'path/to' +globParent('/root/path/to/*.js'); // '/root/path/to' +globParent('/*.js'); // '/' +globParent('*.js'); // '.' +globParent('**/*.js'); // '.' +globParent('path/{to,from}'); // 'path' +globParent('path/!(to|from)'); // 'path' +globParent('path/?(to|from)'); // 'path' +globParent('path/+(to|from)'); // 'path' +globParent('path/*(to|from)'); // 'path' +globParent('path/@(to|from)'); // 'path' +globParent('path/**/*'); // 'path' + +// if provided a non-glob path, returns the nearest dir +globParent('path/foo/bar.js'); // 'path/foo' +globParent('path/foo/'); // 'path/foo' +globParent('path/foo'); // 'path' (see issue #3 for details) +``` + +## API + +### `globParent(maybeGlobString, [options])` + +Takes a string and returns the part of the path before the glob begins. Be aware of Escaping rules and Limitations below. + +#### options + +```js +{ + // Disables the automatic conversion of slashes for Windows + flipBackslashes: true +} +``` + +## Escaping + +The following characters have special significance in glob patterns and must be escaped if you want them to be treated as regular path characters: + +- `?` (question mark) unless used as a path segment alone +- `*` (asterisk) +- `|` (pipe) +- `(` (opening parenthesis) +- `)` (closing parenthesis) +- `{` (opening curly brace) +- `}` (closing curly brace) +- `[` (opening bracket) +- `]` (closing bracket) + +**Example** + +```js +globParent('foo/[bar]/') // 'foo' +globParent('foo/\\[bar]/') // 'foo/[bar]' +``` + +## Limitations + +### Braces & Brackets +This library attempts a quick and imperfect method of determining which path +parts have glob magic without fully parsing/lexing the pattern. There are some +advanced use cases that can trip it up, such as nested braces where the outer +pair is escaped and the inner one contains a path separator. If you find +yourself in the unlikely circumstance of being affected by this or need to +ensure higher-fidelity glob handling in your library, it is recommended that you +pre-process your input with [expand-braces] and/or [expand-brackets]. + +### Windows +Backslashes are not valid path separators for globs. If a path with backslashes +is provided anyway, for simple cases, glob-parent will replace the path +separator for you and return the non-glob parent path (now with +forward-slashes, which are still valid as Windows path separators). + +This cannot be used in conjunction with escape characters. + +```js +// BAD +globParent('C:\\Program Files \\(x86\\)\\*.ext') // 'C:/Program Files /(x86/)' + +// GOOD +globParent('C:/Program Files\\(x86\\)/*.ext') // 'C:/Program Files (x86)' +``` + +If you are using escape characters for a pattern without path parts (i.e. +relative to `cwd`), prefix with `./` to avoid confusing glob-parent. + +```js +// BAD +globParent('foo \\[bar]') // 'foo ' +globParent('foo \\[bar]*') // 'foo ' + +// GOOD +globParent('./foo \\[bar]') // 'foo [bar]' +globParent('./foo \\[bar]*') // '.' 
+``` + +## License + +ISC + +[expand-braces]: https://github.com/jonschlinkert/expand-braces +[expand-brackets]: https://github.com/jonschlinkert/expand-brackets + +[downloads-image]: https://img.shields.io/npm/dm/glob-parent.svg +[npm-url]: https://www.npmjs.com/package/glob-parent +[npm-image]: https://img.shields.io/npm/v/glob-parent.svg + +[azure-pipelines-url]: https://dev.azure.com/gulpjs/gulp/_build/latest?definitionId=2&branchName=master +[azure-pipelines-image]: https://dev.azure.com/gulpjs/gulp/_apis/build/status/glob-parent?branchName=master + +[travis-url]: https://travis-ci.org/gulpjs/glob-parent +[travis-image]: https://img.shields.io/travis/gulpjs/glob-parent.svg?label=travis-ci + +[appveyor-url]: https://ci.appveyor.com/project/gulpjs/glob-parent +[appveyor-image]: https://img.shields.io/appveyor/ci/gulpjs/glob-parent.svg?label=appveyor + +[coveralls-url]: https://coveralls.io/r/gulpjs/glob-parent +[coveralls-image]: https://img.shields.io/coveralls/gulpjs/glob-parent/master.svg + +[gitter-url]: https://gitter.im/gulpjs/gulp +[gitter-image]: https://badges.gitter.im/gulpjs/gulp.svg diff --git a/node_modules/fast-glob/node_modules/glob-parent/index.js b/node_modules/fast-glob/node_modules/glob-parent/index.js new file mode 100644 index 0000000..09e257e --- /dev/null +++ b/node_modules/fast-glob/node_modules/glob-parent/index.js @@ -0,0 +1,42 @@ +'use strict'; + +var isGlob = require('is-glob'); +var pathPosixDirname = require('path').posix.dirname; +var isWin32 = require('os').platform() === 'win32'; + +var slash = '/'; +var backslash = /\\/g; +var enclosure = /[\{\[].*[\}\]]$/; +var globby = /(^|[^\\])([\{\[]|\([^\)]+$)/; +var escaped = /\\([\!\*\?\|\[\]\(\)\{\}])/g; + +/** + * @param {string} str + * @param {Object} opts + * @param {boolean} [opts.flipBackslashes=true] + * @returns {string} + */ +module.exports = function globParent(str, opts) { + var options = Object.assign({ flipBackslashes: true }, opts); + + // flip windows path separators + if (options.flipBackslashes && isWin32 && str.indexOf(slash) < 0) { + str = str.replace(backslash, slash); + } + + // special case for strings ending in enclosure containing path separator + if (enclosure.test(str)) { + str += slash; + } + + // preserves full path in case of trailing path separator + str += 'a'; + + // remove path parts that are globby + do { + str = pathPosixDirname(str); + } while (isGlob(str) || globby.test(str)); + + // remove escape chars and return result + return str.replace(escaped, '$1'); +}; diff --git a/node_modules/fast-glob/node_modules/glob-parent/package.json b/node_modules/fast-glob/node_modules/glob-parent/package.json new file mode 100644 index 0000000..125c971 --- /dev/null +++ b/node_modules/fast-glob/node_modules/glob-parent/package.json @@ -0,0 +1,48 @@ +{ + "name": "glob-parent", + "version": "5.1.2", + "description": "Extract the non-magic parent path from a glob string.", + "author": "Gulp Team (https://gulpjs.com/)", + "contributors": [ + "Elan Shanker (https://github.com/es128)", + "Blaine Bublitz " + ], + "repository": "gulpjs/glob-parent", + "license": "ISC", + "engines": { + "node": ">= 6" + }, + "main": "index.js", + "files": [ + "LICENSE", + "index.js" + ], + "scripts": { + "lint": "eslint .", + "pretest": "npm run lint", + "test": "nyc mocha --async-only", + "azure-pipelines": "nyc mocha --async-only --reporter xunit -O output=test.xunit", + "coveralls": "nyc report --reporter=text-lcov | coveralls" + }, + "dependencies": { + "is-glob": "^4.0.1" + }, + "devDependencies": { + 
"coveralls": "^3.0.11", + "eslint": "^2.13.1", + "eslint-config-gulp": "^3.0.1", + "expect": "^1.20.2", + "mocha": "^6.0.2", + "nyc": "^13.3.0" + }, + "keywords": [ + "glob", + "parent", + "strip", + "path", + "dirname", + "directory", + "base", + "wildcard" + ] +} diff --git a/node_modules/fast-glob/out/index.d.ts b/node_modules/fast-glob/out/index.d.ts new file mode 100644 index 0000000..553e514 --- /dev/null +++ b/node_modules/fast-glob/out/index.d.ts @@ -0,0 +1,27 @@ +/// +import * as taskManager from './managers/tasks'; +import { Options as OptionsInternal } from './settings'; +import { Entry as EntryInternal, FileSystemAdapter as FileSystemAdapterInternal, Pattern as PatternInternal } from './types'; +declare type EntryObjectModePredicate = { + [TKey in keyof Pick]-?: true; +}; +declare type EntryStatsPredicate = { + [TKey in keyof Pick]-?: true; +}; +declare type EntryObjectPredicate = EntryObjectModePredicate | EntryStatsPredicate; +declare function FastGlob(source: PatternInternal | PatternInternal[], options: OptionsInternal & EntryObjectPredicate): Promise; +declare function FastGlob(source: PatternInternal | PatternInternal[], options?: OptionsInternal): Promise; +declare namespace FastGlob { + type Options = OptionsInternal; + type Entry = EntryInternal; + type Task = taskManager.Task; + type Pattern = PatternInternal; + type FileSystemAdapter = FileSystemAdapterInternal; + function sync(source: PatternInternal | PatternInternal[], options: OptionsInternal & EntryObjectPredicate): EntryInternal[]; + function sync(source: PatternInternal | PatternInternal[], options?: OptionsInternal): string[]; + function stream(source: PatternInternal | PatternInternal[], options?: OptionsInternal): NodeJS.ReadableStream; + function generateTasks(source: PatternInternal | PatternInternal[], options?: OptionsInternal): Task[]; + function isDynamicPattern(source: PatternInternal, options?: OptionsInternal): boolean; + function escapePath(source: PatternInternal): PatternInternal; +} +export = FastGlob; diff --git a/node_modules/fast-glob/out/index.js b/node_modules/fast-glob/out/index.js new file mode 100644 index 0000000..24ebffa --- /dev/null +++ b/node_modules/fast-glob/out/index.js @@ -0,0 +1,68 @@ +"use strict"; +const taskManager = require("./managers/tasks"); +const patternManager = require("./managers/patterns"); +const async_1 = require("./providers/async"); +const stream_1 = require("./providers/stream"); +const sync_1 = require("./providers/sync"); +const settings_1 = require("./settings"); +const utils = require("./utils"); +async function FastGlob(source, options) { + assertPatternsInput(source); + const works = getWorks(source, async_1.default, options); + const result = await Promise.all(works); + return utils.array.flatten(result); +} +// https://github.com/typescript-eslint/typescript-eslint/issues/60 +// eslint-disable-next-line no-redeclare +(function (FastGlob) { + function sync(source, options) { + assertPatternsInput(source); + const works = getWorks(source, sync_1.default, options); + return utils.array.flatten(works); + } + FastGlob.sync = sync; + function stream(source, options) { + assertPatternsInput(source); + const works = getWorks(source, stream_1.default, options); + /** + * The stream returned by the provider cannot work with an asynchronous iterator. + * To support asynchronous iterators, regardless of the number of tasks, we always multiplex streams. + * This affects performance (+25%). I don't see best solution right now. 
+ */ + return utils.stream.merge(works); + } + FastGlob.stream = stream; + function generateTasks(source, options) { + assertPatternsInput(source); + const patterns = patternManager.transform([].concat(source)); + const settings = new settings_1.default(options); + return taskManager.generate(patterns, settings); + } + FastGlob.generateTasks = generateTasks; + function isDynamicPattern(source, options) { + assertPatternsInput(source); + const settings = new settings_1.default(options); + return utils.pattern.isDynamicPattern(source, settings); + } + FastGlob.isDynamicPattern = isDynamicPattern; + function escapePath(source) { + assertPatternsInput(source); + return utils.path.escape(source); + } + FastGlob.escapePath = escapePath; +})(FastGlob || (FastGlob = {})); +function getWorks(source, _Provider, options) { + const patterns = patternManager.transform([].concat(source)); + const settings = new settings_1.default(options); + const tasks = taskManager.generate(patterns, settings); + const provider = new _Provider(settings); + return tasks.map(provider.read, provider); +} +function assertPatternsInput(input) { + const source = [].concat(input); + const isValidSource = source.every((item) => utils.string.isString(item) && !utils.string.isEmpty(item)); + if (!isValidSource) { + throw new TypeError('Patterns must be a string (non empty) or an array of strings'); + } +} +module.exports = FastGlob; diff --git a/node_modules/fast-glob/out/managers/patterns.d.ts b/node_modules/fast-glob/out/managers/patterns.d.ts new file mode 100644 index 0000000..aa32460 --- /dev/null +++ b/node_modules/fast-glob/out/managers/patterns.d.ts @@ -0,0 +1,6 @@ +export declare function transform(patterns: string[]): string[]; +/** + * This package only works with forward slashes as a path separator. + * Because of this, we cannot use the standard `path.normalize` method, because on Windows platform it will use of backslashes. + */ +export declare function removeDuplicateSlashes(pattern: string): string; diff --git a/node_modules/fast-glob/out/managers/patterns.js b/node_modules/fast-glob/out/managers/patterns.js new file mode 100644 index 0000000..db0b407 --- /dev/null +++ b/node_modules/fast-glob/out/managers/patterns.js @@ -0,0 +1,21 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.removeDuplicateSlashes = exports.transform = void 0; +/** + * Matches a sequence of two or more consecutive slashes, excluding the first two slashes at the beginning of the string. + * The latter is due to the presence of the device path at the beginning of the UNC path. + * @todo rewrite to negative lookbehind with the next major release. + */ +const DOUBLE_SLASH_RE = /(?!^)\/{2,}/g; +function transform(patterns) { + return patterns.map((pattern) => removeDuplicateSlashes(pattern)); +} +exports.transform = transform; +/** + * This package only works with forward slashes as a path separator. + * Because of this, we cannot use the standard `path.normalize` method, because on Windows platform it will use of backslashes. 
+ */ +function removeDuplicateSlashes(pattern) { + return pattern.replace(DOUBLE_SLASH_RE, '/'); +} +exports.removeDuplicateSlashes = removeDuplicateSlashes; diff --git a/node_modules/fast-glob/out/managers/tasks.d.ts b/node_modules/fast-glob/out/managers/tasks.d.ts new file mode 100644 index 0000000..a1c1095 --- /dev/null +++ b/node_modules/fast-glob/out/managers/tasks.d.ts @@ -0,0 +1,22 @@ +import Settings from '../settings'; +import { Pattern, PatternsGroup } from '../types'; +export declare type Task = { + base: string; + dynamic: boolean; + patterns: Pattern[]; + positive: Pattern[]; + negative: Pattern[]; +}; +export declare function generate(patterns: Pattern[], settings: Settings): Task[]; +/** + * Returns tasks grouped by basic pattern directories. + * + * Patterns that can be found inside (`./`) and outside (`../`) the current directory are handled separately. + * This is necessary because directory traversal starts at the base directory and goes deeper. + */ +export declare function convertPatternsToTasks(positive: Pattern[], negative: Pattern[], dynamic: boolean): Task[]; +export declare function getPositivePatterns(patterns: Pattern[]): Pattern[]; +export declare function getNegativePatternsAsPositive(patterns: Pattern[], ignore: Pattern[]): Pattern[]; +export declare function groupPatternsByBaseDirectory(patterns: Pattern[]): PatternsGroup; +export declare function convertPatternGroupsToTasks(positive: PatternsGroup, negative: Pattern[], dynamic: boolean): Task[]; +export declare function convertPatternGroupToTask(base: string, positive: Pattern[], negative: Pattern[], dynamic: boolean): Task; diff --git a/node_modules/fast-glob/out/managers/tasks.js b/node_modules/fast-glob/out/managers/tasks.js new file mode 100644 index 0000000..6b8658d --- /dev/null +++ b/node_modules/fast-glob/out/managers/tasks.js @@ -0,0 +1,80 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.convertPatternGroupToTask = exports.convertPatternGroupsToTasks = exports.groupPatternsByBaseDirectory = exports.getNegativePatternsAsPositive = exports.getPositivePatterns = exports.convertPatternsToTasks = exports.generate = void 0; +const utils = require("../utils"); +function generate(patterns, settings) { + const positivePatterns = getPositivePatterns(patterns); + const negativePatterns = getNegativePatternsAsPositive(patterns, settings.ignore); + const staticPatterns = positivePatterns.filter((pattern) => utils.pattern.isStaticPattern(pattern, settings)); + const dynamicPatterns = positivePatterns.filter((pattern) => utils.pattern.isDynamicPattern(pattern, settings)); + const staticTasks = convertPatternsToTasks(staticPatterns, negativePatterns, /* dynamic */ false); + const dynamicTasks = convertPatternsToTasks(dynamicPatterns, negativePatterns, /* dynamic */ true); + return staticTasks.concat(dynamicTasks); +} +exports.generate = generate; +/** + * Returns tasks grouped by basic pattern directories. + * + * Patterns that can be found inside (`./`) and outside (`../`) the current directory are handled separately. + * This is necessary because directory traversal starts at the base directory and goes deeper. 
+ */ +function convertPatternsToTasks(positive, negative, dynamic) { + const tasks = []; + const patternsOutsideCurrentDirectory = utils.pattern.getPatternsOutsideCurrentDirectory(positive); + const patternsInsideCurrentDirectory = utils.pattern.getPatternsInsideCurrentDirectory(positive); + const outsideCurrentDirectoryGroup = groupPatternsByBaseDirectory(patternsOutsideCurrentDirectory); + const insideCurrentDirectoryGroup = groupPatternsByBaseDirectory(patternsInsideCurrentDirectory); + tasks.push(...convertPatternGroupsToTasks(outsideCurrentDirectoryGroup, negative, dynamic)); + /* + * For the sake of reducing future accesses to the file system, we merge all tasks within the current directory + * into a global task, if at least one pattern refers to the root (`.`). In this case, the global task covers the rest. + */ + if ('.' in insideCurrentDirectoryGroup) { + tasks.push(convertPatternGroupToTask('.', patternsInsideCurrentDirectory, negative, dynamic)); + } + else { + tasks.push(...convertPatternGroupsToTasks(insideCurrentDirectoryGroup, negative, dynamic)); + } + return tasks; +} +exports.convertPatternsToTasks = convertPatternsToTasks; +function getPositivePatterns(patterns) { + return utils.pattern.getPositivePatterns(patterns); +} +exports.getPositivePatterns = getPositivePatterns; +function getNegativePatternsAsPositive(patterns, ignore) { + const negative = utils.pattern.getNegativePatterns(patterns).concat(ignore); + const positive = negative.map(utils.pattern.convertToPositivePattern); + return positive; +} +exports.getNegativePatternsAsPositive = getNegativePatternsAsPositive; +function groupPatternsByBaseDirectory(patterns) { + const group = {}; + return patterns.reduce((collection, pattern) => { + const base = utils.pattern.getBaseDirectory(pattern); + if (base in collection) { + collection[base].push(pattern); + } + else { + collection[base] = [pattern]; + } + return collection; + }, group); +} +exports.groupPatternsByBaseDirectory = groupPatternsByBaseDirectory; +function convertPatternGroupsToTasks(positive, negative, dynamic) { + return Object.keys(positive).map((base) => { + return convertPatternGroupToTask(base, positive[base], negative, dynamic); + }); +} +exports.convertPatternGroupsToTasks = convertPatternGroupsToTasks; +function convertPatternGroupToTask(base, positive, negative, dynamic) { + return { + dynamic, + positive, + negative, + base, + patterns: [].concat(positive, negative.map(utils.pattern.convertToNegativePattern)) + }; +} +exports.convertPatternGroupToTask = convertPatternGroupToTask; diff --git a/node_modules/fast-glob/out/providers/async.d.ts b/node_modules/fast-glob/out/providers/async.d.ts new file mode 100644 index 0000000..2742616 --- /dev/null +++ b/node_modules/fast-glob/out/providers/async.d.ts @@ -0,0 +1,9 @@ +import { Task } from '../managers/tasks'; +import { Entry, EntryItem, ReaderOptions } from '../types'; +import ReaderAsync from '../readers/async'; +import Provider from './provider'; +export default class ProviderAsync extends Provider> { + protected _reader: ReaderAsync; + read(task: Task): Promise; + api(root: string, task: Task, options: ReaderOptions): Promise; +} diff --git a/node_modules/fast-glob/out/providers/async.js b/node_modules/fast-glob/out/providers/async.js new file mode 100644 index 0000000..0c5286e --- /dev/null +++ b/node_modules/fast-glob/out/providers/async.js @@ -0,0 +1,23 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const async_1 = require("../readers/async"); +const 
provider_1 = require("./provider"); +class ProviderAsync extends provider_1.default { + constructor() { + super(...arguments); + this._reader = new async_1.default(this._settings); + } + async read(task) { + const root = this._getRootDirectory(task); + const options = this._getReaderOptions(task); + const entries = await this.api(root, task, options); + return entries.map((entry) => options.transform(entry)); + } + api(root, task, options) { + if (task.dynamic) { + return this._reader.dynamic(root, options); + } + return this._reader.static(task.patterns, options); + } +} +exports.default = ProviderAsync; diff --git a/node_modules/fast-glob/out/providers/filters/deep.d.ts b/node_modules/fast-glob/out/providers/filters/deep.d.ts new file mode 100644 index 0000000..377fab8 --- /dev/null +++ b/node_modules/fast-glob/out/providers/filters/deep.d.ts @@ -0,0 +1,16 @@ +import { MicromatchOptions, EntryFilterFunction, Pattern } from '../../types'; +import Settings from '../../settings'; +export default class DeepFilter { + private readonly _settings; + private readonly _micromatchOptions; + constructor(_settings: Settings, _micromatchOptions: MicromatchOptions); + getFilter(basePath: string, positive: Pattern[], negative: Pattern[]): EntryFilterFunction; + private _getMatcher; + private _getNegativePatternsRe; + private _filter; + private _isSkippedByDeep; + private _getEntryLevel; + private _isSkippedSymbolicLink; + private _isSkippedByPositivePatterns; + private _isSkippedByNegativePatterns; +} diff --git a/node_modules/fast-glob/out/providers/filters/deep.js b/node_modules/fast-glob/out/providers/filters/deep.js new file mode 100644 index 0000000..644bf41 --- /dev/null +++ b/node_modules/fast-glob/out/providers/filters/deep.js @@ -0,0 +1,62 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const utils = require("../../utils"); +const partial_1 = require("../matchers/partial"); +class DeepFilter { + constructor(_settings, _micromatchOptions) { + this._settings = _settings; + this._micromatchOptions = _micromatchOptions; + } + getFilter(basePath, positive, negative) { + const matcher = this._getMatcher(positive); + const negativeRe = this._getNegativePatternsRe(negative); + return (entry) => this._filter(basePath, entry, matcher, negativeRe); + } + _getMatcher(patterns) { + return new partial_1.default(patterns, this._settings, this._micromatchOptions); + } + _getNegativePatternsRe(patterns) { + const affectDepthOfReadingPatterns = patterns.filter(utils.pattern.isAffectDepthOfReadingPattern); + return utils.pattern.convertPatternsToRe(affectDepthOfReadingPatterns, this._micromatchOptions); + } + _filter(basePath, entry, matcher, negativeRe) { + if (this._isSkippedByDeep(basePath, entry.path)) { + return false; + } + if (this._isSkippedSymbolicLink(entry)) { + return false; + } + const filepath = utils.path.removeLeadingDotSegment(entry.path); + if (this._isSkippedByPositivePatterns(filepath, matcher)) { + return false; + } + return this._isSkippedByNegativePatterns(filepath, negativeRe); + } + _isSkippedByDeep(basePath, entryPath) { + /** + * Avoid unnecessary depth calculations when it doesn't matter. 
+ */ + if (this._settings.deep === Infinity) { + return false; + } + return this._getEntryLevel(basePath, entryPath) >= this._settings.deep; + } + _getEntryLevel(basePath, entryPath) { + const entryPathDepth = entryPath.split('/').length; + if (basePath === '') { + return entryPathDepth; + } + const basePathDepth = basePath.split('/').length; + return entryPathDepth - basePathDepth; + } + _isSkippedSymbolicLink(entry) { + return !this._settings.followSymbolicLinks && entry.dirent.isSymbolicLink(); + } + _isSkippedByPositivePatterns(entryPath, matcher) { + return !this._settings.baseNameMatch && !matcher.match(entryPath); + } + _isSkippedByNegativePatterns(entryPath, patternsRe) { + return !utils.pattern.matchAny(entryPath, patternsRe); + } +} +exports.default = DeepFilter; diff --git a/node_modules/fast-glob/out/providers/filters/entry.d.ts b/node_modules/fast-glob/out/providers/filters/entry.d.ts new file mode 100644 index 0000000..ee71281 --- /dev/null +++ b/node_modules/fast-glob/out/providers/filters/entry.d.ts @@ -0,0 +1,16 @@ +import Settings from '../../settings'; +import { EntryFilterFunction, MicromatchOptions, Pattern } from '../../types'; +export default class EntryFilter { + private readonly _settings; + private readonly _micromatchOptions; + readonly index: Map; + constructor(_settings: Settings, _micromatchOptions: MicromatchOptions); + getFilter(positive: Pattern[], negative: Pattern[]): EntryFilterFunction; + private _filter; + private _isDuplicateEntry; + private _createIndexRecord; + private _onlyFileFilter; + private _onlyDirectoryFilter; + private _isSkippedByAbsoluteNegativePatterns; + private _isMatchToPatterns; +} diff --git a/node_modules/fast-glob/out/providers/filters/entry.js b/node_modules/fast-glob/out/providers/filters/entry.js new file mode 100644 index 0000000..e04e8b3 --- /dev/null +++ b/node_modules/fast-glob/out/providers/filters/entry.js @@ -0,0 +1,64 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const utils = require("../../utils"); +class EntryFilter { + constructor(_settings, _micromatchOptions) { + this._settings = _settings; + this._micromatchOptions = _micromatchOptions; + this.index = new Map(); + } + getFilter(positive, negative) { + const positiveRe = utils.pattern.convertPatternsToRe(positive, this._micromatchOptions); + const negativeRe = utils.pattern.convertPatternsToRe(negative, this._micromatchOptions); + return (entry) => this._filter(entry, positiveRe, negativeRe); + } + _filter(entry, positiveRe, negativeRe) { + if (this._settings.unique && this._isDuplicateEntry(entry)) { + return false; + } + if (this._onlyFileFilter(entry) || this._onlyDirectoryFilter(entry)) { + return false; + } + if (this._isSkippedByAbsoluteNegativePatterns(entry.path, negativeRe)) { + return false; + } + const filepath = this._settings.baseNameMatch ? 
entry.name : entry.path; + const isDirectory = entry.dirent.isDirectory(); + const isMatched = this._isMatchToPatterns(filepath, positiveRe, isDirectory) && !this._isMatchToPatterns(entry.path, negativeRe, isDirectory); + if (this._settings.unique && isMatched) { + this._createIndexRecord(entry); + } + return isMatched; + } + _isDuplicateEntry(entry) { + return this.index.has(entry.path); + } + _createIndexRecord(entry) { + this.index.set(entry.path, undefined); + } + _onlyFileFilter(entry) { + return this._settings.onlyFiles && !entry.dirent.isFile(); + } + _onlyDirectoryFilter(entry) { + return this._settings.onlyDirectories && !entry.dirent.isDirectory(); + } + _isSkippedByAbsoluteNegativePatterns(entryPath, patternsRe) { + if (!this._settings.absolute) { + return false; + } + const fullpath = utils.path.makeAbsolute(this._settings.cwd, entryPath); + return utils.pattern.matchAny(fullpath, patternsRe); + } + _isMatchToPatterns(entryPath, patternsRe, isDirectory) { + const filepath = utils.path.removeLeadingDotSegment(entryPath); + // Trying to match files and directories by patterns. + const isMatched = utils.pattern.matchAny(filepath, patternsRe); + // A pattern with a trailling slash can be used for directory matching. + // To apply such pattern, we need to add a tralling slash to the path. + if (!isMatched && isDirectory) { + return utils.pattern.matchAny(filepath + '/', patternsRe); + } + return isMatched; + } +} +exports.default = EntryFilter; diff --git a/node_modules/fast-glob/out/providers/filters/error.d.ts b/node_modules/fast-glob/out/providers/filters/error.d.ts new file mode 100644 index 0000000..170eb25 --- /dev/null +++ b/node_modules/fast-glob/out/providers/filters/error.d.ts @@ -0,0 +1,8 @@ +import Settings from '../../settings'; +import { ErrorFilterFunction } from '../../types'; +export default class ErrorFilter { + private readonly _settings; + constructor(_settings: Settings); + getFilter(): ErrorFilterFunction; + private _isNonFatalError; +} diff --git a/node_modules/fast-glob/out/providers/filters/error.js b/node_modules/fast-glob/out/providers/filters/error.js new file mode 100644 index 0000000..1c6f241 --- /dev/null +++ b/node_modules/fast-glob/out/providers/filters/error.js @@ -0,0 +1,15 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const utils = require("../../utils"); +class ErrorFilter { + constructor(_settings) { + this._settings = _settings; + } + getFilter() { + return (error) => this._isNonFatalError(error); + } + _isNonFatalError(error) { + return utils.errno.isEnoentCodeError(error) || this._settings.suppressErrors; + } +} +exports.default = ErrorFilter; diff --git a/node_modules/fast-glob/out/providers/matchers/matcher.d.ts b/node_modules/fast-glob/out/providers/matchers/matcher.d.ts new file mode 100644 index 0000000..f3773d4 --- /dev/null +++ b/node_modules/fast-glob/out/providers/matchers/matcher.d.ts @@ -0,0 +1,33 @@ +import { Pattern, MicromatchOptions, PatternRe } from '../../types'; +import Settings from '../../settings'; +export declare type PatternSegment = StaticPatternSegment | DynamicPatternSegment; +declare type StaticPatternSegment = { + dynamic: false; + pattern: Pattern; +}; +declare type DynamicPatternSegment = { + dynamic: true; + pattern: Pattern; + patternRe: PatternRe; +}; +export declare type PatternSection = PatternSegment[]; +export declare type PatternInfo = { + /** + * Indicates that the pattern has a globstar (more than a single section). 
+ */ + complete: boolean; + pattern: Pattern; + segments: PatternSegment[]; + sections: PatternSection[]; +}; +export default abstract class Matcher { + private readonly _patterns; + private readonly _settings; + private readonly _micromatchOptions; + protected readonly _storage: PatternInfo[]; + constructor(_patterns: Pattern[], _settings: Settings, _micromatchOptions: MicromatchOptions); + private _fillStorage; + private _getPatternSegments; + private _splitSegmentsIntoSections; +} +export {}; diff --git a/node_modules/fast-glob/out/providers/matchers/matcher.js b/node_modules/fast-glob/out/providers/matchers/matcher.js new file mode 100644 index 0000000..752d2c2 --- /dev/null +++ b/node_modules/fast-glob/out/providers/matchers/matcher.js @@ -0,0 +1,50 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const utils = require("../../utils"); +class Matcher { + constructor(_patterns, _settings, _micromatchOptions) { + this._patterns = _patterns; + this._settings = _settings; + this._micromatchOptions = _micromatchOptions; + this._storage = []; + this._fillStorage(); + } + _fillStorage() { + /** + * The original pattern may include `{,*,**,a/*}`, which will lead to problems with matching (unresolved level). + * So, before expand patterns with brace expansion into separated patterns. + */ + const patterns = utils.pattern.expandPatternsWithBraceExpansion(this._patterns); + for (const pattern of patterns) { + const segments = this._getPatternSegments(pattern); + const sections = this._splitSegmentsIntoSections(segments); + this._storage.push({ + complete: sections.length <= 1, + pattern, + segments, + sections + }); + } + } + _getPatternSegments(pattern) { + const parts = utils.pattern.getPatternParts(pattern, this._micromatchOptions); + return parts.map((part) => { + const dynamic = utils.pattern.isDynamicPattern(part, this._settings); + if (!dynamic) { + return { + dynamic: false, + pattern: part + }; + } + return { + dynamic: true, + pattern: part, + patternRe: utils.pattern.makeRe(part, this._micromatchOptions) + }; + }); + } + _splitSegmentsIntoSections(segments) { + return utils.array.splitWhen(segments, (segment) => segment.dynamic && utils.pattern.hasGlobStar(segment.pattern)); + } +} +exports.default = Matcher; diff --git a/node_modules/fast-glob/out/providers/matchers/partial.d.ts b/node_modules/fast-glob/out/providers/matchers/partial.d.ts new file mode 100644 index 0000000..91520f6 --- /dev/null +++ b/node_modules/fast-glob/out/providers/matchers/partial.d.ts @@ -0,0 +1,4 @@ +import Matcher from './matcher'; +export default class PartialMatcher extends Matcher { + match(filepath: string): boolean; +} diff --git a/node_modules/fast-glob/out/providers/matchers/partial.js b/node_modules/fast-glob/out/providers/matchers/partial.js new file mode 100644 index 0000000..1dfffeb --- /dev/null +++ b/node_modules/fast-glob/out/providers/matchers/partial.js @@ -0,0 +1,38 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const matcher_1 = require("./matcher"); +class PartialMatcher extends matcher_1.default { + match(filepath) { + const parts = filepath.split('/'); + const levels = parts.length; + const patterns = this._storage.filter((info) => !info.complete || info.segments.length > levels); + for (const pattern of patterns) { + const section = pattern.sections[0]; + /** + * In this case, the pattern has a globstar and we must read all directories unconditionally, + * but only if the level has reached the end of the first group. 
+ * + * fixtures/{a,b}/** + * ^ true/false ^ always true + */ + if (!pattern.complete && levels > section.length) { + return true; + } + const match = parts.every((part, index) => { + const segment = pattern.segments[index]; + if (segment.dynamic && segment.patternRe.test(part)) { + return true; + } + if (!segment.dynamic && segment.pattern === part) { + return true; + } + return false; + }); + if (match) { + return true; + } + } + return false; + } +} +exports.default = PartialMatcher; diff --git a/node_modules/fast-glob/out/providers/provider.d.ts b/node_modules/fast-glob/out/providers/provider.d.ts new file mode 100644 index 0000000..1053460 --- /dev/null +++ b/node_modules/fast-glob/out/providers/provider.d.ts @@ -0,0 +1,19 @@ +import { Task } from '../managers/tasks'; +import Settings from '../settings'; +import { MicromatchOptions, ReaderOptions } from '../types'; +import DeepFilter from './filters/deep'; +import EntryFilter from './filters/entry'; +import ErrorFilter from './filters/error'; +import EntryTransformer from './transformers/entry'; +export default abstract class Provider { + protected readonly _settings: Settings; + readonly errorFilter: ErrorFilter; + readonly entryFilter: EntryFilter; + readonly deepFilter: DeepFilter; + readonly entryTransformer: EntryTransformer; + constructor(_settings: Settings); + abstract read(_task: Task): T; + protected _getRootDirectory(task: Task): string; + protected _getReaderOptions(task: Task): ReaderOptions; + protected _getMicromatchOptions(): MicromatchOptions; +} diff --git a/node_modules/fast-glob/out/providers/provider.js b/node_modules/fast-glob/out/providers/provider.js new file mode 100644 index 0000000..da88ee0 --- /dev/null +++ b/node_modules/fast-glob/out/providers/provider.js @@ -0,0 +1,48 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const path = require("path"); +const deep_1 = require("./filters/deep"); +const entry_1 = require("./filters/entry"); +const error_1 = require("./filters/error"); +const entry_2 = require("./transformers/entry"); +class Provider { + constructor(_settings) { + this._settings = _settings; + this.errorFilter = new error_1.default(this._settings); + this.entryFilter = new entry_1.default(this._settings, this._getMicromatchOptions()); + this.deepFilter = new deep_1.default(this._settings, this._getMicromatchOptions()); + this.entryTransformer = new entry_2.default(this._settings); + } + _getRootDirectory(task) { + return path.resolve(this._settings.cwd, task.base); + } + _getReaderOptions(task) { + const basePath = task.base === '.' ? 
'' : task.base; + return { + basePath, + pathSegmentSeparator: '/', + concurrency: this._settings.concurrency, + deepFilter: this.deepFilter.getFilter(basePath, task.positive, task.negative), + entryFilter: this.entryFilter.getFilter(task.positive, task.negative), + errorFilter: this.errorFilter.getFilter(), + followSymbolicLinks: this._settings.followSymbolicLinks, + fs: this._settings.fs, + stats: this._settings.stats, + throwErrorOnBrokenSymbolicLink: this._settings.throwErrorOnBrokenSymbolicLink, + transform: this.entryTransformer.getTransformer() + }; + } + _getMicromatchOptions() { + return { + dot: this._settings.dot, + matchBase: this._settings.baseNameMatch, + nobrace: !this._settings.braceExpansion, + nocase: !this._settings.caseSensitiveMatch, + noext: !this._settings.extglob, + noglobstar: !this._settings.globstar, + posix: true, + strictSlashes: false + }; + } +} +exports.default = Provider; diff --git a/node_modules/fast-glob/out/providers/stream.d.ts b/node_modules/fast-glob/out/providers/stream.d.ts new file mode 100644 index 0000000..3d02a1f --- /dev/null +++ b/node_modules/fast-glob/out/providers/stream.d.ts @@ -0,0 +1,11 @@ +/// +import { Readable } from 'stream'; +import { Task } from '../managers/tasks'; +import ReaderStream from '../readers/stream'; +import { ReaderOptions } from '../types'; +import Provider from './provider'; +export default class ProviderStream extends Provider { + protected _reader: ReaderStream; + read(task: Task): Readable; + api(root: string, task: Task, options: ReaderOptions): Readable; +} diff --git a/node_modules/fast-glob/out/providers/stream.js b/node_modules/fast-glob/out/providers/stream.js new file mode 100644 index 0000000..85da62e --- /dev/null +++ b/node_modules/fast-glob/out/providers/stream.js @@ -0,0 +1,31 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const stream_1 = require("stream"); +const stream_2 = require("../readers/stream"); +const provider_1 = require("./provider"); +class ProviderStream extends provider_1.default { + constructor() { + super(...arguments); + this._reader = new stream_2.default(this._settings); + } + read(task) { + const root = this._getRootDirectory(task); + const options = this._getReaderOptions(task); + const source = this.api(root, task, options); + const destination = new stream_1.Readable({ objectMode: true, read: () => { } }); + source + .once('error', (error) => destination.emit('error', error)) + .on('data', (entry) => destination.emit('data', options.transform(entry))) + .once('end', () => destination.emit('end')); + destination + .once('close', () => source.destroy()); + return destination; + } + api(root, task, options) { + if (task.dynamic) { + return this._reader.dynamic(root, options); + } + return this._reader.static(task.patterns, options); + } +} +exports.default = ProviderStream; diff --git a/node_modules/fast-glob/out/providers/sync.d.ts b/node_modules/fast-glob/out/providers/sync.d.ts new file mode 100644 index 0000000..9c0fe1e --- /dev/null +++ b/node_modules/fast-glob/out/providers/sync.d.ts @@ -0,0 +1,9 @@ +import { Task } from '../managers/tasks'; +import ReaderSync from '../readers/sync'; +import { Entry, EntryItem, ReaderOptions } from '../types'; +import Provider from './provider'; +export default class ProviderSync extends Provider { + protected _reader: ReaderSync; + read(task: Task): EntryItem[]; + api(root: string, task: Task, options: ReaderOptions): Entry[]; +} diff --git a/node_modules/fast-glob/out/providers/sync.js 
b/node_modules/fast-glob/out/providers/sync.js new file mode 100644 index 0000000..d70aa1b --- /dev/null +++ b/node_modules/fast-glob/out/providers/sync.js @@ -0,0 +1,23 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const sync_1 = require("../readers/sync"); +const provider_1 = require("./provider"); +class ProviderSync extends provider_1.default { + constructor() { + super(...arguments); + this._reader = new sync_1.default(this._settings); + } + read(task) { + const root = this._getRootDirectory(task); + const options = this._getReaderOptions(task); + const entries = this.api(root, task, options); + return entries.map(options.transform); + } + api(root, task, options) { + if (task.dynamic) { + return this._reader.dynamic(root, options); + } + return this._reader.static(task.patterns, options); + } +} +exports.default = ProviderSync; diff --git a/node_modules/fast-glob/out/providers/transformers/entry.d.ts b/node_modules/fast-glob/out/providers/transformers/entry.d.ts new file mode 100644 index 0000000..e9b85fa --- /dev/null +++ b/node_modules/fast-glob/out/providers/transformers/entry.d.ts @@ -0,0 +1,8 @@ +import Settings from '../../settings'; +import { EntryTransformerFunction } from '../../types'; +export default class EntryTransformer { + private readonly _settings; + constructor(_settings: Settings); + getTransformer(): EntryTransformerFunction; + private _transform; +} diff --git a/node_modules/fast-glob/out/providers/transformers/entry.js b/node_modules/fast-glob/out/providers/transformers/entry.js new file mode 100644 index 0000000..d11903c --- /dev/null +++ b/node_modules/fast-glob/out/providers/transformers/entry.js @@ -0,0 +1,26 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const utils = require("../../utils"); +class EntryTransformer { + constructor(_settings) { + this._settings = _settings; + } + getTransformer() { + return (entry) => this._transform(entry); + } + _transform(entry) { + let filepath = entry.path; + if (this._settings.absolute) { + filepath = utils.path.makeAbsolute(this._settings.cwd, filepath); + filepath = utils.path.unixify(filepath); + } + if (this._settings.markDirectories && entry.dirent.isDirectory()) { + filepath += '/'; + } + if (!this._settings.objectMode) { + return filepath; + } + return Object.assign(Object.assign({}, entry), { path: filepath }); + } +} +exports.default = EntryTransformer; diff --git a/node_modules/fast-glob/out/readers/async.d.ts b/node_modules/fast-glob/out/readers/async.d.ts new file mode 100644 index 0000000..fbca428 --- /dev/null +++ b/node_modules/fast-glob/out/readers/async.d.ts @@ -0,0 +1,10 @@ +import * as fsWalk from '@nodelib/fs.walk'; +import { Entry, ReaderOptions, Pattern } from '../types'; +import Reader from './reader'; +import ReaderStream from './stream'; +export default class ReaderAsync extends Reader> { + protected _walkAsync: typeof fsWalk.walk; + protected _readerStream: ReaderStream; + dynamic(root: string, options: ReaderOptions): Promise; + static(patterns: Pattern[], options: ReaderOptions): Promise; +} diff --git a/node_modules/fast-glob/out/readers/async.js b/node_modules/fast-glob/out/readers/async.js new file mode 100644 index 0000000..d024145 --- /dev/null +++ b/node_modules/fast-glob/out/readers/async.js @@ -0,0 +1,35 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const fsWalk = require("@nodelib/fs.walk"); +const reader_1 = require("./reader"); +const stream_1 = require("./stream"); +class 
ReaderAsync extends reader_1.default { + constructor() { + super(...arguments); + this._walkAsync = fsWalk.walk; + this._readerStream = new stream_1.default(this._settings); + } + dynamic(root, options) { + return new Promise((resolve, reject) => { + this._walkAsync(root, options, (error, entries) => { + if (error === null) { + resolve(entries); + } + else { + reject(error); + } + }); + }); + } + async static(patterns, options) { + const entries = []; + const stream = this._readerStream.static(patterns, options); + // After #235, replace it with an asynchronous iterator. + return new Promise((resolve, reject) => { + stream.once('error', reject); + stream.on('data', (entry) => entries.push(entry)); + stream.once('end', () => resolve(entries)); + }); + } +} +exports.default = ReaderAsync; diff --git a/node_modules/fast-glob/out/readers/reader.d.ts b/node_modules/fast-glob/out/readers/reader.d.ts new file mode 100644 index 0000000..2af16b6 --- /dev/null +++ b/node_modules/fast-glob/out/readers/reader.d.ts @@ -0,0 +1,15 @@ +/// +import * as fs from 'fs'; +import * as fsStat from '@nodelib/fs.stat'; +import Settings from '../settings'; +import { Entry, ErrnoException, Pattern, ReaderOptions } from '../types'; +export default abstract class Reader { + protected readonly _settings: Settings; + protected readonly _fsStatSettings: fsStat.Settings; + constructor(_settings: Settings); + abstract dynamic(root: string, options: ReaderOptions): T; + abstract static(patterns: Pattern[], options: ReaderOptions): T; + protected _getFullEntryPath(filepath: string): string; + protected _makeEntry(stats: fs.Stats, pattern: Pattern): Entry; + protected _isFatalError(error: ErrnoException): boolean; +} diff --git a/node_modules/fast-glob/out/readers/reader.js b/node_modules/fast-glob/out/readers/reader.js new file mode 100644 index 0000000..7b40255 --- /dev/null +++ b/node_modules/fast-glob/out/readers/reader.js @@ -0,0 +1,33 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const path = require("path"); +const fsStat = require("@nodelib/fs.stat"); +const utils = require("../utils"); +class Reader { + constructor(_settings) { + this._settings = _settings; + this._fsStatSettings = new fsStat.Settings({ + followSymbolicLink: this._settings.followSymbolicLinks, + fs: this._settings.fs, + throwErrorOnBrokenSymbolicLink: this._settings.followSymbolicLinks + }); + } + _getFullEntryPath(filepath) { + return path.resolve(this._settings.cwd, filepath); + } + _makeEntry(stats, pattern) { + const entry = { + name: pattern, + path: pattern, + dirent: utils.fs.createDirentFromStats(pattern, stats) + }; + if (this._settings.stats) { + entry.stats = stats; + } + return entry; + } + _isFatalError(error) { + return !utils.errno.isEnoentCodeError(error) && !this._settings.suppressErrors; + } +} +exports.default = Reader; diff --git a/node_modules/fast-glob/out/readers/stream.d.ts b/node_modules/fast-glob/out/readers/stream.d.ts new file mode 100644 index 0000000..1c74cac --- /dev/null +++ b/node_modules/fast-glob/out/readers/stream.d.ts @@ -0,0 +1,14 @@ +/// +import { Readable } from 'stream'; +import * as fsStat from '@nodelib/fs.stat'; +import * as fsWalk from '@nodelib/fs.walk'; +import { Pattern, ReaderOptions } from '../types'; +import Reader from './reader'; +export default class ReaderStream extends Reader { + protected _walkStream: typeof fsWalk.walkStream; + protected _stat: typeof fsStat.stat; + dynamic(root: string, options: ReaderOptions): Readable; + static(patterns: Pattern[], 
options: ReaderOptions): Readable; + private _getEntry; + private _getStat; +} diff --git a/node_modules/fast-glob/out/readers/stream.js b/node_modules/fast-glob/out/readers/stream.js new file mode 100644 index 0000000..317c6d5 --- /dev/null +++ b/node_modules/fast-glob/out/readers/stream.js @@ -0,0 +1,55 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const stream_1 = require("stream"); +const fsStat = require("@nodelib/fs.stat"); +const fsWalk = require("@nodelib/fs.walk"); +const reader_1 = require("./reader"); +class ReaderStream extends reader_1.default { + constructor() { + super(...arguments); + this._walkStream = fsWalk.walkStream; + this._stat = fsStat.stat; + } + dynamic(root, options) { + return this._walkStream(root, options); + } + static(patterns, options) { + const filepaths = patterns.map(this._getFullEntryPath, this); + const stream = new stream_1.PassThrough({ objectMode: true }); + stream._write = (index, _enc, done) => { + return this._getEntry(filepaths[index], patterns[index], options) + .then((entry) => { + if (entry !== null && options.entryFilter(entry)) { + stream.push(entry); + } + if (index === filepaths.length - 1) { + stream.end(); + } + done(); + }) + .catch(done); + }; + for (let i = 0; i < filepaths.length; i++) { + stream.write(i); + } + return stream; + } + _getEntry(filepath, pattern, options) { + return this._getStat(filepath) + .then((stats) => this._makeEntry(stats, pattern)) + .catch((error) => { + if (options.errorFilter(error)) { + return null; + } + throw error; + }); + } + _getStat(filepath) { + return new Promise((resolve, reject) => { + this._stat(filepath, this._fsStatSettings, (error, stats) => { + return error === null ? resolve(stats) : reject(error); + }); + }); + } +} +exports.default = ReaderStream; diff --git a/node_modules/fast-glob/out/readers/sync.d.ts b/node_modules/fast-glob/out/readers/sync.d.ts new file mode 100644 index 0000000..c96ffee --- /dev/null +++ b/node_modules/fast-glob/out/readers/sync.d.ts @@ -0,0 +1,12 @@ +import * as fsStat from '@nodelib/fs.stat'; +import * as fsWalk from '@nodelib/fs.walk'; +import { Entry, Pattern, ReaderOptions } from '../types'; +import Reader from './reader'; +export default class ReaderSync extends Reader { + protected _walkSync: typeof fsWalk.walkSync; + protected _statSync: typeof fsStat.statSync; + dynamic(root: string, options: ReaderOptions): Entry[]; + static(patterns: Pattern[], options: ReaderOptions): Entry[]; + private _getEntry; + private _getStat; +} diff --git a/node_modules/fast-glob/out/readers/sync.js b/node_modules/fast-glob/out/readers/sync.js new file mode 100644 index 0000000..4704d65 --- /dev/null +++ b/node_modules/fast-glob/out/readers/sync.js @@ -0,0 +1,43 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const fsStat = require("@nodelib/fs.stat"); +const fsWalk = require("@nodelib/fs.walk"); +const reader_1 = require("./reader"); +class ReaderSync extends reader_1.default { + constructor() { + super(...arguments); + this._walkSync = fsWalk.walkSync; + this._statSync = fsStat.statSync; + } + dynamic(root, options) { + return this._walkSync(root, options); + } + static(patterns, options) { + const entries = []; + for (const pattern of patterns) { + const filepath = this._getFullEntryPath(pattern); + const entry = this._getEntry(filepath, pattern, options); + if (entry === null || !options.entryFilter(entry)) { + continue; + } + entries.push(entry); + } + return entries; + } + _getEntry(filepath, 
pattern, options) { + try { + const stats = this._getStat(filepath); + return this._makeEntry(stats, pattern); + } + catch (error) { + if (options.errorFilter(error)) { + return null; + } + throw error; + } + } + _getStat(filepath) { + return this._statSync(filepath, this._fsStatSettings); + } +} +exports.default = ReaderSync; diff --git a/node_modules/fast-glob/out/settings.d.ts b/node_modules/fast-glob/out/settings.d.ts new file mode 100644 index 0000000..1984854 --- /dev/null +++ b/node_modules/fast-glob/out/settings.d.ts @@ -0,0 +1,164 @@ +import { FileSystemAdapter, Pattern } from './types'; +export declare const DEFAULT_FILE_SYSTEM_ADAPTER: FileSystemAdapter; +export declare type Options = { + /** + * Return the absolute path for entries. + * + * @default false + */ + absolute?: boolean; + /** + * If set to `true`, then patterns without slashes will be matched against + * the basename of the path if it contains slashes. + * + * @default false + */ + baseNameMatch?: boolean; + /** + * Enables Bash-like brace expansion. + * + * @default true + */ + braceExpansion?: boolean; + /** + * Enables a case-sensitive mode for matching files. + * + * @default true + */ + caseSensitiveMatch?: boolean; + /** + * Specifies the maximum number of concurrent requests from a reader to read + * directories. + * + * @default os.cpus().length + */ + concurrency?: number; + /** + * The current working directory in which to search. + * + * @default process.cwd() + */ + cwd?: string; + /** + * Specifies the maximum depth of a read directory relative to the start + * directory. + * + * @default Infinity + */ + deep?: number; + /** + * Allow patterns to match entries that begin with a period (`.`). + * + * @default false + */ + dot?: boolean; + /** + * Enables Bash-like `extglob` functionality. + * + * @default true + */ + extglob?: boolean; + /** + * Indicates whether to traverse descendants of symbolic link directories. + * + * @default true + */ + followSymbolicLinks?: boolean; + /** + * Custom implementation of methods for working with the file system. + * + * @default fs.* + */ + fs?: Partial; + /** + * Enables recursively repeats a pattern containing `**`. + * If `false`, `**` behaves exactly like `*`. + * + * @default true + */ + globstar?: boolean; + /** + * An array of glob patterns to exclude matches. + * This is an alternative way to use negative patterns. + * + * @default [] + */ + ignore?: Pattern[]; + /** + * Mark the directory path with the final slash. + * + * @default false + */ + markDirectories?: boolean; + /** + * Returns objects (instead of strings) describing entries. + * + * @default false + */ + objectMode?: boolean; + /** + * Return only directories. + * + * @default false + */ + onlyDirectories?: boolean; + /** + * Return only files. + * + * @default true + */ + onlyFiles?: boolean; + /** + * Enables an object mode (`objectMode`) with an additional `stats` field. + * + * @default false + */ + stats?: boolean; + /** + * By default this package suppress only `ENOENT` errors. + * Set to `true` to suppress any error. + * + * @default false + */ + suppressErrors?: boolean; + /** + * Throw an error when symbolic link is broken if `true` or safely + * return `lstat` call if `false`. + * + * @default false + */ + throwErrorOnBrokenSymbolicLink?: boolean; + /** + * Ensures that the returned entries are unique. 
+ * + * @default true + */ + unique?: boolean; +}; +export default class Settings { + private readonly _options; + readonly absolute: boolean; + readonly baseNameMatch: boolean; + readonly braceExpansion: boolean; + readonly caseSensitiveMatch: boolean; + readonly concurrency: number; + readonly cwd: string; + readonly deep: number; + readonly dot: boolean; + readonly extglob: boolean; + readonly followSymbolicLinks: boolean; + readonly fs: FileSystemAdapter; + readonly globstar: boolean; + readonly ignore: Pattern[]; + readonly markDirectories: boolean; + readonly objectMode: boolean; + readonly onlyDirectories: boolean; + readonly onlyFiles: boolean; + readonly stats: boolean; + readonly suppressErrors: boolean; + readonly throwErrorOnBrokenSymbolicLink: boolean; + readonly unique: boolean; + constructor(_options?: Options); + private _getValue; + private _getFileSystemMethods; +} diff --git a/node_modules/fast-glob/out/settings.js b/node_modules/fast-glob/out/settings.js new file mode 100644 index 0000000..3752806 --- /dev/null +++ b/node_modules/fast-glob/out/settings.js @@ -0,0 +1,57 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.DEFAULT_FILE_SYSTEM_ADAPTER = void 0; +const fs = require("fs"); +const os = require("os"); +/** + * The `os.cpus` method can return zero. We expect the number of cores to be greater than zero. + * https://github.com/nodejs/node/blob/7faeddf23a98c53896f8b574a6e66589e8fb1eb8/lib/os.js#L106-L107 + */ +const CPU_COUNT = Math.max(os.cpus().length, 1); +exports.DEFAULT_FILE_SYSTEM_ADAPTER = { + lstat: fs.lstat, + lstatSync: fs.lstatSync, + stat: fs.stat, + statSync: fs.statSync, + readdir: fs.readdir, + readdirSync: fs.readdirSync +}; +class Settings { + constructor(_options = {}) { + this._options = _options; + this.absolute = this._getValue(this._options.absolute, false); + this.baseNameMatch = this._getValue(this._options.baseNameMatch, false); + this.braceExpansion = this._getValue(this._options.braceExpansion, true); + this.caseSensitiveMatch = this._getValue(this._options.caseSensitiveMatch, true); + this.concurrency = this._getValue(this._options.concurrency, CPU_COUNT); + this.cwd = this._getValue(this._options.cwd, process.cwd()); + this.deep = this._getValue(this._options.deep, Infinity); + this.dot = this._getValue(this._options.dot, false); + this.extglob = this._getValue(this._options.extglob, true); + this.followSymbolicLinks = this._getValue(this._options.followSymbolicLinks, true); + this.fs = this._getFileSystemMethods(this._options.fs); + this.globstar = this._getValue(this._options.globstar, true); + this.ignore = this._getValue(this._options.ignore, []); + this.markDirectories = this._getValue(this._options.markDirectories, false); + this.objectMode = this._getValue(this._options.objectMode, false); + this.onlyDirectories = this._getValue(this._options.onlyDirectories, false); + this.onlyFiles = this._getValue(this._options.onlyFiles, true); + this.stats = this._getValue(this._options.stats, false); + this.suppressErrors = this._getValue(this._options.suppressErrors, false); + this.throwErrorOnBrokenSymbolicLink = this._getValue(this._options.throwErrorOnBrokenSymbolicLink, false); + this.unique = this._getValue(this._options.unique, true); + if (this.onlyDirectories) { + this.onlyFiles = false; + } + if (this.stats) { + this.objectMode = true; + } + } + _getValue(option, value) { + return option === undefined ? 
value : option; + } + _getFileSystemMethods(methods = {}) { + return Object.assign(Object.assign({}, exports.DEFAULT_FILE_SYSTEM_ADAPTER), methods); + } +} +exports.default = Settings; diff --git a/node_modules/fast-glob/out/types/index.d.ts b/node_modules/fast-glob/out/types/index.d.ts new file mode 100644 index 0000000..9950399 --- /dev/null +++ b/node_modules/fast-glob/out/types/index.d.ts @@ -0,0 +1,31 @@ +/// +import * as fsWalk from '@nodelib/fs.walk'; +export declare type ErrnoException = NodeJS.ErrnoException; +export declare type Entry = fsWalk.Entry; +export declare type EntryItem = string | Entry; +export declare type Pattern = string; +export declare type PatternRe = RegExp; +export declare type PatternsGroup = Record; +export declare type ReaderOptions = fsWalk.Options & { + transform(entry: Entry): EntryItem; + deepFilter: DeepFilterFunction; + entryFilter: EntryFilterFunction; + errorFilter: ErrorFilterFunction; + fs: FileSystemAdapter; + stats: boolean; +}; +export declare type ErrorFilterFunction = fsWalk.ErrorFilterFunction; +export declare type EntryFilterFunction = fsWalk.EntryFilterFunction; +export declare type DeepFilterFunction = fsWalk.DeepFilterFunction; +export declare type EntryTransformerFunction = (entry: Entry) => EntryItem; +export declare type MicromatchOptions = { + dot?: boolean; + matchBase?: boolean; + nobrace?: boolean; + nocase?: boolean; + noext?: boolean; + noglobstar?: boolean; + posix?: boolean; + strictSlashes?: boolean; +}; +export declare type FileSystemAdapter = fsWalk.FileSystemAdapter; diff --git a/node_modules/fast-glob/out/types/index.js b/node_modules/fast-glob/out/types/index.js new file mode 100644 index 0000000..c8ad2e5 --- /dev/null +++ b/node_modules/fast-glob/out/types/index.js @@ -0,0 +1,2 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/node_modules/fast-glob/out/utils/array.d.ts b/node_modules/fast-glob/out/utils/array.d.ts new file mode 100644 index 0000000..98e7325 --- /dev/null +++ b/node_modules/fast-glob/out/utils/array.d.ts @@ -0,0 +1,2 @@ +export declare function flatten(items: T[][]): T[]; +export declare function splitWhen(items: T[], predicate: (item: T) => boolean): T[][]; diff --git a/node_modules/fast-glob/out/utils/array.js b/node_modules/fast-glob/out/utils/array.js new file mode 100644 index 0000000..50c406e --- /dev/null +++ b/node_modules/fast-glob/out/utils/array.js @@ -0,0 +1,22 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.splitWhen = exports.flatten = void 0; +function flatten(items) { + return items.reduce((collection, item) => [].concat(collection, item), []); +} +exports.flatten = flatten; +function splitWhen(items, predicate) { + const result = [[]]; + let groupIndex = 0; + for (const item of items) { + if (predicate(item)) { + groupIndex++; + result[groupIndex] = []; + } + else { + result[groupIndex].push(item); + } + } + return result; +} +exports.splitWhen = splitWhen; diff --git a/node_modules/fast-glob/out/utils/errno.d.ts b/node_modules/fast-glob/out/utils/errno.d.ts new file mode 100644 index 0000000..1c08d3b --- /dev/null +++ b/node_modules/fast-glob/out/utils/errno.d.ts @@ -0,0 +1,2 @@ +import { ErrnoException } from '../types'; +export declare function isEnoentCodeError(error: ErrnoException): boolean; diff --git a/node_modules/fast-glob/out/utils/errno.js b/node_modules/fast-glob/out/utils/errno.js new file mode 100644 index 0000000..f0bd801 --- /dev/null +++ b/node_modules/fast-glob/out/utils/errno.js 
@@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.isEnoentCodeError = void 0; +function isEnoentCodeError(error) { + return error.code === 'ENOENT'; +} +exports.isEnoentCodeError = isEnoentCodeError; diff --git a/node_modules/fast-glob/out/utils/fs.d.ts b/node_modules/fast-glob/out/utils/fs.d.ts new file mode 100644 index 0000000..64c61ce --- /dev/null +++ b/node_modules/fast-glob/out/utils/fs.d.ts @@ -0,0 +1,4 @@ +/// +import * as fs from 'fs'; +import { Dirent } from '@nodelib/fs.walk'; +export declare function createDirentFromStats(name: string, stats: fs.Stats): Dirent; diff --git a/node_modules/fast-glob/out/utils/fs.js b/node_modules/fast-glob/out/utils/fs.js new file mode 100644 index 0000000..ace7c74 --- /dev/null +++ b/node_modules/fast-glob/out/utils/fs.js @@ -0,0 +1,19 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.createDirentFromStats = void 0; +class DirentFromStats { + constructor(name, stats) { + this.name = name; + this.isBlockDevice = stats.isBlockDevice.bind(stats); + this.isCharacterDevice = stats.isCharacterDevice.bind(stats); + this.isDirectory = stats.isDirectory.bind(stats); + this.isFIFO = stats.isFIFO.bind(stats); + this.isFile = stats.isFile.bind(stats); + this.isSocket = stats.isSocket.bind(stats); + this.isSymbolicLink = stats.isSymbolicLink.bind(stats); + } +} +function createDirentFromStats(name, stats) { + return new DirentFromStats(name, stats); +} +exports.createDirentFromStats = createDirentFromStats; diff --git a/node_modules/fast-glob/out/utils/index.d.ts b/node_modules/fast-glob/out/utils/index.d.ts new file mode 100644 index 0000000..f634cad --- /dev/null +++ b/node_modules/fast-glob/out/utils/index.d.ts @@ -0,0 +1,8 @@ +import * as array from './array'; +import * as errno from './errno'; +import * as fs from './fs'; +import * as path from './path'; +import * as pattern from './pattern'; +import * as stream from './stream'; +import * as string from './string'; +export { array, errno, fs, path, pattern, stream, string }; diff --git a/node_modules/fast-glob/out/utils/index.js b/node_modules/fast-glob/out/utils/index.js new file mode 100644 index 0000000..0f92c16 --- /dev/null +++ b/node_modules/fast-glob/out/utils/index.js @@ -0,0 +1,17 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.string = exports.stream = exports.pattern = exports.path = exports.fs = exports.errno = exports.array = void 0; +const array = require("./array"); +exports.array = array; +const errno = require("./errno"); +exports.errno = errno; +const fs = require("./fs"); +exports.fs = fs; +const path = require("./path"); +exports.path = path; +const pattern = require("./pattern"); +exports.pattern = pattern; +const stream = require("./stream"); +exports.stream = stream; +const string = require("./string"); +exports.string = string; diff --git a/node_modules/fast-glob/out/utils/path.d.ts b/node_modules/fast-glob/out/utils/path.d.ts new file mode 100644 index 0000000..9606d8b --- /dev/null +++ b/node_modules/fast-glob/out/utils/path.d.ts @@ -0,0 +1,8 @@ +import { Pattern } from '../types'; +/** + * Designed to work only with simple paths: `dir\\file`. 
+ */ +export declare function unixify(filepath: string): string; +export declare function makeAbsolute(cwd: string, filepath: string): string; +export declare function escape(pattern: Pattern): Pattern; +export declare function removeLeadingDotSegment(entry: string): string; diff --git a/node_modules/fast-glob/out/utils/path.js b/node_modules/fast-glob/out/utils/path.js new file mode 100644 index 0000000..2544032 --- /dev/null +++ b/node_modules/fast-glob/out/utils/path.js @@ -0,0 +1,33 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.removeLeadingDotSegment = exports.escape = exports.makeAbsolute = exports.unixify = void 0; +const path = require("path"); +const LEADING_DOT_SEGMENT_CHARACTERS_COUNT = 2; // ./ or .\\ +const UNESCAPED_GLOB_SYMBOLS_RE = /(\\?)([()*?[\]{|}]|^!|[!+@](?=\())/g; +/** + * Designed to work only with simple paths: `dir\\file`. + */ +function unixify(filepath) { + return filepath.replace(/\\/g, '/'); +} +exports.unixify = unixify; +function makeAbsolute(cwd, filepath) { + return path.resolve(cwd, filepath); +} +exports.makeAbsolute = makeAbsolute; +function escape(pattern) { + return pattern.replace(UNESCAPED_GLOB_SYMBOLS_RE, '\\$2'); +} +exports.escape = escape; +function removeLeadingDotSegment(entry) { + // We do not use `startsWith` because this is 10x slower than current implementation for some cases. + // eslint-disable-next-line @typescript-eslint/prefer-string-starts-ends-with + if (entry.charAt(0) === '.') { + const secondCharactery = entry.charAt(1); + if (secondCharactery === '/' || secondCharactery === '\\') { + return entry.slice(LEADING_DOT_SEGMENT_CHARACTERS_COUNT); + } + } + return entry; +} +exports.removeLeadingDotSegment = removeLeadingDotSegment; diff --git a/node_modules/fast-glob/out/utils/pattern.d.ts b/node_modules/fast-glob/out/utils/pattern.d.ts new file mode 100644 index 0000000..dbf17b1 --- /dev/null +++ b/node_modules/fast-glob/out/utils/pattern.d.ts @@ -0,0 +1,42 @@ +import { MicromatchOptions, Pattern, PatternRe } from '../types'; +declare type PatternTypeOptions = { + braceExpansion?: boolean; + caseSensitiveMatch?: boolean; + extglob?: boolean; +}; +export declare function isStaticPattern(pattern: Pattern, options?: PatternTypeOptions): boolean; +export declare function isDynamicPattern(pattern: Pattern, options?: PatternTypeOptions): boolean; +export declare function convertToPositivePattern(pattern: Pattern): Pattern; +export declare function convertToNegativePattern(pattern: Pattern): Pattern; +export declare function isNegativePattern(pattern: Pattern): boolean; +export declare function isPositivePattern(pattern: Pattern): boolean; +export declare function getNegativePatterns(patterns: Pattern[]): Pattern[]; +export declare function getPositivePatterns(patterns: Pattern[]): Pattern[]; +/** + * Returns patterns that can be applied inside the current directory. + * + * @example + * // ['./*', '*', 'a/*'] + * getPatternsInsideCurrentDirectory(['./*', '*', 'a/*', '../*', './../*']) + */ +export declare function getPatternsInsideCurrentDirectory(patterns: Pattern[]): Pattern[]; +/** + * Returns patterns to be expanded relative to (outside) the current directory. 
+ * + * @example + * // ['../*', './../*'] + * getPatternsInsideCurrentDirectory(['./*', '*', 'a/*', '../*', './../*']) + */ +export declare function getPatternsOutsideCurrentDirectory(patterns: Pattern[]): Pattern[]; +export declare function isPatternRelatedToParentDirectory(pattern: Pattern): boolean; +export declare function getBaseDirectory(pattern: Pattern): string; +export declare function hasGlobStar(pattern: Pattern): boolean; +export declare function endsWithSlashGlobStar(pattern: Pattern): boolean; +export declare function isAffectDepthOfReadingPattern(pattern: Pattern): boolean; +export declare function expandPatternsWithBraceExpansion(patterns: Pattern[]): Pattern[]; +export declare function expandBraceExpansion(pattern: Pattern): Pattern[]; +export declare function getPatternParts(pattern: Pattern, options: MicromatchOptions): Pattern[]; +export declare function makeRe(pattern: Pattern, options: MicromatchOptions): PatternRe; +export declare function convertPatternsToRe(patterns: Pattern[], options: MicromatchOptions): PatternRe[]; +export declare function matchAny(entry: string, patternsRe: PatternRe[]): boolean; +export {}; diff --git a/node_modules/fast-glob/out/utils/pattern.js b/node_modules/fast-glob/out/utils/pattern.js new file mode 100644 index 0000000..9c97e16 --- /dev/null +++ b/node_modules/fast-glob/out/utils/pattern.js @@ -0,0 +1,169 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.matchAny = exports.convertPatternsToRe = exports.makeRe = exports.getPatternParts = exports.expandBraceExpansion = exports.expandPatternsWithBraceExpansion = exports.isAffectDepthOfReadingPattern = exports.endsWithSlashGlobStar = exports.hasGlobStar = exports.getBaseDirectory = exports.isPatternRelatedToParentDirectory = exports.getPatternsOutsideCurrentDirectory = exports.getPatternsInsideCurrentDirectory = exports.getPositivePatterns = exports.getNegativePatterns = exports.isPositivePattern = exports.isNegativePattern = exports.convertToNegativePattern = exports.convertToPositivePattern = exports.isDynamicPattern = exports.isStaticPattern = void 0; +const path = require("path"); +const globParent = require("glob-parent"); +const micromatch = require("micromatch"); +const GLOBSTAR = '**'; +const ESCAPE_SYMBOL = '\\'; +const COMMON_GLOB_SYMBOLS_RE = /[*?]|^!/; +const REGEX_CHARACTER_CLASS_SYMBOLS_RE = /\[[^[]*]/; +const REGEX_GROUP_SYMBOLS_RE = /(?:^|[^!*+?@])\([^(]*\|[^|]*\)/; +const GLOB_EXTENSION_SYMBOLS_RE = /[!*+?@]\([^(]*\)/; +const BRACE_EXPANSION_SEPARATORS_RE = /,|\.\./; +function isStaticPattern(pattern, options = {}) { + return !isDynamicPattern(pattern, options); +} +exports.isStaticPattern = isStaticPattern; +function isDynamicPattern(pattern, options = {}) { + /** + * A special case with an empty string is necessary for matching patterns that start with a forward slash. + * An empty string cannot be a dynamic pattern. + * For example, the pattern `/lib/*` will be spread into parts: '', 'lib', '*'. + */ + if (pattern === '') { + return false; + } + /** + * When the `caseSensitiveMatch` option is disabled, all patterns must be marked as dynamic, because we cannot check + * filepath directly (without read directory). 
+ */ + if (options.caseSensitiveMatch === false || pattern.includes(ESCAPE_SYMBOL)) { + return true; + } + if (COMMON_GLOB_SYMBOLS_RE.test(pattern) || REGEX_CHARACTER_CLASS_SYMBOLS_RE.test(pattern) || REGEX_GROUP_SYMBOLS_RE.test(pattern)) { + return true; + } + if (options.extglob !== false && GLOB_EXTENSION_SYMBOLS_RE.test(pattern)) { + return true; + } + if (options.braceExpansion !== false && hasBraceExpansion(pattern)) { + return true; + } + return false; +} +exports.isDynamicPattern = isDynamicPattern; +function hasBraceExpansion(pattern) { + const openingBraceIndex = pattern.indexOf('{'); + if (openingBraceIndex === -1) { + return false; + } + const closingBraceIndex = pattern.indexOf('}', openingBraceIndex + 1); + if (closingBraceIndex === -1) { + return false; + } + const braceContent = pattern.slice(openingBraceIndex, closingBraceIndex); + return BRACE_EXPANSION_SEPARATORS_RE.test(braceContent); +} +function convertToPositivePattern(pattern) { + return isNegativePattern(pattern) ? pattern.slice(1) : pattern; +} +exports.convertToPositivePattern = convertToPositivePattern; +function convertToNegativePattern(pattern) { + return '!' + pattern; +} +exports.convertToNegativePattern = convertToNegativePattern; +function isNegativePattern(pattern) { + return pattern.startsWith('!') && pattern[1] !== '('; +} +exports.isNegativePattern = isNegativePattern; +function isPositivePattern(pattern) { + return !isNegativePattern(pattern); +} +exports.isPositivePattern = isPositivePattern; +function getNegativePatterns(patterns) { + return patterns.filter(isNegativePattern); +} +exports.getNegativePatterns = getNegativePatterns; +function getPositivePatterns(patterns) { + return patterns.filter(isPositivePattern); +} +exports.getPositivePatterns = getPositivePatterns; +/** + * Returns patterns that can be applied inside the current directory. + * + * @example + * // ['./*', '*', 'a/*'] + * getPatternsInsideCurrentDirectory(['./*', '*', 'a/*', '../*', './../*']) + */ +function getPatternsInsideCurrentDirectory(patterns) { + return patterns.filter((pattern) => !isPatternRelatedToParentDirectory(pattern)); +} +exports.getPatternsInsideCurrentDirectory = getPatternsInsideCurrentDirectory; +/** + * Returns patterns to be expanded relative to (outside) the current directory. 
+ * + * @example + * // ['../*', './../*'] + * getPatternsInsideCurrentDirectory(['./*', '*', 'a/*', '../*', './../*']) + */ +function getPatternsOutsideCurrentDirectory(patterns) { + return patterns.filter(isPatternRelatedToParentDirectory); +} +exports.getPatternsOutsideCurrentDirectory = getPatternsOutsideCurrentDirectory; +function isPatternRelatedToParentDirectory(pattern) { + return pattern.startsWith('..') || pattern.startsWith('./..'); +} +exports.isPatternRelatedToParentDirectory = isPatternRelatedToParentDirectory; +function getBaseDirectory(pattern) { + return globParent(pattern, { flipBackslashes: false }); +} +exports.getBaseDirectory = getBaseDirectory; +function hasGlobStar(pattern) { + return pattern.includes(GLOBSTAR); +} +exports.hasGlobStar = hasGlobStar; +function endsWithSlashGlobStar(pattern) { + return pattern.endsWith('/' + GLOBSTAR); +} +exports.endsWithSlashGlobStar = endsWithSlashGlobStar; +function isAffectDepthOfReadingPattern(pattern) { + const basename = path.basename(pattern); + return endsWithSlashGlobStar(pattern) || isStaticPattern(basename); +} +exports.isAffectDepthOfReadingPattern = isAffectDepthOfReadingPattern; +function expandPatternsWithBraceExpansion(patterns) { + return patterns.reduce((collection, pattern) => { + return collection.concat(expandBraceExpansion(pattern)); + }, []); +} +exports.expandPatternsWithBraceExpansion = expandPatternsWithBraceExpansion; +function expandBraceExpansion(pattern) { + return micromatch.braces(pattern, { + expand: true, + nodupes: true + }); +} +exports.expandBraceExpansion = expandBraceExpansion; +function getPatternParts(pattern, options) { + let { parts } = micromatch.scan(pattern, Object.assign(Object.assign({}, options), { parts: true })); + /** + * The scan method returns an empty array in some cases. + * See micromatch/picomatch#58 for more details. + */ + if (parts.length === 0) { + parts = [pattern]; + } + /** + * The scan method does not return an empty part for the pattern with a forward slash. + * This is another part of micromatch/picomatch#58. 
+ */ + if (parts[0].startsWith('/')) { + parts[0] = parts[0].slice(1); + parts.unshift(''); + } + return parts; +} +exports.getPatternParts = getPatternParts; +function makeRe(pattern, options) { + return micromatch.makeRe(pattern, options); +} +exports.makeRe = makeRe; +function convertPatternsToRe(patterns, options) { + return patterns.map((pattern) => makeRe(pattern, options)); +} +exports.convertPatternsToRe = convertPatternsToRe; +function matchAny(entry, patternsRe) { + return patternsRe.some((patternRe) => patternRe.test(entry)); +} +exports.matchAny = matchAny; diff --git a/node_modules/fast-glob/out/utils/stream.d.ts b/node_modules/fast-glob/out/utils/stream.d.ts new file mode 100644 index 0000000..aafacef --- /dev/null +++ b/node_modules/fast-glob/out/utils/stream.d.ts @@ -0,0 +1,3 @@ +/// +import { Readable } from 'stream'; +export declare function merge(streams: Readable[]): NodeJS.ReadableStream; diff --git a/node_modules/fast-glob/out/utils/stream.js b/node_modules/fast-glob/out/utils/stream.js new file mode 100644 index 0000000..b32028c --- /dev/null +++ b/node_modules/fast-glob/out/utils/stream.js @@ -0,0 +1,17 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.merge = void 0; +const merge2 = require("merge2"); +function merge(streams) { + const mergedStream = merge2(streams); + streams.forEach((stream) => { + stream.once('error', (error) => mergedStream.emit('error', error)); + }); + mergedStream.once('close', () => propagateCloseEventToSources(streams)); + mergedStream.once('end', () => propagateCloseEventToSources(streams)); + return mergedStream; +} +exports.merge = merge; +function propagateCloseEventToSources(streams) { + streams.forEach((stream) => stream.emit('close')); +} diff --git a/node_modules/fast-glob/out/utils/string.d.ts b/node_modules/fast-glob/out/utils/string.d.ts new file mode 100644 index 0000000..c884735 --- /dev/null +++ b/node_modules/fast-glob/out/utils/string.d.ts @@ -0,0 +1,2 @@ +export declare function isString(input: unknown): input is string; +export declare function isEmpty(input: string): boolean; diff --git a/node_modules/fast-glob/out/utils/string.js b/node_modules/fast-glob/out/utils/string.js new file mode 100644 index 0000000..76e7ea5 --- /dev/null +++ b/node_modules/fast-glob/out/utils/string.js @@ -0,0 +1,11 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.isEmpty = exports.isString = void 0; +function isString(input) { + return typeof input === 'string'; +} +exports.isString = isString; +function isEmpty(input) { + return input === ''; +} +exports.isEmpty = isEmpty; diff --git a/node_modules/fast-glob/package.json b/node_modules/fast-glob/package.json new file mode 100644 index 0000000..d74e403 --- /dev/null +++ b/node_modules/fast-glob/package.json @@ -0,0 +1,94 @@ +{ + "name": "fast-glob", + "version": "3.2.12", + "description": "It's a very fast and efficient glob library for Node.js", + "license": "MIT", + "repository": "mrmlnc/fast-glob", + "author": { + "name": "Denis Malinochkin", + "url": "https://mrmlnc.com" + }, + "engines": { + "node": ">=8.6.0" + }, + "main": "out/index.js", + "typings": "out/index.d.ts", + "files": [ + "out", + "!out/{benchmark,tests}", + "!out/**/*.map", + "!out/**/*.spec.*" + ], + "keywords": [ + "glob", + "patterns", + "fast", + "implementation" + ], + "devDependencies": { + "@nodelib/fs.macchiato": "^1.0.1", + "@types/compute-stdev": "^1.0.0", + "@types/easy-table": "^0.0.32", + "@types/glob": "^7.1.1", + 
"@types/glob-parent": "^5.1.0", + "@types/is-ci": "^2.0.0", + "@types/merge2": "^1.1.4", + "@types/micromatch": "^4.0.0", + "@types/minimist": "^1.2.0", + "@types/mocha": "^5.2.7", + "@types/node": "^12.7.8", + "@types/rimraf": "^2.0.2", + "@types/sinon": "^7.5.0", + "compute-stdev": "^1.0.0", + "easy-table": "^1.1.1", + "eslint": "^6.5.1", + "eslint-config-mrmlnc": "^1.1.0", + "execa": "^2.0.4", + "fast-glob": "^3.0.4", + "fdir": "^5.1.0", + "glob": "^7.1.4", + "is-ci": "^2.0.0", + "log-update": "^4.0.0", + "minimist": "^1.2.0", + "mocha": "^6.2.1", + "rimraf": "^3.0.0", + "sinon": "^7.5.0", + "tiny-glob": "^0.2.6", + "typescript": "^3.6.3" + }, + "dependencies": { + "@nodelib/fs.stat": "^2.0.2", + "@nodelib/fs.walk": "^1.2.3", + "glob-parent": "^5.1.2", + "merge2": "^1.3.0", + "micromatch": "^4.0.4" + }, + "scripts": { + "clean": "rimraf out", + "lint": "eslint \"src/**/*.ts\" --cache", + "compile": "tsc", + "test": "mocha \"out/**/*.spec.js\" -s 0", + "smoke": "mocha \"out/**/*.smoke.js\" -s 0", + "smoke:sync": "mocha \"out/**/*.smoke.js\" -s 0 --grep \"\\(sync\\)\"", + "smoke:async": "mocha \"out/**/*.smoke.js\" -s 0 --grep \"\\(async\\)\"", + "smoke:stream": "mocha \"out/**/*.smoke.js\" -s 0 --grep \"\\(stream\\)\"", + "build": "npm run clean && npm run compile && npm run lint && npm test", + "watch": "npm run clean && npm run compile -- --sourceMap --watch", + "bench": "npm run bench-async && npm run bench-stream && npm run bench-sync", + "bench-async": "npm run bench-async-flatten && npm run bench-async-deep && npm run bench-async-partial-flatten && npm run bench-async-partial-deep", + "bench-stream": "npm run bench-stream-flatten && npm run bench-stream-deep && npm run bench-stream-partial-flatten && npm run bench-stream-partial-deep", + "bench-sync": "npm run bench-sync-flatten && npm run bench-sync-deep && npm run bench-sync-partial-flatten && npm run bench-sync-partial-deep", + "bench-async-flatten": "node ./out/benchmark --mode async --pattern \"*\"", + "bench-async-deep": "node ./out/benchmark --mode async --pattern \"**\"", + "bench-async-partial-flatten": "node ./out/benchmark --mode async --pattern \"{fixtures,out}/{first,second}/*\"", + "bench-async-partial-deep": "node ./out/benchmark --mode async --pattern \"{fixtures,out}/**\"", + "bench-stream-flatten": "node ./out/benchmark --mode stream --pattern \"*\"", + "bench-stream-deep": "node ./out/benchmark --mode stream --pattern \"**\"", + "bench-stream-partial-flatten": "node ./out/benchmark --mode stream --pattern \"{fixtures,out}/{first,second}/*\"", + "bench-stream-partial-deep": "node ./out/benchmark --mode stream --pattern \"{fixtures,out}/**\"", + "bench-sync-flatten": "node ./out/benchmark --mode sync --pattern \"*\"", + "bench-sync-deep": "node ./out/benchmark --mode sync --pattern \"**\"", + "bench-sync-partial-flatten": "node ./out/benchmark --mode sync --pattern \"{fixtures,out}/{first,second}/*\"", + "bench-sync-partial-deep": "node ./out/benchmark --mode sync --pattern \"{fixtures,out}/**\"" + } +} diff --git a/node_modules/fastq/.github/dependabot.yml b/node_modules/fastq/.github/dependabot.yml new file mode 100644 index 0000000..7e7cbe1 --- /dev/null +++ b/node_modules/fastq/.github/dependabot.yml @@ -0,0 +1,11 @@ +version: 2 +updates: +- package-ecosystem: npm + directory: "/" + schedule: + interval: daily + open-pull-requests-limit: 10 + ignore: + - dependency-name: standard + versions: + - 16.0.3 diff --git a/node_modules/fastq/.github/workflows/ci.yml b/node_modules/fastq/.github/workflows/ci.yml new 
file mode 100644 index 0000000..50e66b5 --- /dev/null +++ b/node_modules/fastq/.github/workflows/ci.yml @@ -0,0 +1,50 @@ +name: ci + +on: [push, pull_request] + +jobs: + legacy: + runs-on: ubuntu-latest + + strategy: + matrix: + node-version: ['0.10', '0.12', 4.x, 6.x, 8.x] + + steps: + - uses: actions/checkout@v2 + + - name: Use Node.js + uses: actions/setup-node@v1 + with: + node-version: ${{ matrix.node-version }} + + - name: Install + run: | + npm install --production && npm install tape + + - name: Run tests + run: | + npm run legacy + + test: + runs-on: ubuntu-latest + + strategy: + matrix: + node-version: [10.x, 12.x, 13.x, 14.x, 15.x, 16.x] + + steps: + - uses: actions/checkout@v2 + + - name: Use Node.js + uses: actions/setup-node@v1 + with: + node-version: ${{ matrix.node-version }} + + - name: Install + run: | + npm install + + - name: Run tests + run: | + npm run test diff --git a/node_modules/fastq/LICENSE b/node_modules/fastq/LICENSE new file mode 100644 index 0000000..27c7bb4 --- /dev/null +++ b/node_modules/fastq/LICENSE @@ -0,0 +1,13 @@ +Copyright (c) 2015-2020, Matteo Collina + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/fastq/README.md b/node_modules/fastq/README.md new file mode 100644 index 0000000..8a25fef --- /dev/null +++ b/node_modules/fastq/README.md @@ -0,0 +1,309 @@ +# fastq + +![ci][ci-url] +[![npm version][npm-badge]][npm-url] +[![Dependency Status][david-badge]][david-url] + +Fast, in memory work queue. + +Benchmarks (1 million tasks): + +* setImmediate: 812ms +* fastq: 854ms +* async.queue: 1298ms +* neoAsync.queue: 1249ms + +Obtained on node 12.16.1, on a dedicated server. + +If you need zero-overhead series function call, check out +[fastseries](http://npm.im/fastseries). For zero-overhead parallel +function call, check out [fastparallel](http://npm.im/fastparallel). 
+ +[![js-standard-style](https://raw.githubusercontent.com/feross/standard/master/badge.png)](https://github.com/feross/standard) + + * Installation + * Usage + * API + * Licence & copyright + +## Install + +`npm i fastq --save` + +## Usage (callback API) + +```js +'use strict' + +const queue = require('fastq')(worker, 1) + +queue.push(42, function (err, result) { + if (err) { throw err } + console.log('the result is', result) +}) + +function worker (arg, cb) { + cb(null, arg * 2) +} +``` + +## Usage (promise API) + +```js +const queue = require('fastq').promise(worker, 1) + +async function worker (arg) { + return arg * 2 +} + +async function run () { + const result = await queue.push(42) + console.log('the result is', result) +} + +run() +``` + +### Setting "this" + +```js +'use strict' + +const that = { hello: 'world' } +const queue = require('fastq')(that, worker, 1) + +queue.push(42, function (err, result) { + if (err) { throw err } + console.log(this) + console.log('the result is', result) +}) + +function worker (arg, cb) { + console.log(this) + cb(null, arg * 2) +} +``` + +### Using with TypeScript (callback API) + +```ts +'use strict' + +import * as fastq from "fastq"; +import type { queue, done } from "fastq"; + +type Task = { + id: number +} + +const q: queue = fastq(worker, 1) + +q.push({ id: 42}) + +function worker (arg: Task, cb: done) { + console.log(arg.id) + cb(null) +} +``` + +### Using with TypeScript (promise API) + +```ts +'use strict' + +import * as fastq from "fastq"; +import type { queueAsPromised } from "fastq"; + +type Task = { + id: number +} + +const q: queueAsPromised = fastq.promise(asyncWorker, 1) + +q.push({ id: 42}).catch((err) => console.error(err)) + +async function asyncWorker (arg: Task): Promise { + // No need for a try-catch block, fastq handles errors automatically + console.log(arg.id) +} +``` + +## API + +* fastqueue() +* queue#push() +* queue#unshift() +* queue#pause() +* queue#resume() +* queue#idle() +* queue#length() +* queue#getQueue() +* queue#kill() +* queue#killAndDrain() +* queue#error() +* queue#concurrency +* queue#drain +* queue#empty +* queue#saturated +* fastqueue.promise() + +------------------------------------------------------- + +### fastqueue([that], worker, concurrency) + +Creates a new queue. + +Arguments: + +* `that`, optional context of the `worker` function. +* `worker`, worker function, it would be called with `that` as `this`, + if that is specified. +* `concurrency`, number of concurrent tasks that could be executed in + parallel. + +------------------------------------------------------- + +### queue.push(task, done) + +Add a task at the end of the queue. `done(err, result)` will be called +when the task was processed. + +------------------------------------------------------- + +### queue.unshift(task, done) + +Add a task at the beginning of the queue. `done(err, result)` will be called +when the task was processed. + +------------------------------------------------------- + +### queue.pause() + +Pause the processing of tasks. Currently worked tasks are not +stopped. + +------------------------------------------------------- + +### queue.resume() + +Resume the processing of tasks. + +------------------------------------------------------- + +### queue.idle() + +Returns `false` if there are tasks being processed or waiting to be processed. +`true` otherwise. + +------------------------------------------------------- + +### queue.length() + +Returns the number of tasks waiting to be processed (in the queue). 
+ +------------------------------------------------------- + +### queue.getQueue() + +Returns all the tasks be processed (in the queue). Returns empty array when there are no tasks + +------------------------------------------------------- + +### queue.kill() + +Removes all tasks waiting to be processed, and reset `drain` to an empty +function. + +------------------------------------------------------- + +### queue.killAndDrain() + +Same than `kill` but the `drain` function will be called before reset to empty. + +------------------------------------------------------- + +### queue.error(handler) + +Set a global error handler. `handler(err, task)` will be called +each time a task is completed, `err` will be not null if the task has thrown an error. + +------------------------------------------------------- + +### queue.concurrency + +Property that returns the number of concurrent tasks that could be executed in +parallel. It can be altered at runtime. + +------------------------------------------------------- + +### queue.drain + +Function that will be called when the last +item from the queue has been processed by a worker. +It can be altered at runtime. + +------------------------------------------------------- + +### queue.empty + +Function that will be called when the last +item from the queue has been assigned to a worker. +It can be altered at runtime. + +------------------------------------------------------- + +### queue.saturated + +Function that will be called when the queue hits the concurrency +limit. +It can be altered at runtime. + +------------------------------------------------------- + +### fastqueue.promise([that], worker(arg), concurrency) + +Creates a new queue with `Promise` apis. It also offers all the methods +and properties of the object returned by [`fastqueue`](#fastqueue) with the modified +[`push`](#pushPromise) and [`unshift`](#unshiftPromise) methods. + +Node v10+ is required to use the promisified version. + +Arguments: +* `that`, optional context of the `worker` function. +* `worker`, worker function, it would be called with `that` as `this`, + if that is specified. It MUST return a `Promise`. +* `concurrency`, number of concurrent tasks that could be executed in + parallel. + + +#### queue.push(task) => Promise + +Add a task at the end of the queue. The returned `Promise` will be fulfilled (rejected) +when the task is completed successfully (unsuccessfully). + +This promise could be ignored as it will not lead to a `'unhandledRejection'`. + + +#### queue.unshift(task) => Promise + +Add a task at the beginning of the queue. The returned `Promise` will be fulfilled (rejected) +when the task is completed successfully (unsuccessfully). + +This promise could be ignored as it will not lead to a `'unhandledRejection'`. + + +#### queue.drained() => Promise + +Wait for the queue to be drained. The returned `Promise` will be resolved when all tasks in the queue have been processed by a worker. + +This promise could be ignored as it will not lead to a `'unhandledRejection'`. 
+ +## License + +ISC + +[ci-url]: https://github.com/mcollina/fastq/workflows/ci/badge.svg +[npm-badge]: https://badge.fury.io/js/fastq.svg +[npm-url]: https://badge.fury.io/js/fastq +[david-badge]: https://david-dm.org/mcollina/fastq.svg +[david-url]: https://david-dm.org/mcollina/fastq diff --git a/node_modules/fastq/bench.js b/node_modules/fastq/bench.js new file mode 100644 index 0000000..4eaa829 --- /dev/null +++ b/node_modules/fastq/bench.js @@ -0,0 +1,66 @@ +'use strict' + +const max = 1000000 +const fastqueue = require('./')(worker, 1) +const { promisify } = require('util') +const immediate = promisify(setImmediate) +const qPromise = require('./').promise(immediate, 1) +const async = require('async') +const neo = require('neo-async') +const asyncqueue = async.queue(worker, 1) +const neoqueue = neo.queue(worker, 1) + +function bench (func, done) { + const key = max + '*' + func.name + let count = -1 + + console.time(key) + end() + + function end () { + if (++count < max) { + func(end) + } else { + console.timeEnd(key) + if (done) { + done() + } + } + } +} + +function benchFastQ (done) { + fastqueue.push(42, done) +} + +function benchAsyncQueue (done) { + asyncqueue.push(42, done) +} + +function benchNeoQueue (done) { + neoqueue.push(42, done) +} + +function worker (arg, cb) { + setImmediate(cb) +} + +function benchSetImmediate (cb) { + worker(42, cb) +} + +function benchFastQPromise (done) { + qPromise.push(42).then(function () { done() }, done) +} + +function runBench (done) { + async.eachSeries([ + benchSetImmediate, + benchFastQ, + benchNeoQueue, + benchAsyncQueue, + benchFastQPromise + ], bench, done) +} + +runBench(runBench) diff --git a/node_modules/fastq/example.js b/node_modules/fastq/example.js new file mode 100644 index 0000000..665fdc8 --- /dev/null +++ b/node_modules/fastq/example.js @@ -0,0 +1,14 @@ +'use strict' + +/* eslint-disable no-var */ + +var queue = require('./')(worker, 1) + +queue.push(42, function (err, result) { + if (err) { throw err } + console.log('the result is', result) +}) + +function worker (arg, cb) { + cb(null, 42 * 2) +} diff --git a/node_modules/fastq/example.mjs b/node_modules/fastq/example.mjs new file mode 100644 index 0000000..81be789 --- /dev/null +++ b/node_modules/fastq/example.mjs @@ -0,0 +1,11 @@ +import { promise as queueAsPromised } from './queue.js' + +/* eslint-disable */ + +const queue = queueAsPromised(worker, 1) + +console.log('the result is', await queue.push(42)) + +async function worker (arg) { + return 42 * 2 +} diff --git a/node_modules/fastq/index.d.ts b/node_modules/fastq/index.d.ts new file mode 100644 index 0000000..65ebe65 --- /dev/null +++ b/node_modules/fastq/index.d.ts @@ -0,0 +1,37 @@ +declare function fastq(context: C, worker: fastq.worker, concurrency: number): fastq.queue +declare function fastq(worker: fastq.worker, concurrency: number): fastq.queue + +declare namespace fastq { + type worker = (this: C, task: T, cb: fastq.done) => void + type asyncWorker = (this: C, task: T) => Promise + type done = (err: Error | null, result?: R) => void + type errorHandler = (err: Error, task: T) => void + + interface queue { + push(task: T, done?: done): void + unshift(task: T, done?: done): void + pause(): any + resume(): any + idle(): boolean + length(): number + getQueue(): T[] + kill(): any + killAndDrain(): any + error(handler: errorHandler): void + concurrency: number + drain(): any + empty: () => void + saturated: () => void + } + + interface queueAsPromised extends queue { + push(task: T): Promise + unshift(task: T): 
Promise + drained(): Promise + } + + function promise(context: C, worker: fastq.asyncWorker, concurrency: number): fastq.queueAsPromised + function promise(worker: fastq.asyncWorker, concurrency: number): fastq.queueAsPromised +} + +export = fastq diff --git a/node_modules/fastq/package.json b/node_modules/fastq/package.json new file mode 100644 index 0000000..bf572e3 --- /dev/null +++ b/node_modules/fastq/package.json @@ -0,0 +1,52 @@ +{ + "name": "fastq", + "version": "1.15.0", + "description": "Fast, in memory work queue", + "main": "queue.js", + "scripts": { + "lint": "standard --verbose | snazzy", + "unit": "nyc --lines 100 --branches 100 --functions 100 --check-coverage --reporter=text tape test/test.js test/promise.js", + "coverage": "nyc --reporter=html --reporter=cobertura --reporter=text tape test/test.js test/promise.js", + "test:report": "npm run lint && npm run unit:report", + "test": "npm run lint && npm run unit && npm run typescript", + "typescript": "tsc --project ./test/tsconfig.json", + "legacy": "tape test/test.js" + }, + "pre-commit": [ + "test" + ], + "repository": { + "type": "git", + "url": "git+https://github.com/mcollina/fastq.git" + }, + "keywords": [ + "fast", + "queue", + "async", + "worker" + ], + "author": "Matteo Collina ", + "license": "ISC", + "bugs": { + "url": "https://github.com/mcollina/fastq/issues" + }, + "homepage": "https://github.com/mcollina/fastq#readme", + "devDependencies": { + "async": "^3.1.0", + "neo-async": "^2.6.1", + "nyc": "^15.0.0", + "pre-commit": "^1.2.2", + "snazzy": "^9.0.0", + "standard": "^16.0.0", + "tape": "^5.0.0", + "typescript": "^4.0.2" + }, + "dependencies": { + "reusify": "^1.0.4" + }, + "standard": { + "ignore": [ + "example.mjs" + ] + } +} diff --git a/node_modules/fastq/queue.js b/node_modules/fastq/queue.js new file mode 100644 index 0000000..03fe112 --- /dev/null +++ b/node_modules/fastq/queue.js @@ -0,0 +1,289 @@ +'use strict' + +/* eslint-disable no-var */ + +var reusify = require('reusify') + +function fastqueue (context, worker, concurrency) { + if (typeof context === 'function') { + concurrency = worker + worker = context + context = null + } + + if (concurrency < 1) { + throw new Error('fastqueue concurrency must be greater than 1') + } + + var cache = reusify(Task) + var queueHead = null + var queueTail = null + var _running = 0 + var errorHandler = null + + var self = { + push: push, + drain: noop, + saturated: noop, + pause: pause, + paused: false, + concurrency: concurrency, + running: running, + resume: resume, + idle: idle, + length: length, + getQueue: getQueue, + unshift: unshift, + empty: noop, + kill: kill, + killAndDrain: killAndDrain, + error: error + } + + return self + + function running () { + return _running + } + + function pause () { + self.paused = true + } + + function length () { + var current = queueHead + var counter = 0 + + while (current) { + current = current.next + counter++ + } + + return counter + } + + function getQueue () { + var current = queueHead + var tasks = [] + + while (current) { + tasks.push(current.value) + current = current.next + } + + return tasks + } + + function resume () { + if (!self.paused) return + self.paused = false + for (var i = 0; i < self.concurrency; i++) { + _running++ + release() + } + } + + function idle () { + return _running === 0 && self.length() === 0 + } + + function push (value, done) { + var current = cache.get() + + current.context = context + current.release = release + current.value = value + current.callback = done || noop + 
current.errorHandler = errorHandler + + if (_running === self.concurrency || self.paused) { + if (queueTail) { + queueTail.next = current + queueTail = current + } else { + queueHead = current + queueTail = current + self.saturated() + } + } else { + _running++ + worker.call(context, current.value, current.worked) + } + } + + function unshift (value, done) { + var current = cache.get() + + current.context = context + current.release = release + current.value = value + current.callback = done || noop + + if (_running === self.concurrency || self.paused) { + if (queueHead) { + current.next = queueHead + queueHead = current + } else { + queueHead = current + queueTail = current + self.saturated() + } + } else { + _running++ + worker.call(context, current.value, current.worked) + } + } + + function release (holder) { + if (holder) { + cache.release(holder) + } + var next = queueHead + if (next) { + if (!self.paused) { + if (queueTail === queueHead) { + queueTail = null + } + queueHead = next.next + next.next = null + worker.call(context, next.value, next.worked) + if (queueTail === null) { + self.empty() + } + } else { + _running-- + } + } else if (--_running === 0) { + self.drain() + } + } + + function kill () { + queueHead = null + queueTail = null + self.drain = noop + } + + function killAndDrain () { + queueHead = null + queueTail = null + self.drain() + self.drain = noop + } + + function error (handler) { + errorHandler = handler + } +} + +function noop () {} + +function Task () { + this.value = null + this.callback = noop + this.next = null + this.release = noop + this.context = null + this.errorHandler = null + + var self = this + + this.worked = function worked (err, result) { + var callback = self.callback + var errorHandler = self.errorHandler + var val = self.value + self.value = null + self.callback = noop + if (self.errorHandler) { + errorHandler(err, val) + } + callback.call(self.context, err, result) + self.release(self) + } +} + +function queueAsPromised (context, worker, concurrency) { + if (typeof context === 'function') { + concurrency = worker + worker = context + context = null + } + + function asyncWrapper (arg, cb) { + worker.call(this, arg) + .then(function (res) { + cb(null, res) + }, cb) + } + + var queue = fastqueue(context, asyncWrapper, concurrency) + + var pushCb = queue.push + var unshiftCb = queue.unshift + + queue.push = push + queue.unshift = unshift + queue.drained = drained + + return queue + + function push (value) { + var p = new Promise(function (resolve, reject) { + pushCb(value, function (err, result) { + if (err) { + reject(err) + return + } + resolve(result) + }) + }) + + // Let's fork the promise chain to + // make the error bubble up to the user but + // not lead to a unhandledRejection + p.catch(noop) + + return p + } + + function unshift (value) { + var p = new Promise(function (resolve, reject) { + unshiftCb(value, function (err, result) { + if (err) { + reject(err) + return + } + resolve(result) + }) + }) + + // Let's fork the promise chain to + // make the error bubble up to the user but + // not lead to a unhandledRejection + p.catch(noop) + + return p + } + + function drained () { + if (queue.idle()) { + return new Promise(function (resolve) { + resolve() + }) + } + + var previousDrain = queue.drain + + var p = new Promise(function (resolve) { + queue.drain = function () { + previousDrain() + resolve() + } + }) + + return p + } +} + +module.exports = fastqueue +module.exports.promise = queueAsPromised diff --git 
a/node_modules/fastq/test/example.ts b/node_modules/fastq/test/example.ts new file mode 100644 index 0000000..c454e9d --- /dev/null +++ b/node_modules/fastq/test/example.ts @@ -0,0 +1,81 @@ +import * as fastq from '../' +import { promise as queueAsPromised } from '../' + +// Basic example + +const queue = fastq(worker, 1) + +queue.push('world', (err, result) => { + if (err) throw err + console.log('the result is', result) +}) + +queue.push('push without cb') + +queue.concurrency + +queue.drain() + +queue.empty = () => undefined + +console.log('the queue tasks are', queue.getQueue()) + +queue.idle() + +queue.kill() + +queue.killAndDrain() + +queue.length + +queue.pause() + +queue.resume() + +queue.saturated = () => undefined + +queue.unshift('world', (err, result) => { + if (err) throw err + console.log('the result is', result) +}) + +queue.unshift('unshift without cb') + +function worker(task: any, cb: fastq.done) { + cb(null, 'hello ' + task) +} + +// Generics example + +interface GenericsContext { + base: number; +} + +const genericsQueue = fastq({ base: 6 }, genericsWorker, 1) + +genericsQueue.push(7, (err, done) => { + if (err) throw err + console.log('the result is', done) +}) + +genericsQueue.unshift(7, (err, done) => { + if (err) throw err + console.log('the result is', done) +}) + +function genericsWorker(this: GenericsContext, task: number, cb: fastq.done) { + cb(null, 'the meaning of life is ' + (this.base * task)) +} + +const queue2 = queueAsPromised(asyncWorker, 1) + +async function asyncWorker(task: any) { + return 'hello ' + task +} + +async function run () { + await queue.push(42) + await queue.unshift(42) +} + +run() diff --git a/node_modules/fastq/test/promise.js b/node_modules/fastq/test/promise.js new file mode 100644 index 0000000..fe014ff --- /dev/null +++ b/node_modules/fastq/test/promise.js @@ -0,0 +1,248 @@ +'use strict' + +const test = require('tape') +const buildQueue = require('../').promise +const { promisify } = require('util') +const sleep = promisify(setTimeout) +const immediate = promisify(setImmediate) + +test('concurrency', function (t) { + t.plan(2) + t.throws(buildQueue.bind(null, worker, 0)) + t.doesNotThrow(buildQueue.bind(null, worker, 1)) + + async function worker (arg) { + return true + } +}) + +test('worker execution', async function (t) { + const queue = buildQueue(worker, 1) + + const result = await queue.push(42) + + t.equal(result, true, 'result matches') + + async function worker (arg) { + t.equal(arg, 42) + return true + } +}) + +test('limit', async function (t) { + const queue = buildQueue(worker, 1) + + const [res1, res2] = await Promise.all([queue.push(10), queue.push(0)]) + t.equal(res1, 10, 'the result matches') + t.equal(res2, 0, 'the result matches') + + async function worker (arg) { + await sleep(arg) + return arg + } +}) + +test('multiple executions', async function (t) { + const queue = buildQueue(worker, 1) + const toExec = [1, 2, 3, 4, 5] + const expected = ['a', 'b', 'c', 'd', 'e'] + let count = 0 + + await Promise.all(toExec.map(async function (task, i) { + const result = await queue.push(task) + t.equal(result, expected[i], 'the result matches') + })) + + async function worker (arg) { + t.equal(arg, toExec[count], 'arg matches') + return expected[count++] + } +}) + +test('drained', async function (t) { + const queue = buildQueue(worker, 2) + + const toExec = new Array(10).fill(10) + let count = 0 + + async function worker (arg) { + await sleep(arg) + count++ + } + + toExec.forEach(function (i) { + queue.push(i) + }) + + await 
queue.drained() + + t.equal(count, toExec.length) + + toExec.forEach(function (i) { + queue.push(i) + }) + + await queue.drained() + + t.equal(count, toExec.length * 2) +}) + +test('drained with exception should not throw', async function (t) { + const queue = buildQueue(worker, 2) + + const toExec = new Array(10).fill(10) + + async function worker () { + throw new Error('foo') + } + + toExec.forEach(function (i) { + queue.push(i) + }) + + await queue.drained() +}) + +test('drained with drain function', async function (t) { + let drainCalled = false + const queue = buildQueue(worker, 2) + + queue.drain = function () { + drainCalled = true + } + + const toExec = new Array(10).fill(10) + let count = 0 + + async function worker (arg) { + await sleep(arg) + count++ + } + + toExec.forEach(function () { + queue.push() + }) + + await queue.drained() + + t.equal(count, toExec.length) + t.equal(drainCalled, true) +}) + +test('drained while idle should resolve', async function (t) { + const queue = buildQueue(worker, 2) + + async function worker (arg) { + await sleep(arg) + } + + await queue.drained() +}) + +test('drained while idle should not call the drain function', async function (t) { + let drainCalled = false + const queue = buildQueue(worker, 2) + + queue.drain = function () { + drainCalled = true + } + + async function worker (arg) { + await sleep(arg) + } + + await queue.drained() + + t.equal(drainCalled, false) +}) + +test('set this', async function (t) { + t.plan(1) + const that = {} + const queue = buildQueue(that, worker, 1) + + await queue.push(42) + + async function worker (arg) { + t.equal(this, that, 'this matches') + } +}) + +test('unshift', async function (t) { + const queue = buildQueue(worker, 1) + const expected = [1, 2, 3, 4] + + await Promise.all([ + queue.push(1), + queue.push(4), + queue.unshift(3), + queue.unshift(2) + ]) + + t.is(expected.length, 0) + + async function worker (arg) { + t.equal(expected.shift(), arg, 'tasks come in order') + } +}) + +test('push with worker throwing error', async function (t) { + t.plan(5) + const q = buildQueue(async function (task, cb) { + throw new Error('test error') + }, 1) + q.error(function (err, task) { + t.ok(err instanceof Error, 'global error handler should catch the error') + t.match(err.message, /test error/, 'error message should be "test error"') + t.equal(task, 42, 'The task executed should be passed') + }) + try { + await q.push(42) + } catch (err) { + t.ok(err instanceof Error, 'push callback should catch the error') + t.match(err.message, /test error/, 'error message should be "test error"') + } +}) + +test('unshift with worker throwing error', async function (t) { + t.plan(2) + const q = buildQueue(async function (task, cb) { + throw new Error('test error') + }, 1) + try { + await q.unshift(42) + } catch (err) { + t.ok(err instanceof Error, 'push callback should catch the error') + t.match(err.message, /test error/, 'error message should be "test error"') + } +}) + +test('no unhandledRejection (push)', async function (t) { + function handleRejection () { + t.fail('unhandledRejection') + } + process.once('unhandledRejection', handleRejection) + const q = buildQueue(async function (task, cb) { + throw new Error('test error') + }, 1) + + q.push(42) + + await immediate() + process.removeListener('unhandledRejection', handleRejection) +}) + +test('no unhandledRejection (unshift)', async function (t) { + function handleRejection () { + t.fail('unhandledRejection') + } + process.once('unhandledRejection', handleRejection) + 
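+  // push() below is deliberately not awaited: fastq's promise wrapper attaches a
+  // noop catch to the returned promise, so the worker's rejection must never
+  // surface as an unhandledRejection and trip the listener registered above.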
const q = buildQueue(async function (task, cb) { + throw new Error('test error') + }, 1) + + q.unshift(42) + + await immediate() + process.removeListener('unhandledRejection', handleRejection) +}) diff --git a/node_modules/fastq/test/test.js b/node_modules/fastq/test/test.js new file mode 100644 index 0000000..1cc78a5 --- /dev/null +++ b/node_modules/fastq/test/test.js @@ -0,0 +1,566 @@ +'use strict' + +/* eslint-disable no-var */ + +var test = require('tape') +var buildQueue = require('../') + +test('concurrency', function (t) { + t.plan(2) + t.throws(buildQueue.bind(null, worker, 0)) + t.doesNotThrow(buildQueue.bind(null, worker, 1)) + + function worker (arg, cb) { + cb(null, true) + } +}) + +test('worker execution', function (t) { + t.plan(3) + + var queue = buildQueue(worker, 1) + + queue.push(42, function (err, result) { + t.error(err, 'no error') + t.equal(result, true, 'result matches') + }) + + function worker (arg, cb) { + t.equal(arg, 42) + cb(null, true) + } +}) + +test('limit', function (t) { + t.plan(4) + + var expected = [10, 0] + var queue = buildQueue(worker, 1) + + queue.push(10, result) + queue.push(0, result) + + function result (err, arg) { + t.error(err, 'no error') + t.equal(arg, expected.shift(), 'the result matches') + } + + function worker (arg, cb) { + setTimeout(cb, arg, null, arg) + } +}) + +test('multiple executions', function (t) { + t.plan(15) + + var queue = buildQueue(worker, 1) + var toExec = [1, 2, 3, 4, 5] + var count = 0 + + toExec.forEach(function (task) { + queue.push(task, done) + }) + + function done (err, result) { + t.error(err, 'no error') + t.equal(result, toExec[count - 1], 'the result matches') + } + + function worker (arg, cb) { + t.equal(arg, toExec[count], 'arg matches') + count++ + setImmediate(cb, null, arg) + } +}) + +test('multiple executions, one after another', function (t) { + t.plan(15) + + var queue = buildQueue(worker, 1) + var toExec = [1, 2, 3, 4, 5] + var count = 0 + + queue.push(toExec[0], done) + + function done (err, result) { + t.error(err, 'no error') + t.equal(result, toExec[count - 1], 'the result matches') + if (count < toExec.length) { + queue.push(toExec[count], done) + } + } + + function worker (arg, cb) { + t.equal(arg, toExec[count], 'arg matches') + count++ + setImmediate(cb, null, arg) + } +}) + +test('set this', function (t) { + t.plan(3) + + var that = {} + var queue = buildQueue(that, worker, 1) + + queue.push(42, function (err, result) { + t.error(err, 'no error') + t.equal(this, that, 'this matches') + }) + + function worker (arg, cb) { + t.equal(this, that, 'this matches') + cb(null, true) + } +}) + +test('drain', function (t) { + t.plan(4) + + var queue = buildQueue(worker, 1) + var worked = false + + queue.push(42, function (err, result) { + t.error(err, 'no error') + t.equal(result, true, 'result matches') + }) + + queue.drain = function () { + t.equal(true, worked, 'drained') + } + + function worker (arg, cb) { + t.equal(arg, 42) + worked = true + setImmediate(cb, null, true) + } +}) + +test('pause && resume', function (t) { + t.plan(7) + + var queue = buildQueue(worker, 1) + var worked = false + + t.notOk(queue.paused, 'it should not be paused') + + queue.pause() + + queue.push(42, function (err, result) { + t.error(err, 'no error') + t.equal(result, true, 'result matches') + }) + + t.notOk(worked, 'it should be paused') + t.ok(queue.paused, 'it should be paused') + + queue.resume() + queue.resume() // second resume is a no-op + + t.notOk(queue.paused, 'it should not be paused') + + function worker 
(arg, cb) { + t.equal(arg, 42) + worked = true + cb(null, true) + } +}) + +test('pause in flight && resume', function (t) { + t.plan(9) + + var queue = buildQueue(worker, 1) + var expected = [42, 24] + + t.notOk(queue.paused, 'it should not be paused') + + queue.push(42, function (err, result) { + t.error(err, 'no error') + t.equal(result, true, 'result matches') + t.ok(queue.paused, 'it should be paused') + process.nextTick(function () { queue.resume() }) + }) + + queue.push(24, function (err, result) { + t.error(err, 'no error') + t.equal(result, true, 'result matches') + t.notOk(queue.paused, 'it should not be paused') + }) + + queue.pause() + + function worker (arg, cb) { + t.equal(arg, expected.shift()) + process.nextTick(function () { cb(null, true) }) + } +}) + +test('altering concurrency', function (t) { + t.plan(7) + + var queue = buildQueue(worker, 1) + var count = 0 + + queue.pause() + + queue.push(24, workDone) + queue.push(24, workDone) + + queue.concurrency = 2 + + queue.resume() + + t.equal(queue.running(), 2, '2 jobs running') + + function workDone (err, result) { + t.error(err, 'no error') + t.equal(result, true, 'result matches') + } + + function worker (arg, cb) { + t.equal(0, count, 'works in parallel') + setImmediate(function () { + count++ + cb(null, true) + }) + } +}) + +test('idle()', function (t) { + t.plan(12) + + var queue = buildQueue(worker, 1) + + t.ok(queue.idle(), 'queue is idle') + + queue.push(42, function (err, result) { + t.error(err, 'no error') + t.equal(result, true, 'result matches') + t.notOk(queue.idle(), 'queue is not idle') + }) + + queue.push(42, function (err, result) { + t.error(err, 'no error') + t.equal(result, true, 'result matches') + // it will go idle after executing this function + setImmediate(function () { + t.ok(queue.idle(), 'queue is now idle') + }) + }) + + t.notOk(queue.idle(), 'queue is not idle') + + function worker (arg, cb) { + t.notOk(queue.idle(), 'queue is not idle') + t.equal(arg, 42) + setImmediate(cb, null, true) + } +}) + +test('saturated', function (t) { + t.plan(9) + + var queue = buildQueue(worker, 1) + var preworked = 0 + var worked = 0 + + queue.saturated = function () { + t.pass('saturated') + t.equal(preworked, 1, 'started 1 task') + t.equal(worked, 0, 'worked zero task') + } + + queue.push(42, done) + queue.push(42, done) + + function done (err, result) { + t.error(err, 'no error') + t.equal(result, true, 'result matches') + } + + function worker (arg, cb) { + t.equal(arg, 42) + preworked++ + setImmediate(function () { + worked++ + cb(null, true) + }) + } +}) + +test('length', function (t) { + t.plan(7) + + var queue = buildQueue(worker, 1) + + t.equal(queue.length(), 0, 'nothing waiting') + queue.push(42, done) + t.equal(queue.length(), 0, 'nothing waiting') + queue.push(42, done) + t.equal(queue.length(), 1, 'one task waiting') + queue.push(42, done) + t.equal(queue.length(), 2, 'two tasks waiting') + + function done (err, result) { + t.error(err, 'no error') + } + + function worker (arg, cb) { + setImmediate(function () { + cb(null, true) + }) + } +}) + +test('getQueue', function (t) { + t.plan(10) + + var queue = buildQueue(worker, 1) + + t.equal(queue.getQueue().length, 0, 'nothing waiting') + queue.push(42, done) + t.equal(queue.getQueue().length, 0, 'nothing waiting') + queue.push(42, done) + t.equal(queue.getQueue().length, 1, 'one task waiting') + t.equal(queue.getQueue()[0], 42, 'should be equal') + queue.push(43, done) + t.equal(queue.getQueue().length, 2, 'two tasks waiting') + 
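+  // only waiting tasks are reported: the first 42 is already running, so
+  // getQueue() holds just the two queued values (42 and 43)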
t.equal(queue.getQueue()[0], 42, 'should be equal') + t.equal(queue.getQueue()[1], 43, 'should be equal') + + function done (err, result) { + t.error(err, 'no error') + } + + function worker (arg, cb) { + setImmediate(function () { + cb(null, true) + }) + } +}) + +test('unshift', function (t) { + t.plan(8) + + var queue = buildQueue(worker, 1) + var expected = [1, 2, 3, 4] + + queue.push(1, done) + queue.push(4, done) + queue.unshift(3, done) + queue.unshift(2, done) + + function done (err, result) { + t.error(err, 'no error') + } + + function worker (arg, cb) { + t.equal(expected.shift(), arg, 'tasks come in order') + setImmediate(function () { + cb(null, true) + }) + } +}) + +test('unshift && empty', function (t) { + t.plan(2) + + var queue = buildQueue(worker, 1) + var completed = false + + queue.pause() + + queue.empty = function () { + t.notOk(completed, 'the task has not completed yet') + } + + queue.unshift(1, done) + + queue.resume() + + function done (err, result) { + completed = true + t.error(err, 'no error') + } + + function worker (arg, cb) { + setImmediate(function () { + cb(null, true) + }) + } +}) + +test('push && empty', function (t) { + t.plan(2) + + var queue = buildQueue(worker, 1) + var completed = false + + queue.pause() + + queue.empty = function () { + t.notOk(completed, 'the task has not completed yet') + } + + queue.push(1, done) + + queue.resume() + + function done (err, result) { + completed = true + t.error(err, 'no error') + } + + function worker (arg, cb) { + setImmediate(function () { + cb(null, true) + }) + } +}) + +test('kill', function (t) { + t.plan(5) + + var queue = buildQueue(worker, 1) + var expected = [1] + + var predrain = queue.drain + + queue.drain = function drain () { + t.fail('drain should never be called') + } + + queue.push(1, done) + queue.push(4, done) + queue.unshift(3, done) + queue.unshift(2, done) + queue.kill() + + function done (err, result) { + t.error(err, 'no error') + setImmediate(function () { + t.equal(queue.length(), 0, 'no queued tasks') + t.equal(queue.running(), 0, 'no running tasks') + t.equal(queue.drain, predrain, 'drain is back to default') + }) + } + + function worker (arg, cb) { + t.equal(expected.shift(), arg, 'tasks come in order') + setImmediate(function () { + cb(null, true) + }) + } +}) + +test('killAndDrain', function (t) { + t.plan(6) + + var queue = buildQueue(worker, 1) + var expected = [1] + + var predrain = queue.drain + + queue.drain = function drain () { + t.pass('drain has been called') + } + + queue.push(1, done) + queue.push(4, done) + queue.unshift(3, done) + queue.unshift(2, done) + queue.killAndDrain() + + function done (err, result) { + t.error(err, 'no error') + setImmediate(function () { + t.equal(queue.length(), 0, 'no queued tasks') + t.equal(queue.running(), 0, 'no running tasks') + t.equal(queue.drain, predrain, 'drain is back to default') + }) + } + + function worker (arg, cb) { + t.equal(expected.shift(), arg, 'tasks come in order') + setImmediate(function () { + cb(null, true) + }) + } +}) + +test('pause && idle', function (t) { + t.plan(11) + + var queue = buildQueue(worker, 1) + var worked = false + + t.notOk(queue.paused, 'it should not be paused') + t.ok(queue.idle(), 'should be idle') + + queue.pause() + + queue.push(42, function (err, result) { + t.error(err, 'no error') + t.equal(result, true, 'result matches') + }) + + t.notOk(worked, 'it should be paused') + t.ok(queue.paused, 'it should be paused') + t.notOk(queue.idle(), 'should not be idle') + + queue.resume() + + 
t.notOk(queue.paused, 'it should not be paused') + t.notOk(queue.idle(), 'it should not be idle') + + function worker (arg, cb) { + t.equal(arg, 42) + worked = true + process.nextTick(cb.bind(null, null, true)) + process.nextTick(function () { + t.ok(queue.idle(), 'is should be idle') + }) + } +}) + +test('push without cb', function (t) { + t.plan(1) + + var queue = buildQueue(worker, 1) + + queue.push(42) + + function worker (arg, cb) { + t.equal(arg, 42) + cb() + } +}) + +test('unshift without cb', function (t) { + t.plan(1) + + var queue = buildQueue(worker, 1) + + queue.unshift(42) + + function worker (arg, cb) { + t.equal(arg, 42) + cb() + } +}) + +test('push with worker throwing error', function (t) { + t.plan(5) + var q = buildQueue(function (task, cb) { + cb(new Error('test error'), null) + }, 1) + q.error(function (err, task) { + t.ok(err instanceof Error, 'global error handler should catch the error') + t.match(err.message, /test error/, 'error message should be "test error"') + t.equal(task, 42, 'The task executed should be passed') + }) + q.push(42, function (err) { + t.ok(err instanceof Error, 'push callback should catch the error') + t.match(err.message, /test error/, 'error message should be "test error"') + }) +}) diff --git a/node_modules/fastq/test/tsconfig.json b/node_modules/fastq/test/tsconfig.json new file mode 100644 index 0000000..66e16e9 --- /dev/null +++ b/node_modules/fastq/test/tsconfig.json @@ -0,0 +1,11 @@ +{ + "compilerOptions": { + "target": "es6", + "module": "commonjs", + "noEmit": true, + "strict": true + }, + "files": [ + "./example.ts" + ] +} diff --git a/node_modules/fill-range/LICENSE b/node_modules/fill-range/LICENSE new file mode 100644 index 0000000..9af4a67 --- /dev/null +++ b/node_modules/fill-range/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014-present, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
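The fastq files added above expose two entry points that the test suite exercises: the callback-style queue exported by `queue.js` and the promise wrapper exported as `promise`. A minimal usage sketch follows; it is not part of the vendored package, the task values and concurrency are arbitrary, and it assumes `require('fastq')` resolves to this module:

```js
'use strict'

const fastq = require('fastq')

// Callback-style queue: worker(task, cb), concurrency of 2.
const q = fastq(function worker (task, cb) {
  setImmediate(() => cb(null, task * 2))
}, 2)

// Global error handler, as exercised by the "worker throwing error" tests.
q.error(function (err, task) {
  if (err) console.error('task', task, 'failed:', err.message)
})

q.push(21, function (err, result) {
  if (err) throw err
  console.log('the result is', result) // 42
})

// Promise variant: async worker; push() returns a promise and
// drained() resolves once the queue is empty and idle.
const qp = fastq.promise(async function (task) {
  return task * 2
}, 2)

qp.push(21).then(function (result) {
  console.log('the promise result is', result) // 42
})
qp.drained().then(function () {
  console.log('all queued tasks have been processed')
})
```

Because the promise variant is a thin wrapper over the callback queue, pause/resume, `concurrency`, and the global `error()` handler behave the same way in both forms.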
diff --git a/node_modules/fill-range/README.md b/node_modules/fill-range/README.md new file mode 100644 index 0000000..8d756fe --- /dev/null +++ b/node_modules/fill-range/README.md @@ -0,0 +1,237 @@ +# fill-range [![Donate](https://img.shields.io/badge/Donate-PayPal-green.svg)](https://www.paypal.com/cgi-bin/webscr?cmd=_s-xclick&hosted_button_id=W8YFZ425KND68) [![NPM version](https://img.shields.io/npm/v/fill-range.svg?style=flat)](https://www.npmjs.com/package/fill-range) [![NPM monthly downloads](https://img.shields.io/npm/dm/fill-range.svg?style=flat)](https://npmjs.org/package/fill-range) [![NPM total downloads](https://img.shields.io/npm/dt/fill-range.svg?style=flat)](https://npmjs.org/package/fill-range) [![Linux Build Status](https://img.shields.io/travis/jonschlinkert/fill-range.svg?style=flat&label=Travis)](https://travis-ci.org/jonschlinkert/fill-range) + +> Fill in a range of numbers or letters, optionally passing an increment or `step` to use, or create a regex-compatible range with `options.toRegex` + +Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and consider starring the project to show your :heart: and support. + +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install --save fill-range +``` + +## Usage + +Expands numbers and letters, optionally using a `step` as the last argument. _(Numbers may be defined as JavaScript numbers or strings)_. + +```js +const fill = require('fill-range'); +// fill(from, to[, step, options]); + +console.log(fill('1', '10')); //=> ['1', '2', '3', '4', '5', '6', '7', '8', '9', '10'] +console.log(fill('1', '10', { toRegex: true })); //=> [1-9]|10 +``` + +**Params** + +* `from`: **{String|Number}** the number or letter to start with +* `to`: **{String|Number}** the number or letter to end with +* `step`: **{String|Number|Object|Function}** Optionally pass a [step](#optionsstep) to use. +* `options`: **{Object|Function}**: See all available [options](#options) + +## Examples + +By default, an array of values is returned. + +**Alphabetical ranges** + +```js +console.log(fill('a', 'e')); //=> ['a', 'b', 'c', 'd', 'e'] +console.log(fill('A', 'E')); //=> [ 'A', 'B', 'C', 'D', 'E' ] +``` + +**Numerical ranges** + +Numbers can be defined as actual numbers or strings. + +```js +console.log(fill(1, 5)); //=> [ 1, 2, 3, 4, 5 ] +console.log(fill('1', '5')); //=> [ 1, 2, 3, 4, 5 ] +``` + +**Negative ranges** + +Numbers can be defined as actual numbers or strings. + +```js +console.log(fill('-5', '-1')); //=> [ '-5', '-4', '-3', '-2', '-1' ] +console.log(fill('-5', '5')); //=> [ '-5', '-4', '-3', '-2', '-1', '0', '1', '2', '3', '4', '5' ] +``` + +**Steps (increments)** + +```js +// numerical ranges with increments +console.log(fill('0', '25', 4)); //=> [ '0', '4', '8', '12', '16', '20', '24' ] +console.log(fill('0', '25', 5)); //=> [ '0', '5', '10', '15', '20', '25' ] +console.log(fill('0', '25', 6)); //=> [ '0', '6', '12', '18', '24' ] + +// alphabetical ranges with increments +console.log(fill('a', 'z', 4)); //=> [ 'a', 'e', 'i', 'm', 'q', 'u', 'y' ] +console.log(fill('a', 'z', 5)); //=> [ 'a', 'f', 'k', 'p', 'u', 'z' ] +console.log(fill('a', 'z', 6)); //=> [ 'a', 'g', 'm', 's', 'y' ] +``` + +## Options + +### options.step + +**Type**: `number` (formatted as a string or number) + +**Default**: `undefined` + +**Description**: The increment to use for the range. Can be used with letters or numbers. 
+ +**Example(s)** + +```js +// numbers +console.log(fill('1', '10', 2)); //=> [ '1', '3', '5', '7', '9' ] +console.log(fill('1', '10', 3)); //=> [ '1', '4', '7', '10' ] +console.log(fill('1', '10', 4)); //=> [ '1', '5', '9' ] + +// letters +console.log(fill('a', 'z', 5)); //=> [ 'a', 'f', 'k', 'p', 'u', 'z' ] +console.log(fill('a', 'z', 7)); //=> [ 'a', 'h', 'o', 'v' ] +console.log(fill('a', 'z', 9)); //=> [ 'a', 'j', 's' ] +``` + +### options.strictRanges + +**Type**: `boolean` + +**Default**: `false` + +**Description**: By default, `null` is returned when an invalid range is passed. Enable this option to throw a `RangeError` on invalid ranges. + +**Example(s)** + +The following are all invalid: + +```js +fill('1.1', '2'); // decimals not supported in ranges +fill('a', '2'); // incompatible range values +fill(1, 10, 'foo'); // invalid "step" argument +``` + +### options.stringify + +**Type**: `boolean` + +**Default**: `undefined` + +**Description**: Cast all returned values to strings. By default, integers are returned as numbers. + +**Example(s)** + +```js +console.log(fill(1, 5)); //=> [ 1, 2, 3, 4, 5 ] +console.log(fill(1, 5, { stringify: true })); //=> [ '1', '2', '3', '4', '5' ] +``` + +### options.toRegex + +**Type**: `boolean` + +**Default**: `undefined` + +**Description**: Create a regex-compatible source string, instead of expanding values to an array. + +**Example(s)** + +```js +// alphabetical range +console.log(fill('a', 'e', { toRegex: true })); //=> '[a-e]' +// alphabetical with step +console.log(fill('a', 'z', 3, { toRegex: true })); //=> 'a|d|g|j|m|p|s|v|y' +// numerical range +console.log(fill('1', '100', { toRegex: true })); //=> '[1-9]|[1-9][0-9]|100' +// numerical range with zero padding +console.log(fill('000001', '100000', { toRegex: true })); +//=> '0{5}[1-9]|0{4}[1-9][0-9]|0{3}[1-9][0-9]{2}|0{2}[1-9][0-9]{3}|0[1-9][0-9]{4}|100000' +``` + +### options.transform + +**Type**: `function` + +**Default**: `undefined` + +**Description**: Customize each value in the returned array (or [string](#optionstoRegex)). _(you can also pass this function as the last argument to `fill()`)_. + +**Example(s)** + +```js +// add zero padding +console.log(fill(1, 5, value => String(value).padStart(4, '0'))); +//=> ['0001', '0002', '0003', '0004', '0005'] +``` + +## About + +
+### Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). + +
+ +
+### Running Tests + +Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command: + +```sh +$ npm install && npm test +``` + +
+ +
+### Building docs + +_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_ + +To generate the readme, run the following command: + +```sh +$ npm install -g verbose/verb#dev verb-generate-readme && verb +``` + +
+ +### Contributors + +| **Commits** | **Contributor** | +| --- | --- | +| 116 | [jonschlinkert](https://github.com/jonschlinkert) | +| 4 | [paulmillr](https://github.com/paulmillr) | +| 2 | [realityking](https://github.com/realityking) | +| 2 | [bluelovers](https://github.com/bluelovers) | +| 1 | [edorivai](https://github.com/edorivai) | +| 1 | [wtgtybhertgeghgtwtg](https://github.com/wtgtybhertgeghgtwtg) | + +### Author + +**Jon Schlinkert** + +* [GitHub Profile](https://github.com/jonschlinkert) +* [Twitter Profile](https://twitter.com/jonschlinkert) +* [LinkedIn Profile](https://linkedin.com/in/jonschlinkert) + +Please consider supporting me on Patreon, or [start your own Patreon page](https://patreon.com/invite/bxpbvm)! + + + + + +### License + +Copyright © 2019, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT License](LICENSE). + +*** + +_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.8.0, on April 08, 2019._ \ No newline at end of file diff --git a/node_modules/fill-range/index.js b/node_modules/fill-range/index.js new file mode 100644 index 0000000..97ce35a --- /dev/null +++ b/node_modules/fill-range/index.js @@ -0,0 +1,249 @@ +/*! + * fill-range + * + * Copyright (c) 2014-present, Jon Schlinkert. + * Licensed under the MIT License. + */ + +'use strict'; + +const util = require('util'); +const toRegexRange = require('to-regex-range'); + +const isObject = val => val !== null && typeof val === 'object' && !Array.isArray(val); + +const transform = toNumber => { + return value => toNumber === true ? Number(value) : String(value); +}; + +const isValidValue = value => { + return typeof value === 'number' || (typeof value === 'string' && value !== ''); +}; + +const isNumber = num => Number.isInteger(+num); + +const zeros = input => { + let value = `${input}`; + let index = -1; + if (value[0] === '-') value = value.slice(1); + if (value === '0') return false; + while (value[++index] === '0'); + return index > 0; +}; + +const stringify = (start, end, options) => { + if (typeof start === 'string' || typeof end === 'string') { + return true; + } + return options.stringify === true; +}; + +const pad = (input, maxLength, toNumber) => { + if (maxLength > 0) { + let dash = input[0] === '-' ? '-' : ''; + if (dash) input = input.slice(1); + input = (dash + input.padStart(dash ? maxLength - 1 : maxLength, '0')); + } + if (toNumber === false) { + return String(input); + } + return input; +}; + +const toMaxLen = (input, maxLength) => { + let negative = input[0] === '-' ? '-' : ''; + if (negative) { + input = input.slice(1); + maxLength--; + } + while (input.length < maxLength) input = '0' + input; + return negative ? ('-' + input) : input; +}; + +const toSequence = (parts, options) => { + parts.negatives.sort((a, b) => a < b ? -1 : a > b ? 1 : 0); + parts.positives.sort((a, b) => a < b ? -1 : a > b ? 1 : 0); + + let prefix = options.capture ? 
'' : '?:'; + let positives = ''; + let negatives = ''; + let result; + + if (parts.positives.length) { + positives = parts.positives.join('|'); + } + + if (parts.negatives.length) { + negatives = `-(${prefix}${parts.negatives.join('|')})`; + } + + if (positives && negatives) { + result = `${positives}|${negatives}`; + } else { + result = positives || negatives; + } + + if (options.wrap) { + return `(${prefix}${result})`; + } + + return result; +}; + +const toRange = (a, b, isNumbers, options) => { + if (isNumbers) { + return toRegexRange(a, b, { wrap: false, ...options }); + } + + let start = String.fromCharCode(a); + if (a === b) return start; + + let stop = String.fromCharCode(b); + return `[${start}-${stop}]`; +}; + +const toRegex = (start, end, options) => { + if (Array.isArray(start)) { + let wrap = options.wrap === true; + let prefix = options.capture ? '' : '?:'; + return wrap ? `(${prefix}${start.join('|')})` : start.join('|'); + } + return toRegexRange(start, end, options); +}; + +const rangeError = (...args) => { + return new RangeError('Invalid range arguments: ' + util.inspect(...args)); +}; + +const invalidRange = (start, end, options) => { + if (options.strictRanges === true) throw rangeError([start, end]); + return []; +}; + +const invalidStep = (step, options) => { + if (options.strictRanges === true) { + throw new TypeError(`Expected step "${step}" to be a number`); + } + return []; +}; + +const fillNumbers = (start, end, step = 1, options = {}) => { + let a = Number(start); + let b = Number(end); + + if (!Number.isInteger(a) || !Number.isInteger(b)) { + if (options.strictRanges === true) throw rangeError([start, end]); + return []; + } + + // fix negative zero + if (a === 0) a = 0; + if (b === 0) b = 0; + + let descending = a > b; + let startString = String(start); + let endString = String(end); + let stepString = String(step); + step = Math.max(Math.abs(step), 1); + + let padded = zeros(startString) || zeros(endString) || zeros(stepString); + let maxLen = padded ? Math.max(startString.length, endString.length, stepString.length) : 0; + let toNumber = padded === false && stringify(start, end, options) === false; + let format = options.transform || transform(toNumber); + + if (options.toRegex && step === 1) { + return toRange(toMaxLen(start, maxLen), toMaxLen(end, maxLen), true, options); + } + + let parts = { negatives: [], positives: [] }; + let push = num => parts[num < 0 ? 'negatives' : 'positives'].push(Math.abs(num)); + let range = []; + let index = 0; + + while (descending ? a >= b : a <= b) { + if (options.toRegex === true && step > 1) { + push(a); + } else { + range.push(pad(format(a, index), maxLen, toNumber)); + } + a = descending ? a - step : a + step; + index++; + } + + if (options.toRegex === true) { + return step > 1 + ? toSequence(parts, options) + : toRegex(range, null, { wrap: false, ...options }); + } + + return range; +}; + +const fillLetters = (start, end, step = 1, options = {}) => { + if ((!isNumber(start) && start.length > 1) || (!isNumber(end) && end.length > 1)) { + return invalidRange(start, end, options); + } + + + let format = options.transform || (val => String.fromCharCode(val)); + let a = `${start}`.charCodeAt(0); + let b = `${end}`.charCodeAt(0); + + let descending = a > b; + let min = Math.min(a, b); + let max = Math.max(a, b); + + if (options.toRegex && step === 1) { + return toRange(min, max, false, options); + } + + let range = []; + let index = 0; + + while (descending ? 
a >= b : a <= b) { + range.push(format(a, index)); + a = descending ? a - step : a + step; + index++; + } + + if (options.toRegex === true) { + return toRegex(range, null, { wrap: false, options }); + } + + return range; +}; + +const fill = (start, end, step, options = {}) => { + if (end == null && isValidValue(start)) { + return [start]; + } + + if (!isValidValue(start) || !isValidValue(end)) { + return invalidRange(start, end, options); + } + + if (typeof step === 'function') { + return fill(start, end, 1, { transform: step }); + } + + if (isObject(step)) { + return fill(start, end, 0, step); + } + + let opts = { ...options }; + if (opts.capture === true) opts.wrap = true; + step = step || opts.step || 1; + + if (!isNumber(step)) { + if (step != null && !isObject(step)) return invalidStep(step, opts); + return fill(start, end, 1, step); + } + + if (isNumber(start) && isNumber(end)) { + return fillNumbers(start, end, step, opts); + } + + return fillLetters(start, end, Math.max(Math.abs(step), 1), opts); +}; + +module.exports = fill; diff --git a/node_modules/fill-range/package.json b/node_modules/fill-range/package.json new file mode 100644 index 0000000..07d3076 --- /dev/null +++ b/node_modules/fill-range/package.json @@ -0,0 +1,69 @@ +{ + "name": "fill-range", + "description": "Fill in a range of numbers or letters, optionally passing an increment or `step` to use, or create a regex-compatible range with `options.toRegex`", + "version": "7.0.1", + "homepage": "https://github.com/jonschlinkert/fill-range", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "contributors": [ + "Edo Rivai (edo.rivai.nl)", + "Jon Schlinkert (http://twitter.com/jonschlinkert)", + "Paul Miller (paulmillr.com)", + "Rouven Weßling (www.rouvenwessling.de)", + "(https://github.com/wtgtybhertgeghgtwtg)" + ], + "repository": "jonschlinkert/fill-range", + "bugs": { + "url": "https://github.com/jonschlinkert/fill-range/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=8" + }, + "scripts": { + "test": "mocha" + }, + "dependencies": { + "to-regex-range": "^5.0.1" + }, + "devDependencies": { + "gulp-format-md": "^2.0.0", + "mocha": "^6.1.1" + }, + "keywords": [ + "alpha", + "alphabetical", + "array", + "bash", + "brace", + "expand", + "expansion", + "fill", + "glob", + "match", + "matches", + "matching", + "number", + "numerical", + "range", + "ranges", + "regex", + "sh" + ], + "verb": { + "toc": false, + "layout": "default", + "tasks": [ + "readme" + ], + "plugins": [ + "gulp-format-md" + ], + "lint": { + "reflinks": true + } + } +} diff --git a/node_modules/fs.realpath/LICENSE b/node_modules/fs.realpath/LICENSE new file mode 100644 index 0000000..5bd884c --- /dev/null +++ b/node_modules/fs.realpath/LICENSE @@ -0,0 +1,43 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +---- + +This library bundles a version of the `fs.realpath` and `fs.realpathSync` +methods from Node.js v0.10 under the terms of the Node.js MIT license. + +Node's license follows, also included at the header of `old.js` which contains +the licensed code: + + Copyright Joyent, Inc. and other Node contributors. + + Permission is hereby granted, free of charge, to any person obtaining a + copy of this software and associated documentation files (the "Software"), + to deal in the Software without restriction, including without limitation + the rights to use, copy, modify, merge, publish, distribute, sublicense, + and/or sell copies of the Software, and to permit persons to whom the + Software is furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in + all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + DEALINGS IN THE SOFTWARE. diff --git a/node_modules/fs.realpath/README.md b/node_modules/fs.realpath/README.md new file mode 100644 index 0000000..a42ceac --- /dev/null +++ b/node_modules/fs.realpath/README.md @@ -0,0 +1,33 @@ +# fs.realpath + +A backwards-compatible fs.realpath for Node v6 and above + +In Node v6, the JavaScript implementation of fs.realpath was replaced +with a faster (but less resilient) native implementation. That raises +new and platform-specific errors and cannot handle long or excessively +symlink-looping paths. + +This module handles those cases by detecting the new errors and +falling back to the JavaScript implementation. On versions of Node +prior to v6, it has no effect. + +## USAGE + +```js +var rp = require('fs.realpath') + +// async version +rp.realpath(someLongAndLoopingPath, function (er, real) { + // the ELOOP was handled, but it was a bit slower +}) + +// sync version +var real = rp.realpathSync(someLongAndLoopingPath) + +// monkeypatch at your own risk! 
+// This replaces the fs.realpath/fs.realpathSync builtins +rp.monkeypatch() + +// un-do the monkeypatching +rp.unmonkeypatch() +``` diff --git a/node_modules/fs.realpath/index.js b/node_modules/fs.realpath/index.js new file mode 100644 index 0000000..b09c7c7 --- /dev/null +++ b/node_modules/fs.realpath/index.js @@ -0,0 +1,66 @@ +module.exports = realpath +realpath.realpath = realpath +realpath.sync = realpathSync +realpath.realpathSync = realpathSync +realpath.monkeypatch = monkeypatch +realpath.unmonkeypatch = unmonkeypatch + +var fs = require('fs') +var origRealpath = fs.realpath +var origRealpathSync = fs.realpathSync + +var version = process.version +var ok = /^v[0-5]\./.test(version) +var old = require('./old.js') + +function newError (er) { + return er && er.syscall === 'realpath' && ( + er.code === 'ELOOP' || + er.code === 'ENOMEM' || + er.code === 'ENAMETOOLONG' + ) +} + +function realpath (p, cache, cb) { + if (ok) { + return origRealpath(p, cache, cb) + } + + if (typeof cache === 'function') { + cb = cache + cache = null + } + origRealpath(p, cache, function (er, result) { + if (newError(er)) { + old.realpath(p, cache, cb) + } else { + cb(er, result) + } + }) +} + +function realpathSync (p, cache) { + if (ok) { + return origRealpathSync(p, cache) + } + + try { + return origRealpathSync(p, cache) + } catch (er) { + if (newError(er)) { + return old.realpathSync(p, cache) + } else { + throw er + } + } +} + +function monkeypatch () { + fs.realpath = realpath + fs.realpathSync = realpathSync +} + +function unmonkeypatch () { + fs.realpath = origRealpath + fs.realpathSync = origRealpathSync +} diff --git a/node_modules/fs.realpath/old.js b/node_modules/fs.realpath/old.js new file mode 100644 index 0000000..b40305e --- /dev/null +++ b/node_modules/fs.realpath/old.js @@ -0,0 +1,303 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +var pathModule = require('path'); +var isWindows = process.platform === 'win32'; +var fs = require('fs'); + +// JavaScript implementation of realpath, ported from node pre-v6 + +var DEBUG = process.env.NODE_DEBUG && /fs/.test(process.env.NODE_DEBUG); + +function rethrow() { + // Only enable in debug mode. A backtrace uses ~1000 bytes of heap space and + // is fairly slow to generate. 
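+  // When NODE_DEBUG=fs is set, the backtrace is captured eagerly here so a later
+  // missing-callback error can be reported against the original call site.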
+ var callback; + if (DEBUG) { + var backtrace = new Error; + callback = debugCallback; + } else + callback = missingCallback; + + return callback; + + function debugCallback(err) { + if (err) { + backtrace.message = err.message; + err = backtrace; + missingCallback(err); + } + } + + function missingCallback(err) { + if (err) { + if (process.throwDeprecation) + throw err; // Forgot a callback but don't know where? Use NODE_DEBUG=fs + else if (!process.noDeprecation) { + var msg = 'fs: missing callback ' + (err.stack || err.message); + if (process.traceDeprecation) + console.trace(msg); + else + console.error(msg); + } + } + } +} + +function maybeCallback(cb) { + return typeof cb === 'function' ? cb : rethrow(); +} + +var normalize = pathModule.normalize; + +// Regexp that finds the next partion of a (partial) path +// result is [base_with_slash, base], e.g. ['somedir/', 'somedir'] +if (isWindows) { + var nextPartRe = /(.*?)(?:[\/\\]+|$)/g; +} else { + var nextPartRe = /(.*?)(?:[\/]+|$)/g; +} + +// Regex to find the device root, including trailing slash. E.g. 'c:\\'. +if (isWindows) { + var splitRootRe = /^(?:[a-zA-Z]:|[\\\/]{2}[^\\\/]+[\\\/][^\\\/]+)?[\\\/]*/; +} else { + var splitRootRe = /^[\/]*/; +} + +exports.realpathSync = function realpathSync(p, cache) { + // make p is absolute + p = pathModule.resolve(p); + + if (cache && Object.prototype.hasOwnProperty.call(cache, p)) { + return cache[p]; + } + + var original = p, + seenLinks = {}, + knownHard = {}; + + // current character position in p + var pos; + // the partial path so far, including a trailing slash if any + var current; + // the partial path without a trailing slash (except when pointing at a root) + var base; + // the partial path scanned in the previous round, with slash + var previous; + + start(); + + function start() { + // Skip over roots + var m = splitRootRe.exec(p); + pos = m[0].length; + current = m[0]; + base = m[0]; + previous = ''; + + // On windows, check that the root exists. On unix there is no need. + if (isWindows && !knownHard[base]) { + fs.lstatSync(base); + knownHard[base] = true; + } + } + + // walk down the path, swapping out linked pathparts for their real + // values + // NB: p.length changes. + while (pos < p.length) { + // find the next part + nextPartRe.lastIndex = pos; + var result = nextPartRe.exec(p); + previous = current; + current += result[0]; + base = previous + result[1]; + pos = nextPartRe.lastIndex; + + // continue if not a symlink + if (knownHard[base] || (cache && cache[base] === base)) { + continue; + } + + var resolvedLink; + if (cache && Object.prototype.hasOwnProperty.call(cache, base)) { + // some known symbolic link. no need to stat again. + resolvedLink = cache[base]; + } else { + var stat = fs.lstatSync(base); + if (!stat.isSymbolicLink()) { + knownHard[base] = true; + if (cache) cache[base] = base; + continue; + } + + // read the link if it wasn't read before + // dev/ino always return 0 on windows, so skip the check. + var linkTarget = null; + if (!isWindows) { + var id = stat.dev.toString(32) + ':' + stat.ino.toString(32); + if (seenLinks.hasOwnProperty(id)) { + linkTarget = seenLinks[id]; + } + } + if (linkTarget === null) { + fs.statSync(base); + linkTarget = fs.readlinkSync(base); + } + resolvedLink = pathModule.resolve(previous, linkTarget); + // track this, if given a cache. 
+ if (cache) cache[base] = resolvedLink; + if (!isWindows) seenLinks[id] = linkTarget; + } + + // resolve the link, then start over + p = pathModule.resolve(resolvedLink, p.slice(pos)); + start(); + } + + if (cache) cache[original] = p; + + return p; +}; + + +exports.realpath = function realpath(p, cache, cb) { + if (typeof cb !== 'function') { + cb = maybeCallback(cache); + cache = null; + } + + // make p is absolute + p = pathModule.resolve(p); + + if (cache && Object.prototype.hasOwnProperty.call(cache, p)) { + return process.nextTick(cb.bind(null, null, cache[p])); + } + + var original = p, + seenLinks = {}, + knownHard = {}; + + // current character position in p + var pos; + // the partial path so far, including a trailing slash if any + var current; + // the partial path without a trailing slash (except when pointing at a root) + var base; + // the partial path scanned in the previous round, with slash + var previous; + + start(); + + function start() { + // Skip over roots + var m = splitRootRe.exec(p); + pos = m[0].length; + current = m[0]; + base = m[0]; + previous = ''; + + // On windows, check that the root exists. On unix there is no need. + if (isWindows && !knownHard[base]) { + fs.lstat(base, function(err) { + if (err) return cb(err); + knownHard[base] = true; + LOOP(); + }); + } else { + process.nextTick(LOOP); + } + } + + // walk down the path, swapping out linked pathparts for their real + // values + function LOOP() { + // stop if scanned past end of path + if (pos >= p.length) { + if (cache) cache[original] = p; + return cb(null, p); + } + + // find the next part + nextPartRe.lastIndex = pos; + var result = nextPartRe.exec(p); + previous = current; + current += result[0]; + base = previous + result[1]; + pos = nextPartRe.lastIndex; + + // continue if not a symlink + if (knownHard[base] || (cache && cache[base] === base)) { + return process.nextTick(LOOP); + } + + if (cache && Object.prototype.hasOwnProperty.call(cache, base)) { + // known symbolic link. no need to stat again. + return gotResolvedLink(cache[base]); + } + + return fs.lstat(base, gotStat); + } + + function gotStat(err, stat) { + if (err) return cb(err); + + // if not a symlink, skip to the next path part + if (!stat.isSymbolicLink()) { + knownHard[base] = true; + if (cache) cache[base] = base; + return process.nextTick(LOOP); + } + + // stat & read the link if not read before + // call gotTarget as soon as the link target is known + // dev/ino always return 0 on windows, so skip the check. 
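+    // seenLinks caches readlink results keyed by dev:ino, so each distinct
+    // symlink is read at most once per realpath() call.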
+ if (!isWindows) { + var id = stat.dev.toString(32) + ':' + stat.ino.toString(32); + if (seenLinks.hasOwnProperty(id)) { + return gotTarget(null, seenLinks[id], base); + } + } + fs.stat(base, function(err) { + if (err) return cb(err); + + fs.readlink(base, function(err, target) { + if (!isWindows) seenLinks[id] = target; + gotTarget(err, target); + }); + }); + } + + function gotTarget(err, target, base) { + if (err) return cb(err); + + var resolvedLink = pathModule.resolve(previous, target); + if (cache) cache[base] = resolvedLink; + gotResolvedLink(resolvedLink); + } + + function gotResolvedLink(resolvedLink) { + // resolve the link, then start over + p = pathModule.resolve(resolvedLink, p.slice(pos)); + start(); + } +}; diff --git a/node_modules/fs.realpath/package.json b/node_modules/fs.realpath/package.json new file mode 100644 index 0000000..3edc57d --- /dev/null +++ b/node_modules/fs.realpath/package.json @@ -0,0 +1,26 @@ +{ + "name": "fs.realpath", + "version": "1.0.0", + "description": "Use node's fs.realpath, but fall back to the JS implementation if the native one fails", + "main": "index.js", + "dependencies": {}, + "devDependencies": {}, + "scripts": { + "test": "tap test/*.js --cov" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/isaacs/fs.realpath.git" + }, + "keywords": [ + "realpath", + "fs", + "polyfill" + ], + "author": "Isaac Z. Schlueter (http://blog.izs.me/)", + "license": "ISC", + "files": [ + "old.js", + "index.js" + ] +} diff --git a/node_modules/fs/README.md b/node_modules/fs/README.md new file mode 100644 index 0000000..5e9a74c --- /dev/null +++ b/node_modules/fs/README.md @@ -0,0 +1,9 @@ +# Security holding package + +This package name is not currently in use, but was formerly occupied +by another package. To avoid malicious use, npm is hanging on to the +package name, but loosely, and we'll probably give it to you if you +want it. + +You may adopt this package by contacting support@npmjs.com and +requesting the name. diff --git a/node_modules/fs/package.json b/node_modules/fs/package.json new file mode 100644 index 0000000..11661b0 --- /dev/null +++ b/node_modules/fs/package.json @@ -0,0 +1,20 @@ +{ + "name": "fs", + "version": "0.0.1-security", + "description": "", + "main": "index.js", + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/npm/security-holder.git" + }, + "keywords": [], + "author": "", + "license": "ISC", + "bugs": { + "url": "https://github.com/npm/security-holder/issues" + }, + "homepage": "https://github.com/npm/security-holder#readme" +} diff --git a/node_modules/fsevents/LICENSE b/node_modules/fsevents/LICENSE new file mode 100644 index 0000000..5d70441 --- /dev/null +++ b/node_modules/fsevents/LICENSE @@ -0,0 +1,22 @@ +MIT License +----------- + +Copyright (C) 2010-2020 by Philipp Dunkel, Ben Noordhuis, Elan Shankar, Paul Miller + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/fsevents/README.md b/node_modules/fsevents/README.md new file mode 100644 index 0000000..025c9a1 --- /dev/null +++ b/node_modules/fsevents/README.md @@ -0,0 +1,83 @@ +# fsevents [![NPM](https://nodei.co/npm/fsevents.png)](https://nodei.co/npm/fsevents/) + +Native access to MacOS FSEvents in [Node.js](https://nodejs.org/) + +The FSEvents API in MacOS allows applications to register for notifications of +changes to a given directory tree. It is a very fast and lightweight alternative +to kqueue. + +This is a low-level library. For a cross-platform file watching module that +uses fsevents, check out [Chokidar](https://github.com/paulmillr/chokidar). + +## Installation + +Supports only **Node.js v8.16 and higher**. + +```sh +npm install fsevents +``` + +## Usage + +```js +const fsevents = require('fsevents'); +const stop = fsevents.watch(__dirname, (path, flags, id) => { + const info = fsevents.getInfo(path, flags, id); +}); // To start observation +stop(); // To end observation +``` + +The callback passed as the second parameter to `.watch` get's called whenever the operating system detects a +a change in the file system. It takes three arguments: + +###### `fsevents.watch(dirname: string, (path: string, flags: number, id: string) => void): () => Promise` + + * `path: string` - the item in the filesystem that have been changed + * `flags: number` - a numeric value describing what the change was + * `id: string` - an unique-id identifying this specific event + + Returns closer callback which when called returns a Promise resolving when the watcher process has been shut down. + +###### `fsevents.getInfo(path: string, flags: number, id: string): FsEventInfo` + +The `getInfo` function takes the `path`, `flags` and `id` arguments and converts those parameters into a structure +that is easier to digest to determine what the change was. + +The `FsEventsInfo` has the following shape: + +```js +/** + * @typedef {'created'|'modified'|'deleted'|'moved'|'root-changed'|'cloned'|'unknown'} FsEventsEvent + * @typedef {'file'|'directory'|'symlink'} FsEventsType + */ +{ + "event": "created", // {FsEventsEvent} + "path": "file.txt", + "type": "file", // {FsEventsType} + "changes": { + "inode": true, // Had iNode Meta-Information changed + "finder": false, // Had Finder Meta-Data changed + "access": false, // Had access permissions changed + "xattrs": false // Had xAttributes changed + }, + "flags": 0x100000000 +} +``` + +## Changelog + +- v2.3 supports Apple Silicon ARM CPUs +- v2 supports node 8.16+ and reduces package size massively +- v1.2.8 supports node 6+ +- v1.2.7 supports node 4+ + +## Troubleshooting + +- I'm getting `EBADPLATFORM` `Unsupported platform for fsevents` error. +- It's fine, nothing is broken. fsevents is macos-only. Other platforms are skipped. If you want to hide this warning, report a bug to NPM bugtracker asking them to hide ebadplatform warnings by default. + +## License + +The MIT License Copyright (C) 2010-2020 by Philipp Dunkel, Ben Noordhuis, Elan Shankar, Paul Miller — see LICENSE file. 
+ +Visit our [GitHub page](https://github.com/fsevents/fsevents) and [NPM Page](https://npmjs.org/package/fsevents) diff --git a/node_modules/fsevents/fsevents.d.ts b/node_modules/fsevents/fsevents.d.ts new file mode 100644 index 0000000..2723c04 --- /dev/null +++ b/node_modules/fsevents/fsevents.d.ts @@ -0,0 +1,46 @@ +declare type Event = "created" | "cloned" | "modified" | "deleted" | "moved" | "root-changed" | "unknown"; +declare type Type = "file" | "directory" | "symlink"; +declare type FileChanges = { + inode: boolean; + finder: boolean; + access: boolean; + xattrs: boolean; +}; +declare type Info = { + event: Event; + path: string; + type: Type; + changes: FileChanges; + flags: number; +}; +declare type WatchHandler = (path: string, flags: number, id: string) => void; +export declare function watch(path: string, handler: WatchHandler): () => Promise; +export declare function watch(path: string, since: number, handler: WatchHandler): () => Promise; +export declare function getInfo(path: string, flags: number): Info; +export declare const constants: { + None: 0x00000000; + MustScanSubDirs: 0x00000001; + UserDropped: 0x00000002; + KernelDropped: 0x00000004; + EventIdsWrapped: 0x00000008; + HistoryDone: 0x00000010; + RootChanged: 0x00000020; + Mount: 0x00000040; + Unmount: 0x00000080; + ItemCreated: 0x00000100; + ItemRemoved: 0x00000200; + ItemInodeMetaMod: 0x00000400; + ItemRenamed: 0x00000800; + ItemModified: 0x00001000; + ItemFinderInfoMod: 0x00002000; + ItemChangeOwner: 0x00004000; + ItemXattrMod: 0x00008000; + ItemIsFile: 0x00010000; + ItemIsDir: 0x00020000; + ItemIsSymlink: 0x00040000; + ItemIsHardlink: 0x00100000; + ItemIsLastHardlink: 0x00200000; + OwnEvent: 0x00080000; + ItemCloned: 0x00400000; +}; +export {}; diff --git a/node_modules/fsevents/fsevents.js b/node_modules/fsevents/fsevents.js new file mode 100644 index 0000000..f1b31c9 --- /dev/null +++ b/node_modules/fsevents/fsevents.js @@ -0,0 +1,82 @@ +/* + ** © 2020 by Philipp Dunkel, Ben Noordhuis, Elan Shankar, Paul Miller + ** Licensed under MIT License. + */ + +/* jshint node:true */ +"use strict"; + +if (process.platform !== "darwin") { + throw new Error(`Module 'fsevents' is not compatible with platform '${process.platform}'`); +} + +const Native = require("./fsevents.node"); +const events = Native.constants; + +function watch(path, since, handler) { + if (typeof path !== "string") { + throw new TypeError(`fsevents argument 1 must be a string and not a ${typeof path}`); + } + if ("function" === typeof since && "undefined" === typeof handler) { + handler = since; + since = Native.flags.SinceNow; + } + if (typeof since !== "number") { + throw new TypeError(`fsevents argument 2 must be a number and not a ${typeof since}`); + } + if (typeof handler !== "function") { + throw new TypeError(`fsevents argument 3 must be a function and not a ${typeof handler}`); + } + + let instance = Native.start(Native.global, path, since, handler); + if (!instance) throw new Error(`could not watch: ${path}`); + return () => { + const result = instance ? 
Promise.resolve(instance).then(Native.stop) : Promise.resolve(undefined); + instance = undefined; + return result; + }; +} + +function getInfo(path, flags) { + return { + path, + flags, + event: getEventType(flags), + type: getFileType(flags), + changes: getFileChanges(flags), + }; +} + +function getFileType(flags) { + if (events.ItemIsFile & flags) return "file"; + if (events.ItemIsDir & flags) return "directory"; + if (events.ItemIsSymlink & flags) return "symlink"; +} +function anyIsTrue(obj) { + for (let key in obj) { + if (obj[key]) return true; + } + return false; +} +function getEventType(flags) { + if (events.ItemRemoved & flags) return "deleted"; + if (events.ItemRenamed & flags) return "moved"; + if (events.ItemCreated & flags) return "created"; + if (events.ItemModified & flags) return "modified"; + if (events.RootChanged & flags) return "root-changed"; + if (events.ItemCloned & flags) return "cloned"; + if (anyIsTrue(flags)) return "modified"; + return "unknown"; +} +function getFileChanges(flags) { + return { + inode: !!(events.ItemInodeMetaMod & flags), + finder: !!(events.ItemFinderInfoMod & flags), + access: !!(events.ItemChangeOwner & flags), + xattrs: !!(events.ItemXattrMod & flags), + }; +} + +exports.watch = watch; +exports.getInfo = getInfo; +exports.constants = events; diff --git a/node_modules/fsevents/fsevents.node b/node_modules/fsevents/fsevents.node new file mode 100755 index 0000000..00fac7e Binary files /dev/null and b/node_modules/fsevents/fsevents.node differ diff --git a/node_modules/fsevents/package.json b/node_modules/fsevents/package.json new file mode 100644 index 0000000..af6da84 --- /dev/null +++ b/node_modules/fsevents/package.json @@ -0,0 +1,62 @@ +{ + "name": "fsevents", + "version": "2.3.2", + "description": "Native Access to MacOS FSEvents", + "main": "fsevents.js", + "types": "fsevents.d.ts", + "os": [ + "darwin" + ], + "files": [ + "fsevents.d.ts", + "fsevents.js", + "fsevents.node" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + }, + "scripts": { + "clean": "node-gyp clean && rm -f fsevents.node", + "build": "node-gyp clean && rm -f fsevents.node && node-gyp rebuild && node-gyp clean", + "test": "/bin/bash ./test.sh 2>/dev/null", + "prepublishOnly": "npm run build" + }, + "repository": { + "type": "git", + "url": "https://github.com/fsevents/fsevents.git" + }, + "keywords": [ + "fsevents", + "mac" + ], + "contributors": [ + { + "name": "Philipp Dunkel", + "email": "pip@pipobscure.com" + }, + { + "name": "Ben Noordhuis", + "email": "info@bnoordhuis.nl" + }, + { + "name": "Elan Shankar", + "email": "elan.shanker@gmail.com" + }, + { + "name": "Miroslav Bajtoš", + "email": "mbajtoss@gmail.com" + }, + { + "name": "Paul Miller", + "url": "https://paulmillr.com" + } + ], + "license": "MIT", + "bugs": { + "url": "https://github.com/fsevents/fsevents/issues" + }, + "homepage": "https://github.com/fsevents/fsevents", + "devDependencies": { + "node-gyp": "^6.1.0" + } +} diff --git a/node_modules/glob-parent/LICENSE b/node_modules/glob-parent/LICENSE new file mode 100644 index 0000000..d701b08 --- /dev/null +++ b/node_modules/glob-parent/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) 2015, 2019 Elan Shanker, 2021 Blaine Bublitz , Eric Schoffstall and other contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. 
+ +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/glob-parent/README.md b/node_modules/glob-parent/README.md new file mode 100644 index 0000000..6ae18a1 --- /dev/null +++ b/node_modules/glob-parent/README.md @@ -0,0 +1,134 @@ +


+ +# glob-parent + +[![NPM version][npm-image]][npm-url] [![Downloads][downloads-image]][npm-url] [![Build Status][ci-image]][ci-url] [![Coveralls Status][coveralls-image]][coveralls-url] + +Extract the non-magic parent path from a glob string. + +## Usage + +```js +var globParent = require('glob-parent'); + +globParent('path/to/*.js'); // 'path/to' +globParent('/root/path/to/*.js'); // '/root/path/to' +globParent('/*.js'); // '/' +globParent('*.js'); // '.' +globParent('**/*.js'); // '.' +globParent('path/{to,from}'); // 'path' +globParent('path/!(to|from)'); // 'path' +globParent('path/?(to|from)'); // 'path' +globParent('path/+(to|from)'); // 'path' +globParent('path/*(to|from)'); // 'path' +globParent('path/@(to|from)'); // 'path' +globParent('path/**/*'); // 'path' + +// if provided a non-glob path, returns the nearest dir +globParent('path/foo/bar.js'); // 'path/foo' +globParent('path/foo/'); // 'path/foo' +globParent('path/foo'); // 'path' (see issue #3 for details) +``` + +## API + +### `globParent(maybeGlobString, [options])` + +Takes a string and returns the part of the path before the glob begins. Be aware of Escaping rules and Limitations below. + +#### options + +```js +{ + // Disables the automatic conversion of slashes for Windows + flipBackslashes: true; +} +``` + +## Escaping + +The following characters have special significance in glob patterns and must be escaped if you want them to be treated as regular path characters: + +- `?` (question mark) unless used as a path segment alone +- `*` (asterisk) +- `|` (pipe) +- `(` (opening parenthesis) +- `)` (closing parenthesis) +- `{` (opening curly brace) +- `}` (closing curly brace) +- `[` (opening bracket) +- `]` (closing bracket) + +**Example** + +```js +globParent('foo/[bar]/'); // 'foo' +globParent('foo/\\[bar]/'); // 'foo/[bar]' +``` + +## Limitations + +### Braces & Brackets + +This library attempts a quick and imperfect method of determining which path +parts have glob magic without fully parsing/lexing the pattern. There are some +advanced use cases that can trip it up, such as nested braces where the outer +pair is escaped and the inner one contains a path separator. If you find +yourself in the unlikely circumstance of being affected by this or need to +ensure higher-fidelity glob handling in your library, it is recommended that you +pre-process your input with [expand-braces] and/or [expand-brackets]. + +### Windows + +Backslashes are not valid path separators for globs. If a path with backslashes +is provided anyway, for simple cases, glob-parent will replace the path +separator for you and return the non-glob parent path (now with +forward-slashes, which are still valid as Windows path separators). + +This cannot be used in conjunction with escape characters. + +```js +// BAD +globParent('C:\\Program Files \\(x86\\)\\*.ext'); // 'C:/Program Files /(x86/)' + +// GOOD +globParent('C:/Program Files\\(x86\\)/*.ext'); // 'C:/Program Files (x86)' +``` + +If you are using escape characters for a pattern without path parts (i.e. +relative to `cwd`), prefix with `./` to avoid confusing glob-parent. + +```js +// BAD +globParent('foo \\[bar]'); // 'foo ' +globParent('foo \\[bar]*'); // 'foo ' + +// GOOD +globParent('./foo \\[bar]'); // 'foo [bar]' +globParent('./foo \\[bar]*'); // '.' 
+``` + +## License + +ISC + + +[downloads-image]: https://img.shields.io/npm/dm/glob-parent.svg?style=flat-square +[npm-url]: https://www.npmjs.com/package/glob-parent +[npm-image]: https://img.shields.io/npm/v/glob-parent.svg?style=flat-square + +[ci-url]: https://github.com/gulpjs/glob-parent/actions?query=workflow:dev +[ci-image]: https://img.shields.io/github/workflow/status/gulpjs/glob-parent/dev?style=flat-square + +[coveralls-url]: https://coveralls.io/r/gulpjs/glob-parent +[coveralls-image]: https://img.shields.io/coveralls/gulpjs/glob-parent/master.svg?style=flat-square + + + +[expand-braces]: https://github.com/jonschlinkert/expand-braces +[expand-brackets]: https://github.com/jonschlinkert/expand-brackets + diff --git a/node_modules/glob-parent/index.js b/node_modules/glob-parent/index.js new file mode 100644 index 0000000..09dde64 --- /dev/null +++ b/node_modules/glob-parent/index.js @@ -0,0 +1,75 @@ +'use strict'; + +var isGlob = require('is-glob'); +var pathPosixDirname = require('path').posix.dirname; +var isWin32 = require('os').platform() === 'win32'; + +var slash = '/'; +var backslash = /\\/g; +var escaped = /\\([!*?|[\](){}])/g; + +/** + * @param {string} str + * @param {Object} opts + * @param {boolean} [opts.flipBackslashes=true] + */ +module.exports = function globParent(str, opts) { + var options = Object.assign({ flipBackslashes: true }, opts); + + // flip windows path separators + if (options.flipBackslashes && isWin32 && str.indexOf(slash) < 0) { + str = str.replace(backslash, slash); + } + + // special case for strings ending in enclosure containing path separator + if (isEnclosure(str)) { + str += slash; + } + + // preserves full path in case of trailing path separator + str += 'a'; + + // remove path parts that are globby + do { + str = pathPosixDirname(str); + } while (isGlobby(str)); + + // remove escape chars and return result + return str.replace(escaped, '$1'); +}; + +function isEnclosure(str) { + var lastChar = str.slice(-1); + + var enclosureStart; + switch (lastChar) { + case '}': + enclosureStart = '{'; + break; + case ']': + enclosureStart = '['; + break; + default: + return false; + } + + var foundIndex = str.indexOf(enclosureStart); + if (foundIndex < 0) { + return false; + } + + return str.slice(foundIndex + 1, -1).includes(slash); +} + +function isGlobby(str) { + if (/\([^()]+$/.test(str)) { + return true; + } + if (str[0] === '{' || str[0] === '[') { + return true; + } + if (/[^\\][{[]/.test(str)) { + return true; + } + return isGlob(str); +} diff --git a/node_modules/glob-parent/package.json b/node_modules/glob-parent/package.json new file mode 100644 index 0000000..baeab42 --- /dev/null +++ b/node_modules/glob-parent/package.json @@ -0,0 +1,54 @@ +{ + "name": "glob-parent", + "version": "6.0.2", + "description": "Extract the non-magic parent path from a glob string.", + "author": "Gulp Team (https://gulpjs.com/)", + "contributors": [ + "Elan Shanker (https://github.com/es128)", + "Blaine Bublitz " + ], + "repository": "gulpjs/glob-parent", + "license": "ISC", + "engines": { + "node": ">=10.13.0" + }, + "main": "index.js", + "files": [ + "LICENSE", + "index.js" + ], + "scripts": { + "lint": "eslint .", + "pretest": "npm run lint", + "test": "nyc mocha --async-only" + }, + "dependencies": { + "is-glob": "^4.0.3" + }, + "devDependencies": { + "eslint": "^7.0.0", + "eslint-config-gulp": "^5.0.0", + "expect": "^26.0.1", + "mocha": "^7.1.2", + "nyc": "^15.0.1" + }, + "nyc": { + "reporter": [ + "lcov", + "text-summary" + ] + }, + "prettier": { + 
"singleQuote": true + }, + "keywords": [ + "glob", + "parent", + "strip", + "path", + "dirname", + "directory", + "base", + "wildcard" + ] +} diff --git a/node_modules/glob/LICENSE b/node_modules/glob/LICENSE new file mode 100644 index 0000000..42ca266 --- /dev/null +++ b/node_modules/glob/LICENSE @@ -0,0 +1,21 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +## Glob Logo + +Glob's logo created by Tanya Brassie , licensed +under a Creative Commons Attribution-ShareAlike 4.0 International License +https://creativecommons.org/licenses/by-sa/4.0/ diff --git a/node_modules/glob/README.md b/node_modules/glob/README.md new file mode 100644 index 0000000..0916a48 --- /dev/null +++ b/node_modules/glob/README.md @@ -0,0 +1,375 @@ +# Glob + +Match files using the patterns the shell uses, like stars and stuff. + +[![Build Status](https://travis-ci.org/isaacs/node-glob.svg?branch=master)](https://travis-ci.org/isaacs/node-glob/) [![Build Status](https://ci.appveyor.com/api/projects/status/kd7f3yftf7unxlsx?svg=true)](https://ci.appveyor.com/project/isaacs/node-glob) [![Coverage Status](https://coveralls.io/repos/isaacs/node-glob/badge.svg?branch=master&service=github)](https://coveralls.io/github/isaacs/node-glob?branch=master) + +This is a glob implementation in JavaScript. It uses the `minimatch` +library to do its matching. + +![](logo/glob.png) + +## Usage + +Install with npm + +``` +npm i glob +``` + +```javascript +var glob = require("glob") + +// options is optional +glob("**/*.js", options, function (er, files) { + // files is an array of filenames. + // If the `nonull` option is set, and nothing + // was found, then files is ["**/*.js"] + // er is an error object or null. +}) +``` + +## Glob Primer + +"Globs" are the patterns you type when you do stuff like `ls *.js` on +the command line, or put `build/*` in a `.gitignore` file. + +Before parsing the path part patterns, braced sections are expanded +into a set. Braced sections start with `{` and end with `}`, with any +number of comma-delimited sections within. Braced sections may contain +slash characters, so `a{/b/c,bcd}` would expand into `a/b/c` and `abcd`. + +The following characters have special magic meaning when used in a +path portion: + +* `*` Matches 0 or more characters in a single path portion +* `?` Matches 1 character +* `[...]` Matches a range of characters, similar to a RegExp range. + If the first character of the range is `!` or `^` then it matches + any character not in the range. +* `!(pattern|pattern|pattern)` Matches anything that does not match + any of the patterns provided. +* `?(pattern|pattern|pattern)` Matches zero or one occurrence of the + patterns provided. +* `+(pattern|pattern|pattern)` Matches one or more occurrences of the + patterns provided. 
+* `*(a|b|c)` Matches zero or more occurrences of the patterns provided +* `@(pattern|pat*|pat?erN)` Matches exactly one of the patterns + provided +* `**` If a "globstar" is alone in a path portion, then it matches + zero or more directories and subdirectories searching for matches. + It does not crawl symlinked directories. + +### Dots + +If a file or directory path portion has a `.` as the first character, +then it will not match any glob pattern unless that pattern's +corresponding path part also has a `.` as its first character. + +For example, the pattern `a/.*/c` would match the file at `a/.b/c`. +However the pattern `a/*/c` would not, because `*` does not start with +a dot character. + +You can make glob treat dots as normal characters by setting +`dot:true` in the options. + +### Basename Matching + +If you set `matchBase:true` in the options, and the pattern has no +slashes in it, then it will seek for any file anywhere in the tree +with a matching basename. For example, `*.js` would match +`test/simple/basic.js`. + +### Empty Sets + +If no matching files are found, then an empty array is returned. This +differs from the shell, where the pattern itself is returned. For +example: + + $ echo a*s*d*f + a*s*d*f + +To get the bash-style behavior, set the `nonull:true` in the options. + +### See Also: + +* `man sh` +* `man bash` (Search for "Pattern Matching") +* `man 3 fnmatch` +* `man 5 gitignore` +* [minimatch documentation](https://github.com/isaacs/minimatch) + +## glob.hasMagic(pattern, [options]) + +Returns `true` if there are any special characters in the pattern, and +`false` otherwise. + +Note that the options affect the results. If `noext:true` is set in +the options object, then `+(a|b)` will not be considered a magic +pattern. If the pattern has a brace expansion, like `a/{b/c,x/y}` +then that is considered magical, unless `nobrace:true` is set in the +options. + +## glob(pattern, [options], cb) + +* `pattern` `{String}` Pattern to be matched +* `options` `{Object}` +* `cb` `{Function}` + * `err` `{Error | null}` + * `matches` `{Array}` filenames found matching the pattern + +Perform an asynchronous glob search. + +## glob.sync(pattern, [options]) + +* `pattern` `{String}` Pattern to be matched +* `options` `{Object}` +* return: `{Array}` filenames found matching the pattern + +Perform a synchronous glob search. + +## Class: glob.Glob + +Create a Glob object by instantiating the `glob.Glob` class. + +```javascript +var Glob = require("glob").Glob +var mg = new Glob(pattern, options, cb) +``` + +It's an EventEmitter, and starts walking the filesystem to find matches +immediately. + +### new glob.Glob(pattern, [options], [cb]) + +* `pattern` `{String}` pattern to search for +* `options` `{Object}` +* `cb` `{Function}` Called when an error occurs, or matches are found + * `err` `{Error | null}` + * `matches` `{Array}` filenames found matching the pattern + +Note that if the `sync` flag is set in the options, then matches will +be immediately available on the `g.found` member. + +### Properties + +* `minimatch` The minimatch object that the glob uses. +* `options` The options object passed in. +* `aborted` Boolean which is set to true when calling `abort()`. There + is no way at this time to continue a glob search after aborting, but + you can re-use the statCache to avoid having to duplicate syscalls. +* `cache` Convenience object. 
Each field has the following possible + values: + * `false` - Path does not exist + * `true` - Path exists + * `'FILE'` - Path exists, and is not a directory + * `'DIR'` - Path exists, and is a directory + * `[file, entries, ...]` - Path exists, is a directory, and the + array value is the results of `fs.readdir` +* `statCache` Cache of `fs.stat` results, to prevent statting the same + path multiple times. +* `symlinks` A record of which paths are symbolic links, which is + relevant in resolving `**` patterns. +* `realpathCache` An optional object which is passed to `fs.realpath` + to minimize unnecessary syscalls. It is stored on the instantiated + Glob object, and may be re-used. + +### Events + +* `end` When the matching is finished, this is emitted with all the + matches found. If the `nonull` option is set, and no match was found, + then the `matches` list contains the original pattern. The matches + are sorted, unless the `nosort` flag is set. +* `match` Every time a match is found, this is emitted with the specific + thing that matched. It is not deduplicated or resolved to a realpath. +* `error` Emitted when an unexpected error is encountered, or whenever + any fs error occurs if `options.strict` is set. +* `abort` When `abort()` is called, this event is raised. + +### Methods + +* `pause` Temporarily stop the search +* `resume` Resume the search +* `abort` Stop the search forever + +### Options + +All the options that can be passed to Minimatch can also be passed to +Glob to change pattern matching behavior. Also, some have been added, +or have glob-specific ramifications. + +All options are false by default, unless otherwise noted. + +All options are added to the Glob object, as well. + +If you are running many `glob` operations, you can pass a Glob object +as the `options` argument to a subsequent operation to shortcut some +`stat` and `readdir` calls. At the very least, you may pass in shared +`symlinks`, `statCache`, `realpathCache`, and `cache` options, so that +parallel glob operations will be sped up by sharing information about +the filesystem. + +* `cwd` The current working directory in which to search. Defaults + to `process.cwd()`. +* `root` The place where patterns starting with `/` will be mounted + onto. Defaults to `path.resolve(options.cwd, "/")` (`/` on Unix + systems, and `C:\` or some such on Windows.) +* `dot` Include `.dot` files in normal matches and `globstar` matches. + Note that an explicit dot in a portion of the pattern will always + match dot files. +* `nomount` By default, a pattern starting with a forward-slash will be + "mounted" onto the root setting, so that a valid filesystem path is + returned. Set this flag to disable that behavior. +* `mark` Add a `/` character to directory matches. Note that this + requires additional stat calls. +* `nosort` Don't sort the results. +* `stat` Set to true to stat *all* results. This reduces performance + somewhat, and is completely unnecessary, unless `readdir` is presumed + to be an untrustworthy indicator of file existence. +* `silent` When an unusual error is encountered when attempting to + read a directory, a warning will be printed to stderr. Set the + `silent` option to true to suppress these warnings. +* `strict` When an unusual error is encountered when attempting to + read a directory, the process will just continue on in search of + other matches. Set the `strict` option to raise an error in these + cases. +* `cache` See `cache` property above. 
Pass in a previously generated + cache object to save some fs calls. +* `statCache` A cache of results of filesystem information, to prevent + unnecessary stat calls. While it should not normally be necessary + to set this, you may pass the statCache from one glob() call to the + options object of another, if you know that the filesystem will not + change between calls. (See "Race Conditions" below.) +* `symlinks` A cache of known symbolic links. You may pass in a + previously generated `symlinks` object to save `lstat` calls when + resolving `**` matches. +* `sync` DEPRECATED: use `glob.sync(pattern, opts)` instead. +* `nounique` In some cases, brace-expanded patterns can result in the + same file showing up multiple times in the result set. By default, + this implementation prevents duplicates in the result set. Set this + flag to disable that behavior. +* `nonull` Set to never return an empty set, instead returning a set + containing the pattern itself. This is the default in glob(3). +* `debug` Set to enable debug logging in minimatch and glob. +* `nobrace` Do not expand `{a,b}` and `{1..3}` brace sets. +* `noglobstar` Do not match `**` against multiple filenames. (Ie, + treat it as a normal `*` instead.) +* `noext` Do not match `+(a|b)` "extglob" patterns. +* `nocase` Perform a case-insensitive match. Note: on + case-insensitive filesystems, non-magic patterns will match by + default, since `stat` and `readdir` will not raise errors. +* `matchBase` Perform a basename-only match if the pattern does not + contain any slash characters. That is, `*.js` would be treated as + equivalent to `**/*.js`, matching all js files in all directories. +* `nodir` Do not match directories, only files. (Note: to match + *only* directories, simply put a `/` at the end of the pattern.) +* `ignore` Add a pattern or an array of glob patterns to exclude matches. + Note: `ignore` patterns are *always* in `dot:true` mode, regardless + of any other settings. +* `follow` Follow symlinked directories when expanding `**` patterns. + Note that this can result in a lot of duplicate references in the + presence of cyclic links. +* `realpath` Set to true to call `fs.realpath` on all of the results. + In the case of a symlink that cannot be resolved, the full absolute + path to the matched entry is returned (though it will usually be a + broken symlink) +* `absolute` Set to true to always receive absolute paths for matched + files. Unlike `realpath`, this also affects the values returned in + the `match` event. + +## Comparisons to other fnmatch/glob implementations + +While strict compliance with the existing standards is a worthwhile +goal, some discrepancies exist between node-glob and other +implementations, and are intentional. + +The double-star character `**` is supported by default, unless the +`noglobstar` flag is set. This is supported in the manner of bsdglob +and bash 4.3, where `**` only has special significance if it is the only +thing in a path part. That is, `a/**/b` will match `a/x/y/b`, but +`a/**b` will not. + +Note that symlinked directories are not crawled as part of a `**`, +though their contents may match against subsequent portions of the +pattern. This prevents infinite loops and duplicates and the like. + +If an escaped pattern has no matches, and the `nonull` flag is set, +then glob returns the pattern as-provided, rather than +interpreting the character escapes. For example, +`glob.match([], "\\*a\\?")` will return `"\\*a\\?"` rather than +`"*a?"`. 
This is akin to setting the `nullglob` option in bash, except +that it does not resolve escaped pattern characters. + +If brace expansion is not disabled, then it is performed before any +other interpretation of the glob pattern. Thus, a pattern like +`+(a|{b),c)}`, which would not be valid in bash or zsh, is expanded +**first** into the set of `+(a|b)` and `+(a|c)`, and those patterns are +checked for validity. Since those two are valid, matching proceeds. + +### Comments and Negation + +Previously, this module let you mark a pattern as a "comment" if it +started with a `#` character, or a "negated" pattern if it started +with a `!` character. + +These options were deprecated in version 5, and removed in version 6. + +To specify things that should not match, use the `ignore` option. + +## Windows + +**Please only use forward-slashes in glob expressions.** + +Though windows uses either `/` or `\` as its path separator, only `/` +characters are used by this glob implementation. You must use +forward-slashes **only** in glob expressions. Back-slashes will always +be interpreted as escape characters, not path separators. + +Results from absolute patterns such as `/foo/*` are mounted onto the +root setting using `path.join`. On windows, this will by default result +in `/foo/*` matching `C:\foo\bar.txt`. + +## Race Conditions + +Glob searching, by its very nature, is susceptible to race conditions, +since it relies on directory walking and such. + +As a result, it is possible that a file that exists when glob looks for +it may have been deleted or modified by the time it returns the result. + +As part of its internal implementation, this program caches all stat +and readdir calls that it makes, in order to cut down on system +overhead. However, this also makes it even more susceptible to races, +especially if the cache or statCache objects are reused between glob +calls. + +Users are thus advised not to use a glob result as a guarantee of +filesystem state in the face of rapid changes. For the vast majority +of operations, this is never a problem. + +## Glob Logo +Glob's logo was created by [Tanya Brassie](http://tanyabrassie.com/). Logo files can be found [here](https://github.com/isaacs/node-glob/tree/master/logo). + +The logo is licensed under a [Creative Commons Attribution-ShareAlike 4.0 International License](https://creativecommons.org/licenses/by-sa/4.0/). + +## Contributing + +Any change to behavior (including bugfixes) must come with a test. + +Patches that fail tests or reduce performance will be rejected. + +``` +# to run tests +npm test + +# to re-generate test fixtures +npm run test-regen + +# to benchmark against bash/zsh +npm run bench + +# to profile javascript +npm run prof +``` + +![](oh-my-glob.gif) diff --git a/node_modules/glob/changelog.md b/node_modules/glob/changelog.md new file mode 100644 index 0000000..4163677 --- /dev/null +++ b/node_modules/glob/changelog.md @@ -0,0 +1,67 @@ +## 7.0 + +- Raise error if `options.cwd` is specified, and not a directory + +## 6.0 + +- Remove comment and negation pattern support +- Ignore patterns are always in `dot:true` mode + +## 5.0 + +- Deprecate comment and negation patterns +- Fix regression in `mark` and `nodir` options from making all cache + keys absolute path. 
+- Abort if `fs.readdir` returns an error that's unexpected +- Don't emit `match` events for ignored items +- Treat ENOTSUP like ENOTDIR in readdir + +## 4.5 + +- Add `options.follow` to always follow directory symlinks in globstar +- Add `options.realpath` to call `fs.realpath` on all results +- Always cache based on absolute path + +## 4.4 + +- Add `options.ignore` +- Fix handling of broken symlinks + +## 4.3 + +- Bump minimatch to 2.x +- Pass all tests on Windows + +## 4.2 + +- Add `glob.hasMagic` function +- Add `options.nodir` flag + +## 4.1 + +- Refactor sync and async implementations for performance +- Throw if callback provided to sync glob function +- Treat symbolic links in globstar results the same as Bash 4.3 + +## 4.0 + +- Use `^` for dependency versions (bumped major because this breaks + older npm versions) +- Ensure callbacks are only ever called once +- switch to ISC license + +## 3.x + +- Rewrite in JavaScript +- Add support for setting root, cwd, and windows support +- Cache many fs calls +- Add globstar support +- emit match events + +## 2.x + +- Use `glob.h` and `fnmatch.h` from NetBSD + +## 1.x + +- `glob.h` static binding. diff --git a/node_modules/glob/common.js b/node_modules/glob/common.js new file mode 100644 index 0000000..66651bb --- /dev/null +++ b/node_modules/glob/common.js @@ -0,0 +1,240 @@ +exports.alphasort = alphasort +exports.alphasorti = alphasorti +exports.setopts = setopts +exports.ownProp = ownProp +exports.makeAbs = makeAbs +exports.finish = finish +exports.mark = mark +exports.isIgnored = isIgnored +exports.childrenIgnored = childrenIgnored + +function ownProp (obj, field) { + return Object.prototype.hasOwnProperty.call(obj, field) +} + +var path = require("path") +var minimatch = require("minimatch") +var isAbsolute = require("path-is-absolute") +var Minimatch = minimatch.Minimatch + +function alphasorti (a, b) { + return a.toLowerCase().localeCompare(b.toLowerCase()) +} + +function alphasort (a, b) { + return a.localeCompare(b) +} + +function setupIgnores (self, options) { + self.ignore = options.ignore || [] + + if (!Array.isArray(self.ignore)) + self.ignore = [self.ignore] + + if (self.ignore.length) { + self.ignore = self.ignore.map(ignoreMap) + } +} + +// ignore patterns are always in dot:true mode. +function ignoreMap (pattern) { + var gmatcher = null + if (pattern.slice(-3) === '/**') { + var gpattern = pattern.replace(/(\/\*\*)+$/, '') + gmatcher = new Minimatch(gpattern, { dot: true }) + } + + return { + matcher: new Minimatch(pattern, { dot: true }), + gmatcher: gmatcher + } +} + +function setopts (self, pattern, options) { + if (!options) + options = {} + + // base-matching: just use globstar for that. 
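+  // e.g. with matchBase, a bare pattern like '*.js' becomes '**/*.js' below and can then match at any depth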
+ if (options.matchBase && -1 === pattern.indexOf("/")) { + if (options.noglobstar) { + throw new Error("base matching requires globstar") + } + pattern = "**/" + pattern + } + + self.silent = !!options.silent + self.pattern = pattern + self.strict = options.strict !== false + self.realpath = !!options.realpath + self.realpathCache = options.realpathCache || Object.create(null) + self.follow = !!options.follow + self.dot = !!options.dot + self.mark = !!options.mark + self.nodir = !!options.nodir + if (self.nodir) + self.mark = true + self.sync = !!options.sync + self.nounique = !!options.nounique + self.nonull = !!options.nonull + self.nosort = !!options.nosort + self.nocase = !!options.nocase + self.stat = !!options.stat + self.noprocess = !!options.noprocess + self.absolute = !!options.absolute + + self.maxLength = options.maxLength || Infinity + self.cache = options.cache || Object.create(null) + self.statCache = options.statCache || Object.create(null) + self.symlinks = options.symlinks || Object.create(null) + + setupIgnores(self, options) + + self.changedCwd = false + var cwd = process.cwd() + if (!ownProp(options, "cwd")) + self.cwd = cwd + else { + self.cwd = path.resolve(options.cwd) + self.changedCwd = self.cwd !== cwd + } + + self.root = options.root || path.resolve(self.cwd, "/") + self.root = path.resolve(self.root) + if (process.platform === "win32") + self.root = self.root.replace(/\\/g, "/") + + // TODO: is an absolute `cwd` supposed to be resolved against `root`? + // e.g. { cwd: '/test', root: __dirname } === path.join(__dirname, '/test') + self.cwdAbs = isAbsolute(self.cwd) ? self.cwd : makeAbs(self, self.cwd) + if (process.platform === "win32") + self.cwdAbs = self.cwdAbs.replace(/\\/g, "/") + self.nomount = !!options.nomount + + // disable comments and negation in Minimatch. + // Note that they are not supported in Glob itself anyway. + options.nonegate = true + options.nocomment = true + + self.minimatch = new Minimatch(pattern, options) + self.options = self.minimatch.options +} + +function finish (self) { + var nou = self.nounique + var all = nou ? [] : Object.create(null) + + for (var i = 0, l = self.matches.length; i < l; i ++) { + var matches = self.matches[i] + if (!matches || Object.keys(matches).length === 0) { + if (self.nonull) { + // do like the shell, and spit out the literal glob + var literal = self.minimatch.globSet[i] + if (nou) + all.push(literal) + else + all[literal] = true + } + } else { + // had matches + var m = Object.keys(matches) + if (nou) + all.push.apply(all, m) + else + m.forEach(function (m) { + all[m] = true + }) + } + } + + if (!nou) + all = Object.keys(all) + + if (!self.nosort) + all = all.sort(self.nocase ? 
alphasorti : alphasort) + + // at *some* point we statted all of these + if (self.mark) { + for (var i = 0; i < all.length; i++) { + all[i] = self._mark(all[i]) + } + if (self.nodir) { + all = all.filter(function (e) { + var notDir = !(/\/$/.test(e)) + var c = self.cache[e] || self.cache[makeAbs(self, e)] + if (notDir && c) + notDir = c !== 'DIR' && !Array.isArray(c) + return notDir + }) + } + } + + if (self.ignore.length) + all = all.filter(function(m) { + return !isIgnored(self, m) + }) + + self.found = all +} + +function mark (self, p) { + var abs = makeAbs(self, p) + var c = self.cache[abs] + var m = p + if (c) { + var isDir = c === 'DIR' || Array.isArray(c) + var slash = p.slice(-1) === '/' + + if (isDir && !slash) + m += '/' + else if (!isDir && slash) + m = m.slice(0, -1) + + if (m !== p) { + var mabs = makeAbs(self, m) + self.statCache[mabs] = self.statCache[abs] + self.cache[mabs] = self.cache[abs] + } + } + + return m +} + +// lotta situps... +function makeAbs (self, f) { + var abs = f + if (f.charAt(0) === '/') { + abs = path.join(self.root, f) + } else if (isAbsolute(f) || f === '') { + abs = f + } else if (self.changedCwd) { + abs = path.resolve(self.cwd, f) + } else { + abs = path.resolve(f) + } + + if (process.platform === 'win32') + abs = abs.replace(/\\/g, '/') + + return abs +} + + +// Return true, if pattern ends with globstar '**', for the accompanying parent directory. +// Ex:- If node_modules/** is the pattern, add 'node_modules' to ignore list along with it's contents +function isIgnored (self, path) { + if (!self.ignore.length) + return false + + return self.ignore.some(function(item) { + return item.matcher.match(path) || !!(item.gmatcher && item.gmatcher.match(path)) + }) +} + +function childrenIgnored (self, path) { + if (!self.ignore.length) + return false + + return self.ignore.some(function(item) { + return !!(item.gmatcher && item.gmatcher.match(path)) + }) +} diff --git a/node_modules/glob/glob.js b/node_modules/glob/glob.js new file mode 100644 index 0000000..58dec0f --- /dev/null +++ b/node_modules/glob/glob.js @@ -0,0 +1,790 @@ +// Approach: +// +// 1. Get the minimatch set +// 2. For each pattern in the set, PROCESS(pattern, false) +// 3. Store matches per-set, then uniq them +// +// PROCESS(pattern, inGlobStar) +// Get the first [n] items from pattern that are all strings +// Join these together. This is PREFIX. +// If there is no more remaining, then stat(PREFIX) and +// add to matches if it succeeds. END. +// +// If inGlobStar and PREFIX is symlink and points to dir +// set ENTRIES = [] +// else readdir(PREFIX) as ENTRIES +// If fail, END +// +// with ENTRIES +// If pattern[n] is GLOBSTAR +// // handle the case where the globstar match is empty +// // by pruning it out, and testing the resulting pattern +// PROCESS(pattern[0..n] + pattern[n+1 .. $], false) +// // handle other cases. +// for ENTRY in ENTRIES (not dotfiles) +// // attach globstar + tail onto the entry +// // Mark that this entry is a globstar match +// PROCESS(pattern[0..n] + ENTRY + pattern[n .. $], true) +// +// else // not globstar +// for ENTRY in ENTRIES (not dotfiles, unless pattern[n] is dot) +// Test ENTRY against pattern[n] +// If fails, continue +// If passes, PROCESS(pattern[0..n] + item + pattern[n+1 .. $]) +// +// Caveat: +// Cache all stats and readdirs results to minimize syscall. Since all +// we ever care about is existence and directory-ness, we can just keep +// `true` for files, and [children,...] for directories, or `false` for +// things that don't exist. 
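+// For example (illustrative values): after a walk the cache may hold entries like +// { '/p/src': ['a.js', 'lib'], '/p/src/a.js': true, '/p/missing': false }, and the +// strings 'FILE' / 'DIR' can also appear once stat information has been recorded.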
+ +module.exports = glob + +var fs = require('fs') +var rp = require('fs.realpath') +var minimatch = require('minimatch') +var Minimatch = minimatch.Minimatch +var inherits = require('inherits') +var EE = require('events').EventEmitter +var path = require('path') +var assert = require('assert') +var isAbsolute = require('path-is-absolute') +var globSync = require('./sync.js') +var common = require('./common.js') +var alphasort = common.alphasort +var alphasorti = common.alphasorti +var setopts = common.setopts +var ownProp = common.ownProp +var inflight = require('inflight') +var util = require('util') +var childrenIgnored = common.childrenIgnored +var isIgnored = common.isIgnored + +var once = require('once') + +function glob (pattern, options, cb) { + if (typeof options === 'function') cb = options, options = {} + if (!options) options = {} + + if (options.sync) { + if (cb) + throw new TypeError('callback provided to sync glob') + return globSync(pattern, options) + } + + return new Glob(pattern, options, cb) +} + +glob.sync = globSync +var GlobSync = glob.GlobSync = globSync.GlobSync + +// old api surface +glob.glob = glob + +function extend (origin, add) { + if (add === null || typeof add !== 'object') { + return origin + } + + var keys = Object.keys(add) + var i = keys.length + while (i--) { + origin[keys[i]] = add[keys[i]] + } + return origin +} + +glob.hasMagic = function (pattern, options_) { + var options = extend({}, options_) + options.noprocess = true + + var g = new Glob(pattern, options) + var set = g.minimatch.set + + if (!pattern) + return false + + if (set.length > 1) + return true + + for (var j = 0; j < set[0].length; j++) { + if (typeof set[0][j] !== 'string') + return true + } + + return false +} + +glob.Glob = Glob +inherits(Glob, EE) +function Glob (pattern, options, cb) { + if (typeof options === 'function') { + cb = options + options = null + } + + if (options && options.sync) { + if (cb) + throw new TypeError('callback provided to sync glob') + return new GlobSync(pattern, options) + } + + if (!(this instanceof Glob)) + return new Glob(pattern, options, cb) + + setopts(this, pattern, options) + this._didRealPath = false + + // process each pattern in the minimatch set + var n = this.minimatch.set.length + + // The matches are stored as {: true,...} so that + // duplicates are automagically pruned. + // Later, we do an Object.keys() on these. + // Keep them as a list so we can fill in when nonull is set. 
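+  // e.g. this.matches[1] may end up as { 'src/a.js': true, 'src/b.js': true } for the second pattern in the set (illustrative)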
+ this.matches = new Array(n) + + if (typeof cb === 'function') { + cb = once(cb) + this.on('error', cb) + this.on('end', function (matches) { + cb(null, matches) + }) + } + + var self = this + this._processing = 0 + + this._emitQueue = [] + this._processQueue = [] + this.paused = false + + if (this.noprocess) + return this + + if (n === 0) + return done() + + var sync = true + for (var i = 0; i < n; i ++) { + this._process(this.minimatch.set[i], i, false, done) + } + sync = false + + function done () { + --self._processing + if (self._processing <= 0) { + if (sync) { + process.nextTick(function () { + self._finish() + }) + } else { + self._finish() + } + } + } +} + +Glob.prototype._finish = function () { + assert(this instanceof Glob) + if (this.aborted) + return + + if (this.realpath && !this._didRealpath) + return this._realpath() + + common.finish(this) + this.emit('end', this.found) +} + +Glob.prototype._realpath = function () { + if (this._didRealpath) + return + + this._didRealpath = true + + var n = this.matches.length + if (n === 0) + return this._finish() + + var self = this + for (var i = 0; i < this.matches.length; i++) + this._realpathSet(i, next) + + function next () { + if (--n === 0) + self._finish() + } +} + +Glob.prototype._realpathSet = function (index, cb) { + var matchset = this.matches[index] + if (!matchset) + return cb() + + var found = Object.keys(matchset) + var self = this + var n = found.length + + if (n === 0) + return cb() + + var set = this.matches[index] = Object.create(null) + found.forEach(function (p, i) { + // If there's a problem with the stat, then it means that + // one or more of the links in the realpath couldn't be + // resolved. just return the abs value in that case. + p = self._makeAbs(p) + rp.realpath(p, self.realpathCache, function (er, real) { + if (!er) + set[real] = true + else if (er.syscall === 'stat') + set[p] = true + else + self.emit('error', er) // srsly wtf right here + + if (--n === 0) { + self.matches[index] = set + cb() + } + }) + }) +} + +Glob.prototype._mark = function (p) { + return common.mark(this, p) +} + +Glob.prototype._makeAbs = function (f) { + return common.makeAbs(this, f) +} + +Glob.prototype.abort = function () { + this.aborted = true + this.emit('abort') +} + +Glob.prototype.pause = function () { + if (!this.paused) { + this.paused = true + this.emit('pause') + } +} + +Glob.prototype.resume = function () { + if (this.paused) { + this.emit('resume') + this.paused = false + if (this._emitQueue.length) { + var eq = this._emitQueue.slice(0) + this._emitQueue.length = 0 + for (var i = 0; i < eq.length; i ++) { + var e = eq[i] + this._emitMatch(e[0], e[1]) + } + } + if (this._processQueue.length) { + var pq = this._processQueue.slice(0) + this._processQueue.length = 0 + for (var i = 0; i < pq.length; i ++) { + var p = pq[i] + this._processing-- + this._process(p[0], p[1], p[2], p[3]) + } + } + } +} + +Glob.prototype._process = function (pattern, index, inGlobStar, cb) { + assert(this instanceof Glob) + assert(typeof cb === 'function') + + if (this.aborted) + return + + this._processing++ + if (this.paused) { + this._processQueue.push([pattern, index, inGlobStar, cb]) + return + } + + //console.error('PROCESS %d', this._processing, pattern) + + // Get the first [n] parts of pattern that are all strings. + var n = 0 + while (typeof pattern[n] === 'string') { + n ++ + } + // now n is the index of the first one that is *not* a string. 
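+  // e.g. for the pattern 'a/b/*/c' the set entry is ['a', 'b', (compiled matcher), 'c'], so n === 2 at this point (illustrative)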
+ + // see if there's anything else + var prefix + switch (n) { + // if not, then this is rather simple + case pattern.length: + this._processSimple(pattern.join('/'), index, cb) + return + + case 0: + // pattern *starts* with some non-trivial item. + // going to readdir(cwd), but not include the prefix in matches. + prefix = null + break + + default: + // pattern has some string bits in the front. + // whatever it starts with, whether that's 'absolute' like /foo/bar, + // or 'relative' like '../baz' + prefix = pattern.slice(0, n).join('/') + break + } + + var remain = pattern.slice(n) + + // get the list of entries. + var read + if (prefix === null) + read = '.' + else if (isAbsolute(prefix) || isAbsolute(pattern.join('/'))) { + if (!prefix || !isAbsolute(prefix)) + prefix = '/' + prefix + read = prefix + } else + read = prefix + + var abs = this._makeAbs(read) + + //if ignored, skip _processing + if (childrenIgnored(this, read)) + return cb() + + var isGlobStar = remain[0] === minimatch.GLOBSTAR + if (isGlobStar) + this._processGlobStar(prefix, read, abs, remain, index, inGlobStar, cb) + else + this._processReaddir(prefix, read, abs, remain, index, inGlobStar, cb) +} + +Glob.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar, cb) { + var self = this + this._readdir(abs, inGlobStar, function (er, entries) { + return self._processReaddir2(prefix, read, abs, remain, index, inGlobStar, entries, cb) + }) +} + +Glob.prototype._processReaddir2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) { + + // if the abs isn't a dir, then nothing can match! + if (!entries) + return cb() + + // It will only match dot entries if it starts with a dot, or if + // dot is set. Stuff like @(.foo|.bar) isn't allowed. + var pn = remain[0] + var negate = !!this.minimatch.negate + var rawGlob = pn._glob + var dotOk = this.dot || rawGlob.charAt(0) === '.' + + var matchedEntries = [] + for (var i = 0; i < entries.length; i++) { + var e = entries[i] + if (e.charAt(0) !== '.' || dotOk) { + var m + if (negate && !prefix) { + m = !e.match(pn) + } else { + m = e.match(pn) + } + if (m) + matchedEntries.push(e) + } + } + + //console.error('prd2', prefix, entries, remain[0]._glob, matchedEntries) + + var len = matchedEntries.length + // If there are no matched entries, then nothing matches. + if (len === 0) + return cb() + + // if this is the last remaining pattern bit, then no need for + // an additional stat *unless* the user has specified mark or + // stat explicitly. We know they exist, since readdir returned + // them. + + if (remain.length === 1 && !this.mark && !this.stat) { + if (!this.matches[index]) + this.matches[index] = Object.create(null) + + for (var i = 0; i < len; i ++) { + var e = matchedEntries[i] + if (prefix) { + if (prefix !== '/') + e = prefix + '/' + e + else + e = prefix + e + } + + if (e.charAt(0) === '/' && !this.nomount) { + e = path.join(this.root, e) + } + this._emitMatch(index, e) + } + // This was the last one, and no stats were needed + return cb() + } + + // now test all matched entries as stand-ins for that part + // of the pattern. 
+ remain.shift() + for (var i = 0; i < len; i ++) { + var e = matchedEntries[i] + var newPattern + if (prefix) { + if (prefix !== '/') + e = prefix + '/' + e + else + e = prefix + e + } + this._process([e].concat(remain), index, inGlobStar, cb) + } + cb() +} + +Glob.prototype._emitMatch = function (index, e) { + if (this.aborted) + return + + if (isIgnored(this, e)) + return + + if (this.paused) { + this._emitQueue.push([index, e]) + return + } + + var abs = isAbsolute(e) ? e : this._makeAbs(e) + + if (this.mark) + e = this._mark(e) + + if (this.absolute) + e = abs + + if (this.matches[index][e]) + return + + if (this.nodir) { + var c = this.cache[abs] + if (c === 'DIR' || Array.isArray(c)) + return + } + + this.matches[index][e] = true + + var st = this.statCache[abs] + if (st) + this.emit('stat', e, st) + + this.emit('match', e) +} + +Glob.prototype._readdirInGlobStar = function (abs, cb) { + if (this.aborted) + return + + // follow all symlinked directories forever + // just proceed as if this is a non-globstar situation + if (this.follow) + return this._readdir(abs, false, cb) + + var lstatkey = 'lstat\0' + abs + var self = this + var lstatcb = inflight(lstatkey, lstatcb_) + + if (lstatcb) + fs.lstat(abs, lstatcb) + + function lstatcb_ (er, lstat) { + if (er && er.code === 'ENOENT') + return cb() + + var isSym = lstat && lstat.isSymbolicLink() + self.symlinks[abs] = isSym + + // If it's not a symlink or a dir, then it's definitely a regular file. + // don't bother doing a readdir in that case. + if (!isSym && lstat && !lstat.isDirectory()) { + self.cache[abs] = 'FILE' + cb() + } else + self._readdir(abs, false, cb) + } +} + +Glob.prototype._readdir = function (abs, inGlobStar, cb) { + if (this.aborted) + return + + cb = inflight('readdir\0'+abs+'\0'+inGlobStar, cb) + if (!cb) + return + + //console.error('RD %j %j', +inGlobStar, abs) + if (inGlobStar && !ownProp(this.symlinks, abs)) + return this._readdirInGlobStar(abs, cb) + + if (ownProp(this.cache, abs)) { + var c = this.cache[abs] + if (!c || c === 'FILE') + return cb() + + if (Array.isArray(c)) + return cb(null, c) + } + + var self = this + fs.readdir(abs, readdirCb(this, abs, cb)) +} + +function readdirCb (self, abs, cb) { + return function (er, entries) { + if (er) + self._readdirError(abs, er, cb) + else + self._readdirEntries(abs, entries, cb) + } +} + +Glob.prototype._readdirEntries = function (abs, entries, cb) { + if (this.aborted) + return + + // if we haven't asked to stat everything, then just + // assume that everything in there exists, so we can avoid + // having to stat it a second time. + if (!this.mark && !this.stat) { + for (var i = 0; i < entries.length; i ++) { + var e = entries[i] + if (abs === '/') + e = abs + e + else + e = abs + '/' + e + this.cache[e] = true + } + } + + this.cache[abs] = entries + return cb(null, entries) +} + +Glob.prototype._readdirError = function (f, er, cb) { + if (this.aborted) + return + + // handle errors, and cache the information + switch (er.code) { + case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205 + case 'ENOTDIR': // totally normal. means it *does* exist. 
+ var abs = this._makeAbs(f) + this.cache[abs] = 'FILE' + if (abs === this.cwdAbs) { + var error = new Error(er.code + ' invalid cwd ' + this.cwd) + error.path = this.cwd + error.code = er.code + this.emit('error', error) + this.abort() + } + break + + case 'ENOENT': // not terribly unusual + case 'ELOOP': + case 'ENAMETOOLONG': + case 'UNKNOWN': + this.cache[this._makeAbs(f)] = false + break + + default: // some unusual error. Treat as failure. + this.cache[this._makeAbs(f)] = false + if (this.strict) { + this.emit('error', er) + // If the error is handled, then we abort + // if not, we threw out of here + this.abort() + } + if (!this.silent) + console.error('glob error', er) + break + } + + return cb() +} + +Glob.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar, cb) { + var self = this + this._readdir(abs, inGlobStar, function (er, entries) { + self._processGlobStar2(prefix, read, abs, remain, index, inGlobStar, entries, cb) + }) +} + + +Glob.prototype._processGlobStar2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) { + //console.error('pgs2', prefix, remain[0], entries) + + // no entries means not a dir, so it can never have matches + // foo.txt/** doesn't match foo.txt + if (!entries) + return cb() + + // test without the globstar, and with every child both below + // and replacing the globstar. + var remainWithoutGlobStar = remain.slice(1) + var gspref = prefix ? [ prefix ] : [] + var noGlobStar = gspref.concat(remainWithoutGlobStar) + + // the noGlobStar pattern exits the inGlobStar state + this._process(noGlobStar, index, false, cb) + + var isSym = this.symlinks[abs] + var len = entries.length + + // If it's a symlink, and we're in a globstar, then stop + if (isSym && inGlobStar) + return cb() + + for (var i = 0; i < len; i++) { + var e = entries[i] + if (e.charAt(0) === '.' && !this.dot) + continue + + // these two cases enter the inGlobStar state + var instead = gspref.concat(entries[i], remainWithoutGlobStar) + this._process(instead, index, true, cb) + + var below = gspref.concat(entries[i], remain) + this._process(below, index, true, cb) + } + + cb() +} + +Glob.prototype._processSimple = function (prefix, index, cb) { + // XXX review this. Shouldn't it be doing the mounting etc + // before doing stat? kinda weird? 
+ var self = this + this._stat(prefix, function (er, exists) { + self._processSimple2(prefix, index, er, exists, cb) + }) +} +Glob.prototype._processSimple2 = function (prefix, index, er, exists, cb) { + + //console.error('ps2', prefix, exists) + + if (!this.matches[index]) + this.matches[index] = Object.create(null) + + // If it doesn't exist, then just mark the lack of results + if (!exists) + return cb() + + if (prefix && isAbsolute(prefix) && !this.nomount) { + var trail = /[\/\\]$/.test(prefix) + if (prefix.charAt(0) === '/') { + prefix = path.join(this.root, prefix) + } else { + prefix = path.resolve(this.root, prefix) + if (trail) + prefix += '/' + } + } + + if (process.platform === 'win32') + prefix = prefix.replace(/\\/g, '/') + + // Mark this as a match + this._emitMatch(index, prefix) + cb() +} + +// Returns either 'DIR', 'FILE', or false +Glob.prototype._stat = function (f, cb) { + var abs = this._makeAbs(f) + var needDir = f.slice(-1) === '/' + + if (f.length > this.maxLength) + return cb() + + if (!this.stat && ownProp(this.cache, abs)) { + var c = this.cache[abs] + + if (Array.isArray(c)) + c = 'DIR' + + // It exists, but maybe not how we need it + if (!needDir || c === 'DIR') + return cb(null, c) + + if (needDir && c === 'FILE') + return cb() + + // otherwise we have to stat, because maybe c=true + // if we know it exists, but not what it is. + } + + var exists + var stat = this.statCache[abs] + if (stat !== undefined) { + if (stat === false) + return cb(null, stat) + else { + var type = stat.isDirectory() ? 'DIR' : 'FILE' + if (needDir && type === 'FILE') + return cb() + else + return cb(null, type, stat) + } + } + + var self = this + var statcb = inflight('stat\0' + abs, lstatcb_) + if (statcb) + fs.lstat(abs, statcb) + + function lstatcb_ (er, lstat) { + if (lstat && lstat.isSymbolicLink()) { + // If it's a symlink, then treat it as the target, unless + // the target does not exist, then treat it as a file. + return fs.stat(abs, function (er, stat) { + if (er) + self._stat2(f, abs, null, lstat, cb) + else + self._stat2(f, abs, er, stat, cb) + }) + } else { + self._stat2(f, abs, er, lstat, cb) + } + } +} + +Glob.prototype._stat2 = function (f, abs, er, stat, cb) { + if (er && (er.code === 'ENOENT' || er.code === 'ENOTDIR')) { + this.statCache[abs] = false + return cb() + } + + var needDir = f.slice(-1) === '/' + this.statCache[abs] = stat + + if (abs.slice(-1) === '/' && stat && !stat.isDirectory()) + return cb(null, false, stat) + + var c = true + if (stat) + c = stat.isDirectory() ? 'DIR' : 'FILE' + this.cache[abs] = this.cache[abs] || c + + if (needDir && c === 'FILE') + return cb() + + return cb(null, c, stat) +} diff --git a/node_modules/glob/package.json b/node_modules/glob/package.json new file mode 100644 index 0000000..6477c30 --- /dev/null +++ b/node_modules/glob/package.json @@ -0,0 +1,46 @@ +{ + "author": "Isaac Z. 
Schlueter (http://blog.izs.me/)", + "name": "glob", + "description": "a little globber", + "version": "7.1.6", + "repository": { + "type": "git", + "url": "git://github.com/isaacs/node-glob.git" + }, + "main": "glob.js", + "files": [ + "glob.js", + "sync.js", + "common.js" + ], + "engines": { + "node": "*" + }, + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "devDependencies": { + "mkdirp": "0", + "rimraf": "^2.2.8", + "tap": "^12.0.1", + "tick": "0.0.6" + }, + "scripts": { + "prepublish": "npm run benchclean", + "profclean": "rm -f v8.log profile.txt", + "test": "tap test/*.js --cov", + "test-regen": "npm run profclean && TEST_REGEN=1 node test/00-setup.js", + "bench": "bash benchmark.sh", + "prof": "bash prof.sh && cat profile.txt", + "benchclean": "node benchclean.js" + }, + "license": "ISC", + "funding": { + "url": "https://github.com/sponsors/isaacs" + } +} diff --git a/node_modules/glob/sync.js b/node_modules/glob/sync.js new file mode 100644 index 0000000..c952134 --- /dev/null +++ b/node_modules/glob/sync.js @@ -0,0 +1,486 @@ +module.exports = globSync +globSync.GlobSync = GlobSync + +var fs = require('fs') +var rp = require('fs.realpath') +var minimatch = require('minimatch') +var Minimatch = minimatch.Minimatch +var Glob = require('./glob.js').Glob +var util = require('util') +var path = require('path') +var assert = require('assert') +var isAbsolute = require('path-is-absolute') +var common = require('./common.js') +var alphasort = common.alphasort +var alphasorti = common.alphasorti +var setopts = common.setopts +var ownProp = common.ownProp +var childrenIgnored = common.childrenIgnored +var isIgnored = common.isIgnored + +function globSync (pattern, options) { + if (typeof options === 'function' || arguments.length === 3) + throw new TypeError('callback provided to sync glob\n'+ + 'See: https://github.com/isaacs/node-glob/issues/167') + + return new GlobSync(pattern, options).found +} + +function GlobSync (pattern, options) { + if (!pattern) + throw new Error('must provide pattern') + + if (typeof options === 'function' || arguments.length === 3) + throw new TypeError('callback provided to sync glob\n'+ + 'See: https://github.com/isaacs/node-glob/issues/167') + + if (!(this instanceof GlobSync)) + return new GlobSync(pattern, options) + + setopts(this, pattern, options) + + if (this.noprocess) + return this + + var n = this.minimatch.set.length + this.matches = new Array(n) + for (var i = 0; i < n; i ++) { + this._process(this.minimatch.set[i], i, false) + } + this._finish() +} + +GlobSync.prototype._finish = function () { + assert(this instanceof GlobSync) + if (this.realpath) { + var self = this + this.matches.forEach(function (matchset, index) { + var set = self.matches[index] = Object.create(null) + for (var p in matchset) { + try { + p = self._makeAbs(p) + var real = rp.realpathSync(p, self.realpathCache) + set[real] = true + } catch (er) { + if (er.syscall === 'stat') + set[self._makeAbs(p)] = true + else + throw er + } + } + }) + } + common.finish(this) +} + + +GlobSync.prototype._process = function (pattern, index, inGlobStar) { + assert(this instanceof GlobSync) + + // Get the first [n] parts of pattern that are all strings. + var n = 0 + while (typeof pattern[n] === 'string') { + n ++ + } + // now n is the index of the first one that is *not* a string. 
+ + // See if there's anything else + var prefix + switch (n) { + // if not, then this is rather simple + case pattern.length: + this._processSimple(pattern.join('/'), index) + return + + case 0: + // pattern *starts* with some non-trivial item. + // going to readdir(cwd), but not include the prefix in matches. + prefix = null + break + + default: + // pattern has some string bits in the front. + // whatever it starts with, whether that's 'absolute' like /foo/bar, + // or 'relative' like '../baz' + prefix = pattern.slice(0, n).join('/') + break + } + + var remain = pattern.slice(n) + + // get the list of entries. + var read + if (prefix === null) + read = '.' + else if (isAbsolute(prefix) || isAbsolute(pattern.join('/'))) { + if (!prefix || !isAbsolute(prefix)) + prefix = '/' + prefix + read = prefix + } else + read = prefix + + var abs = this._makeAbs(read) + + //if ignored, skip processing + if (childrenIgnored(this, read)) + return + + var isGlobStar = remain[0] === minimatch.GLOBSTAR + if (isGlobStar) + this._processGlobStar(prefix, read, abs, remain, index, inGlobStar) + else + this._processReaddir(prefix, read, abs, remain, index, inGlobStar) +} + + +GlobSync.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar) { + var entries = this._readdir(abs, inGlobStar) + + // if the abs isn't a dir, then nothing can match! + if (!entries) + return + + // It will only match dot entries if it starts with a dot, or if + // dot is set. Stuff like @(.foo|.bar) isn't allowed. + var pn = remain[0] + var negate = !!this.minimatch.negate + var rawGlob = pn._glob + var dotOk = this.dot || rawGlob.charAt(0) === '.' + + var matchedEntries = [] + for (var i = 0; i < entries.length; i++) { + var e = entries[i] + if (e.charAt(0) !== '.' || dotOk) { + var m + if (negate && !prefix) { + m = !e.match(pn) + } else { + m = e.match(pn) + } + if (m) + matchedEntries.push(e) + } + } + + var len = matchedEntries.length + // If there are no matched entries, then nothing matches. + if (len === 0) + return + + // if this is the last remaining pattern bit, then no need for + // an additional stat *unless* the user has specified mark or + // stat explicitly. We know they exist, since readdir returned + // them. + + if (remain.length === 1 && !this.mark && !this.stat) { + if (!this.matches[index]) + this.matches[index] = Object.create(null) + + for (var i = 0; i < len; i ++) { + var e = matchedEntries[i] + if (prefix) { + if (prefix.slice(-1) !== '/') + e = prefix + '/' + e + else + e = prefix + e + } + + if (e.charAt(0) === '/' && !this.nomount) { + e = path.join(this.root, e) + } + this._emitMatch(index, e) + } + // This was the last one, and no stats were needed + return + } + + // now test all matched entries as stand-ins for that part + // of the pattern. 
+ remain.shift() + for (var i = 0; i < len; i ++) { + var e = matchedEntries[i] + var newPattern + if (prefix) + newPattern = [prefix, e] + else + newPattern = [e] + this._process(newPattern.concat(remain), index, inGlobStar) + } +} + + +GlobSync.prototype._emitMatch = function (index, e) { + if (isIgnored(this, e)) + return + + var abs = this._makeAbs(e) + + if (this.mark) + e = this._mark(e) + + if (this.absolute) { + e = abs + } + + if (this.matches[index][e]) + return + + if (this.nodir) { + var c = this.cache[abs] + if (c === 'DIR' || Array.isArray(c)) + return + } + + this.matches[index][e] = true + + if (this.stat) + this._stat(e) +} + + +GlobSync.prototype._readdirInGlobStar = function (abs) { + // follow all symlinked directories forever + // just proceed as if this is a non-globstar situation + if (this.follow) + return this._readdir(abs, false) + + var entries + var lstat + var stat + try { + lstat = fs.lstatSync(abs) + } catch (er) { + if (er.code === 'ENOENT') { + // lstat failed, doesn't exist + return null + } + } + + var isSym = lstat && lstat.isSymbolicLink() + this.symlinks[abs] = isSym + + // If it's not a symlink or a dir, then it's definitely a regular file. + // don't bother doing a readdir in that case. + if (!isSym && lstat && !lstat.isDirectory()) + this.cache[abs] = 'FILE' + else + entries = this._readdir(abs, false) + + return entries +} + +GlobSync.prototype._readdir = function (abs, inGlobStar) { + var entries + + if (inGlobStar && !ownProp(this.symlinks, abs)) + return this._readdirInGlobStar(abs) + + if (ownProp(this.cache, abs)) { + var c = this.cache[abs] + if (!c || c === 'FILE') + return null + + if (Array.isArray(c)) + return c + } + + try { + return this._readdirEntries(abs, fs.readdirSync(abs)) + } catch (er) { + this._readdirError(abs, er) + return null + } +} + +GlobSync.prototype._readdirEntries = function (abs, entries) { + // if we haven't asked to stat everything, then just + // assume that everything in there exists, so we can avoid + // having to stat it a second time. + if (!this.mark && !this.stat) { + for (var i = 0; i < entries.length; i ++) { + var e = entries[i] + if (abs === '/') + e = abs + e + else + e = abs + '/' + e + this.cache[e] = true + } + } + + this.cache[abs] = entries + + // mark and cache dir-ness + return entries +} + +GlobSync.prototype._readdirError = function (f, er) { + // handle errors, and cache the information + switch (er.code) { + case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205 + case 'ENOTDIR': // totally normal. means it *does* exist. + var abs = this._makeAbs(f) + this.cache[abs] = 'FILE' + if (abs === this.cwdAbs) { + var error = new Error(er.code + ' invalid cwd ' + this.cwd) + error.path = this.cwd + error.code = er.code + throw error + } + break + + case 'ENOENT': // not terribly unusual + case 'ELOOP': + case 'ENAMETOOLONG': + case 'UNKNOWN': + this.cache[this._makeAbs(f)] = false + break + + default: // some unusual error. Treat as failure. + this.cache[this._makeAbs(f)] = false + if (this.strict) + throw er + if (!this.silent) + console.error('glob error', er) + break + } +} + +GlobSync.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar) { + + var entries = this._readdir(abs, inGlobStar) + + // no entries means not a dir, so it can never have matches + // foo.txt/** doesn't match foo.txt + if (!entries) + return + + // test without the globstar, and with every child both below + // and replacing the globstar. 
+ var remainWithoutGlobStar = remain.slice(1) + var gspref = prefix ? [ prefix ] : [] + var noGlobStar = gspref.concat(remainWithoutGlobStar) + + // the noGlobStar pattern exits the inGlobStar state + this._process(noGlobStar, index, false) + + var len = entries.length + var isSym = this.symlinks[abs] + + // If it's a symlink, and we're in a globstar, then stop + if (isSym && inGlobStar) + return + + for (var i = 0; i < len; i++) { + var e = entries[i] + if (e.charAt(0) === '.' && !this.dot) + continue + + // these two cases enter the inGlobStar state + var instead = gspref.concat(entries[i], remainWithoutGlobStar) + this._process(instead, index, true) + + var below = gspref.concat(entries[i], remain) + this._process(below, index, true) + } +} + +GlobSync.prototype._processSimple = function (prefix, index) { + // XXX review this. Shouldn't it be doing the mounting etc + // before doing stat? kinda weird? + var exists = this._stat(prefix) + + if (!this.matches[index]) + this.matches[index] = Object.create(null) + + // If it doesn't exist, then just mark the lack of results + if (!exists) + return + + if (prefix && isAbsolute(prefix) && !this.nomount) { + var trail = /[\/\\]$/.test(prefix) + if (prefix.charAt(0) === '/') { + prefix = path.join(this.root, prefix) + } else { + prefix = path.resolve(this.root, prefix) + if (trail) + prefix += '/' + } + } + + if (process.platform === 'win32') + prefix = prefix.replace(/\\/g, '/') + + // Mark this as a match + this._emitMatch(index, prefix) +} + +// Returns either 'DIR', 'FILE', or false +GlobSync.prototype._stat = function (f) { + var abs = this._makeAbs(f) + var needDir = f.slice(-1) === '/' + + if (f.length > this.maxLength) + return false + + if (!this.stat && ownProp(this.cache, abs)) { + var c = this.cache[abs] + + if (Array.isArray(c)) + c = 'DIR' + + // It exists, but maybe not how we need it + if (!needDir || c === 'DIR') + return c + + if (needDir && c === 'FILE') + return false + + // otherwise we have to stat, because maybe c=true + // if we know it exists, but not what it is. + } + + var exists + var stat = this.statCache[abs] + if (!stat) { + var lstat + try { + lstat = fs.lstatSync(abs) + } catch (er) { + if (er && (er.code === 'ENOENT' || er.code === 'ENOTDIR')) { + this.statCache[abs] = false + return false + } + } + + if (lstat && lstat.isSymbolicLink()) { + try { + stat = fs.statSync(abs) + } catch (er) { + stat = lstat + } + } else { + stat = lstat + } + } + + this.statCache[abs] = stat + + var c = true + if (stat) + c = stat.isDirectory() ? 'DIR' : 'FILE' + + this.cache[abs] = this.cache[abs] || c + + if (needDir && c === 'FILE') + return false + + return c +} + +GlobSync.prototype._mark = function (p) { + return common.mark(this, p) +} + +GlobSync.prototype._makeAbs = function (f) { + return common.makeAbs(this, f) +} diff --git a/node_modules/inflight/LICENSE b/node_modules/inflight/LICENSE new file mode 100644 index 0000000..05eeeb8 --- /dev/null +++ b/node_modules/inflight/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/inflight/README.md b/node_modules/inflight/README.md new file mode 100644 index 0000000..6dc8929 --- /dev/null +++ b/node_modules/inflight/README.md @@ -0,0 +1,37 @@ +# inflight + +Add callbacks to requests in flight to avoid async duplication + +## USAGE + +```javascript +var inflight = require('inflight') + +// some request that does some stuff +function req(key, callback) { + // key is any random string. like a url or filename or whatever. + // + // will return either a falsey value, indicating that the + // request for this key is already in flight, or a new callback + // which when called will call all callbacks passed to inflightk + // with the same key + callback = inflight(key, callback) + + // If we got a falsey value back, then there's already a req going + if (!callback) return + + // this is where you'd fetch the url or whatever + // callback is also once()-ified, so it can safely be assigned + // to multiple events etc. First call wins. + setTimeout(function() { + callback(null, key) + }, 100) +} + +// only assigns a single setTimeout +// when it dings, all cbs get called +req('foo', cb1) +req('foo', cb2) +req('foo', cb3) +req('foo', cb4) +``` diff --git a/node_modules/inflight/inflight.js b/node_modules/inflight/inflight.js new file mode 100644 index 0000000..48202b3 --- /dev/null +++ b/node_modules/inflight/inflight.js @@ -0,0 +1,54 @@ +var wrappy = require('wrappy') +var reqs = Object.create(null) +var once = require('once') + +module.exports = wrappy(inflight) + +function inflight (key, cb) { + if (reqs[key]) { + reqs[key].push(cb) + return null + } else { + reqs[key] = [cb] + return makeres(key) + } +} + +function makeres (key) { + return once(function RES () { + var cbs = reqs[key] + var len = cbs.length + var args = slice(arguments) + + // XXX It's somewhat ambiguous whether a new callback added in this + // pass should be queued for later execution if something in the + // list of callbacks throws, or if it should just be discarded. + // However, it's such an edge case that it hardly matters, and either + // choice is likely as surprising as the other. + // As it happens, we do go ahead and schedule it for later execution. + try { + for (var i = 0; i < len; i++) { + cbs[i].apply(null, args) + } + } finally { + if (cbs.length > len) { + // added more in the interim. + // de-zalgo, just in case, but don't call again. 
+ cbs.splice(0, len) + process.nextTick(function () { + RES.apply(null, args) + }) + } else { + delete reqs[key] + } + } + }) +} + +function slice (args) { + var length = args.length + var array = [] + + for (var i = 0; i < length; i++) array[i] = args[i] + return array +} diff --git a/node_modules/inflight/package.json b/node_modules/inflight/package.json new file mode 100644 index 0000000..6084d35 --- /dev/null +++ b/node_modules/inflight/package.json @@ -0,0 +1,29 @@ +{ + "name": "inflight", + "version": "1.0.6", + "description": "Add callbacks to requests in flight to avoid async duplication", + "main": "inflight.js", + "files": [ + "inflight.js" + ], + "dependencies": { + "once": "^1.3.0", + "wrappy": "1" + }, + "devDependencies": { + "tap": "^7.1.2" + }, + "scripts": { + "test": "tap test.js --100" + }, + "repository": { + "type": "git", + "url": "https://github.com/npm/inflight.git" + }, + "author": "Isaac Z. Schlueter (http://blog.izs.me/)", + "bugs": { + "url": "https://github.com/isaacs/inflight/issues" + }, + "homepage": "https://github.com/isaacs/inflight", + "license": "ISC" +} diff --git a/node_modules/is-binary-path/index.d.ts b/node_modules/is-binary-path/index.d.ts new file mode 100644 index 0000000..19dcd43 --- /dev/null +++ b/node_modules/is-binary-path/index.d.ts @@ -0,0 +1,17 @@ +/** +Check if a file path is a binary file. + +@example +``` +import isBinaryPath = require('is-binary-path'); + +isBinaryPath('source/unicorn.png'); +//=> true + +isBinaryPath('source/unicorn.txt'); +//=> false +``` +*/ +declare function isBinaryPath(filePath: string): boolean; + +export = isBinaryPath; diff --git a/node_modules/is-binary-path/index.js b/node_modules/is-binary-path/index.js new file mode 100644 index 0000000..ef7548c --- /dev/null +++ b/node_modules/is-binary-path/index.js @@ -0,0 +1,7 @@ +'use strict'; +const path = require('path'); +const binaryExtensions = require('binary-extensions'); + +const extensions = new Set(binaryExtensions); + +module.exports = filePath => extensions.has(path.extname(filePath).slice(1).toLowerCase()); diff --git a/node_modules/is-binary-path/license b/node_modules/is-binary-path/license new file mode 100644 index 0000000..401b1c7 --- /dev/null +++ b/node_modules/is-binary-path/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) 2019 Sindre Sorhus (https://sindresorhus.com), Paul Miller (https://paulmillr.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
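
Editorial note: the `inflight` module vendored just above is what `glob.js` uses to coalesce concurrent `lstat` calls for the same path (`inflight('stat\0' + abs, lstatcb_)` in `_stat`). As a minimal illustrative sketch, assuming only the documented `inflight(key, cb)` contract from the README above (it returns `null` when a request for that key is already in flight, otherwise a wrapped callback that fans out to every registered callback), the same deduplication pattern around `fs.stat` looks roughly like this; the sketch is editorial and not part of any vendored file:

```js
// Minimal sketch (editorial, not part of the vendored package): coalesce
// duplicate async work with inflight, the same pattern glob.js uses for stat.
var fs = require('fs')
var inflight = require('inflight')

function statOnce (file, cb) {
  // All callers register under the same key; only the first caller gets a
  // non-null wrapped callback back and actually issues the fs.stat call.
  cb = inflight('stat\0' + file, cb)
  if (!cb) return // a stat for this file is already in flight

  fs.stat(file, cb) // when it completes, every registered callback fires once
}

// Three concurrent requests, a single fs.stat syscall.
statOnce('package.json', function (er, st) { console.log('a', st && st.size) })
statOnce('package.json', function (er, st) { console.log('b', st && st.size) })
statOnce('package.json', function (er, st) { console.log('c', st && st.size) })
```
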
diff --git a/node_modules/is-binary-path/package.json b/node_modules/is-binary-path/package.json new file mode 100644 index 0000000..a8d005a --- /dev/null +++ b/node_modules/is-binary-path/package.json @@ -0,0 +1,40 @@ +{ + "name": "is-binary-path", + "version": "2.1.0", + "description": "Check if a file path is a binary file", + "license": "MIT", + "repository": "sindresorhus/is-binary-path", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=8" + }, + "scripts": { + "test": "xo && ava && tsd" + }, + "files": [ + "index.js", + "index.d.ts" + ], + "keywords": [ + "binary", + "extensions", + "extension", + "file", + "path", + "check", + "detect", + "is" + ], + "dependencies": { + "binary-extensions": "^2.0.0" + }, + "devDependencies": { + "ava": "^1.4.1", + "tsd": "^0.7.2", + "xo": "^0.24.0" + } +} diff --git a/node_modules/is-binary-path/readme.md b/node_modules/is-binary-path/readme.md new file mode 100644 index 0000000..b4ab025 --- /dev/null +++ b/node_modules/is-binary-path/readme.md @@ -0,0 +1,34 @@ +# is-binary-path [![Build Status](https://travis-ci.org/sindresorhus/is-binary-path.svg?branch=master)](https://travis-ci.org/sindresorhus/is-binary-path) + +> Check if a file path is a binary file + + +## Install + +``` +$ npm install is-binary-path +``` + + +## Usage + +```js +const isBinaryPath = require('is-binary-path'); + +isBinaryPath('source/unicorn.png'); +//=> true + +isBinaryPath('source/unicorn.txt'); +//=> false +``` + + +## Related + +- [binary-extensions](https://github.com/sindresorhus/binary-extensions) - List of binary file extensions +- [is-text-path](https://github.com/sindresorhus/is-text-path) - Check if a filepath is a text file + + +## License + +MIT © [Sindre Sorhus](https://sindresorhus.com), [Paul Miller](https://paulmillr.com) diff --git a/node_modules/is-core-module/.eslintrc b/node_modules/is-core-module/.eslintrc new file mode 100644 index 0000000..f2e0726 --- /dev/null +++ b/node_modules/is-core-module/.eslintrc @@ -0,0 +1,18 @@ +{ + "extends": "@ljharb", + "root": true, + "rules": { + "func-style": 1, + }, + "overrides": [ + { + "files": "test/**", + "rules": { + "global-require": 0, + "max-depth": 0, + "max-lines-per-function": 0, + "no-negated-condition": 0, + }, + }, + ], +} diff --git a/node_modules/is-core-module/.nycrc b/node_modules/is-core-module/.nycrc new file mode 100644 index 0000000..bdd626c --- /dev/null +++ b/node_modules/is-core-module/.nycrc @@ -0,0 +1,9 @@ +{ + "all": true, + "check-coverage": false, + "reporter": ["text-summary", "text", "html", "json"], + "exclude": [ + "coverage", + "test" + ] +} diff --git a/node_modules/is-core-module/CHANGELOG.md b/node_modules/is-core-module/CHANGELOG.md new file mode 100644 index 0000000..028384a --- /dev/null +++ b/node_modules/is-core-module/CHANGELOG.md @@ -0,0 +1,166 @@ +# Changelog + +All notable changes to this project will be documented in this file. + +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) +and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 
+ +## [v2.12.1](https://github.com/inspect-js/is-core-module/compare/v2.12.0...v2.12.1) - 2023-05-16 + +### Commits + +- [Fix] `test/reporters` now requires the `node:` prefix as of v20.2 [`12183d0`](https://github.com/inspect-js/is-core-module/commit/12183d0d8e4edf56b6ce18a1b3be54bfce10175b) + +## [v2.12.0](https://github.com/inspect-js/is-core-module/compare/v2.11.0...v2.12.0) - 2023-04-10 + +### Commits + +- [actions] update rebase action to use reusable workflow [`c0a7251`](https://github.com/inspect-js/is-core-module/commit/c0a7251f734f3c621932c5fcdfd1bf966b42ca32) +- [Dev Deps] update `@ljharb/eslint-config`, `aud`, `tape` [`9ae8b7f`](https://github.com/inspect-js/is-core-module/commit/9ae8b7fac03c369861d0991b4a2ce8d4848e6a7d) +- [New] `test/reporters` added in v19.9, `wasi` added in v20 [`9d5341a`](https://github.com/inspect-js/is-core-module/commit/9d5341ab32053f25b7fa7db3c0e18461db24a79c) +- [Dev Deps] add missing `in-publish` dep [`5980245`](https://github.com/inspect-js/is-core-module/commit/59802456e9ac919fa748f53be9d8fbf304a197df) + +## [v2.11.0](https://github.com/inspect-js/is-core-module/compare/v2.10.0...v2.11.0) - 2022-10-18 + +### Commits + +- [meta] use `npmignore` to autogenerate an npmignore file [`3360011`](https://github.com/inspect-js/is-core-module/commit/33600118857b46177178072fba2affcdeb009d12) +- [Dev Deps] update `aud`, `tape` [`651c6b0`](https://github.com/inspect-js/is-core-module/commit/651c6b0cc2799d4130866cf43ad333dcade3d26c) +- [New] `inspector/promises` and `node:inspector/promises` is now available in node 19 [`22d332f`](https://github.com/inspect-js/is-core-module/commit/22d332fe22ac050305444e0781ff85af819abcb0) + +## [v2.10.0](https://github.com/inspect-js/is-core-module/compare/v2.9.0...v2.10.0) - 2022-08-03 + +### Commits + +- [New] `node:test` is now available in node ^16.17 [`e8fd36e`](https://github.com/inspect-js/is-core-module/commit/e8fd36e9b86c917775a07cc473b62a3294f459f2) +- [Tests] improve skip message [`c014a4c`](https://github.com/inspect-js/is-core-module/commit/c014a4c0cd6eb15fff573ae4709191775e70cab4) + +## [v2.9.0](https://github.com/inspect-js/is-core-module/compare/v2.8.1...v2.9.0) - 2022-04-19 + +### Commits + +- [New] add `node:test`, in node 18+ [`f853eca`](https://github.com/inspect-js/is-core-module/commit/f853eca801d0a7d4e1dbb670f1b6d9837d9533c5) +- [Tests] use `mock-property` [`03b3644`](https://github.com/inspect-js/is-core-module/commit/03b3644dff4417f4ba5a7d0aa0138f5f6b3e5c46) +- [Dev Deps] update `eslint`, `@ljharb/eslint-config`, `aud`, `auto-changelog`, `tape` [`7c0e2d0`](https://github.com/inspect-js/is-core-module/commit/7c0e2d06ed2a89acf53abe2ab34d703ed5b03455) +- [meta] simplify "exports" [`d6ed201`](https://github.com/inspect-js/is-core-module/commit/d6ed201eba7fbba0e59814a9050fc49a6e9878c8) + +## [v2.8.1](https://github.com/inspect-js/is-core-module/compare/v2.8.0...v2.8.1) - 2022-01-05 + +### Commits + +- [actions] reuse common workflows [`cd2cf9b`](https://github.com/inspect-js/is-core-module/commit/cd2cf9b3b66c8d328f65610efe41e9325db7716d) +- [Fix] update node 0.4 results [`062195d`](https://github.com/inspect-js/is-core-module/commit/062195d89f0876a88b95d378b43f7fcc1205bc5b) +- [Dev Deps] update `eslint`, `@ljharb/eslint-config`, `safe-publish-latest`, `tape` [`0790b62`](https://github.com/inspect-js/is-core-module/commit/0790b6222848c6167132f9f73acc3520fa8d1298) +- [Dev Deps] update `eslint`, `@ljharb/eslint-config`, `tape` 
[`7d139a6`](https://github.com/inspect-js/is-core-module/commit/7d139a6d767709eabf0a0251e074ec1fb230c06e) +- [Tests] run `nyc` in `tests-only`, not `test` [`780e8a0`](https://github.com/inspect-js/is-core-module/commit/780e8a049951c71cf78b1707f0871c48a28bde14) + +## [v2.8.0](https://github.com/inspect-js/is-core-module/compare/v2.7.0...v2.8.0) - 2021-10-14 + +### Commits + +- [actions] update codecov uploader [`0cfe94e`](https://github.com/inspect-js/is-core-module/commit/0cfe94e106a7d005ea03e008c0a21dec13a77904) +- [New] add `readline/promises` to node v17+ [`4f78c30`](https://github.com/inspect-js/is-core-module/commit/4f78c3008b1b58b4db6dc91d99610b1bc859da7e) +- [Tests] node ^14.18 supports `node:` prefixes for CJS [`43e2f17`](https://github.com/inspect-js/is-core-module/commit/43e2f177452cea2f0eaf34f61b5407217bbdb6f4) + +## [v2.7.0](https://github.com/inspect-js/is-core-module/compare/v2.6.0...v2.7.0) - 2021-09-27 + +### Commits + +- [New] node `v14.18` added `node:`-prefixed core modules to `require` [`6d943ab`](https://github.com/inspect-js/is-core-module/commit/6d943abe81382b9bbe344384d80fbfebe1cc0526) +- [Tests] add coverage for Object.prototype pollution [`c6baf5f`](https://github.com/inspect-js/is-core-module/commit/c6baf5f942311a1945c1af41167bb80b84df2af7) +- [Dev Deps] update `@ljharb/eslint-config` [`6717f00`](https://github.com/inspect-js/is-core-module/commit/6717f000d063ea57beb772bded36c2f056ac404c) +- [eslint] fix linter warning [`594c10b`](https://github.com/inspect-js/is-core-module/commit/594c10bb7d39d7eb00925c90924199ff596184b2) +- [meta] add `sideEffects` flag [`c32cfa5`](https://github.com/inspect-js/is-core-module/commit/c32cfa5195632944c4dd4284a142b8476e75be13) + +## [v2.6.0](https://github.com/inspect-js/is-core-module/compare/v2.5.0...v2.6.0) - 2021-08-17 + +### Commits + +- [Dev Deps] update `eslint`, `tape` [`6cc928f`](https://github.com/inspect-js/is-core-module/commit/6cc928f8a4bba66aeeccc4f6beeac736d4bd3081) +- [New] add `stream/consumers` to node `>= 16.7` [`a1a423e`](https://github.com/inspect-js/is-core-module/commit/a1a423e467e4cc27df180234fad5bab45943e67d) +- [Refactor] Remove duplicated `&&` operand [`86faea7`](https://github.com/inspect-js/is-core-module/commit/86faea738213a2433c62d1098488dc9314dca832) +- [Tests] include prereleases [`a4da7a6`](https://github.com/inspect-js/is-core-module/commit/a4da7a6abf7568e2aa4fd98e69452179f1850963) + +## [v2.5.0](https://github.com/inspect-js/is-core-module/compare/v2.4.0...v2.5.0) - 2021-07-12 + +### Commits + +- [Dev Deps] update `auto-changelog`, `eslint` [`6334cc9`](https://github.com/inspect-js/is-core-module/commit/6334cc94f3af7469685bd8f236740991baaf2705) +- [New] add `stream/web` to node v16.5+ [`17ac59b`](https://github.com/inspect-js/is-core-module/commit/17ac59b662d63e220a2e5728625f005c24f177b2) + +## [v2.4.0](https://github.com/inspect-js/is-core-module/compare/v2.3.0...v2.4.0) - 2021-05-09 + +### Commits + +- [readme] add actions and codecov badges [`82b7faa`](https://github.com/inspect-js/is-core-module/commit/82b7faa12b56dbe47fbea67e1a5b9e447027ba40) +- [Dev Deps] update `@ljharb/eslint-config`, `aud` [`8096868`](https://github.com/inspect-js/is-core-module/commit/8096868c024a161ccd4d44110b136763e92eace8) +- [Dev Deps] update `eslint` [`6726824`](https://github.com/inspect-js/is-core-module/commit/67268249b88230018c510f6532a8046d7326346f) +- [New] add `diagnostics_channel` to node `^14.17` [`86c6563`](https://github.com/inspect-js/is-core-module/commit/86c65634201b8ff9b3e48a9a782594579c7f5c3c) +- 
[meta] fix prepublish script [`697a01e`](https://github.com/inspect-js/is-core-module/commit/697a01e3c9c0be074066520954f30fb28532ec57) + +## [v2.3.0](https://github.com/inspect-js/is-core-module/compare/v2.2.0...v2.3.0) - 2021-04-24 + +### Commits + +- [meta] do not publish github action workflow files [`060d4bb`](https://github.com/inspect-js/is-core-module/commit/060d4bb971a29451c19ff336eb56bee27f9fa95a) +- [New] add support for `node:` prefix, in node 16+ [`7341223`](https://github.com/inspect-js/is-core-module/commit/73412230a769f6e81c05eea50b6520cebf54ed2f) +- [actions] use `node/install` instead of `node/run`; use `codecov` action [`016269a`](https://github.com/inspect-js/is-core-module/commit/016269abae9f6657a5254adfbb813f09a05067f9) +- [patch] remove unneeded `.0` in version ranges [`cb466a6`](https://github.com/inspect-js/is-core-module/commit/cb466a6d89e52b8389e5c12715efcd550c41cea3) +- [Dev Deps] update `eslint`, `@ljharb/eslint-config`, `aud`, `tape` [`c9f9c39`](https://github.com/inspect-js/is-core-module/commit/c9f9c396ace60ef81906f98059c064e6452473ed) +- [actions] update workflows [`3ee4a89`](https://github.com/inspect-js/is-core-module/commit/3ee4a89fd5a02fccd43882d905448ea6a98e9a3c) +- [Dev Deps] update `eslint`, `@ljharb/eslint-config` [`dee4fed`](https://github.com/inspect-js/is-core-module/commit/dee4fed79690c1d43a22f7fa9426abebdc6d727f) +- [Dev Deps] update `eslint`, `@ljharb/eslint-config` [`7d046ba`](https://github.com/inspect-js/is-core-module/commit/7d046ba07ae8c9292e43652694ca808d7b309de8) +- [meta] use `prepublishOnly` script for npm 7+ [`149e677`](https://github.com/inspect-js/is-core-module/commit/149e6771a5ede6d097e71785b467a9c4b4977cc7) +- [readme] remove travis badge [`903b51d`](https://github.com/inspect-js/is-core-module/commit/903b51d6b69b98abeabfbc3695c345b02646f19c) + +## [v2.2.0](https://github.com/inspect-js/is-core-module/compare/v2.1.0...v2.2.0) - 2020-11-26 + +### Commits + +- [Tests] migrate tests to Github Actions [`c919f57`](https://github.com/inspect-js/is-core-module/commit/c919f573c0a92d10a0acad0b650b5aecb033d426) +- [patch] `core.json`: %s/ /\t/g [`db3f685`](https://github.com/inspect-js/is-core-module/commit/db3f68581f53e73cc09cd675955eb1bdd6a5a39b) +- [Tests] run `nyc` on all tests [`b2f925f`](https://github.com/inspect-js/is-core-module/commit/b2f925f8866f210ef441f39fcc8cc42692ab89b1) +- [Dev Deps] update `eslint`, `@ljharb/eslint-config`, `aud`; add `safe-publish-latest` [`89f02a2`](https://github.com/inspect-js/is-core-module/commit/89f02a2b4162246dea303a6ee31bb9a550b05c72) +- [New] add `path/posix`, `path/win32`, `util/types` [`77f94f1`](https://github.com/inspect-js/is-core-module/commit/77f94f1e90ffd7c0be2a3f1aa8574ebf7fd981b3) + +## [v2.1.0](https://github.com/inspect-js/is-core-module/compare/v2.0.0...v2.1.0) - 2020-11-04 + +### Commits + +- [Dev Deps] update `eslint` [`5e0034e`](https://github.com/inspect-js/is-core-module/commit/5e0034eae57c09c8f1bd769f502486a00f56c6e4) +- [New] Add `diagnostics_channel` [`c2d83d0`](https://github.com/inspect-js/is-core-module/commit/c2d83d0a0225a1a658945d9bab7036ea347d29ec) + +## [v2.0.0](https://github.com/inspect-js/is-core-module/compare/v1.0.2...v2.0.0) - 2020-09-29 + +### Commits + +- v2 implementation [`865aeb5`](https://github.com/inspect-js/is-core-module/commit/865aeb5ca0e90248a3dfff5d7622e4751fdeb9cd) +- Only apps should have lockfiles [`5a5e660`](https://github.com/inspect-js/is-core-module/commit/5a5e660d568e37eb44e17fb1ebb12a105205fc2b) +- Initial commit for v2 
[`5a51524`](https://github.com/inspect-js/is-core-module/commit/5a51524e06f92adece5fbb138c69b7b9748a2348) +- Tests [`116eae4`](https://github.com/inspect-js/is-core-module/commit/116eae4fccd01bc72c1fd3cc4b7561c387afc496) +- [meta] add `auto-changelog` [`c24388b`](https://github.com/inspect-js/is-core-module/commit/c24388bee828d223040519d1f5b226ca35beee63) +- [actions] add "Automatic Rebase" and "require allow edits" actions [`34292db`](https://github.com/inspect-js/is-core-module/commit/34292dbcbadae0868aff03c22dbd8b7b8a11558a) +- [Tests] add `npm run lint` [`4f9eeee`](https://github.com/inspect-js/is-core-module/commit/4f9eeee7ddff10698bbf528620f4dc8d4fa3e697) +- [readme] fix travis badges, https all URLs [`e516a73`](https://github.com/inspect-js/is-core-module/commit/e516a73b0dccce20938c432b1ba512eae8eff9e9) +- [meta] create FUNDING.yml [`1aabebc`](https://github.com/inspect-js/is-core-module/commit/1aabebca98d01f8a04e46bc2e2520fa93cf21ac6) +- [Fix] `domain`: domain landed sometime > v0.7.7 and <= v0.7.12 [`2df7d37`](https://github.com/inspect-js/is-core-module/commit/2df7d37595d41b15eeada732b706b926c2771655) +- [Fix] `sys`: worked in 0.6, not 0.7, and 0.8+ [`a75c134`](https://github.com/inspect-js/is-core-module/commit/a75c134229e1e9441801f6b73f6a52489346eb65) + +## [v1.0.2](https://github.com/inspect-js/is-core-module/compare/v1.0.1...v1.0.2) - 2014-09-28 + +### Commits + +- simpler [`66fe90f`](https://github.com/inspect-js/is-core-module/commit/66fe90f9771581b9adc0c3900baa52c21b5baea2) + +## [v1.0.1](https://github.com/inspect-js/is-core-module/compare/v1.0.0...v1.0.1) - 2014-09-28 + +### Commits + +- remove stupid [`f21f906`](https://github.com/inspect-js/is-core-module/commit/f21f906f882c2bd656a5fc5ed6fbe48ddaffb2ac) +- update readme [`1eff0ec`](https://github.com/inspect-js/is-core-module/commit/1eff0ec69798d1ec65771552d1562911e90a8027) + +## v1.0.0 - 2014-09-28 + +### Commits + +- init [`48e5e76`](https://github.com/inspect-js/is-core-module/commit/48e5e76cac378fddb8c1f7d4055b8dfc943d6b96) diff --git a/node_modules/is-core-module/LICENSE b/node_modules/is-core-module/LICENSE new file mode 100644 index 0000000..2e50287 --- /dev/null +++ b/node_modules/is-core-module/LICENSE @@ -0,0 +1,20 @@ +The MIT License (MIT) + +Copyright (c) 2014 Dave Justice + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
\ No newline at end of file diff --git a/node_modules/is-core-module/README.md b/node_modules/is-core-module/README.md new file mode 100644 index 0000000..062d906 --- /dev/null +++ b/node_modules/is-core-module/README.md @@ -0,0 +1,40 @@ +# is-core-module [![Version Badge][2]][1] + +[![github actions][actions-image]][actions-url] +[![coverage][codecov-image]][codecov-url] +[![dependency status][5]][6] +[![dev dependency status][7]][8] +[![License][license-image]][license-url] +[![Downloads][downloads-image]][downloads-url] + +[![npm badge][11]][1] + +Is this specifier a node.js core module? Optionally provide a node version to check; defaults to the current node version. + +## Example + +```js +var isCore = require('is-core-module'); +var assert = require('assert'); +assert(isCore('fs')); +assert(!isCore('butts')); +``` + +## Tests +Clone the repo, `npm install`, and run `npm test` + +[1]: https://npmjs.org/package/is-core-module +[2]: https://versionbadg.es/inspect-js/is-core-module.svg +[5]: https://david-dm.org/inspect-js/is-core-module.svg +[6]: https://david-dm.org/inspect-js/is-core-module +[7]: https://david-dm.org/inspect-js/is-core-module/dev-status.svg +[8]: https://david-dm.org/inspect-js/is-core-module#info=devDependencies +[11]: https://nodei.co/npm/is-core-module.png?downloads=true&stars=true +[license-image]: https://img.shields.io/npm/l/is-core-module.svg +[license-url]: LICENSE +[downloads-image]: https://img.shields.io/npm/dm/is-core-module.svg +[downloads-url]: https://npm-stat.com/charts.html?package=is-core-module +[codecov-image]: https://codecov.io/gh/inspect-js/is-core-module/branch/main/graphs/badge.svg +[codecov-url]: https://app.codecov.io/gh/inspect-js/is-core-module/ +[actions-image]: https://img.shields.io/endpoint?url=https://github-actions-badge-u3jn4tfpocch.runkit.sh/inspect-js/is-core-module +[actions-url]: https://github.com/inspect-js/is-core-module/actions diff --git a/node_modules/is-core-module/core.json b/node_modules/is-core-module/core.json new file mode 100644 index 0000000..af29f0b --- /dev/null +++ b/node_modules/is-core-module/core.json @@ -0,0 +1,158 @@ +{ + "assert": true, + "node:assert": [">= 14.18 && < 15", ">= 16"], + "assert/strict": ">= 15", + "node:assert/strict": ">= 16", + "async_hooks": ">= 8", + "node:async_hooks": [">= 14.18 && < 15", ">= 16"], + "buffer_ieee754": ">= 0.5 && < 0.9.7", + "buffer": true, + "node:buffer": [">= 14.18 && < 15", ">= 16"], + "child_process": true, + "node:child_process": [">= 14.18 && < 15", ">= 16"], + "cluster": ">= 0.5", + "node:cluster": [">= 14.18 && < 15", ">= 16"], + "console": true, + "node:console": [">= 14.18 && < 15", ">= 16"], + "constants": true, + "node:constants": [">= 14.18 && < 15", ">= 16"], + "crypto": true, + "node:crypto": [">= 14.18 && < 15", ">= 16"], + "_debug_agent": ">= 1 && < 8", + "_debugger": "< 8", + "dgram": true, + "node:dgram": [">= 14.18 && < 15", ">= 16"], + "diagnostics_channel": [">= 14.17 && < 15", ">= 15.1"], + "node:diagnostics_channel": [">= 14.18 && < 15", ">= 16"], + "dns": true, + "node:dns": [">= 14.18 && < 15", ">= 16"], + "dns/promises": ">= 15", + "node:dns/promises": ">= 16", + "domain": ">= 0.7.12", + "node:domain": [">= 14.18 && < 15", ">= 16"], + "events": true, + "node:events": [">= 14.18 && < 15", ">= 16"], + "freelist": "< 6", + "fs": true, + "node:fs": [">= 14.18 && < 15", ">= 16"], + "fs/promises": [">= 10 && < 10.1", ">= 14"], + "node:fs/promises": [">= 14.18 && < 15", ">= 16"], + "_http_agent": ">= 0.11.1", + "node:_http_agent": [">= 14.18 && < 
15", ">= 16"], + "_http_client": ">= 0.11.1", + "node:_http_client": [">= 14.18 && < 15", ">= 16"], + "_http_common": ">= 0.11.1", + "node:_http_common": [">= 14.18 && < 15", ">= 16"], + "_http_incoming": ">= 0.11.1", + "node:_http_incoming": [">= 14.18 && < 15", ">= 16"], + "_http_outgoing": ">= 0.11.1", + "node:_http_outgoing": [">= 14.18 && < 15", ">= 16"], + "_http_server": ">= 0.11.1", + "node:_http_server": [">= 14.18 && < 15", ">= 16"], + "http": true, + "node:http": [">= 14.18 && < 15", ">= 16"], + "http2": ">= 8.8", + "node:http2": [">= 14.18 && < 15", ">= 16"], + "https": true, + "node:https": [">= 14.18 && < 15", ">= 16"], + "inspector": ">= 8", + "node:inspector": [">= 14.18 && < 15", ">= 16"], + "inspector/promises": [">= 19"], + "node:inspector/promises": [">= 19"], + "_linklist": "< 8", + "module": true, + "node:module": [">= 14.18 && < 15", ">= 16"], + "net": true, + "node:net": [">= 14.18 && < 15", ">= 16"], + "node-inspect/lib/_inspect": ">= 7.6 && < 12", + "node-inspect/lib/internal/inspect_client": ">= 7.6 && < 12", + "node-inspect/lib/internal/inspect_repl": ">= 7.6 && < 12", + "os": true, + "node:os": [">= 14.18 && < 15", ">= 16"], + "path": true, + "node:path": [">= 14.18 && < 15", ">= 16"], + "path/posix": ">= 15.3", + "node:path/posix": ">= 16", + "path/win32": ">= 15.3", + "node:path/win32": ">= 16", + "perf_hooks": ">= 8.5", + "node:perf_hooks": [">= 14.18 && < 15", ">= 16"], + "process": ">= 1", + "node:process": [">= 14.18 && < 15", ">= 16"], + "punycode": ">= 0.5", + "node:punycode": [">= 14.18 && < 15", ">= 16"], + "querystring": true, + "node:querystring": [">= 14.18 && < 15", ">= 16"], + "readline": true, + "node:readline": [">= 14.18 && < 15", ">= 16"], + "readline/promises": ">= 17", + "node:readline/promises": ">= 17", + "repl": true, + "node:repl": [">= 14.18 && < 15", ">= 16"], + "smalloc": ">= 0.11.5 && < 3", + "_stream_duplex": ">= 0.9.4", + "node:_stream_duplex": [">= 14.18 && < 15", ">= 16"], + "_stream_transform": ">= 0.9.4", + "node:_stream_transform": [">= 14.18 && < 15", ">= 16"], + "_stream_wrap": ">= 1.4.1", + "node:_stream_wrap": [">= 14.18 && < 15", ">= 16"], + "_stream_passthrough": ">= 0.9.4", + "node:_stream_passthrough": [">= 14.18 && < 15", ">= 16"], + "_stream_readable": ">= 0.9.4", + "node:_stream_readable": [">= 14.18 && < 15", ">= 16"], + "_stream_writable": ">= 0.9.4", + "node:_stream_writable": [">= 14.18 && < 15", ">= 16"], + "stream": true, + "node:stream": [">= 14.18 && < 15", ">= 16"], + "stream/consumers": ">= 16.7", + "node:stream/consumers": ">= 16.7", + "stream/promises": ">= 15", + "node:stream/promises": ">= 16", + "stream/web": ">= 16.5", + "node:stream/web": ">= 16.5", + "string_decoder": true, + "node:string_decoder": [">= 14.18 && < 15", ">= 16"], + "sys": [">= 0.4 && < 0.7", ">= 0.8"], + "node:sys": [">= 14.18 && < 15", ">= 16"], + "test/reporters": ">= 19.9 && < 20.2", + "node:test/reporters": [">= 19.9", ">= 20"], + "node:test": [">= 16.17 && < 17", ">= 18"], + "timers": true, + "node:timers": [">= 14.18 && < 15", ">= 16"], + "timers/promises": ">= 15", + "node:timers/promises": ">= 16", + "_tls_common": ">= 0.11.13", + "node:_tls_common": [">= 14.18 && < 15", ">= 16"], + "_tls_legacy": ">= 0.11.3 && < 10", + "_tls_wrap": ">= 0.11.3", + "node:_tls_wrap": [">= 14.18 && < 15", ">= 16"], + "tls": true, + "node:tls": [">= 14.18 && < 15", ">= 16"], + "trace_events": ">= 10", + "node:trace_events": [">= 14.18 && < 15", ">= 16"], + "tty": true, + "node:tty": [">= 14.18 && < 15", ">= 16"], + "url": true, + "node:url": 
[">= 14.18 && < 15", ">= 16"], + "util": true, + "node:util": [">= 14.18 && < 15", ">= 16"], + "util/types": ">= 15.3", + "node:util/types": ">= 16", + "v8/tools/arguments": ">= 10 && < 12", + "v8/tools/codemap": [">= 4.4 && < 5", ">= 5.2 && < 12"], + "v8/tools/consarray": [">= 4.4 && < 5", ">= 5.2 && < 12"], + "v8/tools/csvparser": [">= 4.4 && < 5", ">= 5.2 && < 12"], + "v8/tools/logreader": [">= 4.4 && < 5", ">= 5.2 && < 12"], + "v8/tools/profile_view": [">= 4.4 && < 5", ">= 5.2 && < 12"], + "v8/tools/splaytree": [">= 4.4 && < 5", ">= 5.2 && < 12"], + "v8": ">= 1", + "node:v8": [">= 14.18 && < 15", ">= 16"], + "vm": true, + "node:vm": [">= 14.18 && < 15", ">= 16"], + "wasi": [">= 13.4 && < 13.5", ">= 20"], + "node:wasi": ">= 20", + "worker_threads": ">= 11.7", + "node:worker_threads": [">= 14.18 && < 15", ">= 16"], + "zlib": ">= 0.5", + "node:zlib": [">= 14.18 && < 15", ">= 16"] +} diff --git a/node_modules/is-core-module/index.js b/node_modules/is-core-module/index.js new file mode 100644 index 0000000..f9637e0 --- /dev/null +++ b/node_modules/is-core-module/index.js @@ -0,0 +1,69 @@ +'use strict'; + +var has = require('has'); + +function specifierIncluded(current, specifier) { + var nodeParts = current.split('.'); + var parts = specifier.split(' '); + var op = parts.length > 1 ? parts[0] : '='; + var versionParts = (parts.length > 1 ? parts[1] : parts[0]).split('.'); + + for (var i = 0; i < 3; ++i) { + var cur = parseInt(nodeParts[i] || 0, 10); + var ver = parseInt(versionParts[i] || 0, 10); + if (cur === ver) { + continue; // eslint-disable-line no-restricted-syntax, no-continue + } + if (op === '<') { + return cur < ver; + } + if (op === '>=') { + return cur >= ver; + } + return false; + } + return op === '>='; +} + +function matchesRange(current, range) { + var specifiers = range.split(/ ?&& ?/); + if (specifiers.length === 0) { + return false; + } + for (var i = 0; i < specifiers.length; ++i) { + if (!specifierIncluded(current, specifiers[i])) { + return false; + } + } + return true; +} + +function versionIncluded(nodeVersion, specifierValue) { + if (typeof specifierValue === 'boolean') { + return specifierValue; + } + + var current = typeof nodeVersion === 'undefined' + ? process.versions && process.versions.node + : nodeVersion; + + if (typeof current !== 'string') { + throw new TypeError(typeof nodeVersion === 'undefined' ? 
'Unable to determine current node version' : 'If provided, a valid node version is required'); + } + + if (specifierValue && typeof specifierValue === 'object') { + for (var i = 0; i < specifierValue.length; ++i) { + if (matchesRange(current, specifierValue[i])) { + return true; + } + } + return false; + } + return matchesRange(current, specifierValue); +} + +var data = require('./core.json'); + +module.exports = function isCore(x, nodeVersion) { + return has(data, x) && versionIncluded(nodeVersion, data[x]); +}; diff --git a/node_modules/is-core-module/package.json b/node_modules/is-core-module/package.json new file mode 100644 index 0000000..62bb065 --- /dev/null +++ b/node_modules/is-core-module/package.json @@ -0,0 +1,73 @@ +{ + "name": "is-core-module", + "version": "2.12.1", + "description": "Is this specifier a node.js core module?", + "main": "index.js", + "sideEffects": false, + "exports": { + ".": "./index.js", + "./package.json": "./package.json" + }, + "scripts": { + "prepack": "npmignore --auto --commentLines=autogenerated", + "prepublish": "not-in-publish || npm run prepublishOnly", + "prepublishOnly": "safe-publish-latest", + "lint": "eslint .", + "pretest": "npm run lint", + "tests-only": "nyc tape 'test/**/*.js'", + "test": "npm run tests-only", + "posttest": "aud --production", + "version": "auto-changelog && git add CHANGELOG.md", + "postversion": "auto-changelog && git add CHANGELOG.md && git commit --no-edit --amend && git tag -f \"v$(node -e \"console.log(require('./package.json').version)\")\"" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/inspect-js/is-core-module.git" + }, + "keywords": [ + "core", + "modules", + "module", + "npm", + "node", + "dependencies" + ], + "author": "Jordan Harband ", + "funding": { + "url": "https://github.com/sponsors/ljharb" + }, + "license": "MIT", + "bugs": { + "url": "https://github.com/inspect-js/is-core-module/issues" + }, + "homepage": "https://github.com/inspect-js/is-core-module", + "dependencies": { + "has": "^1.0.3" + }, + "devDependencies": { + "@ljharb/eslint-config": "^21.0.1", + "aud": "^2.0.2", + "auto-changelog": "^2.4.0", + "eslint": "=8.8.0", + "in-publish": "^2.0.1", + "mock-property": "^1.0.0", + "npmignore": "^0.3.0", + "nyc": "^10.3.2", + "safe-publish-latest": "^2.0.0", + "semver": "^6.3.0", + "tape": "^5.6.3" + }, + "auto-changelog": { + "output": "CHANGELOG.md", + "template": "keepachangelog", + "unreleased": false, + "commitLimit": false, + "backfillLimit": false, + "hideCredit": true + }, + "publishConfig": { + "ignore": [ + ".github" + ] + } +} diff --git a/node_modules/is-core-module/test/index.js b/node_modules/is-core-module/test/index.js new file mode 100644 index 0000000..912808b --- /dev/null +++ b/node_modules/is-core-module/test/index.js @@ -0,0 +1,133 @@ +'use strict'; + +var test = require('tape'); +var keys = require('object-keys'); +var semver = require('semver'); +var mockProperty = require('mock-property'); + +var isCore = require('../'); +var data = require('../core.json'); + +var supportsNodePrefix = semver.satisfies(process.versions.node, '^14.18 || >= 16', { includePrerelease: true }); + +test('core modules', function (t) { + t.test('isCore()', function (st) { + st.ok(isCore('fs')); + st.ok(isCore('net')); + st.ok(isCore('http')); + + st.ok(!isCore('seq')); + st.ok(!isCore('../')); + + st.ok(!isCore('toString')); + + st.end(); + }); + + t.test('core list', function (st) { + var cores = keys(data); + st.plan(cores.length); + + for (var i = 0; i < cores.length; ++i) 
{ + var mod = cores[i]; + var requireFunc = function () { require(mod); }; // eslint-disable-line no-loop-func + if (isCore(mod)) { + st.doesNotThrow(requireFunc, mod + ' supported; requiring does not throw'); + } else { + st['throws'](requireFunc, mod + ' not supported; requiring throws'); + } + } + + st.end(); + }); + + t.test('core via repl module', { skip: !data.repl }, function (st) { + var libs = require('repl')._builtinLibs; // eslint-disable-line no-underscore-dangle + if (!libs) { + st.skip('repl._builtinLibs does not exist'); + } else { + for (var i = 0; i < libs.length; ++i) { + var mod = libs[i]; + st.ok(data[mod], mod + ' is a core module'); + st.doesNotThrow( + function () { require(mod); }, // eslint-disable-line no-loop-func + 'requiring ' + mod + ' does not throw' + ); + if (mod.slice(0, 5) !== 'node:') { + if (supportsNodePrefix) { + st.doesNotThrow( + function () { require('node:' + mod); }, // eslint-disable-line no-loop-func + 'requiring node:' + mod + ' does not throw' + ); + } else { + st['throws']( + function () { require('node:' + mod); }, // eslint-disable-line no-loop-func + 'requiring node:' + mod + ' throws' + ); + } + } + } + } + st.end(); + }); + + t.test('core via builtinModules list', { skip: !data.module }, function (st) { + var libs = require('module').builtinModules; + if (!libs) { + st.skip('module.builtinModules does not exist'); + } else { + var excludeList = [ + '_debug_agent', + 'v8/tools/tickprocessor-driver', + 'v8/tools/SourceMap', + 'v8/tools/tickprocessor', + 'v8/tools/profile' + ]; + // see https://github.com/nodejs/node/issues/42785 + if (semver.satisfies(process.version, '>= 18')) { + libs = libs.concat('node:test'); + } + for (var i = 0; i < libs.length; ++i) { + var mod = libs[i]; + if (excludeList.indexOf(mod) === -1) { + st.ok(data[mod], mod + ' is a core module'); + st.doesNotThrow( + function () { require(mod); }, // eslint-disable-line no-loop-func + 'requiring ' + mod + ' does not throw' + ); + if (mod.slice(0, 5) !== 'node:') { + if (supportsNodePrefix) { + st.doesNotThrow( + function () { require('node:' + mod); }, // eslint-disable-line no-loop-func + 'requiring node:' + mod + ' does not throw' + ); + } else { + st['throws']( + function () { require('node:' + mod); }, // eslint-disable-line no-loop-func + 'requiring node:' + mod + ' throws' + ); + } + } + } + } + } + st.end(); + }); + + t.test('Object.prototype pollution', function (st) { + var nonKey = 'not a core module'; + st.teardown(mockProperty(Object.prototype, 'fs', { value: false })); + st.teardown(mockProperty(Object.prototype, 'path', { value: '>= 999999999' })); + st.teardown(mockProperty(Object.prototype, 'http', { value: data.http })); + st.teardown(mockProperty(Object.prototype, nonKey, { value: true })); + + st.equal(isCore('fs'), true, 'fs is a core module even if Object.prototype lies'); + st.equal(isCore('path'), true, 'path is a core module even if Object.prototype lies'); + st.equal(isCore('http'), true, 'path is a core module even if Object.prototype matches data'); + st.equal(isCore(nonKey), false, '"' + nonKey + '" is not a core module even if Object.prototype lies'); + + st.end(); + }); + + t.end(); +}); diff --git a/node_modules/is-extglob/LICENSE b/node_modules/is-extglob/LICENSE new file mode 100644 index 0000000..842218c --- /dev/null +++ b/node_modules/is-extglob/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014-2016, Jon Schlinkert + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software 
and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/is-extglob/README.md b/node_modules/is-extglob/README.md new file mode 100644 index 0000000..0416af5 --- /dev/null +++ b/node_modules/is-extglob/README.md @@ -0,0 +1,107 @@ +# is-extglob [![NPM version](https://img.shields.io/npm/v/is-extglob.svg?style=flat)](https://www.npmjs.com/package/is-extglob) [![NPM downloads](https://img.shields.io/npm/dm/is-extglob.svg?style=flat)](https://npmjs.org/package/is-extglob) [![Build Status](https://img.shields.io/travis/jonschlinkert/is-extglob.svg?style=flat)](https://travis-ci.org/jonschlinkert/is-extglob) + +> Returns true if a string has an extglob. + +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install --save is-extglob +``` + +## Usage + +```js +var isExtglob = require('is-extglob'); +``` + +**True** + +```js +isExtglob('?(abc)'); +isExtglob('@(abc)'); +isExtglob('!(abc)'); +isExtglob('*(abc)'); +isExtglob('+(abc)'); +``` + +**False** + +Escaped extglobs: + +```js +isExtglob('\\?(abc)'); +isExtglob('\\@(abc)'); +isExtglob('\\!(abc)'); +isExtglob('\\*(abc)'); +isExtglob('\\+(abc)'); +``` + +Everything else... + +```js +isExtglob('foo.js'); +isExtglob('!foo.js'); +isExtglob('*.js'); +isExtglob('**/abc.js'); +isExtglob('abc/*.js'); +isExtglob('abc/(aaa|bbb).js'); +isExtglob('abc/[a-z].js'); +isExtglob('abc/{a,b}.js'); +isExtglob('abc/?.js'); +isExtglob('abc.js'); +isExtglob('abc/def/ghi.js'); +``` + +## History + +**v2.0** + +Adds support for escaping. Escaped exglobs no longer return true. + +## About + +### Related projects + +* [has-glob](https://www.npmjs.com/package/has-glob): Returns `true` if an array has a glob pattern. | [homepage](https://github.com/jonschlinkert/has-glob "Returns `true` if an array has a glob pattern.") +* [is-glob](https://www.npmjs.com/package/is-glob): Returns `true` if the given string looks like a glob pattern or an extglob pattern… [more](https://github.com/jonschlinkert/is-glob) | [homepage](https://github.com/jonschlinkert/is-glob "Returns `true` if the given string looks like a glob pattern or an extglob pattern. This makes it easy to create code that only uses external modules like node-glob when necessary, resulting in much faster code execution and initialization time, and a bet") +* [micromatch](https://www.npmjs.com/package/micromatch): Glob matching for javascript/node.js. A drop-in replacement and faster alternative to minimatch and multimatch. | [homepage](https://github.com/jonschlinkert/micromatch "Glob matching for javascript/node.js. 
A drop-in replacement and faster alternative to minimatch and multimatch.") + +### Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). + +### Building docs + +_(This document was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme) (a [verb](https://github.com/verbose/verb) generator), please don't edit the readme directly. Any changes to the readme must be made in [.verb.md](.verb.md).)_ + +To generate the readme and API documentation with [verb](https://github.com/verbose/verb): + +```sh +$ npm install -g verb verb-generate-readme && verb +``` + +### Running tests + +Install dev dependencies: + +```sh +$ npm install -d && npm test +``` + +### Author + +**Jon Schlinkert** + +* [github/jonschlinkert](https://github.com/jonschlinkert) +* [twitter/jonschlinkert](http://twitter.com/jonschlinkert) + +### License + +Copyright © 2016, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT license](https://github.com/jonschlinkert/is-extglob/blob/master/LICENSE). + +*** + +_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.1.31, on October 12, 2016._ \ No newline at end of file diff --git a/node_modules/is-extglob/index.js b/node_modules/is-extglob/index.js new file mode 100644 index 0000000..c1d986f --- /dev/null +++ b/node_modules/is-extglob/index.js @@ -0,0 +1,20 @@ +/*! + * is-extglob + * + * Copyright (c) 2014-2016, Jon Schlinkert. + * Licensed under the MIT License. + */ + +module.exports = function isExtglob(str) { + if (typeof str !== 'string' || str === '') { + return false; + } + + var match; + while ((match = /(\\).|([@?!+*]\(.*\))/g.exec(str))) { + if (match[2]) return true; + str = str.slice(match.index + match[0].length); + } + + return false; +}; diff --git a/node_modules/is-extglob/package.json b/node_modules/is-extglob/package.json new file mode 100644 index 0000000..7a90836 --- /dev/null +++ b/node_modules/is-extglob/package.json @@ -0,0 +1,69 @@ +{ + "name": "is-extglob", + "description": "Returns true if a string has an extglob.", + "version": "2.1.1", + "homepage": "https://github.com/jonschlinkert/is-extglob", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "repository": "jonschlinkert/is-extglob", + "bugs": { + "url": "https://github.com/jonschlinkert/is-extglob/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha" + }, + "devDependencies": { + "gulp-format-md": "^0.1.10", + "mocha": "^3.0.2" + }, + "keywords": [ + "bash", + "braces", + "check", + "exec", + "expression", + "extglob", + "glob", + "globbing", + "globstar", + "is", + "match", + "matches", + "pattern", + "regex", + "regular", + "string", + "test" + ], + "verb": { + "toc": false, + "layout": "default", + "tasks": [ + "readme" + ], + "plugins": [ + "gulp-format-md" + ], + "related": { + "list": [ + "has-glob", + "is-glob", + "micromatch" + ] + }, + "reflinks": [ + "verb", + "verb-generate-readme" + ], + "lint": { + "reflinks": true + } + } +} diff --git a/node_modules/is-glob/LICENSE b/node_modules/is-glob/LICENSE new file mode 100644 index 0000000..3f2eca1 --- /dev/null +++ b/node_modules/is-glob/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014-2017, Jon Schlinkert. 
+ +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/is-glob/README.md b/node_modules/is-glob/README.md new file mode 100644 index 0000000..740724b --- /dev/null +++ b/node_modules/is-glob/README.md @@ -0,0 +1,206 @@ +# is-glob [![NPM version](https://img.shields.io/npm/v/is-glob.svg?style=flat)](https://www.npmjs.com/package/is-glob) [![NPM monthly downloads](https://img.shields.io/npm/dm/is-glob.svg?style=flat)](https://npmjs.org/package/is-glob) [![NPM total downloads](https://img.shields.io/npm/dt/is-glob.svg?style=flat)](https://npmjs.org/package/is-glob) [![Build Status](https://img.shields.io/github/workflow/status/micromatch/is-glob/dev)](https://github.com/micromatch/is-glob/actions) + +> Returns `true` if the given string looks like a glob pattern or an extglob pattern. This makes it easy to create code that only uses external modules like node-glob when necessary, resulting in much faster code execution and initialization time, and a better user experience. + +Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and consider starring the project to show your :heart: and support. + +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install --save is-glob +``` + +You might also be interested in [is-valid-glob](https://github.com/jonschlinkert/is-valid-glob) and [has-glob](https://github.com/jonschlinkert/has-glob). 
+ +## Usage + +```js +var isGlob = require('is-glob'); +``` + +### Default behavior + +**True** + +Patterns that have glob characters or regex patterns will return `true`: + +```js +isGlob('!foo.js'); +isGlob('*.js'); +isGlob('**/abc.js'); +isGlob('abc/*.js'); +isGlob('abc/(aaa|bbb).js'); +isGlob('abc/[a-z].js'); +isGlob('abc/{a,b}.js'); +//=> true +``` + +Extglobs + +```js +isGlob('abc/@(a).js'); +isGlob('abc/!(a).js'); +isGlob('abc/+(a).js'); +isGlob('abc/*(a).js'); +isGlob('abc/?(a).js'); +//=> true +``` + +**False** + +Escaped globs or extglobs return `false`: + +```js +isGlob('abc/\\@(a).js'); +isGlob('abc/\\!(a).js'); +isGlob('abc/\\+(a).js'); +isGlob('abc/\\*(a).js'); +isGlob('abc/\\?(a).js'); +isGlob('\\!foo.js'); +isGlob('\\*.js'); +isGlob('\\*\\*/abc.js'); +isGlob('abc/\\*.js'); +isGlob('abc/\\(aaa|bbb).js'); +isGlob('abc/\\[a-z].js'); +isGlob('abc/\\{a,b}.js'); +//=> false +``` + +Patterns that do not have glob patterns return `false`: + +```js +isGlob('abc.js'); +isGlob('abc/def/ghi.js'); +isGlob('foo.js'); +isGlob('abc/@.js'); +isGlob('abc/+.js'); +isGlob('abc/?.js'); +isGlob(); +isGlob(null); +//=> false +``` + +Arrays are also `false` (If you want to check if an array has a glob pattern, use [has-glob](https://github.com/jonschlinkert/has-glob)): + +```js +isGlob(['**/*.js']); +isGlob(['foo.js']); +//=> false +``` + +### Option strict + +When `options.strict === false` the behavior is less strict in determining if a pattern is a glob. Meaning that +some patterns that would return `false` may return `true`. This is done so that matching libraries like [micromatch](https://github.com/micromatch/micromatch) have a chance at determining if the pattern is a glob or not. + +**True** + +Patterns that have glob characters or regex patterns will return `true`: + +```js +isGlob('!foo.js', {strict: false}); +isGlob('*.js', {strict: false}); +isGlob('**/abc.js', {strict: false}); +isGlob('abc/*.js', {strict: false}); +isGlob('abc/(aaa|bbb).js', {strict: false}); +isGlob('abc/[a-z].js', {strict: false}); +isGlob('abc/{a,b}.js', {strict: false}); +//=> true +``` + +Extglobs + +```js +isGlob('abc/@(a).js', {strict: false}); +isGlob('abc/!(a).js', {strict: false}); +isGlob('abc/+(a).js', {strict: false}); +isGlob('abc/*(a).js', {strict: false}); +isGlob('abc/?(a).js', {strict: false}); +//=> true +``` + +**False** + +Escaped globs or extglobs return `false`: + +```js +isGlob('\\!foo.js', {strict: false}); +isGlob('\\*.js', {strict: false}); +isGlob('\\*\\*/abc.js', {strict: false}); +isGlob('abc/\\*.js', {strict: false}); +isGlob('abc/\\(aaa|bbb).js', {strict: false}); +isGlob('abc/\\[a-z].js', {strict: false}); +isGlob('abc/\\{a,b}.js', {strict: false}); +//=> false +``` + +## About + +
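+A minimal sketch (inferred from the bundled `index.js` below, not taken from the upstream examples) of an input where the default and relaxed checks can disagree:
+
+```js
+var isGlob = require('is-glob');
+
+// The strict (default) scan only treats a non-empty `{...}` group as a glob,
+// so an empty brace pair is not enough on its own:
+isGlob('abc/{}');                    //=> false
+
+// The relaxed scan returns true for any glob special character, leaving the
+// final decision to a matching library such as micromatch:
+isGlob('abc/{}', { strict: false }); //=> true
+```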
+### Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). +
+ +
+### Running Tests + +Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command: + +```sh +$ npm install && npm test +``` +
+ +
+### Building docs + +_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_ + +To generate the readme, run the following command: + +```sh +$ npm install -g verbose/verb#dev verb-generate-readme && verb +``` +
+ +### Related projects + +You might also be interested in these projects: + +* [assemble](https://www.npmjs.com/package/assemble): Get the rocks out of your socks! Assemble makes you fast at creating web projects… [more](https://github.com/assemble/assemble) | [homepage](https://github.com/assemble/assemble "Get the rocks out of your socks! Assemble makes you fast at creating web projects. Assemble is used by thousands of projects for rapid prototyping, creating themes, scaffolds, boilerplates, e-books, UI components, API documentation, blogs, building websit") +* [base](https://www.npmjs.com/package/base): Framework for rapidly creating high quality, server-side node.js applications, using plugins like building blocks | [homepage](https://github.com/node-base/base "Framework for rapidly creating high quality, server-side node.js applications, using plugins like building blocks") +* [update](https://www.npmjs.com/package/update): Be scalable! Update is a new, open source developer framework and CLI for automating updates… [more](https://github.com/update/update) | [homepage](https://github.com/update/update "Be scalable! Update is a new, open source developer framework and CLI for automating updates of any kind in code projects.") +* [verb](https://www.npmjs.com/package/verb): Documentation generator for GitHub projects. Verb is extremely powerful, easy to use, and is used… [more](https://github.com/verbose/verb) | [homepage](https://github.com/verbose/verb "Documentation generator for GitHub projects. Verb is extremely powerful, easy to use, and is used on hundreds of projects of all sizes to generate everything from API docs to readmes.") + +### Contributors + +| **Commits** | **Contributor** | +| --- | --- | +| 47 | [jonschlinkert](https://github.com/jonschlinkert) | +| 5 | [doowb](https://github.com/doowb) | +| 1 | [phated](https://github.com/phated) | +| 1 | [danhper](https://github.com/danhper) | +| 1 | [paulmillr](https://github.com/paulmillr) | + +### Author + +**Jon Schlinkert** + +* [GitHub Profile](https://github.com/jonschlinkert) +* [Twitter Profile](https://twitter.com/jonschlinkert) +* [LinkedIn Profile](https://linkedin.com/in/jonschlinkert) + +### License + +Copyright © 2019, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT License](LICENSE). + +*** + +_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.8.0, on March 27, 2019._ \ No newline at end of file diff --git a/node_modules/is-glob/index.js b/node_modules/is-glob/index.js new file mode 100644 index 0000000..620f563 --- /dev/null +++ b/node_modules/is-glob/index.js @@ -0,0 +1,150 @@ +/*! + * is-glob + * + * Copyright (c) 2014-2017, Jon Schlinkert. + * Released under the MIT License. + */ + +var isExtglob = require('is-extglob'); +var chars = { '{': '}', '(': ')', '[': ']'}; +var strictCheck = function(str) { + if (str[0] === '!') { + return true; + } + var index = 0; + var pipeIndex = -2; + var closeSquareIndex = -2; + var closeCurlyIndex = -2; + var closeParenIndex = -2; + var backSlashIndex = -2; + while (index < str.length) { + if (str[index] === '*') { + return true; + } + + if (str[index + 1] === '?' 
&& /[\].+)]/.test(str[index])) { + return true; + } + + if (closeSquareIndex !== -1 && str[index] === '[' && str[index + 1] !== ']') { + if (closeSquareIndex < index) { + closeSquareIndex = str.indexOf(']', index); + } + if (closeSquareIndex > index) { + if (backSlashIndex === -1 || backSlashIndex > closeSquareIndex) { + return true; + } + backSlashIndex = str.indexOf('\\', index); + if (backSlashIndex === -1 || backSlashIndex > closeSquareIndex) { + return true; + } + } + } + + if (closeCurlyIndex !== -1 && str[index] === '{' && str[index + 1] !== '}') { + closeCurlyIndex = str.indexOf('}', index); + if (closeCurlyIndex > index) { + backSlashIndex = str.indexOf('\\', index); + if (backSlashIndex === -1 || backSlashIndex > closeCurlyIndex) { + return true; + } + } + } + + if (closeParenIndex !== -1 && str[index] === '(' && str[index + 1] === '?' && /[:!=]/.test(str[index + 2]) && str[index + 3] !== ')') { + closeParenIndex = str.indexOf(')', index); + if (closeParenIndex > index) { + backSlashIndex = str.indexOf('\\', index); + if (backSlashIndex === -1 || backSlashIndex > closeParenIndex) { + return true; + } + } + } + + if (pipeIndex !== -1 && str[index] === '(' && str[index + 1] !== '|') { + if (pipeIndex < index) { + pipeIndex = str.indexOf('|', index); + } + if (pipeIndex !== -1 && str[pipeIndex + 1] !== ')') { + closeParenIndex = str.indexOf(')', pipeIndex); + if (closeParenIndex > pipeIndex) { + backSlashIndex = str.indexOf('\\', pipeIndex); + if (backSlashIndex === -1 || backSlashIndex > closeParenIndex) { + return true; + } + } + } + } + + if (str[index] === '\\') { + var open = str[index + 1]; + index += 2; + var close = chars[open]; + + if (close) { + var n = str.indexOf(close, index); + if (n !== -1) { + index = n + 1; + } + } + + if (str[index] === '!') { + return true; + } + } else { + index++; + } + } + return false; +}; + +var relaxedCheck = function(str) { + if (str[0] === '!') { + return true; + } + var index = 0; + while (index < str.length) { + if (/[*?{}()[\]]/.test(str[index])) { + return true; + } + + if (str[index] === '\\') { + var open = str[index + 1]; + index += 2; + var close = chars[open]; + + if (close) { + var n = str.indexOf(close, index); + if (n !== -1) { + index = n + 1; + } + } + + if (str[index] === '!') { + return true; + } + } else { + index++; + } + } + return false; +}; + +module.exports = function isGlob(str, options) { + if (typeof str !== 'string' || str === '') { + return false; + } + + if (isExtglob(str)) { + return true; + } + + var check = strictCheck; + + // optionally relax check + if (options && options.strict === false) { + check = relaxedCheck; + } + + return check(str); +}; diff --git a/node_modules/is-glob/package.json b/node_modules/is-glob/package.json new file mode 100644 index 0000000..858af03 --- /dev/null +++ b/node_modules/is-glob/package.json @@ -0,0 +1,81 @@ +{ + "name": "is-glob", + "description": "Returns `true` if the given string looks like a glob pattern or an extglob pattern. 
This makes it easy to create code that only uses external modules like node-glob when necessary, resulting in much faster code execution and initialization time, and a better user experience.", + "version": "4.0.3", + "homepage": "https://github.com/micromatch/is-glob", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "contributors": [ + "Brian Woodward (https://twitter.com/doowb)", + "Daniel Perez (https://tuvistavie.com)", + "Jon Schlinkert (http://twitter.com/jonschlinkert)" + ], + "repository": "micromatch/is-glob", + "bugs": { + "url": "https://github.com/micromatch/is-glob/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha && node benchmark.js" + }, + "dependencies": { + "is-extglob": "^2.1.1" + }, + "devDependencies": { + "gulp-format-md": "^0.1.10", + "mocha": "^3.0.2" + }, + "keywords": [ + "bash", + "braces", + "check", + "exec", + "expression", + "extglob", + "glob", + "globbing", + "globstar", + "is", + "match", + "matches", + "pattern", + "regex", + "regular", + "string", + "test" + ], + "verb": { + "layout": "default", + "plugins": [ + "gulp-format-md" + ], + "related": { + "list": [ + "assemble", + "base", + "update", + "verb" + ] + }, + "reflinks": [ + "assemble", + "bach", + "base", + "composer", + "gulp", + "has-glob", + "is-valid-glob", + "micromatch", + "npm", + "scaffold", + "verb", + "vinyl" + ] + } +} diff --git a/node_modules/is-number/LICENSE b/node_modules/is-number/LICENSE new file mode 100644 index 0000000..9af4a67 --- /dev/null +++ b/node_modules/is-number/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014-present, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/is-number/README.md b/node_modules/is-number/README.md new file mode 100644 index 0000000..eb8149e --- /dev/null +++ b/node_modules/is-number/README.md @@ -0,0 +1,187 @@ +# is-number [![NPM version](https://img.shields.io/npm/v/is-number.svg?style=flat)](https://www.npmjs.com/package/is-number) [![NPM monthly downloads](https://img.shields.io/npm/dm/is-number.svg?style=flat)](https://npmjs.org/package/is-number) [![NPM total downloads](https://img.shields.io/npm/dt/is-number.svg?style=flat)](https://npmjs.org/package/is-number) [![Linux Build Status](https://img.shields.io/travis/jonschlinkert/is-number.svg?style=flat&label=Travis)](https://travis-ci.org/jonschlinkert/is-number) + +> Returns true if the value is a finite number. 
+ +Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and consider starring the project to show your :heart: and support. + +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install --save is-number +``` + +## Why is this needed? + +In JavaScript, it's not always as straightforward as it should be to reliably check if a value is a number. It's common for devs to use `+`, `-`, or `Number()` to cast a string value to a number (for example, when values are returned from user input, regex matches, parsers, etc). But there are many non-intuitive edge cases that yield unexpected results: + +```js +console.log(+[]); //=> 0 +console.log(+''); //=> 0 +console.log(+' '); //=> 0 +console.log(typeof NaN); //=> 'number' +``` + +This library offers a performant way to smooth out edge cases like these. + +## Usage + +```js +const isNumber = require('is-number'); +``` + +See the [tests](./test.js) for more examples. + +### true + +```js +isNumber(5e3); // true +isNumber(0xff); // true +isNumber(-1.1); // true +isNumber(0); // true +isNumber(1); // true +isNumber(1.1); // true +isNumber(10); // true +isNumber(10.10); // true +isNumber(100); // true +isNumber('-1.1'); // true +isNumber('0'); // true +isNumber('012'); // true +isNumber('0xff'); // true +isNumber('1'); // true +isNumber('1.1'); // true +isNumber('10'); // true +isNumber('10.10'); // true +isNumber('100'); // true +isNumber('5e3'); // true +isNumber(parseInt('012')); // true +isNumber(parseFloat('012')); // true +``` + +### False + +Everything else is false, as you would expect: + +```js +isNumber(Infinity); // false +isNumber(NaN); // false +isNumber(null); // false +isNumber(undefined); // false +isNumber(''); // false +isNumber(' '); // false +isNumber('foo'); // false +isNumber([1]); // false +isNumber([]); // false +isNumber(function () {}); // false +isNumber({}); // false +``` + +## Release history + +### 7.0.0 + +* Refactor. Now uses `.isFinite` if it exists. +* Performance is about the same as v6.0 when the value is a string or number. But it's now 3x-4x faster when the value is not a string or number. + +### 6.0.0 + +* Optimizations, thanks to @benaadams. + +### 5.0.0 + +**Breaking changes** + +* removed support for `instanceof Number` and `instanceof String` + +## Benchmarks + +As with all benchmarks, take these with a grain of salt. See the [benchmarks](./benchmark/index.js) for more detail. + +``` +# all +v7.0 x 413,222 ops/sec ±2.02% (86 runs sampled) +v6.0 x 111,061 ops/sec ±1.29% (85 runs sampled) +parseFloat x 317,596 ops/sec ±1.36% (86 runs sampled) +fastest is 'v7.0' + +# string +v7.0 x 3,054,496 ops/sec ±1.05% (89 runs sampled) +v6.0 x 2,957,781 ops/sec ±0.98% (88 runs sampled) +parseFloat x 3,071,060 ops/sec ±1.13% (88 runs sampled) +fastest is 'parseFloat,v7.0' + +# number +v7.0 x 3,146,895 ops/sec ±0.89% (89 runs sampled) +v6.0 x 3,214,038 ops/sec ±1.07% (89 runs sampled) +parseFloat x 3,077,588 ops/sec ±1.07% (87 runs sampled) +fastest is 'v6.0' +``` + +## About + +
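+A small usage sketch (assembled from the `true`/`false` lists above, not additional upstream documentation) contrasting `isNumber` with the built-ins it smooths over:
+
+```js
+const isNumber = require('is-number');
+
+// Number.isFinite does no coercion, so numeric strings fail it:
+Number.isFinite('5e3');  //=> false
+isNumber('5e3');         //=> true
+
+// NaN is typeof 'number' but is not a finite number:
+typeof NaN;              //=> 'number'
+isNumber(NaN);           //=> false
+```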
+### Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). +
+ +
+### Running Tests + +Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command: + +```sh +$ npm install && npm test +``` +
+ +
+### Building docs + +_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_ + +To generate the readme, run the following command: + +```sh +$ npm install -g verbose/verb#dev verb-generate-readme && verb +``` +
+ +### Related projects + +You might also be interested in these projects: + +* [is-plain-object](https://www.npmjs.com/package/is-plain-object): Returns true if an object was created by the `Object` constructor. | [homepage](https://github.com/jonschlinkert/is-plain-object "Returns true if an object was created by the `Object` constructor.") +* [is-primitive](https://www.npmjs.com/package/is-primitive): Returns `true` if the value is a primitive. | [homepage](https://github.com/jonschlinkert/is-primitive "Returns `true` if the value is a primitive. ") +* [isobject](https://www.npmjs.com/package/isobject): Returns true if the value is an object and not an array or null. | [homepage](https://github.com/jonschlinkert/isobject "Returns true if the value is an object and not an array or null.") +* [kind-of](https://www.npmjs.com/package/kind-of): Get the native type of a value. | [homepage](https://github.com/jonschlinkert/kind-of "Get the native type of a value.") + +### Contributors + +| **Commits** | **Contributor** | +| --- | --- | +| 49 | [jonschlinkert](https://github.com/jonschlinkert) | +| 5 | [charlike-old](https://github.com/charlike-old) | +| 1 | [benaadams](https://github.com/benaadams) | +| 1 | [realityking](https://github.com/realityking) | + +### Author + +**Jon Schlinkert** + +* [LinkedIn Profile](https://linkedin.com/in/jonschlinkert) +* [GitHub Profile](https://github.com/jonschlinkert) +* [Twitter Profile](https://twitter.com/jonschlinkert) + +### License + +Copyright © 2018, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT License](LICENSE). + +*** + +_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.6.0, on June 15, 2018._ \ No newline at end of file diff --git a/node_modules/is-number/index.js b/node_modules/is-number/index.js new file mode 100644 index 0000000..27f19b7 --- /dev/null +++ b/node_modules/is-number/index.js @@ -0,0 +1,18 @@ +/*! + * is-number + * + * Copyright (c) 2014-present, Jon Schlinkert. + * Released under the MIT License. + */ + +'use strict'; + +module.exports = function(num) { + if (typeof num === 'number') { + return num - num === 0; + } + if (typeof num === 'string' && num.trim() !== '') { + return Number.isFinite ? Number.isFinite(+num) : isFinite(+num); + } + return false; +}; diff --git a/node_modules/is-number/package.json b/node_modules/is-number/package.json new file mode 100644 index 0000000..3715072 --- /dev/null +++ b/node_modules/is-number/package.json @@ -0,0 +1,82 @@ +{ + "name": "is-number", + "description": "Returns true if a number or string value is a finite number. 
Useful for regex matches, parsing, user input, etc.", + "version": "7.0.0", + "homepage": "https://github.com/jonschlinkert/is-number", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "contributors": [ + "Jon Schlinkert (http://twitter.com/jonschlinkert)", + "Olsten Larck (https://i.am.charlike.online)", + "Rouven Weßling (www.rouvenwessling.de)" + ], + "repository": "jonschlinkert/is-number", + "bugs": { + "url": "https://github.com/jonschlinkert/is-number/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.12.0" + }, + "scripts": { + "test": "mocha" + }, + "devDependencies": { + "ansi": "^0.3.1", + "benchmark": "^2.1.4", + "gulp-format-md": "^1.0.0", + "mocha": "^3.5.3" + }, + "keywords": [ + "cast", + "check", + "coerce", + "coercion", + "finite", + "integer", + "is", + "isnan", + "is-nan", + "is-num", + "is-number", + "isnumber", + "isfinite", + "istype", + "kind", + "math", + "nan", + "num", + "number", + "numeric", + "parseFloat", + "parseInt", + "test", + "type", + "typeof", + "value" + ], + "verb": { + "toc": false, + "layout": "default", + "tasks": [ + "readme" + ], + "related": { + "list": [ + "is-plain-object", + "is-primitive", + "isobject", + "kind-of" + ] + }, + "plugins": [ + "gulp-format-md" + ], + "lint": { + "reflinks": true + } + } +} diff --git a/node_modules/jiti/LICENSE b/node_modules/jiti/LICENSE new file mode 100644 index 0000000..e739abc --- /dev/null +++ b/node_modules/jiti/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) Pooya Parsa + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/node_modules/jiti/README.md b/node_modules/jiti/README.md new file mode 100644 index 0000000..14710c1 --- /dev/null +++ b/node_modules/jiti/README.md @@ -0,0 +1,152 @@ +# jiti + +> Runtime typescript and ESM support for Node.js + +[![version][npm-v-src]][npm-v-href] +[![downloads][npm-d-src]][npm-d-href] +[![size][size-src]][size-href] + +## Features + +- Seamless typescript and ESM syntax support +- Seamless interoperability between ESM and CommonJS +- Synchronous API to replace `require` +- Super slim and zero dependency +- Smart syntax detection to avoid extra transforms +- CommonJS cache integration +- Filesystem transpile hard cache +- V8 compile cache +- Custom resolve alias + +## Usage + +### Programmatic + +```js +const jiti = require("jiti")(__filename); + +jiti("./path/to/file.ts"); +``` + +You can also pass options as second argument: + +```js +const jiti = require("jiti")(__filename, { debug: true }); +``` + +### CLI + +```bash +jiti index.ts +# or npx jiti index.ts +``` + +### Register require hook + +```bash +node -r jiti/register index.ts +``` + +Alternatively, you can register `jiti` as a require hook programmatically: + +```js +const jiti = require("jiti")(); +const unregister = jiti.register(); +``` + +## Options + +### `debug` + +- Type: Boolean +- Default: `false` +- Environment Variable: `JITI_DEBUG` + +Enable debug to see which files are transpiled + +### `cache` + +- Type: Boolean | String +- Default: `true` +- Environment Variable: `JITI_CACHE` + +Use transpile cache + +If set to `true` will use `node_modules/.cache/jiti` (if exists) or `{TMP_DIR}/node-jiti` + +### `esmResolve` + +- Type: Boolean | String +- Default: `false` +- Environment Variable: `JITI_ESM_RESOLVE` + +Using esm resolution algorithm to support `import` condition. + +### `transform` + +- Type: Function +- Default: Babel (lazy loaded) + +Transform function. See [src/babel](./src/babel.ts) for more details + +### `sourceMaps` + +- Type: Boolean +- Default `false` +- Environment Variable: `JITI_SOURCE_MAPS` + +Add inline source map to transformed source for better debugging. + +### `interopDefault` + +- Type: Boolean +- Default: `false` + +Return the `.default` export of a module at the top-level. + +### `alias` + +- Type: Object +- Default: - +- Environment Variable: `JITI_ALIAS` + +Custom alias map used to resolve ids. + +### `nativeModules` + +- Type: Array +- Default: ['typescript`] +- Environment Variable: `JITI_NATIVE_MODULES` + +List of modules (within `node_modules`) to always use native require for them. + +### `transformModules` + +- Type: Array +- Default: [] +- Environment Variable: `JITI_TRANSFORM_MODULES` + +List of modules (within `node_modules`) to transform them regardless of syntax. + +## Development + +- Clone this repository +- Enable [Corepack](https://github.com/nodejs/corepack) using `corepack enable` +- Install dependencies using `pnpm install` +- Run `pnpm dev` +- Run `pnpm jiti ./test/path/to/file.ts` + +## License + +MIT. 
Made with 💖 + + + +[npm-v-src]: https://img.shields.io/npm/v/jiti?style=flat-square +[npm-v-href]: https://npmjs.com/package/jiti +[npm-d-src]: https://img.shields.io/npm/dm/jiti?style=flat-square +[npm-d-href]: https://npmjs.com/package/jiti +[github-actions-src]: https://img.shields.io/github/workflow/status/unjs/jiti/ci/master?style=flat-square +[github-actions-href]: https://github.com/unjs/jiti/actions?query=workflow%3Aci +[size-src]: https://packagephobia.now.sh/badge?p=jiti +[size-href]: https://packagephobia.now.sh/result?p=jiti diff --git a/node_modules/jiti/bin/jiti.js b/node_modules/jiti/bin/jiti.js new file mode 100755 index 0000000..af44c47 --- /dev/null +++ b/node_modules/jiti/bin/jiti.js @@ -0,0 +1,16 @@ +#!/usr/bin/env node + +const { resolve } = require("path"); + +const script = process.argv.splice(2, 1)[0]; + +if (!script) { + // eslint-disable-next-line no-console + console.error("Usage: jiti [...arguments]"); + process.exit(1); +} + +const pwd = process.cwd(); +const jiti = require("..")(pwd); +const resolved = (process.argv[1] = jiti.resolve(resolve(pwd, script))); +jiti(resolved); diff --git a/node_modules/jiti/dist/babel.d.ts b/node_modules/jiti/dist/babel.d.ts new file mode 100644 index 0000000..ea178d4 --- /dev/null +++ b/node_modules/jiti/dist/babel.d.ts @@ -0,0 +1,2 @@ +import { TransformOptions, TRANSFORM_RESULT } from "./types"; +export default function transform(opts: TransformOptions): TRANSFORM_RESULT; diff --git a/node_modules/jiti/dist/babel.js b/node_modules/jiti/dist/babel.js new file mode 100644 index 0000000..8557588 --- /dev/null +++ b/node_modules/jiti/dist/babel.js @@ -0,0 +1,1885 @@ +(()=>{var __webpack_modules__={"./node_modules/.pnpm/@babel+core@7.21.3/node_modules/@babel/core/lib/config/files lazy recursive":module=>{function webpackEmptyAsyncContext(req){return Promise.resolve().then((()=>{var e=new Error("Cannot find module '"+req+"'");throw e.code="MODULE_NOT_FOUND",e}))}webpackEmptyAsyncContext.keys=()=>[],webpackEmptyAsyncContext.resolve=webpackEmptyAsyncContext,webpackEmptyAsyncContext.id="./node_modules/.pnpm/@babel+core@7.21.3/node_modules/@babel/core/lib/config/files lazy recursive",module.exports=webpackEmptyAsyncContext},"./node_modules/.pnpm/@babel+core@7.21.3/node_modules/@babel/core/lib/config/files sync recursive":module=>{function webpackEmptyContext(req){var e=new Error("Cannot find module '"+req+"'");throw e.code="MODULE_NOT_FOUND",e}webpackEmptyContext.keys=()=>[],webpackEmptyContext.resolve=webpackEmptyContext,webpackEmptyContext.id="./node_modules/.pnpm/@babel+core@7.21.3/node_modules/@babel/core/lib/config/files sync recursive",module.exports=webpackEmptyContext},"./node_modules/.pnpm/@babel+plugin-syntax-class-properties@7.12.13_@babel+core@7.21.3/node_modules/@babel/plugin-syntax-class-properties/lib/index.js":(__unused_webpack_module,exports,__webpack_require__)=>{"use strict";Object.defineProperty(exports,"__esModule",{value:!0}),exports.default=void 0;var 
_default=(0,__webpack_require__("./node_modules/.pnpm/@babel+helper-plugin-utils@7.20.2/node_modules/@babel/helper-plugin-utils/lib/index.js").declare)((api=>(api.assertVersion(7),{name:"syntax-class-properties",manipulateOptions(opts,parserOpts){parserOpts.plugins.push("classProperties","classPrivateProperties","classPrivateMethods")}})));exports.default=_default},"./node_modules/.pnpm/@babel+plugin-syntax-export-namespace-from@7.8.3_@babel+core@7.21.3/node_modules/@babel/plugin-syntax-export-namespace-from/lib/index.js":(__unused_webpack_module,exports,__webpack_require__)=>{"use strict";Object.defineProperty(exports,"__esModule",{value:!0}),exports.default=void 0;var _default=(0,__webpack_require__("./node_modules/.pnpm/@babel+helper-plugin-utils@7.20.2/node_modules/@babel/helper-plugin-utils/lib/index.js").declare)((api=>(api.assertVersion(7),{name:"syntax-export-namespace-from",manipulateOptions(opts,parserOpts){parserOpts.plugins.push("exportNamespaceFrom")}})));exports.default=_default},"./node_modules/.pnpm/@babel+plugin-syntax-nullish-coalescing-operator@7.8.3_@babel+core@7.21.3/node_modules/@babel/plugin-syntax-nullish-coalescing-operator/lib/index.js":(__unused_webpack_module,exports,__webpack_require__)=>{"use strict";Object.defineProperty(exports,"__esModule",{value:!0}),exports.default=void 0;var _default=(0,__webpack_require__("./node_modules/.pnpm/@babel+helper-plugin-utils@7.20.2/node_modules/@babel/helper-plugin-utils/lib/index.js").declare)((api=>(api.assertVersion(7),{name:"syntax-nullish-coalescing-operator",manipulateOptions(opts,parserOpts){parserOpts.plugins.push("nullishCoalescingOperator")}})));exports.default=_default},"./node_modules/.pnpm/@babel+plugin-syntax-optional-chaining@7.8.3_@babel+core@7.21.3/node_modules/@babel/plugin-syntax-optional-chaining/lib/index.js":(__unused_webpack_module,exports,__webpack_require__)=>{"use strict";Object.defineProperty(exports,"__esModule",{value:!0}),exports.default=void 0;var _default=(0,__webpack_require__("./node_modules/.pnpm/@babel+helper-plugin-utils@7.20.2/node_modules/@babel/helper-plugin-utils/lib/index.js").declare)((api=>(api.assertVersion(7),{name:"syntax-optional-chaining",manipulateOptions(opts,parserOpts){parserOpts.plugins.push("optionalChaining")}})));exports.default=_default},"./node_modules/.pnpm/@jridgewell+gen-mapping@0.3.2/node_modules/@jridgewell/gen-mapping/dist/gen-mapping.umd.js":function(__unused_webpack_module,exports,__webpack_require__){!function(exports,setArray,sourcemapCodec,traceMapping){"use strict";const COLUMN=0,SOURCES_INDEX=1,SOURCE_LINE=2,SOURCE_COLUMN=3,NAMES_INDEX=4,NO_NAME=-1;let addSegmentInternal;exports.addSegment=void 0,exports.addMapping=void 0,exports.maybeAddSegment=void 0,exports.maybeAddMapping=void 0,exports.setSourceContent=void 0,exports.toDecodedMap=void 0,exports.toEncodedMap=void 0,exports.fromMap=void 0,exports.allMappings=void 0;class GenMapping{constructor({file,sourceRoot}={}){this._names=new setArray.SetArray,this._sources=new setArray.SetArray,this._sourcesContent=[],this._mappings=[],this.file=file,this.sourceRoot=sourceRoot}}function getLine(mappings,index){for(let i=mappings.length;i<=index;i++)mappings[i]=[];return mappings[index]}function getColumnIndex(line,genColumn){let index=line.length;for(let i=index-1;i>=0&&!(genColumn>=line[i][COLUMN]);index=i--);return index}function insert(array,index,value){for(let i=array.length;i>index;i--)array[i]=array[i-1];array[index]=value}function removeEmptyFinalLines(mappings){const{length}=mappings;let 
len=length;for(let i=len-1;i>=0&&!(mappings[i].length>0);len=i,i--);lenaddSegmentInternal(!1,map,genLine,genColumn,source,sourceLine,sourceColumn,name,content),exports.maybeAddSegment=(map,genLine,genColumn,source,sourceLine,sourceColumn,name,content)=>addSegmentInternal(!0,map,genLine,genColumn,source,sourceLine,sourceColumn,name,content),exports.addMapping=(map,mapping)=>addMappingInternal(!1,map,mapping),exports.maybeAddMapping=(map,mapping)=>addMappingInternal(!0,map,mapping),exports.setSourceContent=(map,source,content)=>{const{_sources:sources,_sourcesContent:sourcesContent}=map;sourcesContent[setArray.put(sources,source)]=content},exports.toDecodedMap=map=>{const{file,sourceRoot,_mappings:mappings,_sources:sources,_sourcesContent:sourcesContent,_names:names}=map;return removeEmptyFinalLines(mappings),{version:3,file:file||void 0,names:names.array,sourceRoot:sourceRoot||void 0,sources:sources.array,sourcesContent,mappings}},exports.toEncodedMap=map=>{const decoded=exports.toDecodedMap(map);return Object.assign(Object.assign({},decoded),{mappings:sourcemapCodec.encode(decoded.mappings)})},exports.allMappings=map=>{const out=[],{_mappings:mappings,_sources:sources,_names:names}=map;for(let i=0;i{const map=new traceMapping.TraceMap(input),gen=new GenMapping({file:map.file,sourceRoot:map.sourceRoot});return putAll(gen._names,map.names),putAll(gen._sources,map.sources),gen._sourcesContent=map.sourcesContent||map.sources.map((()=>null)),gen._mappings=traceMapping.decodedMappings(map),gen},addSegmentInternal=(skipable,map,genLine,genColumn,source,sourceLine,sourceColumn,name,content)=>{const{_mappings:mappings,_sources:sources,_sourcesContent:sourcesContent,_names:names}=map,line=getLine(mappings,genLine),index=getColumnIndex(line,genColumn);if(!source){if(skipable&&skipSourceless(line,index))return;return insert(line,index,[genColumn])}const sourcesIndex=setArray.put(sources,source),namesIndex=name?setArray.put(names,name):NO_NAME;if(sourcesIndex===sourcesContent.length&&(sourcesContent[sourcesIndex]=null!=content?content:null),!skipable||!skipSource(line,index,sourcesIndex,sourceLine,sourceColumn,namesIndex))return insert(line,index,name?[genColumn,sourcesIndex,sourceLine,sourceColumn,namesIndex]:[genColumn,sourcesIndex,sourceLine,sourceColumn])},exports.GenMapping=GenMapping,Object.defineProperty(exports,"__esModule",{value:!0})}(exports,__webpack_require__("./node_modules/.pnpm/@jridgewell+set-array@1.1.2/node_modules/@jridgewell/set-array/dist/set-array.umd.js"),__webpack_require__("./node_modules/.pnpm/@jridgewell+sourcemap-codec@1.4.14/node_modules/@jridgewell/sourcemap-codec/dist/sourcemap-codec.umd.js"),__webpack_require__("./node_modules/.pnpm/@jridgewell+trace-mapping@0.3.17/node_modules/@jridgewell/trace-mapping/dist/trace-mapping.umd.js"))},"./node_modules/.pnpm/@jridgewell+resolve-uri@3.1.0/node_modules/@jridgewell/resolve-uri/dist/resolve-uri.umd.js":function(module){module.exports=function(){"use strict";const schemeRegex=/^[\w+.-]+:\/\//,urlRegex=/^([\w+.-]+:)\/\/([^@/#?]*@)?([^:/#?]*)(:\d+)?(\/[^#?]*)?(\?[^#]*)?(#.*)?/,fileRegex=/^file:(?:\/\/((?![a-z]:)[^/#?]*)?)?(\/?[^#?]*)(\?[^#]*)?(#.*)?/i;var UrlType;function isAbsoluteUrl(input){return schemeRegex.test(input)}function isSchemeRelativeUrl(input){return input.startsWith("//")}function isAbsolutePath(input){return input.startsWith("/")}function isFileUrl(input){return input.startsWith("file:")}function isRelative(input){return/^[.?#]/.test(input)}function parseAbsoluteUrl(input){const match=urlRegex.exec(input);return 
makeUrl(match[1],match[2]||"",match[3],match[4]||"",match[5]||"/",match[6]||"",match[7]||"")}function parseFileUrl(input){const match=fileRegex.exec(input),path=match[2];return makeUrl("file:","",match[1]||"","",isAbsolutePath(path)?path:"/"+path,match[3]||"",match[4]||"")}function makeUrl(scheme,user,host,port,path,query,hash){return{scheme,user,host,port,path,query,hash,type:UrlType.Absolute}}function parseUrl(input){if(isSchemeRelativeUrl(input)){const url=parseAbsoluteUrl("http:"+input);return url.scheme="",url.type=UrlType.SchemeRelative,url}if(isAbsolutePath(input)){const url=parseAbsoluteUrl("http://foo.com"+input);return url.scheme="",url.host="",url.type=UrlType.AbsolutePath,url}if(isFileUrl(input))return parseFileUrl(input);if(isAbsoluteUrl(input))return parseAbsoluteUrl(input);const url=parseAbsoluteUrl("http://foo.com/"+input);return url.scheme="",url.host="",url.type=input?input.startsWith("?")?UrlType.Query:input.startsWith("#")?UrlType.Hash:UrlType.RelativePath:UrlType.Empty,url}function stripPathFilename(path){if(path.endsWith("/.."))return path;const index=path.lastIndexOf("/");return path.slice(0,index+1)}function mergePaths(url,base){normalizePath(base,base.type),"/"===url.path?url.path=base.path:url.path=stripPathFilename(base.path)+url.path}function normalizePath(url,type){const rel=type<=UrlType.RelativePath,pieces=url.path.split("/");let pointer=1,positive=0,addTrailingSlash=!1;for(let i=1;iinputType&&(inputType=baseType)}normalizePath(url,inputType);const queryHash=url.query+url.hash;switch(inputType){case UrlType.Hash:case UrlType.Query:return queryHash;case UrlType.RelativePath:{const path=url.path.slice(1);return path?isRelative(base||input)&&!isRelative(path)?"./"+path+queryHash:path+queryHash:queryHash||"."}case UrlType.AbsolutePath:return url.path+queryHash;default:return url.scheme+"//"+url.user+url.host+url.port+url.path+queryHash}}return function(UrlType){UrlType[UrlType.Empty=1]="Empty",UrlType[UrlType.Hash=2]="Hash",UrlType[UrlType.Query=3]="Query",UrlType[UrlType.RelativePath=4]="RelativePath",UrlType[UrlType.AbsolutePath=5]="AbsolutePath",UrlType[UrlType.SchemeRelative=6]="SchemeRelative",UrlType[UrlType.Absolute=7]="Absolute"}(UrlType||(UrlType={})),resolve}()},"./node_modules/.pnpm/@jridgewell+set-array@1.1.2/node_modules/@jridgewell/set-array/dist/set-array.umd.js":function(__unused_webpack_module,exports){!function(exports){"use strict";exports.get=void 0,exports.put=void 0,exports.pop=void 0;class SetArray{constructor(){this._indexes={__proto__:null},this.array=[]}}exports.get=(strarr,key)=>strarr._indexes[key],exports.put=(strarr,key)=>{const index=exports.get(strarr,key);if(void 0!==index)return index;const{array,_indexes:indexes}=strarr;return indexes[key]=array.push(key)-1},exports.pop=strarr=>{const{array,_indexes:indexes}=strarr;0!==array.length&&(indexes[array.pop()]=void 0)},exports.SetArray=SetArray,Object.defineProperty(exports,"__esModule",{value:!0})}(exports)},"./node_modules/.pnpm/@jridgewell+sourcemap-codec@1.4.14/node_modules/@jridgewell/sourcemap-codec/dist/sourcemap-codec.umd.js":function(__unused_webpack_module,exports){!function(exports){"use strict";const comma=",".charCodeAt(0),semicolon=";".charCodeAt(0),chars="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/",intToChar=new Uint8Array(64),charToInt=new Uint8Array(128);for(let i=0;iBuffer.from(buf.buffer,buf.byteOffset,buf.byteLength).toString()}:{decode(buf){let out="";for(let i=0;i>>=1,shouldNegate&&(value=-2147483648|-value),state[j]+=value,pos}function 
hasMoreVlq(mappings,i,length){return!(i>=length)&&mappings.charCodeAt(i)!==comma}function sort(line){line.sort(sortComparator)}function sortComparator(a,b){return a[0]-b[0]}function encode(decoded){const state=new Int32Array(5),bufLength=16384,subLength=bufLength-36,buf=new Uint8Array(bufLength),sub=buf.subarray(0,subLength);let pos=0,out="";for(let i=0;i0&&(pos===bufLength&&(out+=td.decode(buf),pos=0),buf[pos++]=semicolon),0!==line.length){state[0]=0;for(let j=0;jsubLength&&(out+=td.decode(sub),buf.copyWithin(0,subLength,pos),pos-=subLength),j>0&&(buf[pos++]=comma),pos=encodeInteger(buf,pos,state,segment,0),1!==segment.length&&(pos=encodeInteger(buf,pos,state,segment,1),pos=encodeInteger(buf,pos,state,segment,2),pos=encodeInteger(buf,pos,state,segment,3),4!==segment.length&&(pos=encodeInteger(buf,pos,state,segment,4)))}}}return out+td.decode(buf.subarray(0,pos))}function encodeInteger(buf,pos,state,segment,j){const next=segment[j];let num=next-state[j];state[j]=next,num=num<0?-num<<1|1:num<<1;do{let clamped=31#num>>>=5,num>0&&(clamped|=32),buf[pos++]=intToChar[clamped]}while(num>0);return pos}exports.decode=decode,exports.encode=encode,Object.defineProperty(exports,"__esModule",{value:!0})}(exports)},"./node_modules/.pnpm/@jridgewell+trace-mapping@0.3.17/node_modules/@jridgewell/trace-mapping/dist/trace-mapping.umd.js":function(__unused_webpack_module,exports,__webpack_require__){!function(exports,sourcemapCodec,resolveUri){"use strict";function _interopDefaultLegacy(e){return e&&"object"==typeof e&&"default"in e?e:{default:e}}var resolveUri__default=_interopDefaultLegacy(resolveUri);function resolve(input,base){return base&&!base.endsWith("/")&&(base+="/"),resolveUri__default.default(input,base)}function stripFilename(path){if(!path)return"";const index=path.lastIndexOf("/");return path.slice(0,index+1)}const COLUMN=0,SOURCES_INDEX=1,SOURCE_LINE=2,SOURCE_COLUMN=3,NAMES_INDEX=4,REV_GENERATED_LINE=1,REV_GENERATED_COLUMN=2;function maybeSort(mappings,owned){const unsortedIndex=nextUnsortedSegmentLine(mappings,0);if(unsortedIndex===mappings.length)return mappings;owned||(mappings=mappings.slice());for(let i=unsortedIndex;i>1),cmp=haystack[mid][COLUMN]-needle;if(0===cmp)return found=!0,mid;cmp<0?low=mid+1:high=mid-1}return found=!1,low-1}function upperBound(haystack,needle,index){for(let i=index+1;i=0&&haystack[i][COLUMN]===needle;index=i--);return index}function memoizedState(){return{lastKey:-1,lastNeedle:-1,lastIndex:-1}}function memoizedBinarySearch(haystack,needle,state,key){const{lastKey,lastNeedle,lastIndex}=state;let low=0,high=haystack.length-1;if(key===lastKey){if(needle===lastNeedle)return found=-1!==lastIndex&&haystack[lastIndex][COLUMN]===needle,lastIndex;needle>=lastNeedle?low=-1===lastIndex?0:lastIndex:high=lastIndex}return state.lastKey=key,state.lastNeedle=needle,state.lastIndex=binarySearch(haystack,needle,low,high)}function buildBySources(decoded,memos){const sources=memos.map(buildNullArray);for(let i=0;iindex;i--)array[i]=array[i-1];array[index]=value}function buildNullArray(){return{__proto__:null}}const AnyMap=function(map,mapUrl){const parsed="string"==typeof map?JSON.parse(map):map;if(!("sections"in parsed))return new TraceMap(parsed,mapUrl);const mappings=[],sources=[],sourcesContent=[],names=[];recurse(parsed,mapUrl,mappings,sources,sourcesContent,names,0,0,1/0,1/0);const joined={version:3,file:parsed.file,names,sources,sourcesContent,mappings};return exports.presortedDecodedMap(joined)};function 
recurse(input,mapUrl,mappings,sources,sourcesContent,names,lineOffset,columnOffset,stopLine,stopColumn){const{sections}=input;for(let i=0;istopLine)return;const out=getLine(mappings,lineI),cOffset=0===i?columnOffset:0,line=decoded[i];for(let j=0;j=stopColumn)return;if(1===seg.length){out.push([column]);continue}const sourcesIndex=sourcesOffset+seg[SOURCES_INDEX],sourceLine=seg[SOURCE_LINE],sourceColumn=seg[SOURCE_COLUMN];out.push(4===seg.length?[column,sourcesIndex,sourceLine,sourceColumn]:[column,sourcesIndex,sourceLine,sourceColumn,namesOffset+seg[NAMES_INDEX]])}}}function append(arr,other){for(let i=0;iresolve(s||"",from)));const{mappings}=parsed;"string"==typeof mappings?(this._encoded=mappings,this._decoded=void 0):(this._encoded=void 0,this._decoded=maybeSort(mappings,isString)),this._decodedMemo=memoizedState(),this._bySources=void 0,this._bySourceMemos=void 0}}function clone(map,mappings){return{version:map.version,file:map.file,names:map.names,sourceRoot:map.sourceRoot,sources:map.sources,sourcesContent:map.sourcesContent,mappings}}function OMapping(source,line,column,name){return{source,line,column,name}}function GMapping(line,column){return{line,column}}function traceSegmentInternal(segments,memo,line,column,bias){let index=memoizedBinarySearch(segments,column,memo,line);return found?index=(bias===LEAST_UPPER_BOUND?upperBound:lowerBound)(segments,column,index):bias===LEAST_UPPER_BOUND&&index++,-1===index||index===segments.length?-1:index}function sliceGeneratedPositions(segments,memo,line,column,bias){let min=traceSegmentInternal(segments,memo,line,column,GREATEST_LOWER_BOUND);if(found||bias!==LEAST_UPPER_BOUND||min++,-1===min||min===segments.length)return[];const matchedColumn=found?column:segments[min][COLUMN];found||(min=lowerBound(segments,matchedColumn,min));const max=upperBound(segments,matchedColumn,min),result=[];for(;min<=max;min++){const segment=segments[min];result.push(GMapping(segment[REV_GENERATED_LINE]+1,segment[REV_GENERATED_COLUMN]))}return result}(()=>{function generatedPosition(map,source,line,column,bias,all){if(--line<0)throw new Error(LINE_GTR_ZERO);if(column<0)throw new Error(COL_GTR_EQ_ZERO);const{sources,resolvedSources}=map;let sourceIndex=sources.indexOf(source);if(-1===sourceIndex&&(sourceIndex=resolvedSources.indexOf(source)),-1===sourceIndex)return all?[]:GMapping(null,null);const segments=(map._bySources||(map._bySources=buildBySources(exports.decodedMappings(map),map._bySourceMemos=sources.map(memoizedState))))[sourceIndex][line];if(null==segments)return all?[]:GMapping(null,null);const memo=map._bySourceMemos[sourceIndex];if(all)return sliceGeneratedPositions(segments,memo,line,column,bias);const index=traceSegmentInternal(segments,memo,line,column,bias);if(-1===index)return GMapping(null,null);const segment=segments[index];return GMapping(segment[REV_GENERATED_LINE]+1,segment[REV_GENERATED_COLUMN])}exports.encodedMappings=map=>{var _a;return null!==(_a=map._encoded)&&void 0!==_a?_a:map._encoded=sourcemapCodec.encode(map._decoded)},exports.decodedMappings=map=>map._decoded||(map._decoded=sourcemapCodec.decode(map._encoded)),exports.traceSegment=(map,line,column)=>{const decoded=exports.decodedMappings(map);if(line>=decoded.length)return null;const segments=decoded[line],index=traceSegmentInternal(segments,map._decodedMemo,line,column,GREATEST_LOWER_BOUND);return-1===index?null:segments[index]},exports.originalPositionFor=(map,{line,column,bias})=>{if(--line<0)throw new Error(LINE_GTR_ZERO);if(column<0)throw new Error(COL_GTR_EQ_ZERO);const 
decoded=exports.decodedMappings(map);if(line>=decoded.length)return OMapping(null,null,null,null);const segments=decoded[line],index=traceSegmentInternal(segments,map._decodedMemo,line,column,bias||GREATEST_LOWER_BOUND);if(-1===index)return OMapping(null,null,null,null);const segment=segments[index];if(1===segment.length)return OMapping(null,null,null,null);const{names,resolvedSources}=map;return OMapping(resolvedSources[segment[SOURCES_INDEX]],segment[SOURCE_LINE]+1,segment[SOURCE_COLUMN],5===segment.length?names[segment[NAMES_INDEX]]:null)},exports.allGeneratedPositionsFor=(map,{source,line,column,bias})=>generatedPosition(map,source,line,column,bias||LEAST_UPPER_BOUND,!0),exports.generatedPositionFor=(map,{source,line,column,bias})=>generatedPosition(map,source,line,column,bias||GREATEST_LOWER_BOUND,!1),exports.eachMapping=(map,cb)=>{const decoded=exports.decodedMappings(map),{names,resolvedSources}=map;for(let i=0;i{const{sources,resolvedSources,sourcesContent}=map;if(null==sourcesContent)return null;let index=sources.indexOf(source);return-1===index&&(index=resolvedSources.indexOf(source)),-1===index?null:sourcesContent[index]},exports.presortedDecodedMap=(map,mapUrl)=>{const tracer=new TraceMap(clone(map,[]),mapUrl);return tracer._decoded=map.mappings,tracer},exports.decodedMap=map=>clone(map,exports.decodedMappings(map)),exports.encodedMap=map=>clone(map,exports.encodedMappings(map))})(),exports.AnyMap=AnyMap,exports.GREATEST_LOWER_BOUND=GREATEST_LOWER_BOUND,exports.LEAST_UPPER_BOUND=LEAST_UPPER_BOUND,exports.TraceMap=TraceMap,Object.defineProperty(exports,"__esModule",{value:!0})}(exports,__webpack_require__("./node_modules/.pnpm/@jridgewell+sourcemap-codec@1.4.14/node_modules/@jridgewell/sourcemap-codec/dist/sourcemap-codec.umd.js"),__webpack_require__("./node_modules/.pnpm/@jridgewell+resolve-uri@3.1.0/node_modules/@jridgewell/resolve-uri/dist/resolve-uri.umd.js"))},"./node_modules/.pnpm/babel-plugin-dynamic-import-node@2.3.3/node_modules/babel-plugin-dynamic-import-node/lib/index.js":(module,exports,__webpack_require__)=>{"use strict";Object.defineProperty(exports,"__esModule",{value:!0}),exports.default=function(api){var transformImport=(0,_utils.createDynamicImportTransform)(api);return{manipulateOptions:function(opts,parserOpts){parserOpts.plugins.push("dynamicImport")},visitor:{Import:function(path){transformImport(this,path)}}}};var _utils=__webpack_require__("./node_modules/.pnpm/babel-plugin-dynamic-import-node@2.3.3/node_modules/babel-plugin-dynamic-import-node/lib/utils.js");module.exports=exports.default},"./node_modules/.pnpm/babel-plugin-dynamic-import-node@2.3.3/node_modules/babel-plugin-dynamic-import-node/lib/utils.js":(__unused_webpack_module,exports)=>{"use strict";Object.defineProperty(exports,"__esModule",{value:!0});var _slicedToArray=function(arr,i){if(Array.isArray(arr))return arr;if(Symbol.iterator in Object(arr))return function(arr,i){var _arr=[],_n=!0,_d=!1,_e=void 0;try{for(var _s,_i=arr[Symbol.iterator]();!(_n=(_s=_i.next()).done)&&(_arr.push(_s.value),!i||_arr.length!==i);_n=!0);}catch(err){_d=!0,_e=err}finally{try{!_n&&_i.return&&_i.return()}finally{if(_d)throw _e}}return _arr}(arr,i);throw new TypeError("Invalid attempt to destructure non-iterable instance")};function getImportSource(t,callNode){var importArguments=callNode.arguments,importPath=_slicedToArray(importArguments,1)[0];return 
t.isStringLiteral(importPath)||t.isTemplateLiteral(importPath)?(t.removeComments(importPath),importPath):t.templateLiteral([t.templateElement({raw:"",cooked:""}),t.templateElement({raw:"",cooked:""},!0)],importArguments)}exports.getImportSource=getImportSource,exports.createDynamicImportTransform=function(_ref){var template=_ref.template,t=_ref.types,builders={static:{interop:template("Promise.resolve().then(() => INTEROP(require(SOURCE)))"),noInterop:template("Promise.resolve().then(() => require(SOURCE))")},dynamic:{interop:template("Promise.resolve(SOURCE).then(s => INTEROP(require(s)))"),noInterop:template("Promise.resolve(SOURCE).then(s => require(s))")}},visited="function"==typeof WeakSet&&new WeakSet;return function(context,path){if(visited){if(visited.has(path))return;visited.add(path)}var node,SOURCE=getImportSource(t,path.parent),builder=(node=SOURCE,t.isStringLiteral(node)||t.isTemplateLiteral(node)&&0===node.expressions.length?builders.static:builders.dynamic),newImport=context.opts.noInterop?builder.noInterop({SOURCE}):builder.interop({SOURCE,INTEROP:context.addHelper("interopRequireWildcard")});path.parentPath.replaceWith(newImport)}}},"./node_modules/.pnpm/babel-plugin-parameter-decorator@1.0.16/node_modules/babel-plugin-parameter-decorator/lib/index.js":(module,__unused_webpack_exports,__webpack_require__)=>{"use strict";var _path=__webpack_require__("path");function isInType(path){switch(path.parent.type){case"TSTypeReference":case"TSQualifiedName":case"TSExpressionWithTypeArguments":case"TSTypeQuery":return!0;default:return!1}}module.exports=function(_ref){var types=_ref.types,decoratorExpressionForConstructor=function(decorator,param){return function(className){var resultantDecorator=types.callExpression(decorator.expression,[types.Identifier(className),types.Identifier("undefined"),types.NumericLiteral(param.key)]),resultantDecoratorWithFallback=types.logicalExpression("||",resultantDecorator,types.Identifier(className)),assignment=types.assignmentExpression("=",types.Identifier(className),resultantDecoratorWithFallback);return types.expressionStatement(assignment)}},decoratorExpressionForMethod=function(decorator,param){return function(className,functionName){var resultantDecorator=types.callExpression(decorator.expression,[types.Identifier("".concat(className,".prototype")),types.StringLiteral(functionName),types.NumericLiteral(param.key)]);return types.expressionStatement(resultantDecorator)}};return{visitor:{Program:function(path,state){var extension=(0,_path.extname)(state.file.opts.filename);".ts"!==extension&&".tsx"!==extension||function(){var decorators=Object.create(null);path.node.body.filter((function(it){var type=it.type,declaration=it.declaration;switch(type){case"ClassDeclaration":return!0;case"ExportNamedDeclaration":case"ExportDefaultDeclaration":return declaration&&"ClassDeclaration"===declaration.type;default:return!1}})).map((function(it){return"ClassDeclaration"===it.type?it:it.declaration})).forEach((function(clazz){clazz.body.body.forEach((function(body){(body.params||[]).forEach((function(param){(param.decorators||[]).forEach((function(decorator){decorator.expression.callee?decorators[decorator.expression.callee.name]=decorator:decorators[decorator.expression.name]=decorator}))}))}))}));var _iteratorNormalCompletion=!0,_didIteratorError=!1,_iteratorError=void 0;try{for(var _step,_iterator=path.get("body")[Symbol.iterator]();!(_iteratorNormalCompletion=(_step=_iterator.next()).done);_iteratorNormalCompletion=!0){var 
stmt=_step.value;if("ImportDeclaration"===stmt.node.type){if(0===stmt.node.specifiers.length)continue;var _iteratorNormalCompletion2=!0,_didIteratorError2=!1,_iteratorError2=void 0;try{for(var _step2,_loop=function(){var specifier=_step2.value,binding=stmt.scope.getBinding(specifier.local.name);binding.referencePaths.length?binding.referencePaths.reduce((function(prev,next){return prev||isInType(next)}),!1)&&Object.keys(decorators).forEach((function(k){var decorator=decorators[k];(decorator.expression.arguments||[]).forEach((function(arg){arg.name===specifier.local.name&&binding.referencePaths.push({parent:decorator.expression})}))})):decorators[specifier.local.name]&&binding.referencePaths.push({parent:decorators[specifier.local.name]})},_iterator2=stmt.node.specifiers[Symbol.iterator]();!(_iteratorNormalCompletion2=(_step2=_iterator2.next()).done);_iteratorNormalCompletion2=!0)_loop()}catch(err){_didIteratorError2=!0,_iteratorError2=err}finally{try{_iteratorNormalCompletion2||null==_iterator2.return||_iterator2.return()}finally{if(_didIteratorError2)throw _iteratorError2}}}}}catch(err){_didIteratorError=!0,_iteratorError=err}finally{try{_iteratorNormalCompletion||null==_iterator.return||_iterator.return()}finally{if(_didIteratorError)throw _iteratorError}}}()},Function:function(path){var functionName="";path.node.id?functionName=path.node.id.name:path.node.key&&(functionName=path.node.key.name),(path.get("params")||[]).slice().forEach((function(param){var decorators=param.node.decorators||[],transformable=decorators.length;if(decorators.slice().forEach((function(decorator){if("ClassMethod"===path.type){var classIdentifier,parentNode=path.parentPath.parentPath,classDeclaration=path.findParent((function(p){return"ClassDeclaration"===p.type}));if(classDeclaration?classIdentifier=classDeclaration.node.id.name:(parentNode.insertAfter(null),classIdentifier=function(path){var assignment=path.findParent((function(p){return"AssignmentExpression"===p.node.type}));return"SequenceExpression"===assignment.node.right.type?assignment.node.right.expressions[1].name:"ClassExpression"===assignment.node.right.type?assignment.node.left.name:null}(path)),"constructor"===functionName){var expression=decoratorExpressionForConstructor(decorator,param)(classIdentifier);parentNode.insertAfter(expression)}else{var _expression=decoratorExpressionForMethod(decorator,param)(classIdentifier,functionName);parentNode.insertAfter(_expression)}}else{var className=path.findParent((function(p){return"VariableDeclarator"===p.node.type})).node.id.name;if(functionName===className){var _expression2=decoratorExpressionForConstructor(decorator,param)(className);if("body"===path.parentKey)path.insertAfter(_expression2);else path.findParent((function(p){return"body"===p.parentKey})).insertAfter(_expression2)}else{var classParent=path.findParent((function(p){return"CallExpression"===p.node.type})),_expression3=decoratorExpressionForMethod(decorator,param)(className,functionName);classParent.insertAfter(_expression3)}}})),transformable){var replacement=function(path){switch(path.node.type){case"ObjectPattern":return types.ObjectPattern(path.node.properties);case"AssignmentPattern":return types.AssignmentPattern(path.node.left,path.node.right);case"TSParameterProperty":return types.Identifier(path.node.parameter.name);default:return 
types.Identifier(path.node.name)}}(param);param.replaceWith(replacement)}}))}}}}},"./node_modules/.pnpm/babel-plugin-transform-typescript-metadata@0.3.2/node_modules/babel-plugin-transform-typescript-metadata/lib/metadata/metadataVisitor.js":(__unused_webpack_module,exports,__webpack_require__)=>{"use strict";Object.defineProperty(exports,"__esModule",{value:!0}),exports.metadataVisitor=function(classPath,path){const field=path.node,classNode=classPath.node;switch(field.type){case"ClassMethod":const decorators="constructor"===field.kind?classNode.decorators:field.decorators;if(!decorators||0===decorators.length)return;decorators.push(createMetadataDesignDecorator("design:type",_core.types.identifier("Function"))),decorators.push(createMetadataDesignDecorator("design:paramtypes",_core.types.arrayExpression(field.params.map((param=>(0,_serializeType.serializeType)(classPath,param))))));break;case"ClassProperty":if(!field.decorators||0===field.decorators.length)return;if(!field.typeAnnotation||"TSTypeAnnotation"!==field.typeAnnotation.type)return;field.decorators.push(createMetadataDesignDecorator("design:type",(0,_serializeType.serializeType)(classPath,field)))}};var _core=__webpack_require__("./node_modules/.pnpm/@babel+core@7.21.3/node_modules/@babel/core/lib/index.js"),_serializeType=__webpack_require__("./node_modules/.pnpm/babel-plugin-transform-typescript-metadata@0.3.2/node_modules/babel-plugin-transform-typescript-metadata/lib/metadata/serializeType.js");function createMetadataDesignDecorator(design,typeArg){return _core.types.decorator(_core.types.callExpression(_core.types.memberExpression(_core.types.identifier("Reflect"),_core.types.identifier("metadata")),[_core.types.stringLiteral(design),typeArg]))}},"./node_modules/.pnpm/babel-plugin-transform-typescript-metadata@0.3.2/node_modules/babel-plugin-transform-typescript-metadata/lib/metadata/serializeType.js":(__unused_webpack_module,exports,__webpack_require__)=>{"use strict";Object.defineProperty(exports,"__esModule",{value:!0}),exports.serializeType=function(classPath,param){const node=getTypedNode(param);if(null==node)return createVoidZero();if(!node.typeAnnotation||"TSTypeAnnotation"!==node.typeAnnotation.type)return createVoidZero();const annotation=node.typeAnnotation.typeAnnotation;return serializeTypeNode(classPath.node.id?classPath.node.id.name:"",annotation)},exports.isClassType=isClassType;var _core=__webpack_require__("./node_modules/.pnpm/@babel+core@7.21.3/node_modules/@babel/core/lib/index.js");function createVoidZero(){return _core.types.unaryExpression("void",_core.types.numericLiteral(0))}function getTypedNode(param){return null==param?null:"ClassProperty"===param.type||"Identifier"===param.type||"ObjectPattern"===param.type?param:"AssignmentPattern"===param.type&&"Identifier"===param.left.type?param.left:"TSParameterProperty"===param.type?getTypedNode(param.parameter):null}function serializeTypeReferenceNode(className,node){const reference=serializeReference(node.typeName);return isClassType(className,reference)?_core.types.identifier("Object"):_core.types.conditionalExpression(_core.types.binaryExpression("===",_core.types.unaryExpression("typeof",reference),_core.types.stringLiteral("undefined")),_core.types.identifier("Object"),_core.types.cloneDeep(reference))}function isClassType(className,node){switch(node.type){case"Identifier":return node.name===className;case"MemberExpression":return isClassType(className,node.object);default:throw new Error(`The property expression at ${node.start} is not valid as a 
Type to be used in Reflect.metadata`)}}function serializeReference(typeName){return"Identifier"===typeName.type?_core.types.identifier(typeName.name):_core.types.memberExpression(serializeReference(typeName.left),typeName.right)}function serializeTypeNode(className,node){if(void 0===node)return _core.types.identifier("Object");switch(node.type){case"TSVoidKeyword":case"TSUndefinedKeyword":case"TSNullKeyword":case"TSNeverKeyword":return createVoidZero();case"TSParenthesizedType":return serializeTypeNode(className,node.typeAnnotation);case"TSFunctionType":case"TSConstructorType":return _core.types.identifier("Function");case"TSArrayType":case"TSTupleType":return _core.types.identifier("Array");case"TSTypePredicate":case"TSBooleanKeyword":return _core.types.identifier("Boolean");case"TSStringKeyword":return _core.types.identifier("String");case"TSObjectKeyword":return _core.types.identifier("Object");case"TSLiteralType":switch(node.literal.type){case"StringLiteral":return _core.types.identifier("String");case"NumericLiteral":return _core.types.identifier("Number");case"BooleanLiteral":return _core.types.identifier("Boolean");default:throw new Error("Bad type for decorator"+node.literal)}case"TSNumberKeyword":case"TSBigIntKeyword":return _core.types.identifier("Number");case"TSSymbolKeyword":return _core.types.identifier("Symbol");case"TSTypeReference":return serializeTypeReferenceNode(className,node);case"TSIntersectionType":case"TSUnionType":return serializeTypeList(className,node.types);case"TSConditionalType":return serializeTypeList(className,[node.trueType,node.falseType]);case"TSTypeQuery":case"TSTypeOperator":case"TSIndexedAccessType":case"TSMappedType":case"TSTypeLiteral":case"TSAnyKeyword":case"TSUnknownKeyword":case"TSThisType":break;default:throw new Error("Bad type for decorator")}return _core.types.identifier("Object")}function serializeTypeList(className,types){let serializedUnion;for(let typeNode of types){for(;"TSParenthesizedType"===typeNode.type;)typeNode=typeNode.typeAnnotation;if("TSNeverKeyword"===typeNode.type)continue;if("TSNullKeyword"===typeNode.type||"TSUndefinedKeyword"===typeNode.type)continue;const serializedIndividual=serializeTypeNode(className,typeNode);if(_core.types.isIdentifier(serializedIndividual)&&"Object"===serializedIndividual.name)return serializedIndividual;if(serializedUnion){if(!_core.types.isIdentifier(serializedUnion)||!_core.types.isIdentifier(serializedIndividual)||serializedUnion.name!==serializedIndividual.name)return _core.types.identifier("Object")}else serializedUnion=serializedIndividual}return serializedUnion||createVoidZero()}},"./node_modules/.pnpm/babel-plugin-transform-typescript-metadata@0.3.2/node_modules/babel-plugin-transform-typescript-metadata/lib/parameter/parameterVisitor.js":(__unused_webpack_module,exports,__webpack_require__)=>{"use strict";Object.defineProperty(exports,"__esModule",{value:!0}),exports.parameterVisitor=function(classPath,path){if("ClassMethod"!==path.type)return;if("ClassMethod"!==path.node.type)return;if("Identifier"!==path.node.key.type)return;const methodPath=path;(methodPath.get("params")||[]).slice().forEach((function(param){let 
resultantDecorator;null!=("Identifier"===param.node.type||"ObjectPattern"===param.node.type?param.node:"TSParameterProperty"===param.node.type&&"Identifier"===param.node.parameter.type?param.node.parameter:null)&&((param.node.decorators||[]).slice().forEach((function(decorator){"constructor"===methodPath.node.kind?(resultantDecorator=createParamDecorator(param.key,decorator.expression,!0),classPath.node.decorators||(classPath.node.decorators=[]),classPath.node.decorators.push(resultantDecorator)):(resultantDecorator=createParamDecorator(param.key,decorator.expression,!1),methodPath.node.decorators||(methodPath.node.decorators=[]),methodPath.node.decorators.push(resultantDecorator))})),resultantDecorator&&(param.node.decorators=null))}))};var _core=__webpack_require__("./node_modules/.pnpm/@babel+core@7.21.3/node_modules/@babel/core/lib/index.js");function createParamDecorator(paramIndex,decoratorExpression,isConstructor=!1){return _core.types.decorator(_core.types.functionExpression(null,[_core.types.identifier("target"),_core.types.identifier("key")],_core.types.blockStatement([_core.types.returnStatement(_core.types.callExpression(decoratorExpression,[_core.types.identifier("target"),_core.types.identifier(isConstructor?"undefined":"key"),_core.types.numericLiteral(paramIndex)]))])))}},"./node_modules/.pnpm/babel-plugin-transform-typescript-metadata@0.3.2/node_modules/babel-plugin-transform-typescript-metadata/lib/plugin.js":(__unused_webpack_module,exports,__webpack_require__)=>{"use strict";Object.defineProperty(exports,"__esModule",{value:!0}),exports.default=void 0;var _helperPluginUtils=__webpack_require__("./node_modules/.pnpm/@babel+helper-plugin-utils@7.20.2/node_modules/@babel/helper-plugin-utils/lib/index.js"),_parameterVisitor=__webpack_require__("./node_modules/.pnpm/babel-plugin-transform-typescript-metadata@0.3.2/node_modules/babel-plugin-transform-typescript-metadata/lib/parameter/parameterVisitor.js"),_metadataVisitor=__webpack_require__("./node_modules/.pnpm/babel-plugin-transform-typescript-metadata@0.3.2/node_modules/babel-plugin-transform-typescript-metadata/lib/metadata/metadataVisitor.js"),_default=(0,_helperPluginUtils.declare)((api=>(api.assertVersion(7),{visitor:{Program(programPath){programPath.traverse({ClassDeclaration(path){for(const field of path.get("body").get("body"))"ClassMethod"!==field.type&&"ClassProperty"!==field.type||((0,_parameterVisitor.parameterVisitor)(path,field),(0,_metadataVisitor.metadataVisitor)(path,field));path.parentPath.scope.crawl()}})}}})));exports.default=_default},"./node_modules/.pnpm/convert-source-map@1.9.0/node_modules/convert-source-map/index.js":(__unused_webpack_module,exports,__webpack_require__)=>{"use strict";var decodeBase64,fs=__webpack_require__("fs"),path=__webpack_require__("path");function Converter(sm,opts){(opts=opts||{}).isFileComment&&(sm=function(sm,dir){var r=exports.mapFileCommentRegex.exec(sm),filename=r[1]||r[2],filepath=path.resolve(dir,filename);try{return fs.readFileSync(filepath,"utf8")}catch(e){throw new Error("An error occurred while trying to read the map file at "+filepath+"\n"+e)}}(sm,opts.commentFileDir)),opts.hasComment&&(sm=function(sm){return 
sm.split(",").pop()}(sm)),opts.isEncoded&&(sm=decodeBase64(sm)),(opts.isJSON||opts.isEncoded)&&(sm=JSON.parse(sm)),this.sourcemap=sm}Object.defineProperty(exports,"commentRegex",{get:function(){return/^\s*\/(?:\/|\*)[@#]\s+sourceMappingURL=data:(?:application|text)\/json;(?:charset[:=]\S+?;)?base64,(?:.*)$/gm}}),Object.defineProperty(exports,"mapFileCommentRegex",{get:function(){return/(?:\/\/[@#][ \t]+sourceMappingURL=([^\s'"`]+?)[ \t]*$)|(?:\/\*[@#][ \t]+sourceMappingURL=([^\*]+?)[ \t]*(?:\*\/){1}[ \t]*$)/gm}}),decodeBase64="undefined"!=typeof Buffer?"function"==typeof Buffer.from?function(base64){return Buffer.from(base64,"base64").toString()}:function(base64){if("number"==typeof value)throw new TypeError("The value to decode must not be of type number.");return new Buffer(base64,"base64").toString()}:function(base64){return decodeURIComponent(escape(atob(base64)))},Converter.prototype.toJSON=function(space){return JSON.stringify(this.sourcemap,null,space)},"undefined"!=typeof Buffer?"function"==typeof Buffer.from?Converter.prototype.toBase64=function(){var json=this.toJSON();return Buffer.from(json,"utf8").toString("base64")}:Converter.prototype.toBase64=function(){var json=this.toJSON();if("number"==typeof json)throw new TypeError("The json to encode must not be of type number.");return new Buffer(json,"utf8").toString("base64")}:Converter.prototype.toBase64=function(){var json=this.toJSON();return btoa(unescape(encodeURIComponent(json)))},Converter.prototype.toComment=function(options){var data="sourceMappingURL=data:application/json;charset=utf-8;base64,"+this.toBase64();return options&&options.multiline?"/*# "+data+" */":"//# "+data},Converter.prototype.toObject=function(){return JSON.parse(this.toJSON())},Converter.prototype.addProperty=function(key,value){if(this.sourcemap.hasOwnProperty(key))throw new Error('property "'+key+'" already exists on the sourcemap, use set property instead');return this.setProperty(key,value)},Converter.prototype.setProperty=function(key,value){return this.sourcemap[key]=value,this},Converter.prototype.getProperty=function(key){return this.sourcemap[key]},exports.fromObject=function(obj){return new Converter(obj)},exports.fromJSON=function(json){return new Converter(json,{isJSON:!0})},exports.fromBase64=function(base64){return new Converter(base64,{isEncoded:!0})},exports.fromComment=function(comment){return new Converter(comment=comment.replace(/^\/\*/g,"//").replace(/\*\/$/g,""),{isEncoded:!0,hasComment:!0})},exports.fromMapFileComment=function(comment,dir){return new Converter(comment,{commentFileDir:dir,isFileComment:!0,isJSON:!0})},exports.fromSource=function(content){var m=content.match(exports.commentRegex);return m?exports.fromComment(m.pop()):null},exports.fromMapFileSource=function(content,dir){var m=content.match(exports.mapFileCommentRegex);return m?exports.fromMapFileComment(m.pop(),dir):null},exports.removeComments=function(src){return src.replace(exports.commentRegex,"")},exports.removeMapFileComments=function(src){return src.replace(exports.mapFileCommentRegex,"")},exports.generateMapFileComment=function(file,options){var data="sourceMappingURL="+file;return options&&options.multiline?"/*# "+data+" */":"//# "+data}},"./node_modules/.pnpm/debug@4.3.4/node_modules/debug/src/browser.js":(module,exports,__webpack_require__)=>{exports.formatArgs=function(args){if(args[0]=(this.useColors?"%c":"")+this.namespace+(this.useColors?" 
%c":" ")+args[0]+(this.useColors?"%c ":" ")+"+"+module.exports.humanize(this.diff),!this.useColors)return;const c="color: "+this.color;args.splice(1,0,c,"color: inherit");let index=0,lastC=0;args[0].replace(/%[a-zA-Z%]/g,(match=>{"%%"!==match&&(index++,"%c"===match&&(lastC=index))})),args.splice(lastC,0,c)},exports.save=function(namespaces){try{namespaces?exports.storage.setItem("debug",namespaces):exports.storage.removeItem("debug")}catch(error){}},exports.load=function(){let r;try{r=exports.storage.getItem("debug")}catch(error){}!r&&"undefined"!=typeof process&&"env"in process&&(r=process.env.DEBUG);return r},exports.useColors=function(){if("undefined"!=typeof window&&window.process&&("renderer"===window.process.type||window.process.__nwjs))return!0;if("undefined"!=typeof navigator&&navigator.userAgent&&navigator.userAgent.toLowerCase().match(/(edge|trident)\/(\d+)/))return!1;return"undefined"!=typeof document&&document.documentElement&&document.documentElement.style&&document.documentElement.style.WebkitAppearance||"undefined"!=typeof window&&window.console&&(window.console.firebug||window.console.exception&&window.console.table)||"undefined"!=typeof navigator&&navigator.userAgent&&navigator.userAgent.toLowerCase().match(/firefox\/(\d+)/)&&parseInt(RegExp.$1,10)>=31||"undefined"!=typeof navigator&&navigator.userAgent&&navigator.userAgent.toLowerCase().match(/applewebkit\/(\d+)/)},exports.storage=function(){try{return localStorage}catch(error){}}(),exports.destroy=(()=>{let warned=!1;return()=>{warned||(warned=!0,console.warn("Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`."))}})(),exports.colors=["#0000CC","#0000FF","#0033CC","#0033FF","#0066CC","#0066FF","#0099CC","#0099FF","#00CC00","#00CC33","#00CC66","#00CC99","#00CCCC","#00CCFF","#3300CC","#3300FF","#3333CC","#3333FF","#3366CC","#3366FF","#3399CC","#3399FF","#33CC00","#33CC33","#33CC66","#33CC99","#33CCCC","#33CCFF","#6600CC","#6600FF","#6633CC","#6633FF","#66CC00","#66CC33","#9900CC","#9900FF","#9933CC","#9933FF","#99CC00","#99CC33","#CC0000","#CC0033","#CC0066","#CC0099","#CC00CC","#CC00FF","#CC3300","#CC3333","#CC3366","#CC3399","#CC33CC","#CC33FF","#CC6600","#CC6633","#CC9900","#CC9933","#CCCC00","#CCCC33","#FF0000","#FF0033","#FF0066","#FF0099","#FF00CC","#FF00FF","#FF3300","#FF3333","#FF3366","#FF3399","#FF33CC","#FF33FF","#FF6600","#FF6633","#FF9900","#FF9933","#FFCC00","#FFCC33"],exports.log=console.debug||console.log||(()=>{}),module.exports=__webpack_require__("./node_modules/.pnpm/debug@4.3.4/node_modules/debug/src/common.js")(exports);const{formatters}=module.exports;formatters.j=function(v){try{return JSON.stringify(v)}catch(error){return"[UnexpectedJSONParseError]: "+error.message}}},"./node_modules/.pnpm/debug@4.3.4/node_modules/debug/src/common.js":(module,__unused_webpack_exports,__webpack_require__)=>{module.exports=function(env){function createDebug(namespace){let prevTime,namespacesCache,enabledCache,enableOverride=null;function debug(...args){if(!debug.enabled)return;const self=debug,curr=Number(new Date),ms=curr-(prevTime||curr);self.diff=ms,self.prev=prevTime,self.curr=curr,prevTime=curr,args[0]=createDebug.coerce(args[0]),"string"!=typeof args[0]&&args.unshift("%O");let index=0;args[0]=args[0].replace(/%([a-zA-Z%])/g,((match,format)=>{if("%%"===match)return"%";index++;const formatter=createDebug.formatters[format];if("function"==typeof formatter){const 
val=args[index];match=formatter.call(self,val),args.splice(index,1),index--}return match})),createDebug.formatArgs.call(self,args);(self.log||createDebug.log).apply(self,args)}return debug.namespace=namespace,debug.useColors=createDebug.useColors(),debug.color=createDebug.selectColor(namespace),debug.extend=extend,debug.destroy=createDebug.destroy,Object.defineProperty(debug,"enabled",{enumerable:!0,configurable:!1,get:()=>null!==enableOverride?enableOverride:(namespacesCache!==createDebug.namespaces&&(namespacesCache=createDebug.namespaces,enabledCache=createDebug.enabled(namespace)),enabledCache),set:v=>{enableOverride=v}}),"function"==typeof createDebug.init&&createDebug.init(debug),debug}function extend(namespace,delimiter){const newDebug=createDebug(this.namespace+(void 0===delimiter?":":delimiter)+namespace);return newDebug.log=this.log,newDebug}function toNamespace(regexp){return regexp.toString().substring(2,regexp.toString().length-2).replace(/\.\*\?$/,"*")}return createDebug.debug=createDebug,createDebug.default=createDebug,createDebug.coerce=function(val){if(val instanceof Error)return val.stack||val.message;return val},createDebug.disable=function(){const namespaces=[...createDebug.names.map(toNamespace),...createDebug.skips.map(toNamespace).map((namespace=>"-"+namespace))].join(",");return createDebug.enable(""),namespaces},createDebug.enable=function(namespaces){let i;createDebug.save(namespaces),createDebug.namespaces=namespaces,createDebug.names=[],createDebug.skips=[];const split=("string"==typeof namespaces?namespaces:"").split(/[\s,]+/),len=split.length;for(i=0;i{createDebug[key]=env[key]})),createDebug.names=[],createDebug.skips=[],createDebug.formatters={},createDebug.selectColor=function(namespace){let hash=0;for(let i=0;i{"undefined"==typeof process||"renderer"===process.type||!0===process.browser||process.__nwjs?module.exports=__webpack_require__("./node_modules/.pnpm/debug@4.3.4/node_modules/debug/src/browser.js"):module.exports=__webpack_require__("./node_modules/.pnpm/debug@4.3.4/node_modules/debug/src/node.js")},"./node_modules/.pnpm/debug@4.3.4/node_modules/debug/src/node.js":(module,exports,__webpack_require__)=>{const tty=__webpack_require__("tty"),util=__webpack_require__("util");exports.init=function(debug){debug.inspectOpts={};const keys=Object.keys(exports.inspectOpts);for(let i=0;i{}),"Instance method `debug.destroy()` is deprecated and no longer does anything. 
It will be removed in the next major version of `debug`."),exports.colors=[6,2,3,4,5,1];try{const supportsColor=__webpack_require__("./node_modules/.pnpm/supports-color@7.2.0/node_modules/supports-color/index.js");supportsColor&&(supportsColor.stderr||supportsColor).level>=2&&(exports.colors=[20,21,26,27,32,33,38,39,40,41,42,43,44,45,56,57,62,63,68,69,74,75,76,77,78,79,80,81,92,93,98,99,112,113,128,129,134,135,148,149,160,161,162,163,164,165,166,167,168,169,170,171,172,173,178,179,184,185,196,197,198,199,200,201,202,203,204,205,206,207,208,209,214,215,220,221])}catch(error){}exports.inspectOpts=Object.keys(process.env).filter((key=>/^debug_/i.test(key))).reduce(((obj,key)=>{const prop=key.substring(6).toLowerCase().replace(/_([a-z])/g,((_,k)=>k.toUpperCase()));let val=process.env[key];return val=!!/^(yes|on|true|enabled)$/i.test(val)||!/^(no|off|false|disabled)$/i.test(val)&&("null"===val?null:Number(val)),obj[prop]=val,obj}),{}),module.exports=__webpack_require__("./node_modules/.pnpm/debug@4.3.4/node_modules/debug/src/common.js")(exports);const{formatters}=module.exports;formatters.o=function(v){return this.inspectOpts.colors=this.useColors,util.inspect(v,this.inspectOpts).split("\n").map((str=>str.trim())).join(" ")},formatters.O=function(v){return this.inspectOpts.colors=this.useColors,util.inspect(v,this.inspectOpts)}},"./node_modules/.pnpm/gensync@1.0.0-beta.2/node_modules/gensync/index.js":module=>{"use strict";const GENSYNC_START=Symbol.for("gensync:v1:start"),GENSYNC_SUSPEND=Symbol.for("gensync:v1:suspend"),GENSYNC_EXPECTED_START="GENSYNC_EXPECTED_START",GENSYNC_EXPECTED_SUSPEND="GENSYNC_EXPECTED_SUSPEND",GENSYNC_OPTIONS_ERROR="GENSYNC_OPTIONS_ERROR",GENSYNC_ERRBACK_NO_CALLBACK="GENSYNC_ERRBACK_NO_CALLBACK";function assertTypeof(type,name,value,allowUndefined){if(typeof value===type||allowUndefined&&void 0===value)return;let msg;throw msg=allowUndefined?`Expected opts.${name} to be either a ${type}, or undefined.`:`Expected opts.${name} to be a ${type}.`,makeError(msg,GENSYNC_OPTIONS_ERROR)}function makeError(msg,code){return Object.assign(new Error(msg),{code})}function buildOperation({name,arity,sync,async}){return setFunctionMetadata(name,arity,(function*(...args){const resume=yield GENSYNC_START;if(!resume){return sync.call(this,args)}let result;try{async.call(this,args,(value=>{result||(result={value},resume())}),(err=>{result||(result={err},resume())}))}catch(err){result={err},resume()}if(yield GENSYNC_SUSPEND,result.hasOwnProperty("err"))throw result.err;return result.value}))}function evaluateSync(gen){let value;for(;!({value}=gen.next()).done;)assertStart(value,gen);return value}function evaluateAsync(gen,resolve,reject){!function step(){try{let value;for(;!({value}=gen.next()).done;){assertStart(value,gen);let sync=!0,didSyncResume=!1;const out=gen.next((()=>{sync?didSyncResume=!0:step()}));if(sync=!1,assertSuspend(out,gen),!didSyncResume)return}return resolve(value)}catch(err){return reject(err)}}()}function assertStart(value,gen){value!==GENSYNC_START&&throwError(gen,makeError(`Got unexpected yielded value in gensync generator: ${JSON.stringify(value)}. Did you perhaps mean to use 'yield*' instead of 'yield'?`,GENSYNC_EXPECTED_START))}function assertSuspend({value,done},gen){(done||value!==GENSYNC_SUSPEND)&&throwError(gen,makeError(done?"Unexpected generator completion. If you get this, it is probably a gensync bug.":`Expected GENSYNC_SUSPEND, got ${JSON.stringify(value)}. 
If you get this, it is probably a gensync bug.`,GENSYNC_EXPECTED_SUSPEND))}function throwError(gen,err){throw gen.throw&&gen.throw(err),err}function setFunctionMetadata(name,arity,fn){if("string"==typeof name){const nameDesc=Object.getOwnPropertyDescriptor(fn,"name");nameDesc&&!nameDesc.configurable||Object.defineProperty(fn,"name",Object.assign(nameDesc||{},{configurable:!0,value:name}))}if("number"==typeof arity){const lengthDesc=Object.getOwnPropertyDescriptor(fn,"length");lengthDesc&&!lengthDesc.configurable||Object.defineProperty(fn,"length",Object.assign(lengthDesc||{},{configurable:!0,value:arity}))}return fn}module.exports=Object.assign((function(optsOrFn){let genFn=optsOrFn;return genFn="function"!=typeof optsOrFn?function({name,arity,sync,async,errback}){if(assertTypeof("string","name",name,!0),assertTypeof("number","arity",arity,!0),assertTypeof("function","sync",sync),assertTypeof("function","async",async,!0),assertTypeof("function","errback",errback,!0),async&&errback)throw makeError("Expected one of either opts.async or opts.errback, but got _both_.",GENSYNC_OPTIONS_ERROR);if("string"!=typeof name){let fnName;errback&&errback.name&&"errback"!==errback.name&&(fnName=errback.name),async&&async.name&&"async"!==async.name&&(fnName=async.name.replace(/Async$/,"")),sync&&sync.name&&"sync"!==sync.name&&(fnName=sync.name.replace(/Sync$/,"")),"string"==typeof fnName&&(name=fnName)}"number"!=typeof arity&&(arity=sync.length);return buildOperation({name,arity,sync:function(args){return sync.apply(this,args)},async:function(args,resolve,reject){async?async.apply(this,args).then(resolve,reject):errback?errback.call(this,...args,((err,value)=>{null==err?resolve(value):reject(err)})):resolve(sync.apply(this,args))}})}(optsOrFn):function(genFn){return setFunctionMetadata(genFn.name,genFn.length,(function(...args){return genFn.apply(this,args)}))}(optsOrFn),Object.assign(genFn,function(genFn){const fns={sync:function(...args){return evaluateSync(genFn.apply(this,args))},async:function(...args){return new Promise(((resolve,reject)=>{evaluateAsync(genFn.apply(this,args),resolve,reject)}))},errback:function(...args){const cb=args.pop();if("function"!=typeof cb)throw makeError("Asynchronous function called without callback",GENSYNC_ERRBACK_NO_CALLBACK);let gen;try{gen=genFn.apply(this,args)}catch(err){return void cb(err)}evaluateAsync(gen,(val=>cb(void 0,val)),(err=>cb(err)))}};return fns}(genFn))}),{all:buildOperation({name:"all",arity:1,sync:function(args){return Array.from(args[0]).map((item=>evaluateSync(item)))},async:function(args,resolve,reject){const items=Array.from(args[0]);if(0===items.length)return void Promise.resolve().then((()=>resolve([])));let count=0;const results=items.map((()=>{}));items.forEach(((item,i)=>{evaluateAsync(item,(val=>{results[i]=val,count+=1,count===results.length&&resolve(results)}),reject)}))}}),race:buildOperation({name:"race",arity:1,sync:function(args){const items=Array.from(args[0]);if(0===items.length)throw makeError("Must race at least 1 item","GENSYNC_RACE_NONEMPTY");return evaluateSync(items[0])},async:function(args,resolve,reject){const items=Array.from(args[0]);if(0===items.length)throw makeError("Must race at least 1 item","GENSYNC_RACE_NONEMPTY");for(const item of items)evaluateAsync(item,resolve,reject)}})})},"./node_modules/.pnpm/globals@11.12.0/node_modules/globals/index.js":(module,__unused_webpack_exports,__webpack_require__)=>{"use 
strict";module.exports=__webpack_require__("./node_modules/.pnpm/globals@11.12.0/node_modules/globals/globals.json")},"./node_modules/.pnpm/has-flag@4.0.0/node_modules/has-flag/index.js":module=>{"use strict";module.exports=(flag,argv=process.argv)=>{const prefix=flag.startsWith("-")?"":1===flag.length?"-":"--",position=argv.indexOf(prefix+flag),terminatorPosition=argv.indexOf("--");return-1!==position&&(-1===terminatorPosition||position{"use strict";const object={},hasOwnProperty=object.hasOwnProperty,forOwn=(object,callback)=>{for(const key in object)hasOwnProperty.call(object,key)&&callback(key,object[key])},toString=object.toString,isArray=Array.isArray,isBuffer=Buffer.isBuffer,singleEscapes={'"':'\\"',"'":"\\'","\\":"\\\\","\b":"\\b","\f":"\\f","\n":"\\n","\r":"\\r","\t":"\\t"},regexSingleEscape=/["'\\\b\f\n\r\t]/,regexDigit=/[0-9]/,regexWhitelist=/[ !#-&\(-\[\]-_a-~]/,jsesc=(argument,options)=>{const increaseIndentation=()=>{oldIndent=indent,++options.indentLevel,indent=options.indent.repeat(options.indentLevel)},defaults={escapeEverything:!1,minimal:!1,isScriptContext:!1,quotes:"single",wrap:!1,es6:!1,json:!1,compact:!0,lowercaseHex:!1,numbers:"decimal",indent:"\t",indentLevel:0,__inline1__:!1,__inline2__:!1},json=options&&options.json;var destination,source;json&&(defaults.quotes="double",defaults.wrap=!0),destination=defaults,"single"!=(options=(source=options)?(forOwn(source,((key,value)=>{destination[key]=value})),destination):destination).quotes&&"double"!=options.quotes&&"backtick"!=options.quotes&&(options.quotes="single");const quote="double"==options.quotes?'"':"backtick"==options.quotes?"`":"'",compact=options.compact,lowercaseHex=options.lowercaseHex;let indent=options.indent.repeat(options.indentLevel),oldIndent="";const inline1=options.__inline1__,inline2=options.__inline2__,newLine=compact?"":"\n";let result,isEmpty=!0;const useBinNumbers="binary"==options.numbers,useOctNumbers="octal"==options.numbers,useDecNumbers="decimal"==options.numbers,useHexNumbers="hexadecimal"==options.numbers;if(json&&argument&&(value=>"function"==typeof value)(argument.toJSON)&&(argument=argument.toJSON()),!(value=>"string"==typeof value||"[object String]"==toString.call(value))(argument)){if((value=>"[object Map]"==toString.call(value))(argument))return 0==argument.size?"new Map()":(compact||(options.__inline1__=!0,options.__inline2__=!1),"new Map("+jsesc(Array.from(argument),options)+")");if((value=>"[object Set]"==toString.call(value))(argument))return 0==argument.size?"new Set()":"new Set("+jsesc(Array.from(argument),options)+")";if(isBuffer(argument))return 0==argument.length?"Buffer.from([])":"Buffer.from("+jsesc(Array.from(argument),options)+")";if(isArray(argument))return result=[],options.wrap=!0,inline1&&(options.__inline1__=!1,options.__inline2__=!0),inline2||increaseIndentation(),((array,callback)=>{const length=array.length;let index=-1;for(;++index{isEmpty=!1,inline2&&(options.__inline2__=!1),result.push((compact||inline2?"":indent)+jsesc(value,options))})),isEmpty?"[]":inline2?"["+result.join(", ")+"]":"["+newLine+result.join(","+newLine)+newLine+(compact?"":oldIndent)+"]";if(!(value=>"number"==typeof value||"[object Number]"==toString.call(value))(argument))return(value=>"[object Object]"==toString.call(value))(argument)?(result=[],options.wrap=!0,increaseIndentation(),forOwn(argument,((key,value)=>{isEmpty=!1,result.push((compact?"":indent)+jsesc(key,options)+":"+(compact?"":" 
")+jsesc(value,options))})),isEmpty?"{}":"{"+newLine+result.join(","+newLine)+newLine+(compact?"":oldIndent)+"}"):json?JSON.stringify(argument)||"null":String(argument);if(json)return JSON.stringify(argument);if(useDecNumbers)return String(argument);if(useHexNumbers){let hexadecimal=argument.toString(16);return lowercaseHex||(hexadecimal=hexadecimal.toUpperCase()),"0x"+hexadecimal}if(useBinNumbers)return"0b"+argument.toString(2);if(useOctNumbers)return"0o"+argument.toString(8)}const string=argument;let index=-1;const length=string.length;for(result="";++index=55296&&first<=56319&&length>index+1){const second=string.charCodeAt(index+1);if(second>=56320&&second<=57343){let hexadecimal=(1024*(first-55296)+second-56320+65536).toString(16);lowercaseHex||(hexadecimal=hexadecimal.toUpperCase()),result+="\\u{"+hexadecimal+"}",++index;continue}}}if(!options.escapeEverything){if(regexWhitelist.test(character)){result+=character;continue}if('"'==character){result+=quote==character?'\\"':character;continue}if("`"==character){result+=quote==character?"\\`":character;continue}if("'"==character){result+=quote==character?"\\'":character;continue}}if("\0"==character&&!json&&!regexDigit.test(string.charAt(index+1))){result+="\\0";continue}if(regexSingleEscape.test(character)){result+=singleEscapes[character];continue}const charCode=character.charCodeAt(0);if(options.minimal&&8232!=charCode&&8233!=charCode){result+=character;continue}let hexadecimal=charCode.toString(16);lowercaseHex||(hexadecimal=hexadecimal.toUpperCase());const longhand=hexadecimal.length>2||json,escaped="\\"+(longhand?"u":"x")+("0000"+hexadecimal).slice(longhand?-4:-2);result+=escaped}return options.wrap&&(result=quote+result+quote),"`"==quote&&(result=result.replace(/\$\{/g,"\\${")),options.isScriptContext?result.replace(/<\/(script|style)/gi,"<\\/$1").replace(/ + +-------------------------------------------------------------------------------- + + + + + +## Table of Contents + +- [Examples](#examples) + - [Consuming a source map](#consuming-a-source-map) + - [Generating a source map](#generating-a-source-map) + - [With SourceNode (high level API)](#with-sourcenode-high-level-api) + - [With SourceMapGenerator (low level API)](#with-sourcemapgenerator-low-level-api) +- [API](#api) + - [SourceMapConsumer](#sourcemapconsumer) + - [new SourceMapConsumer(rawSourceMap)](#new-sourcemapconsumerrawsourcemap) + - [SourceMapConsumer.prototype.computeColumnSpans()](#sourcemapconsumerprototypecomputecolumnspans) + - [SourceMapConsumer.prototype.originalPositionFor(generatedPosition)](#sourcemapconsumerprototypeoriginalpositionforgeneratedposition) + - [SourceMapConsumer.prototype.generatedPositionFor(originalPosition)](#sourcemapconsumerprototypegeneratedpositionfororiginalposition) + - [SourceMapConsumer.prototype.allGeneratedPositionsFor(originalPosition)](#sourcemapconsumerprototypeallgeneratedpositionsfororiginalposition) + - [SourceMapConsumer.prototype.hasContentsOfAllSources()](#sourcemapconsumerprototypehascontentsofallsources) + - [SourceMapConsumer.prototype.sourceContentFor(source[, returnNullOnMissing])](#sourcemapconsumerprototypesourcecontentforsource-returnnullonmissing) + - [SourceMapConsumer.prototype.eachMapping(callback, context, order)](#sourcemapconsumerprototypeeachmappingcallback-context-order) + - [SourceMapGenerator](#sourcemapgenerator) + - [new SourceMapGenerator([startOfSourceMap])](#new-sourcemapgeneratorstartofsourcemap) + - 
[SourceMapGenerator.fromSourceMap(sourceMapConsumer)](#sourcemapgeneratorfromsourcemapsourcemapconsumer) + - [SourceMapGenerator.prototype.addMapping(mapping)](#sourcemapgeneratorprototypeaddmappingmapping) + - [SourceMapGenerator.prototype.setSourceContent(sourceFile, sourceContent)](#sourcemapgeneratorprototypesetsourcecontentsourcefile-sourcecontent) + - [SourceMapGenerator.prototype.applySourceMap(sourceMapConsumer[, sourceFile[, sourceMapPath]])](#sourcemapgeneratorprototypeapplysourcemapsourcemapconsumer-sourcefile-sourcemappath) + - [SourceMapGenerator.prototype.toString()](#sourcemapgeneratorprototypetostring) + - [SourceNode](#sourcenode) + - [new SourceNode([line, column, source[, chunk[, name]]])](#new-sourcenodeline-column-source-chunk-name) + - [SourceNode.fromStringWithSourceMap(code, sourceMapConsumer[, relativePath])](#sourcenodefromstringwithsourcemapcode-sourcemapconsumer-relativepath) + - [SourceNode.prototype.add(chunk)](#sourcenodeprototypeaddchunk) + - [SourceNode.prototype.prepend(chunk)](#sourcenodeprototypeprependchunk) + - [SourceNode.prototype.setSourceContent(sourceFile, sourceContent)](#sourcenodeprototypesetsourcecontentsourcefile-sourcecontent) + - [SourceNode.prototype.walk(fn)](#sourcenodeprototypewalkfn) + - [SourceNode.prototype.walkSourceContents(fn)](#sourcenodeprototypewalksourcecontentsfn) + - [SourceNode.prototype.join(sep)](#sourcenodeprototypejoinsep) + - [SourceNode.prototype.replaceRight(pattern, replacement)](#sourcenodeprototypereplacerightpattern-replacement) + - [SourceNode.prototype.toString()](#sourcenodeprototypetostring) + - [SourceNode.prototype.toStringWithSourceMap([startOfSourceMap])](#sourcenodeprototypetostringwithsourcemapstartofsourcemap) + + + +## Examples + +### Consuming a source map + +```js +var rawSourceMap = { + version: 3, + file: 'min.js', + names: ['bar', 'baz', 'n'], + sources: ['one.js', 'two.js'], + sourceRoot: 'http://example.com/www/js/', + mappings: 'CAAC,IAAI,IAAM,SAAUA,GAClB,OAAOC,IAAID;CCDb,IAAI,IAAM,SAAUE,GAClB,OAAOA' +}; + +var smc = new SourceMapConsumer(rawSourceMap); + +console.log(smc.sources); +// [ 'http://example.com/www/js/one.js', +// 'http://example.com/www/js/two.js' ] + +console.log(smc.originalPositionFor({ + line: 2, + column: 28 +})); +// { source: 'http://example.com/www/js/two.js', +// line: 2, +// column: 10, +// name: 'n' } + +console.log(smc.generatedPositionFor({ + source: 'http://example.com/www/js/two.js', + line: 2, + column: 10 +})); +// { line: 2, column: 28 } + +smc.eachMapping(function (m) { + // ... +}); +``` + +### Generating a source map + +In depth guide: +[**Compiling to JavaScript, and Debugging with Source Maps**](https://hacks.mozilla.org/2013/05/compiling-to-javascript-and-debugging-with-source-maps/) + +#### With SourceNode (high level API) + +```js +function compile(ast) { + switch (ast.type) { + case 'BinaryExpression': + return new SourceNode( + ast.location.line, + ast.location.column, + ast.location.source, + [compile(ast.left), " + ", compile(ast.right)] + ); + case 'Literal': + return new SourceNode( + ast.location.line, + ast.location.column, + ast.location.source, + String(ast.value) + ); + // ... 
+ default: + throw new Error("Bad AST"); + } +} + +var ast = parse("40 + 2", "add.js"); +console.log(compile(ast).toStringWithSourceMap({ + file: 'add.js' +})); +// { code: '40 + 2', +// map: [object SourceMapGenerator] } +``` + +#### With SourceMapGenerator (low level API) + +```js +var map = new SourceMapGenerator({ + file: "source-mapped.js" +}); + +map.addMapping({ + generated: { + line: 10, + column: 35 + }, + source: "foo.js", + original: { + line: 33, + column: 2 + }, + name: "christopher" +}); + +console.log(map.toString()); +// '{"version":3,"file":"source-mapped.js","sources":["foo.js"],"names":["christopher"],"mappings":";;;;;;;;;mCAgCEA"}' +``` + +## API + +Get a reference to the module: + +```js +// Node.js +var sourceMap = require('source-map'); + +// Browser builds +var sourceMap = window.sourceMap; + +// Inside Firefox +const sourceMap = require("devtools/toolkit/sourcemap/source-map.js"); +``` + +### SourceMapConsumer + +A SourceMapConsumer instance represents a parsed source map which we can query +for information about the original file positions by giving it a file position +in the generated source. + +#### new SourceMapConsumer(rawSourceMap) + +The only parameter is the raw source map (either as a string which can be +`JSON.parse`'d, or an object). According to the spec, source maps have the +following attributes: + +* `version`: Which version of the source map spec this map is following. + +* `sources`: An array of URLs to the original source files. + +* `names`: An array of identifiers which can be referenced by individual + mappings. + +* `sourceRoot`: Optional. The URL root from which all sources are relative. + +* `sourcesContent`: Optional. An array of contents of the original source files. + +* `mappings`: A string of base64 VLQs which contain the actual mappings. + +* `file`: Optional. The generated filename this source map is associated with. + +```js +var consumer = new sourceMap.SourceMapConsumer(rawSourceMapJsonData); +``` + +#### SourceMapConsumer.prototype.computeColumnSpans() + +Compute the last column for each generated mapping. The last column is +inclusive. + +```js +// Before: +consumer.allGeneratedPositionsFor({ line: 2, source: "foo.coffee" }) +// [ { line: 2, +// column: 1 }, +// { line: 2, +// column: 10 }, +// { line: 2, +// column: 20 } ] + +consumer.computeColumnSpans(); + +// After: +consumer.allGeneratedPositionsFor({ line: 2, source: "foo.coffee" }) +// [ { line: 2, +// column: 1, +// lastColumn: 9 }, +// { line: 2, +// column: 10, +// lastColumn: 19 }, +// { line: 2, +// column: 20, +// lastColumn: Infinity } ] + +``` + +#### SourceMapConsumer.prototype.originalPositionFor(generatedPosition) + +Returns the original source, line, and column information for the generated +source's line and column positions provided. The only argument is an object with +the following properties: + +* `line`: The line number in the generated source. Line numbers in + this library are 1-based (note that the underlying source map + specification uses 0-based line numbers -- this library handles the + translation). + +* `column`: The column number in the generated source. Column numbers + in this library are 0-based. + +* `bias`: Either `SourceMapConsumer.GREATEST_LOWER_BOUND` or + `SourceMapConsumer.LEAST_UPPER_BOUND`. Specifies whether to return the closest + element that is smaller than or greater than the one we are searching for, + respectively, if the exact element cannot be found. Defaults to + `SourceMapConsumer.GREATEST_LOWER_BOUND`. 
+ +and an object is returned with the following properties: + +* `source`: The original source file, or null if this information is not + available. + +* `line`: The line number in the original source, or null if this information is + not available. The line number is 1-based. + +* `column`: The column number in the original source, or null if this + information is not available. The column number is 0-based. + +* `name`: The original identifier, or null if this information is not available. + +```js +consumer.originalPositionFor({ line: 2, column: 10 }) +// { source: 'foo.coffee', +// line: 2, +// column: 2, +// name: null } + +consumer.originalPositionFor({ line: 99999999999999999, column: 999999999999999 }) +// { source: null, +// line: null, +// column: null, +// name: null } +``` + +#### SourceMapConsumer.prototype.generatedPositionFor(originalPosition) + +Returns the generated line and column information for the original source, +line, and column positions provided. The only argument is an object with +the following properties: + +* `source`: The filename of the original source. + +* `line`: The line number in the original source. The line number is + 1-based. + +* `column`: The column number in the original source. The column + number is 0-based. + +and an object is returned with the following properties: + +* `line`: The line number in the generated source, or null. The line + number is 1-based. + +* `column`: The column number in the generated source, or null. The + column number is 0-based. + +```js +consumer.generatedPositionFor({ source: "example.js", line: 2, column: 10 }) +// { line: 1, +// column: 56 } +``` + +#### SourceMapConsumer.prototype.allGeneratedPositionsFor(originalPosition) + +Returns all generated line and column information for the original source, line, +and column provided. If no column is provided, returns all mappings +corresponding to either the line we are searching for or the next closest line +that has any mappings. Otherwise, returns all mappings corresponding to the +given line and either the column we are searching for or the next closest column +that has any offsets. + +The only argument is an object with the following properties: + +* `source`: The filename of the original source. + +* `line`: The line number in the original source. The line number is + 1-based. + +* `column`: Optional. The column number in the original source. The + column number is 0-based. + +and an array of objects is returned, each with the following properties: + +* `line`: The line number in the generated source, or null. The line + number is 1-based. + +* `column`: The column number in the generated source, or null. The + column number is 0-based. + +```js +consumer.allGeneratedPositionsFor({ line: 2, source: "foo.coffee" }) +// [ { line: 2, +// column: 1 }, +// { line: 2, +// column: 10 }, +// { line: 2, +// column: 20 } ] +``` + +#### SourceMapConsumer.prototype.hasContentsOfAllSources() + +Return true if we have the embedded source content for every source listed in +the source map, false otherwise. + +In other words, if this method returns `true`, then +`consumer.sourceContentFor(s)` will succeed for every source `s` in +`consumer.sources`. + +```js +// ... +if (consumer.hasContentsOfAllSources()) { + consumerReadyCallback(consumer); +} else { + fetchSources(consumer, consumerReadyCallback); +} +// ... +``` + +#### SourceMapConsumer.prototype.sourceContentFor(source[, returnNullOnMissing]) + +Returns the original source content for the source provided. 
The only +argument is the URL of the original source file. + +If the source content for the given source is not found, then an error is +thrown. Optionally, pass `true` as the second param to have `null` returned +instead. + +```js +consumer.sources +// [ "my-cool-lib.clj" ] + +consumer.sourceContentFor("my-cool-lib.clj") +// "..." + +consumer.sourceContentFor("this is not in the source map"); +// Error: "this is not in the source map" is not in the source map + +consumer.sourceContentFor("this is not in the source map", true); +// null +``` + +#### SourceMapConsumer.prototype.eachMapping(callback, context, order) + +Iterate over each mapping between an original source/line/column and a +generated line/column in this source map. + +* `callback`: The function that is called with each mapping. Mappings have the + form `{ source, generatedLine, generatedColumn, originalLine, originalColumn, + name }` + +* `context`: Optional. If specified, this object will be the value of `this` + every time that `callback` is called. + +* `order`: Either `SourceMapConsumer.GENERATED_ORDER` or + `SourceMapConsumer.ORIGINAL_ORDER`. Specifies whether you want to iterate over + the mappings sorted by the generated file's line/column order or the + original's source/line/column order, respectively. Defaults to + `SourceMapConsumer.GENERATED_ORDER`. + +```js +consumer.eachMapping(function (m) { console.log(m); }) +// ... +// { source: 'illmatic.js', +// generatedLine: 1, +// generatedColumn: 0, +// originalLine: 1, +// originalColumn: 0, +// name: null } +// { source: 'illmatic.js', +// generatedLine: 2, +// generatedColumn: 0, +// originalLine: 2, +// originalColumn: 0, +// name: null } +// ... +``` +### SourceMapGenerator + +An instance of the SourceMapGenerator represents a source map which is being +built incrementally. + +#### new SourceMapGenerator([startOfSourceMap]) + +You may pass an object with the following properties: + +* `file`: The filename of the generated source that this source map is + associated with. + +* `sourceRoot`: A root for all relative URLs in this source map. + +* `skipValidation`: Optional. When `true`, disables validation of mappings as + they are added. This can improve performance but should be used with + discretion, as a last resort. Even then, one should avoid using this flag when + running tests, if possible. + +```js +var generator = new sourceMap.SourceMapGenerator({ + file: "my-generated-javascript-file.js", + sourceRoot: "http://example.com/app/js/" +}); +``` + +#### SourceMapGenerator.fromSourceMap(sourceMapConsumer) + +Creates a new `SourceMapGenerator` from an existing `SourceMapConsumer` instance. + +* `sourceMapConsumer` The SourceMap. + +```js +var generator = sourceMap.SourceMapGenerator.fromSourceMap(consumer); +``` + +#### SourceMapGenerator.prototype.addMapping(mapping) + +Add a single mapping from original source line and column to the generated +source's line and column for this source map being created. The mapping object +should have the following properties: + +* `generated`: An object with the generated line and column positions. + +* `original`: An object with the original line and column positions. + +* `source`: The original source file (relative to the sourceRoot). + +* `name`: An optional original token name for this mapping. 
+ +```js +generator.addMapping({ + source: "module-one.scm", + original: { line: 128, column: 0 }, + generated: { line: 3, column: 456 } +}) +``` + +#### SourceMapGenerator.prototype.setSourceContent(sourceFile, sourceContent) + +Set the source content for an original source file. + +* `sourceFile` the URL of the original source file. + +* `sourceContent` the content of the source file. + +```js +generator.setSourceContent("module-one.scm", + fs.readFileSync("path/to/module-one.scm")) +``` + +#### SourceMapGenerator.prototype.applySourceMap(sourceMapConsumer[, sourceFile[, sourceMapPath]]) + +Applies a SourceMap for a source file to the SourceMap. +Each mapping to the supplied source file is rewritten using the +supplied SourceMap. Note: The resolution for the resulting mappings +is the minimum of this map and the supplied map. + +* `sourceMapConsumer`: The SourceMap to be applied. + +* `sourceFile`: Optional. The filename of the source file. + If omitted, sourceMapConsumer.file will be used, if it exists. + Otherwise an error will be thrown. + +* `sourceMapPath`: Optional. The dirname of the path to the SourceMap + to be applied. If relative, it is relative to the SourceMap. + + This parameter is needed when the two SourceMaps aren't in the same + directory, and the SourceMap to be applied contains relative source + paths. If so, those relative source paths need to be rewritten + relative to the SourceMap. + + If omitted, it is assumed that both SourceMaps are in the same directory, + thus not needing any rewriting. (Supplying `'.'` has the same effect.) + +#### SourceMapGenerator.prototype.toString() + +Renders the source map being generated to a string. + +```js +generator.toString() +// '{"version":3,"sources":["module-one.scm"],"names":[],"mappings":"...snip...","file":"my-generated-javascript-file.js","sourceRoot":"http://example.com/app/js/"}' +``` + +### SourceNode + +SourceNodes provide a way to abstract over interpolating and/or concatenating +snippets of generated JavaScript source code, while maintaining the line and +column information associated between those snippets and the original source +code. This is useful as the final intermediate representation a compiler might +use before outputting the generated JS and source map. + +#### new SourceNode([line, column, source[, chunk[, name]]]) + +* `line`: The original line number associated with this source node, or null if + it isn't associated with an original line. The line number is 1-based. + +* `column`: The original column number associated with this source node, or null + if it isn't associated with an original column. The column number + is 0-based. + +* `source`: The original source's filename; null if no filename is provided. + +* `chunk`: Optional. Is immediately passed to `SourceNode.prototype.add`, see + below. + +* `name`: Optional. The original identifier. + +```js +var node = new SourceNode(1, 2, "a.cpp", [ + new SourceNode(3, 4, "b.cpp", "extern int status;\n"), + new SourceNode(5, 6, "c.cpp", "std::string* make_string(size_t n);\n"), + new SourceNode(7, 8, "d.cpp", "int main(int argc, char** argv) {}\n"), +]); +``` + +#### SourceNode.fromStringWithSourceMap(code, sourceMapConsumer[, relativePath]) + +Creates a SourceNode from generated code and a SourceMapConsumer. + +* `code`: The generated code + +* `sourceMapConsumer` The SourceMap for the generated code + +* `relativePath` The optional path that relative sources in `sourceMapConsumer` + should be relative to. 
+ +```js +var consumer = new SourceMapConsumer(fs.readFileSync("path/to/my-file.js.map", "utf8")); +var node = SourceNode.fromStringWithSourceMap(fs.readFileSync("path/to/my-file.js"), + consumer); +``` + +#### SourceNode.prototype.add(chunk) + +Add a chunk of generated JS to this source node. + +* `chunk`: A string snippet of generated JS code, another instance of + `SourceNode`, or an array where each member is one of those things. + +```js +node.add(" + "); +node.add(otherNode); +node.add([leftHandOperandNode, " + ", rightHandOperandNode]); +``` + +#### SourceNode.prototype.prepend(chunk) + +Prepend a chunk of generated JS to this source node. + +* `chunk`: A string snippet of generated JS code, another instance of + `SourceNode`, or an array where each member is one of those things. + +```js +node.prepend("/** Build Id: f783haef86324gf **/\n\n"); +``` + +#### SourceNode.prototype.setSourceContent(sourceFile, sourceContent) + +Set the source content for a source file. This will be added to the +`SourceMap` in the `sourcesContent` field. + +* `sourceFile`: The filename of the source file + +* `sourceContent`: The content of the source file + +```js +node.setSourceContent("module-one.scm", + fs.readFileSync("path/to/module-one.scm")) +``` + +#### SourceNode.prototype.walk(fn) + +Walk over the tree of JS snippets in this node and its children. The walking +function is called once for each snippet of JS and is passed that snippet and +its original associated source's line/column location. + +* `fn`: The traversal function. + +```js +var node = new SourceNode(1, 2, "a.js", [ + new SourceNode(3, 4, "b.js", "uno"), + "dos", + [ + "tres", + new SourceNode(5, 6, "c.js", "quatro") + ] +]); + +node.walk(function (code, loc) { console.log("WALK:", code, loc); }) +// WALK: uno { source: 'b.js', line: 3, column: 4, name: null } +// WALK: dos { source: 'a.js', line: 1, column: 2, name: null } +// WALK: tres { source: 'a.js', line: 1, column: 2, name: null } +// WALK: quatro { source: 'c.js', line: 5, column: 6, name: null } +``` + +#### SourceNode.prototype.walkSourceContents(fn) + +Walk over the tree of SourceNodes. The walking function is called for each +source file content and is passed the filename and source content. + +* `fn`: The traversal function. + +```js +var a = new SourceNode(1, 2, "a.js", "generated from a"); +a.setSourceContent("a.js", "original a"); +var b = new SourceNode(1, 2, "b.js", "generated from b"); +b.setSourceContent("b.js", "original b"); +var c = new SourceNode(1, 2, "c.js", "generated from c"); +c.setSourceContent("c.js", "original c"); + +var node = new SourceNode(null, null, null, [a, b, c]); +node.walkSourceContents(function (source, contents) { console.log("WALK:", source, ":", contents); }) +// WALK: a.js : original a +// WALK: b.js : original b +// WALK: c.js : original c +``` + +#### SourceNode.prototype.join(sep) + +Like `Array.prototype.join` except for SourceNodes. Inserts the separator +between each of this source node's children. + +* `sep`: The separator. + +```js +var lhs = new SourceNode(1, 2, "a.rs", "my_copy"); +var operand = new SourceNode(3, 4, "a.rs", "="); +var rhs = new SourceNode(5, 6, "a.rs", "orig.clone()"); + +var node = new SourceNode(null, null, null, [ lhs, operand, rhs ]); +var joinedNode = node.join(" "); +``` + +#### SourceNode.prototype.replaceRight(pattern, replacement) + +Call `String.prototype.replace` on the very right-most source snippet. Useful +for trimming white space from the end of a source node, etc. 
+ +* `pattern`: The pattern to replace. + +* `replacement`: The thing to replace the pattern with. + +```js +// Trim trailing white space. +node.replaceRight(/\s*$/, ""); +``` + +#### SourceNode.prototype.toString() + +Return the string representation of this source node. Walks over the tree and +concatenates all the various snippets together to one string. + +```js +var node = new SourceNode(1, 2, "a.js", [ + new SourceNode(3, 4, "b.js", "uno"), + "dos", + [ + "tres", + new SourceNode(5, 6, "c.js", "quatro") + ] +]); + +node.toString() +// 'unodostresquatro' +``` + +#### SourceNode.prototype.toStringWithSourceMap([startOfSourceMap]) + +Returns the string representation of this tree of source nodes, plus a +SourceMapGenerator which contains all the mappings between the generated and +original sources. + +The arguments are the same as those to `new SourceMapGenerator`. + +```js +var node = new SourceNode(1, 2, "a.js", [ + new SourceNode(3, 4, "b.js", "uno"), + "dos", + [ + "tres", + new SourceNode(5, 6, "c.js", "quatro") + ] +]); + +node.toStringWithSourceMap({ file: "my-output-file.js" }) +// { code: 'unodostresquatro', +// map: [object SourceMapGenerator] } +``` diff --git a/node_modules/source-map-js/lib/array-set.js b/node_modules/source-map-js/lib/array-set.js new file mode 100644 index 0000000..fbd5c81 --- /dev/null +++ b/node_modules/source-map-js/lib/array-set.js @@ -0,0 +1,121 @@ +/* -*- Mode: js; js-indent-level: 2; -*- */ +/* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + +var util = require('./util'); +var has = Object.prototype.hasOwnProperty; +var hasNativeMap = typeof Map !== "undefined"; + +/** + * A data structure which is a combination of an array and a set. Adding a new + * member is O(1), testing for membership is O(1), and finding the index of an + * element is O(1). Removing elements from the set is not supported. Only + * strings are supported for membership. + */ +function ArraySet() { + this._array = []; + this._set = hasNativeMap ? new Map() : Object.create(null); +} + +/** + * Static method for creating ArraySet instances from an existing array. + */ +ArraySet.fromArray = function ArraySet_fromArray(aArray, aAllowDuplicates) { + var set = new ArraySet(); + for (var i = 0, len = aArray.length; i < len; i++) { + set.add(aArray[i], aAllowDuplicates); + } + return set; +}; + +/** + * Return how many unique items are in this ArraySet. If duplicates have been + * added, than those do not count towards the size. + * + * @returns Number + */ +ArraySet.prototype.size = function ArraySet_size() { + return hasNativeMap ? this._set.size : Object.getOwnPropertyNames(this._set).length; +}; + +/** + * Add the given string to this set. + * + * @param String aStr + */ +ArraySet.prototype.add = function ArraySet_add(aStr, aAllowDuplicates) { + var sStr = hasNativeMap ? aStr : util.toSetString(aStr); + var isDuplicate = hasNativeMap ? this.has(aStr) : has.call(this._set, sStr); + var idx = this._array.length; + if (!isDuplicate || aAllowDuplicates) { + this._array.push(aStr); + } + if (!isDuplicate) { + if (hasNativeMap) { + this._set.set(aStr, idx); + } else { + this._set[sStr] = idx; + } + } +}; + +/** + * Is the given string a member of this set? 
+ * + * @param String aStr + */ +ArraySet.prototype.has = function ArraySet_has(aStr) { + if (hasNativeMap) { + return this._set.has(aStr); + } else { + var sStr = util.toSetString(aStr); + return has.call(this._set, sStr); + } +}; + +/** + * What is the index of the given string in the array? + * + * @param String aStr + */ +ArraySet.prototype.indexOf = function ArraySet_indexOf(aStr) { + if (hasNativeMap) { + var idx = this._set.get(aStr); + if (idx >= 0) { + return idx; + } + } else { + var sStr = util.toSetString(aStr); + if (has.call(this._set, sStr)) { + return this._set[sStr]; + } + } + + throw new Error('"' + aStr + '" is not in the set.'); +}; + +/** + * What is the element at the given index? + * + * @param Number aIdx + */ +ArraySet.prototype.at = function ArraySet_at(aIdx) { + if (aIdx >= 0 && aIdx < this._array.length) { + return this._array[aIdx]; + } + throw new Error('No element indexed by ' + aIdx); +}; + +/** + * Returns the array representation of this set (which has the proper indices + * indicated by indexOf). Note that this is a copy of the internal array used + * for storing the members so that no one can mess with internal state. + */ +ArraySet.prototype.toArray = function ArraySet_toArray() { + return this._array.slice(); +}; + +exports.ArraySet = ArraySet; diff --git a/node_modules/source-map-js/lib/base64-vlq.js b/node_modules/source-map-js/lib/base64-vlq.js new file mode 100644 index 0000000..612b404 --- /dev/null +++ b/node_modules/source-map-js/lib/base64-vlq.js @@ -0,0 +1,140 @@ +/* -*- Mode: js; js-indent-level: 2; -*- */ +/* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + * + * Based on the Base 64 VLQ implementation in Closure Compiler: + * https://code.google.com/p/closure-compiler/source/browse/trunk/src/com/google/debugging/sourcemap/Base64VLQ.java + * + * Copyright 2011 The Closure Compiler Authors. All rights reserved. + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are + * met: + * + * * Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above + * copyright notice, this list of conditions and the following + * disclaimer in the documentation and/or other materials provided + * with the distribution. + * * Neither the name of Google Inc. nor the names of its + * contributors may be used to endorse or promote products derived + * from this software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ */ + +var base64 = require('./base64'); + +// A single base 64 digit can contain 6 bits of data. For the base 64 variable +// length quantities we use in the source map spec, the first bit is the sign, +// the next four bits are the actual value, and the 6th bit is the +// continuation bit. The continuation bit tells us whether there are more +// digits in this value following this digit. +// +// Continuation +// | Sign +// | | +// V V +// 101011 + +var VLQ_BASE_SHIFT = 5; + +// binary: 100000 +var VLQ_BASE = 1 << VLQ_BASE_SHIFT; + +// binary: 011111 +var VLQ_BASE_MASK = VLQ_BASE - 1; + +// binary: 100000 +var VLQ_CONTINUATION_BIT = VLQ_BASE; + +/** + * Converts from a two-complement value to a value where the sign bit is + * placed in the least significant bit. For example, as decimals: + * 1 becomes 2 (10 binary), -1 becomes 3 (11 binary) + * 2 becomes 4 (100 binary), -2 becomes 5 (101 binary) + */ +function toVLQSigned(aValue) { + return aValue < 0 + ? ((-aValue) << 1) + 1 + : (aValue << 1) + 0; +} + +/** + * Converts to a two-complement value from a value where the sign bit is + * placed in the least significant bit. For example, as decimals: + * 2 (10 binary) becomes 1, 3 (11 binary) becomes -1 + * 4 (100 binary) becomes 2, 5 (101 binary) becomes -2 + */ +function fromVLQSigned(aValue) { + var isNegative = (aValue & 1) === 1; + var shifted = aValue >> 1; + return isNegative + ? -shifted + : shifted; +} + +/** + * Returns the base 64 VLQ encoded value. + */ +exports.encode = function base64VLQ_encode(aValue) { + var encoded = ""; + var digit; + + var vlq = toVLQSigned(aValue); + + do { + digit = vlq & VLQ_BASE_MASK; + vlq >>>= VLQ_BASE_SHIFT; + if (vlq > 0) { + // There are still more digits in this value, so we must make sure the + // continuation bit is marked. + digit |= VLQ_CONTINUATION_BIT; + } + encoded += base64.encode(digit); + } while (vlq > 0); + + return encoded; +}; + +/** + * Decodes the next base 64 VLQ value from the given string and returns the + * value and the rest of the string via the out parameter. + */ +exports.decode = function base64VLQ_decode(aStr, aIndex, aOutParam) { + var strLen = aStr.length; + var result = 0; + var shift = 0; + var continuation, digit; + + do { + if (aIndex >= strLen) { + throw new Error("Expected more digits in base 64 VLQ value."); + } + + digit = base64.decode(aStr.charCodeAt(aIndex++)); + if (digit === -1) { + throw new Error("Invalid base64 digit: " + aStr.charAt(aIndex - 1)); + } + + continuation = !!(digit & VLQ_CONTINUATION_BIT); + digit &= VLQ_BASE_MASK; + result = result + (digit << shift); + shift += VLQ_BASE_SHIFT; + } while (continuation); + + aOutParam.value = fromVLQSigned(result); + aOutParam.rest = aIndex; +}; diff --git a/node_modules/source-map-js/lib/base64.js b/node_modules/source-map-js/lib/base64.js new file mode 100644 index 0000000..8aa86b3 --- /dev/null +++ b/node_modules/source-map-js/lib/base64.js @@ -0,0 +1,67 @@ +/* -*- Mode: js; js-indent-level: 2; -*- */ +/* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + +var intToCharMap = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'.split(''); + +/** + * Encode an integer in the range of 0 to 63 to a single base 64 digit. 
+ */ +exports.encode = function (number) { + if (0 <= number && number < intToCharMap.length) { + return intToCharMap[number]; + } + throw new TypeError("Must be between 0 and 63: " + number); +}; + +/** + * Decode a single base 64 character code digit to an integer. Returns -1 on + * failure. + */ +exports.decode = function (charCode) { + var bigA = 65; // 'A' + var bigZ = 90; // 'Z' + + var littleA = 97; // 'a' + var littleZ = 122; // 'z' + + var zero = 48; // '0' + var nine = 57; // '9' + + var plus = 43; // '+' + var slash = 47; // '/' + + var littleOffset = 26; + var numberOffset = 52; + + // 0 - 25: ABCDEFGHIJKLMNOPQRSTUVWXYZ + if (bigA <= charCode && charCode <= bigZ) { + return (charCode - bigA); + } + + // 26 - 51: abcdefghijklmnopqrstuvwxyz + if (littleA <= charCode && charCode <= littleZ) { + return (charCode - littleA + littleOffset); + } + + // 52 - 61: 0123456789 + if (zero <= charCode && charCode <= nine) { + return (charCode - zero + numberOffset); + } + + // 62: + + if (charCode == plus) { + return 62; + } + + // 63: / + if (charCode == slash) { + return 63; + } + + // Invalid base64 digit. + return -1; +}; diff --git a/node_modules/source-map-js/lib/binary-search.js b/node_modules/source-map-js/lib/binary-search.js new file mode 100644 index 0000000..010ac94 --- /dev/null +++ b/node_modules/source-map-js/lib/binary-search.js @@ -0,0 +1,111 @@ +/* -*- Mode: js; js-indent-level: 2; -*- */ +/* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + +exports.GREATEST_LOWER_BOUND = 1; +exports.LEAST_UPPER_BOUND = 2; + +/** + * Recursive implementation of binary search. + * + * @param aLow Indices here and lower do not contain the needle. + * @param aHigh Indices here and higher do not contain the needle. + * @param aNeedle The element being searched for. + * @param aHaystack The non-empty array being searched. + * @param aCompare Function which takes two elements and returns -1, 0, or 1. + * @param aBias Either 'binarySearch.GREATEST_LOWER_BOUND' or + * 'binarySearch.LEAST_UPPER_BOUND'. Specifies whether to return the + * closest element that is smaller than or greater than the one we are + * searching for, respectively, if the exact element cannot be found. + */ +function recursiveSearch(aLow, aHigh, aNeedle, aHaystack, aCompare, aBias) { + // This function terminates when one of the following is true: + // + // 1. We find the exact element we are looking for. + // + // 2. We did not find the exact element, but we can return the index of + // the next-closest element. + // + // 3. We did not find the exact element, and there is no next-closest + // element than the one we are searching for, so we return -1. + var mid = Math.floor((aHigh - aLow) / 2) + aLow; + var cmp = aCompare(aNeedle, aHaystack[mid], true); + if (cmp === 0) { + // Found the element we are looking for. + return mid; + } + else if (cmp > 0) { + // Our needle is greater than aHaystack[mid]. + if (aHigh - mid > 1) { + // The element is in the upper half. + return recursiveSearch(mid, aHigh, aNeedle, aHaystack, aCompare, aBias); + } + + // The exact needle element was not found in this haystack. Determine if + // we are in termination case (3) or (2) and return the appropriate thing. + if (aBias == exports.LEAST_UPPER_BOUND) { + return aHigh < aHaystack.length ? aHigh : -1; + } else { + return mid; + } + } + else { + // Our needle is less than aHaystack[mid]. 
+ if (mid - aLow > 1) { + // The element is in the lower half. + return recursiveSearch(aLow, mid, aNeedle, aHaystack, aCompare, aBias); + } + + // we are in termination case (3) or (2) and return the appropriate thing. + if (aBias == exports.LEAST_UPPER_BOUND) { + return mid; + } else { + return aLow < 0 ? -1 : aLow; + } + } +} + +/** + * This is an implementation of binary search which will always try and return + * the index of the closest element if there is no exact hit. This is because + * mappings between original and generated line/col pairs are single points, + * and there is an implicit region between each of them, so a miss just means + * that you aren't on the very start of a region. + * + * @param aNeedle The element you are looking for. + * @param aHaystack The array that is being searched. + * @param aCompare A function which takes the needle and an element in the + * array and returns -1, 0, or 1 depending on whether the needle is less + * than, equal to, or greater than the element, respectively. + * @param aBias Either 'binarySearch.GREATEST_LOWER_BOUND' or + * 'binarySearch.LEAST_UPPER_BOUND'. Specifies whether to return the + * closest element that is smaller than or greater than the one we are + * searching for, respectively, if the exact element cannot be found. + * Defaults to 'binarySearch.GREATEST_LOWER_BOUND'. + */ +exports.search = function search(aNeedle, aHaystack, aCompare, aBias) { + if (aHaystack.length === 0) { + return -1; + } + + var index = recursiveSearch(-1, aHaystack.length, aNeedle, aHaystack, + aCompare, aBias || exports.GREATEST_LOWER_BOUND); + if (index < 0) { + return -1; + } + + // We have found either the exact element, or the next-closest element than + // the one we are searching for. However, there may be more than one such + // element. Make sure we always return the smallest of these. + while (index - 1 >= 0) { + if (aCompare(aHaystack[index], aHaystack[index - 1], true) !== 0) { + break; + } + --index; + } + + return index; +}; diff --git a/node_modules/source-map-js/lib/mapping-list.js b/node_modules/source-map-js/lib/mapping-list.js new file mode 100644 index 0000000..06d1274 --- /dev/null +++ b/node_modules/source-map-js/lib/mapping-list.js @@ -0,0 +1,79 @@ +/* -*- Mode: js; js-indent-level: 2; -*- */ +/* + * Copyright 2014 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + +var util = require('./util'); + +/** + * Determine whether mappingB is after mappingA with respect to generated + * position. + */ +function generatedPositionAfter(mappingA, mappingB) { + // Optimized for most common case + var lineA = mappingA.generatedLine; + var lineB = mappingB.generatedLine; + var columnA = mappingA.generatedColumn; + var columnB = mappingB.generatedColumn; + return lineB > lineA || lineB == lineA && columnB >= columnA || + util.compareByGeneratedPositionsInflated(mappingA, mappingB) <= 0; +} + +/** + * A data structure to provide a sorted view of accumulated mappings in a + * performance conscious manner. It trades a neglibable overhead in general + * case for a large speedup in case of mappings being added in order. + */ +function MappingList() { + this._array = []; + this._sorted = true; + // Serves as infimum + this._last = {generatedLine: -1, generatedColumn: 0}; +} + +/** + * Iterate through internal items. This method takes the same arguments that + * `Array.prototype.forEach` takes. + * + * NOTE: The order of the mappings is NOT guaranteed. 
+ */ +MappingList.prototype.unsortedForEach = + function MappingList_forEach(aCallback, aThisArg) { + this._array.forEach(aCallback, aThisArg); + }; + +/** + * Add the given source mapping. + * + * @param Object aMapping + */ +MappingList.prototype.add = function MappingList_add(aMapping) { + if (generatedPositionAfter(this._last, aMapping)) { + this._last = aMapping; + this._array.push(aMapping); + } else { + this._sorted = false; + this._array.push(aMapping); + } +}; + +/** + * Returns the flat, sorted array of mappings. The mappings are sorted by + * generated position. + * + * WARNING: This method returns internal data without copying, for + * performance. The return value must NOT be mutated, and should be treated as + * an immutable borrow. If you want to take ownership, you must make your own + * copy. + */ +MappingList.prototype.toArray = function MappingList_toArray() { + if (!this._sorted) { + this._array.sort(util.compareByGeneratedPositionsInflated); + this._sorted = true; + } + return this._array; +}; + +exports.MappingList = MappingList; diff --git a/node_modules/source-map-js/lib/quick-sort.js b/node_modules/source-map-js/lib/quick-sort.js new file mode 100644 index 0000000..23f9eda --- /dev/null +++ b/node_modules/source-map-js/lib/quick-sort.js @@ -0,0 +1,132 @@ +/* -*- Mode: js; js-indent-level: 2; -*- */ +/* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + +// It turns out that some (most?) JavaScript engines don't self-host +// `Array.prototype.sort`. This makes sense because C++ will likely remain +// faster than JS when doing raw CPU-intensive sorting. However, when using a +// custom comparator function, calling back and forth between the VM's C++ and +// JIT'd JS is rather slow *and* loses JIT type information, resulting in +// worse generated code for the comparator function than would be optimal. In +// fact, when sorting with a comparator, these costs outweigh the benefits of +// sorting in C++. By using our own JS-implemented Quick Sort (below), we get +// a ~3500ms mean speed-up in `bench/bench.html`. + +function SortTemplate(comparator) { + +/** + * Swap the elements indexed by `x` and `y` in the array `ary`. + * + * @param {Array} ary + * The array. + * @param {Number} x + * The index of the first item. + * @param {Number} y + * The index of the second item. + */ +function swap(ary, x, y) { + var temp = ary[x]; + ary[x] = ary[y]; + ary[y] = temp; +} + +/** + * Returns a random integer within the range `low .. high` inclusive. + * + * @param {Number} low + * The lower bound on the range. + * @param {Number} high + * The upper bound on the range. + */ +function randomIntInRange(low, high) { + return Math.round(low + (Math.random() * (high - low))); +} + +/** + * The Quick Sort algorithm. + * + * @param {Array} ary + * An array to sort. + * @param {function} comparator + * Function to use to compare two items. + * @param {Number} p + * Start index of the array + * @param {Number} r + * End index of the array + */ +function doQuickSort(ary, comparator, p, r) { + // If our lower bound is less than our upper bound, we (1) partition the + // array into two pieces and (2) recurse on each half. If it is not, this is + // the empty array and our base case. + + if (p < r) { + // (1) Partitioning. 
+ // + // The partitioning chooses a pivot between `p` and `r` and moves all + // elements that are less than or equal to the pivot to the before it, and + // all the elements that are greater than it after it. The effect is that + // once partition is done, the pivot is in the exact place it will be when + // the array is put in sorted order, and it will not need to be moved + // again. This runs in O(n) time. + + // Always choose a random pivot so that an input array which is reverse + // sorted does not cause O(n^2) running time. + var pivotIndex = randomIntInRange(p, r); + var i = p - 1; + + swap(ary, pivotIndex, r); + var pivot = ary[r]; + + // Immediately after `j` is incremented in this loop, the following hold + // true: + // + // * Every element in `ary[p .. i]` is less than or equal to the pivot. + // + // * Every element in `ary[i+1 .. j-1]` is greater than the pivot. + for (var j = p; j < r; j++) { + if (comparator(ary[j], pivot, false) <= 0) { + i += 1; + swap(ary, i, j); + } + } + + swap(ary, i + 1, j); + var q = i + 1; + + // (2) Recurse on each half. + + doQuickSort(ary, comparator, p, q - 1); + doQuickSort(ary, comparator, q + 1, r); + } +} + + return doQuickSort; +} + +function cloneSort(comparator) { + let template = SortTemplate.toString(); + let templateFn = new Function(`return ${template}`)(); + return templateFn(comparator); +} + +/** + * Sort the given array in-place with the given comparator function. + * + * @param {Array} ary + * An array to sort. + * @param {function} comparator + * Function to use to compare two items. + */ + +let sortCache = new WeakMap(); +exports.quickSort = function (ary, comparator, start = 0) { + let doQuickSort = sortCache.get(comparator); + if (doQuickSort === void 0) { + doQuickSort = cloneSort(comparator); + sortCache.set(comparator, doQuickSort); + } + doQuickSort(ary, comparator, start, ary.length - 1); +}; diff --git a/node_modules/source-map-js/lib/source-map-consumer.js b/node_modules/source-map-js/lib/source-map-consumer.js new file mode 100644 index 0000000..4bd7a4a --- /dev/null +++ b/node_modules/source-map-js/lib/source-map-consumer.js @@ -0,0 +1,1184 @@ +/* -*- Mode: js; js-indent-level: 2; -*- */ +/* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + +var util = require('./util'); +var binarySearch = require('./binary-search'); +var ArraySet = require('./array-set').ArraySet; +var base64VLQ = require('./base64-vlq'); +var quickSort = require('./quick-sort').quickSort; + +function SourceMapConsumer(aSourceMap, aSourceMapURL) { + var sourceMap = aSourceMap; + if (typeof aSourceMap === 'string') { + sourceMap = util.parseSourceMapInput(aSourceMap); + } + + return sourceMap.sections != null + ? new IndexedSourceMapConsumer(sourceMap, aSourceMapURL) + : new BasicSourceMapConsumer(sourceMap, aSourceMapURL); +} + +SourceMapConsumer.fromSourceMap = function(aSourceMap, aSourceMapURL) { + return BasicSourceMapConsumer.fromSourceMap(aSourceMap, aSourceMapURL); +} + +/** + * The version of the source mapping spec that we are consuming. + */ +SourceMapConsumer.prototype._version = 3; + +// `__generatedMappings` and `__originalMappings` are arrays that hold the +// parsed mapping coordinates from the source map's "mappings" attribute. 
They +// are lazily instantiated, accessed via the `_generatedMappings` and +// `_originalMappings` getters respectively, and we only parse the mappings +// and create these arrays once queried for a source location. We jump through +// these hoops because there can be many thousands of mappings, and parsing +// them is expensive, so we only want to do it if we must. +// +// Each object in the arrays is of the form: +// +// { +// generatedLine: The line number in the generated code, +// generatedColumn: The column number in the generated code, +// source: The path to the original source file that generated this +// chunk of code, +// originalLine: The line number in the original source that +// corresponds to this chunk of generated code, +// originalColumn: The column number in the original source that +// corresponds to this chunk of generated code, +// name: The name of the original symbol which generated this chunk of +// code. +// } +// +// All properties except for `generatedLine` and `generatedColumn` can be +// `null`. +// +// `_generatedMappings` is ordered by the generated positions. +// +// `_originalMappings` is ordered by the original positions. + +SourceMapConsumer.prototype.__generatedMappings = null; +Object.defineProperty(SourceMapConsumer.prototype, '_generatedMappings', { + configurable: true, + enumerable: true, + get: function () { + if (!this.__generatedMappings) { + this._parseMappings(this._mappings, this.sourceRoot); + } + + return this.__generatedMappings; + } +}); + +SourceMapConsumer.prototype.__originalMappings = null; +Object.defineProperty(SourceMapConsumer.prototype, '_originalMappings', { + configurable: true, + enumerable: true, + get: function () { + if (!this.__originalMappings) { + this._parseMappings(this._mappings, this.sourceRoot); + } + + return this.__originalMappings; + } +}); + +SourceMapConsumer.prototype._charIsMappingSeparator = + function SourceMapConsumer_charIsMappingSeparator(aStr, index) { + var c = aStr.charAt(index); + return c === ";" || c === ","; + }; + +/** + * Parse the mappings in a string in to a data structure which we can easily + * query (the ordered arrays in the `this.__generatedMappings` and + * `this.__originalMappings` properties). + */ +SourceMapConsumer.prototype._parseMappings = + function SourceMapConsumer_parseMappings(aStr, aSourceRoot) { + throw new Error("Subclasses must implement _parseMappings"); + }; + +SourceMapConsumer.GENERATED_ORDER = 1; +SourceMapConsumer.ORIGINAL_ORDER = 2; + +SourceMapConsumer.GREATEST_LOWER_BOUND = 1; +SourceMapConsumer.LEAST_UPPER_BOUND = 2; + +/** + * Iterate over each mapping between an original source/line/column and a + * generated line/column in this source map. + * + * @param Function aCallback + * The function that is called with each mapping. + * @param Object aContext + * Optional. If specified, this object will be the value of `this` every + * time that `aCallback` is called. + * @param aOrder + * Either `SourceMapConsumer.GENERATED_ORDER` or + * `SourceMapConsumer.ORIGINAL_ORDER`. Specifies whether you want to + * iterate over the mappings sorted by the generated file's line/column + * order or the original's source/line/column order, respectively. Defaults to + * `SourceMapConsumer.GENERATED_ORDER`. 
+ */ +SourceMapConsumer.prototype.eachMapping = + function SourceMapConsumer_eachMapping(aCallback, aContext, aOrder) { + var context = aContext || null; + var order = aOrder || SourceMapConsumer.GENERATED_ORDER; + + var mappings; + switch (order) { + case SourceMapConsumer.GENERATED_ORDER: + mappings = this._generatedMappings; + break; + case SourceMapConsumer.ORIGINAL_ORDER: + mappings = this._originalMappings; + break; + default: + throw new Error("Unknown order of iteration."); + } + + var sourceRoot = this.sourceRoot; + var boundCallback = aCallback.bind(context); + var names = this._names; + var sources = this._sources; + var sourceMapURL = this._sourceMapURL; + + for (var i = 0, n = mappings.length; i < n; i++) { + var mapping = mappings[i]; + var source = mapping.source === null ? null : sources.at(mapping.source); + source = util.computeSourceURL(sourceRoot, source, sourceMapURL); + boundCallback({ + source: source, + generatedLine: mapping.generatedLine, + generatedColumn: mapping.generatedColumn, + originalLine: mapping.originalLine, + originalColumn: mapping.originalColumn, + name: mapping.name === null ? null : names.at(mapping.name) + }); + } + }; + +/** + * Returns all generated line and column information for the original source, + * line, and column provided. If no column is provided, returns all mappings + * corresponding to a either the line we are searching for or the next + * closest line that has any mappings. Otherwise, returns all mappings + * corresponding to the given line and either the column we are searching for + * or the next closest column that has any offsets. + * + * The only argument is an object with the following properties: + * + * - source: The filename of the original source. + * - line: The line number in the original source. The line number is 1-based. + * - column: Optional. the column number in the original source. + * The column number is 0-based. + * + * and an array of objects is returned, each with the following properties: + * + * - line: The line number in the generated source, or null. The + * line number is 1-based. + * - column: The column number in the generated source, or null. + * The column number is 0-based. + */ +SourceMapConsumer.prototype.allGeneratedPositionsFor = + function SourceMapConsumer_allGeneratedPositionsFor(aArgs) { + var line = util.getArg(aArgs, 'line'); + + // When there is no exact match, BasicSourceMapConsumer.prototype._findMapping + // returns the index of the closest mapping less than the needle. By + // setting needle.originalColumn to 0, we thus find the last mapping for + // the given line, provided such a mapping exists. + var needle = { + source: util.getArg(aArgs, 'source'), + originalLine: line, + originalColumn: util.getArg(aArgs, 'column', 0) + }; + + needle.source = this._findSourceIndex(needle.source); + if (needle.source < 0) { + return []; + } + + var mappings = []; + + var index = this._findMapping(needle, + this._originalMappings, + "originalLine", + "originalColumn", + util.compareByOriginalPositions, + binarySearch.LEAST_UPPER_BOUND); + if (index >= 0) { + var mapping = this._originalMappings[index]; + + if (aArgs.column === undefined) { + var originalLine = mapping.originalLine; + + // Iterate until either we run out of mappings, or we run into + // a mapping for a different line than the one we found. Since + // mappings are sorted, this is guaranteed to find all mappings for + // the line we found. 
+ while (mapping && mapping.originalLine === originalLine) { + mappings.push({ + line: util.getArg(mapping, 'generatedLine', null), + column: util.getArg(mapping, 'generatedColumn', null), + lastColumn: util.getArg(mapping, 'lastGeneratedColumn', null) + }); + + mapping = this._originalMappings[++index]; + } + } else { + var originalColumn = mapping.originalColumn; + + // Iterate until either we run out of mappings, or we run into + // a mapping for a different line than the one we were searching for. + // Since mappings are sorted, this is guaranteed to find all mappings for + // the line we are searching for. + while (mapping && + mapping.originalLine === line && + mapping.originalColumn == originalColumn) { + mappings.push({ + line: util.getArg(mapping, 'generatedLine', null), + column: util.getArg(mapping, 'generatedColumn', null), + lastColumn: util.getArg(mapping, 'lastGeneratedColumn', null) + }); + + mapping = this._originalMappings[++index]; + } + } + } + + return mappings; + }; + +exports.SourceMapConsumer = SourceMapConsumer; + +/** + * A BasicSourceMapConsumer instance represents a parsed source map which we can + * query for information about the original file positions by giving it a file + * position in the generated source. + * + * The first parameter is the raw source map (either as a JSON string, or + * already parsed to an object). According to the spec, source maps have the + * following attributes: + * + * - version: Which version of the source map spec this map is following. + * - sources: An array of URLs to the original source files. + * - names: An array of identifiers which can be referrenced by individual mappings. + * - sourceRoot: Optional. The URL root from which all sources are relative. + * - sourcesContent: Optional. An array of contents of the original source files. + * - mappings: A string of base64 VLQs which contain the actual mappings. + * - file: Optional. The generated file this source map is associated with. + * + * Here is an example source map, taken from the source map spec[0]: + * + * { + * version : 3, + * file: "out.js", + * sourceRoot : "", + * sources: ["foo.js", "bar.js"], + * names: ["src", "maps", "are", "fun"], + * mappings: "AA,AB;;ABCDE;" + * } + * + * The second parameter, if given, is a string whose value is the URL + * at which the source map was found. This URL is used to compute the + * sources array. + * + * [0]: https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit?pli=1# + */ +function BasicSourceMapConsumer(aSourceMap, aSourceMapURL) { + var sourceMap = aSourceMap; + if (typeof aSourceMap === 'string') { + sourceMap = util.parseSourceMapInput(aSourceMap); + } + + var version = util.getArg(sourceMap, 'version'); + var sources = util.getArg(sourceMap, 'sources'); + // Sass 3.3 leaves out the 'names' array, so we deviate from the spec (which + // requires the array) to play nice here. + var names = util.getArg(sourceMap, 'names', []); + var sourceRoot = util.getArg(sourceMap, 'sourceRoot', null); + var sourcesContent = util.getArg(sourceMap, 'sourcesContent', null); + var mappings = util.getArg(sourceMap, 'mappings'); + var file = util.getArg(sourceMap, 'file', null); + + // Once again, Sass deviates from the spec and supplies the version as a + // string rather than a number, so we use loose equality checking here. 
+ if (version != this._version) { + throw new Error('Unsupported version: ' + version); + } + + if (sourceRoot) { + sourceRoot = util.normalize(sourceRoot); + } + + sources = sources + .map(String) + // Some source maps produce relative source paths like "./foo.js" instead of + // "foo.js". Normalize these first so that future comparisons will succeed. + // See bugzil.la/1090768. + .map(util.normalize) + // Always ensure that absolute sources are internally stored relative to + // the source root, if the source root is absolute. Not doing this would + // be particularly problematic when the source root is a prefix of the + // source (valid, but why??). See github issue #199 and bugzil.la/1188982. + .map(function (source) { + return sourceRoot && util.isAbsolute(sourceRoot) && util.isAbsolute(source) + ? util.relative(sourceRoot, source) + : source; + }); + + // Pass `true` below to allow duplicate names and sources. While source maps + // are intended to be compressed and deduplicated, the TypeScript compiler + // sometimes generates source maps with duplicates in them. See Github issue + // #72 and bugzil.la/889492. + this._names = ArraySet.fromArray(names.map(String), true); + this._sources = ArraySet.fromArray(sources, true); + + this._absoluteSources = this._sources.toArray().map(function (s) { + return util.computeSourceURL(sourceRoot, s, aSourceMapURL); + }); + + this.sourceRoot = sourceRoot; + this.sourcesContent = sourcesContent; + this._mappings = mappings; + this._sourceMapURL = aSourceMapURL; + this.file = file; +} + +BasicSourceMapConsumer.prototype = Object.create(SourceMapConsumer.prototype); +BasicSourceMapConsumer.prototype.consumer = SourceMapConsumer; + +/** + * Utility function to find the index of a source. Returns -1 if not + * found. + */ +BasicSourceMapConsumer.prototype._findSourceIndex = function(aSource) { + var relativeSource = aSource; + if (this.sourceRoot != null) { + relativeSource = util.relative(this.sourceRoot, relativeSource); + } + + if (this._sources.has(relativeSource)) { + return this._sources.indexOf(relativeSource); + } + + // Maybe aSource is an absolute URL as returned by |sources|. In + // this case we can't simply undo the transform. + var i; + for (i = 0; i < this._absoluteSources.length; ++i) { + if (this._absoluteSources[i] == aSource) { + return i; + } + } + + return -1; +}; + +/** + * Create a BasicSourceMapConsumer from a SourceMapGenerator. + * + * @param SourceMapGenerator aSourceMap + * The source map that will be consumed. 
+ * @param String aSourceMapURL + * The URL at which the source map can be found (optional) + * @returns BasicSourceMapConsumer + */ +BasicSourceMapConsumer.fromSourceMap = + function SourceMapConsumer_fromSourceMap(aSourceMap, aSourceMapURL) { + var smc = Object.create(BasicSourceMapConsumer.prototype); + + var names = smc._names = ArraySet.fromArray(aSourceMap._names.toArray(), true); + var sources = smc._sources = ArraySet.fromArray(aSourceMap._sources.toArray(), true); + smc.sourceRoot = aSourceMap._sourceRoot; + smc.sourcesContent = aSourceMap._generateSourcesContent(smc._sources.toArray(), + smc.sourceRoot); + smc.file = aSourceMap._file; + smc._sourceMapURL = aSourceMapURL; + smc._absoluteSources = smc._sources.toArray().map(function (s) { + return util.computeSourceURL(smc.sourceRoot, s, aSourceMapURL); + }); + + // Because we are modifying the entries (by converting string sources and + // names to indices into the sources and names ArraySets), we have to make + // a copy of the entry or else bad things happen. Shared mutable state + // strikes again! See github issue #191. + + var generatedMappings = aSourceMap._mappings.toArray().slice(); + var destGeneratedMappings = smc.__generatedMappings = []; + var destOriginalMappings = smc.__originalMappings = []; + + for (var i = 0, length = generatedMappings.length; i < length; i++) { + var srcMapping = generatedMappings[i]; + var destMapping = new Mapping; + destMapping.generatedLine = srcMapping.generatedLine; + destMapping.generatedColumn = srcMapping.generatedColumn; + + if (srcMapping.source) { + destMapping.source = sources.indexOf(srcMapping.source); + destMapping.originalLine = srcMapping.originalLine; + destMapping.originalColumn = srcMapping.originalColumn; + + if (srcMapping.name) { + destMapping.name = names.indexOf(srcMapping.name); + } + + destOriginalMappings.push(destMapping); + } + + destGeneratedMappings.push(destMapping); + } + + quickSort(smc.__originalMappings, util.compareByOriginalPositions); + + return smc; + }; + +/** + * The version of the source mapping spec that we are consuming. + */ +BasicSourceMapConsumer.prototype._version = 3; + +/** + * The list of original sources. + */ +Object.defineProperty(BasicSourceMapConsumer.prototype, 'sources', { + get: function () { + return this._absoluteSources.slice(); + } +}); + +/** + * Provide the JIT with a nice shape / hidden class. + */ +function Mapping() { + this.generatedLine = 0; + this.generatedColumn = 0; + this.source = null; + this.originalLine = null; + this.originalColumn = null; + this.name = null; +} + +/** + * Parse the mappings in a string in to a data structure which we can easily + * query (the ordered arrays in the `this.__generatedMappings` and + * `this.__originalMappings` properties). 
+ */ + +const compareGenerated = util.compareByGeneratedPositionsDeflatedNoLine; +function sortGenerated(array, start) { + let l = array.length; + let n = array.length - start; + if (n <= 1) { + return; + } else if (n == 2) { + let a = array[start]; + let b = array[start + 1]; + if (compareGenerated(a, b) > 0) { + array[start] = b; + array[start + 1] = a; + } + } else if (n < 20) { + for (let i = start; i < l; i++) { + for (let j = i; j > start; j--) { + let a = array[j - 1]; + let b = array[j]; + if (compareGenerated(a, b) <= 0) { + break; + } + array[j - 1] = b; + array[j] = a; + } + } + } else { + quickSort(array, compareGenerated, start); + } +} +BasicSourceMapConsumer.prototype._parseMappings = + function SourceMapConsumer_parseMappings(aStr, aSourceRoot) { + var generatedLine = 1; + var previousGeneratedColumn = 0; + var previousOriginalLine = 0; + var previousOriginalColumn = 0; + var previousSource = 0; + var previousName = 0; + var length = aStr.length; + var index = 0; + var cachedSegments = {}; + var temp = {}; + var originalMappings = []; + var generatedMappings = []; + var mapping, str, segment, end, value; + + let subarrayStart = 0; + while (index < length) { + if (aStr.charAt(index) === ';') { + generatedLine++; + index++; + previousGeneratedColumn = 0; + + sortGenerated(generatedMappings, subarrayStart); + subarrayStart = generatedMappings.length; + } + else if (aStr.charAt(index) === ',') { + index++; + } + else { + mapping = new Mapping(); + mapping.generatedLine = generatedLine; + + for (end = index; end < length; end++) { + if (this._charIsMappingSeparator(aStr, end)) { + break; + } + } + str = aStr.slice(index, end); + + segment = []; + while (index < end) { + base64VLQ.decode(aStr, index, temp); + value = temp.value; + index = temp.rest; + segment.push(value); + } + + if (segment.length === 2) { + throw new Error('Found a source, but no line and column'); + } + + if (segment.length === 3) { + throw new Error('Found a source and line, but no column'); + } + + // Generated column. + mapping.generatedColumn = previousGeneratedColumn + segment[0]; + previousGeneratedColumn = mapping.generatedColumn; + + if (segment.length > 1) { + // Original source. + mapping.source = previousSource + segment[1]; + previousSource += segment[1]; + + // Original line. + mapping.originalLine = previousOriginalLine + segment[2]; + previousOriginalLine = mapping.originalLine; + // Lines are stored 0-based + mapping.originalLine += 1; + + // Original column. + mapping.originalColumn = previousOriginalColumn + segment[3]; + previousOriginalColumn = mapping.originalColumn; + + if (segment.length > 4) { + // Original name. 
+ mapping.name = previousName + segment[4]; + previousName += segment[4]; + } + } + + generatedMappings.push(mapping); + if (typeof mapping.originalLine === 'number') { + let currentSource = mapping.source; + while (originalMappings.length <= currentSource) { + originalMappings.push(null); + } + if (originalMappings[currentSource] === null) { + originalMappings[currentSource] = []; + } + originalMappings[currentSource].push(mapping); + } + } + } + + sortGenerated(generatedMappings, subarrayStart); + this.__generatedMappings = generatedMappings; + + for (var i = 0; i < originalMappings.length; i++) { + if (originalMappings[i] != null) { + quickSort(originalMappings[i], util.compareByOriginalPositionsNoSource); + } + } + this.__originalMappings = [].concat(...originalMappings); + }; + +/** + * Find the mapping that best matches the hypothetical "needle" mapping that + * we are searching for in the given "haystack" of mappings. + */ +BasicSourceMapConsumer.prototype._findMapping = + function SourceMapConsumer_findMapping(aNeedle, aMappings, aLineName, + aColumnName, aComparator, aBias) { + // To return the position we are searching for, we must first find the + // mapping for the given position and then return the opposite position it + // points to. Because the mappings are sorted, we can use binary search to + // find the best mapping. + + if (aNeedle[aLineName] <= 0) { + throw new TypeError('Line must be greater than or equal to 1, got ' + + aNeedle[aLineName]); + } + if (aNeedle[aColumnName] < 0) { + throw new TypeError('Column must be greater than or equal to 0, got ' + + aNeedle[aColumnName]); + } + + return binarySearch.search(aNeedle, aMappings, aComparator, aBias); + }; + +/** + * Compute the last column for each generated mapping. The last column is + * inclusive. + */ +BasicSourceMapConsumer.prototype.computeColumnSpans = + function SourceMapConsumer_computeColumnSpans() { + for (var index = 0; index < this._generatedMappings.length; ++index) { + var mapping = this._generatedMappings[index]; + + // Mappings do not contain a field for the last generated columnt. We + // can come up with an optimistic estimate, however, by assuming that + // mappings are contiguous (i.e. given two consecutive mappings, the + // first mapping ends where the second one starts). + if (index + 1 < this._generatedMappings.length) { + var nextMapping = this._generatedMappings[index + 1]; + + if (mapping.generatedLine === nextMapping.generatedLine) { + mapping.lastGeneratedColumn = nextMapping.generatedColumn - 1; + continue; + } + } + + // The last mapping for each line spans the entire line. + mapping.lastGeneratedColumn = Infinity; + } + }; + +/** + * Returns the original source, line, and column information for the generated + * source's line and column positions provided. The only argument is an object + * with the following properties: + * + * - line: The line number in the generated source. The line number + * is 1-based. + * - column: The column number in the generated source. The column + * number is 0-based. + * - bias: Either 'SourceMapConsumer.GREATEST_LOWER_BOUND' or + * 'SourceMapConsumer.LEAST_UPPER_BOUND'. Specifies whether to return the + * closest element that is smaller than or greater than the one we are + * searching for, respectively, if the exact element cannot be found. + * Defaults to 'SourceMapConsumer.GREATEST_LOWER_BOUND'. + * + * and an object is returned with the following properties: + * + * - source: The original source file, or null. 
+ * - line: The line number in the original source, or null. The + * line number is 1-based. + * - column: The column number in the original source, or null. The + * column number is 0-based. + * - name: The original identifier, or null. + */ +BasicSourceMapConsumer.prototype.originalPositionFor = + function SourceMapConsumer_originalPositionFor(aArgs) { + var needle = { + generatedLine: util.getArg(aArgs, 'line'), + generatedColumn: util.getArg(aArgs, 'column') + }; + + var index = this._findMapping( + needle, + this._generatedMappings, + "generatedLine", + "generatedColumn", + util.compareByGeneratedPositionsDeflated, + util.getArg(aArgs, 'bias', SourceMapConsumer.GREATEST_LOWER_BOUND) + ); + + if (index >= 0) { + var mapping = this._generatedMappings[index]; + + if (mapping.generatedLine === needle.generatedLine) { + var source = util.getArg(mapping, 'source', null); + if (source !== null) { + source = this._sources.at(source); + source = util.computeSourceURL(this.sourceRoot, source, this._sourceMapURL); + } + var name = util.getArg(mapping, 'name', null); + if (name !== null) { + name = this._names.at(name); + } + return { + source: source, + line: util.getArg(mapping, 'originalLine', null), + column: util.getArg(mapping, 'originalColumn', null), + name: name + }; + } + } + + return { + source: null, + line: null, + column: null, + name: null + }; + }; + +/** + * Return true if we have the source content for every source in the source + * map, false otherwise. + */ +BasicSourceMapConsumer.prototype.hasContentsOfAllSources = + function BasicSourceMapConsumer_hasContentsOfAllSources() { + if (!this.sourcesContent) { + return false; + } + return this.sourcesContent.length >= this._sources.size() && + !this.sourcesContent.some(function (sc) { return sc == null; }); + }; + +/** + * Returns the original source content. The only argument is the url of the + * original source file. Returns null if no original source content is + * available. + */ +BasicSourceMapConsumer.prototype.sourceContentFor = + function SourceMapConsumer_sourceContentFor(aSource, nullOnMissing) { + if (!this.sourcesContent) { + return null; + } + + var index = this._findSourceIndex(aSource); + if (index >= 0) { + return this.sourcesContent[index]; + } + + var relativeSource = aSource; + if (this.sourceRoot != null) { + relativeSource = util.relative(this.sourceRoot, relativeSource); + } + + var url; + if (this.sourceRoot != null + && (url = util.urlParse(this.sourceRoot))) { + // XXX: file:// URIs and absolute paths lead to unexpected behavior for + // many users. We can help them out when they expect file:// URIs to + // behave like it would if they were running a local HTTP server. See + // https://bugzilla.mozilla.org/show_bug.cgi?id=885597. + var fileUriAbsPath = relativeSource.replace(/^file:\/\//, ""); + if (url.scheme == "file" + && this._sources.has(fileUriAbsPath)) { + return this.sourcesContent[this._sources.indexOf(fileUriAbsPath)] + } + + if ((!url.path || url.path == "/") + && this._sources.has("/" + relativeSource)) { + return this.sourcesContent[this._sources.indexOf("/" + relativeSource)]; + } + } + + // This function is used recursively from + // IndexedSourceMapConsumer.prototype.sourceContentFor. In that case, we + // don't want to throw if we can't find the source - we just want to + // return null, so we provide a flag to exit gracefully. 
+ if (nullOnMissing) { + return null; + } + else { + throw new Error('"' + relativeSource + '" is not in the SourceMap.'); + } + }; + +/** + * Returns the generated line and column information for the original source, + * line, and column positions provided. The only argument is an object with + * the following properties: + * + * - source: The filename of the original source. + * - line: The line number in the original source. The line number + * is 1-based. + * - column: The column number in the original source. The column + * number is 0-based. + * - bias: Either 'SourceMapConsumer.GREATEST_LOWER_BOUND' or + * 'SourceMapConsumer.LEAST_UPPER_BOUND'. Specifies whether to return the + * closest element that is smaller than or greater than the one we are + * searching for, respectively, if the exact element cannot be found. + * Defaults to 'SourceMapConsumer.GREATEST_LOWER_BOUND'. + * + * and an object is returned with the following properties: + * + * - line: The line number in the generated source, or null. The + * line number is 1-based. + * - column: The column number in the generated source, or null. + * The column number is 0-based. + */ +BasicSourceMapConsumer.prototype.generatedPositionFor = + function SourceMapConsumer_generatedPositionFor(aArgs) { + var source = util.getArg(aArgs, 'source'); + source = this._findSourceIndex(source); + if (source < 0) { + return { + line: null, + column: null, + lastColumn: null + }; + } + + var needle = { + source: source, + originalLine: util.getArg(aArgs, 'line'), + originalColumn: util.getArg(aArgs, 'column') + }; + + var index = this._findMapping( + needle, + this._originalMappings, + "originalLine", + "originalColumn", + util.compareByOriginalPositions, + util.getArg(aArgs, 'bias', SourceMapConsumer.GREATEST_LOWER_BOUND) + ); + + if (index >= 0) { + var mapping = this._originalMappings[index]; + + if (mapping.source === needle.source) { + return { + line: util.getArg(mapping, 'generatedLine', null), + column: util.getArg(mapping, 'generatedColumn', null), + lastColumn: util.getArg(mapping, 'lastGeneratedColumn', null) + }; + } + } + + return { + line: null, + column: null, + lastColumn: null + }; + }; + +exports.BasicSourceMapConsumer = BasicSourceMapConsumer; + +/** + * An IndexedSourceMapConsumer instance represents a parsed source map which + * we can query for information. It differs from BasicSourceMapConsumer in + * that it takes "indexed" source maps (i.e. ones with a "sections" field) as + * input. + * + * The first parameter is a raw source map (either as a JSON string, or already + * parsed to an object). According to the spec for indexed source maps, they + * have the following attributes: + * + * - version: Which version of the source map spec this map is following. + * - file: Optional. The generated file this source map is associated with. + * - sections: A list of section definitions. + * + * Each value under the "sections" field has two fields: + * - offset: The offset into the original specified at which this section + * begins to apply, defined as an object with a "line" and "column" + * field. + * - map: A source map definition. This source map could also be indexed, + * but doesn't have to be. + * + * Instead of the "map" field, it's also possible to have a "url" field + * specifying a URL to retrieve a source map from, but that's currently + * unsupported. + * + * Here's an example source map, taken from the source map spec[0], but + * modified to omit a section which uses the "url" field. 
+ * + * { + * version : 3, + * file: "app.js", + * sections: [{ + * offset: {line:100, column:10}, + * map: { + * version : 3, + * file: "section.js", + * sources: ["foo.js", "bar.js"], + * names: ["src", "maps", "are", "fun"], + * mappings: "AAAA,E;;ABCDE;" + * } + * }], + * } + * + * The second parameter, if given, is a string whose value is the URL + * at which the source map was found. This URL is used to compute the + * sources array. + * + * [0]: https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit#heading=h.535es3xeprgt + */ +function IndexedSourceMapConsumer(aSourceMap, aSourceMapURL) { + var sourceMap = aSourceMap; + if (typeof aSourceMap === 'string') { + sourceMap = util.parseSourceMapInput(aSourceMap); + } + + var version = util.getArg(sourceMap, 'version'); + var sections = util.getArg(sourceMap, 'sections'); + + if (version != this._version) { + throw new Error('Unsupported version: ' + version); + } + + this._sources = new ArraySet(); + this._names = new ArraySet(); + + var lastOffset = { + line: -1, + column: 0 + }; + this._sections = sections.map(function (s) { + if (s.url) { + // The url field will require support for asynchronicity. + // See https://github.com/mozilla/source-map/issues/16 + throw new Error('Support for url field in sections not implemented.'); + } + var offset = util.getArg(s, 'offset'); + var offsetLine = util.getArg(offset, 'line'); + var offsetColumn = util.getArg(offset, 'column'); + + if (offsetLine < lastOffset.line || + (offsetLine === lastOffset.line && offsetColumn < lastOffset.column)) { + throw new Error('Section offsets must be ordered and non-overlapping.'); + } + lastOffset = offset; + + return { + generatedOffset: { + // The offset fields are 0-based, but we use 1-based indices when + // encoding/decoding from VLQ. + generatedLine: offsetLine + 1, + generatedColumn: offsetColumn + 1 + }, + consumer: new SourceMapConsumer(util.getArg(s, 'map'), aSourceMapURL) + } + }); +} + +IndexedSourceMapConsumer.prototype = Object.create(SourceMapConsumer.prototype); +IndexedSourceMapConsumer.prototype.constructor = SourceMapConsumer; + +/** + * The version of the source mapping spec that we are consuming. + */ +IndexedSourceMapConsumer.prototype._version = 3; + +/** + * The list of original sources. + */ +Object.defineProperty(IndexedSourceMapConsumer.prototype, 'sources', { + get: function () { + var sources = []; + for (var i = 0; i < this._sections.length; i++) { + for (var j = 0; j < this._sections[i].consumer.sources.length; j++) { + sources.push(this._sections[i].consumer.sources[j]); + } + } + return sources; + } +}); + +/** + * Returns the original source, line, and column information for the generated + * source's line and column positions provided. The only argument is an object + * with the following properties: + * + * - line: The line number in the generated source. The line number + * is 1-based. + * - column: The column number in the generated source. The column + * number is 0-based. + * + * and an object is returned with the following properties: + * + * - source: The original source file, or null. + * - line: The line number in the original source, or null. The + * line number is 1-based. + * - column: The column number in the original source, or null. The + * column number is 0-based. + * - name: The original identifier, or null. 
+ */ +IndexedSourceMapConsumer.prototype.originalPositionFor = + function IndexedSourceMapConsumer_originalPositionFor(aArgs) { + var needle = { + generatedLine: util.getArg(aArgs, 'line'), + generatedColumn: util.getArg(aArgs, 'column') + }; + + // Find the section containing the generated position we're trying to map + // to an original position. + var sectionIndex = binarySearch.search(needle, this._sections, + function(needle, section) { + var cmp = needle.generatedLine - section.generatedOffset.generatedLine; + if (cmp) { + return cmp; + } + + return (needle.generatedColumn - + section.generatedOffset.generatedColumn); + }); + var section = this._sections[sectionIndex]; + + if (!section) { + return { + source: null, + line: null, + column: null, + name: null + }; + } + + return section.consumer.originalPositionFor({ + line: needle.generatedLine - + (section.generatedOffset.generatedLine - 1), + column: needle.generatedColumn - + (section.generatedOffset.generatedLine === needle.generatedLine + ? section.generatedOffset.generatedColumn - 1 + : 0), + bias: aArgs.bias + }); + }; + +/** + * Return true if we have the source content for every source in the source + * map, false otherwise. + */ +IndexedSourceMapConsumer.prototype.hasContentsOfAllSources = + function IndexedSourceMapConsumer_hasContentsOfAllSources() { + return this._sections.every(function (s) { + return s.consumer.hasContentsOfAllSources(); + }); + }; + +/** + * Returns the original source content. The only argument is the url of the + * original source file. Returns null if no original source content is + * available. + */ +IndexedSourceMapConsumer.prototype.sourceContentFor = + function IndexedSourceMapConsumer_sourceContentFor(aSource, nullOnMissing) { + for (var i = 0; i < this._sections.length; i++) { + var section = this._sections[i]; + + var content = section.consumer.sourceContentFor(aSource, true); + if (content) { + return content; + } + } + if (nullOnMissing) { + return null; + } + else { + throw new Error('"' + aSource + '" is not in the SourceMap.'); + } + }; + +/** + * Returns the generated line and column information for the original source, + * line, and column positions provided. The only argument is an object with + * the following properties: + * + * - source: The filename of the original source. + * - line: The line number in the original source. The line number + * is 1-based. + * - column: The column number in the original source. The column + * number is 0-based. + * + * and an object is returned with the following properties: + * + * - line: The line number in the generated source, or null. The + * line number is 1-based. + * - column: The column number in the generated source, or null. + * The column number is 0-based. + */ +IndexedSourceMapConsumer.prototype.generatedPositionFor = + function IndexedSourceMapConsumer_generatedPositionFor(aArgs) { + for (var i = 0; i < this._sections.length; i++) { + var section = this._sections[i]; + + // Only consider this section if the requested source is in the list of + // sources of the consumer. + if (section.consumer._findSourceIndex(util.getArg(aArgs, 'source')) === -1) { + continue; + } + var generatedPosition = section.consumer.generatedPositionFor(aArgs); + if (generatedPosition) { + var ret = { + line: generatedPosition.line + + (section.generatedOffset.generatedLine - 1), + column: generatedPosition.column + + (section.generatedOffset.generatedLine === generatedPosition.line + ? 
section.generatedOffset.generatedColumn - 1 + : 0) + }; + return ret; + } + } + + return { + line: null, + column: null + }; + }; + +/** + * Parse the mappings in a string in to a data structure which we can easily + * query (the ordered arrays in the `this.__generatedMappings` and + * `this.__originalMappings` properties). + */ +IndexedSourceMapConsumer.prototype._parseMappings = + function IndexedSourceMapConsumer_parseMappings(aStr, aSourceRoot) { + this.__generatedMappings = []; + this.__originalMappings = []; + for (var i = 0; i < this._sections.length; i++) { + var section = this._sections[i]; + var sectionMappings = section.consumer._generatedMappings; + for (var j = 0; j < sectionMappings.length; j++) { + var mapping = sectionMappings[j]; + + var source = section.consumer._sources.at(mapping.source); + source = util.computeSourceURL(section.consumer.sourceRoot, source, this._sourceMapURL); + this._sources.add(source); + source = this._sources.indexOf(source); + + var name = null; + if (mapping.name) { + name = section.consumer._names.at(mapping.name); + this._names.add(name); + name = this._names.indexOf(name); + } + + // The mappings coming from the consumer for the section have + // generated positions relative to the start of the section, so we + // need to offset them to be relative to the start of the concatenated + // generated file. + var adjustedMapping = { + source: source, + generatedLine: mapping.generatedLine + + (section.generatedOffset.generatedLine - 1), + generatedColumn: mapping.generatedColumn + + (section.generatedOffset.generatedLine === mapping.generatedLine + ? section.generatedOffset.generatedColumn - 1 + : 0), + originalLine: mapping.originalLine, + originalColumn: mapping.originalColumn, + name: name + }; + + this.__generatedMappings.push(adjustedMapping); + if (typeof adjustedMapping.originalLine === 'number') { + this.__originalMappings.push(adjustedMapping); + } + } + } + + quickSort(this.__generatedMappings, util.compareByGeneratedPositionsDeflated); + quickSort(this.__originalMappings, util.compareByOriginalPositions); + }; + +exports.IndexedSourceMapConsumer = IndexedSourceMapConsumer; diff --git a/node_modules/source-map-js/lib/source-map-generator.js b/node_modules/source-map-js/lib/source-map-generator.js new file mode 100644 index 0000000..508bcfb --- /dev/null +++ b/node_modules/source-map-js/lib/source-map-generator.js @@ -0,0 +1,425 @@ +/* -*- Mode: js; js-indent-level: 2; -*- */ +/* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + +var base64VLQ = require('./base64-vlq'); +var util = require('./util'); +var ArraySet = require('./array-set').ArraySet; +var MappingList = require('./mapping-list').MappingList; + +/** + * An instance of the SourceMapGenerator represents a source map which is + * being built incrementally. You may pass an object with the following + * properties: + * + * - file: The filename of the generated source. + * - sourceRoot: A root for all relative URLs in this source map. 
+ */ +function SourceMapGenerator(aArgs) { + if (!aArgs) { + aArgs = {}; + } + this._file = util.getArg(aArgs, 'file', null); + this._sourceRoot = util.getArg(aArgs, 'sourceRoot', null); + this._skipValidation = util.getArg(aArgs, 'skipValidation', false); + this._sources = new ArraySet(); + this._names = new ArraySet(); + this._mappings = new MappingList(); + this._sourcesContents = null; +} + +SourceMapGenerator.prototype._version = 3; + +/** + * Creates a new SourceMapGenerator based on a SourceMapConsumer + * + * @param aSourceMapConsumer The SourceMap. + */ +SourceMapGenerator.fromSourceMap = + function SourceMapGenerator_fromSourceMap(aSourceMapConsumer) { + var sourceRoot = aSourceMapConsumer.sourceRoot; + var generator = new SourceMapGenerator({ + file: aSourceMapConsumer.file, + sourceRoot: sourceRoot + }); + aSourceMapConsumer.eachMapping(function (mapping) { + var newMapping = { + generated: { + line: mapping.generatedLine, + column: mapping.generatedColumn + } + }; + + if (mapping.source != null) { + newMapping.source = mapping.source; + if (sourceRoot != null) { + newMapping.source = util.relative(sourceRoot, newMapping.source); + } + + newMapping.original = { + line: mapping.originalLine, + column: mapping.originalColumn + }; + + if (mapping.name != null) { + newMapping.name = mapping.name; + } + } + + generator.addMapping(newMapping); + }); + aSourceMapConsumer.sources.forEach(function (sourceFile) { + var sourceRelative = sourceFile; + if (sourceRoot !== null) { + sourceRelative = util.relative(sourceRoot, sourceFile); + } + + if (!generator._sources.has(sourceRelative)) { + generator._sources.add(sourceRelative); + } + + var content = aSourceMapConsumer.sourceContentFor(sourceFile); + if (content != null) { + generator.setSourceContent(sourceFile, content); + } + }); + return generator; + }; + +/** + * Add a single mapping from original source line and column to the generated + * source's line and column for this source map being created. The mapping + * object should have the following properties: + * + * - generated: An object with the generated line and column positions. + * - original: An object with the original line and column positions. + * - source: The original source file (relative to the sourceRoot). + * - name: An optional original token name for this mapping. + */ +SourceMapGenerator.prototype.addMapping = + function SourceMapGenerator_addMapping(aArgs) { + var generated = util.getArg(aArgs, 'generated'); + var original = util.getArg(aArgs, 'original', null); + var source = util.getArg(aArgs, 'source', null); + var name = util.getArg(aArgs, 'name', null); + + if (!this._skipValidation) { + this._validateMapping(generated, original, source, name); + } + + if (source != null) { + source = String(source); + if (!this._sources.has(source)) { + this._sources.add(source); + } + } + + if (name != null) { + name = String(name); + if (!this._names.has(name)) { + this._names.add(name); + } + } + + this._mappings.add({ + generatedLine: generated.line, + generatedColumn: generated.column, + originalLine: original != null && original.line, + originalColumn: original != null && original.column, + source: source, + name: name + }); + }; + +/** + * Set the source content for a source file. 
+ */ +SourceMapGenerator.prototype.setSourceContent = + function SourceMapGenerator_setSourceContent(aSourceFile, aSourceContent) { + var source = aSourceFile; + if (this._sourceRoot != null) { + source = util.relative(this._sourceRoot, source); + } + + if (aSourceContent != null) { + // Add the source content to the _sourcesContents map. + // Create a new _sourcesContents map if the property is null. + if (!this._sourcesContents) { + this._sourcesContents = Object.create(null); + } + this._sourcesContents[util.toSetString(source)] = aSourceContent; + } else if (this._sourcesContents) { + // Remove the source file from the _sourcesContents map. + // If the _sourcesContents map is empty, set the property to null. + delete this._sourcesContents[util.toSetString(source)]; + if (Object.keys(this._sourcesContents).length === 0) { + this._sourcesContents = null; + } + } + }; + +/** + * Applies the mappings of a sub-source-map for a specific source file to the + * source map being generated. Each mapping to the supplied source file is + * rewritten using the supplied source map. Note: The resolution for the + * resulting mappings is the minimium of this map and the supplied map. + * + * @param aSourceMapConsumer The source map to be applied. + * @param aSourceFile Optional. The filename of the source file. + * If omitted, SourceMapConsumer's file property will be used. + * @param aSourceMapPath Optional. The dirname of the path to the source map + * to be applied. If relative, it is relative to the SourceMapConsumer. + * This parameter is needed when the two source maps aren't in the same + * directory, and the source map to be applied contains relative source + * paths. If so, those relative source paths need to be rewritten + * relative to the SourceMapGenerator. + */ +SourceMapGenerator.prototype.applySourceMap = + function SourceMapGenerator_applySourceMap(aSourceMapConsumer, aSourceFile, aSourceMapPath) { + var sourceFile = aSourceFile; + // If aSourceFile is omitted, we will use the file property of the SourceMap + if (aSourceFile == null) { + if (aSourceMapConsumer.file == null) { + throw new Error( + 'SourceMapGenerator.prototype.applySourceMap requires either an explicit source file, ' + + 'or the source map\'s "file" property. Both were omitted.' + ); + } + sourceFile = aSourceMapConsumer.file; + } + var sourceRoot = this._sourceRoot; + // Make "sourceFile" relative if an absolute Url is passed. + if (sourceRoot != null) { + sourceFile = util.relative(sourceRoot, sourceFile); + } + // Applying the SourceMap can add and remove items from the sources and + // the names array. + var newSources = new ArraySet(); + var newNames = new ArraySet(); + + // Find mappings for the "sourceFile" + this._mappings.unsortedForEach(function (mapping) { + if (mapping.source === sourceFile && mapping.originalLine != null) { + // Check if it can be mapped by the source map, then update the mapping. 
+ var original = aSourceMapConsumer.originalPositionFor({ + line: mapping.originalLine, + column: mapping.originalColumn + }); + if (original.source != null) { + // Copy mapping + mapping.source = original.source; + if (aSourceMapPath != null) { + mapping.source = util.join(aSourceMapPath, mapping.source) + } + if (sourceRoot != null) { + mapping.source = util.relative(sourceRoot, mapping.source); + } + mapping.originalLine = original.line; + mapping.originalColumn = original.column; + if (original.name != null) { + mapping.name = original.name; + } + } + } + + var source = mapping.source; + if (source != null && !newSources.has(source)) { + newSources.add(source); + } + + var name = mapping.name; + if (name != null && !newNames.has(name)) { + newNames.add(name); + } + + }, this); + this._sources = newSources; + this._names = newNames; + + // Copy sourcesContents of applied map. + aSourceMapConsumer.sources.forEach(function (sourceFile) { + var content = aSourceMapConsumer.sourceContentFor(sourceFile); + if (content != null) { + if (aSourceMapPath != null) { + sourceFile = util.join(aSourceMapPath, sourceFile); + } + if (sourceRoot != null) { + sourceFile = util.relative(sourceRoot, sourceFile); + } + this.setSourceContent(sourceFile, content); + } + }, this); + }; + +/** + * A mapping can have one of the three levels of data: + * + * 1. Just the generated position. + * 2. The Generated position, original position, and original source. + * 3. Generated and original position, original source, as well as a name + * token. + * + * To maintain consistency, we validate that any new mapping being added falls + * in to one of these categories. + */ +SourceMapGenerator.prototype._validateMapping = + function SourceMapGenerator_validateMapping(aGenerated, aOriginal, aSource, + aName) { + // When aOriginal is truthy but has empty values for .line and .column, + // it is most likely a programmer error. In this case we throw a very + // specific error message to try to guide them the right way. + // For example: https://github.com/Polymer/polymer-bundler/pull/519 + if (aOriginal && typeof aOriginal.line !== 'number' && typeof aOriginal.column !== 'number') { + throw new Error( + 'original.line and original.column are not numbers -- you probably meant to omit ' + + 'the original mapping entirely and only map the generated position. If so, pass ' + + 'null for the original mapping instead of an object with empty or null values.' + ); + } + + if (aGenerated && 'line' in aGenerated && 'column' in aGenerated + && aGenerated.line > 0 && aGenerated.column >= 0 + && !aOriginal && !aSource && !aName) { + // Case 1. + return; + } + else if (aGenerated && 'line' in aGenerated && 'column' in aGenerated + && aOriginal && 'line' in aOriginal && 'column' in aOriginal + && aGenerated.line > 0 && aGenerated.column >= 0 + && aOriginal.line > 0 && aOriginal.column >= 0 + && aSource) { + // Cases 2 and 3. + return; + } + else { + throw new Error('Invalid mapping: ' + JSON.stringify({ + generated: aGenerated, + source: aSource, + original: aOriginal, + name: aName + })); + } + }; + +/** + * Serialize the accumulated mappings in to the stream of base 64 VLQs + * specified by the source map format. 
+ */ +SourceMapGenerator.prototype._serializeMappings = + function SourceMapGenerator_serializeMappings() { + var previousGeneratedColumn = 0; + var previousGeneratedLine = 1; + var previousOriginalColumn = 0; + var previousOriginalLine = 0; + var previousName = 0; + var previousSource = 0; + var result = ''; + var next; + var mapping; + var nameIdx; + var sourceIdx; + + var mappings = this._mappings.toArray(); + for (var i = 0, len = mappings.length; i < len; i++) { + mapping = mappings[i]; + next = '' + + if (mapping.generatedLine !== previousGeneratedLine) { + previousGeneratedColumn = 0; + while (mapping.generatedLine !== previousGeneratedLine) { + next += ';'; + previousGeneratedLine++; + } + } + else { + if (i > 0) { + if (!util.compareByGeneratedPositionsInflated(mapping, mappings[i - 1])) { + continue; + } + next += ','; + } + } + + next += base64VLQ.encode(mapping.generatedColumn + - previousGeneratedColumn); + previousGeneratedColumn = mapping.generatedColumn; + + if (mapping.source != null) { + sourceIdx = this._sources.indexOf(mapping.source); + next += base64VLQ.encode(sourceIdx - previousSource); + previousSource = sourceIdx; + + // lines are stored 0-based in SourceMap spec version 3 + next += base64VLQ.encode(mapping.originalLine - 1 + - previousOriginalLine); + previousOriginalLine = mapping.originalLine - 1; + + next += base64VLQ.encode(mapping.originalColumn + - previousOriginalColumn); + previousOriginalColumn = mapping.originalColumn; + + if (mapping.name != null) { + nameIdx = this._names.indexOf(mapping.name); + next += base64VLQ.encode(nameIdx - previousName); + previousName = nameIdx; + } + } + + result += next; + } + + return result; + }; + +SourceMapGenerator.prototype._generateSourcesContent = + function SourceMapGenerator_generateSourcesContent(aSources, aSourceRoot) { + return aSources.map(function (source) { + if (!this._sourcesContents) { + return null; + } + if (aSourceRoot != null) { + source = util.relative(aSourceRoot, source); + } + var key = util.toSetString(source); + return Object.prototype.hasOwnProperty.call(this._sourcesContents, key) + ? this._sourcesContents[key] + : null; + }, this); + }; + +/** + * Externalize the source map. + */ +SourceMapGenerator.prototype.toJSON = + function SourceMapGenerator_toJSON() { + var map = { + version: this._version, + sources: this._sources.toArray(), + names: this._names.toArray(), + mappings: this._serializeMappings() + }; + if (this._file != null) { + map.file = this._file; + } + if (this._sourceRoot != null) { + map.sourceRoot = this._sourceRoot; + } + if (this._sourcesContents) { + map.sourcesContent = this._generateSourcesContent(map.sources, map.sourceRoot); + } + + return map; + }; + +/** + * Render the source map being generated to a string. + */ +SourceMapGenerator.prototype.toString = + function SourceMapGenerator_toString() { + return JSON.stringify(this.toJSON()); + }; + +exports.SourceMapGenerator = SourceMapGenerator; diff --git a/node_modules/source-map-js/lib/source-node.js b/node_modules/source-map-js/lib/source-node.js new file mode 100644 index 0000000..8bcdbe3 --- /dev/null +++ b/node_modules/source-map-js/lib/source-node.js @@ -0,0 +1,413 @@ +/* -*- Mode: js; js-indent-level: 2; -*- */ +/* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. 
See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + +var SourceMapGenerator = require('./source-map-generator').SourceMapGenerator; +var util = require('./util'); + +// Matches a Windows-style `\r\n` newline or a `\n` newline used by all other +// operating systems these days (capturing the result). +var REGEX_NEWLINE = /(\r?\n)/; + +// Newline character code for charCodeAt() comparisons +var NEWLINE_CODE = 10; + +// Private symbol for identifying `SourceNode`s when multiple versions of +// the source-map library are loaded. This MUST NOT CHANGE across +// versions! +var isSourceNode = "$$$isSourceNode$$$"; + +/** + * SourceNodes provide a way to abstract over interpolating/concatenating + * snippets of generated JavaScript source code while maintaining the line and + * column information associated with the original source code. + * + * @param aLine The original line number. + * @param aColumn The original column number. + * @param aSource The original source's filename. + * @param aChunks Optional. An array of strings which are snippets of + * generated JS, or other SourceNodes. + * @param aName The original identifier. + */ +function SourceNode(aLine, aColumn, aSource, aChunks, aName) { + this.children = []; + this.sourceContents = {}; + this.line = aLine == null ? null : aLine; + this.column = aColumn == null ? null : aColumn; + this.source = aSource == null ? null : aSource; + this.name = aName == null ? null : aName; + this[isSourceNode] = true; + if (aChunks != null) this.add(aChunks); +} + +/** + * Creates a SourceNode from generated code and a SourceMapConsumer. + * + * @param aGeneratedCode The generated code + * @param aSourceMapConsumer The SourceMap for the generated code + * @param aRelativePath Optional. The path that relative sources in the + * SourceMapConsumer should be relative to. + */ +SourceNode.fromStringWithSourceMap = + function SourceNode_fromStringWithSourceMap(aGeneratedCode, aSourceMapConsumer, aRelativePath) { + // The SourceNode we want to fill with the generated code + // and the SourceMap + var node = new SourceNode(); + + // All even indices of this array are one line of the generated code, + // while all odd indices are the newlines between two adjacent lines + // (since `REGEX_NEWLINE` captures its match). + // Processed fragments are accessed by calling `shiftNextLine`. + var remainingLines = aGeneratedCode.split(REGEX_NEWLINE); + var remainingLinesIndex = 0; + var shiftNextLine = function() { + var lineContents = getNextLine(); + // The last line of a file might not have a newline. + var newLine = getNextLine() || ""; + return lineContents + newLine; + + function getNextLine() { + return remainingLinesIndex < remainingLines.length ? + remainingLines[remainingLinesIndex++] : undefined; + } + }; + + // We need to remember the position of "remainingLines" + var lastGeneratedLine = 1, lastGeneratedColumn = 0; + + // The generate SourceNodes we need a code range. + // To extract it current and last mapping is used. + // Here we store the last mapping. + var lastMapping = null; + + aSourceMapConsumer.eachMapping(function (mapping) { + if (lastMapping !== null) { + // We add the code from "lastMapping" to "mapping": + // First check if there is a new line in between. 
+ if (lastGeneratedLine < mapping.generatedLine) { + // Associate first line with "lastMapping" + addMappingWithCode(lastMapping, shiftNextLine()); + lastGeneratedLine++; + lastGeneratedColumn = 0; + // The remaining code is added without mapping + } else { + // There is no new line in between. + // Associate the code between "lastGeneratedColumn" and + // "mapping.generatedColumn" with "lastMapping" + var nextLine = remainingLines[remainingLinesIndex] || ''; + var code = nextLine.substr(0, mapping.generatedColumn - + lastGeneratedColumn); + remainingLines[remainingLinesIndex] = nextLine.substr(mapping.generatedColumn - + lastGeneratedColumn); + lastGeneratedColumn = mapping.generatedColumn; + addMappingWithCode(lastMapping, code); + // No more remaining code, continue + lastMapping = mapping; + return; + } + } + // We add the generated code until the first mapping + // to the SourceNode without any mapping. + // Each line is added as separate string. + while (lastGeneratedLine < mapping.generatedLine) { + node.add(shiftNextLine()); + lastGeneratedLine++; + } + if (lastGeneratedColumn < mapping.generatedColumn) { + var nextLine = remainingLines[remainingLinesIndex] || ''; + node.add(nextLine.substr(0, mapping.generatedColumn)); + remainingLines[remainingLinesIndex] = nextLine.substr(mapping.generatedColumn); + lastGeneratedColumn = mapping.generatedColumn; + } + lastMapping = mapping; + }, this); + // We have processed all mappings. + if (remainingLinesIndex < remainingLines.length) { + if (lastMapping) { + // Associate the remaining code in the current line with "lastMapping" + addMappingWithCode(lastMapping, shiftNextLine()); + } + // and add the remaining lines without any mapping + node.add(remainingLines.splice(remainingLinesIndex).join("")); + } + + // Copy sourcesContent into SourceNode + aSourceMapConsumer.sources.forEach(function (sourceFile) { + var content = aSourceMapConsumer.sourceContentFor(sourceFile); + if (content != null) { + if (aRelativePath != null) { + sourceFile = util.join(aRelativePath, sourceFile); + } + node.setSourceContent(sourceFile, content); + } + }); + + return node; + + function addMappingWithCode(mapping, code) { + if (mapping === null || mapping.source === undefined) { + node.add(code); + } else { + var source = aRelativePath + ? util.join(aRelativePath, mapping.source) + : mapping.source; + node.add(new SourceNode(mapping.originalLine, + mapping.originalColumn, + source, + code, + mapping.name)); + } + } + }; + +/** + * Add a chunk of generated JS to this source node. + * + * @param aChunk A string snippet of generated JS code, another instance of + * SourceNode, or an array where each member is one of those things. + */ +SourceNode.prototype.add = function SourceNode_add(aChunk) { + if (Array.isArray(aChunk)) { + aChunk.forEach(function (chunk) { + this.add(chunk); + }, this); + } + else if (aChunk[isSourceNode] || typeof aChunk === "string") { + if (aChunk) { + this.children.push(aChunk); + } + } + else { + throw new TypeError( + "Expected a SourceNode, string, or an array of SourceNodes and strings. Got " + aChunk + ); + } + return this; +}; + +/** + * Add a chunk of generated JS to the beginning of this source node. + * + * @param aChunk A string snippet of generated JS code, another instance of + * SourceNode, or an array where each member is one of those things. 
+ */ +SourceNode.prototype.prepend = function SourceNode_prepend(aChunk) { + if (Array.isArray(aChunk)) { + for (var i = aChunk.length-1; i >= 0; i--) { + this.prepend(aChunk[i]); + } + } + else if (aChunk[isSourceNode] || typeof aChunk === "string") { + this.children.unshift(aChunk); + } + else { + throw new TypeError( + "Expected a SourceNode, string, or an array of SourceNodes and strings. Got " + aChunk + ); + } + return this; +}; + +/** + * Walk over the tree of JS snippets in this node and its children. The + * walking function is called once for each snippet of JS and is passed that + * snippet and the its original associated source's line/column location. + * + * @param aFn The traversal function. + */ +SourceNode.prototype.walk = function SourceNode_walk(aFn) { + var chunk; + for (var i = 0, len = this.children.length; i < len; i++) { + chunk = this.children[i]; + if (chunk[isSourceNode]) { + chunk.walk(aFn); + } + else { + if (chunk !== '') { + aFn(chunk, { source: this.source, + line: this.line, + column: this.column, + name: this.name }); + } + } + } +}; + +/** + * Like `String.prototype.join` except for SourceNodes. Inserts `aStr` between + * each of `this.children`. + * + * @param aSep The separator. + */ +SourceNode.prototype.join = function SourceNode_join(aSep) { + var newChildren; + var i; + var len = this.children.length; + if (len > 0) { + newChildren = []; + for (i = 0; i < len-1; i++) { + newChildren.push(this.children[i]); + newChildren.push(aSep); + } + newChildren.push(this.children[i]); + this.children = newChildren; + } + return this; +}; + +/** + * Call String.prototype.replace on the very right-most source snippet. Useful + * for trimming whitespace from the end of a source node, etc. + * + * @param aPattern The pattern to replace. + * @param aReplacement The thing to replace the pattern with. + */ +SourceNode.prototype.replaceRight = function SourceNode_replaceRight(aPattern, aReplacement) { + var lastChild = this.children[this.children.length - 1]; + if (lastChild[isSourceNode]) { + lastChild.replaceRight(aPattern, aReplacement); + } + else if (typeof lastChild === 'string') { + this.children[this.children.length - 1] = lastChild.replace(aPattern, aReplacement); + } + else { + this.children.push(''.replace(aPattern, aReplacement)); + } + return this; +}; + +/** + * Set the source content for a source file. This will be added to the SourceMapGenerator + * in the sourcesContent field. + * + * @param aSourceFile The filename of the source file + * @param aSourceContent The content of the source file + */ +SourceNode.prototype.setSourceContent = + function SourceNode_setSourceContent(aSourceFile, aSourceContent) { + this.sourceContents[util.toSetString(aSourceFile)] = aSourceContent; + }; + +/** + * Walk over the tree of SourceNodes. The walking function is called for each + * source file content and is passed the filename and source content. + * + * @param aFn The traversal function. + */ +SourceNode.prototype.walkSourceContents = + function SourceNode_walkSourceContents(aFn) { + for (var i = 0, len = this.children.length; i < len; i++) { + if (this.children[i][isSourceNode]) { + this.children[i].walkSourceContents(aFn); + } + } + + var sources = Object.keys(this.sourceContents); + for (var i = 0, len = sources.length; i < len; i++) { + aFn(util.fromSetString(sources[i]), this.sourceContents[sources[i]]); + } + }; + +/** + * Return the string representation of this source node. 
Walks over the tree + * and concatenates all the various snippets together to one string. + */ +SourceNode.prototype.toString = function SourceNode_toString() { + var str = ""; + this.walk(function (chunk) { + str += chunk; + }); + return str; +}; + +/** + * Returns the string representation of this source node along with a source + * map. + */ +SourceNode.prototype.toStringWithSourceMap = function SourceNode_toStringWithSourceMap(aArgs) { + var generated = { + code: "", + line: 1, + column: 0 + }; + var map = new SourceMapGenerator(aArgs); + var sourceMappingActive = false; + var lastOriginalSource = null; + var lastOriginalLine = null; + var lastOriginalColumn = null; + var lastOriginalName = null; + this.walk(function (chunk, original) { + generated.code += chunk; + if (original.source !== null + && original.line !== null + && original.column !== null) { + if(lastOriginalSource !== original.source + || lastOriginalLine !== original.line + || lastOriginalColumn !== original.column + || lastOriginalName !== original.name) { + map.addMapping({ + source: original.source, + original: { + line: original.line, + column: original.column + }, + generated: { + line: generated.line, + column: generated.column + }, + name: original.name + }); + } + lastOriginalSource = original.source; + lastOriginalLine = original.line; + lastOriginalColumn = original.column; + lastOriginalName = original.name; + sourceMappingActive = true; + } else if (sourceMappingActive) { + map.addMapping({ + generated: { + line: generated.line, + column: generated.column + } + }); + lastOriginalSource = null; + sourceMappingActive = false; + } + for (var idx = 0, length = chunk.length; idx < length; idx++) { + if (chunk.charCodeAt(idx) === NEWLINE_CODE) { + generated.line++; + generated.column = 0; + // Mappings end at eol + if (idx + 1 === length) { + lastOriginalSource = null; + sourceMappingActive = false; + } else if (sourceMappingActive) { + map.addMapping({ + source: original.source, + original: { + line: original.line, + column: original.column + }, + generated: { + line: generated.line, + column: generated.column + }, + name: original.name + }); + } + } else { + generated.column++; + } + } + }); + this.walkSourceContents(function (sourceFile, sourceContent) { + map.setSourceContent(sourceFile, sourceContent); + }); + + return { code: generated.code, map: map }; +}; + +exports.SourceNode = SourceNode; diff --git a/node_modules/source-map-js/lib/util.js b/node_modules/source-map-js/lib/util.js new file mode 100644 index 0000000..430e2d0 --- /dev/null +++ b/node_modules/source-map-js/lib/util.js @@ -0,0 +1,594 @@ +/* -*- Mode: js; js-indent-level: 2; -*- */ +/* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + +/** + * This is a helper function for getting values from parameter/options + * objects. + * + * @param args The object we are extracting values from + * @param name The name of the property we are getting. + * @param defaultValue An optional value to return if the property is missing + * from the object. If this is not specified and the property is missing, an + * error will be thrown. 
+ */ +function getArg(aArgs, aName, aDefaultValue) { + if (aName in aArgs) { + return aArgs[aName]; + } else if (arguments.length === 3) { + return aDefaultValue; + } else { + throw new Error('"' + aName + '" is a required argument.'); + } +} +exports.getArg = getArg; + +var urlRegexp = /^(?:([\w+\-.]+):)?\/\/(?:(\w+:\w+)@)?([\w.-]*)(?::(\d+))?(.*)$/; +var dataUrlRegexp = /^data:.+\,.+$/; + +function urlParse(aUrl) { + var match = aUrl.match(urlRegexp); + if (!match) { + return null; + } + return { + scheme: match[1], + auth: match[2], + host: match[3], + port: match[4], + path: match[5] + }; +} +exports.urlParse = urlParse; + +function urlGenerate(aParsedUrl) { + var url = ''; + if (aParsedUrl.scheme) { + url += aParsedUrl.scheme + ':'; + } + url += '//'; + if (aParsedUrl.auth) { + url += aParsedUrl.auth + '@'; + } + if (aParsedUrl.host) { + url += aParsedUrl.host; + } + if (aParsedUrl.port) { + url += ":" + aParsedUrl.port + } + if (aParsedUrl.path) { + url += aParsedUrl.path; + } + return url; +} +exports.urlGenerate = urlGenerate; + +var MAX_CACHED_INPUTS = 32; + +/** + * Takes some function `f(input) -> result` and returns a memoized version of + * `f`. + * + * We keep at most `MAX_CACHED_INPUTS` memoized results of `f` alive. The + * memoization is a dumb-simple, linear least-recently-used cache. + */ +function lruMemoize(f) { + var cache = []; + + return function(input) { + for (var i = 0; i < cache.length; i++) { + if (cache[i].input === input) { + var temp = cache[0]; + cache[0] = cache[i]; + cache[i] = temp; + return cache[0].result; + } + } + + var result = f(input); + + cache.unshift({ + input, + result, + }); + + if (cache.length > MAX_CACHED_INPUTS) { + cache.pop(); + } + + return result; + }; +} + +/** + * Normalizes a path, or the path portion of a URL: + * + * - Replaces consecutive slashes with one slash. + * - Removes unnecessary '.' parts. + * - Removes unnecessary '/..' parts. + * + * Based on code in the Node.js 'path' core module. + * + * @param aPath The path or url to normalize. + */ +var normalize = lruMemoize(function normalize(aPath) { + var path = aPath; + var url = urlParse(aPath); + if (url) { + if (!url.path) { + return aPath; + } + path = url.path; + } + var isAbsolute = exports.isAbsolute(path); + // Split the path into parts between `/` characters. This is much faster than + // using `.split(/\/+/g)`. + var parts = []; + var start = 0; + var i = 0; + while (true) { + start = i; + i = path.indexOf("/", start); + if (i === -1) { + parts.push(path.slice(start)); + break; + } else { + parts.push(path.slice(start, i)); + while (i < path.length && path[i] === "/") { + i++; + } + } + } + + for (var part, up = 0, i = parts.length - 1; i >= 0; i--) { + part = parts[i]; + if (part === '.') { + parts.splice(i, 1); + } else if (part === '..') { + up++; + } else if (up > 0) { + if (part === '') { + // The first part is blank if the path is absolute. Trying to go + // above the root is a no-op. Therefore we can remove all '..' parts + // directly after the root. + parts.splice(i + 1, up); + up = 0; + } else { + parts.splice(i, 2); + up--; + } + } + } + path = parts.join('/'); + + if (path === '') { + path = isAbsolute ? '/' : '.'; + } + + if (url) { + url.path = path; + return urlGenerate(url); + } + return path; +}); +exports.normalize = normalize; + +/** + * Joins two paths/URLs. + * + * @param aRoot The root path or URL. + * @param aPath The path or URL to be joined with the root. 
+ * + * - If aPath is a URL or a data URI, aPath is returned, unless aPath is a + * scheme-relative URL: Then the scheme of aRoot, if any, is prepended + * first. + * - Otherwise aPath is a path. If aRoot is a URL, then its path portion + * is updated with the result and aRoot is returned. Otherwise the result + * is returned. + * - If aPath is absolute, the result is aPath. + * - Otherwise the two paths are joined with a slash. + * - Joining for example 'http://' and 'www.example.com' is also supported. + */ +function join(aRoot, aPath) { + if (aRoot === "") { + aRoot = "."; + } + if (aPath === "") { + aPath = "."; + } + var aPathUrl = urlParse(aPath); + var aRootUrl = urlParse(aRoot); + if (aRootUrl) { + aRoot = aRootUrl.path || '/'; + } + + // `join(foo, '//www.example.org')` + if (aPathUrl && !aPathUrl.scheme) { + if (aRootUrl) { + aPathUrl.scheme = aRootUrl.scheme; + } + return urlGenerate(aPathUrl); + } + + if (aPathUrl || aPath.match(dataUrlRegexp)) { + return aPath; + } + + // `join('http://', 'www.example.com')` + if (aRootUrl && !aRootUrl.host && !aRootUrl.path) { + aRootUrl.host = aPath; + return urlGenerate(aRootUrl); + } + + var joined = aPath.charAt(0) === '/' + ? aPath + : normalize(aRoot.replace(/\/+$/, '') + '/' + aPath); + + if (aRootUrl) { + aRootUrl.path = joined; + return urlGenerate(aRootUrl); + } + return joined; +} +exports.join = join; + +exports.isAbsolute = function (aPath) { + return aPath.charAt(0) === '/' || urlRegexp.test(aPath); +}; + +/** + * Make a path relative to a URL or another path. + * + * @param aRoot The root path or URL. + * @param aPath The path or URL to be made relative to aRoot. + */ +function relative(aRoot, aPath) { + if (aRoot === "") { + aRoot = "."; + } + + aRoot = aRoot.replace(/\/$/, ''); + + // It is possible for the path to be above the root. In this case, simply + // checking whether the root is a prefix of the path won't work. Instead, we + // need to remove components from the root one by one, until either we find + // a prefix that fits, or we run out of components to remove. + var level = 0; + while (aPath.indexOf(aRoot + '/') !== 0) { + var index = aRoot.lastIndexOf("/"); + if (index < 0) { + return aPath; + } + + // If the only part of the root that is left is the scheme (i.e. http://, + // file:///, etc.), one or more slashes (/), or simply nothing at all, we + // have exhausted all components, so the path is not relative to the root. + aRoot = aRoot.slice(0, index); + if (aRoot.match(/^([^\/]+:\/)?\/*$/)) { + return aPath; + } + + ++level; + } + + // Make sure we add a "../" for each component we removed from the root. + return Array(level + 1).join("../") + aPath.substr(aRoot.length + 1); +} +exports.relative = relative; + +var supportsNullProto = (function () { + var obj = Object.create(null); + return !('__proto__' in obj); +}()); + +function identity (s) { + return s; +} + +/** + * Because behavior goes wacky when you set `__proto__` on objects, we + * have to prefix all the strings in our set with an arbitrary character. + * + * See https://github.com/mozilla/source-map/pull/31 and + * https://github.com/mozilla/source-map/issues/30 + * + * @param String aStr + */ +function toSetString(aStr) { + if (isProtoString(aStr)) { + return '$' + aStr; + } + + return aStr; +} +exports.toSetString = supportsNullProto ? identity : toSetString; + +function fromSetString(aStr) { + if (isProtoString(aStr)) { + return aStr.slice(1); + } + + return aStr; +} +exports.fromSetString = supportsNullProto ? 
identity : fromSetString; + +function isProtoString(s) { + if (!s) { + return false; + } + + var length = s.length; + + if (length < 9 /* "__proto__".length */) { + return false; + } + + if (s.charCodeAt(length - 1) !== 95 /* '_' */ || + s.charCodeAt(length - 2) !== 95 /* '_' */ || + s.charCodeAt(length - 3) !== 111 /* 'o' */ || + s.charCodeAt(length - 4) !== 116 /* 't' */ || + s.charCodeAt(length - 5) !== 111 /* 'o' */ || + s.charCodeAt(length - 6) !== 114 /* 'r' */ || + s.charCodeAt(length - 7) !== 112 /* 'p' */ || + s.charCodeAt(length - 8) !== 95 /* '_' */ || + s.charCodeAt(length - 9) !== 95 /* '_' */) { + return false; + } + + for (var i = length - 10; i >= 0; i--) { + if (s.charCodeAt(i) !== 36 /* '$' */) { + return false; + } + } + + return true; +} + +/** + * Comparator between two mappings where the original positions are compared. + * + * Optionally pass in `true` as `onlyCompareGenerated` to consider two + * mappings with the same original source/line/column, but different generated + * line and column the same. Useful when searching for a mapping with a + * stubbed out mapping. + */ +function compareByOriginalPositions(mappingA, mappingB, onlyCompareOriginal) { + var cmp = strcmp(mappingA.source, mappingB.source); + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.originalLine - mappingB.originalLine; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.originalColumn - mappingB.originalColumn; + if (cmp !== 0 || onlyCompareOriginal) { + return cmp; + } + + cmp = mappingA.generatedColumn - mappingB.generatedColumn; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.generatedLine - mappingB.generatedLine; + if (cmp !== 0) { + return cmp; + } + + return strcmp(mappingA.name, mappingB.name); +} +exports.compareByOriginalPositions = compareByOriginalPositions; + +function compareByOriginalPositionsNoSource(mappingA, mappingB, onlyCompareOriginal) { + var cmp + + cmp = mappingA.originalLine - mappingB.originalLine; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.originalColumn - mappingB.originalColumn; + if (cmp !== 0 || onlyCompareOriginal) { + return cmp; + } + + cmp = mappingA.generatedColumn - mappingB.generatedColumn; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.generatedLine - mappingB.generatedLine; + if (cmp !== 0) { + return cmp; + } + + return strcmp(mappingA.name, mappingB.name); +} +exports.compareByOriginalPositionsNoSource = compareByOriginalPositionsNoSource; + +/** + * Comparator between two mappings with deflated source and name indices where + * the generated positions are compared. + * + * Optionally pass in `true` as `onlyCompareGenerated` to consider two + * mappings with the same generated line and column, but different + * source/name/original line and column the same. Useful when searching for a + * mapping with a stubbed out mapping. 
+ */ +function compareByGeneratedPositionsDeflated(mappingA, mappingB, onlyCompareGenerated) { + var cmp = mappingA.generatedLine - mappingB.generatedLine; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.generatedColumn - mappingB.generatedColumn; + if (cmp !== 0 || onlyCompareGenerated) { + return cmp; + } + + cmp = strcmp(mappingA.source, mappingB.source); + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.originalLine - mappingB.originalLine; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.originalColumn - mappingB.originalColumn; + if (cmp !== 0) { + return cmp; + } + + return strcmp(mappingA.name, mappingB.name); +} +exports.compareByGeneratedPositionsDeflated = compareByGeneratedPositionsDeflated; + +function compareByGeneratedPositionsDeflatedNoLine(mappingA, mappingB, onlyCompareGenerated) { + var cmp = mappingA.generatedColumn - mappingB.generatedColumn; + if (cmp !== 0 || onlyCompareGenerated) { + return cmp; + } + + cmp = strcmp(mappingA.source, mappingB.source); + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.originalLine - mappingB.originalLine; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.originalColumn - mappingB.originalColumn; + if (cmp !== 0) { + return cmp; + } + + return strcmp(mappingA.name, mappingB.name); +} +exports.compareByGeneratedPositionsDeflatedNoLine = compareByGeneratedPositionsDeflatedNoLine; + +function strcmp(aStr1, aStr2) { + if (aStr1 === aStr2) { + return 0; + } + + if (aStr1 === null) { + return 1; // aStr2 !== null + } + + if (aStr2 === null) { + return -1; // aStr1 !== null + } + + if (aStr1 > aStr2) { + return 1; + } + + return -1; +} + +/** + * Comparator between two mappings with inflated source and name strings where + * the generated positions are compared. + */ +function compareByGeneratedPositionsInflated(mappingA, mappingB) { + var cmp = mappingA.generatedLine - mappingB.generatedLine; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.generatedColumn - mappingB.generatedColumn; + if (cmp !== 0) { + return cmp; + } + + cmp = strcmp(mappingA.source, mappingB.source); + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.originalLine - mappingB.originalLine; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.originalColumn - mappingB.originalColumn; + if (cmp !== 0) { + return cmp; + } + + return strcmp(mappingA.name, mappingB.name); +} +exports.compareByGeneratedPositionsInflated = compareByGeneratedPositionsInflated; + +/** + * Strip any JSON XSSI avoidance prefix from the string (as documented + * in the source maps specification), and then parse the string as + * JSON. + */ +function parseSourceMapInput(str) { + return JSON.parse(str.replace(/^\)]}'[^\n]*\n/, '')); +} +exports.parseSourceMapInput = parseSourceMapInput; + +/** + * Compute the URL of a source given the the source root, the source's + * URL, and the source map's URL. + */ +function computeSourceURL(sourceRoot, sourceURL, sourceMapURL) { + sourceURL = sourceURL || ''; + + if (sourceRoot) { + // This follows what Chrome does. + if (sourceRoot[sourceRoot.length - 1] !== '/' && sourceURL[0] !== '/') { + sourceRoot += '/'; + } + // The spec says: + // Line 4: An optional source root, useful for relocating source + // files on a server or removing repeated values in the + // “sources” entry. This value is prepended to the individual + // entries in the “source” field. + sourceURL = sourceRoot + sourceURL; + } + + // Historically, SourceMapConsumer did not take the sourceMapURL as + // a parameter. 
This mode is still somewhat supported, which is why + // this code block is conditional. However, it's preferable to pass + // the source map URL to SourceMapConsumer, so that this function + // can implement the source URL resolution algorithm as outlined in + // the spec. This block is basically the equivalent of: + // new URL(sourceURL, sourceMapURL).toString() + // ... except it avoids using URL, which wasn't available in the + // older releases of node still supported by this library. + // + // The spec says: + // If the sources are not absolute URLs after prepending of the + // “sourceRoot”, the sources are resolved relative to the + // SourceMap (like resolving script src in a html document). + if (sourceMapURL) { + var parsed = urlParse(sourceMapURL); + if (!parsed) { + throw new Error("sourceMapURL could not be parsed"); + } + if (parsed.path) { + // Strip the last path component, but keep the "/". + var index = parsed.path.lastIndexOf('/'); + if (index >= 0) { + parsed.path = parsed.path.substring(0, index + 1); + } + } + sourceURL = join(urlGenerate(parsed), sourceURL); + } + + return normalize(sourceURL); +} +exports.computeSourceURL = computeSourceURL; diff --git a/node_modules/source-map-js/package.json b/node_modules/source-map-js/package.json new file mode 100644 index 0000000..501fafe --- /dev/null +++ b/node_modules/source-map-js/package.json @@ -0,0 +1,71 @@ +{ + "name": "source-map-js", + "description": "Generates and consumes source maps", + "version": "1.0.2", + "homepage": "https://github.com/7rulnik/source-map-js", + "author": "Valentin 7rulnik Semirulnik ", + "contributors": [ + "Nick Fitzgerald ", + "Tobias Koppers ", + "Duncan Beevers ", + "Stephen Crane ", + "Ryan Seddon ", + "Miles Elam ", + "Mihai Bazon ", + "Michael Ficarra ", + "Todd Wolfson ", + "Alexander Solovyov ", + "Felix Gnass ", + "Conrad Irwin ", + "usrbincc ", + "David Glasser ", + "Chase Douglas ", + "Evan Wallace ", + "Heather Arthur ", + "Hugh Kennedy ", + "David Glasser ", + "Simon Lydell ", + "Jmeas Smith ", + "Michael Z Goddard ", + "azu ", + "John Gozde ", + "Adam Kirkton ", + "Chris Montgomery ", + "J. 
Ryan Stinnett ", + "Jack Herrington ", + "Chris Truter ", + "Daniel Espeset ", + "Jamie Wong ", + "Eddy Bruël ", + "Hawken Rives ", + "Gilad Peleg ", + "djchie ", + "Gary Ye ", + "Nicolas Lalevée " + ], + "repository": "7rulnik/source-map-js", + "main": "./source-map.js", + "files": [ + "source-map.js", + "source-map.d.ts", + "lib/" + ], + "engines": { + "node": ">=0.10.0" + }, + "license": "BSD-3-Clause", + "scripts": { + "test": "npm run build && node test/run-tests.js", + "build": "webpack --color", + "toc": "doctoc --title '## Table of Contents' README.md && doctoc --title '## Table of Contents' CONTRIBUTING.md" + }, + "devDependencies": { + "clean-publish": "^3.1.0", + "doctoc": "^0.15.0", + "webpack": "^1.12.0" + }, + "clean-publish": { + "cleanDocs": true + }, + "typings": "source-map.d.ts" +} diff --git a/node_modules/source-map-js/source-map.d.ts b/node_modules/source-map-js/source-map.d.ts new file mode 100644 index 0000000..9f8a4b3 --- /dev/null +++ b/node_modules/source-map-js/source-map.d.ts @@ -0,0 +1,115 @@ +declare module 'source-map-js' { + export interface StartOfSourceMap { + file?: string; + sourceRoot?: string; + } + + export interface RawSourceMap extends StartOfSourceMap { + version: string; + sources: string[]; + names: string[]; + sourcesContent?: string[]; + mappings: string; + } + + export interface Position { + line: number; + column: number; + } + + export interface LineRange extends Position { + lastColumn: number; + } + + export interface FindPosition extends Position { + // SourceMapConsumer.GREATEST_LOWER_BOUND or SourceMapConsumer.LEAST_UPPER_BOUND + bias?: number; + } + + export interface SourceFindPosition extends FindPosition { + source: string; + } + + export interface MappedPosition extends Position { + source: string; + name?: string; + } + + export interface MappingItem { + source: string; + generatedLine: number; + generatedColumn: number; + originalLine: number; + originalColumn: number; + name: string; + } + + export class SourceMapConsumer { + static GENERATED_ORDER: number; + static ORIGINAL_ORDER: number; + + static GREATEST_LOWER_BOUND: number; + static LEAST_UPPER_BOUND: number; + + constructor(rawSourceMap: RawSourceMap); + computeColumnSpans(): void; + originalPositionFor(generatedPosition: FindPosition): MappedPosition; + generatedPositionFor(originalPosition: SourceFindPosition): LineRange; + allGeneratedPositionsFor(originalPosition: MappedPosition): Position[]; + hasContentsOfAllSources(): boolean; + sourceContentFor(source: string, returnNullOnMissing?: boolean): string; + eachMapping(callback: (mapping: MappingItem) => void, context?: any, order?: number): void; + } + + export interface Mapping { + generated: Position; + original: Position; + source: string; + name?: string; + } + + export class SourceMapGenerator { + constructor(startOfSourceMap?: StartOfSourceMap); + static fromSourceMap(sourceMapConsumer: SourceMapConsumer): SourceMapGenerator; + addMapping(mapping: Mapping): void; + setSourceContent(sourceFile: string, sourceContent: string): void; + applySourceMap(sourceMapConsumer: SourceMapConsumer, sourceFile?: string, sourceMapPath?: string): void; + toString(): string; + } + + export interface CodeWithSourceMap { + code: string; + map: SourceMapGenerator; + } + + export class SourceNode { + constructor(); + constructor(line: number, column: number, source: string); + constructor(line: number, column: number, source: string, chunk?: string, name?: string); + static fromStringWithSourceMap(code: string, sourceMapConsumer: 
SourceMapConsumer, relativePath?: string): SourceNode; + add(chunk: string): void; + prepend(chunk: string): void; + setSourceContent(sourceFile: string, sourceContent: string): void; + walk(fn: (chunk: string, mapping: MappedPosition) => void): void; + walkSourceContents(fn: (file: string, content: string) => void): void; + join(sep: string): SourceNode; + replaceRight(pattern: string, replacement: string): SourceNode; + toString(): string; + toStringWithSourceMap(startOfSourceMap?: StartOfSourceMap): CodeWithSourceMap; + } +} + +declare module 'source-map-js/lib/source-map-generator' { + import { SourceMapGenerator } from 'source-map-js' + export { SourceMapGenerator } +} + +declare module 'source-map-js/lib/source-map-consumer' { + import { SourceMapConsumer } from 'source-map-js' + export { SourceMapConsumer } +} + +declare module 'source-map-js/lib/source-node' { + import { SourceNode } from 'source-map-js' + export { SourceNode } +} diff --git a/node_modules/source-map-js/source-map.js b/node_modules/source-map-js/source-map.js new file mode 100644 index 0000000..bc88fe8 --- /dev/null +++ b/node_modules/source-map-js/source-map.js @@ -0,0 +1,8 @@ +/* + * Copyright 2009-2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE.txt or: + * http://opensource.org/licenses/BSD-3-Clause + */ +exports.SourceMapGenerator = require('./lib/source-map-generator').SourceMapGenerator; +exports.SourceMapConsumer = require('./lib/source-map-consumer').SourceMapConsumer; +exports.SourceNode = require('./lib/source-node').SourceNode; diff --git a/node_modules/source-map/CHANGELOG.md b/node_modules/source-map/CHANGELOG.md new file mode 100644 index 0000000..3a8c066 --- /dev/null +++ b/node_modules/source-map/CHANGELOG.md @@ -0,0 +1,301 @@ +# Change Log + +## 0.5.6 + +* Fix for regression when people were using numbers as names in source maps. See + #236. + +## 0.5.5 + +* Fix "regression" of unsupported, implementation behavior that half the world + happens to have come to depend on. See #235. + +* Fix regression involving function hoisting in SpiderMonkey. See #233. + +## 0.5.4 + +* Large performance improvements to source-map serialization. See #228 and #229. + +## 0.5.3 + +* Do not include unnecessary distribution files. See + commit ef7006f8d1647e0a83fdc60f04f5a7ca54886f86. + +## 0.5.2 + +* Include browser distributions of the library in package.json's `files`. See + issue #212. + +## 0.5.1 + +* Fix latent bugs in IndexedSourceMapConsumer.prototype._parseMappings. See + ff05274becc9e6e1295ed60f3ea090d31d843379. + +## 0.5.0 + +* Node 0.8 is no longer supported. + +* Use webpack instead of dryice for bundling. + +* Big speedups serializing source maps. See pull request #203. + +* Fix a bug with `SourceMapConsumer.prototype.sourceContentFor` and sources that + explicitly start with the source root. See issue #199. + +## 0.4.4 + +* Fix an issue where using a `SourceMapGenerator` after having created a + `SourceMapConsumer` from it via `SourceMapConsumer.fromSourceMap` failed. See + issue #191. + +* Fix an issue with where `SourceMapGenerator` would mistakenly consider + different mappings as duplicates of each other and avoid generating them. See + issue #192. + +## 0.4.3 + +* A very large number of performance improvements, particularly when parsing + source maps. Collectively about 75% of time shaved off of the source map + parsing benchmark! 
+ +* Fix a bug in `SourceMapConsumer.prototype.allGeneratedPositionsFor` and fuzzy + searching in the presence of a column option. See issue #177. + +* Fix a bug with joining a source and its source root when the source is above + the root. See issue #182. + +* Add the `SourceMapConsumer.prototype.hasContentsOfAllSources` method to + determine when all sources' contents are inlined into the source map. See + issue #190. + +## 0.4.2 + +* Add an `.npmignore` file so that the benchmarks aren't pulled down by + dependent projects. Issue #169. + +* Add an optional `column` argument to + `SourceMapConsumer.prototype.allGeneratedPositionsFor` and better handle lines + with no mappings. Issues #172 and #173. + +## 0.4.1 + +* Fix accidentally defining a global variable. #170. + +## 0.4.0 + +* The default direction for fuzzy searching was changed back to its original + direction. See #164. + +* There is now a `bias` option you can supply to `SourceMapConsumer` to control + the fuzzy searching direction. See #167. + +* About an 8% speed up in parsing source maps. See #159. + +* Added a benchmark for parsing and generating source maps. + +## 0.3.0 + +* Change the default direction that searching for positions fuzzes when there is + not an exact match. See #154. + +* Support for environments using json2.js for JSON serialization. See #156. + +## 0.2.0 + +* Support for consuming "indexed" source maps which do not have any remote + sections. See pull request #127. This introduces a minor backwards + incompatibility if you are monkey patching `SourceMapConsumer.prototype` + methods. + +## 0.1.43 + +* Performance improvements for `SourceMapGenerator` and `SourceNode`. See issue + #148 for some discussion and issues #150, #151, and #152 for implementations. + +## 0.1.42 + +* Fix an issue where `SourceNode`s from different versions of the source-map + library couldn't be used in conjunction with each other. See issue #142. + +## 0.1.41 + +* Fix a bug with getting the source content of relative sources with a "./" + prefix. See issue #145 and [Bug 1090768](bugzil.la/1090768). + +* Add the `SourceMapConsumer.prototype.computeColumnSpans` method to compute the + column span of each mapping. + +* Add the `SourceMapConsumer.prototype.allGeneratedPositionsFor` method to find + all generated positions associated with a given original source and line. + +## 0.1.40 + +* Performance improvements for parsing source maps in SourceMapConsumer. + +## 0.1.39 + +* Fix a bug where setting a source's contents to null before any source content + had been set before threw a TypeError. See issue #131. + +## 0.1.38 + +* Fix a bug where finding relative paths from an empty path were creating + absolute paths. See issue #129. + +## 0.1.37 + +* Fix a bug where if the source root was an empty string, relative source paths + would turn into absolute source paths. Issue #124. + +## 0.1.36 + +* Allow the `names` mapping property to be an empty string. Issue #121. + +## 0.1.35 + +* A third optional parameter was added to `SourceNode.fromStringWithSourceMap` + to specify a path that relative sources in the second parameter should be + relative to. Issue #105. + +* If no file property is given to a `SourceMapGenerator`, then the resulting + source map will no longer have a `null` file property. The property will + simply not exist. Issue #104. + +* Fixed a bug where consecutive newlines were ignored in `SourceNode`s. + Issue #116. + +## 0.1.34 + +* Make `SourceNode` work with windows style ("\r\n") newlines. Issue #103. 
+ +* Fix bug involving source contents and the + `SourceMapGenerator.prototype.applySourceMap`. Issue #100. + +## 0.1.33 + +* Fix some edge cases surrounding path joining and URL resolution. + +* Add a third parameter for relative path to + `SourceMapGenerator.prototype.applySourceMap`. + +* Fix issues with mappings and EOLs. + +## 0.1.32 + +* Fixed a bug where SourceMapConsumer couldn't handle negative relative columns + (issue 92). + +* Fixed test runner to actually report number of failed tests as its process + exit code. + +* Fixed a typo when reporting bad mappings (issue 87). + +## 0.1.31 + +* Delay parsing the mappings in SourceMapConsumer until queried for a source + location. + +* Support Sass source maps (which at the time of writing deviate from the spec + in small ways) in SourceMapConsumer. + +## 0.1.30 + +* Do not join source root with a source, when the source is a data URI. + +* Extend the test runner to allow running single specific test files at a time. + +* Performance improvements in `SourceNode.prototype.walk` and + `SourceMapConsumer.prototype.eachMapping`. + +* Source map browser builds will now work inside Workers. + +* Better error messages when attempting to add an invalid mapping to a + `SourceMapGenerator`. + +## 0.1.29 + +* Allow duplicate entries in the `names` and `sources` arrays of source maps + (usually from TypeScript) we are parsing. Fixes github issue 72. + +## 0.1.28 + +* Skip duplicate mappings when creating source maps from SourceNode; github + issue 75. + +## 0.1.27 + +* Don't throw an error when the `file` property is missing in SourceMapConsumer, + we don't use it anyway. + +## 0.1.26 + +* Fix SourceNode.fromStringWithSourceMap for empty maps. Fixes github issue 70. + +## 0.1.25 + +* Make compatible with browserify + +## 0.1.24 + +* Fix issue with absolute paths and `file://` URIs. See + https://bugzilla.mozilla.org/show_bug.cgi?id=885597 + +## 0.1.23 + +* Fix issue with absolute paths and sourcesContent, github issue 64. + +## 0.1.22 + +* Ignore duplicate mappings in SourceMapGenerator. Fixes github issue 21. + +## 0.1.21 + +* Fixed handling of sources that start with a slash so that they are relative to + the source root's host. + +## 0.1.20 + +* Fixed github issue #43: absolute URLs aren't joined with the source root + anymore. + +## 0.1.19 + +* Using Travis CI to run tests. + +## 0.1.18 + +* Fixed a bug in the handling of sourceRoot. + +## 0.1.17 + +* Added SourceNode.fromStringWithSourceMap. + +## 0.1.16 + +* Added missing documentation. + +* Fixed the generating of empty mappings in SourceNode. + +## 0.1.15 + +* Added SourceMapGenerator.applySourceMap. + +## 0.1.14 + +* The sourceRoot is now handled consistently. + +## 0.1.13 + +* Added SourceMapGenerator.fromSourceMap. + +## 0.1.12 + +* SourceNode now generates empty mappings too. + +## 0.1.11 + +* Added name support to SourceNode. + +## 0.1.10 + +* Added sourcesContent support to the customer and generator. diff --git a/node_modules/source-map/LICENSE b/node_modules/source-map/LICENSE new file mode 100644 index 0000000..ed1b7cf --- /dev/null +++ b/node_modules/source-map/LICENSE @@ -0,0 +1,28 @@ + +Copyright (c) 2009-2011, Mozilla Foundation and contributors +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. 
+ +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +* Neither the names of the Mozilla Foundation nor the names of project + contributors may be used to endorse or promote products derived from this + software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/node_modules/source-map/README.md b/node_modules/source-map/README.md new file mode 100644 index 0000000..fea4beb --- /dev/null +++ b/node_modules/source-map/README.md @@ -0,0 +1,742 @@ +# Source Map + +[![Build Status](https://travis-ci.org/mozilla/source-map.png?branch=master)](https://travis-ci.org/mozilla/source-map) + +[![NPM](https://nodei.co/npm/source-map.png?downloads=true&downloadRank=true)](https://www.npmjs.com/package/source-map) + +This is a library to generate and consume the source map format +[described here][format]. + +[format]: https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit + +## Use with Node + + $ npm install source-map + +## Use on the Web + + + +-------------------------------------------------------------------------------- + + + + + +## Table of Contents + +- [Examples](#examples) + - [Consuming a source map](#consuming-a-source-map) + - [Generating a source map](#generating-a-source-map) + - [With SourceNode (high level API)](#with-sourcenode-high-level-api) + - [With SourceMapGenerator (low level API)](#with-sourcemapgenerator-low-level-api) +- [API](#api) + - [SourceMapConsumer](#sourcemapconsumer) + - [new SourceMapConsumer(rawSourceMap)](#new-sourcemapconsumerrawsourcemap) + - [SourceMapConsumer.prototype.computeColumnSpans()](#sourcemapconsumerprototypecomputecolumnspans) + - [SourceMapConsumer.prototype.originalPositionFor(generatedPosition)](#sourcemapconsumerprototypeoriginalpositionforgeneratedposition) + - [SourceMapConsumer.prototype.generatedPositionFor(originalPosition)](#sourcemapconsumerprototypegeneratedpositionfororiginalposition) + - [SourceMapConsumer.prototype.allGeneratedPositionsFor(originalPosition)](#sourcemapconsumerprototypeallgeneratedpositionsfororiginalposition) + - [SourceMapConsumer.prototype.hasContentsOfAllSources()](#sourcemapconsumerprototypehascontentsofallsources) + - [SourceMapConsumer.prototype.sourceContentFor(source[, returnNullOnMissing])](#sourcemapconsumerprototypesourcecontentforsource-returnnullonmissing) + - [SourceMapConsumer.prototype.eachMapping(callback, context, order)](#sourcemapconsumerprototypeeachmappingcallback-context-order) + - [SourceMapGenerator](#sourcemapgenerator) + - [new SourceMapGenerator([startOfSourceMap])](#new-sourcemapgeneratorstartofsourcemap) + - 
[SourceMapGenerator.fromSourceMap(sourceMapConsumer)](#sourcemapgeneratorfromsourcemapsourcemapconsumer) + - [SourceMapGenerator.prototype.addMapping(mapping)](#sourcemapgeneratorprototypeaddmappingmapping) + - [SourceMapGenerator.prototype.setSourceContent(sourceFile, sourceContent)](#sourcemapgeneratorprototypesetsourcecontentsourcefile-sourcecontent) + - [SourceMapGenerator.prototype.applySourceMap(sourceMapConsumer[, sourceFile[, sourceMapPath]])](#sourcemapgeneratorprototypeapplysourcemapsourcemapconsumer-sourcefile-sourcemappath) + - [SourceMapGenerator.prototype.toString()](#sourcemapgeneratorprototypetostring) + - [SourceNode](#sourcenode) + - [new SourceNode([line, column, source[, chunk[, name]]])](#new-sourcenodeline-column-source-chunk-name) + - [SourceNode.fromStringWithSourceMap(code, sourceMapConsumer[, relativePath])](#sourcenodefromstringwithsourcemapcode-sourcemapconsumer-relativepath) + - [SourceNode.prototype.add(chunk)](#sourcenodeprototypeaddchunk) + - [SourceNode.prototype.prepend(chunk)](#sourcenodeprototypeprependchunk) + - [SourceNode.prototype.setSourceContent(sourceFile, sourceContent)](#sourcenodeprototypesetsourcecontentsourcefile-sourcecontent) + - [SourceNode.prototype.walk(fn)](#sourcenodeprototypewalkfn) + - [SourceNode.prototype.walkSourceContents(fn)](#sourcenodeprototypewalksourcecontentsfn) + - [SourceNode.prototype.join(sep)](#sourcenodeprototypejoinsep) + - [SourceNode.prototype.replaceRight(pattern, replacement)](#sourcenodeprototypereplacerightpattern-replacement) + - [SourceNode.prototype.toString()](#sourcenodeprototypetostring) + - [SourceNode.prototype.toStringWithSourceMap([startOfSourceMap])](#sourcenodeprototypetostringwithsourcemapstartofsourcemap) + + + +## Examples + +### Consuming a source map + +```js +var rawSourceMap = { + version: 3, + file: 'min.js', + names: ['bar', 'baz', 'n'], + sources: ['one.js', 'two.js'], + sourceRoot: 'http://example.com/www/js/', + mappings: 'CAAC,IAAI,IAAM,SAAUA,GAClB,OAAOC,IAAID;CCDb,IAAI,IAAM,SAAUE,GAClB,OAAOA' +}; + +var smc = new SourceMapConsumer(rawSourceMap); + +console.log(smc.sources); +// [ 'http://example.com/www/js/one.js', +// 'http://example.com/www/js/two.js' ] + +console.log(smc.originalPositionFor({ + line: 2, + column: 28 +})); +// { source: 'http://example.com/www/js/two.js', +// line: 2, +// column: 10, +// name: 'n' } + +console.log(smc.generatedPositionFor({ + source: 'http://example.com/www/js/two.js', + line: 2, + column: 10 +})); +// { line: 2, column: 28 } + +smc.eachMapping(function (m) { + // ... +}); +``` + +### Generating a source map + +In depth guide: +[**Compiling to JavaScript, and Debugging with Source Maps**](https://hacks.mozilla.org/2013/05/compiling-to-javascript-and-debugging-with-source-maps/) + +#### With SourceNode (high level API) + +```js +function compile(ast) { + switch (ast.type) { + case 'BinaryExpression': + return new SourceNode( + ast.location.line, + ast.location.column, + ast.location.source, + [compile(ast.left), " + ", compile(ast.right)] + ); + case 'Literal': + return new SourceNode( + ast.location.line, + ast.location.column, + ast.location.source, + String(ast.value) + ); + // ... 
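+      // Hypothetical extra case, added by the editor for illustration only
+      // (not part of the original README example). It assumes the AST
+      // exposes a `name` string for identifier nodes, and shows how the
+      // optional fifth `name` argument of the SourceNode constructor can
+      // record the original identifier for the mapping.
+      case 'Identifier':
+        return new SourceNode(
+          ast.location.line,
+          ast.location.column,
+          ast.location.source,
+          ast.name,
+          ast.name
+        );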
+ default: + throw new Error("Bad AST"); + } +} + +var ast = parse("40 + 2", "add.js"); +console.log(compile(ast).toStringWithSourceMap({ + file: 'add.js' +})); +// { code: '40 + 2', +// map: [object SourceMapGenerator] } +``` + +#### With SourceMapGenerator (low level API) + +```js +var map = new SourceMapGenerator({ + file: "source-mapped.js" +}); + +map.addMapping({ + generated: { + line: 10, + column: 35 + }, + source: "foo.js", + original: { + line: 33, + column: 2 + }, + name: "christopher" +}); + +console.log(map.toString()); +// '{"version":3,"file":"source-mapped.js","sources":["foo.js"],"names":["christopher"],"mappings":";;;;;;;;;mCAgCEA"}' +``` + +## API + +Get a reference to the module: + +```js +// Node.js +var sourceMap = require('source-map'); + +// Browser builds +var sourceMap = window.sourceMap; + +// Inside Firefox +const sourceMap = require("devtools/toolkit/sourcemap/source-map.js"); +``` + +### SourceMapConsumer + +A SourceMapConsumer instance represents a parsed source map which we can query +for information about the original file positions by giving it a file position +in the generated source. + +#### new SourceMapConsumer(rawSourceMap) + +The only parameter is the raw source map (either as a string which can be +`JSON.parse`'d, or an object). According to the spec, source maps have the +following attributes: + +* `version`: Which version of the source map spec this map is following. + +* `sources`: An array of URLs to the original source files. + +* `names`: An array of identifiers which can be referenced by individual + mappings. + +* `sourceRoot`: Optional. The URL root from which all sources are relative. + +* `sourcesContent`: Optional. An array of contents of the original source files. + +* `mappings`: A string of base64 VLQs which contain the actual mappings. + +* `file`: Optional. The generated filename this source map is associated with. + +```js +var consumer = new sourceMap.SourceMapConsumer(rawSourceMapJsonData); +``` + +#### SourceMapConsumer.prototype.computeColumnSpans() + +Compute the last column for each generated mapping. The last column is +inclusive. + +```js +// Before: +consumer.allGeneratedPositionsFor({ line: 2, source: "foo.coffee" }) +// [ { line: 2, +// column: 1 }, +// { line: 2, +// column: 10 }, +// { line: 2, +// column: 20 } ] + +consumer.computeColumnSpans(); + +// After: +consumer.allGeneratedPositionsFor({ line: 2, source: "foo.coffee" }) +// [ { line: 2, +// column: 1, +// lastColumn: 9 }, +// { line: 2, +// column: 10, +// lastColumn: 19 }, +// { line: 2, +// column: 20, +// lastColumn: Infinity } ] + +``` + +#### SourceMapConsumer.prototype.originalPositionFor(generatedPosition) + +Returns the original source, line, and column information for the generated +source's line and column positions provided. The only argument is an object with +the following properties: + +* `line`: The line number in the generated source. Line numbers in + this library are 1-based (note that the underlying source map + specification uses 0-based line numbers -- this library handles the + translation). + +* `column`: The column number in the generated source. Column numbers + in this library are 0-based. + +* `bias`: Either `SourceMapConsumer.GREATEST_LOWER_BOUND` or + `SourceMapConsumer.LEAST_UPPER_BOUND`. Specifies whether to return the closest + element that is smaller than or greater than the one we are searching for, + respectively, if the exact element cannot be found. Defaults to + `SourceMapConsumer.GREATEST_LOWER_BOUND`. 
+ +and an object is returned with the following properties: + +* `source`: The original source file, or null if this information is not + available. + +* `line`: The line number in the original source, or null if this information is + not available. The line number is 1-based. + +* `column`: The column number in the original source, or null if this + information is not available. The column number is 0-based. + +* `name`: The original identifier, or null if this information is not available. + +```js +consumer.originalPositionFor({ line: 2, column: 10 }) +// { source: 'foo.coffee', +// line: 2, +// column: 2, +// name: null } + +consumer.originalPositionFor({ line: 99999999999999999, column: 999999999999999 }) +// { source: null, +// line: null, +// column: null, +// name: null } +``` + +#### SourceMapConsumer.prototype.generatedPositionFor(originalPosition) + +Returns the generated line and column information for the original source, +line, and column positions provided. The only argument is an object with +the following properties: + +* `source`: The filename of the original source. + +* `line`: The line number in the original source. The line number is + 1-based. + +* `column`: The column number in the original source. The column + number is 0-based. + +and an object is returned with the following properties: + +* `line`: The line number in the generated source, or null. The line + number is 1-based. + +* `column`: The column number in the generated source, or null. The + column number is 0-based. + +```js +consumer.generatedPositionFor({ source: "example.js", line: 2, column: 10 }) +// { line: 1, +// column: 56 } +``` + +#### SourceMapConsumer.prototype.allGeneratedPositionsFor(originalPosition) + +Returns all generated line and column information for the original source, line, +and column provided. If no column is provided, returns all mappings +corresponding to either the line we are searching for or the next closest line +that has any mappings. Otherwise, returns all mappings corresponding to the +given line and either the column we are searching for or the next closest column +that has any offsets. + +The only argument is an object with the following properties: + +* `source`: The filename of the original source. + +* `line`: The line number in the original source. The line number is + 1-based. + +* `column`: Optional. The column number in the original source. The + column number is 0-based. + +and an array of objects is returned, each with the following properties: + +* `line`: The line number in the generated source, or null. The line + number is 1-based. + +* `column`: The column number in the generated source, or null. The + column number is 0-based. + +```js +consumer.allGeneratedPositionsFor({ line: 2, source: "foo.coffee" }) +// [ { line: 2, +// column: 1 }, +// { line: 2, +// column: 10 }, +// { line: 2, +// column: 20 } ] +``` + +#### SourceMapConsumer.prototype.hasContentsOfAllSources() + +Return true if we have the embedded source content for every source listed in +the source map, false otherwise. + +In other words, if this method returns `true`, then +`consumer.sourceContentFor(s)` will succeed for every source `s` in +`consumer.sources`. + +```js +// ... +if (consumer.hasContentsOfAllSources()) { + consumerReadyCallback(consumer); +} else { + fetchSources(consumer, consumerReadyCallback); +} +// ... +``` + +#### SourceMapConsumer.prototype.sourceContentFor(source[, returnNullOnMissing]) + +Returns the original source content for the source provided. 
The only +argument is the URL of the original source file. + +If the source content for the given source is not found, then an error is +thrown. Optionally, pass `true` as the second param to have `null` returned +instead. + +```js +consumer.sources +// [ "my-cool-lib.clj" ] + +consumer.sourceContentFor("my-cool-lib.clj") +// "..." + +consumer.sourceContentFor("this is not in the source map"); +// Error: "this is not in the source map" is not in the source map + +consumer.sourceContentFor("this is not in the source map", true); +// null +``` + +#### SourceMapConsumer.prototype.eachMapping(callback, context, order) + +Iterate over each mapping between an original source/line/column and a +generated line/column in this source map. + +* `callback`: The function that is called with each mapping. Mappings have the + form `{ source, generatedLine, generatedColumn, originalLine, originalColumn, + name }` + +* `context`: Optional. If specified, this object will be the value of `this` + every time that `callback` is called. + +* `order`: Either `SourceMapConsumer.GENERATED_ORDER` or + `SourceMapConsumer.ORIGINAL_ORDER`. Specifies whether you want to iterate over + the mappings sorted by the generated file's line/column order or the + original's source/line/column order, respectively. Defaults to + `SourceMapConsumer.GENERATED_ORDER`. + +```js +consumer.eachMapping(function (m) { console.log(m); }) +// ... +// { source: 'illmatic.js', +// generatedLine: 1, +// generatedColumn: 0, +// originalLine: 1, +// originalColumn: 0, +// name: null } +// { source: 'illmatic.js', +// generatedLine: 2, +// generatedColumn: 0, +// originalLine: 2, +// originalColumn: 0, +// name: null } +// ... +``` +### SourceMapGenerator + +An instance of the SourceMapGenerator represents a source map which is being +built incrementally. + +#### new SourceMapGenerator([startOfSourceMap]) + +You may pass an object with the following properties: + +* `file`: The filename of the generated source that this source map is + associated with. + +* `sourceRoot`: A root for all relative URLs in this source map. + +* `skipValidation`: Optional. When `true`, disables validation of mappings as + they are added. This can improve performance but should be used with + discretion, as a last resort. Even then, one should avoid using this flag when + running tests, if possible. + +```js +var generator = new sourceMap.SourceMapGenerator({ + file: "my-generated-javascript-file.js", + sourceRoot: "http://example.com/app/js/" +}); +``` + +#### SourceMapGenerator.fromSourceMap(sourceMapConsumer) + +Creates a new `SourceMapGenerator` from an existing `SourceMapConsumer` instance. + +* `sourceMapConsumer` The SourceMap. + +```js +var generator = sourceMap.SourceMapGenerator.fromSourceMap(consumer); +``` + +#### SourceMapGenerator.prototype.addMapping(mapping) + +Add a single mapping from original source line and column to the generated +source's line and column for this source map being created. The mapping object +should have the following properties: + +* `generated`: An object with the generated line and column positions. + +* `original`: An object with the original line and column positions. + +* `source`: The original source file (relative to the sourceRoot). + +* `name`: An optional original token name for this mapping. 
+ +```js +generator.addMapping({ + source: "module-one.scm", + original: { line: 128, column: 0 }, + generated: { line: 3, column: 456 } +}) +``` + +#### SourceMapGenerator.prototype.setSourceContent(sourceFile, sourceContent) + +Set the source content for an original source file. + +* `sourceFile` the URL of the original source file. + +* `sourceContent` the content of the source file. + +```js +generator.setSourceContent("module-one.scm", + fs.readFileSync("path/to/module-one.scm")) +``` + +#### SourceMapGenerator.prototype.applySourceMap(sourceMapConsumer[, sourceFile[, sourceMapPath]]) + +Applies a SourceMap for a source file to the SourceMap. +Each mapping to the supplied source file is rewritten using the +supplied SourceMap. Note: The resolution for the resulting mappings +is the minimum of this map and the supplied map. + +* `sourceMapConsumer`: The SourceMap to be applied. + +* `sourceFile`: Optional. The filename of the source file. + If omitted, sourceMapConsumer.file will be used, if it exists. + Otherwise an error will be thrown. + +* `sourceMapPath`: Optional. The dirname of the path to the SourceMap + to be applied. If relative, it is relative to the SourceMap. + + This parameter is needed when the two SourceMaps aren't in the same + directory, and the SourceMap to be applied contains relative source + paths. If so, those relative source paths need to be rewritten + relative to the SourceMap. + + If omitted, it is assumed that both SourceMaps are in the same directory, + thus not needing any rewriting. (Supplying `'.'` has the same effect.) + +#### SourceMapGenerator.prototype.toString() + +Renders the source map being generated to a string. + +```js +generator.toString() +// '{"version":3,"sources":["module-one.scm"],"names":[],"mappings":"...snip...","file":"my-generated-javascript-file.js","sourceRoot":"http://example.com/app/js/"}' +``` + +### SourceNode + +SourceNodes provide a way to abstract over interpolating and/or concatenating +snippets of generated JavaScript source code, while maintaining the line and +column information associated between those snippets and the original source +code. This is useful as the final intermediate representation a compiler might +use before outputting the generated JS and source map. + +#### new SourceNode([line, column, source[, chunk[, name]]]) + +* `line`: The original line number associated with this source node, or null if + it isn't associated with an original line. The line number is 1-based. + +* `column`: The original column number associated with this source node, or null + if it isn't associated with an original column. The column number + is 0-based. + +* `source`: The original source's filename; null if no filename is provided. + +* `chunk`: Optional. Is immediately passed to `SourceNode.prototype.add`, see + below. + +* `name`: Optional. The original identifier. + +```js +var node = new SourceNode(1, 2, "a.cpp", [ + new SourceNode(3, 4, "b.cpp", "extern int status;\n"), + new SourceNode(5, 6, "c.cpp", "std::string* make_string(size_t n);\n"), + new SourceNode(7, 8, "d.cpp", "int main(int argc, char** argv) {}\n"), +]); +``` + +#### SourceNode.fromStringWithSourceMap(code, sourceMapConsumer[, relativePath]) + +Creates a SourceNode from generated code and a SourceMapConsumer. + +* `code`: The generated code + +* `sourceMapConsumer` The SourceMap for the generated code + +* `relativePath` The optional path that relative sources in `sourceMapConsumer` + should be relative to. 
+ +```js +var consumer = new SourceMapConsumer(fs.readFileSync("path/to/my-file.js.map", "utf8")); +var node = SourceNode.fromStringWithSourceMap(fs.readFileSync("path/to/my-file.js"), + consumer); +``` + +#### SourceNode.prototype.add(chunk) + +Add a chunk of generated JS to this source node. + +* `chunk`: A string snippet of generated JS code, another instance of + `SourceNode`, or an array where each member is one of those things. + +```js +node.add(" + "); +node.add(otherNode); +node.add([leftHandOperandNode, " + ", rightHandOperandNode]); +``` + +#### SourceNode.prototype.prepend(chunk) + +Prepend a chunk of generated JS to this source node. + +* `chunk`: A string snippet of generated JS code, another instance of + `SourceNode`, or an array where each member is one of those things. + +```js +node.prepend("/** Build Id: f783haef86324gf **/\n\n"); +``` + +#### SourceNode.prototype.setSourceContent(sourceFile, sourceContent) + +Set the source content for a source file. This will be added to the +`SourceMap` in the `sourcesContent` field. + +* `sourceFile`: The filename of the source file + +* `sourceContent`: The content of the source file + +```js +node.setSourceContent("module-one.scm", + fs.readFileSync("path/to/module-one.scm")) +``` + +#### SourceNode.prototype.walk(fn) + +Walk over the tree of JS snippets in this node and its children. The walking +function is called once for each snippet of JS and is passed that snippet and +the its original associated source's line/column location. + +* `fn`: The traversal function. + +```js +var node = new SourceNode(1, 2, "a.js", [ + new SourceNode(3, 4, "b.js", "uno"), + "dos", + [ + "tres", + new SourceNode(5, 6, "c.js", "quatro") + ] +]); + +node.walk(function (code, loc) { console.log("WALK:", code, loc); }) +// WALK: uno { source: 'b.js', line: 3, column: 4, name: null } +// WALK: dos { source: 'a.js', line: 1, column: 2, name: null } +// WALK: tres { source: 'a.js', line: 1, column: 2, name: null } +// WALK: quatro { source: 'c.js', line: 5, column: 6, name: null } +``` + +#### SourceNode.prototype.walkSourceContents(fn) + +Walk over the tree of SourceNodes. The walking function is called for each +source file content and is passed the filename and source content. + +* `fn`: The traversal function. + +```js +var a = new SourceNode(1, 2, "a.js", "generated from a"); +a.setSourceContent("a.js", "original a"); +var b = new SourceNode(1, 2, "b.js", "generated from b"); +b.setSourceContent("b.js", "original b"); +var c = new SourceNode(1, 2, "c.js", "generated from c"); +c.setSourceContent("c.js", "original c"); + +var node = new SourceNode(null, null, null, [a, b, c]); +node.walkSourceContents(function (source, contents) { console.log("WALK:", source, ":", contents); }) +// WALK: a.js : original a +// WALK: b.js : original b +// WALK: c.js : original c +``` + +#### SourceNode.prototype.join(sep) + +Like `Array.prototype.join` except for SourceNodes. Inserts the separator +between each of this source node's children. + +* `sep`: The separator. + +```js +var lhs = new SourceNode(1, 2, "a.rs", "my_copy"); +var operand = new SourceNode(3, 4, "a.rs", "="); +var rhs = new SourceNode(5, 6, "a.rs", "orig.clone()"); + +var node = new SourceNode(null, null, null, [ lhs, operand, rhs ]); +var joinedNode = node.join(" "); +``` + +#### SourceNode.prototype.replaceRight(pattern, replacement) + +Call `String.prototype.replace` on the very right-most source snippet. Useful +for trimming white space from the end of a source node, etc. 
+ +* `pattern`: The pattern to replace. + +* `replacement`: The thing to replace the pattern with. + +```js +// Trim trailing white space. +node.replaceRight(/\s*$/, ""); +``` + +#### SourceNode.prototype.toString() + +Return the string representation of this source node. Walks over the tree and +concatenates all the various snippets together to one string. + +```js +var node = new SourceNode(1, 2, "a.js", [ + new SourceNode(3, 4, "b.js", "uno"), + "dos", + [ + "tres", + new SourceNode(5, 6, "c.js", "quatro") + ] +]); + +node.toString() +// 'unodostresquatro' +``` + +#### SourceNode.prototype.toStringWithSourceMap([startOfSourceMap]) + +Returns the string representation of this tree of source nodes, plus a +SourceMapGenerator which contains all the mappings between the generated and +original sources. + +The arguments are the same as those to `new SourceMapGenerator`. + +```js +var node = new SourceNode(1, 2, "a.js", [ + new SourceNode(3, 4, "b.js", "uno"), + "dos", + [ + "tres", + new SourceNode(5, 6, "c.js", "quatro") + ] +]); + +node.toStringWithSourceMap({ file: "my-output-file.js" }) +// { code: 'unodostresquatro', +// map: [object SourceMapGenerator] } +``` diff --git a/node_modules/source-map/dist/source-map.debug.js b/node_modules/source-map/dist/source-map.debug.js new file mode 100644 index 0000000..aad0620 --- /dev/null +++ b/node_modules/source-map/dist/source-map.debug.js @@ -0,0 +1,3234 @@ +(function webpackUniversalModuleDefinition(root, factory) { + if(typeof exports === 'object' && typeof module === 'object') + module.exports = factory(); + else if(typeof define === 'function' && define.amd) + define([], factory); + else if(typeof exports === 'object') + exports["sourceMap"] = factory(); + else + root["sourceMap"] = factory(); +})(this, function() { +return /******/ (function(modules) { // webpackBootstrap +/******/ // The module cache +/******/ var installedModules = {}; +/******/ +/******/ // The require function +/******/ function __webpack_require__(moduleId) { +/******/ +/******/ // Check if module is in cache +/******/ if(installedModules[moduleId]) +/******/ return installedModules[moduleId].exports; +/******/ +/******/ // Create a new module (and put it into the cache) +/******/ var module = installedModules[moduleId] = { +/******/ exports: {}, +/******/ id: moduleId, +/******/ loaded: false +/******/ }; +/******/ +/******/ // Execute the module function +/******/ modules[moduleId].call(module.exports, module, module.exports, __webpack_require__); +/******/ +/******/ // Flag the module as loaded +/******/ module.loaded = true; +/******/ +/******/ // Return the exports of the module +/******/ return module.exports; +/******/ } +/******/ +/******/ +/******/ // expose the modules object (__webpack_modules__) +/******/ __webpack_require__.m = modules; +/******/ +/******/ // expose the module cache +/******/ __webpack_require__.c = installedModules; +/******/ +/******/ // __webpack_public_path__ +/******/ __webpack_require__.p = ""; +/******/ +/******/ // Load entry module and return exports +/******/ return __webpack_require__(0); +/******/ }) +/************************************************************************/ +/******/ ([ +/* 0 */ +/***/ (function(module, exports, __webpack_require__) { + + /* + * Copyright 2009-2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. 
See LICENSE.txt or: + * http://opensource.org/licenses/BSD-3-Clause + */ + exports.SourceMapGenerator = __webpack_require__(1).SourceMapGenerator; + exports.SourceMapConsumer = __webpack_require__(7).SourceMapConsumer; + exports.SourceNode = __webpack_require__(10).SourceNode; + + +/***/ }), +/* 1 */ +/***/ (function(module, exports, __webpack_require__) { + + /* -*- Mode: js; js-indent-level: 2; -*- */ + /* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + + var base64VLQ = __webpack_require__(2); + var util = __webpack_require__(4); + var ArraySet = __webpack_require__(5).ArraySet; + var MappingList = __webpack_require__(6).MappingList; + + /** + * An instance of the SourceMapGenerator represents a source map which is + * being built incrementally. You may pass an object with the following + * properties: + * + * - file: The filename of the generated source. + * - sourceRoot: A root for all relative URLs in this source map. + */ + function SourceMapGenerator(aArgs) { + if (!aArgs) { + aArgs = {}; + } + this._file = util.getArg(aArgs, 'file', null); + this._sourceRoot = util.getArg(aArgs, 'sourceRoot', null); + this._skipValidation = util.getArg(aArgs, 'skipValidation', false); + this._sources = new ArraySet(); + this._names = new ArraySet(); + this._mappings = new MappingList(); + this._sourcesContents = null; + } + + SourceMapGenerator.prototype._version = 3; + + /** + * Creates a new SourceMapGenerator based on a SourceMapConsumer + * + * @param aSourceMapConsumer The SourceMap. + */ + SourceMapGenerator.fromSourceMap = + function SourceMapGenerator_fromSourceMap(aSourceMapConsumer) { + var sourceRoot = aSourceMapConsumer.sourceRoot; + var generator = new SourceMapGenerator({ + file: aSourceMapConsumer.file, + sourceRoot: sourceRoot + }); + aSourceMapConsumer.eachMapping(function (mapping) { + var newMapping = { + generated: { + line: mapping.generatedLine, + column: mapping.generatedColumn + } + }; + + if (mapping.source != null) { + newMapping.source = mapping.source; + if (sourceRoot != null) { + newMapping.source = util.relative(sourceRoot, newMapping.source); + } + + newMapping.original = { + line: mapping.originalLine, + column: mapping.originalColumn + }; + + if (mapping.name != null) { + newMapping.name = mapping.name; + } + } + + generator.addMapping(newMapping); + }); + aSourceMapConsumer.sources.forEach(function (sourceFile) { + var sourceRelative = sourceFile; + if (sourceRoot !== null) { + sourceRelative = util.relative(sourceRoot, sourceFile); + } + + if (!generator._sources.has(sourceRelative)) { + generator._sources.add(sourceRelative); + } + + var content = aSourceMapConsumer.sourceContentFor(sourceFile); + if (content != null) { + generator.setSourceContent(sourceFile, content); + } + }); + return generator; + }; + + /** + * Add a single mapping from original source line and column to the generated + * source's line and column for this source map being created. The mapping + * object should have the following properties: + * + * - generated: An object with the generated line and column positions. + * - original: An object with the original line and column positions. + * - source: The original source file (relative to the sourceRoot). + * - name: An optional original token name for this mapping. 
+ */ + SourceMapGenerator.prototype.addMapping = + function SourceMapGenerator_addMapping(aArgs) { + var generated = util.getArg(aArgs, 'generated'); + var original = util.getArg(aArgs, 'original', null); + var source = util.getArg(aArgs, 'source', null); + var name = util.getArg(aArgs, 'name', null); + + if (!this._skipValidation) { + this._validateMapping(generated, original, source, name); + } + + if (source != null) { + source = String(source); + if (!this._sources.has(source)) { + this._sources.add(source); + } + } + + if (name != null) { + name = String(name); + if (!this._names.has(name)) { + this._names.add(name); + } + } + + this._mappings.add({ + generatedLine: generated.line, + generatedColumn: generated.column, + originalLine: original != null && original.line, + originalColumn: original != null && original.column, + source: source, + name: name + }); + }; + + /** + * Set the source content for a source file. + */ + SourceMapGenerator.prototype.setSourceContent = + function SourceMapGenerator_setSourceContent(aSourceFile, aSourceContent) { + var source = aSourceFile; + if (this._sourceRoot != null) { + source = util.relative(this._sourceRoot, source); + } + + if (aSourceContent != null) { + // Add the source content to the _sourcesContents map. + // Create a new _sourcesContents map if the property is null. + if (!this._sourcesContents) { + this._sourcesContents = Object.create(null); + } + this._sourcesContents[util.toSetString(source)] = aSourceContent; + } else if (this._sourcesContents) { + // Remove the source file from the _sourcesContents map. + // If the _sourcesContents map is empty, set the property to null. + delete this._sourcesContents[util.toSetString(source)]; + if (Object.keys(this._sourcesContents).length === 0) { + this._sourcesContents = null; + } + } + }; + + /** + * Applies the mappings of a sub-source-map for a specific source file to the + * source map being generated. Each mapping to the supplied source file is + * rewritten using the supplied source map. Note: The resolution for the + * resulting mappings is the minimium of this map and the supplied map. + * + * @param aSourceMapConsumer The source map to be applied. + * @param aSourceFile Optional. The filename of the source file. + * If omitted, SourceMapConsumer's file property will be used. + * @param aSourceMapPath Optional. The dirname of the path to the source map + * to be applied. If relative, it is relative to the SourceMapConsumer. + * This parameter is needed when the two source maps aren't in the same + * directory, and the source map to be applied contains relative source + * paths. If so, those relative source paths need to be rewritten + * relative to the SourceMapGenerator. + */ + SourceMapGenerator.prototype.applySourceMap = + function SourceMapGenerator_applySourceMap(aSourceMapConsumer, aSourceFile, aSourceMapPath) { + var sourceFile = aSourceFile; + // If aSourceFile is omitted, we will use the file property of the SourceMap + if (aSourceFile == null) { + if (aSourceMapConsumer.file == null) { + throw new Error( + 'SourceMapGenerator.prototype.applySourceMap requires either an explicit source file, ' + + 'or the source map\'s "file" property. Both were omitted.' + ); + } + sourceFile = aSourceMapConsumer.file; + } + var sourceRoot = this._sourceRoot; + // Make "sourceFile" relative if an absolute Url is passed. 
+ if (sourceRoot != null) { + sourceFile = util.relative(sourceRoot, sourceFile); + } + // Applying the SourceMap can add and remove items from the sources and + // the names array. + var newSources = new ArraySet(); + var newNames = new ArraySet(); + + // Find mappings for the "sourceFile" + this._mappings.unsortedForEach(function (mapping) { + if (mapping.source === sourceFile && mapping.originalLine != null) { + // Check if it can be mapped by the source map, then update the mapping. + var original = aSourceMapConsumer.originalPositionFor({ + line: mapping.originalLine, + column: mapping.originalColumn + }); + if (original.source != null) { + // Copy mapping + mapping.source = original.source; + if (aSourceMapPath != null) { + mapping.source = util.join(aSourceMapPath, mapping.source) + } + if (sourceRoot != null) { + mapping.source = util.relative(sourceRoot, mapping.source); + } + mapping.originalLine = original.line; + mapping.originalColumn = original.column; + if (original.name != null) { + mapping.name = original.name; + } + } + } + + var source = mapping.source; + if (source != null && !newSources.has(source)) { + newSources.add(source); + } + + var name = mapping.name; + if (name != null && !newNames.has(name)) { + newNames.add(name); + } + + }, this); + this._sources = newSources; + this._names = newNames; + + // Copy sourcesContents of applied map. + aSourceMapConsumer.sources.forEach(function (sourceFile) { + var content = aSourceMapConsumer.sourceContentFor(sourceFile); + if (content != null) { + if (aSourceMapPath != null) { + sourceFile = util.join(aSourceMapPath, sourceFile); + } + if (sourceRoot != null) { + sourceFile = util.relative(sourceRoot, sourceFile); + } + this.setSourceContent(sourceFile, content); + } + }, this); + }; + + /** + * A mapping can have one of the three levels of data: + * + * 1. Just the generated position. + * 2. The Generated position, original position, and original source. + * 3. Generated and original position, original source, as well as a name + * token. + * + * To maintain consistency, we validate that any new mapping being added falls + * in to one of these categories. + */ + SourceMapGenerator.prototype._validateMapping = + function SourceMapGenerator_validateMapping(aGenerated, aOriginal, aSource, + aName) { + // When aOriginal is truthy but has empty values for .line and .column, + // it is most likely a programmer error. In this case we throw a very + // specific error message to try to guide them the right way. + // For example: https://github.com/Polymer/polymer-bundler/pull/519 + if (aOriginal && typeof aOriginal.line !== 'number' && typeof aOriginal.column !== 'number') { + throw new Error( + 'original.line and original.column are not numbers -- you probably meant to omit ' + + 'the original mapping entirely and only map the generated position. If so, pass ' + + 'null for the original mapping instead of an object with empty or null values.' + ); + } + + if (aGenerated && 'line' in aGenerated && 'column' in aGenerated + && aGenerated.line > 0 && aGenerated.column >= 0 + && !aOriginal && !aSource && !aName) { + // Case 1. + return; + } + else if (aGenerated && 'line' in aGenerated && 'column' in aGenerated + && aOriginal && 'line' in aOriginal && 'column' in aOriginal + && aGenerated.line > 0 && aGenerated.column >= 0 + && aOriginal.line > 0 && aOriginal.column >= 0 + && aSource) { + // Cases 2 and 3. 
+ return; + } + else { + throw new Error('Invalid mapping: ' + JSON.stringify({ + generated: aGenerated, + source: aSource, + original: aOriginal, + name: aName + })); + } + }; + + /** + * Serialize the accumulated mappings in to the stream of base 64 VLQs + * specified by the source map format. + */ + SourceMapGenerator.prototype._serializeMappings = + function SourceMapGenerator_serializeMappings() { + var previousGeneratedColumn = 0; + var previousGeneratedLine = 1; + var previousOriginalColumn = 0; + var previousOriginalLine = 0; + var previousName = 0; + var previousSource = 0; + var result = ''; + var next; + var mapping; + var nameIdx; + var sourceIdx; + + var mappings = this._mappings.toArray(); + for (var i = 0, len = mappings.length; i < len; i++) { + mapping = mappings[i]; + next = '' + + if (mapping.generatedLine !== previousGeneratedLine) { + previousGeneratedColumn = 0; + while (mapping.generatedLine !== previousGeneratedLine) { + next += ';'; + previousGeneratedLine++; + } + } + else { + if (i > 0) { + if (!util.compareByGeneratedPositionsInflated(mapping, mappings[i - 1])) { + continue; + } + next += ','; + } + } + + next += base64VLQ.encode(mapping.generatedColumn + - previousGeneratedColumn); + previousGeneratedColumn = mapping.generatedColumn; + + if (mapping.source != null) { + sourceIdx = this._sources.indexOf(mapping.source); + next += base64VLQ.encode(sourceIdx - previousSource); + previousSource = sourceIdx; + + // lines are stored 0-based in SourceMap spec version 3 + next += base64VLQ.encode(mapping.originalLine - 1 + - previousOriginalLine); + previousOriginalLine = mapping.originalLine - 1; + + next += base64VLQ.encode(mapping.originalColumn + - previousOriginalColumn); + previousOriginalColumn = mapping.originalColumn; + + if (mapping.name != null) { + nameIdx = this._names.indexOf(mapping.name); + next += base64VLQ.encode(nameIdx - previousName); + previousName = nameIdx; + } + } + + result += next; + } + + return result; + }; + + SourceMapGenerator.prototype._generateSourcesContent = + function SourceMapGenerator_generateSourcesContent(aSources, aSourceRoot) { + return aSources.map(function (source) { + if (!this._sourcesContents) { + return null; + } + if (aSourceRoot != null) { + source = util.relative(aSourceRoot, source); + } + var key = util.toSetString(source); + return Object.prototype.hasOwnProperty.call(this._sourcesContents, key) + ? this._sourcesContents[key] + : null; + }, this); + }; + + /** + * Externalize the source map. + */ + SourceMapGenerator.prototype.toJSON = + function SourceMapGenerator_toJSON() { + var map = { + version: this._version, + sources: this._sources.toArray(), + names: this._names.toArray(), + mappings: this._serializeMappings() + }; + if (this._file != null) { + map.file = this._file; + } + if (this._sourceRoot != null) { + map.sourceRoot = this._sourceRoot; + } + if (this._sourcesContents) { + map.sourcesContent = this._generateSourcesContent(map.sources, map.sourceRoot); + } + + return map; + }; + + /** + * Render the source map being generated to a string. + */ + SourceMapGenerator.prototype.toString = + function SourceMapGenerator_toString() { + return JSON.stringify(this.toJSON()); + }; + + exports.SourceMapGenerator = SourceMapGenerator; + + +/***/ }), +/* 2 */ +/***/ (function(module, exports, __webpack_require__) { + + /* -*- Mode: js; js-indent-level: 2; -*- */ + /* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. 
See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + * + * Based on the Base 64 VLQ implementation in Closure Compiler: + * https://code.google.com/p/closure-compiler/source/browse/trunk/src/com/google/debugging/sourcemap/Base64VLQ.java + * + * Copyright 2011 The Closure Compiler Authors. All rights reserved. + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are + * met: + * + * * Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above + * copyright notice, this list of conditions and the following + * disclaimer in the documentation and/or other materials provided + * with the distribution. + * * Neither the name of Google Inc. nor the names of its + * contributors may be used to endorse or promote products derived + * from this software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ + + var base64 = __webpack_require__(3); + + // A single base 64 digit can contain 6 bits of data. For the base 64 variable + // length quantities we use in the source map spec, the first bit is the sign, + // the next four bits are the actual value, and the 6th bit is the + // continuation bit. The continuation bit tells us whether there are more + // digits in this value following this digit. + // + // Continuation + // | Sign + // | | + // V V + // 101011 + + var VLQ_BASE_SHIFT = 5; + + // binary: 100000 + var VLQ_BASE = 1 << VLQ_BASE_SHIFT; + + // binary: 011111 + var VLQ_BASE_MASK = VLQ_BASE - 1; + + // binary: 100000 + var VLQ_CONTINUATION_BIT = VLQ_BASE; + + /** + * Converts from a two-complement value to a value where the sign bit is + * placed in the least significant bit. For example, as decimals: + * 1 becomes 2 (10 binary), -1 becomes 3 (11 binary) + * 2 becomes 4 (100 binary), -2 becomes 5 (101 binary) + */ + function toVLQSigned(aValue) { + return aValue < 0 + ? ((-aValue) << 1) + 1 + : (aValue << 1) + 0; + } + + /** + * Converts to a two-complement value from a value where the sign bit is + * placed in the least significant bit. For example, as decimals: + * 2 (10 binary) becomes 1, 3 (11 binary) becomes -1 + * 4 (100 binary) becomes 2, 5 (101 binary) becomes -2 + */ + function fromVLQSigned(aValue) { + var isNegative = (aValue & 1) === 1; + var shifted = aValue >> 1; + return isNegative + ? -shifted + : shifted; + } + + /** + * Returns the base 64 VLQ encoded value. 
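+   *
+   * Worked example (added by the editor as an illustration; not part of the
+   * upstream bundle): encode(16) first maps 16 to the VLQ-signed value 32.
+   * The low five bits (32 & VLQ_BASE_MASK) are 0; because more bits remain,
+   * the continuation bit is set, giving digit 32, i.e. base64 'g'. The
+   * remaining value (32 >>> VLQ_BASE_SHIFT) is 1, which encodes as 'B', so
+   * encode(16) === 'gB'. Similarly encode(0) === 'A', encode(1) === 'C' and
+   * encode(-1) === 'D'.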
+ */ + exports.encode = function base64VLQ_encode(aValue) { + var encoded = ""; + var digit; + + var vlq = toVLQSigned(aValue); + + do { + digit = vlq & VLQ_BASE_MASK; + vlq >>>= VLQ_BASE_SHIFT; + if (vlq > 0) { + // There are still more digits in this value, so we must make sure the + // continuation bit is marked. + digit |= VLQ_CONTINUATION_BIT; + } + encoded += base64.encode(digit); + } while (vlq > 0); + + return encoded; + }; + + /** + * Decodes the next base 64 VLQ value from the given string and returns the + * value and the rest of the string via the out parameter. + */ + exports.decode = function base64VLQ_decode(aStr, aIndex, aOutParam) { + var strLen = aStr.length; + var result = 0; + var shift = 0; + var continuation, digit; + + do { + if (aIndex >= strLen) { + throw new Error("Expected more digits in base 64 VLQ value."); + } + + digit = base64.decode(aStr.charCodeAt(aIndex++)); + if (digit === -1) { + throw new Error("Invalid base64 digit: " + aStr.charAt(aIndex - 1)); + } + + continuation = !!(digit & VLQ_CONTINUATION_BIT); + digit &= VLQ_BASE_MASK; + result = result + (digit << shift); + shift += VLQ_BASE_SHIFT; + } while (continuation); + + aOutParam.value = fromVLQSigned(result); + aOutParam.rest = aIndex; + }; + + +/***/ }), +/* 3 */ +/***/ (function(module, exports) { + + /* -*- Mode: js; js-indent-level: 2; -*- */ + /* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + + var intToCharMap = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'.split(''); + + /** + * Encode an integer in the range of 0 to 63 to a single base 64 digit. + */ + exports.encode = function (number) { + if (0 <= number && number < intToCharMap.length) { + return intToCharMap[number]; + } + throw new TypeError("Must be between 0 and 63: " + number); + }; + + /** + * Decode a single base 64 character code digit to an integer. Returns -1 on + * failure. + */ + exports.decode = function (charCode) { + var bigA = 65; // 'A' + var bigZ = 90; // 'Z' + + var littleA = 97; // 'a' + var littleZ = 122; // 'z' + + var zero = 48; // '0' + var nine = 57; // '9' + + var plus = 43; // '+' + var slash = 47; // '/' + + var littleOffset = 26; + var numberOffset = 52; + + // 0 - 25: ABCDEFGHIJKLMNOPQRSTUVWXYZ + if (bigA <= charCode && charCode <= bigZ) { + return (charCode - bigA); + } + + // 26 - 51: abcdefghijklmnopqrstuvwxyz + if (littleA <= charCode && charCode <= littleZ) { + return (charCode - littleA + littleOffset); + } + + // 52 - 61: 0123456789 + if (zero <= charCode && charCode <= nine) { + return (charCode - zero + numberOffset); + } + + // 62: + + if (charCode == plus) { + return 62; + } + + // 63: / + if (charCode == slash) { + return 63; + } + + // Invalid base64 digit. + return -1; + }; + + +/***/ }), +/* 4 */ +/***/ (function(module, exports) { + + /* -*- Mode: js; js-indent-level: 2; -*- */ + /* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + + /** + * This is a helper function for getting values from parameter/options + * objects. + * + * @param args The object we are extracting values from + * @param name The name of the property we are getting. + * @param defaultValue An optional value to return if the property is missing + * from the object. If this is not specified and the property is missing, an + * error will be thrown. 
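+   *
+   * For example (editor's illustration, not part of the upstream bundle):
+   *
+   *   getArg({ file: 'a.js' }, 'file')        // => 'a.js'
+   *   getArg({ file: 'a.js' }, 'name', null)  // => null (default used)
+   *   getArg({}, 'file')                      // throws '"file" is a required argument.'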
+ */ + function getArg(aArgs, aName, aDefaultValue) { + if (aName in aArgs) { + return aArgs[aName]; + } else if (arguments.length === 3) { + return aDefaultValue; + } else { + throw new Error('"' + aName + '" is a required argument.'); + } + } + exports.getArg = getArg; + + var urlRegexp = /^(?:([\w+\-.]+):)?\/\/(?:(\w+:\w+)@)?([\w.-]*)(?::(\d+))?(.*)$/; + var dataUrlRegexp = /^data:.+\,.+$/; + + function urlParse(aUrl) { + var match = aUrl.match(urlRegexp); + if (!match) { + return null; + } + return { + scheme: match[1], + auth: match[2], + host: match[3], + port: match[4], + path: match[5] + }; + } + exports.urlParse = urlParse; + + function urlGenerate(aParsedUrl) { + var url = ''; + if (aParsedUrl.scheme) { + url += aParsedUrl.scheme + ':'; + } + url += '//'; + if (aParsedUrl.auth) { + url += aParsedUrl.auth + '@'; + } + if (aParsedUrl.host) { + url += aParsedUrl.host; + } + if (aParsedUrl.port) { + url += ":" + aParsedUrl.port + } + if (aParsedUrl.path) { + url += aParsedUrl.path; + } + return url; + } + exports.urlGenerate = urlGenerate; + + /** + * Normalizes a path, or the path portion of a URL: + * + * - Replaces consecutive slashes with one slash. + * - Removes unnecessary '.' parts. + * - Removes unnecessary '/..' parts. + * + * Based on code in the Node.js 'path' core module. + * + * @param aPath The path or url to normalize. + */ + function normalize(aPath) { + var path = aPath; + var url = urlParse(aPath); + if (url) { + if (!url.path) { + return aPath; + } + path = url.path; + } + var isAbsolute = exports.isAbsolute(path); + + var parts = path.split(/\/+/); + for (var part, up = 0, i = parts.length - 1; i >= 0; i--) { + part = parts[i]; + if (part === '.') { + parts.splice(i, 1); + } else if (part === '..') { + up++; + } else if (up > 0) { + if (part === '') { + // The first part is blank if the path is absolute. Trying to go + // above the root is a no-op. Therefore we can remove all '..' parts + // directly after the root. + parts.splice(i + 1, up); + up = 0; + } else { + parts.splice(i, 2); + up--; + } + } + } + path = parts.join('/'); + + if (path === '') { + path = isAbsolute ? '/' : '.'; + } + + if (url) { + url.path = path; + return urlGenerate(url); + } + return path; + } + exports.normalize = normalize; + + /** + * Joins two paths/URLs. + * + * @param aRoot The root path or URL. + * @param aPath The path or URL to be joined with the root. + * + * - If aPath is a URL or a data URI, aPath is returned, unless aPath is a + * scheme-relative URL: Then the scheme of aRoot, if any, is prepended + * first. + * - Otherwise aPath is a path. If aRoot is a URL, then its path portion + * is updated with the result and aRoot is returned. Otherwise the result + * is returned. + * - If aPath is absolute, the result is aPath. + * - Otherwise the two paths are joined with a slash. + * - Joining for example 'http://' and 'www.example.com' is also supported. 
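+   *
+   * A few concrete results (editor's illustration, not part of the upstream
+   * bundle), following the behaviour implemented below:
+   *
+   *   join('a/b', 'c')                              // => 'a/b/c'
+   *   join('http://example.com/www/js/', 'one.js')  // => 'http://example.com/www/js/one.js'
+   *   join('http://example.com/www/js/', '/two.js') // => 'http://example.com/two.js'
+   *   join('a/b', 'http://cdn.example.com/x.js')    // => 'http://cdn.example.com/x.js'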
+ */ + function join(aRoot, aPath) { + if (aRoot === "") { + aRoot = "."; + } + if (aPath === "") { + aPath = "."; + } + var aPathUrl = urlParse(aPath); + var aRootUrl = urlParse(aRoot); + if (aRootUrl) { + aRoot = aRootUrl.path || '/'; + } + + // `join(foo, '//www.example.org')` + if (aPathUrl && !aPathUrl.scheme) { + if (aRootUrl) { + aPathUrl.scheme = aRootUrl.scheme; + } + return urlGenerate(aPathUrl); + } + + if (aPathUrl || aPath.match(dataUrlRegexp)) { + return aPath; + } + + // `join('http://', 'www.example.com')` + if (aRootUrl && !aRootUrl.host && !aRootUrl.path) { + aRootUrl.host = aPath; + return urlGenerate(aRootUrl); + } + + var joined = aPath.charAt(0) === '/' + ? aPath + : normalize(aRoot.replace(/\/+$/, '') + '/' + aPath); + + if (aRootUrl) { + aRootUrl.path = joined; + return urlGenerate(aRootUrl); + } + return joined; + } + exports.join = join; + + exports.isAbsolute = function (aPath) { + return aPath.charAt(0) === '/' || urlRegexp.test(aPath); + }; + + /** + * Make a path relative to a URL or another path. + * + * @param aRoot The root path or URL. + * @param aPath The path or URL to be made relative to aRoot. + */ + function relative(aRoot, aPath) { + if (aRoot === "") { + aRoot = "."; + } + + aRoot = aRoot.replace(/\/$/, ''); + + // It is possible for the path to be above the root. In this case, simply + // checking whether the root is a prefix of the path won't work. Instead, we + // need to remove components from the root one by one, until either we find + // a prefix that fits, or we run out of components to remove. + var level = 0; + while (aPath.indexOf(aRoot + '/') !== 0) { + var index = aRoot.lastIndexOf("/"); + if (index < 0) { + return aPath; + } + + // If the only part of the root that is left is the scheme (i.e. http://, + // file:///, etc.), one or more slashes (/), or simply nothing at all, we + // have exhausted all components, so the path is not relative to the root. + aRoot = aRoot.slice(0, index); + if (aRoot.match(/^([^\/]+:\/)?\/*$/)) { + return aPath; + } + + ++level; + } + + // Make sure we add a "../" for each component we removed from the root. + return Array(level + 1).join("../") + aPath.substr(aRoot.length + 1); + } + exports.relative = relative; + + var supportsNullProto = (function () { + var obj = Object.create(null); + return !('__proto__' in obj); + }()); + + function identity (s) { + return s; + } + + /** + * Because behavior goes wacky when you set `__proto__` on objects, we + * have to prefix all the strings in our set with an arbitrary character. + * + * See https://github.com/mozilla/source-map/pull/31 and + * https://github.com/mozilla/source-map/issues/30 + * + * @param String aStr + */ + function toSetString(aStr) { + if (isProtoString(aStr)) { + return '$' + aStr; + } + + return aStr; + } + exports.toSetString = supportsNullProto ? identity : toSetString; + + function fromSetString(aStr) { + if (isProtoString(aStr)) { + return aStr.slice(1); + } + + return aStr; + } + exports.fromSetString = supportsNullProto ? 
identity : fromSetString; + + function isProtoString(s) { + if (!s) { + return false; + } + + var length = s.length; + + if (length < 9 /* "__proto__".length */) { + return false; + } + + if (s.charCodeAt(length - 1) !== 95 /* '_' */ || + s.charCodeAt(length - 2) !== 95 /* '_' */ || + s.charCodeAt(length - 3) !== 111 /* 'o' */ || + s.charCodeAt(length - 4) !== 116 /* 't' */ || + s.charCodeAt(length - 5) !== 111 /* 'o' */ || + s.charCodeAt(length - 6) !== 114 /* 'r' */ || + s.charCodeAt(length - 7) !== 112 /* 'p' */ || + s.charCodeAt(length - 8) !== 95 /* '_' */ || + s.charCodeAt(length - 9) !== 95 /* '_' */) { + return false; + } + + for (var i = length - 10; i >= 0; i--) { + if (s.charCodeAt(i) !== 36 /* '$' */) { + return false; + } + } + + return true; + } + + /** + * Comparator between two mappings where the original positions are compared. + * + * Optionally pass in `true` as `onlyCompareGenerated` to consider two + * mappings with the same original source/line/column, but different generated + * line and column the same. Useful when searching for a mapping with a + * stubbed out mapping. + */ + function compareByOriginalPositions(mappingA, mappingB, onlyCompareOriginal) { + var cmp = strcmp(mappingA.source, mappingB.source); + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.originalLine - mappingB.originalLine; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.originalColumn - mappingB.originalColumn; + if (cmp !== 0 || onlyCompareOriginal) { + return cmp; + } + + cmp = mappingA.generatedColumn - mappingB.generatedColumn; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.generatedLine - mappingB.generatedLine; + if (cmp !== 0) { + return cmp; + } + + return strcmp(mappingA.name, mappingB.name); + } + exports.compareByOriginalPositions = compareByOriginalPositions; + + /** + * Comparator between two mappings with deflated source and name indices where + * the generated positions are compared. + * + * Optionally pass in `true` as `onlyCompareGenerated` to consider two + * mappings with the same generated line and column, but different + * source/name/original line and column the same. Useful when searching for a + * mapping with a stubbed out mapping. + */ + function compareByGeneratedPositionsDeflated(mappingA, mappingB, onlyCompareGenerated) { + var cmp = mappingA.generatedLine - mappingB.generatedLine; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.generatedColumn - mappingB.generatedColumn; + if (cmp !== 0 || onlyCompareGenerated) { + return cmp; + } + + cmp = strcmp(mappingA.source, mappingB.source); + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.originalLine - mappingB.originalLine; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.originalColumn - mappingB.originalColumn; + if (cmp !== 0) { + return cmp; + } + + return strcmp(mappingA.name, mappingB.name); + } + exports.compareByGeneratedPositionsDeflated = compareByGeneratedPositionsDeflated; + + function strcmp(aStr1, aStr2) { + if (aStr1 === aStr2) { + return 0; + } + + if (aStr1 === null) { + return 1; // aStr2 !== null + } + + if (aStr2 === null) { + return -1; // aStr1 !== null + } + + if (aStr1 > aStr2) { + return 1; + } + + return -1; + } + + /** + * Comparator between two mappings with inflated source and name strings where + * the generated positions are compared. 
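+   *
+   * For instance (editor's note, not part of the upstream bundle), comparing
+   * { generatedLine: 1, generatedColumn: 4, ... } against
+   * { generatedLine: 1, generatedColumn: 2, ... } returns a positive number:
+   * sorting with this comparator orders mappings by generated line, then
+   * generated column, and only falls back to source, original position and
+   * name to break ties.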
+ */ + function compareByGeneratedPositionsInflated(mappingA, mappingB) { + var cmp = mappingA.generatedLine - mappingB.generatedLine; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.generatedColumn - mappingB.generatedColumn; + if (cmp !== 0) { + return cmp; + } + + cmp = strcmp(mappingA.source, mappingB.source); + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.originalLine - mappingB.originalLine; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.originalColumn - mappingB.originalColumn; + if (cmp !== 0) { + return cmp; + } + + return strcmp(mappingA.name, mappingB.name); + } + exports.compareByGeneratedPositionsInflated = compareByGeneratedPositionsInflated; + + /** + * Strip any JSON XSSI avoidance prefix from the string (as documented + * in the source maps specification), and then parse the string as + * JSON. + */ + function parseSourceMapInput(str) { + return JSON.parse(str.replace(/^\)]}'[^\n]*\n/, '')); + } + exports.parseSourceMapInput = parseSourceMapInput; + + /** + * Compute the URL of a source given the the source root, the source's + * URL, and the source map's URL. + */ + function computeSourceURL(sourceRoot, sourceURL, sourceMapURL) { + sourceURL = sourceURL || ''; + + if (sourceRoot) { + // This follows what Chrome does. + if (sourceRoot[sourceRoot.length - 1] !== '/' && sourceURL[0] !== '/') { + sourceRoot += '/'; + } + // The spec says: + // Line 4: An optional source root, useful for relocating source + // files on a server or removing repeated values in the + // “sources” entry. This value is prepended to the individual + // entries in the “source” field. + sourceURL = sourceRoot + sourceURL; + } + + // Historically, SourceMapConsumer did not take the sourceMapURL as + // a parameter. This mode is still somewhat supported, which is why + // this code block is conditional. However, it's preferable to pass + // the source map URL to SourceMapConsumer, so that this function + // can implement the source URL resolution algorithm as outlined in + // the spec. This block is basically the equivalent of: + // new URL(sourceURL, sourceMapURL).toString() + // ... except it avoids using URL, which wasn't available in the + // older releases of node still supported by this library. + // + // The spec says: + // If the sources are not absolute URLs after prepending of the + // “sourceRoot”, the sources are resolved relative to the + // SourceMap (like resolving script src in a html document). + if (sourceMapURL) { + var parsed = urlParse(sourceMapURL); + if (!parsed) { + throw new Error("sourceMapURL could not be parsed"); + } + if (parsed.path) { + // Strip the last path component, but keep the "/". + var index = parsed.path.lastIndexOf('/'); + if (index >= 0) { + parsed.path = parsed.path.substring(0, index + 1); + } + } + sourceURL = join(urlGenerate(parsed), sourceURL); + } + + return normalize(sourceURL); + } + exports.computeSourceURL = computeSourceURL; + + +/***/ }), +/* 5 */ +/***/ (function(module, exports, __webpack_require__) { + + /* -*- Mode: js; js-indent-level: 2; -*- */ + /* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + + var util = __webpack_require__(4); + var has = Object.prototype.hasOwnProperty; + var hasNativeMap = typeof Map !== "undefined"; + + /** + * A data structure which is a combination of an array and a set. 
Adding a new + * member is O(1), testing for membership is O(1), and finding the index of an + * element is O(1). Removing elements from the set is not supported. Only + * strings are supported for membership. + */ + function ArraySet() { + this._array = []; + this._set = hasNativeMap ? new Map() : Object.create(null); + } + + /** + * Static method for creating ArraySet instances from an existing array. + */ + ArraySet.fromArray = function ArraySet_fromArray(aArray, aAllowDuplicates) { + var set = new ArraySet(); + for (var i = 0, len = aArray.length; i < len; i++) { + set.add(aArray[i], aAllowDuplicates); + } + return set; + }; + + /** + * Return how many unique items are in this ArraySet. If duplicates have been + * added, than those do not count towards the size. + * + * @returns Number + */ + ArraySet.prototype.size = function ArraySet_size() { + return hasNativeMap ? this._set.size : Object.getOwnPropertyNames(this._set).length; + }; + + /** + * Add the given string to this set. + * + * @param String aStr + */ + ArraySet.prototype.add = function ArraySet_add(aStr, aAllowDuplicates) { + var sStr = hasNativeMap ? aStr : util.toSetString(aStr); + var isDuplicate = hasNativeMap ? this.has(aStr) : has.call(this._set, sStr); + var idx = this._array.length; + if (!isDuplicate || aAllowDuplicates) { + this._array.push(aStr); + } + if (!isDuplicate) { + if (hasNativeMap) { + this._set.set(aStr, idx); + } else { + this._set[sStr] = idx; + } + } + }; + + /** + * Is the given string a member of this set? + * + * @param String aStr + */ + ArraySet.prototype.has = function ArraySet_has(aStr) { + if (hasNativeMap) { + return this._set.has(aStr); + } else { + var sStr = util.toSetString(aStr); + return has.call(this._set, sStr); + } + }; + + /** + * What is the index of the given string in the array? + * + * @param String aStr + */ + ArraySet.prototype.indexOf = function ArraySet_indexOf(aStr) { + if (hasNativeMap) { + var idx = this._set.get(aStr); + if (idx >= 0) { + return idx; + } + } else { + var sStr = util.toSetString(aStr); + if (has.call(this._set, sStr)) { + return this._set[sStr]; + } + } + + throw new Error('"' + aStr + '" is not in the set.'); + }; + + /** + * What is the element at the given index? + * + * @param Number aIdx + */ + ArraySet.prototype.at = function ArraySet_at(aIdx) { + if (aIdx >= 0 && aIdx < this._array.length) { + return this._array[aIdx]; + } + throw new Error('No element indexed by ' + aIdx); + }; + + /** + * Returns the array representation of this set (which has the proper indices + * indicated by indexOf). Note that this is a copy of the internal array used + * for storing the members so that no one can mess with internal state. + */ + ArraySet.prototype.toArray = function ArraySet_toArray() { + return this._array.slice(); + }; + + exports.ArraySet = ArraySet; + + +/***/ }), +/* 6 */ +/***/ (function(module, exports, __webpack_require__) { + + /* -*- Mode: js; js-indent-level: 2; -*- */ + /* + * Copyright 2014 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + + var util = __webpack_require__(4); + + /** + * Determine whether mappingB is after mappingA with respect to generated + * position. 
+ */ + function generatedPositionAfter(mappingA, mappingB) { + // Optimized for most common case + var lineA = mappingA.generatedLine; + var lineB = mappingB.generatedLine; + var columnA = mappingA.generatedColumn; + var columnB = mappingB.generatedColumn; + return lineB > lineA || lineB == lineA && columnB >= columnA || + util.compareByGeneratedPositionsInflated(mappingA, mappingB) <= 0; + } + + /** + * A data structure to provide a sorted view of accumulated mappings in a + * performance conscious manner. It trades a neglibable overhead in general + * case for a large speedup in case of mappings being added in order. + */ + function MappingList() { + this._array = []; + this._sorted = true; + // Serves as infimum + this._last = {generatedLine: -1, generatedColumn: 0}; + } + + /** + * Iterate through internal items. This method takes the same arguments that + * `Array.prototype.forEach` takes. + * + * NOTE: The order of the mappings is NOT guaranteed. + */ + MappingList.prototype.unsortedForEach = + function MappingList_forEach(aCallback, aThisArg) { + this._array.forEach(aCallback, aThisArg); + }; + + /** + * Add the given source mapping. + * + * @param Object aMapping + */ + MappingList.prototype.add = function MappingList_add(aMapping) { + if (generatedPositionAfter(this._last, aMapping)) { + this._last = aMapping; + this._array.push(aMapping); + } else { + this._sorted = false; + this._array.push(aMapping); + } + }; + + /** + * Returns the flat, sorted array of mappings. The mappings are sorted by + * generated position. + * + * WARNING: This method returns internal data without copying, for + * performance. The return value must NOT be mutated, and should be treated as + * an immutable borrow. If you want to take ownership, you must make your own + * copy. + */ + MappingList.prototype.toArray = function MappingList_toArray() { + if (!this._sorted) { + this._array.sort(util.compareByGeneratedPositionsInflated); + this._sorted = true; + } + return this._array; + }; + + exports.MappingList = MappingList; + + +/***/ }), +/* 7 */ +/***/ (function(module, exports, __webpack_require__) { + + /* -*- Mode: js; js-indent-level: 2; -*- */ + /* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + + var util = __webpack_require__(4); + var binarySearch = __webpack_require__(8); + var ArraySet = __webpack_require__(5).ArraySet; + var base64VLQ = __webpack_require__(2); + var quickSort = __webpack_require__(9).quickSort; + + function SourceMapConsumer(aSourceMap, aSourceMapURL) { + var sourceMap = aSourceMap; + if (typeof aSourceMap === 'string') { + sourceMap = util.parseSourceMapInput(aSourceMap); + } + + return sourceMap.sections != null + ? new IndexedSourceMapConsumer(sourceMap, aSourceMapURL) + : new BasicSourceMapConsumer(sourceMap, aSourceMapURL); + } + + SourceMapConsumer.fromSourceMap = function(aSourceMap, aSourceMapURL) { + return BasicSourceMapConsumer.fromSourceMap(aSourceMap, aSourceMapURL); + } + + /** + * The version of the source mapping spec that we are consuming. + */ + SourceMapConsumer.prototype._version = 3; + + // `__generatedMappings` and `__originalMappings` are arrays that hold the + // parsed mapping coordinates from the source map's "mappings" attribute. 
They + // are lazily instantiated, accessed via the `_generatedMappings` and + // `_originalMappings` getters respectively, and we only parse the mappings + // and create these arrays once queried for a source location. We jump through + // these hoops because there can be many thousands of mappings, and parsing + // them is expensive, so we only want to do it if we must. + // + // Each object in the arrays is of the form: + // + // { + // generatedLine: The line number in the generated code, + // generatedColumn: The column number in the generated code, + // source: The path to the original source file that generated this + // chunk of code, + // originalLine: The line number in the original source that + // corresponds to this chunk of generated code, + // originalColumn: The column number in the original source that + // corresponds to this chunk of generated code, + // name: The name of the original symbol which generated this chunk of + // code. + // } + // + // All properties except for `generatedLine` and `generatedColumn` can be + // `null`. + // + // `_generatedMappings` is ordered by the generated positions. + // + // `_originalMappings` is ordered by the original positions. + + SourceMapConsumer.prototype.__generatedMappings = null; + Object.defineProperty(SourceMapConsumer.prototype, '_generatedMappings', { + configurable: true, + enumerable: true, + get: function () { + if (!this.__generatedMappings) { + this._parseMappings(this._mappings, this.sourceRoot); + } + + return this.__generatedMappings; + } + }); + + SourceMapConsumer.prototype.__originalMappings = null; + Object.defineProperty(SourceMapConsumer.prototype, '_originalMappings', { + configurable: true, + enumerable: true, + get: function () { + if (!this.__originalMappings) { + this._parseMappings(this._mappings, this.sourceRoot); + } + + return this.__originalMappings; + } + }); + + SourceMapConsumer.prototype._charIsMappingSeparator = + function SourceMapConsumer_charIsMappingSeparator(aStr, index) { + var c = aStr.charAt(index); + return c === ";" || c === ","; + }; + + /** + * Parse the mappings in a string in to a data structure which we can easily + * query (the ordered arrays in the `this.__generatedMappings` and + * `this.__originalMappings` properties). + */ + SourceMapConsumer.prototype._parseMappings = + function SourceMapConsumer_parseMappings(aStr, aSourceRoot) { + throw new Error("Subclasses must implement _parseMappings"); + }; + + SourceMapConsumer.GENERATED_ORDER = 1; + SourceMapConsumer.ORIGINAL_ORDER = 2; + + SourceMapConsumer.GREATEST_LOWER_BOUND = 1; + SourceMapConsumer.LEAST_UPPER_BOUND = 2; + + /** + * Iterate over each mapping between an original source/line/column and a + * generated line/column in this source map. + * + * @param Function aCallback + * The function that is called with each mapping. + * @param Object aContext + * Optional. If specified, this object will be the value of `this` every + * time that `aCallback` is called. + * @param aOrder + * Either `SourceMapConsumer.GENERATED_ORDER` or + * `SourceMapConsumer.ORIGINAL_ORDER`. Specifies whether you want to + * iterate over the mappings sorted by the generated file's line/column + * order or the original's source/line/column order, respectively. Defaults to + * `SourceMapConsumer.GENERATED_ORDER`. 
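+    *
+    * Illustrative usage (editorial sketch; `consumer` is any SourceMapConsumer
+    * instance and the logged values depend on the map being consumed):
+    *
+    *   consumer.eachMapping(function (m) {
+    *     console.log(m.source + ":" + m.originalLine + ":" + m.originalColumn +
+    *                 " -> " + m.generatedLine + ":" + m.generatedColumn);
+    *   });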
+ */ + SourceMapConsumer.prototype.eachMapping = + function SourceMapConsumer_eachMapping(aCallback, aContext, aOrder) { + var context = aContext || null; + var order = aOrder || SourceMapConsumer.GENERATED_ORDER; + + var mappings; + switch (order) { + case SourceMapConsumer.GENERATED_ORDER: + mappings = this._generatedMappings; + break; + case SourceMapConsumer.ORIGINAL_ORDER: + mappings = this._originalMappings; + break; + default: + throw new Error("Unknown order of iteration."); + } + + var sourceRoot = this.sourceRoot; + mappings.map(function (mapping) { + var source = mapping.source === null ? null : this._sources.at(mapping.source); + source = util.computeSourceURL(sourceRoot, source, this._sourceMapURL); + return { + source: source, + generatedLine: mapping.generatedLine, + generatedColumn: mapping.generatedColumn, + originalLine: mapping.originalLine, + originalColumn: mapping.originalColumn, + name: mapping.name === null ? null : this._names.at(mapping.name) + }; + }, this).forEach(aCallback, context); + }; + + /** + * Returns all generated line and column information for the original source, + * line, and column provided. If no column is provided, returns all mappings + * corresponding to a either the line we are searching for or the next + * closest line that has any mappings. Otherwise, returns all mappings + * corresponding to the given line and either the column we are searching for + * or the next closest column that has any offsets. + * + * The only argument is an object with the following properties: + * + * - source: The filename of the original source. + * - line: The line number in the original source. The line number is 1-based. + * - column: Optional. the column number in the original source. + * The column number is 0-based. + * + * and an array of objects is returned, each with the following properties: + * + * - line: The line number in the generated source, or null. The + * line number is 1-based. + * - column: The column number in the generated source, or null. + * The column number is 0-based. + */ + SourceMapConsumer.prototype.allGeneratedPositionsFor = + function SourceMapConsumer_allGeneratedPositionsFor(aArgs) { + var line = util.getArg(aArgs, 'line'); + + // When there is no exact match, BasicSourceMapConsumer.prototype._findMapping + // returns the index of the closest mapping less than the needle. By + // setting needle.originalColumn to 0, we thus find the last mapping for + // the given line, provided such a mapping exists. + var needle = { + source: util.getArg(aArgs, 'source'), + originalLine: line, + originalColumn: util.getArg(aArgs, 'column', 0) + }; + + needle.source = this._findSourceIndex(needle.source); + if (needle.source < 0) { + return []; + } + + var mappings = []; + + var index = this._findMapping(needle, + this._originalMappings, + "originalLine", + "originalColumn", + util.compareByOriginalPositions, + binarySearch.LEAST_UPPER_BOUND); + if (index >= 0) { + var mapping = this._originalMappings[index]; + + if (aArgs.column === undefined) { + var originalLine = mapping.originalLine; + + // Iterate until either we run out of mappings, or we run into + // a mapping for a different line than the one we found. Since + // mappings are sorted, this is guaranteed to find all mappings for + // the line we found. 
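+         // Illustrative shape of the result (editorial sketch; the values are
+         // hypothetical and depend on the map): a call such as
+         //
+         //   consumer.allGeneratedPositionsFor({ source: "foo.js", line: 2 })
+         //
+         // collects one entry per mapping gathered by the loop below, e.g.
+         //   [ { line: 1, column: 8,  lastColumn: null },
+         //     { line: 1, column: 20, lastColumn: null } ]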
+ while (mapping && mapping.originalLine === originalLine) { + mappings.push({ + line: util.getArg(mapping, 'generatedLine', null), + column: util.getArg(mapping, 'generatedColumn', null), + lastColumn: util.getArg(mapping, 'lastGeneratedColumn', null) + }); + + mapping = this._originalMappings[++index]; + } + } else { + var originalColumn = mapping.originalColumn; + + // Iterate until either we run out of mappings, or we run into + // a mapping for a different line than the one we were searching for. + // Since mappings are sorted, this is guaranteed to find all mappings for + // the line we are searching for. + while (mapping && + mapping.originalLine === line && + mapping.originalColumn == originalColumn) { + mappings.push({ + line: util.getArg(mapping, 'generatedLine', null), + column: util.getArg(mapping, 'generatedColumn', null), + lastColumn: util.getArg(mapping, 'lastGeneratedColumn', null) + }); + + mapping = this._originalMappings[++index]; + } + } + } + + return mappings; + }; + + exports.SourceMapConsumer = SourceMapConsumer; + + /** + * A BasicSourceMapConsumer instance represents a parsed source map which we can + * query for information about the original file positions by giving it a file + * position in the generated source. + * + * The first parameter is the raw source map (either as a JSON string, or + * already parsed to an object). According to the spec, source maps have the + * following attributes: + * + * - version: Which version of the source map spec this map is following. + * - sources: An array of URLs to the original source files. + * - names: An array of identifiers which can be referrenced by individual mappings. + * - sourceRoot: Optional. The URL root from which all sources are relative. + * - sourcesContent: Optional. An array of contents of the original source files. + * - mappings: A string of base64 VLQs which contain the actual mappings. + * - file: Optional. The generated file this source map is associated with. + * + * Here is an example source map, taken from the source map spec[0]: + * + * { + * version : 3, + * file: "out.js", + * sourceRoot : "", + * sources: ["foo.js", "bar.js"], + * names: ["src", "maps", "are", "fun"], + * mappings: "AA,AB;;ABCDE;" + * } + * + * The second parameter, if given, is a string whose value is the URL + * at which the source map was found. This URL is used to compute the + * sources array. + * + * [0]: https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit?pli=1# + */ + function BasicSourceMapConsumer(aSourceMap, aSourceMapURL) { + var sourceMap = aSourceMap; + if (typeof aSourceMap === 'string') { + sourceMap = util.parseSourceMapInput(aSourceMap); + } + + var version = util.getArg(sourceMap, 'version'); + var sources = util.getArg(sourceMap, 'sources'); + // Sass 3.3 leaves out the 'names' array, so we deviate from the spec (which + // requires the array) to play nice here. + var names = util.getArg(sourceMap, 'names', []); + var sourceRoot = util.getArg(sourceMap, 'sourceRoot', null); + var sourcesContent = util.getArg(sourceMap, 'sourcesContent', null); + var mappings = util.getArg(sourceMap, 'mappings'); + var file = util.getArg(sourceMap, 'file', null); + + // Once again, Sass deviates from the spec and supplies the version as a + // string rather than a number, so we use loose equality checking here. 
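+     // (Editorial note: with loose equality, '3' == 3 is true, so both the
+     // numeric and the string form of the version are accepted below.)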
+ if (version != this._version) { + throw new Error('Unsupported version: ' + version); + } + + if (sourceRoot) { + sourceRoot = util.normalize(sourceRoot); + } + + sources = sources + .map(String) + // Some source maps produce relative source paths like "./foo.js" instead of + // "foo.js". Normalize these first so that future comparisons will succeed. + // See bugzil.la/1090768. + .map(util.normalize) + // Always ensure that absolute sources are internally stored relative to + // the source root, if the source root is absolute. Not doing this would + // be particularly problematic when the source root is a prefix of the + // source (valid, but why??). See github issue #199 and bugzil.la/1188982. + .map(function (source) { + return sourceRoot && util.isAbsolute(sourceRoot) && util.isAbsolute(source) + ? util.relative(sourceRoot, source) + : source; + }); + + // Pass `true` below to allow duplicate names and sources. While source maps + // are intended to be compressed and deduplicated, the TypeScript compiler + // sometimes generates source maps with duplicates in them. See Github issue + // #72 and bugzil.la/889492. + this._names = ArraySet.fromArray(names.map(String), true); + this._sources = ArraySet.fromArray(sources, true); + + this._absoluteSources = this._sources.toArray().map(function (s) { + return util.computeSourceURL(sourceRoot, s, aSourceMapURL); + }); + + this.sourceRoot = sourceRoot; + this.sourcesContent = sourcesContent; + this._mappings = mappings; + this._sourceMapURL = aSourceMapURL; + this.file = file; + } + + BasicSourceMapConsumer.prototype = Object.create(SourceMapConsumer.prototype); + BasicSourceMapConsumer.prototype.consumer = SourceMapConsumer; + + /** + * Utility function to find the index of a source. Returns -1 if not + * found. + */ + BasicSourceMapConsumer.prototype._findSourceIndex = function(aSource) { + var relativeSource = aSource; + if (this.sourceRoot != null) { + relativeSource = util.relative(this.sourceRoot, relativeSource); + } + + if (this._sources.has(relativeSource)) { + return this._sources.indexOf(relativeSource); + } + + // Maybe aSource is an absolute URL as returned by |sources|. In + // this case we can't simply undo the transform. + var i; + for (i = 0; i < this._absoluteSources.length; ++i) { + if (this._absoluteSources[i] == aSource) { + return i; + } + } + + return -1; + }; + + /** + * Create a BasicSourceMapConsumer from a SourceMapGenerator. + * + * @param SourceMapGenerator aSourceMap + * The source map that will be consumed. 
+ * @param String aSourceMapURL + * The URL at which the source map can be found (optional) + * @returns BasicSourceMapConsumer + */ + BasicSourceMapConsumer.fromSourceMap = + function SourceMapConsumer_fromSourceMap(aSourceMap, aSourceMapURL) { + var smc = Object.create(BasicSourceMapConsumer.prototype); + + var names = smc._names = ArraySet.fromArray(aSourceMap._names.toArray(), true); + var sources = smc._sources = ArraySet.fromArray(aSourceMap._sources.toArray(), true); + smc.sourceRoot = aSourceMap._sourceRoot; + smc.sourcesContent = aSourceMap._generateSourcesContent(smc._sources.toArray(), + smc.sourceRoot); + smc.file = aSourceMap._file; + smc._sourceMapURL = aSourceMapURL; + smc._absoluteSources = smc._sources.toArray().map(function (s) { + return util.computeSourceURL(smc.sourceRoot, s, aSourceMapURL); + }); + + // Because we are modifying the entries (by converting string sources and + // names to indices into the sources and names ArraySets), we have to make + // a copy of the entry or else bad things happen. Shared mutable state + // strikes again! See github issue #191. + + var generatedMappings = aSourceMap._mappings.toArray().slice(); + var destGeneratedMappings = smc.__generatedMappings = []; + var destOriginalMappings = smc.__originalMappings = []; + + for (var i = 0, length = generatedMappings.length; i < length; i++) { + var srcMapping = generatedMappings[i]; + var destMapping = new Mapping; + destMapping.generatedLine = srcMapping.generatedLine; + destMapping.generatedColumn = srcMapping.generatedColumn; + + if (srcMapping.source) { + destMapping.source = sources.indexOf(srcMapping.source); + destMapping.originalLine = srcMapping.originalLine; + destMapping.originalColumn = srcMapping.originalColumn; + + if (srcMapping.name) { + destMapping.name = names.indexOf(srcMapping.name); + } + + destOriginalMappings.push(destMapping); + } + + destGeneratedMappings.push(destMapping); + } + + quickSort(smc.__originalMappings, util.compareByOriginalPositions); + + return smc; + }; + + /** + * The version of the source mapping spec that we are consuming. + */ + BasicSourceMapConsumer.prototype._version = 3; + + /** + * The list of original sources. + */ + Object.defineProperty(BasicSourceMapConsumer.prototype, 'sources', { + get: function () { + return this._absoluteSources.slice(); + } + }); + + /** + * Provide the JIT with a nice shape / hidden class. + */ + function Mapping() { + this.generatedLine = 0; + this.generatedColumn = 0; + this.source = null; + this.originalLine = null; + this.originalColumn = null; + this.name = null; + } + + /** + * Parse the mappings in a string in to a data structure which we can easily + * query (the ordered arrays in the `this.__generatedMappings` and + * `this.__originalMappings` properties). 
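+    *
+    * (Editorial note, for orientation; this paraphrases the source map format
+    * rather than adding behaviour: in the "mappings" string, ";" starts a new
+    * generated line, "," separates segments within a line, and each segment
+    * holds 1, 4, or 5 VLQ-encoded deltas in the order
+    *   [ generatedColumn, sourceIndex, originalLine, originalColumn, nameIndex ].)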
+ */ + BasicSourceMapConsumer.prototype._parseMappings = + function SourceMapConsumer_parseMappings(aStr, aSourceRoot) { + var generatedLine = 1; + var previousGeneratedColumn = 0; + var previousOriginalLine = 0; + var previousOriginalColumn = 0; + var previousSource = 0; + var previousName = 0; + var length = aStr.length; + var index = 0; + var cachedSegments = {}; + var temp = {}; + var originalMappings = []; + var generatedMappings = []; + var mapping, str, segment, end, value; + + while (index < length) { + if (aStr.charAt(index) === ';') { + generatedLine++; + index++; + previousGeneratedColumn = 0; + } + else if (aStr.charAt(index) === ',') { + index++; + } + else { + mapping = new Mapping(); + mapping.generatedLine = generatedLine; + + // Because each offset is encoded relative to the previous one, + // many segments often have the same encoding. We can exploit this + // fact by caching the parsed variable length fields of each segment, + // allowing us to avoid a second parse if we encounter the same + // segment again. + for (end = index; end < length; end++) { + if (this._charIsMappingSeparator(aStr, end)) { + break; + } + } + str = aStr.slice(index, end); + + segment = cachedSegments[str]; + if (segment) { + index += str.length; + } else { + segment = []; + while (index < end) { + base64VLQ.decode(aStr, index, temp); + value = temp.value; + index = temp.rest; + segment.push(value); + } + + if (segment.length === 2) { + throw new Error('Found a source, but no line and column'); + } + + if (segment.length === 3) { + throw new Error('Found a source and line, but no column'); + } + + cachedSegments[str] = segment; + } + + // Generated column. + mapping.generatedColumn = previousGeneratedColumn + segment[0]; + previousGeneratedColumn = mapping.generatedColumn; + + if (segment.length > 1) { + // Original source. + mapping.source = previousSource + segment[1]; + previousSource += segment[1]; + + // Original line. + mapping.originalLine = previousOriginalLine + segment[2]; + previousOriginalLine = mapping.originalLine; + // Lines are stored 0-based + mapping.originalLine += 1; + + // Original column. + mapping.originalColumn = previousOriginalColumn + segment[3]; + previousOriginalColumn = mapping.originalColumn; + + if (segment.length > 4) { + // Original name. + mapping.name = previousName + segment[4]; + previousName += segment[4]; + } + } + + generatedMappings.push(mapping); + if (typeof mapping.originalLine === 'number') { + originalMappings.push(mapping); + } + } + } + + quickSort(generatedMappings, util.compareByGeneratedPositionsDeflated); + this.__generatedMappings = generatedMappings; + + quickSort(originalMappings, util.compareByOriginalPositions); + this.__originalMappings = originalMappings; + }; + + /** + * Find the mapping that best matches the hypothetical "needle" mapping that + * we are searching for in the given "haystack" of mappings. + */ + BasicSourceMapConsumer.prototype._findMapping = + function SourceMapConsumer_findMapping(aNeedle, aMappings, aLineName, + aColumnName, aComparator, aBias) { + // To return the position we are searching for, we must first find the + // mapping for the given position and then return the opposite position it + // points to. Because the mappings are sorted, we can use binary search to + // find the best mapping. 
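+     // (Editorial example: originalPositionFor builds a needle such as
+     //  { generatedLine: 10, generatedColumn: 35 } and searches
+     //  _generatedMappings with compareByGeneratedPositionsDeflated;
+     //  generatedPositionFor does the mirror-image lookup over
+     //  _originalMappings with compareByOriginalPositions.)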
+ + if (aNeedle[aLineName] <= 0) { + throw new TypeError('Line must be greater than or equal to 1, got ' + + aNeedle[aLineName]); + } + if (aNeedle[aColumnName] < 0) { + throw new TypeError('Column must be greater than or equal to 0, got ' + + aNeedle[aColumnName]); + } + + return binarySearch.search(aNeedle, aMappings, aComparator, aBias); + }; + + /** + * Compute the last column for each generated mapping. The last column is + * inclusive. + */ + BasicSourceMapConsumer.prototype.computeColumnSpans = + function SourceMapConsumer_computeColumnSpans() { + for (var index = 0; index < this._generatedMappings.length; ++index) { + var mapping = this._generatedMappings[index]; + + // Mappings do not contain a field for the last generated columnt. We + // can come up with an optimistic estimate, however, by assuming that + // mappings are contiguous (i.e. given two consecutive mappings, the + // first mapping ends where the second one starts). + if (index + 1 < this._generatedMappings.length) { + var nextMapping = this._generatedMappings[index + 1]; + + if (mapping.generatedLine === nextMapping.generatedLine) { + mapping.lastGeneratedColumn = nextMapping.generatedColumn - 1; + continue; + } + } + + // The last mapping for each line spans the entire line. + mapping.lastGeneratedColumn = Infinity; + } + }; + + /** + * Returns the original source, line, and column information for the generated + * source's line and column positions provided. The only argument is an object + * with the following properties: + * + * - line: The line number in the generated source. The line number + * is 1-based. + * - column: The column number in the generated source. The column + * number is 0-based. + * - bias: Either 'SourceMapConsumer.GREATEST_LOWER_BOUND' or + * 'SourceMapConsumer.LEAST_UPPER_BOUND'. Specifies whether to return the + * closest element that is smaller than or greater than the one we are + * searching for, respectively, if the exact element cannot be found. + * Defaults to 'SourceMapConsumer.GREATEST_LOWER_BOUND'. + * + * and an object is returned with the following properties: + * + * - source: The original source file, or null. + * - line: The line number in the original source, or null. The + * line number is 1-based. + * - column: The column number in the original source, or null. The + * column number is 0-based. + * - name: The original identifier, or null. 
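+    *
+    * Illustrative usage (editorial sketch; the returned values are
+    * hypothetical and depend on the map):
+    *
+    *   consumer.originalPositionFor({ line: 2, column: 10 })
+    *   // => { source: "foo.js", line: 1, column: 4, name: "bar" }
+    *   // or { source: null, line: null, column: null, name: null } on a miss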
+ */ + BasicSourceMapConsumer.prototype.originalPositionFor = + function SourceMapConsumer_originalPositionFor(aArgs) { + var needle = { + generatedLine: util.getArg(aArgs, 'line'), + generatedColumn: util.getArg(aArgs, 'column') + }; + + var index = this._findMapping( + needle, + this._generatedMappings, + "generatedLine", + "generatedColumn", + util.compareByGeneratedPositionsDeflated, + util.getArg(aArgs, 'bias', SourceMapConsumer.GREATEST_LOWER_BOUND) + ); + + if (index >= 0) { + var mapping = this._generatedMappings[index]; + + if (mapping.generatedLine === needle.generatedLine) { + var source = util.getArg(mapping, 'source', null); + if (source !== null) { + source = this._sources.at(source); + source = util.computeSourceURL(this.sourceRoot, source, this._sourceMapURL); + } + var name = util.getArg(mapping, 'name', null); + if (name !== null) { + name = this._names.at(name); + } + return { + source: source, + line: util.getArg(mapping, 'originalLine', null), + column: util.getArg(mapping, 'originalColumn', null), + name: name + }; + } + } + + return { + source: null, + line: null, + column: null, + name: null + }; + }; + + /** + * Return true if we have the source content for every source in the source + * map, false otherwise. + */ + BasicSourceMapConsumer.prototype.hasContentsOfAllSources = + function BasicSourceMapConsumer_hasContentsOfAllSources() { + if (!this.sourcesContent) { + return false; + } + return this.sourcesContent.length >= this._sources.size() && + !this.sourcesContent.some(function (sc) { return sc == null; }); + }; + + /** + * Returns the original source content. The only argument is the url of the + * original source file. Returns null if no original source content is + * available. + */ + BasicSourceMapConsumer.prototype.sourceContentFor = + function SourceMapConsumer_sourceContentFor(aSource, nullOnMissing) { + if (!this.sourcesContent) { + return null; + } + + var index = this._findSourceIndex(aSource); + if (index >= 0) { + return this.sourcesContent[index]; + } + + var relativeSource = aSource; + if (this.sourceRoot != null) { + relativeSource = util.relative(this.sourceRoot, relativeSource); + } + + var url; + if (this.sourceRoot != null + && (url = util.urlParse(this.sourceRoot))) { + // XXX: file:// URIs and absolute paths lead to unexpected behavior for + // many users. We can help them out when they expect file:// URIs to + // behave like it would if they were running a local HTTP server. See + // https://bugzilla.mozilla.org/show_bug.cgi?id=885597. + var fileUriAbsPath = relativeSource.replace(/^file:\/\//, ""); + if (url.scheme == "file" + && this._sources.has(fileUriAbsPath)) { + return this.sourcesContent[this._sources.indexOf(fileUriAbsPath)] + } + + if ((!url.path || url.path == "/") + && this._sources.has("/" + relativeSource)) { + return this.sourcesContent[this._sources.indexOf("/" + relativeSource)]; + } + } + + // This function is used recursively from + // IndexedSourceMapConsumer.prototype.sourceContentFor. In that case, we + // don't want to throw if we can't find the source - we just want to + // return null, so we provide a flag to exit gracefully. + if (nullOnMissing) { + return null; + } + else { + throw new Error('"' + relativeSource + '" is not in the SourceMap.'); + } + }; + + /** + * Returns the generated line and column information for the original source, + * line, and column positions provided. The only argument is an object with + * the following properties: + * + * - source: The filename of the original source. 
+ * - line: The line number in the original source. The line number + * is 1-based. + * - column: The column number in the original source. The column + * number is 0-based. + * - bias: Either 'SourceMapConsumer.GREATEST_LOWER_BOUND' or + * 'SourceMapConsumer.LEAST_UPPER_BOUND'. Specifies whether to return the + * closest element that is smaller than or greater than the one we are + * searching for, respectively, if the exact element cannot be found. + * Defaults to 'SourceMapConsumer.GREATEST_LOWER_BOUND'. + * + * and an object is returned with the following properties: + * + * - line: The line number in the generated source, or null. The + * line number is 1-based. + * - column: The column number in the generated source, or null. + * The column number is 0-based. + */ + BasicSourceMapConsumer.prototype.generatedPositionFor = + function SourceMapConsumer_generatedPositionFor(aArgs) { + var source = util.getArg(aArgs, 'source'); + source = this._findSourceIndex(source); + if (source < 0) { + return { + line: null, + column: null, + lastColumn: null + }; + } + + var needle = { + source: source, + originalLine: util.getArg(aArgs, 'line'), + originalColumn: util.getArg(aArgs, 'column') + }; + + var index = this._findMapping( + needle, + this._originalMappings, + "originalLine", + "originalColumn", + util.compareByOriginalPositions, + util.getArg(aArgs, 'bias', SourceMapConsumer.GREATEST_LOWER_BOUND) + ); + + if (index >= 0) { + var mapping = this._originalMappings[index]; + + if (mapping.source === needle.source) { + return { + line: util.getArg(mapping, 'generatedLine', null), + column: util.getArg(mapping, 'generatedColumn', null), + lastColumn: util.getArg(mapping, 'lastGeneratedColumn', null) + }; + } + } + + return { + line: null, + column: null, + lastColumn: null + }; + }; + + exports.BasicSourceMapConsumer = BasicSourceMapConsumer; + + /** + * An IndexedSourceMapConsumer instance represents a parsed source map which + * we can query for information. It differs from BasicSourceMapConsumer in + * that it takes "indexed" source maps (i.e. ones with a "sections" field) as + * input. + * + * The first parameter is a raw source map (either as a JSON string, or already + * parsed to an object). According to the spec for indexed source maps, they + * have the following attributes: + * + * - version: Which version of the source map spec this map is following. + * - file: Optional. The generated file this source map is associated with. + * - sections: A list of section definitions. + * + * Each value under the "sections" field has two fields: + * - offset: The offset into the original specified at which this section + * begins to apply, defined as an object with a "line" and "column" + * field. + * - map: A source map definition. This source map could also be indexed, + * but doesn't have to be. + * + * Instead of the "map" field, it's also possible to have a "url" field + * specifying a URL to retrieve a source map from, but that's currently + * unsupported. + * + * Here's an example source map, taken from the source map spec[0], but + * modified to omit a section which uses the "url" field. + * + * { + * version : 3, + * file: "app.js", + * sections: [{ + * offset: {line:100, column:10}, + * map: { + * version : 3, + * file: "section.js", + * sources: ["foo.js", "bar.js"], + * names: ["src", "maps", "are", "fun"], + * mappings: "AAAA,E;;ABCDE;" + * } + * }], + * } + * + * The second parameter, if given, is a string whose value is the URL + * at which the source map was found. 
This URL is used to compute the + * sources array. + * + * [0]: https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit#heading=h.535es3xeprgt + */ + function IndexedSourceMapConsumer(aSourceMap, aSourceMapURL) { + var sourceMap = aSourceMap; + if (typeof aSourceMap === 'string') { + sourceMap = util.parseSourceMapInput(aSourceMap); + } + + var version = util.getArg(sourceMap, 'version'); + var sections = util.getArg(sourceMap, 'sections'); + + if (version != this._version) { + throw new Error('Unsupported version: ' + version); + } + + this._sources = new ArraySet(); + this._names = new ArraySet(); + + var lastOffset = { + line: -1, + column: 0 + }; + this._sections = sections.map(function (s) { + if (s.url) { + // The url field will require support for asynchronicity. + // See https://github.com/mozilla/source-map/issues/16 + throw new Error('Support for url field in sections not implemented.'); + } + var offset = util.getArg(s, 'offset'); + var offsetLine = util.getArg(offset, 'line'); + var offsetColumn = util.getArg(offset, 'column'); + + if (offsetLine < lastOffset.line || + (offsetLine === lastOffset.line && offsetColumn < lastOffset.column)) { + throw new Error('Section offsets must be ordered and non-overlapping.'); + } + lastOffset = offset; + + return { + generatedOffset: { + // The offset fields are 0-based, but we use 1-based indices when + // encoding/decoding from VLQ. + generatedLine: offsetLine + 1, + generatedColumn: offsetColumn + 1 + }, + consumer: new SourceMapConsumer(util.getArg(s, 'map'), aSourceMapURL) + } + }); + } + + IndexedSourceMapConsumer.prototype = Object.create(SourceMapConsumer.prototype); + IndexedSourceMapConsumer.prototype.constructor = SourceMapConsumer; + + /** + * The version of the source mapping spec that we are consuming. + */ + IndexedSourceMapConsumer.prototype._version = 3; + + /** + * The list of original sources. + */ + Object.defineProperty(IndexedSourceMapConsumer.prototype, 'sources', { + get: function () { + var sources = []; + for (var i = 0; i < this._sections.length; i++) { + for (var j = 0; j < this._sections[i].consumer.sources.length; j++) { + sources.push(this._sections[i].consumer.sources[j]); + } + } + return sources; + } + }); + + /** + * Returns the original source, line, and column information for the generated + * source's line and column positions provided. The only argument is an object + * with the following properties: + * + * - line: The line number in the generated source. The line number + * is 1-based. + * - column: The column number in the generated source. The column + * number is 0-based. + * + * and an object is returned with the following properties: + * + * - source: The original source file, or null. + * - line: The line number in the original source, or null. The + * line number is 1-based. + * - column: The column number in the original source, or null. The + * column number is 0-based. + * - name: The original identifier, or null. + */ + IndexedSourceMapConsumer.prototype.originalPositionFor = + function IndexedSourceMapConsumer_originalPositionFor(aArgs) { + var needle = { + generatedLine: util.getArg(aArgs, 'line'), + generatedColumn: util.getArg(aArgs, 'column') + }; + + // Find the section containing the generated position we're trying to map + // to an original position. 
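+       // (Editorial note: with the default GREATEST_LOWER_BOUND bias, the
+       //  search below returns the last section whose generated offset is at
+       //  or before the needle, i.e. the section that contains it.)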
+ var sectionIndex = binarySearch.search(needle, this._sections, + function(needle, section) { + var cmp = needle.generatedLine - section.generatedOffset.generatedLine; + if (cmp) { + return cmp; + } + + return (needle.generatedColumn - + section.generatedOffset.generatedColumn); + }); + var section = this._sections[sectionIndex]; + + if (!section) { + return { + source: null, + line: null, + column: null, + name: null + }; + } + + return section.consumer.originalPositionFor({ + line: needle.generatedLine - + (section.generatedOffset.generatedLine - 1), + column: needle.generatedColumn - + (section.generatedOffset.generatedLine === needle.generatedLine + ? section.generatedOffset.generatedColumn - 1 + : 0), + bias: aArgs.bias + }); + }; + + /** + * Return true if we have the source content for every source in the source + * map, false otherwise. + */ + IndexedSourceMapConsumer.prototype.hasContentsOfAllSources = + function IndexedSourceMapConsumer_hasContentsOfAllSources() { + return this._sections.every(function (s) { + return s.consumer.hasContentsOfAllSources(); + }); + }; + + /** + * Returns the original source content. The only argument is the url of the + * original source file. Returns null if no original source content is + * available. + */ + IndexedSourceMapConsumer.prototype.sourceContentFor = + function IndexedSourceMapConsumer_sourceContentFor(aSource, nullOnMissing) { + for (var i = 0; i < this._sections.length; i++) { + var section = this._sections[i]; + + var content = section.consumer.sourceContentFor(aSource, true); + if (content) { + return content; + } + } + if (nullOnMissing) { + return null; + } + else { + throw new Error('"' + aSource + '" is not in the SourceMap.'); + } + }; + + /** + * Returns the generated line and column information for the original source, + * line, and column positions provided. The only argument is an object with + * the following properties: + * + * - source: The filename of the original source. + * - line: The line number in the original source. The line number + * is 1-based. + * - column: The column number in the original source. The column + * number is 0-based. + * + * and an object is returned with the following properties: + * + * - line: The line number in the generated source, or null. The + * line number is 1-based. + * - column: The column number in the generated source, or null. + * The column number is 0-based. + */ + IndexedSourceMapConsumer.prototype.generatedPositionFor = + function IndexedSourceMapConsumer_generatedPositionFor(aArgs) { + for (var i = 0; i < this._sections.length; i++) { + var section = this._sections[i]; + + // Only consider this section if the requested source is in the list of + // sources of the consumer. + if (section.consumer._findSourceIndex(util.getArg(aArgs, 'source')) === -1) { + continue; + } + var generatedPosition = section.consumer.generatedPositionFor(aArgs); + if (generatedPosition) { + var ret = { + line: generatedPosition.line + + (section.generatedOffset.generatedLine - 1), + column: generatedPosition.column + + (section.generatedOffset.generatedLine === generatedPosition.line + ? section.generatedOffset.generatedColumn - 1 + : 0) + }; + return ret; + } + } + + return { + line: null, + column: null + }; + }; + + /** + * Parse the mappings in a string in to a data structure which we can easily + * query (the ordered arrays in the `this.__generatedMappings` and + * `this.__originalMappings` properties). 
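+    *
+    * (Editorial note: each section's mappings are re-based below by adding
+    * the section's generated offset, so that positions become relative to
+    * the start of the concatenated generated file.)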
+ */ + IndexedSourceMapConsumer.prototype._parseMappings = + function IndexedSourceMapConsumer_parseMappings(aStr, aSourceRoot) { + this.__generatedMappings = []; + this.__originalMappings = []; + for (var i = 0; i < this._sections.length; i++) { + var section = this._sections[i]; + var sectionMappings = section.consumer._generatedMappings; + for (var j = 0; j < sectionMappings.length; j++) { + var mapping = sectionMappings[j]; + + var source = section.consumer._sources.at(mapping.source); + source = util.computeSourceURL(section.consumer.sourceRoot, source, this._sourceMapURL); + this._sources.add(source); + source = this._sources.indexOf(source); + + var name = null; + if (mapping.name) { + name = section.consumer._names.at(mapping.name); + this._names.add(name); + name = this._names.indexOf(name); + } + + // The mappings coming from the consumer for the section have + // generated positions relative to the start of the section, so we + // need to offset them to be relative to the start of the concatenated + // generated file. + var adjustedMapping = { + source: source, + generatedLine: mapping.generatedLine + + (section.generatedOffset.generatedLine - 1), + generatedColumn: mapping.generatedColumn + + (section.generatedOffset.generatedLine === mapping.generatedLine + ? section.generatedOffset.generatedColumn - 1 + : 0), + originalLine: mapping.originalLine, + originalColumn: mapping.originalColumn, + name: name + }; + + this.__generatedMappings.push(adjustedMapping); + if (typeof adjustedMapping.originalLine === 'number') { + this.__originalMappings.push(adjustedMapping); + } + } + } + + quickSort(this.__generatedMappings, util.compareByGeneratedPositionsDeflated); + quickSort(this.__originalMappings, util.compareByOriginalPositions); + }; + + exports.IndexedSourceMapConsumer = IndexedSourceMapConsumer; + + +/***/ }), +/* 8 */ +/***/ (function(module, exports) { + + /* -*- Mode: js; js-indent-level: 2; -*- */ + /* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + + exports.GREATEST_LOWER_BOUND = 1; + exports.LEAST_UPPER_BOUND = 2; + + /** + * Recursive implementation of binary search. + * + * @param aLow Indices here and lower do not contain the needle. + * @param aHigh Indices here and higher do not contain the needle. + * @param aNeedle The element being searched for. + * @param aHaystack The non-empty array being searched. + * @param aCompare Function which takes two elements and returns -1, 0, or 1. + * @param aBias Either 'binarySearch.GREATEST_LOWER_BOUND' or + * 'binarySearch.LEAST_UPPER_BOUND'. Specifies whether to return the + * closest element that is smaller than or greater than the one we are + * searching for, respectively, if the exact element cannot be found. + */ + function recursiveSearch(aLow, aHigh, aNeedle, aHaystack, aCompare, aBias) { + // This function terminates when one of the following is true: + // + // 1. We find the exact element we are looking for. + // + // 2. We did not find the exact element, but we can return the index of + // the next-closest element. + // + // 3. We did not find the exact element, and there is no next-closest + // element than the one we are searching for, so we return -1. + var mid = Math.floor((aHigh - aLow) / 2) + aLow; + var cmp = aCompare(aNeedle, aHaystack[mid], true); + if (cmp === 0) { + // Found the element we are looking for. 
+ return mid; + } + else if (cmp > 0) { + // Our needle is greater than aHaystack[mid]. + if (aHigh - mid > 1) { + // The element is in the upper half. + return recursiveSearch(mid, aHigh, aNeedle, aHaystack, aCompare, aBias); + } + + // The exact needle element was not found in this haystack. Determine if + // we are in termination case (3) or (2) and return the appropriate thing. + if (aBias == exports.LEAST_UPPER_BOUND) { + return aHigh < aHaystack.length ? aHigh : -1; + } else { + return mid; + } + } + else { + // Our needle is less than aHaystack[mid]. + if (mid - aLow > 1) { + // The element is in the lower half. + return recursiveSearch(aLow, mid, aNeedle, aHaystack, aCompare, aBias); + } + + // we are in termination case (3) or (2) and return the appropriate thing. + if (aBias == exports.LEAST_UPPER_BOUND) { + return mid; + } else { + return aLow < 0 ? -1 : aLow; + } + } + } + + /** + * This is an implementation of binary search which will always try and return + * the index of the closest element if there is no exact hit. This is because + * mappings between original and generated line/col pairs are single points, + * and there is an implicit region between each of them, so a miss just means + * that you aren't on the very start of a region. + * + * @param aNeedle The element you are looking for. + * @param aHaystack The array that is being searched. + * @param aCompare A function which takes the needle and an element in the + * array and returns -1, 0, or 1 depending on whether the needle is less + * than, equal to, or greater than the element, respectively. + * @param aBias Either 'binarySearch.GREATEST_LOWER_BOUND' or + * 'binarySearch.LEAST_UPPER_BOUND'. Specifies whether to return the + * closest element that is smaller than or greater than the one we are + * searching for, respectively, if the exact element cannot be found. + * Defaults to 'binarySearch.GREATEST_LOWER_BOUND'. + */ + exports.search = function search(aNeedle, aHaystack, aCompare, aBias) { + if (aHaystack.length === 0) { + return -1; + } + + var index = recursiveSearch(-1, aHaystack.length, aNeedle, aHaystack, + aCompare, aBias || exports.GREATEST_LOWER_BOUND); + if (index < 0) { + return -1; + } + + // We have found either the exact element, or the next-closest element than + // the one we are searching for. However, there may be more than one such + // element. Make sure we always return the smallest of these. + while (index - 1 >= 0) { + if (aCompare(aHaystack[index], aHaystack[index - 1], true) !== 0) { + break; + } + --index; + } + + return index; + }; + + +/***/ }), +/* 9 */ +/***/ (function(module, exports) { + + /* -*- Mode: js; js-indent-level: 2; -*- */ + /* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + + // It turns out that some (most?) JavaScript engines don't self-host + // `Array.prototype.sort`. This makes sense because C++ will likely remain + // faster than JS when doing raw CPU-intensive sorting. However, when using a + // custom comparator function, calling back and forth between the VM's C++ and + // JIT'd JS is rather slow *and* loses JIT type information, resulting in + // worse generated code for the comparator function than would be optimal. In + // fact, when sorting with a comparator, these costs outweigh the benefits of + // sorting in C++. By using our own JS-implemented Quick Sort (below), we get + // a ~3500ms mean speed-up in `bench/bench.html`. 
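+   //
+   // Illustrative usage from elsewhere in this bundle (editorial note):
+   //
+   //   quickSort(generatedMappings, util.compareByGeneratedPositionsDeflated);
+   //
+   // sorts the freshly parsed mappings in place with the exported comparator.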
+ + /** + * Swap the elements indexed by `x` and `y` in the array `ary`. + * + * @param {Array} ary + * The array. + * @param {Number} x + * The index of the first item. + * @param {Number} y + * The index of the second item. + */ + function swap(ary, x, y) { + var temp = ary[x]; + ary[x] = ary[y]; + ary[y] = temp; + } + + /** + * Returns a random integer within the range `low .. high` inclusive. + * + * @param {Number} low + * The lower bound on the range. + * @param {Number} high + * The upper bound on the range. + */ + function randomIntInRange(low, high) { + return Math.round(low + (Math.random() * (high - low))); + } + + /** + * The Quick Sort algorithm. + * + * @param {Array} ary + * An array to sort. + * @param {function} comparator + * Function to use to compare two items. + * @param {Number} p + * Start index of the array + * @param {Number} r + * End index of the array + */ + function doQuickSort(ary, comparator, p, r) { + // If our lower bound is less than our upper bound, we (1) partition the + // array into two pieces and (2) recurse on each half. If it is not, this is + // the empty array and our base case. + + if (p < r) { + // (1) Partitioning. + // + // The partitioning chooses a pivot between `p` and `r` and moves all + // elements that are less than or equal to the pivot to the before it, and + // all the elements that are greater than it after it. The effect is that + // once partition is done, the pivot is in the exact place it will be when + // the array is put in sorted order, and it will not need to be moved + // again. This runs in O(n) time. + + // Always choose a random pivot so that an input array which is reverse + // sorted does not cause O(n^2) running time. + var pivotIndex = randomIntInRange(p, r); + var i = p - 1; + + swap(ary, pivotIndex, r); + var pivot = ary[r]; + + // Immediately after `j` is incremented in this loop, the following hold + // true: + // + // * Every element in `ary[p .. i]` is less than or equal to the pivot. + // + // * Every element in `ary[i+1 .. j-1]` is greater than the pivot. + for (var j = p; j < r; j++) { + if (comparator(ary[j], pivot) <= 0) { + i += 1; + swap(ary, i, j); + } + } + + swap(ary, i + 1, j); + var q = i + 1; + + // (2) Recurse on each half. + + doQuickSort(ary, comparator, p, q - 1); + doQuickSort(ary, comparator, q + 1, r); + } + } + + /** + * Sort the given array in-place with the given comparator function. + * + * @param {Array} ary + * An array to sort. + * @param {function} comparator + * Function to use to compare two items. + */ + exports.quickSort = function (ary, comparator) { + doQuickSort(ary, comparator, 0, ary.length - 1); + }; + + +/***/ }), +/* 10 */ +/***/ (function(module, exports, __webpack_require__) { + + /* -*- Mode: js; js-indent-level: 2; -*- */ + /* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + + var SourceMapGenerator = __webpack_require__(1).SourceMapGenerator; + var util = __webpack_require__(4); + + // Matches a Windows-style `\r\n` newline or a `\n` newline used by all other + // operating systems these days (capturing the result). + var REGEX_NEWLINE = /(\r?\n)/; + + // Newline character code for charCodeAt() comparisons + var NEWLINE_CODE = 10; + + // Private symbol for identifying `SourceNode`s when multiple versions of + // the source-map library are loaded. This MUST NOT CHANGE across + // versions! 
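+   // (Editorial note: checking `chunk[isSourceNode]` is duck-typing; unlike
+   //  `chunk instanceof SourceNode`, it still recognizes nodes created by a
+   //  second loaded copy of the library.)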
+ var isSourceNode = "$$$isSourceNode$$$"; + + /** + * SourceNodes provide a way to abstract over interpolating/concatenating + * snippets of generated JavaScript source code while maintaining the line and + * column information associated with the original source code. + * + * @param aLine The original line number. + * @param aColumn The original column number. + * @param aSource The original source's filename. + * @param aChunks Optional. An array of strings which are snippets of + * generated JS, or other SourceNodes. + * @param aName The original identifier. + */ + function SourceNode(aLine, aColumn, aSource, aChunks, aName) { + this.children = []; + this.sourceContents = {}; + this.line = aLine == null ? null : aLine; + this.column = aColumn == null ? null : aColumn; + this.source = aSource == null ? null : aSource; + this.name = aName == null ? null : aName; + this[isSourceNode] = true; + if (aChunks != null) this.add(aChunks); + } + + /** + * Creates a SourceNode from generated code and a SourceMapConsumer. + * + * @param aGeneratedCode The generated code + * @param aSourceMapConsumer The SourceMap for the generated code + * @param aRelativePath Optional. The path that relative sources in the + * SourceMapConsumer should be relative to. + */ + SourceNode.fromStringWithSourceMap = + function SourceNode_fromStringWithSourceMap(aGeneratedCode, aSourceMapConsumer, aRelativePath) { + // The SourceNode we want to fill with the generated code + // and the SourceMap + var node = new SourceNode(); + + // All even indices of this array are one line of the generated code, + // while all odd indices are the newlines between two adjacent lines + // (since `REGEX_NEWLINE` captures its match). + // Processed fragments are accessed by calling `shiftNextLine`. + var remainingLines = aGeneratedCode.split(REGEX_NEWLINE); + var remainingLinesIndex = 0; + var shiftNextLine = function() { + var lineContents = getNextLine(); + // The last line of a file might not have a newline. + var newLine = getNextLine() || ""; + return lineContents + newLine; + + function getNextLine() { + return remainingLinesIndex < remainingLines.length ? + remainingLines[remainingLinesIndex++] : undefined; + } + }; + + // We need to remember the position of "remainingLines" + var lastGeneratedLine = 1, lastGeneratedColumn = 0; + + // The generate SourceNodes we need a code range. + // To extract it current and last mapping is used. + // Here we store the last mapping. + var lastMapping = null; + + aSourceMapConsumer.eachMapping(function (mapping) { + if (lastMapping !== null) { + // We add the code from "lastMapping" to "mapping": + // First check if there is a new line in between. + if (lastGeneratedLine < mapping.generatedLine) { + // Associate first line with "lastMapping" + addMappingWithCode(lastMapping, shiftNextLine()); + lastGeneratedLine++; + lastGeneratedColumn = 0; + // The remaining code is added without mapping + } else { + // There is no new line in between. 
+ // Associate the code between "lastGeneratedColumn" and + // "mapping.generatedColumn" with "lastMapping" + var nextLine = remainingLines[remainingLinesIndex] || ''; + var code = nextLine.substr(0, mapping.generatedColumn - + lastGeneratedColumn); + remainingLines[remainingLinesIndex] = nextLine.substr(mapping.generatedColumn - + lastGeneratedColumn); + lastGeneratedColumn = mapping.generatedColumn; + addMappingWithCode(lastMapping, code); + // No more remaining code, continue + lastMapping = mapping; + return; + } + } + // We add the generated code until the first mapping + // to the SourceNode without any mapping. + // Each line is added as separate string. + while (lastGeneratedLine < mapping.generatedLine) { + node.add(shiftNextLine()); + lastGeneratedLine++; + } + if (lastGeneratedColumn < mapping.generatedColumn) { + var nextLine = remainingLines[remainingLinesIndex] || ''; + node.add(nextLine.substr(0, mapping.generatedColumn)); + remainingLines[remainingLinesIndex] = nextLine.substr(mapping.generatedColumn); + lastGeneratedColumn = mapping.generatedColumn; + } + lastMapping = mapping; + }, this); + // We have processed all mappings. + if (remainingLinesIndex < remainingLines.length) { + if (lastMapping) { + // Associate the remaining code in the current line with "lastMapping" + addMappingWithCode(lastMapping, shiftNextLine()); + } + // and add the remaining lines without any mapping + node.add(remainingLines.splice(remainingLinesIndex).join("")); + } + + // Copy sourcesContent into SourceNode + aSourceMapConsumer.sources.forEach(function (sourceFile) { + var content = aSourceMapConsumer.sourceContentFor(sourceFile); + if (content != null) { + if (aRelativePath != null) { + sourceFile = util.join(aRelativePath, sourceFile); + } + node.setSourceContent(sourceFile, content); + } + }); + + return node; + + function addMappingWithCode(mapping, code) { + if (mapping === null || mapping.source === undefined) { + node.add(code); + } else { + var source = aRelativePath + ? util.join(aRelativePath, mapping.source) + : mapping.source; + node.add(new SourceNode(mapping.originalLine, + mapping.originalColumn, + source, + code, + mapping.name)); + } + } + }; + + /** + * Add a chunk of generated JS to this source node. + * + * @param aChunk A string snippet of generated JS code, another instance of + * SourceNode, or an array where each member is one of those things. + */ + SourceNode.prototype.add = function SourceNode_add(aChunk) { + if (Array.isArray(aChunk)) { + aChunk.forEach(function (chunk) { + this.add(chunk); + }, this); + } + else if (aChunk[isSourceNode] || typeof aChunk === "string") { + if (aChunk) { + this.children.push(aChunk); + } + } + else { + throw new TypeError( + "Expected a SourceNode, string, or an array of SourceNodes and strings. Got " + aChunk + ); + } + return this; + }; + + /** + * Add a chunk of generated JS to the beginning of this source node. + * + * @param aChunk A string snippet of generated JS code, another instance of + * SourceNode, or an array where each member is one of those things. + */ + SourceNode.prototype.prepend = function SourceNode_prepend(aChunk) { + if (Array.isArray(aChunk)) { + for (var i = aChunk.length-1; i >= 0; i--) { + this.prepend(aChunk[i]); + } + } + else if (aChunk[isSourceNode] || typeof aChunk === "string") { + this.children.unshift(aChunk); + } + else { + throw new TypeError( + "Expected a SourceNode, string, or an array of SourceNodes and strings. 
Got " + aChunk + ); + } + return this; + }; + + /** + * Walk over the tree of JS snippets in this node and its children. The + * walking function is called once for each snippet of JS and is passed that + * snippet and the its original associated source's line/column location. + * + * @param aFn The traversal function. + */ + SourceNode.prototype.walk = function SourceNode_walk(aFn) { + var chunk; + for (var i = 0, len = this.children.length; i < len; i++) { + chunk = this.children[i]; + if (chunk[isSourceNode]) { + chunk.walk(aFn); + } + else { + if (chunk !== '') { + aFn(chunk, { source: this.source, + line: this.line, + column: this.column, + name: this.name }); + } + } + } + }; + + /** + * Like `String.prototype.join` except for SourceNodes. Inserts `aStr` between + * each of `this.children`. + * + * @param aSep The separator. + */ + SourceNode.prototype.join = function SourceNode_join(aSep) { + var newChildren; + var i; + var len = this.children.length; + if (len > 0) { + newChildren = []; + for (i = 0; i < len-1; i++) { + newChildren.push(this.children[i]); + newChildren.push(aSep); + } + newChildren.push(this.children[i]); + this.children = newChildren; + } + return this; + }; + + /** + * Call String.prototype.replace on the very right-most source snippet. Useful + * for trimming whitespace from the end of a source node, etc. + * + * @param aPattern The pattern to replace. + * @param aReplacement The thing to replace the pattern with. + */ + SourceNode.prototype.replaceRight = function SourceNode_replaceRight(aPattern, aReplacement) { + var lastChild = this.children[this.children.length - 1]; + if (lastChild[isSourceNode]) { + lastChild.replaceRight(aPattern, aReplacement); + } + else if (typeof lastChild === 'string') { + this.children[this.children.length - 1] = lastChild.replace(aPattern, aReplacement); + } + else { + this.children.push(''.replace(aPattern, aReplacement)); + } + return this; + }; + + /** + * Set the source content for a source file. This will be added to the SourceMapGenerator + * in the sourcesContent field. + * + * @param aSourceFile The filename of the source file + * @param aSourceContent The content of the source file + */ + SourceNode.prototype.setSourceContent = + function SourceNode_setSourceContent(aSourceFile, aSourceContent) { + this.sourceContents[util.toSetString(aSourceFile)] = aSourceContent; + }; + + /** + * Walk over the tree of SourceNodes. The walking function is called for each + * source file content and is passed the filename and source content. + * + * @param aFn The traversal function. + */ + SourceNode.prototype.walkSourceContents = + function SourceNode_walkSourceContents(aFn) { + for (var i = 0, len = this.children.length; i < len; i++) { + if (this.children[i][isSourceNode]) { + this.children[i].walkSourceContents(aFn); + } + } + + var sources = Object.keys(this.sourceContents); + for (var i = 0, len = sources.length; i < len; i++) { + aFn(util.fromSetString(sources[i]), this.sourceContents[sources[i]]); + } + }; + + /** + * Return the string representation of this source node. Walks over the tree + * and concatenates all the various snippets together to one string. + */ + SourceNode.prototype.toString = function SourceNode_toString() { + var str = ""; + this.walk(function (chunk) { + str += chunk; + }); + return str; + }; + + /** + * Returns the string representation of this source node along with a source + * map. 
+ */ + SourceNode.prototype.toStringWithSourceMap = function SourceNode_toStringWithSourceMap(aArgs) { + var generated = { + code: "", + line: 1, + column: 0 + }; + var map = new SourceMapGenerator(aArgs); + var sourceMappingActive = false; + var lastOriginalSource = null; + var lastOriginalLine = null; + var lastOriginalColumn = null; + var lastOriginalName = null; + this.walk(function (chunk, original) { + generated.code += chunk; + if (original.source !== null + && original.line !== null + && original.column !== null) { + if(lastOriginalSource !== original.source + || lastOriginalLine !== original.line + || lastOriginalColumn !== original.column + || lastOriginalName !== original.name) { + map.addMapping({ + source: original.source, + original: { + line: original.line, + column: original.column + }, + generated: { + line: generated.line, + column: generated.column + }, + name: original.name + }); + } + lastOriginalSource = original.source; + lastOriginalLine = original.line; + lastOriginalColumn = original.column; + lastOriginalName = original.name; + sourceMappingActive = true; + } else if (sourceMappingActive) { + map.addMapping({ + generated: { + line: generated.line, + column: generated.column + } + }); + lastOriginalSource = null; + sourceMappingActive = false; + } + for (var idx = 0, length = chunk.length; idx < length; idx++) { + if (chunk.charCodeAt(idx) === NEWLINE_CODE) { + generated.line++; + generated.column = 0; + // Mappings end at eol + if (idx + 1 === length) { + lastOriginalSource = null; + sourceMappingActive = false; + } else if (sourceMappingActive) { + map.addMapping({ + source: original.source, + original: { + line: original.line, + column: original.column + }, + generated: { + line: generated.line, + column: generated.column + }, + name: original.name + }); + } + } else { + generated.column++; + } + } + }); + this.walkSourceContents(function (sourceFile, sourceContent) { + map.setSourceContent(sourceFile, sourceContent); + }); + + return { code: generated.code, map: map }; + }; + + exports.SourceNode = SourceNode; + + +/***/ }) +/******/ ]) +}); +; +//# sourceMappingURL=data:application/json;charset=utf-8;base64,{"version":3,"sources":["webpack:///webpack/universalModuleDefinition","webpack:///webpack/bootstrap 
1624c7299b887f7bdf64","webpack:///./source-map.js","webpack:///./lib/source-map-generator.js","webpack:///./lib/base64-vlq.js","webpack:///./lib/base64.js","webpack:///./lib/util.js","webpack:///./lib/array-set.js","webpack:///./lib/mapping-list.js","webpack:///./lib/source-map-consumer.js","webpack:///./lib/binary-search.js","webpack:///./lib/quick-sort.js","webpack:///./lib/source-node.js"],"names":[],"mappings":"AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,CAAC;AACD,O;ACVA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA,uBAAe;AACf;AACA;AACA;;AAEA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;;;AAGA;AACA;;AAEA;AACA;;AAEA;AACA;;AAEA;AACA;;;;;;;ACtCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;ACPA,iBAAgB,oBAAoB;AACpC;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,MAAK;AACL;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA,MAAK;AACL;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA,MAAK;AACL;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,MAAK;AACL;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,MAAK;AACL;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,UAAS;AACT;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA,MAAK;AACL;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,MAAK;AACL;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,QAAO;AACP;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA,2CAA0C,SAAS;AACnD;AACA;;AAEA;AACA;AACA;AACA,qBAAoB;AACpB;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,MAAK;AACL;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;;;;;;;ACxaA,iBAAgB,oBAAoB;AACpC;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,4DAA2D;AAC3D,qBAAoB;AACpB;AACA;AACA;AACA;;AAEA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;;AAEA;AACA;;AAEA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,IAAG;;AAEH;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA,IAAG;;AAEH;AACA;AACA;;;;;;;AC3IA,iBAAgB,oBAAoB;AACpC;AACA;AACA;AACA;AACA;;AAEA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AAC
A,iBAAgB;AAChB,iBAAgB;;AAEhB,oBAAmB;AACnB,qBAAoB;;AAEpB,iBAAgB;AAChB,iBAAgB;;AAEhB,iBAAgB;AAChB,kBAAiB;;AAEjB;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;;;;;;;AClEA,iBAAgB,oBAAoB;AACpC;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,IAAG;AACH;AACA,IAAG;AACH;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA,+CAA8C,QAAQ;AACtD;AACA;AACA;AACA,MAAK;AACL;AACA,MAAK;AACL;AACA;AACA;AACA;AACA;AACA;AACA,QAAO;AACP;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA,EAAC;;AAED;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA,4BAA2B,QAAQ;AACnC;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA,cAAa;AACb;;AAEA;AACA,eAAc;AACd;;AAEA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA,uCAAsC;AACtC;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;;;;;;ACveA,iBAAgB,oBAAoB;AACpC;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA,uCAAsC,SAAS;AAC/C;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,MAAK;AACL;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,IAAG;AACH;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,IAAG;AACH;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;;;;;;;ACxHA,iBAAgB,oBAAoB;AACpC;AACA;AACA;AACA;AACA;;AAEA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,iBAAgB;AAChB;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,IAAG;AACH;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;;;;;;;AC9EA,iB
AAgB,oBAAoB;AACpC;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA,EAAC;;AAED;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA,EAAC;;AAED;AACA;AACA;AACA,oBAAmB;AACnB;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,MAAK;AACL;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,YAAW;;AAEX;AACA;AACA,QAAO;AACP;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,YAAW;;AAEX;AACA;AACA;AACA;;AAEA;AACA;;AAEA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,4BAA2B,MAAM;AACjC;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,MAAK;;AAEL;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA,IAAG;;AAEH;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA,cAAa,kCAAkC;AAC/C;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,MAAK;;AAEL;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA,uDAAsD,YAAY;AAClE;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;;AAEA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,EAAC;;AAED;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA,oCAAmC;AACnC;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA,0BAAyB,cAAc;AACvC;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA,UAAS;AACT;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA,wBAAuB,wCAAwC;AAC/D;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,gDAA+C,mBAAmB,EAAE;AACpE;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,kBAAiB,oBAAoB;AACrC;AACA;AACA;AACA;AACA;AACA,8BAA6B,MAAM;AACnC;AACA,QAAO;AACP;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA,QAAO;AACP;AACA;AACA,IAAG;AACH;;AAEA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA,oBAAmB,2BAA2B;AAC9C,sBAAqB,+CAA+C;AACpE;AACA;AACA;AACA;AACA;AACA,EAAC;;AAED;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA,QAAO;AACP;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,MAAK;AACL;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,MAAK;AACL;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,oBAAmB,2BAA2B;AAC9C;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,oBAAmB,2BAA2B;AAC9C;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,oBAAmB,2BAA2B;AAC9C;AACA;AACA,sBAAqB,4BAA4B;AACjD;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;;;;;;;ACxnCA,iBAAgB,oBAAoB;AACpC;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA,MAAK;AACL;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA,MAAK;AACL;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;;;;;;;AC9GA,iBAAgB,oBAAoB;AACpC;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA,YAAW,MAAM;AACjB;AACA,YAAW,OAAO;AAClB;AACA,YAAW,OAAO;AAClB;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA,YAAW,OAAO;AAClB;AACA,YAAW,OAAO;AAClB;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA,YAAW,MAAM;AACjB;AACA,YAAW,SAAS;AACpB;AACA,YAAW,OAAO;AAClB;AACA,YAAW,OAAO;AAClB;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA,oBAAmB,OAAO;AAC1B;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA,YAAW,MAAM;AACjB;AACA,YAAW,SAAS;AACpB;AACA;AACA;AACA;AACA;;;;;;;ACjHA,iBAAgB,oBAAoB;AACpC;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,UAAS;AACT;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,MAAK;AACL;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,MAAK;;AAEL;;AAEA;AACA;AACA;AACA,QAAO;AACP;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,MAAK;AACL;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,kCAAiC,QAAQ;AACzC;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,8CAA6C,SAAS;AACtD;AACA;AACA;AACA;AACA;AACA;AACA,qBAAoB;AACpB;AACA;AACA,uCAAsC;AACtC;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,gBAAe,WAAW;AAC1B;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,gDAA+C,SAAS;AACxD;AACA;AACA;AACA;;AAEA;AACA,0CAAyC,SAAS;AAClD;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,IAAG;AACH;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,YAAW;AACX;AACA;AACA;AACA,YAAW;AACX;AACA,UAAS;AACT;AACA;AACA;AACA;AACA;AACA;AACA,MAAK;AACL;AACA;AACA;AACA;AACA;AACA,QAAO;AACP;AACA;AACA;AACA,6CAA4C,cAAc;AAC1D;AACA;AACA;AACA;AACA;AACA;AACA;AACA,UAAS;AACT;AACA;AACA;AACA;AACA;AACA,cAAa;AACb;AACA;AACA;AACA,cAAa;AACb;AACA,YAAW;AACX;AACA,QAAO;AACP;AACA;AACA;AACA,IAAG;AACH;AACA;AACA,IAAG;;AAEH,WAAU;AACV;;AAEA","file":"source-map.debug.js","sourcesContent":["(function webpackUniversalModuleDefinition(root, factory) {\n\tif(typeof exports === 'object' && typeof module === 'object')\n\t\tmodule.exports = factory();\n\telse if(typeof define === 'function' && define.amd)\n\t\tdefine([], factory);\n\telse if(typeof exports === 'object')\n\t\texports[\"sourceMap\"] = factory();\n\telse\n\t\troot[\"sourceMap\"] = factory();\n})(this, function() {\nreturn \n\n\n// WEBPACK FOOTER //\n// webpack/universalModuleDefinition"," \t// The module cache\n \tvar installedModules = {};\n\n \t// The require function\n \tfunction __webpack_require__(moduleId) {\n\n \t\t// Check if module is in cache\n \t\tif(installedModules[moduleId])\n \t\t\treturn installedModules[moduleId].exports;\n\n \t\t// Create a new module (and put it into the cache)\n \t\tvar module = installedModules[moduleId] = {\n \t\t\texports: {},\n \t\t\tid: moduleId,\n \t\t\tloaded: false\n \t\t};\n\n \t\t// Execute the module function\n \t\tmodules[moduleId].call(module.exports, module, module.exports, __webpack_require__);\n\n \t\t// Flag the module as loaded\n \t\tmodule.loaded = true;\n\n \t\t// Return the exports of the module\n \t\treturn module.exports;\n \t}\n\n\n \t// expose the modules object (__webpack_modules__)\n \t__webpack_require__.m = modules;\n\n \t// expose the module cache\n \t__webpack_require__.c = installedModules;\n\n \t// __webpack_public_path__\n \t__webpack_require__.p = \"\";\n\n \t// Load entry module and return exports\n \treturn __webpack_require__(0);\n\n\n\n// WEBPACK FOOTER //\n// webpack/bootstrap 1624c7299b887f7bdf64","/*\n * Copyright 2009-2011 Mozilla Foundation and contributors\n * Licensed under the New BSD 
license. See LICENSE.txt or:\n * http://opensource.org/licenses/BSD-3-Clause\n */\nexports.SourceMapGenerator = require('./lib/source-map-generator').SourceMapGenerator;\nexports.SourceMapConsumer = require('./lib/source-map-consumer').SourceMapConsumer;\nexports.SourceNode = require('./lib/source-node').SourceNode;\n\n\n\n//////////////////\n// WEBPACK FOOTER\n// ./source-map.js\n// module id = 0\n// module chunks = 0","/* -*- Mode: js; js-indent-level: 2; -*- */\n/*\n * Copyright 2011 Mozilla Foundation and contributors\n * Licensed under the New BSD license. See LICENSE or:\n * http://opensource.org/licenses/BSD-3-Clause\n */\n\nvar base64VLQ = require('./base64-vlq');\nvar util = require('./util');\nvar ArraySet = require('./array-set').ArraySet;\nvar MappingList = require('./mapping-list').MappingList;\n\n/**\n * An instance of the SourceMapGenerator represents a source map which is\n * being built incrementally. You may pass an object with the following\n * properties:\n *\n *   - file: The filename of the generated source.\n *   - sourceRoot: A root for all relative URLs in this source map.\n */\nfunction SourceMapGenerator(aArgs) {\n  if (!aArgs) {\n    aArgs = {};\n  }\n  this._file = util.getArg(aArgs, 'file', null);\n  this._sourceRoot = util.getArg(aArgs, 'sourceRoot', null);\n  this._skipValidation = util.getArg(aArgs, 'skipValidation', false);\n  this._sources = new ArraySet();\n  this._names = new ArraySet();\n  this._mappings = new MappingList();\n  this._sourcesContents = null;\n}\n\nSourceMapGenerator.prototype._version = 3;\n\n/**\n * Creates a new SourceMapGenerator based on a SourceMapConsumer\n *\n * @param aSourceMapConsumer The SourceMap.\n */\nSourceMapGenerator.fromSourceMap =\n  function SourceMapGenerator_fromSourceMap(aSourceMapConsumer) {\n    var sourceRoot = aSourceMapConsumer.sourceRoot;\n    var generator = new SourceMapGenerator({\n      file: aSourceMapConsumer.file,\n      sourceRoot: sourceRoot\n    });\n    aSourceMapConsumer.eachMapping(function (mapping) {\n      var newMapping = {\n        generated: {\n          line: mapping.generatedLine,\n          column: mapping.generatedColumn\n        }\n      };\n\n      if (mapping.source != null) {\n        newMapping.source = mapping.source;\n        if (sourceRoot != null) {\n          newMapping.source = util.relative(sourceRoot, newMapping.source);\n        }\n\n        newMapping.original = {\n          line: mapping.originalLine,\n          column: mapping.originalColumn\n        };\n\n        if (mapping.name != null) {\n          newMapping.name = mapping.name;\n        }\n      }\n\n      generator.addMapping(newMapping);\n    });\n    aSourceMapConsumer.sources.forEach(function (sourceFile) {\n      var sourceRelative = sourceFile;\n      if (sourceRoot !== null) {\n        sourceRelative = util.relative(sourceRoot, sourceFile);\n      }\n\n      if (!generator._sources.has(sourceRelative)) {\n        generator._sources.add(sourceRelative);\n      }\n\n      var content = aSourceMapConsumer.sourceContentFor(sourceFile);\n      if (content != null) {\n        generator.setSourceContent(sourceFile, content);\n      }\n    });\n    return generator;\n  };\n\n/**\n * Add a single mapping from original source line and column to the generated\n * source's line and column for this source map being created. 
The mapping\n * object should have the following properties:\n *\n *   - generated: An object with the generated line and column positions.\n *   - original: An object with the original line and column positions.\n *   - source: The original source file (relative to the sourceRoot).\n *   - name: An optional original token name for this mapping.\n */\nSourceMapGenerator.prototype.addMapping =\n  function SourceMapGenerator_addMapping(aArgs) {\n    var generated = util.getArg(aArgs, 'generated');\n    var original = util.getArg(aArgs, 'original', null);\n    var source = util.getArg(aArgs, 'source', null);\n    var name = util.getArg(aArgs, 'name', null);\n\n    if (!this._skipValidation) {\n      this._validateMapping(generated, original, source, name);\n    }\n\n    if (source != null) {\n      source = String(source);\n      if (!this._sources.has(source)) {\n        this._sources.add(source);\n      }\n    }\n\n    if (name != null) {\n      name = String(name);\n      if (!this._names.has(name)) {\n        this._names.add(name);\n      }\n    }\n\n    this._mappings.add({\n      generatedLine: generated.line,\n      generatedColumn: generated.column,\n      originalLine: original != null && original.line,\n      originalColumn: original != null && original.column,\n      source: source,\n      name: name\n    });\n  };\n\n/**\n * Set the source content for a source file.\n */\nSourceMapGenerator.prototype.setSourceContent =\n  function SourceMapGenerator_setSourceContent(aSourceFile, aSourceContent) {\n    var source = aSourceFile;\n    if (this._sourceRoot != null) {\n      source = util.relative(this._sourceRoot, source);\n    }\n\n    if (aSourceContent != null) {\n      // Add the source content to the _sourcesContents map.\n      // Create a new _sourcesContents map if the property is null.\n      if (!this._sourcesContents) {\n        this._sourcesContents = Object.create(null);\n      }\n      this._sourcesContents[util.toSetString(source)] = aSourceContent;\n    } else if (this._sourcesContents) {\n      // Remove the source file from the _sourcesContents map.\n      // If the _sourcesContents map is empty, set the property to null.\n      delete this._sourcesContents[util.toSetString(source)];\n      if (Object.keys(this._sourcesContents).length === 0) {\n        this._sourcesContents = null;\n      }\n    }\n  };\n\n/**\n * Applies the mappings of a sub-source-map for a specific source file to the\n * source map being generated. Each mapping to the supplied source file is\n * rewritten using the supplied source map. Note: The resolution for the\n * resulting mappings is the minimium of this map and the supplied map.\n *\n * @param aSourceMapConsumer The source map to be applied.\n * @param aSourceFile Optional. The filename of the source file.\n *        If omitted, SourceMapConsumer's file property will be used.\n * @param aSourceMapPath Optional. The dirname of the path to the source map\n *        to be applied. If relative, it is relative to the SourceMapConsumer.\n *        This parameter is needed when the two source maps aren't in the same\n *        directory, and the source map to be applied contains relative source\n *        paths. 
If so, those relative source paths need to be rewritten\n *        relative to the SourceMapGenerator.\n */\nSourceMapGenerator.prototype.applySourceMap =\n  function SourceMapGenerator_applySourceMap(aSourceMapConsumer, aSourceFile, aSourceMapPath) {\n    var sourceFile = aSourceFile;\n    // If aSourceFile is omitted, we will use the file property of the SourceMap\n    if (aSourceFile == null) {\n      if (aSourceMapConsumer.file == null) {\n        throw new Error(\n          'SourceMapGenerator.prototype.applySourceMap requires either an explicit source file, ' +\n          'or the source map\\'s \"file\" property. Both were omitted.'\n        );\n      }\n      sourceFile = aSourceMapConsumer.file;\n    }\n    var sourceRoot = this._sourceRoot;\n    // Make \"sourceFile\" relative if an absolute Url is passed.\n    if (sourceRoot != null) {\n      sourceFile = util.relative(sourceRoot, sourceFile);\n    }\n    // Applying the SourceMap can add and remove items from the sources and\n    // the names array.\n    var newSources = new ArraySet();\n    var newNames = new ArraySet();\n\n    // Find mappings for the \"sourceFile\"\n    this._mappings.unsortedForEach(function (mapping) {\n      if (mapping.source === sourceFile && mapping.originalLine != null) {\n        // Check if it can be mapped by the source map, then update the mapping.\n        var original = aSourceMapConsumer.originalPositionFor({\n          line: mapping.originalLine,\n          column: mapping.originalColumn\n        });\n        if (original.source != null) {\n          // Copy mapping\n          mapping.source = original.source;\n          if (aSourceMapPath != null) {\n            mapping.source = util.join(aSourceMapPath, mapping.source)\n          }\n          if (sourceRoot != null) {\n            mapping.source = util.relative(sourceRoot, mapping.source);\n          }\n          mapping.originalLine = original.line;\n          mapping.originalColumn = original.column;\n          if (original.name != null) {\n            mapping.name = original.name;\n          }\n        }\n      }\n\n      var source = mapping.source;\n      if (source != null && !newSources.has(source)) {\n        newSources.add(source);\n      }\n\n      var name = mapping.name;\n      if (name != null && !newNames.has(name)) {\n        newNames.add(name);\n      }\n\n    }, this);\n    this._sources = newSources;\n    this._names = newNames;\n\n    // Copy sourcesContents of applied map.\n    aSourceMapConsumer.sources.forEach(function (sourceFile) {\n      var content = aSourceMapConsumer.sourceContentFor(sourceFile);\n      if (content != null) {\n        if (aSourceMapPath != null) {\n          sourceFile = util.join(aSourceMapPath, sourceFile);\n        }\n        if (sourceRoot != null) {\n          sourceFile = util.relative(sourceRoot, sourceFile);\n        }\n        this.setSourceContent(sourceFile, content);\n      }\n    }, this);\n  };\n\n/**\n * A mapping can have one of the three levels of data:\n *\n *   1. Just the generated position.\n *   2. The Generated position, original position, and original source.\n *   3. 
Generated and original position, original source, as well as a name\n *      token.\n *\n * To maintain consistency, we validate that any new mapping being added falls\n * in to one of these categories.\n */\nSourceMapGenerator.prototype._validateMapping =\n  function SourceMapGenerator_validateMapping(aGenerated, aOriginal, aSource,\n                                              aName) {\n    // When aOriginal is truthy but has empty values for .line and .column,\n    // it is most likely a programmer error. In this case we throw a very\n    // specific error message to try to guide them the right way.\n    // For example: https://github.com/Polymer/polymer-bundler/pull/519\n    if (aOriginal && typeof aOriginal.line !== 'number' && typeof aOriginal.column !== 'number') {\n        throw new Error(\n            'original.line and original.column are not numbers -- you probably meant to omit ' +\n            'the original mapping entirely and only map the generated position. If so, pass ' +\n            'null for the original mapping instead of an object with empty or null values.'\n        );\n    }\n\n    if (aGenerated && 'line' in aGenerated && 'column' in aGenerated\n        && aGenerated.line > 0 && aGenerated.column >= 0\n        && !aOriginal && !aSource && !aName) {\n      // Case 1.\n      return;\n    }\n    else if (aGenerated && 'line' in aGenerated && 'column' in aGenerated\n             && aOriginal && 'line' in aOriginal && 'column' in aOriginal\n             && aGenerated.line > 0 && aGenerated.column >= 0\n             && aOriginal.line > 0 && aOriginal.column >= 0\n             && aSource) {\n      // Cases 2 and 3.\n      return;\n    }\n    else {\n      throw new Error('Invalid mapping: ' + JSON.stringify({\n        generated: aGenerated,\n        source: aSource,\n        original: aOriginal,\n        name: aName\n      }));\n    }\n  };\n\n/**\n * Serialize the accumulated mappings in to the stream of base 64 VLQs\n * specified by the source map format.\n */\nSourceMapGenerator.prototype._serializeMappings =\n  function SourceMapGenerator_serializeMappings() {\n    var previousGeneratedColumn = 0;\n    var previousGeneratedLine = 1;\n    var previousOriginalColumn = 0;\n    var previousOriginalLine = 0;\n    var previousName = 0;\n    var previousSource = 0;\n    var result = '';\n    var next;\n    var mapping;\n    var nameIdx;\n    var sourceIdx;\n\n    var mappings = this._mappings.toArray();\n    for (var i = 0, len = mappings.length; i < len; i++) {\n      mapping = mappings[i];\n      next = ''\n\n      if (mapping.generatedLine !== previousGeneratedLine) {\n        previousGeneratedColumn = 0;\n        while (mapping.generatedLine !== previousGeneratedLine) {\n          next += ';';\n          previousGeneratedLine++;\n        }\n      }\n      else {\n        if (i > 0) {\n          if (!util.compareByGeneratedPositionsInflated(mapping, mappings[i - 1])) {\n            continue;\n          }\n          next += ',';\n        }\n      }\n\n      next += base64VLQ.encode(mapping.generatedColumn\n                                 - previousGeneratedColumn);\n      previousGeneratedColumn = mapping.generatedColumn;\n\n      if (mapping.source != null) {\n        sourceIdx = this._sources.indexOf(mapping.source);\n        next += base64VLQ.encode(sourceIdx - previousSource);\n        previousSource = sourceIdx;\n\n        // lines are stored 0-based in SourceMap spec version 3\n        next += base64VLQ.encode(mapping.originalLine - 1\n                              
     - previousOriginalLine);\n        previousOriginalLine = mapping.originalLine - 1;\n\n        next += base64VLQ.encode(mapping.originalColumn\n                                   - previousOriginalColumn);\n        previousOriginalColumn = mapping.originalColumn;\n\n        if (mapping.name != null) {\n          nameIdx = this._names.indexOf(mapping.name);\n          next += base64VLQ.encode(nameIdx - previousName);\n          previousName = nameIdx;\n        }\n      }\n\n      result += next;\n    }\n\n    return result;\n  };\n\nSourceMapGenerator.prototype._generateSourcesContent =\n  function SourceMapGenerator_generateSourcesContent(aSources, aSourceRoot) {\n    return aSources.map(function (source) {\n      if (!this._sourcesContents) {\n        return null;\n      }\n      if (aSourceRoot != null) {\n        source = util.relative(aSourceRoot, source);\n      }\n      var key = util.toSetString(source);\n      return Object.prototype.hasOwnProperty.call(this._sourcesContents, key)\n        ? this._sourcesContents[key]\n        : null;\n    }, this);\n  };\n\n/**\n * Externalize the source map.\n */\nSourceMapGenerator.prototype.toJSON =\n  function SourceMapGenerator_toJSON() {\n    var map = {\n      version: this._version,\n      sources: this._sources.toArray(),\n      names: this._names.toArray(),\n      mappings: this._serializeMappings()\n    };\n    if (this._file != null) {\n      map.file = this._file;\n    }\n    if (this._sourceRoot != null) {\n      map.sourceRoot = this._sourceRoot;\n    }\n    if (this._sourcesContents) {\n      map.sourcesContent = this._generateSourcesContent(map.sources, map.sourceRoot);\n    }\n\n    return map;\n  };\n\n/**\n * Render the source map being generated to a string.\n */\nSourceMapGenerator.prototype.toString =\n  function SourceMapGenerator_toString() {\n    return JSON.stringify(this.toJSON());\n  };\n\nexports.SourceMapGenerator = SourceMapGenerator;\n\n\n\n//////////////////\n// WEBPACK FOOTER\n// ./lib/source-map-generator.js\n// module id = 1\n// module chunks = 0","/* -*- Mode: js; js-indent-level: 2; -*- */\n/*\n * Copyright 2011 Mozilla Foundation and contributors\n * Licensed under the New BSD license. See LICENSE or:\n * http://opensource.org/licenses/BSD-3-Clause\n *\n * Based on the Base 64 VLQ implementation in Closure Compiler:\n * https://code.google.com/p/closure-compiler/source/browse/trunk/src/com/google/debugging/sourcemap/Base64VLQ.java\n *\n * Copyright 2011 The Closure Compiler Authors. All rights reserved.\n * Redistribution and use in source and binary forms, with or without\n * modification, are permitted provided that the following conditions are\n * met:\n *\n *  * Redistributions of source code must retain the above copyright\n *    notice, this list of conditions and the following disclaimer.\n *  * Redistributions in binary form must reproduce the above\n *    copyright notice, this list of conditions and the following\n *    disclaimer in the documentation and/or other materials provided\n *    with the distribution.\n *  * Neither the name of Google Inc. nor the names of its\n *    contributors may be used to endorse or promote products derived\n *    from this software without specific prior written permission.\n *\n * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n * \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n * A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT\n * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n */\n\nvar base64 = require('./base64');\n\n// A single base 64 digit can contain 6 bits of data. For the base 64 variable\n// length quantities we use in the source map spec, the first bit is the sign,\n// the next four bits are the actual value, and the 6th bit is the\n// continuation bit. The continuation bit tells us whether there are more\n// digits in this value following this digit.\n//\n//   Continuation\n//   |    Sign\n//   |    |\n//   V    V\n//   101011\n\nvar VLQ_BASE_SHIFT = 5;\n\n// binary: 100000\nvar VLQ_BASE = 1 << VLQ_BASE_SHIFT;\n\n// binary: 011111\nvar VLQ_BASE_MASK = VLQ_BASE - 1;\n\n// binary: 100000\nvar VLQ_CONTINUATION_BIT = VLQ_BASE;\n\n/**\n * Converts from a two-complement value to a value where the sign bit is\n * placed in the least significant bit.  For example, as decimals:\n *   1 becomes 2 (10 binary), -1 becomes 3 (11 binary)\n *   2 becomes 4 (100 binary), -2 becomes 5 (101 binary)\n */\nfunction toVLQSigned(aValue) {\n  return aValue < 0\n    ? ((-aValue) << 1) + 1\n    : (aValue << 1) + 0;\n}\n\n/**\n * Converts to a two-complement value from a value where the sign bit is\n * placed in the least significant bit.  For example, as decimals:\n *   2 (10 binary) becomes 1, 3 (11 binary) becomes -1\n *   4 (100 binary) becomes 2, 5 (101 binary) becomes -2\n */\nfunction fromVLQSigned(aValue) {\n  var isNegative = (aValue & 1) === 1;\n  var shifted = aValue >> 1;\n  return isNegative\n    ? 
-shifted\n    : shifted;\n}\n\n/**\n * Returns the base 64 VLQ encoded value.\n */\nexports.encode = function base64VLQ_encode(aValue) {\n  var encoded = \"\";\n  var digit;\n\n  var vlq = toVLQSigned(aValue);\n\n  do {\n    digit = vlq & VLQ_BASE_MASK;\n    vlq >>>= VLQ_BASE_SHIFT;\n    if (vlq > 0) {\n      // There are still more digits in this value, so we must make sure the\n      // continuation bit is marked.\n      digit |= VLQ_CONTINUATION_BIT;\n    }\n    encoded += base64.encode(digit);\n  } while (vlq > 0);\n\n  return encoded;\n};\n\n/**\n * Decodes the next base 64 VLQ value from the given string and returns the\n * value and the rest of the string via the out parameter.\n */\nexports.decode = function base64VLQ_decode(aStr, aIndex, aOutParam) {\n  var strLen = aStr.length;\n  var result = 0;\n  var shift = 0;\n  var continuation, digit;\n\n  do {\n    if (aIndex >= strLen) {\n      throw new Error(\"Expected more digits in base 64 VLQ value.\");\n    }\n\n    digit = base64.decode(aStr.charCodeAt(aIndex++));\n    if (digit === -1) {\n      throw new Error(\"Invalid base64 digit: \" + aStr.charAt(aIndex - 1));\n    }\n\n    continuation = !!(digit & VLQ_CONTINUATION_BIT);\n    digit &= VLQ_BASE_MASK;\n    result = result + (digit << shift);\n    shift += VLQ_BASE_SHIFT;\n  } while (continuation);\n\n  aOutParam.value = fromVLQSigned(result);\n  aOutParam.rest = aIndex;\n};\n\n\n\n//////////////////\n// WEBPACK FOOTER\n// ./lib/base64-vlq.js\n// module id = 2\n// module chunks = 0","/* -*- Mode: js; js-indent-level: 2; -*- */\n/*\n * Copyright 2011 Mozilla Foundation and contributors\n * Licensed under the New BSD license. See LICENSE or:\n * http://opensource.org/licenses/BSD-3-Clause\n */\n\nvar intToCharMap = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'.split('');\n\n/**\n * Encode an integer in the range of 0 to 63 to a single base 64 digit.\n */\nexports.encode = function (number) {\n  if (0 <= number && number < intToCharMap.length) {\n    return intToCharMap[number];\n  }\n  throw new TypeError(\"Must be between 0 and 63: \" + number);\n};\n\n/**\n * Decode a single base 64 character code digit to an integer. Returns -1 on\n * failure.\n */\nexports.decode = function (charCode) {\n  var bigA = 65;     // 'A'\n  var bigZ = 90;     // 'Z'\n\n  var littleA = 97;  // 'a'\n  var littleZ = 122; // 'z'\n\n  var zero = 48;     // '0'\n  var nine = 57;     // '9'\n\n  var plus = 43;     // '+'\n  var slash = 47;    // '/'\n\n  var littleOffset = 26;\n  var numberOffset = 52;\n\n  // 0 - 25: ABCDEFGHIJKLMNOPQRSTUVWXYZ\n  if (bigA <= charCode && charCode <= bigZ) {\n    return (charCode - bigA);\n  }\n\n  // 26 - 51: abcdefghijklmnopqrstuvwxyz\n  if (littleA <= charCode && charCode <= littleZ) {\n    return (charCode - littleA + littleOffset);\n  }\n\n  // 52 - 61: 0123456789\n  if (zero <= charCode && charCode <= nine) {\n    return (charCode - zero + numberOffset);\n  }\n\n  // 62: +\n  if (charCode == plus) {\n    return 62;\n  }\n\n  // 63: /\n  if (charCode == slash) {\n    return 63;\n  }\n\n  // Invalid base64 digit.\n  return -1;\n};\n\n\n\n//////////////////\n// WEBPACK FOOTER\n// ./lib/base64.js\n// module id = 3\n// module chunks = 0","/* -*- Mode: js; js-indent-level: 2; -*- */\n/*\n * Copyright 2011 Mozilla Foundation and contributors\n * Licensed under the New BSD license. 
See LICENSE or:\n * http://opensource.org/licenses/BSD-3-Clause\n */\n\n/**\n * This is a helper function for getting values from parameter/options\n * objects.\n *\n * @param args The object we are extracting values from\n * @param name The name of the property we are getting.\n * @param defaultValue An optional value to return if the property is missing\n * from the object. If this is not specified and the property is missing, an\n * error will be thrown.\n */\nfunction getArg(aArgs, aName, aDefaultValue) {\n  if (aName in aArgs) {\n    return aArgs[aName];\n  } else if (arguments.length === 3) {\n    return aDefaultValue;\n  } else {\n    throw new Error('\"' + aName + '\" is a required argument.');\n  }\n}\nexports.getArg = getArg;\n\nvar urlRegexp = /^(?:([\\w+\\-.]+):)?\\/\\/(?:(\\w+:\\w+)@)?([\\w.-]*)(?::(\\d+))?(.*)$/;\nvar dataUrlRegexp = /^data:.+\\,.+$/;\n\nfunction urlParse(aUrl) {\n  var match = aUrl.match(urlRegexp);\n  if (!match) {\n    return null;\n  }\n  return {\n    scheme: match[1],\n    auth: match[2],\n    host: match[3],\n    port: match[4],\n    path: match[5]\n  };\n}\nexports.urlParse = urlParse;\n\nfunction urlGenerate(aParsedUrl) {\n  var url = '';\n  if (aParsedUrl.scheme) {\n    url += aParsedUrl.scheme + ':';\n  }\n  url += '//';\n  if (aParsedUrl.auth) {\n    url += aParsedUrl.auth + '@';\n  }\n  if (aParsedUrl.host) {\n    url += aParsedUrl.host;\n  }\n  if (aParsedUrl.port) {\n    url += \":\" + aParsedUrl.port\n  }\n  if (aParsedUrl.path) {\n    url += aParsedUrl.path;\n  }\n  return url;\n}\nexports.urlGenerate = urlGenerate;\n\n/**\n * Normalizes a path, or the path portion of a URL:\n *\n * - Replaces consecutive slashes with one slash.\n * - Removes unnecessary '.' parts.\n * - Removes unnecessary '<dir>/..' parts.\n *\n * Based on code in the Node.js 'path' core module.\n *\n * @param aPath The path or url to normalize.\n */\nfunction normalize(aPath) {\n  var path = aPath;\n  var url = urlParse(aPath);\n  if (url) {\n    if (!url.path) {\n      return aPath;\n    }\n    path = url.path;\n  }\n  var isAbsolute = exports.isAbsolute(path);\n\n  var parts = path.split(/\\/+/);\n  for (var part, up = 0, i = parts.length - 1; i >= 0; i--) {\n    part = parts[i];\n    if (part === '.') {\n      parts.splice(i, 1);\n    } else if (part === '..') {\n      up++;\n    } else if (up > 0) {\n      if (part === '') {\n        // The first part is blank if the path is absolute. Trying to go\n        // above the root is a no-op. Therefore we can remove all '..' parts\n        // directly after the root.\n        parts.splice(i + 1, up);\n        up = 0;\n      } else {\n        parts.splice(i, 2);\n        up--;\n      }\n    }\n  }\n  path = parts.join('/');\n\n  if (path === '') {\n    path = isAbsolute ? '/' : '.';\n  }\n\n  if (url) {\n    url.path = path;\n    return urlGenerate(url);\n  }\n  return path;\n}\nexports.normalize = normalize;\n\n/**\n * Joins two paths/URLs.\n *\n * @param aRoot The root path or URL.\n * @param aPath The path or URL to be joined with the root.\n *\n * - If aPath is a URL or a data URI, aPath is returned, unless aPath is a\n *   scheme-relative URL: Then the scheme of aRoot, if any, is prepended\n *   first.\n * - Otherwise aPath is a path. If aRoot is a URL, then its path portion\n *   is updated with the result and aRoot is returned. 
Otherwise the result\n *   is returned.\n *   - If aPath is absolute, the result is aPath.\n *   - Otherwise the two paths are joined with a slash.\n * - Joining for example 'http://' and 'www.example.com' is also supported.\n */\nfunction join(aRoot, aPath) {\n  if (aRoot === \"\") {\n    aRoot = \".\";\n  }\n  if (aPath === \"\") {\n    aPath = \".\";\n  }\n  var aPathUrl = urlParse(aPath);\n  var aRootUrl = urlParse(aRoot);\n  if (aRootUrl) {\n    aRoot = aRootUrl.path || '/';\n  }\n\n  // `join(foo, '//www.example.org')`\n  if (aPathUrl && !aPathUrl.scheme) {\n    if (aRootUrl) {\n      aPathUrl.scheme = aRootUrl.scheme;\n    }\n    return urlGenerate(aPathUrl);\n  }\n\n  if (aPathUrl || aPath.match(dataUrlRegexp)) {\n    return aPath;\n  }\n\n  // `join('http://', 'www.example.com')`\n  if (aRootUrl && !aRootUrl.host && !aRootUrl.path) {\n    aRootUrl.host = aPath;\n    return urlGenerate(aRootUrl);\n  }\n\n  var joined = aPath.charAt(0) === '/'\n    ? aPath\n    : normalize(aRoot.replace(/\\/+$/, '') + '/' + aPath);\n\n  if (aRootUrl) {\n    aRootUrl.path = joined;\n    return urlGenerate(aRootUrl);\n  }\n  return joined;\n}\nexports.join = join;\n\nexports.isAbsolute = function (aPath) {\n  return aPath.charAt(0) === '/' || urlRegexp.test(aPath);\n};\n\n/**\n * Make a path relative to a URL or another path.\n *\n * @param aRoot The root path or URL.\n * @param aPath The path or URL to be made relative to aRoot.\n */\nfunction relative(aRoot, aPath) {\n  if (aRoot === \"\") {\n    aRoot = \".\";\n  }\n\n  aRoot = aRoot.replace(/\\/$/, '');\n\n  // It is possible for the path to be above the root. In this case, simply\n  // checking whether the root is a prefix of the path won't work. Instead, we\n  // need to remove components from the root one by one, until either we find\n  // a prefix that fits, or we run out of components to remove.\n  var level = 0;\n  while (aPath.indexOf(aRoot + '/') !== 0) {\n    var index = aRoot.lastIndexOf(\"/\");\n    if (index < 0) {\n      return aPath;\n    }\n\n    // If the only part of the root that is left is the scheme (i.e. http://,\n    // file:///, etc.), one or more slashes (/), or simply nothing at all, we\n    // have exhausted all components, so the path is not relative to the root.\n    aRoot = aRoot.slice(0, index);\n    if (aRoot.match(/^([^\\/]+:\\/)?\\/*$/)) {\n      return aPath;\n    }\n\n    ++level;\n  }\n\n  // Make sure we add a \"../\" for each component we removed from the root.\n  return Array(level + 1).join(\"../\") + aPath.substr(aRoot.length + 1);\n}\nexports.relative = relative;\n\nvar supportsNullProto = (function () {\n  var obj = Object.create(null);\n  return !('__proto__' in obj);\n}());\n\nfunction identity (s) {\n  return s;\n}\n\n/**\n * Because behavior goes wacky when you set `__proto__` on objects, we\n * have to prefix all the strings in our set with an arbitrary character.\n *\n * See https://github.com/mozilla/source-map/pull/31 and\n * https://github.com/mozilla/source-map/issues/30\n *\n * @param String aStr\n */\nfunction toSetString(aStr) {\n  if (isProtoString(aStr)) {\n    return '$' + aStr;\n  }\n\n  return aStr;\n}\nexports.toSetString = supportsNullProto ? identity : toSetString;\n\nfunction fromSetString(aStr) {\n  if (isProtoString(aStr)) {\n    return aStr.slice(1);\n  }\n\n  return aStr;\n}\nexports.fromSetString = supportsNullProto ? 
identity : fromSetString;\n\nfunction isProtoString(s) {\n  if (!s) {\n    return false;\n  }\n\n  var length = s.length;\n\n  if (length < 9 /* \"__proto__\".length */) {\n    return false;\n  }\n\n  if (s.charCodeAt(length - 1) !== 95  /* '_' */ ||\n      s.charCodeAt(length - 2) !== 95  /* '_' */ ||\n      s.charCodeAt(length - 3) !== 111 /* 'o' */ ||\n      s.charCodeAt(length - 4) !== 116 /* 't' */ ||\n      s.charCodeAt(length - 5) !== 111 /* 'o' */ ||\n      s.charCodeAt(length - 6) !== 114 /* 'r' */ ||\n      s.charCodeAt(length - 7) !== 112 /* 'p' */ ||\n      s.charCodeAt(length - 8) !== 95  /* '_' */ ||\n      s.charCodeAt(length - 9) !== 95  /* '_' */) {\n    return false;\n  }\n\n  for (var i = length - 10; i >= 0; i--) {\n    if (s.charCodeAt(i) !== 36 /* '$' */) {\n      return false;\n    }\n  }\n\n  return true;\n}\n\n/**\n * Comparator between two mappings where the original positions are compared.\n *\n * Optionally pass in `true` as `onlyCompareGenerated` to consider two\n * mappings with the same original source/line/column, but different generated\n * line and column the same. Useful when searching for a mapping with a\n * stubbed out mapping.\n */\nfunction compareByOriginalPositions(mappingA, mappingB, onlyCompareOriginal) {\n  var cmp = strcmp(mappingA.source, mappingB.source);\n  if (cmp !== 0) {\n    return cmp;\n  }\n\n  cmp = mappingA.originalLine - mappingB.originalLine;\n  if (cmp !== 0) {\n    return cmp;\n  }\n\n  cmp = mappingA.originalColumn - mappingB.originalColumn;\n  if (cmp !== 0 || onlyCompareOriginal) {\n    return cmp;\n  }\n\n  cmp = mappingA.generatedColumn - mappingB.generatedColumn;\n  if (cmp !== 0) {\n    return cmp;\n  }\n\n  cmp = mappingA.generatedLine - mappingB.generatedLine;\n  if (cmp !== 0) {\n    return cmp;\n  }\n\n  return strcmp(mappingA.name, mappingB.name);\n}\nexports.compareByOriginalPositions = compareByOriginalPositions;\n\n/**\n * Comparator between two mappings with deflated source and name indices where\n * the generated positions are compared.\n *\n * Optionally pass in `true` as `onlyCompareGenerated` to consider two\n * mappings with the same generated line and column, but different\n * source/name/original line and column the same. 
Useful when searching for a\n * mapping with a stubbed out mapping.\n */\nfunction compareByGeneratedPositionsDeflated(mappingA, mappingB, onlyCompareGenerated) {\n  var cmp = mappingA.generatedLine - mappingB.generatedLine;\n  if (cmp !== 0) {\n    return cmp;\n  }\n\n  cmp = mappingA.generatedColumn - mappingB.generatedColumn;\n  if (cmp !== 0 || onlyCompareGenerated) {\n    return cmp;\n  }\n\n  cmp = strcmp(mappingA.source, mappingB.source);\n  if (cmp !== 0) {\n    return cmp;\n  }\n\n  cmp = mappingA.originalLine - mappingB.originalLine;\n  if (cmp !== 0) {\n    return cmp;\n  }\n\n  cmp = mappingA.originalColumn - mappingB.originalColumn;\n  if (cmp !== 0) {\n    return cmp;\n  }\n\n  return strcmp(mappingA.name, mappingB.name);\n}\nexports.compareByGeneratedPositionsDeflated = compareByGeneratedPositionsDeflated;\n\nfunction strcmp(aStr1, aStr2) {\n  if (aStr1 === aStr2) {\n    return 0;\n  }\n\n  if (aStr1 === null) {\n    return 1; // aStr2 !== null\n  }\n\n  if (aStr2 === null) {\n    return -1; // aStr1 !== null\n  }\n\n  if (aStr1 > aStr2) {\n    return 1;\n  }\n\n  return -1;\n}\n\n/**\n * Comparator between two mappings with inflated source and name strings where\n * the generated positions are compared.\n */\nfunction compareByGeneratedPositionsInflated(mappingA, mappingB) {\n  var cmp = mappingA.generatedLine - mappingB.generatedLine;\n  if (cmp !== 0) {\n    return cmp;\n  }\n\n  cmp = mappingA.generatedColumn - mappingB.generatedColumn;\n  if (cmp !== 0) {\n    return cmp;\n  }\n\n  cmp = strcmp(mappingA.source, mappingB.source);\n  if (cmp !== 0) {\n    return cmp;\n  }\n\n  cmp = mappingA.originalLine - mappingB.originalLine;\n  if (cmp !== 0) {\n    return cmp;\n  }\n\n  cmp = mappingA.originalColumn - mappingB.originalColumn;\n  if (cmp !== 0) {\n    return cmp;\n  }\n\n  return strcmp(mappingA.name, mappingB.name);\n}\nexports.compareByGeneratedPositionsInflated = compareByGeneratedPositionsInflated;\n\n/**\n * Strip any JSON XSSI avoidance prefix from the string (as documented\n * in the source maps specification), and then parse the string as\n * JSON.\n */\nfunction parseSourceMapInput(str) {\n  return JSON.parse(str.replace(/^\\)]}'[^\\n]*\\n/, ''));\n}\nexports.parseSourceMapInput = parseSourceMapInput;\n\n/**\n * Compute the URL of a source given the the source root, the source's\n * URL, and the source map's URL.\n */\nfunction computeSourceURL(sourceRoot, sourceURL, sourceMapURL) {\n  sourceURL = sourceURL || '';\n\n  if (sourceRoot) {\n    // This follows what Chrome does.\n    if (sourceRoot[sourceRoot.length - 1] !== '/' && sourceURL[0] !== '/') {\n      sourceRoot += '/';\n    }\n    // The spec says:\n    //   Line 4: An optional source root, useful for relocating source\n    //   files on a server or removing repeated values in the\n    //   “sources” entry.  This value is prepended to the individual\n    //   entries in the “source” field.\n    sourceURL = sourceRoot + sourceURL;\n  }\n\n  // Historically, SourceMapConsumer did not take the sourceMapURL as\n  // a parameter.  This mode is still somewhat supported, which is why\n  // this code block is conditional.  However, it's preferable to pass\n  // the source map URL to SourceMapConsumer, so that this function\n  // can implement the source URL resolution algorithm as outlined in\n  // the spec.  This block is basically the equivalent of:\n  //    new URL(sourceURL, sourceMapURL).toString()\n  // ... 
except it avoids using URL, which wasn't available in the\n  // older releases of node still supported by this library.\n  //\n  // The spec says:\n  //   If the sources are not absolute URLs after prepending of the\n  //   “sourceRoot”, the sources are resolved relative to the\n  //   SourceMap (like resolving script src in a html document).\n  if (sourceMapURL) {\n    var parsed = urlParse(sourceMapURL);\n    if (!parsed) {\n      throw new Error(\"sourceMapURL could not be parsed\");\n    }\n    if (parsed.path) {\n      // Strip the last path component, but keep the \"/\".\n      var index = parsed.path.lastIndexOf('/');\n      if (index >= 0) {\n        parsed.path = parsed.path.substring(0, index + 1);\n      }\n    }\n    sourceURL = join(urlGenerate(parsed), sourceURL);\n  }\n\n  return normalize(sourceURL);\n}\nexports.computeSourceURL = computeSourceURL;\n\n\n\n//////////////////\n// WEBPACK FOOTER\n// ./lib/util.js\n// module id = 4\n// module chunks = 0","/* -*- Mode: js; js-indent-level: 2; -*- */\n/*\n * Copyright 2011 Mozilla Foundation and contributors\n * Licensed under the New BSD license. See LICENSE or:\n * http://opensource.org/licenses/BSD-3-Clause\n */\n\nvar util = require('./util');\nvar has = Object.prototype.hasOwnProperty;\nvar hasNativeMap = typeof Map !== \"undefined\";\n\n/**\n * A data structure which is a combination of an array and a set. Adding a new\n * member is O(1), testing for membership is O(1), and finding the index of an\n * element is O(1). Removing elements from the set is not supported. Only\n * strings are supported for membership.\n */\nfunction ArraySet() {\n  this._array = [];\n  this._set = hasNativeMap ? new Map() : Object.create(null);\n}\n\n/**\n * Static method for creating ArraySet instances from an existing array.\n */\nArraySet.fromArray = function ArraySet_fromArray(aArray, aAllowDuplicates) {\n  var set = new ArraySet();\n  for (var i = 0, len = aArray.length; i < len; i++) {\n    set.add(aArray[i], aAllowDuplicates);\n  }\n  return set;\n};\n\n/**\n * Return how many unique items are in this ArraySet. If duplicates have been\n * added, than those do not count towards the size.\n *\n * @returns Number\n */\nArraySet.prototype.size = function ArraySet_size() {\n  return hasNativeMap ? this._set.size : Object.getOwnPropertyNames(this._set).length;\n};\n\n/**\n * Add the given string to this set.\n *\n * @param String aStr\n */\nArraySet.prototype.add = function ArraySet_add(aStr, aAllowDuplicates) {\n  var sStr = hasNativeMap ? aStr : util.toSetString(aStr);\n  var isDuplicate = hasNativeMap ? 
this.has(aStr) : has.call(this._set, sStr);\n  var idx = this._array.length;\n  if (!isDuplicate || aAllowDuplicates) {\n    this._array.push(aStr);\n  }\n  if (!isDuplicate) {\n    if (hasNativeMap) {\n      this._set.set(aStr, idx);\n    } else {\n      this._set[sStr] = idx;\n    }\n  }\n};\n\n/**\n * Is the given string a member of this set?\n *\n * @param String aStr\n */\nArraySet.prototype.has = function ArraySet_has(aStr) {\n  if (hasNativeMap) {\n    return this._set.has(aStr);\n  } else {\n    var sStr = util.toSetString(aStr);\n    return has.call(this._set, sStr);\n  }\n};\n\n/**\n * What is the index of the given string in the array?\n *\n * @param String aStr\n */\nArraySet.prototype.indexOf = function ArraySet_indexOf(aStr) {\n  if (hasNativeMap) {\n    var idx = this._set.get(aStr);\n    if (idx >= 0) {\n        return idx;\n    }\n  } else {\n    var sStr = util.toSetString(aStr);\n    if (has.call(this._set, sStr)) {\n      return this._set[sStr];\n    }\n  }\n\n  throw new Error('\"' + aStr + '\" is not in the set.');\n};\n\n/**\n * What is the element at the given index?\n *\n * @param Number aIdx\n */\nArraySet.prototype.at = function ArraySet_at(aIdx) {\n  if (aIdx >= 0 && aIdx < this._array.length) {\n    return this._array[aIdx];\n  }\n  throw new Error('No element indexed by ' + aIdx);\n};\n\n/**\n * Returns the array representation of this set (which has the proper indices\n * indicated by indexOf). Note that this is a copy of the internal array used\n * for storing the members so that no one can mess with internal state.\n */\nArraySet.prototype.toArray = function ArraySet_toArray() {\n  return this._array.slice();\n};\n\nexports.ArraySet = ArraySet;\n\n\n\n//////////////////\n// WEBPACK FOOTER\n// ./lib/array-set.js\n// module id = 5\n// module chunks = 0","/* -*- Mode: js; js-indent-level: 2; -*- */\n/*\n * Copyright 2014 Mozilla Foundation and contributors\n * Licensed under the New BSD license. See LICENSE or:\n * http://opensource.org/licenses/BSD-3-Clause\n */\n\nvar util = require('./util');\n\n/**\n * Determine whether mappingB is after mappingA with respect to generated\n * position.\n */\nfunction generatedPositionAfter(mappingA, mappingB) {\n  // Optimized for most common case\n  var lineA = mappingA.generatedLine;\n  var lineB = mappingB.generatedLine;\n  var columnA = mappingA.generatedColumn;\n  var columnB = mappingB.generatedColumn;\n  return lineB > lineA || lineB == lineA && columnB >= columnA ||\n         util.compareByGeneratedPositionsInflated(mappingA, mappingB) <= 0;\n}\n\n/**\n * A data structure to provide a sorted view of accumulated mappings in a\n * performance conscious manner. It trades a neglibable overhead in general\n * case for a large speedup in case of mappings being added in order.\n */\nfunction MappingList() {\n  this._array = [];\n  this._sorted = true;\n  // Serves as infimum\n  this._last = {generatedLine: -1, generatedColumn: 0};\n}\n\n/**\n * Iterate through internal items. 
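
// --- Illustrative sketch (not part of the vendored bundle): basic ArraySet usage. ---
// Shown only to clarify the O(1) add/has/indexOf behaviour documented above; it
// assumes the internal './array-set' module can be required directly.
var ArraySetSketch = require('./array-set').ArraySet;

var names = ArraySetSketch.fromArray(['src', 'maps', 'are', 'fun'], /* aAllowDuplicates */ false);
names.add('src');                    // duplicate without aAllowDuplicates: ignored
console.log(names.size());           // 4
console.log(names.has('maps'));      // true
console.log(names.indexOf('are'));   // 2
console.log(names.at(3));            // 'fun'
console.log(names.toArray());        // a copy; mutating it does not affect the set
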
This method takes the same arguments that\n * `Array.prototype.forEach` takes.\n *\n * NOTE: The order of the mappings is NOT guaranteed.\n */\nMappingList.prototype.unsortedForEach =\n  function MappingList_forEach(aCallback, aThisArg) {\n    this._array.forEach(aCallback, aThisArg);\n  };\n\n/**\n * Add the given source mapping.\n *\n * @param Object aMapping\n */\nMappingList.prototype.add = function MappingList_add(aMapping) {\n  if (generatedPositionAfter(this._last, aMapping)) {\n    this._last = aMapping;\n    this._array.push(aMapping);\n  } else {\n    this._sorted = false;\n    this._array.push(aMapping);\n  }\n};\n\n/**\n * Returns the flat, sorted array of mappings. The mappings are sorted by\n * generated position.\n *\n * WARNING: This method returns internal data without copying, for\n * performance. The return value must NOT be mutated, and should be treated as\n * an immutable borrow. If you want to take ownership, you must make your own\n * copy.\n */\nMappingList.prototype.toArray = function MappingList_toArray() {\n  if (!this._sorted) {\n    this._array.sort(util.compareByGeneratedPositionsInflated);\n    this._sorted = true;\n  }\n  return this._array;\n};\n\nexports.MappingList = MappingList;\n\n\n\n//////////////////\n// WEBPACK FOOTER\n// ./lib/mapping-list.js\n// module id = 6\n// module chunks = 0","/* -*- Mode: js; js-indent-level: 2; -*- */\n/*\n * Copyright 2011 Mozilla Foundation and contributors\n * Licensed under the New BSD license. See LICENSE or:\n * http://opensource.org/licenses/BSD-3-Clause\n */\n\nvar util = require('./util');\nvar binarySearch = require('./binary-search');\nvar ArraySet = require('./array-set').ArraySet;\nvar base64VLQ = require('./base64-vlq');\nvar quickSort = require('./quick-sort').quickSort;\n\nfunction SourceMapConsumer(aSourceMap, aSourceMapURL) {\n  var sourceMap = aSourceMap;\n  if (typeof aSourceMap === 'string') {\n    sourceMap = util.parseSourceMapInput(aSourceMap);\n  }\n\n  return sourceMap.sections != null\n    ? new IndexedSourceMapConsumer(sourceMap, aSourceMapURL)\n    : new BasicSourceMapConsumer(sourceMap, aSourceMapURL);\n}\n\nSourceMapConsumer.fromSourceMap = function(aSourceMap, aSourceMapURL) {\n  return BasicSourceMapConsumer.fromSourceMap(aSourceMap, aSourceMapURL);\n}\n\n/**\n * The version of the source mapping spec that we are consuming.\n */\nSourceMapConsumer.prototype._version = 3;\n\n// `__generatedMappings` and `__originalMappings` are arrays that hold the\n// parsed mapping coordinates from the source map's \"mappings\" attribute. They\n// are lazily instantiated, accessed via the `_generatedMappings` and\n// `_originalMappings` getters respectively, and we only parse the mappings\n// and create these arrays once queried for a source location. 
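
// --- Illustrative sketch (not part of the vendored bundle): MappingList defers its
// sort until toArray(), so appends that already arrive in generated order stay O(1).
// Assumes the internal './mapping-list' module can be required directly.
var MappingListSketch = require('./mapping-list').MappingList;

var list = new MappingListSketch();
list.add({ generatedLine: 1, generatedColumn: 0, source: null, originalLine: null, originalColumn: null, name: null });
list.add({ generatedLine: 2, generatedColumn: 4, source: null, originalLine: null, originalColumn: null, name: null });
// Out-of-order append: only flags the list as unsorted; nothing is re-sorted yet.
list.add({ generatedLine: 1, generatedColumn: 8, source: null, originalLine: null, originalColumn: null, name: null });

var sorted = list.toArray();         // sorted by generated position; treat as read-only
console.log(sorted.map(function (m) { return m.generatedLine + ':' + m.generatedColumn; }));
// -> [ '1:0', '1:8', '2:4' ]
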
We jump through\n// these hoops because there can be many thousands of mappings, and parsing\n// them is expensive, so we only want to do it if we must.\n//\n// Each object in the arrays is of the form:\n//\n//     {\n//       generatedLine: The line number in the generated code,\n//       generatedColumn: The column number in the generated code,\n//       source: The path to the original source file that generated this\n//               chunk of code,\n//       originalLine: The line number in the original source that\n//                     corresponds to this chunk of generated code,\n//       originalColumn: The column number in the original source that\n//                       corresponds to this chunk of generated code,\n//       name: The name of the original symbol which generated this chunk of\n//             code.\n//     }\n//\n// All properties except for `generatedLine` and `generatedColumn` can be\n// `null`.\n//\n// `_generatedMappings` is ordered by the generated positions.\n//\n// `_originalMappings` is ordered by the original positions.\n\nSourceMapConsumer.prototype.__generatedMappings = null;\nObject.defineProperty(SourceMapConsumer.prototype, '_generatedMappings', {\n  configurable: true,\n  enumerable: true,\n  get: function () {\n    if (!this.__generatedMappings) {\n      this._parseMappings(this._mappings, this.sourceRoot);\n    }\n\n    return this.__generatedMappings;\n  }\n});\n\nSourceMapConsumer.prototype.__originalMappings = null;\nObject.defineProperty(SourceMapConsumer.prototype, '_originalMappings', {\n  configurable: true,\n  enumerable: true,\n  get: function () {\n    if (!this.__originalMappings) {\n      this._parseMappings(this._mappings, this.sourceRoot);\n    }\n\n    return this.__originalMappings;\n  }\n});\n\nSourceMapConsumer.prototype._charIsMappingSeparator =\n  function SourceMapConsumer_charIsMappingSeparator(aStr, index) {\n    var c = aStr.charAt(index);\n    return c === \";\" || c === \",\";\n  };\n\n/**\n * Parse the mappings in a string in to a data structure which we can easily\n * query (the ordered arrays in the `this.__generatedMappings` and\n * `this.__originalMappings` properties).\n */\nSourceMapConsumer.prototype._parseMappings =\n  function SourceMapConsumer_parseMappings(aStr, aSourceRoot) {\n    throw new Error(\"Subclasses must implement _parseMappings\");\n  };\n\nSourceMapConsumer.GENERATED_ORDER = 1;\nSourceMapConsumer.ORIGINAL_ORDER = 2;\n\nSourceMapConsumer.GREATEST_LOWER_BOUND = 1;\nSourceMapConsumer.LEAST_UPPER_BOUND = 2;\n\n/**\n * Iterate over each mapping between an original source/line/column and a\n * generated line/column in this source map.\n *\n * @param Function aCallback\n *        The function that is called with each mapping.\n * @param Object aContext\n *        Optional. If specified, this object will be the value of `this` every\n *        time that `aCallback` is called.\n * @param aOrder\n *        Either `SourceMapConsumer.GENERATED_ORDER` or\n *        `SourceMapConsumer.ORIGINAL_ORDER`. Specifies whether you want to\n *        iterate over the mappings sorted by the generated file's line/column\n *        order or the original's source/line/column order, respectively. 
Defaults to\n *        `SourceMapConsumer.GENERATED_ORDER`.\n */\nSourceMapConsumer.prototype.eachMapping =\n  function SourceMapConsumer_eachMapping(aCallback, aContext, aOrder) {\n    var context = aContext || null;\n    var order = aOrder || SourceMapConsumer.GENERATED_ORDER;\n\n    var mappings;\n    switch (order) {\n    case SourceMapConsumer.GENERATED_ORDER:\n      mappings = this._generatedMappings;\n      break;\n    case SourceMapConsumer.ORIGINAL_ORDER:\n      mappings = this._originalMappings;\n      break;\n    default:\n      throw new Error(\"Unknown order of iteration.\");\n    }\n\n    var sourceRoot = this.sourceRoot;\n    mappings.map(function (mapping) {\n      var source = mapping.source === null ? null : this._sources.at(mapping.source);\n      source = util.computeSourceURL(sourceRoot, source, this._sourceMapURL);\n      return {\n        source: source,\n        generatedLine: mapping.generatedLine,\n        generatedColumn: mapping.generatedColumn,\n        originalLine: mapping.originalLine,\n        originalColumn: mapping.originalColumn,\n        name: mapping.name === null ? null : this._names.at(mapping.name)\n      };\n    }, this).forEach(aCallback, context);\n  };\n\n/**\n * Returns all generated line and column information for the original source,\n * line, and column provided. If no column is provided, returns all mappings\n * corresponding to a either the line we are searching for or the next\n * closest line that has any mappings. Otherwise, returns all mappings\n * corresponding to the given line and either the column we are searching for\n * or the next closest column that has any offsets.\n *\n * The only argument is an object with the following properties:\n *\n *   - source: The filename of the original source.\n *   - line: The line number in the original source.  The line number is 1-based.\n *   - column: Optional. the column number in the original source.\n *    The column number is 0-based.\n *\n * and an array of objects is returned, each with the following properties:\n *\n *   - line: The line number in the generated source, or null.  The\n *    line number is 1-based.\n *   - column: The column number in the generated source, or null.\n *    The column number is 0-based.\n */\nSourceMapConsumer.prototype.allGeneratedPositionsFor =\n  function SourceMapConsumer_allGeneratedPositionsFor(aArgs) {\n    var line = util.getArg(aArgs, 'line');\n\n    // When there is no exact match, BasicSourceMapConsumer.prototype._findMapping\n    // returns the index of the closest mapping less than the needle. 
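
// --- Illustrative sketch (not part of the vendored bundle): walking every mapping
// with eachMapping() via the synchronous consumer API of this 0.5/0.6-era build.
// The tiny raw map below is made up for illustration.
var rawMapForEach = {
  version: 3,
  file: 'out.js',
  sourceRoot: '',
  sources: ['a.js'],
  names: [],
  mappings: 'AAAA,CAAC;AACA'
};
var eachConsumer = new SourceMapConsumer(rawMapForEach);
eachConsumer.eachMapping(function (m) {
  console.log(m.generatedLine + ':' + m.generatedColumn +
              ' <- ' + m.source + ':' + m.originalLine + ':' + m.originalColumn);
}, null, SourceMapConsumer.GENERATED_ORDER);
// -> 1:0 <- a.js:1:0
//    1:1 <- a.js:1:1
//    2:0 <- a.js:2:1
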
By\n    // setting needle.originalColumn to 0, we thus find the last mapping for\n    // the given line, provided such a mapping exists.\n    var needle = {\n      source: util.getArg(aArgs, 'source'),\n      originalLine: line,\n      originalColumn: util.getArg(aArgs, 'column', 0)\n    };\n\n    needle.source = this._findSourceIndex(needle.source);\n    if (needle.source < 0) {\n      return [];\n    }\n\n    var mappings = [];\n\n    var index = this._findMapping(needle,\n                                  this._originalMappings,\n                                  \"originalLine\",\n                                  \"originalColumn\",\n                                  util.compareByOriginalPositions,\n                                  binarySearch.LEAST_UPPER_BOUND);\n    if (index >= 0) {\n      var mapping = this._originalMappings[index];\n\n      if (aArgs.column === undefined) {\n        var originalLine = mapping.originalLine;\n\n        // Iterate until either we run out of mappings, or we run into\n        // a mapping for a different line than the one we found. Since\n        // mappings are sorted, this is guaranteed to find all mappings for\n        // the line we found.\n        while (mapping && mapping.originalLine === originalLine) {\n          mappings.push({\n            line: util.getArg(mapping, 'generatedLine', null),\n            column: util.getArg(mapping, 'generatedColumn', null),\n            lastColumn: util.getArg(mapping, 'lastGeneratedColumn', null)\n          });\n\n          mapping = this._originalMappings[++index];\n        }\n      } else {\n        var originalColumn = mapping.originalColumn;\n\n        // Iterate until either we run out of mappings, or we run into\n        // a mapping for a different line than the one we were searching for.\n        // Since mappings are sorted, this is guaranteed to find all mappings for\n        // the line we are searching for.\n        while (mapping &&\n               mapping.originalLine === line &&\n               mapping.originalColumn == originalColumn) {\n          mappings.push({\n            line: util.getArg(mapping, 'generatedLine', null),\n            column: util.getArg(mapping, 'generatedColumn', null),\n            lastColumn: util.getArg(mapping, 'lastGeneratedColumn', null)\n          });\n\n          mapping = this._originalMappings[++index];\n        }\n      }\n    }\n\n    return mappings;\n  };\n\nexports.SourceMapConsumer = SourceMapConsumer;\n\n/**\n * A BasicSourceMapConsumer instance represents a parsed source map which we can\n * query for information about the original file positions by giving it a file\n * position in the generated source.\n *\n * The first parameter is the raw source map (either as a JSON string, or\n * already parsed to an object). According to the spec, source maps have the\n * following attributes:\n *\n *   - version: Which version of the source map spec this map is following.\n *   - sources: An array of URLs to the original source files.\n *   - names: An array of identifiers which can be referrenced by individual mappings.\n *   - sourceRoot: Optional. The URL root from which all sources are relative.\n *   - sourcesContent: Optional. An array of contents of the original source files.\n *   - mappings: A string of base64 VLQs which contain the actual mappings.\n *   - file: Optional. 
The generated file this source map is associated with.\n *\n * Here is an example source map, taken from the source map spec[0]:\n *\n *     {\n *       version : 3,\n *       file: \"out.js\",\n *       sourceRoot : \"\",\n *       sources: [\"foo.js\", \"bar.js\"],\n *       names: [\"src\", \"maps\", \"are\", \"fun\"],\n *       mappings: \"AA,AB;;ABCDE;\"\n *     }\n *\n * The second parameter, if given, is a string whose value is the URL\n * at which the source map was found.  This URL is used to compute the\n * sources array.\n *\n * [0]: https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit?pli=1#\n */\nfunction BasicSourceMapConsumer(aSourceMap, aSourceMapURL) {\n  var sourceMap = aSourceMap;\n  if (typeof aSourceMap === 'string') {\n    sourceMap = util.parseSourceMapInput(aSourceMap);\n  }\n\n  var version = util.getArg(sourceMap, 'version');\n  var sources = util.getArg(sourceMap, 'sources');\n  // Sass 3.3 leaves out the 'names' array, so we deviate from the spec (which\n  // requires the array) to play nice here.\n  var names = util.getArg(sourceMap, 'names', []);\n  var sourceRoot = util.getArg(sourceMap, 'sourceRoot', null);\n  var sourcesContent = util.getArg(sourceMap, 'sourcesContent', null);\n  var mappings = util.getArg(sourceMap, 'mappings');\n  var file = util.getArg(sourceMap, 'file', null);\n\n  // Once again, Sass deviates from the spec and supplies the version as a\n  // string rather than a number, so we use loose equality checking here.\n  if (version != this._version) {\n    throw new Error('Unsupported version: ' + version);\n  }\n\n  if (sourceRoot) {\n    sourceRoot = util.normalize(sourceRoot);\n  }\n\n  sources = sources\n    .map(String)\n    // Some source maps produce relative source paths like \"./foo.js\" instead of\n    // \"foo.js\".  Normalize these first so that future comparisons will succeed.\n    // See bugzil.la/1090768.\n    .map(util.normalize)\n    // Always ensure that absolute sources are internally stored relative to\n    // the source root, if the source root is absolute. Not doing this would\n    // be particularly problematic when the source root is a prefix of the\n    // source (valid, but why??). See github issue #199 and bugzil.la/1188982.\n    .map(function (source) {\n      return sourceRoot && util.isAbsolute(sourceRoot) && util.isAbsolute(source)\n        ? util.relative(sourceRoot, source)\n        : source;\n    });\n\n  // Pass `true` below to allow duplicate names and sources. While source maps\n  // are intended to be compressed and deduplicated, the TypeScript compiler\n  // sometimes generates source maps with duplicates in them. See Github issue\n  // #72 and bugzil.la/889492.\n  this._names = ArraySet.fromArray(names.map(String), true);\n  this._sources = ArraySet.fromArray(sources, true);\n\n  this._absoluteSources = this._sources.toArray().map(function (s) {\n    return util.computeSourceURL(sourceRoot, s, aSourceMapURL);\n  });\n\n  this.sourceRoot = sourceRoot;\n  this.sourcesContent = sourcesContent;\n  this._mappings = mappings;\n  this._sourceMapURL = aSourceMapURL;\n  this.file = file;\n}\n\nBasicSourceMapConsumer.prototype = Object.create(SourceMapConsumer.prototype);\nBasicSourceMapConsumer.prototype.consumer = SourceMapConsumer;\n\n/**\n * Utility function to find the index of a source.  
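
// --- Illustrative sketch (not part of the vendored bundle): how sourceRoot is folded
// into the exposed `sources` list. Uses the synchronous consumer defined in this
// module (published as require('source-map').SourceMapConsumer in the 0.5/0.6 line);
// the raw map is made up.
var rawMapRoot = {
  version: 3,
  file: 'min.js',
  sourceRoot: 'http://example.com/www/js/',
  sources: ['one.js', 'two.js'],
  names: [],
  mappings: 'AAAA;ACAA'
};
var rootConsumer = new SourceMapConsumer(rawMapRoot);
console.log(rootConsumer.sources);
// -> [ 'http://example.com/www/js/one.js',
//      'http://example.com/www/js/two.js' ]
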
Returns -1 if not\n * found.\n */\nBasicSourceMapConsumer.prototype._findSourceIndex = function(aSource) {\n  var relativeSource = aSource;\n  if (this.sourceRoot != null) {\n    relativeSource = util.relative(this.sourceRoot, relativeSource);\n  }\n\n  if (this._sources.has(relativeSource)) {\n    return this._sources.indexOf(relativeSource);\n  }\n\n  // Maybe aSource is an absolute URL as returned by |sources|.  In\n  // this case we can't simply undo the transform.\n  var i;\n  for (i = 0; i < this._absoluteSources.length; ++i) {\n    if (this._absoluteSources[i] == aSource) {\n      return i;\n    }\n  }\n\n  return -1;\n};\n\n/**\n * Create a BasicSourceMapConsumer from a SourceMapGenerator.\n *\n * @param SourceMapGenerator aSourceMap\n *        The source map that will be consumed.\n * @param String aSourceMapURL\n *        The URL at which the source map can be found (optional)\n * @returns BasicSourceMapConsumer\n */\nBasicSourceMapConsumer.fromSourceMap =\n  function SourceMapConsumer_fromSourceMap(aSourceMap, aSourceMapURL) {\n    var smc = Object.create(BasicSourceMapConsumer.prototype);\n\n    var names = smc._names = ArraySet.fromArray(aSourceMap._names.toArray(), true);\n    var sources = smc._sources = ArraySet.fromArray(aSourceMap._sources.toArray(), true);\n    smc.sourceRoot = aSourceMap._sourceRoot;\n    smc.sourcesContent = aSourceMap._generateSourcesContent(smc._sources.toArray(),\n                                                            smc.sourceRoot);\n    smc.file = aSourceMap._file;\n    smc._sourceMapURL = aSourceMapURL;\n    smc._absoluteSources = smc._sources.toArray().map(function (s) {\n      return util.computeSourceURL(smc.sourceRoot, s, aSourceMapURL);\n    });\n\n    // Because we are modifying the entries (by converting string sources and\n    // names to indices into the sources and names ArraySets), we have to make\n    // a copy of the entry or else bad things happen. Shared mutable state\n    // strikes again! 
See github issue #191.\n\n    var generatedMappings = aSourceMap._mappings.toArray().slice();\n    var destGeneratedMappings = smc.__generatedMappings = [];\n    var destOriginalMappings = smc.__originalMappings = [];\n\n    for (var i = 0, length = generatedMappings.length; i < length; i++) {\n      var srcMapping = generatedMappings[i];\n      var destMapping = new Mapping;\n      destMapping.generatedLine = srcMapping.generatedLine;\n      destMapping.generatedColumn = srcMapping.generatedColumn;\n\n      if (srcMapping.source) {\n        destMapping.source = sources.indexOf(srcMapping.source);\n        destMapping.originalLine = srcMapping.originalLine;\n        destMapping.originalColumn = srcMapping.originalColumn;\n\n        if (srcMapping.name) {\n          destMapping.name = names.indexOf(srcMapping.name);\n        }\n\n        destOriginalMappings.push(destMapping);\n      }\n\n      destGeneratedMappings.push(destMapping);\n    }\n\n    quickSort(smc.__originalMappings, util.compareByOriginalPositions);\n\n    return smc;\n  };\n\n/**\n * The version of the source mapping spec that we are consuming.\n */\nBasicSourceMapConsumer.prototype._version = 3;\n\n/**\n * The list of original sources.\n */\nObject.defineProperty(BasicSourceMapConsumer.prototype, 'sources', {\n  get: function () {\n    return this._absoluteSources.slice();\n  }\n});\n\n/**\n * Provide the JIT with a nice shape / hidden class.\n */\nfunction Mapping() {\n  this.generatedLine = 0;\n  this.generatedColumn = 0;\n  this.source = null;\n  this.originalLine = null;\n  this.originalColumn = null;\n  this.name = null;\n}\n\n/**\n * Parse the mappings in a string in to a data structure which we can easily\n * query (the ordered arrays in the `this.__generatedMappings` and\n * `this.__originalMappings` properties).\n */\nBasicSourceMapConsumer.prototype._parseMappings =\n  function SourceMapConsumer_parseMappings(aStr, aSourceRoot) {\n    var generatedLine = 1;\n    var previousGeneratedColumn = 0;\n    var previousOriginalLine = 0;\n    var previousOriginalColumn = 0;\n    var previousSource = 0;\n    var previousName = 0;\n    var length = aStr.length;\n    var index = 0;\n    var cachedSegments = {};\n    var temp = {};\n    var originalMappings = [];\n    var generatedMappings = [];\n    var mapping, str, segment, end, value;\n\n    while (index < length) {\n      if (aStr.charAt(index) === ';') {\n        generatedLine++;\n        index++;\n        previousGeneratedColumn = 0;\n      }\n      else if (aStr.charAt(index) === ',') {\n        index++;\n      }\n      else {\n        mapping = new Mapping();\n        mapping.generatedLine = generatedLine;\n\n        // Because each offset is encoded relative to the previous one,\n        // many segments often have the same encoding. 
We can exploit this\n        // fact by caching the parsed variable length fields of each segment,\n        // allowing us to avoid a second parse if we encounter the same\n        // segment again.\n        for (end = index; end < length; end++) {\n          if (this._charIsMappingSeparator(aStr, end)) {\n            break;\n          }\n        }\n        str = aStr.slice(index, end);\n\n        segment = cachedSegments[str];\n        if (segment) {\n          index += str.length;\n        } else {\n          segment = [];\n          while (index < end) {\n            base64VLQ.decode(aStr, index, temp);\n            value = temp.value;\n            index = temp.rest;\n            segment.push(value);\n          }\n\n          if (segment.length === 2) {\n            throw new Error('Found a source, but no line and column');\n          }\n\n          if (segment.length === 3) {\n            throw new Error('Found a source and line, but no column');\n          }\n\n          cachedSegments[str] = segment;\n        }\n\n        // Generated column.\n        mapping.generatedColumn = previousGeneratedColumn + segment[0];\n        previousGeneratedColumn = mapping.generatedColumn;\n\n        if (segment.length > 1) {\n          // Original source.\n          mapping.source = previousSource + segment[1];\n          previousSource += segment[1];\n\n          // Original line.\n          mapping.originalLine = previousOriginalLine + segment[2];\n          previousOriginalLine = mapping.originalLine;\n          // Lines are stored 0-based\n          mapping.originalLine += 1;\n\n          // Original column.\n          mapping.originalColumn = previousOriginalColumn + segment[3];\n          previousOriginalColumn = mapping.originalColumn;\n\n          if (segment.length > 4) {\n            // Original name.\n            mapping.name = previousName + segment[4];\n            previousName += segment[4];\n          }\n        }\n\n        generatedMappings.push(mapping);\n        if (typeof mapping.originalLine === 'number') {\n          originalMappings.push(mapping);\n        }\n      }\n    }\n\n    quickSort(generatedMappings, util.compareByGeneratedPositionsDeflated);\n    this.__generatedMappings = generatedMappings;\n\n    quickSort(originalMappings, util.compareByOriginalPositions);\n    this.__originalMappings = originalMappings;\n  };\n\n/**\n * Find the mapping that best matches the hypothetical \"needle\" mapping that\n * we are searching for in the given \"haystack\" of mappings.\n */\nBasicSourceMapConsumer.prototype._findMapping =\n  function SourceMapConsumer_findMapping(aNeedle, aMappings, aLineName,\n                                         aColumnName, aComparator, aBias) {\n    // To return the position we are searching for, we must first find the\n    // mapping for the given position and then return the opposite position it\n    // points to. Because the mappings are sorted, we can use binary search to\n    // find the best mapping.\n\n    if (aNeedle[aLineName] <= 0) {\n      throw new TypeError('Line must be greater than or equal to 1, got '\n                          + aNeedle[aLineName]);\n    }\n    if (aNeedle[aColumnName] < 0) {\n      throw new TypeError('Column must be greater than or equal to 0, got '\n                          + aNeedle[aColumnName]);\n    }\n\n    return binarySearch.search(aNeedle, aMappings, aComparator, aBias);\n  };\n\n/**\n * Compute the last column for each generated mapping. 
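
// --- Illustrative sketch (not part of the vendored bundle): how the running deltas
// in _parseMappings accumulate. VLQ decoding is omitted; `decodedLines` is assumed to
// already hold decoded segments ([generatedColumn, source, originalLine, originalColumn]
// deltas), one array per generated line.
function accumulateSegmentsSketch(decodedLines) {
  var source = 0, originalLine = 0, originalColumn = 0;
  var out = [];
  decodedLines.forEach(function (segments, lineIdx) {
    var generatedColumn = 0;                 // resets at every ';' (new generated line)
    segments.forEach(function (seg) {
      generatedColumn += seg[0];
      source += seg[1];
      originalLine += seg[2];
      originalColumn += seg[3];
      out.push({
        generatedLine: lineIdx + 1,
        generatedColumn: generatedColumn,
        source: source,
        originalLine: originalLine + 1,      // stored 0-based, reported 1-based
        originalColumn: originalColumn
      });
    });
  });
  return out;
}

console.log(accumulateSegmentsSketch([
  [[0, 0, 0, 0], [6, 0, 0, 6]],              // generated line 1: two segments
  [[2, 0, 1, 0]]                             // generated line 2: one segment
]));
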
The last column is\n * inclusive.\n */\nBasicSourceMapConsumer.prototype.computeColumnSpans =\n  function SourceMapConsumer_computeColumnSpans() {\n    for (var index = 0; index < this._generatedMappings.length; ++index) {\n      var mapping = this._generatedMappings[index];\n\n      // Mappings do not contain a field for the last generated columnt. We\n      // can come up with an optimistic estimate, however, by assuming that\n      // mappings are contiguous (i.e. given two consecutive mappings, the\n      // first mapping ends where the second one starts).\n      if (index + 1 < this._generatedMappings.length) {\n        var nextMapping = this._generatedMappings[index + 1];\n\n        if (mapping.generatedLine === nextMapping.generatedLine) {\n          mapping.lastGeneratedColumn = nextMapping.generatedColumn - 1;\n          continue;\n        }\n      }\n\n      // The last mapping for each line spans the entire line.\n      mapping.lastGeneratedColumn = Infinity;\n    }\n  };\n\n/**\n * Returns the original source, line, and column information for the generated\n * source's line and column positions provided. The only argument is an object\n * with the following properties:\n *\n *   - line: The line number in the generated source.  The line number\n *     is 1-based.\n *   - column: The column number in the generated source.  The column\n *     number is 0-based.\n *   - bias: Either 'SourceMapConsumer.GREATEST_LOWER_BOUND' or\n *     'SourceMapConsumer.LEAST_UPPER_BOUND'. Specifies whether to return the\n *     closest element that is smaller than or greater than the one we are\n *     searching for, respectively, if the exact element cannot be found.\n *     Defaults to 'SourceMapConsumer.GREATEST_LOWER_BOUND'.\n *\n * and an object is returned with the following properties:\n *\n *   - source: The original source file, or null.\n *   - line: The line number in the original source, or null.  The\n *     line number is 1-based.\n *   - column: The column number in the original source, or null.  
The\n *     column number is 0-based.\n *   - name: The original identifier, or null.\n */\nBasicSourceMapConsumer.prototype.originalPositionFor =\n  function SourceMapConsumer_originalPositionFor(aArgs) {\n    var needle = {\n      generatedLine: util.getArg(aArgs, 'line'),\n      generatedColumn: util.getArg(aArgs, 'column')\n    };\n\n    var index = this._findMapping(\n      needle,\n      this._generatedMappings,\n      \"generatedLine\",\n      \"generatedColumn\",\n      util.compareByGeneratedPositionsDeflated,\n      util.getArg(aArgs, 'bias', SourceMapConsumer.GREATEST_LOWER_BOUND)\n    );\n\n    if (index >= 0) {\n      var mapping = this._generatedMappings[index];\n\n      if (mapping.generatedLine === needle.generatedLine) {\n        var source = util.getArg(mapping, 'source', null);\n        if (source !== null) {\n          source = this._sources.at(source);\n          source = util.computeSourceURL(this.sourceRoot, source, this._sourceMapURL);\n        }\n        var name = util.getArg(mapping, 'name', null);\n        if (name !== null) {\n          name = this._names.at(name);\n        }\n        return {\n          source: source,\n          line: util.getArg(mapping, 'originalLine', null),\n          column: util.getArg(mapping, 'originalColumn', null),\n          name: name\n        };\n      }\n    }\n\n    return {\n      source: null,\n      line: null,\n      column: null,\n      name: null\n    };\n  };\n\n/**\n * Return true if we have the source content for every source in the source\n * map, false otherwise.\n */\nBasicSourceMapConsumer.prototype.hasContentsOfAllSources =\n  function BasicSourceMapConsumer_hasContentsOfAllSources() {\n    if (!this.sourcesContent) {\n      return false;\n    }\n    return this.sourcesContent.length >= this._sources.size() &&\n      !this.sourcesContent.some(function (sc) { return sc == null; });\n  };\n\n/**\n * Returns the original source content. The only argument is the url of the\n * original source file. Returns null if no original source content is\n * available.\n */\nBasicSourceMapConsumer.prototype.sourceContentFor =\n  function SourceMapConsumer_sourceContentFor(aSource, nullOnMissing) {\n    if (!this.sourcesContent) {\n      return null;\n    }\n\n    var index = this._findSourceIndex(aSource);\n    if (index >= 0) {\n      return this.sourcesContent[index];\n    }\n\n    var relativeSource = aSource;\n    if (this.sourceRoot != null) {\n      relativeSource = util.relative(this.sourceRoot, relativeSource);\n    }\n\n    var url;\n    if (this.sourceRoot != null\n        && (url = util.urlParse(this.sourceRoot))) {\n      // XXX: file:// URIs and absolute paths lead to unexpected behavior for\n      // many users. We can help them out when they expect file:// URIs to\n      // behave like it would if they were running a local HTTP server. See\n      // https://bugzilla.mozilla.org/show_bug.cgi?id=885597.\n      var fileUriAbsPath = relativeSource.replace(/^file:\\/\\//, \"\");\n      if (url.scheme == \"file\"\n          && this._sources.has(fileUriAbsPath)) {\n        return this.sourcesContent[this._sources.indexOf(fileUriAbsPath)]\n      }\n\n      if ((!url.path || url.path == \"/\")\n          && this._sources.has(\"/\" + relativeSource)) {\n        return this.sourcesContent[this._sources.indexOf(\"/\" + relativeSource)];\n      }\n    }\n\n    // This function is used recursively from\n    // IndexedSourceMapConsumer.prototype.sourceContentFor. 
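
// --- Illustrative sketch (not part of the vendored bundle): point queries in both
// directions with originalPositionFor() above and generatedPositionFor() defined just
// below, using the synchronous API of this 0.5/0.6-era build. The raw map is made up.
var rawMapQuery = {
  version: 3,
  file: 'out.js',
  sourceRoot: '',
  sources: ['a.js'],
  names: ['foo'],
  mappings: 'AAAAA'
};
var queryConsumer = new SourceMapConsumer(rawMapQuery);

console.log(queryConsumer.originalPositionFor({ line: 1, column: 0 }));
// -> { source: 'a.js', line: 1, column: 0, name: 'foo' }

console.log(queryConsumer.generatedPositionFor({ source: 'a.js', line: 1, column: 0 }));
// -> { line: 1, column: 0, lastColumn: null }   (lastColumn is only populated once
//                                                computeColumnSpans() has been called)

console.log(queryConsumer.hasContentsOfAllSources());
// -> false, because the map above carries no sourcesContent
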
In that case, we\n    // don't want to throw if we can't find the source - we just want to\n    // return null, so we provide a flag to exit gracefully.\n    if (nullOnMissing) {\n      return null;\n    }\n    else {\n      throw new Error('\"' + relativeSource + '\" is not in the SourceMap.');\n    }\n  };\n\n/**\n * Returns the generated line and column information for the original source,\n * line, and column positions provided. The only argument is an object with\n * the following properties:\n *\n *   - source: The filename of the original source.\n *   - line: The line number in the original source.  The line number\n *     is 1-based.\n *   - column: The column number in the original source.  The column\n *     number is 0-based.\n *   - bias: Either 'SourceMapConsumer.GREATEST_LOWER_BOUND' or\n *     'SourceMapConsumer.LEAST_UPPER_BOUND'. Specifies whether to return the\n *     closest element that is smaller than or greater than the one we are\n *     searching for, respectively, if the exact element cannot be found.\n *     Defaults to 'SourceMapConsumer.GREATEST_LOWER_BOUND'.\n *\n * and an object is returned with the following properties:\n *\n *   - line: The line number in the generated source, or null.  The\n *     line number is 1-based.\n *   - column: The column number in the generated source, or null.\n *     The column number is 0-based.\n */\nBasicSourceMapConsumer.prototype.generatedPositionFor =\n  function SourceMapConsumer_generatedPositionFor(aArgs) {\n    var source = util.getArg(aArgs, 'source');\n    source = this._findSourceIndex(source);\n    if (source < 0) {\n      return {\n        line: null,\n        column: null,\n        lastColumn: null\n      };\n    }\n\n    var needle = {\n      source: source,\n      originalLine: util.getArg(aArgs, 'line'),\n      originalColumn: util.getArg(aArgs, 'column')\n    };\n\n    var index = this._findMapping(\n      needle,\n      this._originalMappings,\n      \"originalLine\",\n      \"originalColumn\",\n      util.compareByOriginalPositions,\n      util.getArg(aArgs, 'bias', SourceMapConsumer.GREATEST_LOWER_BOUND)\n    );\n\n    if (index >= 0) {\n      var mapping = this._originalMappings[index];\n\n      if (mapping.source === needle.source) {\n        return {\n          line: util.getArg(mapping, 'generatedLine', null),\n          column: util.getArg(mapping, 'generatedColumn', null),\n          lastColumn: util.getArg(mapping, 'lastGeneratedColumn', null)\n        };\n      }\n    }\n\n    return {\n      line: null,\n      column: null,\n      lastColumn: null\n    };\n  };\n\nexports.BasicSourceMapConsumer = BasicSourceMapConsumer;\n\n/**\n * An IndexedSourceMapConsumer instance represents a parsed source map which\n * we can query for information. It differs from BasicSourceMapConsumer in\n * that it takes \"indexed\" source maps (i.e. ones with a \"sections\" field) as\n * input.\n *\n * The first parameter is a raw source map (either as a JSON string, or already\n * parsed to an object). According to the spec for indexed source maps, they\n * have the following attributes:\n *\n *   - version: Which version of the source map spec this map is following.\n *   - file: Optional. 
The generated file this source map is associated with.\n *   - sections: A list of section definitions.\n *\n * Each value under the \"sections\" field has two fields:\n *   - offset: The offset into the original specified at which this section\n *       begins to apply, defined as an object with a \"line\" and \"column\"\n *       field.\n *   - map: A source map definition. This source map could also be indexed,\n *       but doesn't have to be.\n *\n * Instead of the \"map\" field, it's also possible to have a \"url\" field\n * specifying a URL to retrieve a source map from, but that's currently\n * unsupported.\n *\n * Here's an example source map, taken from the source map spec[0], but\n * modified to omit a section which uses the \"url\" field.\n *\n *  {\n *    version : 3,\n *    file: \"app.js\",\n *    sections: [{\n *      offset: {line:100, column:10},\n *      map: {\n *        version : 3,\n *        file: \"section.js\",\n *        sources: [\"foo.js\", \"bar.js\"],\n *        names: [\"src\", \"maps\", \"are\", \"fun\"],\n *        mappings: \"AAAA,E;;ABCDE;\"\n *      }\n *    }],\n *  }\n *\n * The second parameter, if given, is a string whose value is the URL\n * at which the source map was found.  This URL is used to compute the\n * sources array.\n *\n * [0]: https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit#heading=h.535es3xeprgt\n */\nfunction IndexedSourceMapConsumer(aSourceMap, aSourceMapURL) {\n  var sourceMap = aSourceMap;\n  if (typeof aSourceMap === 'string') {\n    sourceMap = util.parseSourceMapInput(aSourceMap);\n  }\n\n  var version = util.getArg(sourceMap, 'version');\n  var sections = util.getArg(sourceMap, 'sections');\n\n  if (version != this._version) {\n    throw new Error('Unsupported version: ' + version);\n  }\n\n  this._sources = new ArraySet();\n  this._names = new ArraySet();\n\n  var lastOffset = {\n    line: -1,\n    column: 0\n  };\n  this._sections = sections.map(function (s) {\n    if (s.url) {\n      // The url field will require support for asynchronicity.\n      // See https://github.com/mozilla/source-map/issues/16\n      throw new Error('Support for url field in sections not implemented.');\n    }\n    var offset = util.getArg(s, 'offset');\n    var offsetLine = util.getArg(offset, 'line');\n    var offsetColumn = util.getArg(offset, 'column');\n\n    if (offsetLine < lastOffset.line ||\n        (offsetLine === lastOffset.line && offsetColumn < lastOffset.column)) {\n      throw new Error('Section offsets must be ordered and non-overlapping.');\n    }\n    lastOffset = offset;\n\n    return {\n      generatedOffset: {\n        // The offset fields are 0-based, but we use 1-based indices when\n        // encoding/decoding from VLQ.\n        generatedLine: offsetLine + 1,\n        generatedColumn: offsetColumn + 1\n      },\n      consumer: new SourceMapConsumer(util.getArg(s, 'map'), aSourceMapURL)\n    }\n  });\n}\n\nIndexedSourceMapConsumer.prototype = Object.create(SourceMapConsumer.prototype);\nIndexedSourceMapConsumer.prototype.constructor = SourceMapConsumer;\n\n/**\n * The version of the source mapping spec that we are consuming.\n */\nIndexedSourceMapConsumer.prototype._version = 3;\n\n/**\n * The list of original sources.\n */\nObject.defineProperty(IndexedSourceMapConsumer.prototype, 'sources', {\n  get: function () {\n    var sources = [];\n    for (var i = 0; i < this._sections.length; i++) {\n      for (var j = 0; j < this._sections[i].consumer.sources.length; j++) {\n        
sources.push(this._sections[i].consumer.sources[j]);\n      }\n    }\n    return sources;\n  }\n});\n\n/**\n * Returns the original source, line, and column information for the generated\n * source's line and column positions provided. The only argument is an object\n * with the following properties:\n *\n *   - line: The line number in the generated source.  The line number\n *     is 1-based.\n *   - column: The column number in the generated source.  The column\n *     number is 0-based.\n *\n * and an object is returned with the following properties:\n *\n *   - source: The original source file, or null.\n *   - line: The line number in the original source, or null.  The\n *     line number is 1-based.\n *   - column: The column number in the original source, or null.  The\n *     column number is 0-based.\n *   - name: The original identifier, or null.\n */\nIndexedSourceMapConsumer.prototype.originalPositionFor =\n  function IndexedSourceMapConsumer_originalPositionFor(aArgs) {\n    var needle = {\n      generatedLine: util.getArg(aArgs, 'line'),\n      generatedColumn: util.getArg(aArgs, 'column')\n    };\n\n    // Find the section containing the generated position we're trying to map\n    // to an original position.\n    var sectionIndex = binarySearch.search(needle, this._sections,\n      function(needle, section) {\n        var cmp = needle.generatedLine - section.generatedOffset.generatedLine;\n        if (cmp) {\n          return cmp;\n        }\n\n        return (needle.generatedColumn -\n                section.generatedOffset.generatedColumn);\n      });\n    var section = this._sections[sectionIndex];\n\n    if (!section) {\n      return {\n        source: null,\n        line: null,\n        column: null,\n        name: null\n      };\n    }\n\n    return section.consumer.originalPositionFor({\n      line: needle.generatedLine -\n        (section.generatedOffset.generatedLine - 1),\n      column: needle.generatedColumn -\n        (section.generatedOffset.generatedLine === needle.generatedLine\n         ? section.generatedOffset.generatedColumn - 1\n         : 0),\n      bias: aArgs.bias\n    });\n  };\n\n/**\n * Return true if we have the source content for every source in the source\n * map, false otherwise.\n */\nIndexedSourceMapConsumer.prototype.hasContentsOfAllSources =\n  function IndexedSourceMapConsumer_hasContentsOfAllSources() {\n    return this._sections.every(function (s) {\n      return s.consumer.hasContentsOfAllSources();\n    });\n  };\n\n/**\n * Returns the original source content. The only argument is the url of the\n * original source file. Returns null if no original source content is\n * available.\n */\nIndexedSourceMapConsumer.prototype.sourceContentFor =\n  function IndexedSourceMapConsumer_sourceContentFor(aSource, nullOnMissing) {\n    for (var i = 0; i < this._sections.length; i++) {\n      var section = this._sections[i];\n\n      var content = section.consumer.sourceContentFor(aSource, true);\n      if (content) {\n        return content;\n      }\n    }\n    if (nullOnMissing) {\n      return null;\n    }\n    else {\n      throw new Error('\"' + aSource + '\" is not in the SourceMap.');\n    }\n  };\n\n/**\n * Returns the generated line and column information for the original source,\n * line, and column positions provided. The only argument is an object with\n * the following properties:\n *\n *   - source: The filename of the original source.\n *   - line: The line number in the original source.  
The line number\n *     is 1-based.\n *   - column: The column number in the original source.  The column\n *     number is 0-based.\n *\n * and an object is returned with the following properties:\n *\n *   - line: The line number in the generated source, or null.  The\n *     line number is 1-based. \n *   - column: The column number in the generated source, or null.\n *     The column number is 0-based.\n */\nIndexedSourceMapConsumer.prototype.generatedPositionFor =\n  function IndexedSourceMapConsumer_generatedPositionFor(aArgs) {\n    for (var i = 0; i < this._sections.length; i++) {\n      var section = this._sections[i];\n\n      // Only consider this section if the requested source is in the list of\n      // sources of the consumer.\n      if (section.consumer._findSourceIndex(util.getArg(aArgs, 'source')) === -1) {\n        continue;\n      }\n      var generatedPosition = section.consumer.generatedPositionFor(aArgs);\n      if (generatedPosition) {\n        var ret = {\n          line: generatedPosition.line +\n            (section.generatedOffset.generatedLine - 1),\n          column: generatedPosition.column +\n            (section.generatedOffset.generatedLine === generatedPosition.line\n             ? section.generatedOffset.generatedColumn - 1\n             : 0)\n        };\n        return ret;\n      }\n    }\n\n    return {\n      line: null,\n      column: null\n    };\n  };\n\n/**\n * Parse the mappings in a string in to a data structure which we can easily\n * query (the ordered arrays in the `this.__generatedMappings` and\n * `this.__originalMappings` properties).\n */\nIndexedSourceMapConsumer.prototype._parseMappings =\n  function IndexedSourceMapConsumer_parseMappings(aStr, aSourceRoot) {\n    this.__generatedMappings = [];\n    this.__originalMappings = [];\n    for (var i = 0; i < this._sections.length; i++) {\n      var section = this._sections[i];\n      var sectionMappings = section.consumer._generatedMappings;\n      for (var j = 0; j < sectionMappings.length; j++) {\n        var mapping = sectionMappings[j];\n\n        var source = section.consumer._sources.at(mapping.source);\n        source = util.computeSourceURL(section.consumer.sourceRoot, source, this._sourceMapURL);\n        this._sources.add(source);\n        source = this._sources.indexOf(source);\n\n        var name = null;\n        if (mapping.name) {\n          name = section.consumer._names.at(mapping.name);\n          this._names.add(name);\n          name = this._names.indexOf(name);\n        }\n\n        // The mappings coming from the consumer for the section have\n        // generated positions relative to the start of the section, so we\n        // need to offset them to be relative to the start of the concatenated\n        // generated file.\n        var adjustedMapping = {\n          source: source,\n          generatedLine: mapping.generatedLine +\n            (section.generatedOffset.generatedLine - 1),\n          generatedColumn: mapping.generatedColumn +\n            (section.generatedOffset.generatedLine === mapping.generatedLine\n            ? 
section.generatedOffset.generatedColumn - 1\n            : 0),\n          originalLine: mapping.originalLine,\n          originalColumn: mapping.originalColumn,\n          name: name\n        };\n\n        this.__generatedMappings.push(adjustedMapping);\n        if (typeof adjustedMapping.originalLine === 'number') {\n          this.__originalMappings.push(adjustedMapping);\n        }\n      }\n    }\n\n    quickSort(this.__generatedMappings, util.compareByGeneratedPositionsDeflated);\n    quickSort(this.__originalMappings, util.compareByOriginalPositions);\n  };\n\nexports.IndexedSourceMapConsumer = IndexedSourceMapConsumer;\n\n\n\n//////////////////\n// WEBPACK FOOTER\n// ./lib/source-map-consumer.js\n// module id = 7\n// module chunks = 0","/* -*- Mode: js; js-indent-level: 2; -*- */\n/*\n * Copyright 2011 Mozilla Foundation and contributors\n * Licensed under the New BSD license. See LICENSE or:\n * http://opensource.org/licenses/BSD-3-Clause\n */\n\nexports.GREATEST_LOWER_BOUND = 1;\nexports.LEAST_UPPER_BOUND = 2;\n\n/**\n * Recursive implementation of binary search.\n *\n * @param aLow Indices here and lower do not contain the needle.\n * @param aHigh Indices here and higher do not contain the needle.\n * @param aNeedle The element being searched for.\n * @param aHaystack The non-empty array being searched.\n * @param aCompare Function which takes two elements and returns -1, 0, or 1.\n * @param aBias Either 'binarySearch.GREATEST_LOWER_BOUND' or\n *     'binarySearch.LEAST_UPPER_BOUND'. Specifies whether to return the\n *     closest element that is smaller than or greater than the one we are\n *     searching for, respectively, if the exact element cannot be found.\n */\nfunction recursiveSearch(aLow, aHigh, aNeedle, aHaystack, aCompare, aBias) {\n  // This function terminates when one of the following is true:\n  //\n  //   1. We find the exact element we are looking for.\n  //\n  //   2. We did not find the exact element, but we can return the index of\n  //      the next-closest element.\n  //\n  //   3. We did not find the exact element, and there is no next-closest\n  //      element than the one we are searching for, so we return -1.\n  var mid = Math.floor((aHigh - aLow) / 2) + aLow;\n  var cmp = aCompare(aNeedle, aHaystack[mid], true);\n  if (cmp === 0) {\n    // Found the element we are looking for.\n    return mid;\n  }\n  else if (cmp > 0) {\n    // Our needle is greater than aHaystack[mid].\n    if (aHigh - mid > 1) {\n      // The element is in the upper half.\n      return recursiveSearch(mid, aHigh, aNeedle, aHaystack, aCompare, aBias);\n    }\n\n    // The exact needle element was not found in this haystack. Determine if\n    // we are in termination case (3) or (2) and return the appropriate thing.\n    if (aBias == exports.LEAST_UPPER_BOUND) {\n      return aHigh < aHaystack.length ? aHigh : -1;\n    } else {\n      return mid;\n    }\n  }\n  else {\n    // Our needle is less than aHaystack[mid].\n    if (mid - aLow > 1) {\n      // The element is in the lower half.\n      return recursiveSearch(aLow, mid, aNeedle, aHaystack, aCompare, aBias);\n    }\n\n    // we are in termination case (3) or (2) and return the appropriate thing.\n    if (aBias == exports.LEAST_UPPER_BOUND) {\n      return mid;\n    } else {\n      return aLow < 0 ? -1 : aLow;\n    }\n  }\n}\n\n/**\n * This is an implementation of binary search which will always try and return\n * the index of the closest element if there is no exact hit. 
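
// --- Illustrative sketch (not part of the vendored bundle): what the two bias modes
// mean for the search() exported just below. Assumes the internal './binary-search'
// module can be required directly; the comparator receives (needle, element).
var bsSketch = require('./binary-search');
var haystack = [2, 4, 6, 8];
var numericCmp = function (needle, element) { return needle - element; };

// 5 is not present: GREATEST_LOWER_BOUND returns the index of the closest smaller
// element (4), LEAST_UPPER_BOUND the index of the closest larger element (6).
console.log(bsSketch.search(5, haystack, numericCmp, bsSketch.GREATEST_LOWER_BOUND)); // 1
console.log(bsSketch.search(5, haystack, numericCmp, bsSketch.LEAST_UPPER_BOUND));    // 2
console.log(bsSketch.search(9, haystack, numericCmp, bsSketch.LEAST_UPPER_BOUND));    // -1
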
This is because\n * mappings between original and generated line/col pairs are single points,\n * and there is an implicit region between each of them, so a miss just means\n * that you aren't on the very start of a region.\n *\n * @param aNeedle The element you are looking for.\n * @param aHaystack The array that is being searched.\n * @param aCompare A function which takes the needle and an element in the\n *     array and returns -1, 0, or 1 depending on whether the needle is less\n *     than, equal to, or greater than the element, respectively.\n * @param aBias Either 'binarySearch.GREATEST_LOWER_BOUND' or\n *     'binarySearch.LEAST_UPPER_BOUND'. Specifies whether to return the\n *     closest element that is smaller than or greater than the one we are\n *     searching for, respectively, if the exact element cannot be found.\n *     Defaults to 'binarySearch.GREATEST_LOWER_BOUND'.\n */\nexports.search = function search(aNeedle, aHaystack, aCompare, aBias) {\n  if (aHaystack.length === 0) {\n    return -1;\n  }\n\n  var index = recursiveSearch(-1, aHaystack.length, aNeedle, aHaystack,\n                              aCompare, aBias || exports.GREATEST_LOWER_BOUND);\n  if (index < 0) {\n    return -1;\n  }\n\n  // We have found either the exact element, or the next-closest element than\n  // the one we are searching for. However, there may be more than one such\n  // element. Make sure we always return the smallest of these.\n  while (index - 1 >= 0) {\n    if (aCompare(aHaystack[index], aHaystack[index - 1], true) !== 0) {\n      break;\n    }\n    --index;\n  }\n\n  return index;\n};\n\n\n\n//////////////////\n// WEBPACK FOOTER\n// ./lib/binary-search.js\n// module id = 8\n// module chunks = 0","/* -*- Mode: js; js-indent-level: 2; -*- */\n/*\n * Copyright 2011 Mozilla Foundation and contributors\n * Licensed under the New BSD license. See LICENSE or:\n * http://opensource.org/licenses/BSD-3-Clause\n */\n\n// It turns out that some (most?) JavaScript engines don't self-host\n// `Array.prototype.sort`. This makes sense because C++ will likely remain\n// faster than JS when doing raw CPU-intensive sorting. However, when using a\n// custom comparator function, calling back and forth between the VM's C++ and\n// JIT'd JS is rather slow *and* loses JIT type information, resulting in\n// worse generated code for the comparator function than would be optimal. In\n// fact, when sorting with a comparator, these costs outweigh the benefits of\n// sorting in C++. By using our own JS-implemented Quick Sort (below), we get\n// a ~3500ms mean speed-up in `bench/bench.html`.\n\n/**\n * Swap the elements indexed by `x` and `y` in the array `ary`.\n *\n * @param {Array} ary\n *        The array.\n * @param {Number} x\n *        The index of the first item.\n * @param {Number} y\n *        The index of the second item.\n */\nfunction swap(ary, x, y) {\n  var temp = ary[x];\n  ary[x] = ary[y];\n  ary[y] = temp;\n}\n\n/**\n * Returns a random integer within the range `low .. 
high` inclusive.\n *\n * @param {Number} low\n *        The lower bound on the range.\n * @param {Number} high\n *        The upper bound on the range.\n */\nfunction randomIntInRange(low, high) {\n  return Math.round(low + (Math.random() * (high - low)));\n}\n\n/**\n * The Quick Sort algorithm.\n *\n * @param {Array} ary\n *        An array to sort.\n * @param {function} comparator\n *        Function to use to compare two items.\n * @param {Number} p\n *        Start index of the array\n * @param {Number} r\n *        End index of the array\n */\nfunction doQuickSort(ary, comparator, p, r) {\n  // If our lower bound is less than our upper bound, we (1) partition the\n  // array into two pieces and (2) recurse on each half. If it is not, this is\n  // the empty array and our base case.\n\n  if (p < r) {\n    // (1) Partitioning.\n    //\n    // The partitioning chooses a pivot between `p` and `r` and moves all\n    // elements that are less than or equal to the pivot to the before it, and\n    // all the elements that are greater than it after it. The effect is that\n    // once partition is done, the pivot is in the exact place it will be when\n    // the array is put in sorted order, and it will not need to be moved\n    // again. This runs in O(n) time.\n\n    // Always choose a random pivot so that an input array which is reverse\n    // sorted does not cause O(n^2) running time.\n    var pivotIndex = randomIntInRange(p, r);\n    var i = p - 1;\n\n    swap(ary, pivotIndex, r);\n    var pivot = ary[r];\n\n    // Immediately after `j` is incremented in this loop, the following hold\n    // true:\n    //\n    //   * Every element in `ary[p .. i]` is less than or equal to the pivot.\n    //\n    //   * Every element in `ary[i+1 .. j-1]` is greater than the pivot.\n    for (var j = p; j < r; j++) {\n      if (comparator(ary[j], pivot) <= 0) {\n        i += 1;\n        swap(ary, i, j);\n      }\n    }\n\n    swap(ary, i + 1, j);\n    var q = i + 1;\n\n    // (2) Recurse on each half.\n\n    doQuickSort(ary, comparator, p, q - 1);\n    doQuickSort(ary, comparator, q + 1, r);\n  }\n}\n\n/**\n * Sort the given array in-place with the given comparator function.\n *\n * @param {Array} ary\n *        An array to sort.\n * @param {function} comparator\n *        Function to use to compare two items.\n */\nexports.quickSort = function (ary, comparator) {\n  doQuickSort(ary, comparator, 0, ary.length - 1);\n};\n\n\n\n//////////////////\n// WEBPACK FOOTER\n// ./lib/quick-sort.js\n// module id = 9\n// module chunks = 0","/* -*- Mode: js; js-indent-level: 2; -*- */\n/*\n * Copyright 2011 Mozilla Foundation and contributors\n * Licensed under the New BSD license. See LICENSE or:\n * http://opensource.org/licenses/BSD-3-Clause\n */\n\nvar SourceMapGenerator = require('./source-map-generator').SourceMapGenerator;\nvar util = require('./util');\n\n// Matches a Windows-style `\\r\\n` newline or a `\\n` newline used by all other\n// operating systems these days (capturing the result).\nvar REGEX_NEWLINE = /(\\r?\\n)/;\n\n// Newline character code for charCodeAt() comparisons\nvar NEWLINE_CODE = 10;\n\n// Private symbol for identifying `SourceNode`s when multiple versions of\n// the source-map library are loaded. 
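
// --- Illustrative sketch (not part of the vendored bundle): the exported quickSort()
// sorts in place with a caller-supplied comparator, exactly as the consumer does for
// its mapping arrays. Assumes the internal './quick-sort' module can be required.
var quickSortSketch = require('./quick-sort').quickSort;

var mappingsToSort = [
  { generatedLine: 2, generatedColumn: 0 },
  { generatedLine: 1, generatedColumn: 5 },
  { generatedLine: 1, generatedColumn: 0 }
];
quickSortSketch(mappingsToSort, function (a, b) {
  return (a.generatedLine - b.generatedLine) || (a.generatedColumn - b.generatedColumn);
});
console.log(mappingsToSort);
// -> [ { generatedLine: 1, generatedColumn: 0 },
//      { generatedLine: 1, generatedColumn: 5 },
//      { generatedLine: 2, generatedColumn: 0 } ]
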
This MUST NOT CHANGE across\n// versions!\nvar isSourceNode = \"$$$isSourceNode$$$\";\n\n/**\n * SourceNodes provide a way to abstract over interpolating/concatenating\n * snippets of generated JavaScript source code while maintaining the line and\n * column information associated with the original source code.\n *\n * @param aLine The original line number.\n * @param aColumn The original column number.\n * @param aSource The original source's filename.\n * @param aChunks Optional. An array of strings which are snippets of\n *        generated JS, or other SourceNodes.\n * @param aName The original identifier.\n */\nfunction SourceNode(aLine, aColumn, aSource, aChunks, aName) {\n  this.children = [];\n  this.sourceContents = {};\n  this.line = aLine == null ? null : aLine;\n  this.column = aColumn == null ? null : aColumn;\n  this.source = aSource == null ? null : aSource;\n  this.name = aName == null ? null : aName;\n  this[isSourceNode] = true;\n  if (aChunks != null) this.add(aChunks);\n}\n\n/**\n * Creates a SourceNode from generated code and a SourceMapConsumer.\n *\n * @param aGeneratedCode The generated code\n * @param aSourceMapConsumer The SourceMap for the generated code\n * @param aRelativePath Optional. The path that relative sources in the\n *        SourceMapConsumer should be relative to.\n */\nSourceNode.fromStringWithSourceMap =\n  function SourceNode_fromStringWithSourceMap(aGeneratedCode, aSourceMapConsumer, aRelativePath) {\n    // The SourceNode we want to fill with the generated code\n    // and the SourceMap\n    var node = new SourceNode();\n\n    // All even indices of this array are one line of the generated code,\n    // while all odd indices are the newlines between two adjacent lines\n    // (since `REGEX_NEWLINE` captures its match).\n    // Processed fragments are accessed by calling `shiftNextLine`.\n    var remainingLines = aGeneratedCode.split(REGEX_NEWLINE);\n    var remainingLinesIndex = 0;\n    var shiftNextLine = function() {\n      var lineContents = getNextLine();\n      // The last line of a file might not have a newline.\n      var newLine = getNextLine() || \"\";\n      return lineContents + newLine;\n\n      function getNextLine() {\n        return remainingLinesIndex < remainingLines.length ?\n            remainingLines[remainingLinesIndex++] : undefined;\n      }\n    };\n\n    // We need to remember the position of \"remainingLines\"\n    var lastGeneratedLine = 1, lastGeneratedColumn = 0;\n\n    // The generate SourceNodes we need a code range.\n    // To extract it current and last mapping is used.\n    // Here we store the last mapping.\n    var lastMapping = null;\n\n    aSourceMapConsumer.eachMapping(function (mapping) {\n      if (lastMapping !== null) {\n        // We add the code from \"lastMapping\" to \"mapping\":\n        // First check if there is a new line in between.\n        if (lastGeneratedLine < mapping.generatedLine) {\n          // Associate first line with \"lastMapping\"\n          addMappingWithCode(lastMapping, shiftNextLine());\n          lastGeneratedLine++;\n          lastGeneratedColumn = 0;\n          // The remaining code is added without mapping\n        } else {\n          // There is no new line in between.\n          // Associate the code between \"lastGeneratedColumn\" and\n          // \"mapping.generatedColumn\" with \"lastMapping\"\n          var nextLine = remainingLines[remainingLinesIndex] || '';\n          var code = nextLine.substr(0, mapping.generatedColumn -\n                                        
lastGeneratedColumn);\n          remainingLines[remainingLinesIndex] = nextLine.substr(mapping.generatedColumn -\n                                              lastGeneratedColumn);\n          lastGeneratedColumn = mapping.generatedColumn;\n          addMappingWithCode(lastMapping, code);\n          // No more remaining code, continue\n          lastMapping = mapping;\n          return;\n        }\n      }\n      // We add the generated code until the first mapping\n      // to the SourceNode without any mapping.\n      // Each line is added as separate string.\n      while (lastGeneratedLine < mapping.generatedLine) {\n        node.add(shiftNextLine());\n        lastGeneratedLine++;\n      }\n      if (lastGeneratedColumn < mapping.generatedColumn) {\n        var nextLine = remainingLines[remainingLinesIndex] || '';\n        node.add(nextLine.substr(0, mapping.generatedColumn));\n        remainingLines[remainingLinesIndex] = nextLine.substr(mapping.generatedColumn);\n        lastGeneratedColumn = mapping.generatedColumn;\n      }\n      lastMapping = mapping;\n    }, this);\n    // We have processed all mappings.\n    if (remainingLinesIndex < remainingLines.length) {\n      if (lastMapping) {\n        // Associate the remaining code in the current line with \"lastMapping\"\n        addMappingWithCode(lastMapping, shiftNextLine());\n      }\n      // and add the remaining lines without any mapping\n      node.add(remainingLines.splice(remainingLinesIndex).join(\"\"));\n    }\n\n    // Copy sourcesContent into SourceNode\n    aSourceMapConsumer.sources.forEach(function (sourceFile) {\n      var content = aSourceMapConsumer.sourceContentFor(sourceFile);\n      if (content != null) {\n        if (aRelativePath != null) {\n          sourceFile = util.join(aRelativePath, sourceFile);\n        }\n        node.setSourceContent(sourceFile, content);\n      }\n    });\n\n    return node;\n\n    function addMappingWithCode(mapping, code) {\n      if (mapping === null || mapping.source === undefined) {\n        node.add(code);\n      } else {\n        var source = aRelativePath\n          ? util.join(aRelativePath, mapping.source)\n          : mapping.source;\n        node.add(new SourceNode(mapping.originalLine,\n                                mapping.originalColumn,\n                                source,\n                                code,\n                                mapping.name));\n      }\n    }\n  };\n\n/**\n * Add a chunk of generated JS to this source node.\n *\n * @param aChunk A string snippet of generated JS code, another instance of\n *        SourceNode, or an array where each member is one of those things.\n */\nSourceNode.prototype.add = function SourceNode_add(aChunk) {\n  if (Array.isArray(aChunk)) {\n    aChunk.forEach(function (chunk) {\n      this.add(chunk);\n    }, this);\n  }\n  else if (aChunk[isSourceNode] || typeof aChunk === \"string\") {\n    if (aChunk) {\n      this.children.push(aChunk);\n    }\n  }\n  else {\n    throw new TypeError(\n      \"Expected a SourceNode, string, or an array of SourceNodes and strings. 
Got \" + aChunk\n    );\n  }\n  return this;\n};\n\n/**\n * Add a chunk of generated JS to the beginning of this source node.\n *\n * @param aChunk A string snippet of generated JS code, another instance of\n *        SourceNode, or an array where each member is one of those things.\n */\nSourceNode.prototype.prepend = function SourceNode_prepend(aChunk) {\n  if (Array.isArray(aChunk)) {\n    for (var i = aChunk.length-1; i >= 0; i--) {\n      this.prepend(aChunk[i]);\n    }\n  }\n  else if (aChunk[isSourceNode] || typeof aChunk === \"string\") {\n    this.children.unshift(aChunk);\n  }\n  else {\n    throw new TypeError(\n      \"Expected a SourceNode, string, or an array of SourceNodes and strings. Got \" + aChunk\n    );\n  }\n  return this;\n};\n\n/**\n * Walk over the tree of JS snippets in this node and its children. The\n * walking function is called once for each snippet of JS and is passed that\n * snippet and the its original associated source's line/column location.\n *\n * @param aFn The traversal function.\n */\nSourceNode.prototype.walk = function SourceNode_walk(aFn) {\n  var chunk;\n  for (var i = 0, len = this.children.length; i < len; i++) {\n    chunk = this.children[i];\n    if (chunk[isSourceNode]) {\n      chunk.walk(aFn);\n    }\n    else {\n      if (chunk !== '') {\n        aFn(chunk, { source: this.source,\n                     line: this.line,\n                     column: this.column,\n                     name: this.name });\n      }\n    }\n  }\n};\n\n/**\n * Like `String.prototype.join` except for SourceNodes. Inserts `aStr` between\n * each of `this.children`.\n *\n * @param aSep The separator.\n */\nSourceNode.prototype.join = function SourceNode_join(aSep) {\n  var newChildren;\n  var i;\n  var len = this.children.length;\n  if (len > 0) {\n    newChildren = [];\n    for (i = 0; i < len-1; i++) {\n      newChildren.push(this.children[i]);\n      newChildren.push(aSep);\n    }\n    newChildren.push(this.children[i]);\n    this.children = newChildren;\n  }\n  return this;\n};\n\n/**\n * Call String.prototype.replace on the very right-most source snippet. Useful\n * for trimming whitespace from the end of a source node, etc.\n *\n * @param aPattern The pattern to replace.\n * @param aReplacement The thing to replace the pattern with.\n */\nSourceNode.prototype.replaceRight = function SourceNode_replaceRight(aPattern, aReplacement) {\n  var lastChild = this.children[this.children.length - 1];\n  if (lastChild[isSourceNode]) {\n    lastChild.replaceRight(aPattern, aReplacement);\n  }\n  else if (typeof lastChild === 'string') {\n    this.children[this.children.length - 1] = lastChild.replace(aPattern, aReplacement);\n  }\n  else {\n    this.children.push(''.replace(aPattern, aReplacement));\n  }\n  return this;\n};\n\n/**\n * Set the source content for a source file. This will be added to the SourceMapGenerator\n * in the sourcesContent field.\n *\n * @param aSourceFile The filename of the source file\n * @param aSourceContent The content of the source file\n */\nSourceNode.prototype.setSourceContent =\n  function SourceNode_setSourceContent(aSourceFile, aSourceContent) {\n    this.sourceContents[util.toSetString(aSourceFile)] = aSourceContent;\n  };\n\n/**\n * Walk over the tree of SourceNodes. 
The walking function is called for each\n * source file content and is passed the filename and source content.\n *\n * @param aFn The traversal function.\n */\nSourceNode.prototype.walkSourceContents =\n  function SourceNode_walkSourceContents(aFn) {\n    for (var i = 0, len = this.children.length; i < len; i++) {\n      if (this.children[i][isSourceNode]) {\n        this.children[i].walkSourceContents(aFn);\n      }\n    }\n\n    var sources = Object.keys(this.sourceContents);\n    for (var i = 0, len = sources.length; i < len; i++) {\n      aFn(util.fromSetString(sources[i]), this.sourceContents[sources[i]]);\n    }\n  };\n\n/**\n * Return the string representation of this source node. Walks over the tree\n * and concatenates all the various snippets together to one string.\n */\nSourceNode.prototype.toString = function SourceNode_toString() {\n  var str = \"\";\n  this.walk(function (chunk) {\n    str += chunk;\n  });\n  return str;\n};\n\n/**\n * Returns the string representation of this source node along with a source\n * map.\n */\nSourceNode.prototype.toStringWithSourceMap = function SourceNode_toStringWithSourceMap(aArgs) {\n  var generated = {\n    code: \"\",\n    line: 1,\n    column: 0\n  };\n  var map = new SourceMapGenerator(aArgs);\n  var sourceMappingActive = false;\n  var lastOriginalSource = null;\n  var lastOriginalLine = null;\n  var lastOriginalColumn = null;\n  var lastOriginalName = null;\n  this.walk(function (chunk, original) {\n    generated.code += chunk;\n    if (original.source !== null\n        && original.line !== null\n        && original.column !== null) {\n      if(lastOriginalSource !== original.source\n         || lastOriginalLine !== original.line\n         || lastOriginalColumn !== original.column\n         || lastOriginalName !== original.name) {\n        map.addMapping({\n          source: original.source,\n          original: {\n            line: original.line,\n            column: original.column\n          },\n          generated: {\n            line: generated.line,\n            column: generated.column\n          },\n          name: original.name\n        });\n      }\n      lastOriginalSource = original.source;\n      lastOriginalLine = original.line;\n      lastOriginalColumn = original.column;\n      lastOriginalName = original.name;\n      sourceMappingActive = true;\n    } else if (sourceMappingActive) {\n      map.addMapping({\n        generated: {\n          line: generated.line,\n          column: generated.column\n        }\n      });\n      lastOriginalSource = null;\n      sourceMappingActive = false;\n    }\n    for (var idx = 0, length = chunk.length; idx < length; idx++) {\n      if (chunk.charCodeAt(idx) === NEWLINE_CODE) {\n        generated.line++;\n        generated.column = 0;\n        // Mappings end at eol\n        if (idx + 1 === length) {\n          lastOriginalSource = null;\n          sourceMappingActive = false;\n        } else if (sourceMappingActive) {\n          map.addMapping({\n            source: original.source,\n            original: {\n              line: original.line,\n              column: original.column\n            },\n            generated: {\n              line: generated.line,\n              column: generated.column\n            },\n            name: original.name\n          });\n        }\n      } else {\n        generated.column++;\n      }\n    }\n  });\n  this.walkSourceContents(function (sourceFile, sourceContent) {\n    map.setSourceContent(sourceFile, sourceContent);\n  });\n\n  return { code: 
generated.code, map: map };\n};\n\nexports.SourceNode = SourceNode;\n\n\n\n//////////////////\n// WEBPACK FOOTER\n// ./lib/source-node.js\n// module id = 10\n// module chunks = 0"],"sourceRoot":""} \ No newline at end of file diff --git a/node_modules/source-map/dist/source-map.js b/node_modules/source-map/dist/source-map.js new file mode 100644 index 0000000..b4eb087 --- /dev/null +++ b/node_modules/source-map/dist/source-map.js @@ -0,0 +1,3233 @@ +(function webpackUniversalModuleDefinition(root, factory) { + if(typeof exports === 'object' && typeof module === 'object') + module.exports = factory(); + else if(typeof define === 'function' && define.amd) + define([], factory); + else if(typeof exports === 'object') + exports["sourceMap"] = factory(); + else + root["sourceMap"] = factory(); +})(this, function() { +return /******/ (function(modules) { // webpackBootstrap +/******/ // The module cache +/******/ var installedModules = {}; + +/******/ // The require function +/******/ function __webpack_require__(moduleId) { + +/******/ // Check if module is in cache +/******/ if(installedModules[moduleId]) +/******/ return installedModules[moduleId].exports; + +/******/ // Create a new module (and put it into the cache) +/******/ var module = installedModules[moduleId] = { +/******/ exports: {}, +/******/ id: moduleId, +/******/ loaded: false +/******/ }; + +/******/ // Execute the module function +/******/ modules[moduleId].call(module.exports, module, module.exports, __webpack_require__); + +/******/ // Flag the module as loaded +/******/ module.loaded = true; + +/******/ // Return the exports of the module +/******/ return module.exports; +/******/ } + + +/******/ // expose the modules object (__webpack_modules__) +/******/ __webpack_require__.m = modules; + +/******/ // expose the module cache +/******/ __webpack_require__.c = installedModules; + +/******/ // __webpack_public_path__ +/******/ __webpack_require__.p = ""; + +/******/ // Load entry module and return exports +/******/ return __webpack_require__(0); +/******/ }) +/************************************************************************/ +/******/ ([ +/* 0 */ +/***/ (function(module, exports, __webpack_require__) { + + /* + * Copyright 2009-2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE.txt or: + * http://opensource.org/licenses/BSD-3-Clause + */ + exports.SourceMapGenerator = __webpack_require__(1).SourceMapGenerator; + exports.SourceMapConsumer = __webpack_require__(7).SourceMapConsumer; + exports.SourceNode = __webpack_require__(10).SourceNode; + + +/***/ }), +/* 1 */ +/***/ (function(module, exports, __webpack_require__) { + + /* -*- Mode: js; js-indent-level: 2; -*- */ + /* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + + var base64VLQ = __webpack_require__(2); + var util = __webpack_require__(4); + var ArraySet = __webpack_require__(5).ArraySet; + var MappingList = __webpack_require__(6).MappingList; + + /** + * An instance of the SourceMapGenerator represents a source map which is + * being built incrementally. You may pass an object with the following + * properties: + * + * - file: The filename of the generated source. + * - sourceRoot: A root for all relative URLs in this source map. 
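Continuing the hypothetical sketch above, toStringWithSourceMap renders the same node tree to generated code plus a SourceMapGenerator; the options object it receives is the same file/sourceRoot shape documented for the generator just above, and the file name here is assumed.

var out = call.toStringWithSourceMap({ file: 'bundle.js' });
out.code;             // the concatenated generated source
out.map.toString();   // JSON for the accompanying source map (a SourceMapGenerator)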
+ */ + function SourceMapGenerator(aArgs) { + if (!aArgs) { + aArgs = {}; + } + this._file = util.getArg(aArgs, 'file', null); + this._sourceRoot = util.getArg(aArgs, 'sourceRoot', null); + this._skipValidation = util.getArg(aArgs, 'skipValidation', false); + this._sources = new ArraySet(); + this._names = new ArraySet(); + this._mappings = new MappingList(); + this._sourcesContents = null; + } + + SourceMapGenerator.prototype._version = 3; + + /** + * Creates a new SourceMapGenerator based on a SourceMapConsumer + * + * @param aSourceMapConsumer The SourceMap. + */ + SourceMapGenerator.fromSourceMap = + function SourceMapGenerator_fromSourceMap(aSourceMapConsumer) { + var sourceRoot = aSourceMapConsumer.sourceRoot; + var generator = new SourceMapGenerator({ + file: aSourceMapConsumer.file, + sourceRoot: sourceRoot + }); + aSourceMapConsumer.eachMapping(function (mapping) { + var newMapping = { + generated: { + line: mapping.generatedLine, + column: mapping.generatedColumn + } + }; + + if (mapping.source != null) { + newMapping.source = mapping.source; + if (sourceRoot != null) { + newMapping.source = util.relative(sourceRoot, newMapping.source); + } + + newMapping.original = { + line: mapping.originalLine, + column: mapping.originalColumn + }; + + if (mapping.name != null) { + newMapping.name = mapping.name; + } + } + + generator.addMapping(newMapping); + }); + aSourceMapConsumer.sources.forEach(function (sourceFile) { + var sourceRelative = sourceFile; + if (sourceRoot !== null) { + sourceRelative = util.relative(sourceRoot, sourceFile); + } + + if (!generator._sources.has(sourceRelative)) { + generator._sources.add(sourceRelative); + } + + var content = aSourceMapConsumer.sourceContentFor(sourceFile); + if (content != null) { + generator.setSourceContent(sourceFile, content); + } + }); + return generator; + }; + + /** + * Add a single mapping from original source line and column to the generated + * source's line and column for this source map being created. The mapping + * object should have the following properties: + * + * - generated: An object with the generated line and column positions. + * - original: An object with the original line and column positions. + * - source: The original source file (relative to the sourceRoot). + * - name: An optional original token name for this mapping. + */ + SourceMapGenerator.prototype.addMapping = + function SourceMapGenerator_addMapping(aArgs) { + var generated = util.getArg(aArgs, 'generated'); + var original = util.getArg(aArgs, 'original', null); + var source = util.getArg(aArgs, 'source', null); + var name = util.getArg(aArgs, 'name', null); + + if (!this._skipValidation) { + this._validateMapping(generated, original, source, name); + } + + if (source != null) { + source = String(source); + if (!this._sources.has(source)) { + this._sources.add(source); + } + } + + if (name != null) { + name = String(name); + if (!this._names.has(name)) { + this._names.add(name); + } + } + + this._mappings.add({ + generatedLine: generated.line, + generatedColumn: generated.column, + originalLine: original != null && original.line, + originalColumn: original != null && original.column, + source: source, + name: name + }); + }; + + /** + * Set the source content for a source file. 
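A minimal sketch (not part of the diff) of the generator API shown above; all file and symbol names are hypothetical.

var SourceMapGenerator = require('source-map').SourceMapGenerator;

var generator = new SourceMapGenerator({ file: 'min.js', sourceRoot: '/src' });
generator.addMapping({
  generated: { line: 1, column: 5 },
  original:  { line: 2, column: 3 },
  source: 'app.js',
  name: 'render'
});
generator.setSourceContent('app.js', 'function render() { /* ... */ }');
JSON.parse(generator.toString()).mappings;   // the delta-encoded VLQ string serialized further below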
+ */ + SourceMapGenerator.prototype.setSourceContent = + function SourceMapGenerator_setSourceContent(aSourceFile, aSourceContent) { + var source = aSourceFile; + if (this._sourceRoot != null) { + source = util.relative(this._sourceRoot, source); + } + + if (aSourceContent != null) { + // Add the source content to the _sourcesContents map. + // Create a new _sourcesContents map if the property is null. + if (!this._sourcesContents) { + this._sourcesContents = Object.create(null); + } + this._sourcesContents[util.toSetString(source)] = aSourceContent; + } else if (this._sourcesContents) { + // Remove the source file from the _sourcesContents map. + // If the _sourcesContents map is empty, set the property to null. + delete this._sourcesContents[util.toSetString(source)]; + if (Object.keys(this._sourcesContents).length === 0) { + this._sourcesContents = null; + } + } + }; + + /** + * Applies the mappings of a sub-source-map for a specific source file to the + * source map being generated. Each mapping to the supplied source file is + * rewritten using the supplied source map. Note: The resolution for the + * resulting mappings is the minimium of this map and the supplied map. + * + * @param aSourceMapConsumer The source map to be applied. + * @param aSourceFile Optional. The filename of the source file. + * If omitted, SourceMapConsumer's file property will be used. + * @param aSourceMapPath Optional. The dirname of the path to the source map + * to be applied. If relative, it is relative to the SourceMapConsumer. + * This parameter is needed when the two source maps aren't in the same + * directory, and the source map to be applied contains relative source + * paths. If so, those relative source paths need to be rewritten + * relative to the SourceMapGenerator. + */ + SourceMapGenerator.prototype.applySourceMap = + function SourceMapGenerator_applySourceMap(aSourceMapConsumer, aSourceFile, aSourceMapPath) { + var sourceFile = aSourceFile; + // If aSourceFile is omitted, we will use the file property of the SourceMap + if (aSourceFile == null) { + if (aSourceMapConsumer.file == null) { + throw new Error( + 'SourceMapGenerator.prototype.applySourceMap requires either an explicit source file, ' + + 'or the source map\'s "file" property. Both were omitted.' + ); + } + sourceFile = aSourceMapConsumer.file; + } + var sourceRoot = this._sourceRoot; + // Make "sourceFile" relative if an absolute Url is passed. + if (sourceRoot != null) { + sourceFile = util.relative(sourceRoot, sourceFile); + } + // Applying the SourceMap can add and remove items from the sources and + // the names array. + var newSources = new ArraySet(); + var newNames = new ArraySet(); + + // Find mappings for the "sourceFile" + this._mappings.unsortedForEach(function (mapping) { + if (mapping.source === sourceFile && mapping.originalLine != null) { + // Check if it can be mapped by the source map, then update the mapping. 
+ var original = aSourceMapConsumer.originalPositionFor({ + line: mapping.originalLine, + column: mapping.originalColumn + }); + if (original.source != null) { + // Copy mapping + mapping.source = original.source; + if (aSourceMapPath != null) { + mapping.source = util.join(aSourceMapPath, mapping.source) + } + if (sourceRoot != null) { + mapping.source = util.relative(sourceRoot, mapping.source); + } + mapping.originalLine = original.line; + mapping.originalColumn = original.column; + if (original.name != null) { + mapping.name = original.name; + } + } + } + + var source = mapping.source; + if (source != null && !newSources.has(source)) { + newSources.add(source); + } + + var name = mapping.name; + if (name != null && !newNames.has(name)) { + newNames.add(name); + } + + }, this); + this._sources = newSources; + this._names = newNames; + + // Copy sourcesContents of applied map. + aSourceMapConsumer.sources.forEach(function (sourceFile) { + var content = aSourceMapConsumer.sourceContentFor(sourceFile); + if (content != null) { + if (aSourceMapPath != null) { + sourceFile = util.join(aSourceMapPath, sourceFile); + } + if (sourceRoot != null) { + sourceFile = util.relative(sourceRoot, sourceFile); + } + this.setSourceContent(sourceFile, content); + } + }, this); + }; + + /** + * A mapping can have one of the three levels of data: + * + * 1. Just the generated position. + * 2. The Generated position, original position, and original source. + * 3. Generated and original position, original source, as well as a name + * token. + * + * To maintain consistency, we validate that any new mapping being added falls + * in to one of these categories. + */ + SourceMapGenerator.prototype._validateMapping = + function SourceMapGenerator_validateMapping(aGenerated, aOriginal, aSource, + aName) { + // When aOriginal is truthy but has empty values for .line and .column, + // it is most likely a programmer error. In this case we throw a very + // specific error message to try to guide them the right way. + // For example: https://github.com/Polymer/polymer-bundler/pull/519 + if (aOriginal && typeof aOriginal.line !== 'number' && typeof aOriginal.column !== 'number') { + throw new Error( + 'original.line and original.column are not numbers -- you probably meant to omit ' + + 'the original mapping entirely and only map the generated position. If so, pass ' + + 'null for the original mapping instead of an object with empty or null values.' + ); + } + + if (aGenerated && 'line' in aGenerated && 'column' in aGenerated + && aGenerated.line > 0 && aGenerated.column >= 0 + && !aOriginal && !aSource && !aName) { + // Case 1. + return; + } + else if (aGenerated && 'line' in aGenerated && 'column' in aGenerated + && aOriginal && 'line' in aOriginal && 'column' in aOriginal + && aGenerated.line > 0 && aGenerated.column >= 0 + && aOriginal.line > 0 && aOriginal.column >= 0 + && aSource) { + // Cases 2 and 3. + return; + } + else { + throw new Error('Invalid mapping: ' + JSON.stringify({ + generated: aGenerated, + source: aSource, + original: aOriginal, + name: aName + })); + } + }; + + /** + * Serialize the accumulated mappings in to the stream of base 64 VLQs + * specified by the source map format. 
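A minimal sketch (not part of the diff) of applySourceMap above, composing two maps so the final map points back at the original authored files. The consumers and file name are hypothetical, and the constructor used is the synchronous 0.6-style one bundled here.

var sourceMap = require('source-map');

var minifiedMap   = new sourceMap.SourceMapConsumer(minifiedMapJson);   // minified -> transpiled (assumed to exist)
var transpiledMap = new sourceMap.SourceMapConsumer(transpiledMapJson); // transpiled -> original (assumed to exist)

var generator = sourceMap.SourceMapGenerator.fromSourceMap(minifiedMap);
generator.applySourceMap(transpiledMap, 'bundle.transpiled.js');
generator.toString();   // now maps minified positions straight to the original sources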
+ */ + SourceMapGenerator.prototype._serializeMappings = + function SourceMapGenerator_serializeMappings() { + var previousGeneratedColumn = 0; + var previousGeneratedLine = 1; + var previousOriginalColumn = 0; + var previousOriginalLine = 0; + var previousName = 0; + var previousSource = 0; + var result = ''; + var next; + var mapping; + var nameIdx; + var sourceIdx; + + var mappings = this._mappings.toArray(); + for (var i = 0, len = mappings.length; i < len; i++) { + mapping = mappings[i]; + next = '' + + if (mapping.generatedLine !== previousGeneratedLine) { + previousGeneratedColumn = 0; + while (mapping.generatedLine !== previousGeneratedLine) { + next += ';'; + previousGeneratedLine++; + } + } + else { + if (i > 0) { + if (!util.compareByGeneratedPositionsInflated(mapping, mappings[i - 1])) { + continue; + } + next += ','; + } + } + + next += base64VLQ.encode(mapping.generatedColumn + - previousGeneratedColumn); + previousGeneratedColumn = mapping.generatedColumn; + + if (mapping.source != null) { + sourceIdx = this._sources.indexOf(mapping.source); + next += base64VLQ.encode(sourceIdx - previousSource); + previousSource = sourceIdx; + + // lines are stored 0-based in SourceMap spec version 3 + next += base64VLQ.encode(mapping.originalLine - 1 + - previousOriginalLine); + previousOriginalLine = mapping.originalLine - 1; + + next += base64VLQ.encode(mapping.originalColumn + - previousOriginalColumn); + previousOriginalColumn = mapping.originalColumn; + + if (mapping.name != null) { + nameIdx = this._names.indexOf(mapping.name); + next += base64VLQ.encode(nameIdx - previousName); + previousName = nameIdx; + } + } + + result += next; + } + + return result; + }; + + SourceMapGenerator.prototype._generateSourcesContent = + function SourceMapGenerator_generateSourcesContent(aSources, aSourceRoot) { + return aSources.map(function (source) { + if (!this._sourcesContents) { + return null; + } + if (aSourceRoot != null) { + source = util.relative(aSourceRoot, source); + } + var key = util.toSetString(source); + return Object.prototype.hasOwnProperty.call(this._sourcesContents, key) + ? this._sourcesContents[key] + : null; + }, this); + }; + + /** + * Externalize the source map. + */ + SourceMapGenerator.prototype.toJSON = + function SourceMapGenerator_toJSON() { + var map = { + version: this._version, + sources: this._sources.toArray(), + names: this._names.toArray(), + mappings: this._serializeMappings() + }; + if (this._file != null) { + map.file = this._file; + } + if (this._sourceRoot != null) { + map.sourceRoot = this._sourceRoot; + } + if (this._sourcesContents) { + map.sourcesContent = this._generateSourcesContent(map.sources, map.sourceRoot); + } + + return map; + }; + + /** + * Render the source map being generated to a string. + */ + SourceMapGenerator.prototype.toString = + function SourceMapGenerator_toString() { + return JSON.stringify(this.toJSON()); + }; + + exports.SourceMapGenerator = SourceMapGenerator; + + +/***/ }), +/* 2 */ +/***/ (function(module, exports, __webpack_require__) { + + /* -*- Mode: js; js-indent-level: 2; -*- */ + /* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + * + * Based on the Base 64 VLQ implementation in Closure Compiler: + * https://code.google.com/p/closure-compiler/source/browse/trunk/src/com/google/debugging/sourcemap/Base64VLQ.java + * + * Copyright 2011 The Closure Compiler Authors. All rights reserved. 
+ * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are + * met: + * + * * Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above + * copyright notice, this list of conditions and the following + * disclaimer in the documentation and/or other materials provided + * with the distribution. + * * Neither the name of Google Inc. nor the names of its + * contributors may be used to endorse or promote products derived + * from this software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ + + var base64 = __webpack_require__(3); + + // A single base 64 digit can contain 6 bits of data. For the base 64 variable + // length quantities we use in the source map spec, the first bit is the sign, + // the next four bits are the actual value, and the 6th bit is the + // continuation bit. The continuation bit tells us whether there are more + // digits in this value following this digit. + // + // Continuation + // | Sign + // | | + // V V + // 101011 + + var VLQ_BASE_SHIFT = 5; + + // binary: 100000 + var VLQ_BASE = 1 << VLQ_BASE_SHIFT; + + // binary: 011111 + var VLQ_BASE_MASK = VLQ_BASE - 1; + + // binary: 100000 + var VLQ_CONTINUATION_BIT = VLQ_BASE; + + /** + * Converts from a two-complement value to a value where the sign bit is + * placed in the least significant bit. For example, as decimals: + * 1 becomes 2 (10 binary), -1 becomes 3 (11 binary) + * 2 becomes 4 (100 binary), -2 becomes 5 (101 binary) + */ + function toVLQSigned(aValue) { + return aValue < 0 + ? ((-aValue) << 1) + 1 + : (aValue << 1) + 0; + } + + /** + * Converts to a two-complement value from a value where the sign bit is + * placed in the least significant bit. For example, as decimals: + * 2 (10 binary) becomes 1, 3 (11 binary) becomes -1 + * 4 (100 binary) becomes 2, 5 (101 binary) becomes -2 + */ + function fromVLQSigned(aValue) { + var isNegative = (aValue & 1) === 1; + var shifted = aValue >> 1; + return isNegative + ? -shifted + : shifted; + } + + /** + * Returns the base 64 VLQ encoded value. + */ + exports.encode = function base64VLQ_encode(aValue) { + var encoded = ""; + var digit; + + var vlq = toVLQSigned(aValue); + + do { + digit = vlq & VLQ_BASE_MASK; + vlq >>>= VLQ_BASE_SHIFT; + if (vlq > 0) { + // There are still more digits in this value, so we must make sure the + // continuation bit is marked. 
+ digit |= VLQ_CONTINUATION_BIT; + } + encoded += base64.encode(digit); + } while (vlq > 0); + + return encoded; + }; + + /** + * Decodes the next base 64 VLQ value from the given string and returns the + * value and the rest of the string via the out parameter. + */ + exports.decode = function base64VLQ_decode(aStr, aIndex, aOutParam) { + var strLen = aStr.length; + var result = 0; + var shift = 0; + var continuation, digit; + + do { + if (aIndex >= strLen) { + throw new Error("Expected more digits in base 64 VLQ value."); + } + + digit = base64.decode(aStr.charCodeAt(aIndex++)); + if (digit === -1) { + throw new Error("Invalid base64 digit: " + aStr.charAt(aIndex - 1)); + } + + continuation = !!(digit & VLQ_CONTINUATION_BIT); + digit &= VLQ_BASE_MASK; + result = result + (digit << shift); + shift += VLQ_BASE_SHIFT; + } while (continuation); + + aOutParam.value = fromVLQSigned(result); + aOutParam.rest = aIndex; + }; + + +/***/ }), +/* 3 */ +/***/ (function(module, exports) { + + /* -*- Mode: js; js-indent-level: 2; -*- */ + /* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + + var intToCharMap = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'.split(''); + + /** + * Encode an integer in the range of 0 to 63 to a single base 64 digit. + */ + exports.encode = function (number) { + if (0 <= number && number < intToCharMap.length) { + return intToCharMap[number]; + } + throw new TypeError("Must be between 0 and 63: " + number); + }; + + /** + * Decode a single base 64 character code digit to an integer. Returns -1 on + * failure. + */ + exports.decode = function (charCode) { + var bigA = 65; // 'A' + var bigZ = 90; // 'Z' + + var littleA = 97; // 'a' + var littleZ = 122; // 'z' + + var zero = 48; // '0' + var nine = 57; // '9' + + var plus = 43; // '+' + var slash = 47; // '/' + + var littleOffset = 26; + var numberOffset = 52; + + // 0 - 25: ABCDEFGHIJKLMNOPQRSTUVWXYZ + if (bigA <= charCode && charCode <= bigZ) { + return (charCode - bigA); + } + + // 26 - 51: abcdefghijklmnopqrstuvwxyz + if (littleA <= charCode && charCode <= littleZ) { + return (charCode - littleA + littleOffset); + } + + // 52 - 61: 0123456789 + if (zero <= charCode && charCode <= nine) { + return (charCode - zero + numberOffset); + } + + // 62: + + if (charCode == plus) { + return 62; + } + + // 63: / + if (charCode == slash) { + return 63; + } + + // Invalid base64 digit. + return -1; + }; + + +/***/ }), +/* 4 */ +/***/ (function(module, exports) { + + /* -*- Mode: js; js-indent-level: 2; -*- */ + /* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + + /** + * This is a helper function for getting values from parameter/options + * objects. + * + * @param args The object we are extracting values from + * @param name The name of the property we are getting. + * @param defaultValue An optional value to return if the property is missing + * from the object. If this is not specified and the property is missing, an + * error will be thrown. 
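A minimal standalone sketch (not part of the diff) of the VLQ scheme implemented above: the sign moves into the least significant bit, each base 64 digit carries five data bits, and the sixth bit marks continuation.

var BASE64 = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/';

function vlqEncode(value) {
  var vlq = value < 0 ? ((-value) << 1) + 1 : (value << 1);   // move the sign into the LSB
  var encoded = '';
  do {
    var digit = vlq & 0x1f;        // low five bits of the remaining value
    vlq >>>= 5;
    if (vlq > 0) digit |= 0x20;    // continuation bit: more digits follow
    encoded += BASE64[digit];
  } while (vlq > 0);
  return encoded;
}

vlqEncode(0);    // 'A'
vlqEncode(1);    // 'C'
vlqEncode(-1);   // 'D'
vlqEncode(16);   // 'gB'  (two digits: a continuation digit, then the high bits)
// In a "mappings" string, a segment such as "AAgBC" therefore decodes to the deltas [0, 0, 16, 1].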
+ */ + function getArg(aArgs, aName, aDefaultValue) { + if (aName in aArgs) { + return aArgs[aName]; + } else if (arguments.length === 3) { + return aDefaultValue; + } else { + throw new Error('"' + aName + '" is a required argument.'); + } + } + exports.getArg = getArg; + + var urlRegexp = /^(?:([\w+\-.]+):)?\/\/(?:(\w+:\w+)@)?([\w.-]*)(?::(\d+))?(.*)$/; + var dataUrlRegexp = /^data:.+\,.+$/; + + function urlParse(aUrl) { + var match = aUrl.match(urlRegexp); + if (!match) { + return null; + } + return { + scheme: match[1], + auth: match[2], + host: match[3], + port: match[4], + path: match[5] + }; + } + exports.urlParse = urlParse; + + function urlGenerate(aParsedUrl) { + var url = ''; + if (aParsedUrl.scheme) { + url += aParsedUrl.scheme + ':'; + } + url += '//'; + if (aParsedUrl.auth) { + url += aParsedUrl.auth + '@'; + } + if (aParsedUrl.host) { + url += aParsedUrl.host; + } + if (aParsedUrl.port) { + url += ":" + aParsedUrl.port + } + if (aParsedUrl.path) { + url += aParsedUrl.path; + } + return url; + } + exports.urlGenerate = urlGenerate; + + /** + * Normalizes a path, or the path portion of a URL: + * + * - Replaces consecutive slashes with one slash. + * - Removes unnecessary '.' parts. + * - Removes unnecessary '/..' parts. + * + * Based on code in the Node.js 'path' core module. + * + * @param aPath The path or url to normalize. + */ + function normalize(aPath) { + var path = aPath; + var url = urlParse(aPath); + if (url) { + if (!url.path) { + return aPath; + } + path = url.path; + } + var isAbsolute = exports.isAbsolute(path); + + var parts = path.split(/\/+/); + for (var part, up = 0, i = parts.length - 1; i >= 0; i--) { + part = parts[i]; + if (part === '.') { + parts.splice(i, 1); + } else if (part === '..') { + up++; + } else if (up > 0) { + if (part === '') { + // The first part is blank if the path is absolute. Trying to go + // above the root is a no-op. Therefore we can remove all '..' parts + // directly after the root. + parts.splice(i + 1, up); + up = 0; + } else { + parts.splice(i, 2); + up--; + } + } + } + path = parts.join('/'); + + if (path === '') { + path = isAbsolute ? '/' : '.'; + } + + if (url) { + url.path = path; + return urlGenerate(url); + } + return path; + } + exports.normalize = normalize; + + /** + * Joins two paths/URLs. + * + * @param aRoot The root path or URL. + * @param aPath The path or URL to be joined with the root. + * + * - If aPath is a URL or a data URI, aPath is returned, unless aPath is a + * scheme-relative URL: Then the scheme of aRoot, if any, is prepended + * first. + * - Otherwise aPath is a path. If aRoot is a URL, then its path portion + * is updated with the result and aRoot is returned. Otherwise the result + * is returned. + * - If aPath is absolute, the result is aPath. + * - Otherwise the two paths are joined with a slash. + * - Joining for example 'http://' and 'www.example.com' is also supported. 
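A minimal sketch (not part of the diff) of the path/URL helpers in this util module: normalize above, join and relative defined immediately after. The require path is an internal, unversioned entry point of the bundled package and is assumed from the file tree in this diff; the example results follow the implementations shown here.

var util = require('source-map/lib/util');   // internal helper of the bundled package (path assumed)

util.normalize('/a/b/../c//d');                   // '/a/c/d'
util.join('http://example.com/a', 'b/c');         // 'http://example.com/a/b/c'
util.relative('/the/root', '/the/root/one.js');   // 'one.js'
util.getArg({ line: 1 }, 'column', 0);            // 0 (falls back to the supplied default)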
+ */ + function join(aRoot, aPath) { + if (aRoot === "") { + aRoot = "."; + } + if (aPath === "") { + aPath = "."; + } + var aPathUrl = urlParse(aPath); + var aRootUrl = urlParse(aRoot); + if (aRootUrl) { + aRoot = aRootUrl.path || '/'; + } + + // `join(foo, '//www.example.org')` + if (aPathUrl && !aPathUrl.scheme) { + if (aRootUrl) { + aPathUrl.scheme = aRootUrl.scheme; + } + return urlGenerate(aPathUrl); + } + + if (aPathUrl || aPath.match(dataUrlRegexp)) { + return aPath; + } + + // `join('http://', 'www.example.com')` + if (aRootUrl && !aRootUrl.host && !aRootUrl.path) { + aRootUrl.host = aPath; + return urlGenerate(aRootUrl); + } + + var joined = aPath.charAt(0) === '/' + ? aPath + : normalize(aRoot.replace(/\/+$/, '') + '/' + aPath); + + if (aRootUrl) { + aRootUrl.path = joined; + return urlGenerate(aRootUrl); + } + return joined; + } + exports.join = join; + + exports.isAbsolute = function (aPath) { + return aPath.charAt(0) === '/' || urlRegexp.test(aPath); + }; + + /** + * Make a path relative to a URL or another path. + * + * @param aRoot The root path or URL. + * @param aPath The path or URL to be made relative to aRoot. + */ + function relative(aRoot, aPath) { + if (aRoot === "") { + aRoot = "."; + } + + aRoot = aRoot.replace(/\/$/, ''); + + // It is possible for the path to be above the root. In this case, simply + // checking whether the root is a prefix of the path won't work. Instead, we + // need to remove components from the root one by one, until either we find + // a prefix that fits, or we run out of components to remove. + var level = 0; + while (aPath.indexOf(aRoot + '/') !== 0) { + var index = aRoot.lastIndexOf("/"); + if (index < 0) { + return aPath; + } + + // If the only part of the root that is left is the scheme (i.e. http://, + // file:///, etc.), one or more slashes (/), or simply nothing at all, we + // have exhausted all components, so the path is not relative to the root. + aRoot = aRoot.slice(0, index); + if (aRoot.match(/^([^\/]+:\/)?\/*$/)) { + return aPath; + } + + ++level; + } + + // Make sure we add a "../" for each component we removed from the root. + return Array(level + 1).join("../") + aPath.substr(aRoot.length + 1); + } + exports.relative = relative; + + var supportsNullProto = (function () { + var obj = Object.create(null); + return !('__proto__' in obj); + }()); + + function identity (s) { + return s; + } + + /** + * Because behavior goes wacky when you set `__proto__` on objects, we + * have to prefix all the strings in our set with an arbitrary character. + * + * See https://github.com/mozilla/source-map/pull/31 and + * https://github.com/mozilla/source-map/issues/30 + * + * @param String aStr + */ + function toSetString(aStr) { + if (isProtoString(aStr)) { + return '$' + aStr; + } + + return aStr; + } + exports.toSetString = supportsNullProto ? identity : toSetString; + + function fromSetString(aStr) { + if (isProtoString(aStr)) { + return aStr.slice(1); + } + + return aStr; + } + exports.fromSetString = supportsNullProto ? 
identity : fromSetString; + + function isProtoString(s) { + if (!s) { + return false; + } + + var length = s.length; + + if (length < 9 /* "__proto__".length */) { + return false; + } + + if (s.charCodeAt(length - 1) !== 95 /* '_' */ || + s.charCodeAt(length - 2) !== 95 /* '_' */ || + s.charCodeAt(length - 3) !== 111 /* 'o' */ || + s.charCodeAt(length - 4) !== 116 /* 't' */ || + s.charCodeAt(length - 5) !== 111 /* 'o' */ || + s.charCodeAt(length - 6) !== 114 /* 'r' */ || + s.charCodeAt(length - 7) !== 112 /* 'p' */ || + s.charCodeAt(length - 8) !== 95 /* '_' */ || + s.charCodeAt(length - 9) !== 95 /* '_' */) { + return false; + } + + for (var i = length - 10; i >= 0; i--) { + if (s.charCodeAt(i) !== 36 /* '$' */) { + return false; + } + } + + return true; + } + + /** + * Comparator between two mappings where the original positions are compared. + * + * Optionally pass in `true` as `onlyCompareGenerated` to consider two + * mappings with the same original source/line/column, but different generated + * line and column the same. Useful when searching for a mapping with a + * stubbed out mapping. + */ + function compareByOriginalPositions(mappingA, mappingB, onlyCompareOriginal) { + var cmp = strcmp(mappingA.source, mappingB.source); + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.originalLine - mappingB.originalLine; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.originalColumn - mappingB.originalColumn; + if (cmp !== 0 || onlyCompareOriginal) { + return cmp; + } + + cmp = mappingA.generatedColumn - mappingB.generatedColumn; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.generatedLine - mappingB.generatedLine; + if (cmp !== 0) { + return cmp; + } + + return strcmp(mappingA.name, mappingB.name); + } + exports.compareByOriginalPositions = compareByOriginalPositions; + + /** + * Comparator between two mappings with deflated source and name indices where + * the generated positions are compared. + * + * Optionally pass in `true` as `onlyCompareGenerated` to consider two + * mappings with the same generated line and column, but different + * source/name/original line and column the same. Useful when searching for a + * mapping with a stubbed out mapping. + */ + function compareByGeneratedPositionsDeflated(mappingA, mappingB, onlyCompareGenerated) { + var cmp = mappingA.generatedLine - mappingB.generatedLine; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.generatedColumn - mappingB.generatedColumn; + if (cmp !== 0 || onlyCompareGenerated) { + return cmp; + } + + cmp = strcmp(mappingA.source, mappingB.source); + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.originalLine - mappingB.originalLine; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.originalColumn - mappingB.originalColumn; + if (cmp !== 0) { + return cmp; + } + + return strcmp(mappingA.name, mappingB.name); + } + exports.compareByGeneratedPositionsDeflated = compareByGeneratedPositionsDeflated; + + function strcmp(aStr1, aStr2) { + if (aStr1 === aStr2) { + return 0; + } + + if (aStr1 === null) { + return 1; // aStr2 !== null + } + + if (aStr2 === null) { + return -1; // aStr1 !== null + } + + if (aStr1 > aStr2) { + return 1; + } + + return -1; + } + + /** + * Comparator between two mappings with inflated source and name strings where + * the generated positions are compared. 
+ */ + function compareByGeneratedPositionsInflated(mappingA, mappingB) { + var cmp = mappingA.generatedLine - mappingB.generatedLine; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.generatedColumn - mappingB.generatedColumn; + if (cmp !== 0) { + return cmp; + } + + cmp = strcmp(mappingA.source, mappingB.source); + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.originalLine - mappingB.originalLine; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.originalColumn - mappingB.originalColumn; + if (cmp !== 0) { + return cmp; + } + + return strcmp(mappingA.name, mappingB.name); + } + exports.compareByGeneratedPositionsInflated = compareByGeneratedPositionsInflated; + + /** + * Strip any JSON XSSI avoidance prefix from the string (as documented + * in the source maps specification), and then parse the string as + * JSON. + */ + function parseSourceMapInput(str) { + return JSON.parse(str.replace(/^\)]}'[^\n]*\n/, '')); + } + exports.parseSourceMapInput = parseSourceMapInput; + + /** + * Compute the URL of a source given the the source root, the source's + * URL, and the source map's URL. + */ + function computeSourceURL(sourceRoot, sourceURL, sourceMapURL) { + sourceURL = sourceURL || ''; + + if (sourceRoot) { + // This follows what Chrome does. + if (sourceRoot[sourceRoot.length - 1] !== '/' && sourceURL[0] !== '/') { + sourceRoot += '/'; + } + // The spec says: + // Line 4: An optional source root, useful for relocating source + // files on a server or removing repeated values in the + // “sources” entry. This value is prepended to the individual + // entries in the “source” field. + sourceURL = sourceRoot + sourceURL; + } + + // Historically, SourceMapConsumer did not take the sourceMapURL as + // a parameter. This mode is still somewhat supported, which is why + // this code block is conditional. However, it's preferable to pass + // the source map URL to SourceMapConsumer, so that this function + // can implement the source URL resolution algorithm as outlined in + // the spec. This block is basically the equivalent of: + // new URL(sourceURL, sourceMapURL).toString() + // ... except it avoids using URL, which wasn't available in the + // older releases of node still supported by this library. + // + // The spec says: + // If the sources are not absolute URLs after prepending of the + // “sourceRoot”, the sources are resolved relative to the + // SourceMap (like resolving script src in a html document). + if (sourceMapURL) { + var parsed = urlParse(sourceMapURL); + if (!parsed) { + throw new Error("sourceMapURL could not be parsed"); + } + if (parsed.path) { + // Strip the last path component, but keep the "/". + var index = parsed.path.lastIndexOf('/'); + if (index >= 0) { + parsed.path = parsed.path.substring(0, index + 1); + } + } + sourceURL = join(urlGenerate(parsed), sourceURL); + } + + return normalize(sourceURL); + } + exports.computeSourceURL = computeSourceURL; + + +/***/ }), +/* 5 */ +/***/ (function(module, exports, __webpack_require__) { + + /* -*- Mode: js; js-indent-level: 2; -*- */ + /* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + + var util = __webpack_require__(4); + var has = Object.prototype.hasOwnProperty; + var hasNativeMap = typeof Map !== "undefined"; + + /** + * A data structure which is a combination of an array and a set. 
Adding a new + * member is O(1), testing for membership is O(1), and finding the index of an + * element is O(1). Removing elements from the set is not supported. Only + * strings are supported for membership. + */ + function ArraySet() { + this._array = []; + this._set = hasNativeMap ? new Map() : Object.create(null); + } + + /** + * Static method for creating ArraySet instances from an existing array. + */ + ArraySet.fromArray = function ArraySet_fromArray(aArray, aAllowDuplicates) { + var set = new ArraySet(); + for (var i = 0, len = aArray.length; i < len; i++) { + set.add(aArray[i], aAllowDuplicates); + } + return set; + }; + + /** + * Return how many unique items are in this ArraySet. If duplicates have been + * added, than those do not count towards the size. + * + * @returns Number + */ + ArraySet.prototype.size = function ArraySet_size() { + return hasNativeMap ? this._set.size : Object.getOwnPropertyNames(this._set).length; + }; + + /** + * Add the given string to this set. + * + * @param String aStr + */ + ArraySet.prototype.add = function ArraySet_add(aStr, aAllowDuplicates) { + var sStr = hasNativeMap ? aStr : util.toSetString(aStr); + var isDuplicate = hasNativeMap ? this.has(aStr) : has.call(this._set, sStr); + var idx = this._array.length; + if (!isDuplicate || aAllowDuplicates) { + this._array.push(aStr); + } + if (!isDuplicate) { + if (hasNativeMap) { + this._set.set(aStr, idx); + } else { + this._set[sStr] = idx; + } + } + }; + + /** + * Is the given string a member of this set? + * + * @param String aStr + */ + ArraySet.prototype.has = function ArraySet_has(aStr) { + if (hasNativeMap) { + return this._set.has(aStr); + } else { + var sStr = util.toSetString(aStr); + return has.call(this._set, sStr); + } + }; + + /** + * What is the index of the given string in the array? + * + * @param String aStr + */ + ArraySet.prototype.indexOf = function ArraySet_indexOf(aStr) { + if (hasNativeMap) { + var idx = this._set.get(aStr); + if (idx >= 0) { + return idx; + } + } else { + var sStr = util.toSetString(aStr); + if (has.call(this._set, sStr)) { + return this._set[sStr]; + } + } + + throw new Error('"' + aStr + '" is not in the set.'); + }; + + /** + * What is the element at the given index? + * + * @param Number aIdx + */ + ArraySet.prototype.at = function ArraySet_at(aIdx) { + if (aIdx >= 0 && aIdx < this._array.length) { + return this._array[aIdx]; + } + throw new Error('No element indexed by ' + aIdx); + }; + + /** + * Returns the array representation of this set (which has the proper indices + * indicated by indexOf). Note that this is a copy of the internal array used + * for storing the members so that no one can mess with internal state. + */ + ArraySet.prototype.toArray = function ArraySet_toArray() { + return this._array.slice(); + }; + + exports.ArraySet = ArraySet; + + +/***/ }), +/* 6 */ +/***/ (function(module, exports, __webpack_require__) { + + /* -*- Mode: js; js-indent-level: 2; -*- */ + /* + * Copyright 2014 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + + var util = __webpack_require__(4); + + /** + * Determine whether mappingB is after mappingA with respect to generated + * position. 
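A minimal sketch (not part of the diff) of the ArraySet contract defined above; the require path points at an internal module of the bundled package and is assumed from the tree in this diff.

var ArraySet = require('source-map/lib/array-set').ArraySet;   // internal module (path assumed)

var names = ArraySet.fromArray(['foo', 'bar']);
names.add('baz');
names.add('foo');       // duplicate: ignored unless aAllowDuplicates is passed
names.has('bar');       // true
names.indexOf('baz');   // 2
names.at(0);            // 'foo'
names.size();           // 3
names.toArray();        // ['foo', 'bar', 'baz'] (a copy, not the internal array)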
+ */ + function generatedPositionAfter(mappingA, mappingB) { + // Optimized for most common case + var lineA = mappingA.generatedLine; + var lineB = mappingB.generatedLine; + var columnA = mappingA.generatedColumn; + var columnB = mappingB.generatedColumn; + return lineB > lineA || lineB == lineA && columnB >= columnA || + util.compareByGeneratedPositionsInflated(mappingA, mappingB) <= 0; + } + + /** + * A data structure to provide a sorted view of accumulated mappings in a + * performance conscious manner. It trades a neglibable overhead in general + * case for a large speedup in case of mappings being added in order. + */ + function MappingList() { + this._array = []; + this._sorted = true; + // Serves as infimum + this._last = {generatedLine: -1, generatedColumn: 0}; + } + + /** + * Iterate through internal items. This method takes the same arguments that + * `Array.prototype.forEach` takes. + * + * NOTE: The order of the mappings is NOT guaranteed. + */ + MappingList.prototype.unsortedForEach = + function MappingList_forEach(aCallback, aThisArg) { + this._array.forEach(aCallback, aThisArg); + }; + + /** + * Add the given source mapping. + * + * @param Object aMapping + */ + MappingList.prototype.add = function MappingList_add(aMapping) { + if (generatedPositionAfter(this._last, aMapping)) { + this._last = aMapping; + this._array.push(aMapping); + } else { + this._sorted = false; + this._array.push(aMapping); + } + }; + + /** + * Returns the flat, sorted array of mappings. The mappings are sorted by + * generated position. + * + * WARNING: This method returns internal data without copying, for + * performance. The return value must NOT be mutated, and should be treated as + * an immutable borrow. If you want to take ownership, you must make your own + * copy. + */ + MappingList.prototype.toArray = function MappingList_toArray() { + if (!this._sorted) { + this._array.sort(util.compareByGeneratedPositionsInflated); + this._sorted = true; + } + return this._array; + }; + + exports.MappingList = MappingList; + + +/***/ }), +/* 7 */ +/***/ (function(module, exports, __webpack_require__) { + + /* -*- Mode: js; js-indent-level: 2; -*- */ + /* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + + var util = __webpack_require__(4); + var binarySearch = __webpack_require__(8); + var ArraySet = __webpack_require__(5).ArraySet; + var base64VLQ = __webpack_require__(2); + var quickSort = __webpack_require__(9).quickSort; + + function SourceMapConsumer(aSourceMap, aSourceMapURL) { + var sourceMap = aSourceMap; + if (typeof aSourceMap === 'string') { + sourceMap = util.parseSourceMapInput(aSourceMap); + } + + return sourceMap.sections != null + ? new IndexedSourceMapConsumer(sourceMap, aSourceMapURL) + : new BasicSourceMapConsumer(sourceMap, aSourceMapURL); + } + + SourceMapConsumer.fromSourceMap = function(aSourceMap, aSourceMapURL) { + return BasicSourceMapConsumer.fromSourceMap(aSourceMap, aSourceMapURL); + } + + /** + * The version of the source mapping spec that we are consuming. + */ + SourceMapConsumer.prototype._version = 3; + + // `__generatedMappings` and `__originalMappings` are arrays that hold the + // parsed mapping coordinates from the source map's "mappings" attribute. 
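A minimal sketch (not part of the diff) of the fast-path / deferred-sort trade-off in MappingList above; the require path is an internal module of the bundled package (assumed).

var MappingList = require('source-map/lib/mapping-list').MappingList;   // internal module (path assumed)

var list = new MappingList();
list.add({ generatedLine: 1, generatedColumn: 0 });   // in order: appended, list stays sorted
list.add({ generatedLine: 2, generatedColumn: 4 });   // still in order
list.add({ generatedLine: 1, generatedColumn: 9 });   // out of order: only flags a deferred sort
list.toArray()[0];   // { generatedLine: 1, generatedColumn: 0 } -- sorted lazily on first read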
They + // are lazily instantiated, accessed via the `_generatedMappings` and + // `_originalMappings` getters respectively, and we only parse the mappings + // and create these arrays once queried for a source location. We jump through + // these hoops because there can be many thousands of mappings, and parsing + // them is expensive, so we only want to do it if we must. + // + // Each object in the arrays is of the form: + // + // { + // generatedLine: The line number in the generated code, + // generatedColumn: The column number in the generated code, + // source: The path to the original source file that generated this + // chunk of code, + // originalLine: The line number in the original source that + // corresponds to this chunk of generated code, + // originalColumn: The column number in the original source that + // corresponds to this chunk of generated code, + // name: The name of the original symbol which generated this chunk of + // code. + // } + // + // All properties except for `generatedLine` and `generatedColumn` can be + // `null`. + // + // `_generatedMappings` is ordered by the generated positions. + // + // `_originalMappings` is ordered by the original positions. + + SourceMapConsumer.prototype.__generatedMappings = null; + Object.defineProperty(SourceMapConsumer.prototype, '_generatedMappings', { + configurable: true, + enumerable: true, + get: function () { + if (!this.__generatedMappings) { + this._parseMappings(this._mappings, this.sourceRoot); + } + + return this.__generatedMappings; + } + }); + + SourceMapConsumer.prototype.__originalMappings = null; + Object.defineProperty(SourceMapConsumer.prototype, '_originalMappings', { + configurable: true, + enumerable: true, + get: function () { + if (!this.__originalMappings) { + this._parseMappings(this._mappings, this.sourceRoot); + } + + return this.__originalMappings; + } + }); + + SourceMapConsumer.prototype._charIsMappingSeparator = + function SourceMapConsumer_charIsMappingSeparator(aStr, index) { + var c = aStr.charAt(index); + return c === ";" || c === ","; + }; + + /** + * Parse the mappings in a string in to a data structure which we can easily + * query (the ordered arrays in the `this.__generatedMappings` and + * `this.__originalMappings` properties). + */ + SourceMapConsumer.prototype._parseMappings = + function SourceMapConsumer_parseMappings(aStr, aSourceRoot) { + throw new Error("Subclasses must implement _parseMappings"); + }; + + SourceMapConsumer.GENERATED_ORDER = 1; + SourceMapConsumer.ORIGINAL_ORDER = 2; + + SourceMapConsumer.GREATEST_LOWER_BOUND = 1; + SourceMapConsumer.LEAST_UPPER_BOUND = 2; + + /** + * Iterate over each mapping between an original source/line/column and a + * generated line/column in this source map. + * + * @param Function aCallback + * The function that is called with each mapping. + * @param Object aContext + * Optional. If specified, this object will be the value of `this` every + * time that `aCallback` is called. + * @param aOrder + * Either `SourceMapConsumer.GENERATED_ORDER` or + * `SourceMapConsumer.ORIGINAL_ORDER`. Specifies whether you want to + * iterate over the mappings sorted by the generated file's line/column + * order or the original's source/line/column order, respectively. Defaults to + * `SourceMapConsumer.GENERATED_ORDER`. 
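A minimal sketch (not part of the diff) of eachMapping as documented above (its implementation follows); rawMap is a hypothetical parsed source-map object, and the constructor is the synchronous 0.6-style one bundled here.

var SourceMapConsumer = require('source-map').SourceMapConsumer;

var consumer = new SourceMapConsumer(rawMap);   // rawMap: hypothetical map object or JSON string
consumer.eachMapping(function (m) {
  console.log(m.generatedLine + ':' + m.generatedColumn +
              ' <- ' + m.source + ':' + m.originalLine + ':' + m.originalColumn +
              (m.name ? ' (' + m.name + ')' : ''));
}, null, SourceMapConsumer.GENERATED_ORDER);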
+ */ + SourceMapConsumer.prototype.eachMapping = + function SourceMapConsumer_eachMapping(aCallback, aContext, aOrder) { + var context = aContext || null; + var order = aOrder || SourceMapConsumer.GENERATED_ORDER; + + var mappings; + switch (order) { + case SourceMapConsumer.GENERATED_ORDER: + mappings = this._generatedMappings; + break; + case SourceMapConsumer.ORIGINAL_ORDER: + mappings = this._originalMappings; + break; + default: + throw new Error("Unknown order of iteration."); + } + + var sourceRoot = this.sourceRoot; + mappings.map(function (mapping) { + var source = mapping.source === null ? null : this._sources.at(mapping.source); + source = util.computeSourceURL(sourceRoot, source, this._sourceMapURL); + return { + source: source, + generatedLine: mapping.generatedLine, + generatedColumn: mapping.generatedColumn, + originalLine: mapping.originalLine, + originalColumn: mapping.originalColumn, + name: mapping.name === null ? null : this._names.at(mapping.name) + }; + }, this).forEach(aCallback, context); + }; + + /** + * Returns all generated line and column information for the original source, + * line, and column provided. If no column is provided, returns all mappings + * corresponding to a either the line we are searching for or the next + * closest line that has any mappings. Otherwise, returns all mappings + * corresponding to the given line and either the column we are searching for + * or the next closest column that has any offsets. + * + * The only argument is an object with the following properties: + * + * - source: The filename of the original source. + * - line: The line number in the original source. The line number is 1-based. + * - column: Optional. the column number in the original source. + * The column number is 0-based. + * + * and an array of objects is returned, each with the following properties: + * + * - line: The line number in the generated source, or null. The + * line number is 1-based. + * - column: The column number in the generated source, or null. + * The column number is 0-based. + */ + SourceMapConsumer.prototype.allGeneratedPositionsFor = + function SourceMapConsumer_allGeneratedPositionsFor(aArgs) { + var line = util.getArg(aArgs, 'line'); + + // When there is no exact match, BasicSourceMapConsumer.prototype._findMapping + // returns the index of the closest mapping less than the needle. By + // setting needle.originalColumn to 0, we thus find the last mapping for + // the given line, provided such a mapping exists. + var needle = { + source: util.getArg(aArgs, 'source'), + originalLine: line, + originalColumn: util.getArg(aArgs, 'column', 0) + }; + + needle.source = this._findSourceIndex(needle.source); + if (needle.source < 0) { + return []; + } + + var mappings = []; + + var index = this._findMapping(needle, + this._originalMappings, + "originalLine", + "originalColumn", + util.compareByOriginalPositions, + binarySearch.LEAST_UPPER_BOUND); + if (index >= 0) { + var mapping = this._originalMappings[index]; + + if (aArgs.column === undefined) { + var originalLine = mapping.originalLine; + + // Iterate until either we run out of mappings, or we run into + // a mapping for a different line than the one we found. Since + // mappings are sorted, this is guaranteed to find all mappings for + // the line we found. 
+ while (mapping && mapping.originalLine === originalLine) { + mappings.push({ + line: util.getArg(mapping, 'generatedLine', null), + column: util.getArg(mapping, 'generatedColumn', null), + lastColumn: util.getArg(mapping, 'lastGeneratedColumn', null) + }); + + mapping = this._originalMappings[++index]; + } + } else { + var originalColumn = mapping.originalColumn; + + // Iterate until either we run out of mappings, or we run into + // a mapping for a different line than the one we were searching for. + // Since mappings are sorted, this is guaranteed to find all mappings for + // the line we are searching for. + while (mapping && + mapping.originalLine === line && + mapping.originalColumn == originalColumn) { + mappings.push({ + line: util.getArg(mapping, 'generatedLine', null), + column: util.getArg(mapping, 'generatedColumn', null), + lastColumn: util.getArg(mapping, 'lastGeneratedColumn', null) + }); + + mapping = this._originalMappings[++index]; + } + } + } + + return mappings; + }; + + exports.SourceMapConsumer = SourceMapConsumer; + + /** + * A BasicSourceMapConsumer instance represents a parsed source map which we can + * query for information about the original file positions by giving it a file + * position in the generated source. + * + * The first parameter is the raw source map (either as a JSON string, or + * already parsed to an object). According to the spec, source maps have the + * following attributes: + * + * - version: Which version of the source map spec this map is following. + * - sources: An array of URLs to the original source files. + * - names: An array of identifiers which can be referrenced by individual mappings. + * - sourceRoot: Optional. The URL root from which all sources are relative. + * - sourcesContent: Optional. An array of contents of the original source files. + * - mappings: A string of base64 VLQs which contain the actual mappings. + * - file: Optional. The generated file this source map is associated with. + * + * Here is an example source map, taken from the source map spec[0]: + * + * { + * version : 3, + * file: "out.js", + * sourceRoot : "", + * sources: ["foo.js", "bar.js"], + * names: ["src", "maps", "are", "fun"], + * mappings: "AA,AB;;ABCDE;" + * } + * + * The second parameter, if given, is a string whose value is the URL + * at which the source map was found. This URL is used to compute the + * sources array. + * + * [0]: https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit?pli=1# + */ + function BasicSourceMapConsumer(aSourceMap, aSourceMapURL) { + var sourceMap = aSourceMap; + if (typeof aSourceMap === 'string') { + sourceMap = util.parseSourceMapInput(aSourceMap); + } + + var version = util.getArg(sourceMap, 'version'); + var sources = util.getArg(sourceMap, 'sources'); + // Sass 3.3 leaves out the 'names' array, so we deviate from the spec (which + // requires the array) to play nice here. + var names = util.getArg(sourceMap, 'names', []); + var sourceRoot = util.getArg(sourceMap, 'sourceRoot', null); + var sourcesContent = util.getArg(sourceMap, 'sourcesContent', null); + var mappings = util.getArg(sourceMap, 'mappings'); + var file = util.getArg(sourceMap, 'file', null); + + // Once again, Sass deviates from the spec and supplies the version as a + // string rather than a number, so we use loose equality checking here. 
+ if (version != this._version) { + throw new Error('Unsupported version: ' + version); + } + + if (sourceRoot) { + sourceRoot = util.normalize(sourceRoot); + } + + sources = sources + .map(String) + // Some source maps produce relative source paths like "./foo.js" instead of + // "foo.js". Normalize these first so that future comparisons will succeed. + // See bugzil.la/1090768. + .map(util.normalize) + // Always ensure that absolute sources are internally stored relative to + // the source root, if the source root is absolute. Not doing this would + // be particularly problematic when the source root is a prefix of the + // source (valid, but why??). See github issue #199 and bugzil.la/1188982. + .map(function (source) { + return sourceRoot && util.isAbsolute(sourceRoot) && util.isAbsolute(source) + ? util.relative(sourceRoot, source) + : source; + }); + + // Pass `true` below to allow duplicate names and sources. While source maps + // are intended to be compressed and deduplicated, the TypeScript compiler + // sometimes generates source maps with duplicates in them. See Github issue + // #72 and bugzil.la/889492. + this._names = ArraySet.fromArray(names.map(String), true); + this._sources = ArraySet.fromArray(sources, true); + + this._absoluteSources = this._sources.toArray().map(function (s) { + return util.computeSourceURL(sourceRoot, s, aSourceMapURL); + }); + + this.sourceRoot = sourceRoot; + this.sourcesContent = sourcesContent; + this._mappings = mappings; + this._sourceMapURL = aSourceMapURL; + this.file = file; + } + + BasicSourceMapConsumer.prototype = Object.create(SourceMapConsumer.prototype); + BasicSourceMapConsumer.prototype.consumer = SourceMapConsumer; + + /** + * Utility function to find the index of a source. Returns -1 if not + * found. + */ + BasicSourceMapConsumer.prototype._findSourceIndex = function(aSource) { + var relativeSource = aSource; + if (this.sourceRoot != null) { + relativeSource = util.relative(this.sourceRoot, relativeSource); + } + + if (this._sources.has(relativeSource)) { + return this._sources.indexOf(relativeSource); + } + + // Maybe aSource is an absolute URL as returned by |sources|. In + // this case we can't simply undo the transform. + var i; + for (i = 0; i < this._absoluteSources.length; ++i) { + if (this._absoluteSources[i] == aSource) { + return i; + } + } + + return -1; + }; + + /** + * Create a BasicSourceMapConsumer from a SourceMapGenerator. + * + * @param SourceMapGenerator aSourceMap + * The source map that will be consumed. 
+ * @param String aSourceMapURL + * The URL at which the source map can be found (optional) + * @returns BasicSourceMapConsumer + */ + BasicSourceMapConsumer.fromSourceMap = + function SourceMapConsumer_fromSourceMap(aSourceMap, aSourceMapURL) { + var smc = Object.create(BasicSourceMapConsumer.prototype); + + var names = smc._names = ArraySet.fromArray(aSourceMap._names.toArray(), true); + var sources = smc._sources = ArraySet.fromArray(aSourceMap._sources.toArray(), true); + smc.sourceRoot = aSourceMap._sourceRoot; + smc.sourcesContent = aSourceMap._generateSourcesContent(smc._sources.toArray(), + smc.sourceRoot); + smc.file = aSourceMap._file; + smc._sourceMapURL = aSourceMapURL; + smc._absoluteSources = smc._sources.toArray().map(function (s) { + return util.computeSourceURL(smc.sourceRoot, s, aSourceMapURL); + }); + + // Because we are modifying the entries (by converting string sources and + // names to indices into the sources and names ArraySets), we have to make + // a copy of the entry or else bad things happen. Shared mutable state + // strikes again! See github issue #191. + + var generatedMappings = aSourceMap._mappings.toArray().slice(); + var destGeneratedMappings = smc.__generatedMappings = []; + var destOriginalMappings = smc.__originalMappings = []; + + for (var i = 0, length = generatedMappings.length; i < length; i++) { + var srcMapping = generatedMappings[i]; + var destMapping = new Mapping; + destMapping.generatedLine = srcMapping.generatedLine; + destMapping.generatedColumn = srcMapping.generatedColumn; + + if (srcMapping.source) { + destMapping.source = sources.indexOf(srcMapping.source); + destMapping.originalLine = srcMapping.originalLine; + destMapping.originalColumn = srcMapping.originalColumn; + + if (srcMapping.name) { + destMapping.name = names.indexOf(srcMapping.name); + } + + destOriginalMappings.push(destMapping); + } + + destGeneratedMappings.push(destMapping); + } + + quickSort(smc.__originalMappings, util.compareByOriginalPositions); + + return smc; + }; + + /** + * The version of the source mapping spec that we are consuming. + */ + BasicSourceMapConsumer.prototype._version = 3; + + /** + * The list of original sources. + */ + Object.defineProperty(BasicSourceMapConsumer.prototype, 'sources', { + get: function () { + return this._absoluteSources.slice(); + } + }); + + /** + * Provide the JIT with a nice shape / hidden class. + */ + function Mapping() { + this.generatedLine = 0; + this.generatedColumn = 0; + this.source = null; + this.originalLine = null; + this.originalColumn = null; + this.name = null; + } + + /** + * Parse the mappings in a string in to a data structure which we can easily + * query (the ordered arrays in the `this.__generatedMappings` and + * `this.__originalMappings` properties). 
+ */ + BasicSourceMapConsumer.prototype._parseMappings = + function SourceMapConsumer_parseMappings(aStr, aSourceRoot) { + var generatedLine = 1; + var previousGeneratedColumn = 0; + var previousOriginalLine = 0; + var previousOriginalColumn = 0; + var previousSource = 0; + var previousName = 0; + var length = aStr.length; + var index = 0; + var cachedSegments = {}; + var temp = {}; + var originalMappings = []; + var generatedMappings = []; + var mapping, str, segment, end, value; + + while (index < length) { + if (aStr.charAt(index) === ';') { + generatedLine++; + index++; + previousGeneratedColumn = 0; + } + else if (aStr.charAt(index) === ',') { + index++; + } + else { + mapping = new Mapping(); + mapping.generatedLine = generatedLine; + + // Because each offset is encoded relative to the previous one, + // many segments often have the same encoding. We can exploit this + // fact by caching the parsed variable length fields of each segment, + // allowing us to avoid a second parse if we encounter the same + // segment again. + for (end = index; end < length; end++) { + if (this._charIsMappingSeparator(aStr, end)) { + break; + } + } + str = aStr.slice(index, end); + + segment = cachedSegments[str]; + if (segment) { + index += str.length; + } else { + segment = []; + while (index < end) { + base64VLQ.decode(aStr, index, temp); + value = temp.value; + index = temp.rest; + segment.push(value); + } + + if (segment.length === 2) { + throw new Error('Found a source, but no line and column'); + } + + if (segment.length === 3) { + throw new Error('Found a source and line, but no column'); + } + + cachedSegments[str] = segment; + } + + // Generated column. + mapping.generatedColumn = previousGeneratedColumn + segment[0]; + previousGeneratedColumn = mapping.generatedColumn; + + if (segment.length > 1) { + // Original source. + mapping.source = previousSource + segment[1]; + previousSource += segment[1]; + + // Original line. + mapping.originalLine = previousOriginalLine + segment[2]; + previousOriginalLine = mapping.originalLine; + // Lines are stored 0-based + mapping.originalLine += 1; + + // Original column. + mapping.originalColumn = previousOriginalColumn + segment[3]; + previousOriginalColumn = mapping.originalColumn; + + if (segment.length > 4) { + // Original name. + mapping.name = previousName + segment[4]; + previousName += segment[4]; + } + } + + generatedMappings.push(mapping); + if (typeof mapping.originalLine === 'number') { + originalMappings.push(mapping); + } + } + } + + quickSort(generatedMappings, util.compareByGeneratedPositionsDeflated); + this.__generatedMappings = generatedMappings; + + quickSort(originalMappings, util.compareByOriginalPositions); + this.__originalMappings = originalMappings; + }; + + /** + * Find the mapping that best matches the hypothetical "needle" mapping that + * we are searching for in the given "haystack" of mappings. + */ + BasicSourceMapConsumer.prototype._findMapping = + function SourceMapConsumer_findMapping(aNeedle, aMappings, aLineName, + aColumnName, aComparator, aBias) { + // To return the position we are searching for, we must first find the + // mapping for the given position and then return the opposite position it + // points to. Because the mappings are sorted, we can use binary search to + // find the best mapping. 
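// --------------------------------------------------------------------------
// Illustrative sketch of the delta decoding performed by _parseMappings
// above. Each decoded VLQ segment is, in order,
//   [generatedColumn, sourceIndex, originalLine, originalColumn, nameIndex]
// and every field is an offset relative to the previously decoded value
// (generatedColumn alone resets to 0 at each ';', i.e. at each new generated
// line). The concrete numbers below are assumed purely for illustration.
var previousGeneratedColumn = 10;
var previousSource = 0;
var previousOriginalLine = 4;     // accumulated 0-based
var previousOriginalColumn = 2;

var segment = [5, 0, 2, 3];       // hypothetical decoded segment
var mapping = {
  generatedColumn: previousGeneratedColumn + segment[0],  // 15
  source:          previousSource + segment[1],           // 0 (index into `sources`)
  originalLine:    previousOriginalLine + segment[2] + 1, // 7 (accumulated 0-based, exposed 1-based)
  originalColumn:  previousOriginalColumn + segment[3]    // 5
};
console.log(mapping);
// --------------------------------------------------------------------------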
+ + if (aNeedle[aLineName] <= 0) { + throw new TypeError('Line must be greater than or equal to 1, got ' + + aNeedle[aLineName]); + } + if (aNeedle[aColumnName] < 0) { + throw new TypeError('Column must be greater than or equal to 0, got ' + + aNeedle[aColumnName]); + } + + return binarySearch.search(aNeedle, aMappings, aComparator, aBias); + }; + + /** + * Compute the last column for each generated mapping. The last column is + * inclusive. + */ + BasicSourceMapConsumer.prototype.computeColumnSpans = + function SourceMapConsumer_computeColumnSpans() { + for (var index = 0; index < this._generatedMappings.length; ++index) { + var mapping = this._generatedMappings[index]; + + // Mappings do not contain a field for the last generated columnt. We + // can come up with an optimistic estimate, however, by assuming that + // mappings are contiguous (i.e. given two consecutive mappings, the + // first mapping ends where the second one starts). + if (index + 1 < this._generatedMappings.length) { + var nextMapping = this._generatedMappings[index + 1]; + + if (mapping.generatedLine === nextMapping.generatedLine) { + mapping.lastGeneratedColumn = nextMapping.generatedColumn - 1; + continue; + } + } + + // The last mapping for each line spans the entire line. + mapping.lastGeneratedColumn = Infinity; + } + }; + + /** + * Returns the original source, line, and column information for the generated + * source's line and column positions provided. The only argument is an object + * with the following properties: + * + * - line: The line number in the generated source. The line number + * is 1-based. + * - column: The column number in the generated source. The column + * number is 0-based. + * - bias: Either 'SourceMapConsumer.GREATEST_LOWER_BOUND' or + * 'SourceMapConsumer.LEAST_UPPER_BOUND'. Specifies whether to return the + * closest element that is smaller than or greater than the one we are + * searching for, respectively, if the exact element cannot be found. + * Defaults to 'SourceMapConsumer.GREATEST_LOWER_BOUND'. + * + * and an object is returned with the following properties: + * + * - source: The original source file, or null. + * - line: The line number in the original source, or null. The + * line number is 1-based. + * - column: The column number in the original source, or null. The + * column number is 0-based. + * - name: The original identifier, or null. 
+ */ + BasicSourceMapConsumer.prototype.originalPositionFor = + function SourceMapConsumer_originalPositionFor(aArgs) { + var needle = { + generatedLine: util.getArg(aArgs, 'line'), + generatedColumn: util.getArg(aArgs, 'column') + }; + + var index = this._findMapping( + needle, + this._generatedMappings, + "generatedLine", + "generatedColumn", + util.compareByGeneratedPositionsDeflated, + util.getArg(aArgs, 'bias', SourceMapConsumer.GREATEST_LOWER_BOUND) + ); + + if (index >= 0) { + var mapping = this._generatedMappings[index]; + + if (mapping.generatedLine === needle.generatedLine) { + var source = util.getArg(mapping, 'source', null); + if (source !== null) { + source = this._sources.at(source); + source = util.computeSourceURL(this.sourceRoot, source, this._sourceMapURL); + } + var name = util.getArg(mapping, 'name', null); + if (name !== null) { + name = this._names.at(name); + } + return { + source: source, + line: util.getArg(mapping, 'originalLine', null), + column: util.getArg(mapping, 'originalColumn', null), + name: name + }; + } + } + + return { + source: null, + line: null, + column: null, + name: null + }; + }; + + /** + * Return true if we have the source content for every source in the source + * map, false otherwise. + */ + BasicSourceMapConsumer.prototype.hasContentsOfAllSources = + function BasicSourceMapConsumer_hasContentsOfAllSources() { + if (!this.sourcesContent) { + return false; + } + return this.sourcesContent.length >= this._sources.size() && + !this.sourcesContent.some(function (sc) { return sc == null; }); + }; + + /** + * Returns the original source content. The only argument is the url of the + * original source file. Returns null if no original source content is + * available. + */ + BasicSourceMapConsumer.prototype.sourceContentFor = + function SourceMapConsumer_sourceContentFor(aSource, nullOnMissing) { + if (!this.sourcesContent) { + return null; + } + + var index = this._findSourceIndex(aSource); + if (index >= 0) { + return this.sourcesContent[index]; + } + + var relativeSource = aSource; + if (this.sourceRoot != null) { + relativeSource = util.relative(this.sourceRoot, relativeSource); + } + + var url; + if (this.sourceRoot != null + && (url = util.urlParse(this.sourceRoot))) { + // XXX: file:// URIs and absolute paths lead to unexpected behavior for + // many users. We can help them out when they expect file:// URIs to + // behave like it would if they were running a local HTTP server. See + // https://bugzilla.mozilla.org/show_bug.cgi?id=885597. + var fileUriAbsPath = relativeSource.replace(/^file:\/\//, ""); + if (url.scheme == "file" + && this._sources.has(fileUriAbsPath)) { + return this.sourcesContent[this._sources.indexOf(fileUriAbsPath)] + } + + if ((!url.path || url.path == "/") + && this._sources.has("/" + relativeSource)) { + return this.sourcesContent[this._sources.indexOf("/" + relativeSource)]; + } + } + + // This function is used recursively from + // IndexedSourceMapConsumer.prototype.sourceContentFor. In that case, we + // don't want to throw if we can't find the source - we just want to + // return null, so we provide a flag to exit gracefully. + if (nullOnMissing) { + return null; + } + else { + throw new Error('"' + relativeSource + '" is not in the SourceMap.'); + } + }; + + /** + * Returns the generated line and column information for the original source, + * line, and column positions provided. The only argument is an object with + * the following properties: + * + * - source: The filename of the original source. 
+ * - line: The line number in the original source. The line number + * is 1-based. + * - column: The column number in the original source. The column + * number is 0-based. + * - bias: Either 'SourceMapConsumer.GREATEST_LOWER_BOUND' or + * 'SourceMapConsumer.LEAST_UPPER_BOUND'. Specifies whether to return the + * closest element that is smaller than or greater than the one we are + * searching for, respectively, if the exact element cannot be found. + * Defaults to 'SourceMapConsumer.GREATEST_LOWER_BOUND'. + * + * and an object is returned with the following properties: + * + * - line: The line number in the generated source, or null. The + * line number is 1-based. + * - column: The column number in the generated source, or null. + * The column number is 0-based. + */ + BasicSourceMapConsumer.prototype.generatedPositionFor = + function SourceMapConsumer_generatedPositionFor(aArgs) { + var source = util.getArg(aArgs, 'source'); + source = this._findSourceIndex(source); + if (source < 0) { + return { + line: null, + column: null, + lastColumn: null + }; + } + + var needle = { + source: source, + originalLine: util.getArg(aArgs, 'line'), + originalColumn: util.getArg(aArgs, 'column') + }; + + var index = this._findMapping( + needle, + this._originalMappings, + "originalLine", + "originalColumn", + util.compareByOriginalPositions, + util.getArg(aArgs, 'bias', SourceMapConsumer.GREATEST_LOWER_BOUND) + ); + + if (index >= 0) { + var mapping = this._originalMappings[index]; + + if (mapping.source === needle.source) { + return { + line: util.getArg(mapping, 'generatedLine', null), + column: util.getArg(mapping, 'generatedColumn', null), + lastColumn: util.getArg(mapping, 'lastGeneratedColumn', null) + }; + } + } + + return { + line: null, + column: null, + lastColumn: null + }; + }; + + exports.BasicSourceMapConsumer = BasicSourceMapConsumer; + + /** + * An IndexedSourceMapConsumer instance represents a parsed source map which + * we can query for information. It differs from BasicSourceMapConsumer in + * that it takes "indexed" source maps (i.e. ones with a "sections" field) as + * input. + * + * The first parameter is a raw source map (either as a JSON string, or already + * parsed to an object). According to the spec for indexed source maps, they + * have the following attributes: + * + * - version: Which version of the source map spec this map is following. + * - file: Optional. The generated file this source map is associated with. + * - sections: A list of section definitions. + * + * Each value under the "sections" field has two fields: + * - offset: The offset into the original specified at which this section + * begins to apply, defined as an object with a "line" and "column" + * field. + * - map: A source map definition. This source map could also be indexed, + * but doesn't have to be. + * + * Instead of the "map" field, it's also possible to have a "url" field + * specifying a URL to retrieve a source map from, but that's currently + * unsupported. + * + * Here's an example source map, taken from the source map spec[0], but + * modified to omit a section which uses the "url" field. + * + * { + * version : 3, + * file: "app.js", + * sections: [{ + * offset: {line:100, column:10}, + * map: { + * version : 3, + * file: "section.js", + * sources: ["foo.js", "bar.js"], + * names: ["src", "maps", "are", "fun"], + * mappings: "AAAA,E;;ABCDE;" + * } + * }], + * } + * + * The second parameter, if given, is a string whose value is the URL + * at which the source map was found. 
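// --------------------------------------------------------------------------
// Illustrative sketch of how the BasicSourceMapConsumer query methods defined
// above are typically used. The tiny map literal is assumed for illustration;
// the calls themselves (originalPositionFor, generatedPositionFor,
// computeColumnSpans, allGeneratedPositionsFor, eachMapping) are the methods
// implemented in this module.
var SourceMapConsumer = require('source-map').SourceMapConsumer;

var consumer = new SourceMapConsumer({
  version: 3,
  file: 'out.js',
  sources: ['foo.js'],
  names: [],
  mappings: 'AAAA',            // one segment: generated 1:0 -> foo.js 1:0
  sourcesContent: ['var x;']
});

// Generated position -> original position (lines 1-based, columns 0-based).
console.log(consumer.originalPositionFor({ line: 1, column: 0 }));
// -> { source: 'foo.js', line: 1, column: 0, name: null }

// Original position -> generated position.
console.log(consumer.generatedPositionFor({ source: 'foo.js', line: 1, column: 0 }));
// -> { line: 1, column: 0, lastColumn: null }

// lastColumn is only populated once computeColumnSpans() has run.
consumer.computeColumnSpans();
console.log(consumer.allGeneratedPositionsFor({ source: 'foo.js', line: 1 }));
// -> [ { line: 1, column: 0, lastColumn: Infinity } ]

// Iterate every mapping, sorted by generated position by default.
consumer.eachMapping(function (m) {
  console.log(m.source, m.originalLine + ':' + m.originalColumn,
              '->', m.generatedLine + ':' + m.generatedColumn);
});
// --------------------------------------------------------------------------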
This URL is used to compute the + * sources array. + * + * [0]: https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit#heading=h.535es3xeprgt + */ + function IndexedSourceMapConsumer(aSourceMap, aSourceMapURL) { + var sourceMap = aSourceMap; + if (typeof aSourceMap === 'string') { + sourceMap = util.parseSourceMapInput(aSourceMap); + } + + var version = util.getArg(sourceMap, 'version'); + var sections = util.getArg(sourceMap, 'sections'); + + if (version != this._version) { + throw new Error('Unsupported version: ' + version); + } + + this._sources = new ArraySet(); + this._names = new ArraySet(); + + var lastOffset = { + line: -1, + column: 0 + }; + this._sections = sections.map(function (s) { + if (s.url) { + // The url field will require support for asynchronicity. + // See https://github.com/mozilla/source-map/issues/16 + throw new Error('Support for url field in sections not implemented.'); + } + var offset = util.getArg(s, 'offset'); + var offsetLine = util.getArg(offset, 'line'); + var offsetColumn = util.getArg(offset, 'column'); + + if (offsetLine < lastOffset.line || + (offsetLine === lastOffset.line && offsetColumn < lastOffset.column)) { + throw new Error('Section offsets must be ordered and non-overlapping.'); + } + lastOffset = offset; + + return { + generatedOffset: { + // The offset fields are 0-based, but we use 1-based indices when + // encoding/decoding from VLQ. + generatedLine: offsetLine + 1, + generatedColumn: offsetColumn + 1 + }, + consumer: new SourceMapConsumer(util.getArg(s, 'map'), aSourceMapURL) + } + }); + } + + IndexedSourceMapConsumer.prototype = Object.create(SourceMapConsumer.prototype); + IndexedSourceMapConsumer.prototype.constructor = SourceMapConsumer; + + /** + * The version of the source mapping spec that we are consuming. + */ + IndexedSourceMapConsumer.prototype._version = 3; + + /** + * The list of original sources. + */ + Object.defineProperty(IndexedSourceMapConsumer.prototype, 'sources', { + get: function () { + var sources = []; + for (var i = 0; i < this._sections.length; i++) { + for (var j = 0; j < this._sections[i].consumer.sources.length; j++) { + sources.push(this._sections[i].consumer.sources[j]); + } + } + return sources; + } + }); + + /** + * Returns the original source, line, and column information for the generated + * source's line and column positions provided. The only argument is an object + * with the following properties: + * + * - line: The line number in the generated source. The line number + * is 1-based. + * - column: The column number in the generated source. The column + * number is 0-based. + * + * and an object is returned with the following properties: + * + * - source: The original source file, or null. + * - line: The line number in the original source, or null. The + * line number is 1-based. + * - column: The column number in the original source, or null. The + * column number is 0-based. + * - name: The original identifier, or null. + */ + IndexedSourceMapConsumer.prototype.originalPositionFor = + function IndexedSourceMapConsumer_originalPositionFor(aArgs) { + var needle = { + generatedLine: util.getArg(aArgs, 'line'), + generatedColumn: util.getArg(aArgs, 'column') + }; + + // Find the section containing the generated position we're trying to map + // to an original position. 
+ var sectionIndex = binarySearch.search(needle, this._sections, + function(needle, section) { + var cmp = needle.generatedLine - section.generatedOffset.generatedLine; + if (cmp) { + return cmp; + } + + return (needle.generatedColumn - + section.generatedOffset.generatedColumn); + }); + var section = this._sections[sectionIndex]; + + if (!section) { + return { + source: null, + line: null, + column: null, + name: null + }; + } + + return section.consumer.originalPositionFor({ + line: needle.generatedLine - + (section.generatedOffset.generatedLine - 1), + column: needle.generatedColumn - + (section.generatedOffset.generatedLine === needle.generatedLine + ? section.generatedOffset.generatedColumn - 1 + : 0), + bias: aArgs.bias + }); + }; + + /** + * Return true if we have the source content for every source in the source + * map, false otherwise. + */ + IndexedSourceMapConsumer.prototype.hasContentsOfAllSources = + function IndexedSourceMapConsumer_hasContentsOfAllSources() { + return this._sections.every(function (s) { + return s.consumer.hasContentsOfAllSources(); + }); + }; + + /** + * Returns the original source content. The only argument is the url of the + * original source file. Returns null if no original source content is + * available. + */ + IndexedSourceMapConsumer.prototype.sourceContentFor = + function IndexedSourceMapConsumer_sourceContentFor(aSource, nullOnMissing) { + for (var i = 0; i < this._sections.length; i++) { + var section = this._sections[i]; + + var content = section.consumer.sourceContentFor(aSource, true); + if (content) { + return content; + } + } + if (nullOnMissing) { + return null; + } + else { + throw new Error('"' + aSource + '" is not in the SourceMap.'); + } + }; + + /** + * Returns the generated line and column information for the original source, + * line, and column positions provided. The only argument is an object with + * the following properties: + * + * - source: The filename of the original source. + * - line: The line number in the original source. The line number + * is 1-based. + * - column: The column number in the original source. The column + * number is 0-based. + * + * and an object is returned with the following properties: + * + * - line: The line number in the generated source, or null. The + * line number is 1-based. + * - column: The column number in the generated source, or null. + * The column number is 0-based. + */ + IndexedSourceMapConsumer.prototype.generatedPositionFor = + function IndexedSourceMapConsumer_generatedPositionFor(aArgs) { + for (var i = 0; i < this._sections.length; i++) { + var section = this._sections[i]; + + // Only consider this section if the requested source is in the list of + // sources of the consumer. + if (section.consumer._findSourceIndex(util.getArg(aArgs, 'source')) === -1) { + continue; + } + var generatedPosition = section.consumer.generatedPositionFor(aArgs); + if (generatedPosition) { + var ret = { + line: generatedPosition.line + + (section.generatedOffset.generatedLine - 1), + column: generatedPosition.column + + (section.generatedOffset.generatedLine === generatedPosition.line + ? section.generatedOffset.generatedColumn - 1 + : 0) + }; + return ret; + } + } + + return { + line: null, + column: null + }; + }; + + /** + * Parse the mappings in a string in to a data structure which we can easily + * query (the ordered arrays in the `this.__generatedMappings` and + * `this.__originalMappings` properties). 
+ */ + IndexedSourceMapConsumer.prototype._parseMappings = + function IndexedSourceMapConsumer_parseMappings(aStr, aSourceRoot) { + this.__generatedMappings = []; + this.__originalMappings = []; + for (var i = 0; i < this._sections.length; i++) { + var section = this._sections[i]; + var sectionMappings = section.consumer._generatedMappings; + for (var j = 0; j < sectionMappings.length; j++) { + var mapping = sectionMappings[j]; + + var source = section.consumer._sources.at(mapping.source); + source = util.computeSourceURL(section.consumer.sourceRoot, source, this._sourceMapURL); + this._sources.add(source); + source = this._sources.indexOf(source); + + var name = null; + if (mapping.name) { + name = section.consumer._names.at(mapping.name); + this._names.add(name); + name = this._names.indexOf(name); + } + + // The mappings coming from the consumer for the section have + // generated positions relative to the start of the section, so we + // need to offset them to be relative to the start of the concatenated + // generated file. + var adjustedMapping = { + source: source, + generatedLine: mapping.generatedLine + + (section.generatedOffset.generatedLine - 1), + generatedColumn: mapping.generatedColumn + + (section.generatedOffset.generatedLine === mapping.generatedLine + ? section.generatedOffset.generatedColumn - 1 + : 0), + originalLine: mapping.originalLine, + originalColumn: mapping.originalColumn, + name: name + }; + + this.__generatedMappings.push(adjustedMapping); + if (typeof adjustedMapping.originalLine === 'number') { + this.__originalMappings.push(adjustedMapping); + } + } + } + + quickSort(this.__generatedMappings, util.compareByGeneratedPositionsDeflated); + quickSort(this.__originalMappings, util.compareByOriginalPositions); + }; + + exports.IndexedSourceMapConsumer = IndexedSourceMapConsumer; + + +/***/ }), +/* 8 */ +/***/ (function(module, exports) { + + /* -*- Mode: js; js-indent-level: 2; -*- */ + /* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + + exports.GREATEST_LOWER_BOUND = 1; + exports.LEAST_UPPER_BOUND = 2; + + /** + * Recursive implementation of binary search. + * + * @param aLow Indices here and lower do not contain the needle. + * @param aHigh Indices here and higher do not contain the needle. + * @param aNeedle The element being searched for. + * @param aHaystack The non-empty array being searched. + * @param aCompare Function which takes two elements and returns -1, 0, or 1. + * @param aBias Either 'binarySearch.GREATEST_LOWER_BOUND' or + * 'binarySearch.LEAST_UPPER_BOUND'. Specifies whether to return the + * closest element that is smaller than or greater than the one we are + * searching for, respectively, if the exact element cannot be found. + */ + function recursiveSearch(aLow, aHigh, aNeedle, aHaystack, aCompare, aBias) { + // This function terminates when one of the following is true: + // + // 1. We find the exact element we are looking for. + // + // 2. We did not find the exact element, but we can return the index of + // the next-closest element. + // + // 3. We did not find the exact element, and there is no next-closest + // element than the one we are searching for, so we return -1. + var mid = Math.floor((aHigh - aLow) / 2) + aLow; + var cmp = aCompare(aNeedle, aHaystack[mid], true); + if (cmp === 0) { + // Found the element we are looking for. 
+ return mid; + } + else if (cmp > 0) { + // Our needle is greater than aHaystack[mid]. + if (aHigh - mid > 1) { + // The element is in the upper half. + return recursiveSearch(mid, aHigh, aNeedle, aHaystack, aCompare, aBias); + } + + // The exact needle element was not found in this haystack. Determine if + // we are in termination case (3) or (2) and return the appropriate thing. + if (aBias == exports.LEAST_UPPER_BOUND) { + return aHigh < aHaystack.length ? aHigh : -1; + } else { + return mid; + } + } + else { + // Our needle is less than aHaystack[mid]. + if (mid - aLow > 1) { + // The element is in the lower half. + return recursiveSearch(aLow, mid, aNeedle, aHaystack, aCompare, aBias); + } + + // we are in termination case (3) or (2) and return the appropriate thing. + if (aBias == exports.LEAST_UPPER_BOUND) { + return mid; + } else { + return aLow < 0 ? -1 : aLow; + } + } + } + + /** + * This is an implementation of binary search which will always try and return + * the index of the closest element if there is no exact hit. This is because + * mappings between original and generated line/col pairs are single points, + * and there is an implicit region between each of them, so a miss just means + * that you aren't on the very start of a region. + * + * @param aNeedle The element you are looking for. + * @param aHaystack The array that is being searched. + * @param aCompare A function which takes the needle and an element in the + * array and returns -1, 0, or 1 depending on whether the needle is less + * than, equal to, or greater than the element, respectively. + * @param aBias Either 'binarySearch.GREATEST_LOWER_BOUND' or + * 'binarySearch.LEAST_UPPER_BOUND'. Specifies whether to return the + * closest element that is smaller than or greater than the one we are + * searching for, respectively, if the exact element cannot be found. + * Defaults to 'binarySearch.GREATEST_LOWER_BOUND'. + */ + exports.search = function search(aNeedle, aHaystack, aCompare, aBias) { + if (aHaystack.length === 0) { + return -1; + } + + var index = recursiveSearch(-1, aHaystack.length, aNeedle, aHaystack, + aCompare, aBias || exports.GREATEST_LOWER_BOUND); + if (index < 0) { + return -1; + } + + // We have found either the exact element, or the next-closest element than + // the one we are searching for. However, there may be more than one such + // element. Make sure we always return the smallest of these. + while (index - 1 >= 0) { + if (aCompare(aHaystack[index], aHaystack[index - 1], true) !== 0) { + break; + } + --index; + } + + return index; + }; + + +/***/ }), +/* 9 */ +/***/ (function(module, exports) { + + /* -*- Mode: js; js-indent-level: 2; -*- */ + /* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + + // It turns out that some (most?) JavaScript engines don't self-host + // `Array.prototype.sort`. This makes sense because C++ will likely remain + // faster than JS when doing raw CPU-intensive sorting. However, when using a + // custom comparator function, calling back and forth between the VM's C++ and + // JIT'd JS is rather slow *and* loses JIT type information, resulting in + // worse generated code for the comparator function than would be optimal. In + // fact, when sorting with a comparator, these costs outweigh the benefits of + // sorting in C++. By using our own JS-implemented Quick Sort (below), we get + // a ~3500ms mean speed-up in `bench/bench.html`. 
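// --------------------------------------------------------------------------
// Illustrative sketch of the bias semantics of the `search` function exported
// just above (module 8). The numeric haystack and comparator are assumed for
// illustration only; the library itself searches sorted mapping objects. The
// require path assumes the package's non-bundled lib/ layout.
var binarySearch = require('source-map/lib/binary-search');

var haystack = [2, 4, 4, 6];
function cmp(needle, element) { return needle - element; }

// No exact hit: GREATEST_LOWER_BOUND returns the closest smaller element,
// LEAST_UPPER_BOUND the closest greater one, and runs of equal elements are
// always resolved to the smallest index.
binarySearch.search(5, haystack, cmp, binarySearch.GREATEST_LOWER_BOUND); // 1 (the first 4)
binarySearch.search(5, haystack, cmp, binarySearch.LEAST_UPPER_BOUND);    // 3 (the 6)
binarySearch.search(4, haystack, cmp);                                    // 1 (exact match)
binarySearch.search(1, haystack, cmp);                                    // -1 (no smaller element exists)
// --------------------------------------------------------------------------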
+ + /** + * Swap the elements indexed by `x` and `y` in the array `ary`. + * + * @param {Array} ary + * The array. + * @param {Number} x + * The index of the first item. + * @param {Number} y + * The index of the second item. + */ + function swap(ary, x, y) { + var temp = ary[x]; + ary[x] = ary[y]; + ary[y] = temp; + } + + /** + * Returns a random integer within the range `low .. high` inclusive. + * + * @param {Number} low + * The lower bound on the range. + * @param {Number} high + * The upper bound on the range. + */ + function randomIntInRange(low, high) { + return Math.round(low + (Math.random() * (high - low))); + } + + /** + * The Quick Sort algorithm. + * + * @param {Array} ary + * An array to sort. + * @param {function} comparator + * Function to use to compare two items. + * @param {Number} p + * Start index of the array + * @param {Number} r + * End index of the array + */ + function doQuickSort(ary, comparator, p, r) { + // If our lower bound is less than our upper bound, we (1) partition the + // array into two pieces and (2) recurse on each half. If it is not, this is + // the empty array and our base case. + + if (p < r) { + // (1) Partitioning. + // + // The partitioning chooses a pivot between `p` and `r` and moves all + // elements that are less than or equal to the pivot to the before it, and + // all the elements that are greater than it after it. The effect is that + // once partition is done, the pivot is in the exact place it will be when + // the array is put in sorted order, and it will not need to be moved + // again. This runs in O(n) time. + + // Always choose a random pivot so that an input array which is reverse + // sorted does not cause O(n^2) running time. + var pivotIndex = randomIntInRange(p, r); + var i = p - 1; + + swap(ary, pivotIndex, r); + var pivot = ary[r]; + + // Immediately after `j` is incremented in this loop, the following hold + // true: + // + // * Every element in `ary[p .. i]` is less than or equal to the pivot. + // + // * Every element in `ary[i+1 .. j-1]` is greater than the pivot. + for (var j = p; j < r; j++) { + if (comparator(ary[j], pivot) <= 0) { + i += 1; + swap(ary, i, j); + } + } + + swap(ary, i + 1, j); + var q = i + 1; + + // (2) Recurse on each half. + + doQuickSort(ary, comparator, p, q - 1); + doQuickSort(ary, comparator, q + 1, r); + } + } + + /** + * Sort the given array in-place with the given comparator function. + * + * @param {Array} ary + * An array to sort. + * @param {function} comparator + * Function to use to compare two items. + */ + exports.quickSort = function (ary, comparator) { + doQuickSort(ary, comparator, 0, ary.length - 1); + }; + + +/***/ }), +/* 10 */ +/***/ (function(module, exports, __webpack_require__) { + + /* -*- Mode: js; js-indent-level: 2; -*- */ + /* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + + var SourceMapGenerator = __webpack_require__(1).SourceMapGenerator; + var util = __webpack_require__(4); + + // Matches a Windows-style `\r\n` newline or a `\n` newline used by all other + // operating systems these days (capturing the result). + var REGEX_NEWLINE = /(\r?\n)/; + + // Newline character code for charCodeAt() comparisons + var NEWLINE_CODE = 10; + + // Private symbol for identifying `SourceNode`s when multiple versions of + // the source-map library are loaded. This MUST NOT CHANGE across + // versions! 
+ var isSourceNode = "$$$isSourceNode$$$"; + + /** + * SourceNodes provide a way to abstract over interpolating/concatenating + * snippets of generated JavaScript source code while maintaining the line and + * column information associated with the original source code. + * + * @param aLine The original line number. + * @param aColumn The original column number. + * @param aSource The original source's filename. + * @param aChunks Optional. An array of strings which are snippets of + * generated JS, or other SourceNodes. + * @param aName The original identifier. + */ + function SourceNode(aLine, aColumn, aSource, aChunks, aName) { + this.children = []; + this.sourceContents = {}; + this.line = aLine == null ? null : aLine; + this.column = aColumn == null ? null : aColumn; + this.source = aSource == null ? null : aSource; + this.name = aName == null ? null : aName; + this[isSourceNode] = true; + if (aChunks != null) this.add(aChunks); + } + + /** + * Creates a SourceNode from generated code and a SourceMapConsumer. + * + * @param aGeneratedCode The generated code + * @param aSourceMapConsumer The SourceMap for the generated code + * @param aRelativePath Optional. The path that relative sources in the + * SourceMapConsumer should be relative to. + */ + SourceNode.fromStringWithSourceMap = + function SourceNode_fromStringWithSourceMap(aGeneratedCode, aSourceMapConsumer, aRelativePath) { + // The SourceNode we want to fill with the generated code + // and the SourceMap + var node = new SourceNode(); + + // All even indices of this array are one line of the generated code, + // while all odd indices are the newlines between two adjacent lines + // (since `REGEX_NEWLINE` captures its match). + // Processed fragments are accessed by calling `shiftNextLine`. + var remainingLines = aGeneratedCode.split(REGEX_NEWLINE); + var remainingLinesIndex = 0; + var shiftNextLine = function() { + var lineContents = getNextLine(); + // The last line of a file might not have a newline. + var newLine = getNextLine() || ""; + return lineContents + newLine; + + function getNextLine() { + return remainingLinesIndex < remainingLines.length ? + remainingLines[remainingLinesIndex++] : undefined; + } + }; + + // We need to remember the position of "remainingLines" + var lastGeneratedLine = 1, lastGeneratedColumn = 0; + + // The generate SourceNodes we need a code range. + // To extract it current and last mapping is used. + // Here we store the last mapping. + var lastMapping = null; + + aSourceMapConsumer.eachMapping(function (mapping) { + if (lastMapping !== null) { + // We add the code from "lastMapping" to "mapping": + // First check if there is a new line in between. + if (lastGeneratedLine < mapping.generatedLine) { + // Associate first line with "lastMapping" + addMappingWithCode(lastMapping, shiftNextLine()); + lastGeneratedLine++; + lastGeneratedColumn = 0; + // The remaining code is added without mapping + } else { + // There is no new line in between. 
+ // Associate the code between "lastGeneratedColumn" and + // "mapping.generatedColumn" with "lastMapping" + var nextLine = remainingLines[remainingLinesIndex] || ''; + var code = nextLine.substr(0, mapping.generatedColumn - + lastGeneratedColumn); + remainingLines[remainingLinesIndex] = nextLine.substr(mapping.generatedColumn - + lastGeneratedColumn); + lastGeneratedColumn = mapping.generatedColumn; + addMappingWithCode(lastMapping, code); + // No more remaining code, continue + lastMapping = mapping; + return; + } + } + // We add the generated code until the first mapping + // to the SourceNode without any mapping. + // Each line is added as separate string. + while (lastGeneratedLine < mapping.generatedLine) { + node.add(shiftNextLine()); + lastGeneratedLine++; + } + if (lastGeneratedColumn < mapping.generatedColumn) { + var nextLine = remainingLines[remainingLinesIndex] || ''; + node.add(nextLine.substr(0, mapping.generatedColumn)); + remainingLines[remainingLinesIndex] = nextLine.substr(mapping.generatedColumn); + lastGeneratedColumn = mapping.generatedColumn; + } + lastMapping = mapping; + }, this); + // We have processed all mappings. + if (remainingLinesIndex < remainingLines.length) { + if (lastMapping) { + // Associate the remaining code in the current line with "lastMapping" + addMappingWithCode(lastMapping, shiftNextLine()); + } + // and add the remaining lines without any mapping + node.add(remainingLines.splice(remainingLinesIndex).join("")); + } + + // Copy sourcesContent into SourceNode + aSourceMapConsumer.sources.forEach(function (sourceFile) { + var content = aSourceMapConsumer.sourceContentFor(sourceFile); + if (content != null) { + if (aRelativePath != null) { + sourceFile = util.join(aRelativePath, sourceFile); + } + node.setSourceContent(sourceFile, content); + } + }); + + return node; + + function addMappingWithCode(mapping, code) { + if (mapping === null || mapping.source === undefined) { + node.add(code); + } else { + var source = aRelativePath + ? util.join(aRelativePath, mapping.source) + : mapping.source; + node.add(new SourceNode(mapping.originalLine, + mapping.originalColumn, + source, + code, + mapping.name)); + } + } + }; + + /** + * Add a chunk of generated JS to this source node. + * + * @param aChunk A string snippet of generated JS code, another instance of + * SourceNode, or an array where each member is one of those things. + */ + SourceNode.prototype.add = function SourceNode_add(aChunk) { + if (Array.isArray(aChunk)) { + aChunk.forEach(function (chunk) { + this.add(chunk); + }, this); + } + else if (aChunk[isSourceNode] || typeof aChunk === "string") { + if (aChunk) { + this.children.push(aChunk); + } + } + else { + throw new TypeError( + "Expected a SourceNode, string, or an array of SourceNodes and strings. Got " + aChunk + ); + } + return this; + }; + + /** + * Add a chunk of generated JS to the beginning of this source node. + * + * @param aChunk A string snippet of generated JS code, another instance of + * SourceNode, or an array where each member is one of those things. + */ + SourceNode.prototype.prepend = function SourceNode_prepend(aChunk) { + if (Array.isArray(aChunk)) { + for (var i = aChunk.length-1; i >= 0; i--) { + this.prepend(aChunk[i]); + } + } + else if (aChunk[isSourceNode] || typeof aChunk === "string") { + this.children.unshift(aChunk); + } + else { + throw new TypeError( + "Expected a SourceNode, string, or an array of SourceNodes and strings. 
Got " + aChunk + ); + } + return this; + }; + + /** + * Walk over the tree of JS snippets in this node and its children. The + * walking function is called once for each snippet of JS and is passed that + * snippet and the its original associated source's line/column location. + * + * @param aFn The traversal function. + */ + SourceNode.prototype.walk = function SourceNode_walk(aFn) { + var chunk; + for (var i = 0, len = this.children.length; i < len; i++) { + chunk = this.children[i]; + if (chunk[isSourceNode]) { + chunk.walk(aFn); + } + else { + if (chunk !== '') { + aFn(chunk, { source: this.source, + line: this.line, + column: this.column, + name: this.name }); + } + } + } + }; + + /** + * Like `String.prototype.join` except for SourceNodes. Inserts `aStr` between + * each of `this.children`. + * + * @param aSep The separator. + */ + SourceNode.prototype.join = function SourceNode_join(aSep) { + var newChildren; + var i; + var len = this.children.length; + if (len > 0) { + newChildren = []; + for (i = 0; i < len-1; i++) { + newChildren.push(this.children[i]); + newChildren.push(aSep); + } + newChildren.push(this.children[i]); + this.children = newChildren; + } + return this; + }; + + /** + * Call String.prototype.replace on the very right-most source snippet. Useful + * for trimming whitespace from the end of a source node, etc. + * + * @param aPattern The pattern to replace. + * @param aReplacement The thing to replace the pattern with. + */ + SourceNode.prototype.replaceRight = function SourceNode_replaceRight(aPattern, aReplacement) { + var lastChild = this.children[this.children.length - 1]; + if (lastChild[isSourceNode]) { + lastChild.replaceRight(aPattern, aReplacement); + } + else if (typeof lastChild === 'string') { + this.children[this.children.length - 1] = lastChild.replace(aPattern, aReplacement); + } + else { + this.children.push(''.replace(aPattern, aReplacement)); + } + return this; + }; + + /** + * Set the source content for a source file. This will be added to the SourceMapGenerator + * in the sourcesContent field. + * + * @param aSourceFile The filename of the source file + * @param aSourceContent The content of the source file + */ + SourceNode.prototype.setSourceContent = + function SourceNode_setSourceContent(aSourceFile, aSourceContent) { + this.sourceContents[util.toSetString(aSourceFile)] = aSourceContent; + }; + + /** + * Walk over the tree of SourceNodes. The walking function is called for each + * source file content and is passed the filename and source content. + * + * @param aFn The traversal function. + */ + SourceNode.prototype.walkSourceContents = + function SourceNode_walkSourceContents(aFn) { + for (var i = 0, len = this.children.length; i < len; i++) { + if (this.children[i][isSourceNode]) { + this.children[i].walkSourceContents(aFn); + } + } + + var sources = Object.keys(this.sourceContents); + for (var i = 0, len = sources.length; i < len; i++) { + aFn(util.fromSetString(sources[i]), this.sourceContents[sources[i]]); + } + }; + + /** + * Return the string representation of this source node. Walks over the tree + * and concatenates all the various snippets together to one string. + */ + SourceNode.prototype.toString = function SourceNode_toString() { + var str = ""; + this.walk(function (chunk) { + str += chunk; + }); + return str; + }; + + /** + * Returns the string representation of this source node along with a source + * map. 
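// --------------------------------------------------------------------------
// Illustrative sketch of the SourceNode API defined in this module. The file
// names and snippets are assumed; toStringWithSourceMap is the method whose
// implementation follows directly below.
var SourceNode = require('source-map').SourceNode;

// new SourceNode(line, column, source, chunks, name): positions refer to the
// *original* source that produced each chunk.
var root = new SourceNode();   // a root node with no position of its own
root.add(new SourceNode(1, 0, 'a.js', 'var a = 1;\n'));
root.add(new SourceNode(1, 0, 'b.js', 'var b = 2;\n', 'b'));
root.add('// a plain chunk that carries no mapping\n');
root.setSourceContent('a.js', 'let a = 1;');

console.log(root.toString());
// var a = 1;
// var b = 2;
// // a plain chunk that carries no mapping

var out = root.toStringWithSourceMap({ file: 'bundle.js' });
// out.code is the concatenated string above; out.map is a SourceMapGenerator,
// so out.map.toString() yields the serialized source map JSON.
console.log(JSON.parse(out.map.toString()).sources); // [ 'a.js', 'b.js' ]
// --------------------------------------------------------------------------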
+ */ + SourceNode.prototype.toStringWithSourceMap = function SourceNode_toStringWithSourceMap(aArgs) { + var generated = { + code: "", + line: 1, + column: 0 + }; + var map = new SourceMapGenerator(aArgs); + var sourceMappingActive = false; + var lastOriginalSource = null; + var lastOriginalLine = null; + var lastOriginalColumn = null; + var lastOriginalName = null; + this.walk(function (chunk, original) { + generated.code += chunk; + if (original.source !== null + && original.line !== null + && original.column !== null) { + if(lastOriginalSource !== original.source + || lastOriginalLine !== original.line + || lastOriginalColumn !== original.column + || lastOriginalName !== original.name) { + map.addMapping({ + source: original.source, + original: { + line: original.line, + column: original.column + }, + generated: { + line: generated.line, + column: generated.column + }, + name: original.name + }); + } + lastOriginalSource = original.source; + lastOriginalLine = original.line; + lastOriginalColumn = original.column; + lastOriginalName = original.name; + sourceMappingActive = true; + } else if (sourceMappingActive) { + map.addMapping({ + generated: { + line: generated.line, + column: generated.column + } + }); + lastOriginalSource = null; + sourceMappingActive = false; + } + for (var idx = 0, length = chunk.length; idx < length; idx++) { + if (chunk.charCodeAt(idx) === NEWLINE_CODE) { + generated.line++; + generated.column = 0; + // Mappings end at eol + if (idx + 1 === length) { + lastOriginalSource = null; + sourceMappingActive = false; + } else if (sourceMappingActive) { + map.addMapping({ + source: original.source, + original: { + line: original.line, + column: original.column + }, + generated: { + line: generated.line, + column: generated.column + }, + name: original.name + }); + } + } else { + generated.column++; + } + } + }); + this.walkSourceContents(function (sourceFile, sourceContent) { + map.setSourceContent(sourceFile, sourceContent); + }); + + return { code: generated.code, map: map }; + }; + + exports.SourceNode = SourceNode; + + +/***/ }) +/******/ ]) +}); +; \ No newline at end of file diff --git a/node_modules/source-map/dist/source-map.min.js b/node_modules/source-map/dist/source-map.min.js new file mode 100644 index 0000000..c7c72da --- /dev/null +++ b/node_modules/source-map/dist/source-map.min.js @@ -0,0 +1,2 @@ +!function(e,n){"object"==typeof exports&&"object"==typeof module?module.exports=n():"function"==typeof define&&define.amd?define([],n):"object"==typeof exports?exports.sourceMap=n():e.sourceMap=n()}(this,function(){return function(e){function n(t){if(r[t])return r[t].exports;var o=r[t]={exports:{},id:t,loaded:!1};return e[t].call(o.exports,o,o.exports,n),o.loaded=!0,o.exports}var r={};return n.m=e,n.c=r,n.p="",n(0)}([function(e,n,r){n.SourceMapGenerator=r(1).SourceMapGenerator,n.SourceMapConsumer=r(7).SourceMapConsumer,n.SourceNode=r(10).SourceNode},function(e,n,r){function t(e){e||(e={}),this._file=i.getArg(e,"file",null),this._sourceRoot=i.getArg(e,"sourceRoot",null),this._skipValidation=i.getArg(e,"skipValidation",!1),this._sources=new s,this._names=new s,this._mappings=new a,this._sourcesContents=null}var o=r(2),i=r(4),s=r(5).ArraySet,a=r(6).MappingList;t.prototype._version=3,t.fromSourceMap=function(e){var n=e.sourceRoot,r=new t({file:e.file,sourceRoot:n});return e.eachMapping(function(e){var 
t={generated:{line:e.generatedLine,column:e.generatedColumn}};null!=e.source&&(t.source=e.source,null!=n&&(t.source=i.relative(n,t.source)),t.original={line:e.originalLine,column:e.originalColumn},null!=e.name&&(t.name=e.name)),r.addMapping(t)}),e.sources.forEach(function(t){var o=t;null!==n&&(o=i.relative(n,t)),r._sources.has(o)||r._sources.add(o);var s=e.sourceContentFor(t);null!=s&&r.setSourceContent(t,s)}),r},t.prototype.addMapping=function(e){var n=i.getArg(e,"generated"),r=i.getArg(e,"original",null),t=i.getArg(e,"source",null),o=i.getArg(e,"name",null);this._skipValidation||this._validateMapping(n,r,t,o),null!=t&&(t=String(t),this._sources.has(t)||this._sources.add(t)),null!=o&&(o=String(o),this._names.has(o)||this._names.add(o)),this._mappings.add({generatedLine:n.line,generatedColumn:n.column,originalLine:null!=r&&r.line,originalColumn:null!=r&&r.column,source:t,name:o})},t.prototype.setSourceContent=function(e,n){var r=e;null!=this._sourceRoot&&(r=i.relative(this._sourceRoot,r)),null!=n?(this._sourcesContents||(this._sourcesContents=Object.create(null)),this._sourcesContents[i.toSetString(r)]=n):this._sourcesContents&&(delete this._sourcesContents[i.toSetString(r)],0===Object.keys(this._sourcesContents).length&&(this._sourcesContents=null))},t.prototype.applySourceMap=function(e,n,r){var t=n;if(null==n){if(null==e.file)throw new Error('SourceMapGenerator.prototype.applySourceMap requires either an explicit source file, or the source map\'s "file" property. Both were omitted.');t=e.file}var o=this._sourceRoot;null!=o&&(t=i.relative(o,t));var a=new s,u=new s;this._mappings.unsortedForEach(function(n){if(n.source===t&&null!=n.originalLine){var s=e.originalPositionFor({line:n.originalLine,column:n.originalColumn});null!=s.source&&(n.source=s.source,null!=r&&(n.source=i.join(r,n.source)),null!=o&&(n.source=i.relative(o,n.source)),n.originalLine=s.line,n.originalColumn=s.column,null!=s.name&&(n.name=s.name))}var l=n.source;null==l||a.has(l)||a.add(l);var c=n.name;null==c||u.has(c)||u.add(c)},this),this._sources=a,this._names=u,e.sources.forEach(function(n){var t=e.sourceContentFor(n);null!=t&&(null!=r&&(n=i.join(r,n)),null!=o&&(n=i.relative(o,n)),this.setSourceContent(n,t))},this)},t.prototype._validateMapping=function(e,n,r,t){if(n&&"number"!=typeof n.line&&"number"!=typeof n.column)throw new Error("original.line and original.column are not numbers -- you probably meant to omit the original mapping entirely and only map the generated position. 
If so, pass null for the original mapping instead of an object with empty or null values.");if((!(e&&"line"in e&&"column"in e&&e.line>0&&e.column>=0)||n||r||t)&&!(e&&"line"in e&&"column"in e&&n&&"line"in n&&"column"in n&&e.line>0&&e.column>=0&&n.line>0&&n.column>=0&&r))throw new Error("Invalid mapping: "+JSON.stringify({generated:e,source:r,original:n,name:t}))},t.prototype._serializeMappings=function(){for(var e,n,r,t,s=0,a=1,u=0,l=0,c=0,g=0,p="",h=this._mappings.toArray(),f=0,d=h.length;f0){if(!i.compareByGeneratedPositionsInflated(n,h[f-1]))continue;e+=","}e+=o.encode(n.generatedColumn-s),s=n.generatedColumn,null!=n.source&&(t=this._sources.indexOf(n.source),e+=o.encode(t-g),g=t,e+=o.encode(n.originalLine-1-l),l=n.originalLine-1,e+=o.encode(n.originalColumn-u),u=n.originalColumn,null!=n.name&&(r=this._names.indexOf(n.name),e+=o.encode(r-c),c=r)),p+=e}return p},t.prototype._generateSourcesContent=function(e,n){return e.map(function(e){if(!this._sourcesContents)return null;null!=n&&(e=i.relative(n,e));var r=i.toSetString(e);return Object.prototype.hasOwnProperty.call(this._sourcesContents,r)?this._sourcesContents[r]:null},this)},t.prototype.toJSON=function(){var e={version:this._version,sources:this._sources.toArray(),names:this._names.toArray(),mappings:this._serializeMappings()};return null!=this._file&&(e.file=this._file),null!=this._sourceRoot&&(e.sourceRoot=this._sourceRoot),this._sourcesContents&&(e.sourcesContent=this._generateSourcesContent(e.sources,e.sourceRoot)),e},t.prototype.toString=function(){return JSON.stringify(this.toJSON())},n.SourceMapGenerator=t},function(e,n,r){function t(e){return e<0?(-e<<1)+1:(e<<1)+0}function o(e){var n=1===(1&e),r=e>>1;return n?-r:r}var i=r(3),s=5,a=1<>>=s,o>0&&(n|=l),r+=i.encode(n);while(o>0);return r},n.decode=function(e,n,r){var t,a,c=e.length,g=0,p=0;do{if(n>=c)throw new Error("Expected more digits in base 64 VLQ value.");if(a=i.decode(e.charCodeAt(n++)),a===-1)throw new Error("Invalid base64 digit: "+e.charAt(n-1));t=!!(a&l),a&=u,g+=a<=0;c--)s=u[c],"."===s?u.splice(c,1):".."===s?l++:l>0&&(""===s?(u.splice(c+1,l),l=0):(u.splice(c,2),l--));return r=u.join("/"),""===r&&(r=a?"/":"."),i?(i.path=r,o(i)):r}function s(e,n){""===e&&(e="."),""===n&&(n=".");var r=t(n),s=t(e);if(s&&(e=s.path||"/"),r&&!r.scheme)return s&&(r.scheme=s.scheme),o(r);if(r||n.match(y))return n;if(s&&!s.host&&!s.path)return s.host=n,o(s);var a="/"===n.charAt(0)?n:i(e.replace(/\/+$/,"")+"/"+n);return s?(s.path=a,o(s)):a}function a(e,n){""===e&&(e="."),e=e.replace(/\/$/,"");for(var r=0;0!==n.indexOf(e+"/");){var t=e.lastIndexOf("/");if(t<0)return n;if(e=e.slice(0,t),e.match(/^([^\/]+:\/)?\/*$/))return n;++r}return Array(r+1).join("../")+n.substr(e.length+1)}function u(e){return e}function l(e){return g(e)?"$"+e:e}function c(e){return g(e)?e.slice(1):e}function g(e){if(!e)return!1;var n=e.length;if(n<9)return!1;if(95!==e.charCodeAt(n-1)||95!==e.charCodeAt(n-2)||111!==e.charCodeAt(n-3)||116!==e.charCodeAt(n-4)||111!==e.charCodeAt(n-5)||114!==e.charCodeAt(n-6)||112!==e.charCodeAt(n-7)||95!==e.charCodeAt(n-8)||95!==e.charCodeAt(n-9))return!1;for(var r=n-10;r>=0;r--)if(36!==e.charCodeAt(r))return!1;return!0}function p(e,n,r){var t=f(e.source,n.source);return 0!==t?t:(t=e.originalLine-n.originalLine,0!==t?t:(t=e.originalColumn-n.originalColumn,0!==t||r?t:(t=e.generatedColumn-n.generatedColumn,0!==t?t:(t=e.generatedLine-n.generatedLine,0!==t?t:f(e.name,n.name)))))}function h(e,n,r){var t=e.generatedLine-n.generatedLine;return 
0!==t?t:(t=e.generatedColumn-n.generatedColumn,0!==t||r?t:(t=f(e.source,n.source),0!==t?t:(t=e.originalLine-n.originalLine,0!==t?t:(t=e.originalColumn-n.originalColumn,0!==t?t:f(e.name,n.name)))))}function f(e,n){return e===n?0:null===e?1:null===n?-1:e>n?1:-1}function d(e,n){var r=e.generatedLine-n.generatedLine;return 0!==r?r:(r=e.generatedColumn-n.generatedColumn,0!==r?r:(r=f(e.source,n.source),0!==r?r:(r=e.originalLine-n.originalLine,0!==r?r:(r=e.originalColumn-n.originalColumn,0!==r?r:f(e.name,n.name)))))}function m(e){return JSON.parse(e.replace(/^\)]}'[^\n]*\n/,""))}function _(e,n,r){if(n=n||"",e&&("/"!==e[e.length-1]&&"/"!==n[0]&&(e+="/"),n=e+n),r){var a=t(r);if(!a)throw new Error("sourceMapURL could not be parsed");if(a.path){var u=a.path.lastIndexOf("/");u>=0&&(a.path=a.path.substring(0,u+1))}n=s(o(a),n)}return i(n)}n.getArg=r;var v=/^(?:([\w+\-.]+):)?\/\/(?:(\w+:\w+)@)?([\w.-]*)(?::(\d+))?(.*)$/,y=/^data:.+\,.+$/;n.urlParse=t,n.urlGenerate=o,n.normalize=i,n.join=s,n.isAbsolute=function(e){return"/"===e.charAt(0)||v.test(e)},n.relative=a;var C=function(){var e=Object.create(null);return!("__proto__"in e)}();n.toSetString=C?u:l,n.fromSetString=C?u:c,n.compareByOriginalPositions=p,n.compareByGeneratedPositionsDeflated=h,n.compareByGeneratedPositionsInflated=d,n.parseSourceMapInput=m,n.computeSourceURL=_},function(e,n,r){function t(){this._array=[],this._set=s?new Map:Object.create(null)}var o=r(4),i=Object.prototype.hasOwnProperty,s="undefined"!=typeof Map;t.fromArray=function(e,n){for(var r=new t,o=0,i=e.length;o=0)return n}else{var r=o.toSetString(e);if(i.call(this._set,r))return this._set[r]}throw new Error('"'+e+'" is not in the set.')},t.prototype.at=function(e){if(e>=0&&er||t==r&&s>=o||i.compareByGeneratedPositionsInflated(e,n)<=0}function o(){this._array=[],this._sorted=!0,this._last={generatedLine:-1,generatedColumn:0}}var i=r(4);o.prototype.unsortedForEach=function(e,n){this._array.forEach(e,n)},o.prototype.add=function(e){t(this._last,e)?(this._last=e,this._array.push(e)):(this._sorted=!1,this._array.push(e))},o.prototype.toArray=function(){return this._sorted||(this._array.sort(i.compareByGeneratedPositionsInflated),this._sorted=!0),this._array},n.MappingList=o},function(e,n,r){function t(e,n){var r=e;return"string"==typeof e&&(r=a.parseSourceMapInput(e)),null!=r.sections?new s(r,n):new o(r,n)}function o(e,n){var r=e;"string"==typeof e&&(r=a.parseSourceMapInput(e));var t=a.getArg(r,"version"),o=a.getArg(r,"sources"),i=a.getArg(r,"names",[]),s=a.getArg(r,"sourceRoot",null),u=a.getArg(r,"sourcesContent",null),c=a.getArg(r,"mappings"),g=a.getArg(r,"file",null);if(t!=this._version)throw new Error("Unsupported version: "+t);s&&(s=a.normalize(s)),o=o.map(String).map(a.normalize).map(function(e){return s&&a.isAbsolute(s)&&a.isAbsolute(e)?a.relative(s,e):e}),this._names=l.fromArray(i.map(String),!0),this._sources=l.fromArray(o,!0),this._absoluteSources=this._sources.toArray().map(function(e){return a.computeSourceURL(s,e,n)}),this.sourceRoot=s,this.sourcesContent=u,this._mappings=c,this._sourceMapURL=n,this.file=g}function i(){this.generatedLine=0,this.generatedColumn=0,this.source=null,this.originalLine=null,this.originalColumn=null,this.name=null}function s(e,n){var r=e;"string"==typeof e&&(r=a.parseSourceMapInput(e));var o=a.getArg(r,"version"),i=a.getArg(r,"sections");if(o!=this._version)throw new Error("Unsupported version: "+o);this._sources=new l,this._names=new l;var s={line:-1,column:0};this._sections=i.map(function(e){if(e.url)throw new Error("Support for url field in 
sections not implemented.");var r=a.getArg(e,"offset"),o=a.getArg(r,"line"),i=a.getArg(r,"column");if(o=0){var i=this._originalMappings[o];if(void 0===e.column)for(var s=i.originalLine;i&&i.originalLine===s;)t.push({line:a.getArg(i,"generatedLine",null),column:a.getArg(i,"generatedColumn",null),lastColumn:a.getArg(i,"lastGeneratedColumn",null)}),i=this._originalMappings[++o];else for(var l=i.originalColumn;i&&i.originalLine===n&&i.originalColumn==l;)t.push({line:a.getArg(i,"generatedLine",null),column:a.getArg(i,"generatedColumn",null),lastColumn:a.getArg(i,"lastGeneratedColumn",null)}),i=this._originalMappings[++o]}return t},n.SourceMapConsumer=t,o.prototype=Object.create(t.prototype),o.prototype.consumer=t,o.prototype._findSourceIndex=function(e){var n=e;if(null!=this.sourceRoot&&(n=a.relative(this.sourceRoot,n)),this._sources.has(n))return this._sources.indexOf(n);var r;for(r=0;r1&&(r.source=d+o[1],d+=o[1],r.originalLine=h+o[2],h=r.originalLine,r.originalLine+=1,r.originalColumn=f+o[3],f=r.originalColumn,o.length>4&&(r.name=m+o[4],m+=o[4])),A.push(r),"number"==typeof r.originalLine&&S.push(r)}g(A,a.compareByGeneratedPositionsDeflated),this.__generatedMappings=A,g(S,a.compareByOriginalPositions),this.__originalMappings=S},o.prototype._findMapping=function(e,n,r,t,o,i){if(e[r]<=0)throw new TypeError("Line must be greater than or equal to 1, got "+e[r]);if(e[t]<0)throw new TypeError("Column must be greater than or equal to 0, got "+e[t]);return u.search(e,n,o,i)},o.prototype.computeColumnSpans=function(){for(var e=0;e=0){var o=this._generatedMappings[r];if(o.generatedLine===n.generatedLine){var i=a.getArg(o,"source",null);null!==i&&(i=this._sources.at(i),i=a.computeSourceURL(this.sourceRoot,i,this._sourceMapURL));var s=a.getArg(o,"name",null);return null!==s&&(s=this._names.at(s)),{source:i,line:a.getArg(o,"originalLine",null),column:a.getArg(o,"originalColumn",null),name:s}}}return{source:null,line:null,column:null,name:null}},o.prototype.hasContentsOfAllSources=function(){return!!this.sourcesContent&&(this.sourcesContent.length>=this._sources.size()&&!this.sourcesContent.some(function(e){return null==e}))},o.prototype.sourceContentFor=function(e,n){if(!this.sourcesContent)return null;var r=this._findSourceIndex(e);if(r>=0)return this.sourcesContent[r];var t=e;null!=this.sourceRoot&&(t=a.relative(this.sourceRoot,t));var o;if(null!=this.sourceRoot&&(o=a.urlParse(this.sourceRoot))){var i=t.replace(/^file:\/\//,"");if("file"==o.scheme&&this._sources.has(i))return this.sourcesContent[this._sources.indexOf(i)];if((!o.path||"/"==o.path)&&this._sources.has("/"+t))return this.sourcesContent[this._sources.indexOf("/"+t)]}if(n)return null;throw new Error('"'+t+'" is not in the SourceMap.')},o.prototype.generatedPositionFor=function(e){var n=a.getArg(e,"source");if(n=this._findSourceIndex(n),n<0)return{line:null,column:null,lastColumn:null};var r={source:n,originalLine:a.getArg(e,"line"),originalColumn:a.getArg(e,"column")},o=this._findMapping(r,this._originalMappings,"originalLine","originalColumn",a.compareByOriginalPositions,a.getArg(e,"bias",t.GREATEST_LOWER_BOUND));if(o>=0){var i=this._originalMappings[o];if(i.source===r.source)return{line:a.getArg(i,"generatedLine",null),column:a.getArg(i,"generatedColumn",null),lastColumn:a.getArg(i,"lastGeneratedColumn",null)}}return{line:null,column:null,lastColumn:null}},n.BasicSourceMapConsumer=o,s.prototype=Object.create(t.prototype),s.prototype.constructor=t,s.prototype._version=3,Object.defineProperty(s.prototype,"sources",{get:function(){for(var 
e=[],n=0;n0?t-u>1?r(u,t,o,i,s,a):a==n.LEAST_UPPER_BOUND?t1?r(e,u,o,i,s,a):a==n.LEAST_UPPER_BOUND?u:e<0?-1:e}n.GREATEST_LOWER_BOUND=1,n.LEAST_UPPER_BOUND=2,n.search=function(e,t,o,i){if(0===t.length)return-1;var s=r(-1,t.length,e,t,o,i||n.GREATEST_LOWER_BOUND);if(s<0)return-1;for(;s-1>=0&&0===o(t[s],t[s-1],!0);)--s;return s}},function(e,n){function r(e,n,r){var t=e[n];e[n]=e[r],e[r]=t}function t(e,n){return Math.round(e+Math.random()*(n-e))}function o(e,n,i,s){if(i=0;n--)this.prepend(e[n]);else{if(!e[u]&&"string"!=typeof e)throw new TypeError("Expected a SourceNode, string, or an array of SourceNodes and strings. Got "+e);this.children.unshift(e)}return this},t.prototype.walk=function(e){for(var n,r=0,t=this.children.length;r0){for(n=[],r=0;r 0 && aGenerated.column >= 0\n\t && !aOriginal && !aSource && !aName) {\n\t // Case 1.\n\t return;\n\t }\n\t else if (aGenerated && 'line' in aGenerated && 'column' in aGenerated\n\t && aOriginal && 'line' in aOriginal && 'column' in aOriginal\n\t && aGenerated.line > 0 && aGenerated.column >= 0\n\t && aOriginal.line > 0 && aOriginal.column >= 0\n\t && aSource) {\n\t // Cases 2 and 3.\n\t return;\n\t }\n\t else {\n\t throw new Error('Invalid mapping: ' + JSON.stringify({\n\t generated: aGenerated,\n\t source: aSource,\n\t original: aOriginal,\n\t name: aName\n\t }));\n\t }\n\t };\n\t\n\t/**\n\t * Serialize the accumulated mappings in to the stream of base 64 VLQs\n\t * specified by the source map format.\n\t */\n\tSourceMapGenerator.prototype._serializeMappings =\n\t function SourceMapGenerator_serializeMappings() {\n\t var previousGeneratedColumn = 0;\n\t var previousGeneratedLine = 1;\n\t var previousOriginalColumn = 0;\n\t var previousOriginalLine = 0;\n\t var previousName = 0;\n\t var previousSource = 0;\n\t var result = '';\n\t var next;\n\t var mapping;\n\t var nameIdx;\n\t var sourceIdx;\n\t\n\t var mappings = this._mappings.toArray();\n\t for (var i = 0, len = mappings.length; i < len; i++) {\n\t mapping = mappings[i];\n\t next = ''\n\t\n\t if (mapping.generatedLine !== previousGeneratedLine) {\n\t previousGeneratedColumn = 0;\n\t while (mapping.generatedLine !== previousGeneratedLine) {\n\t next += ';';\n\t previousGeneratedLine++;\n\t }\n\t }\n\t else {\n\t if (i > 0) {\n\t if (!util.compareByGeneratedPositionsInflated(mapping, mappings[i - 1])) {\n\t continue;\n\t }\n\t next += ',';\n\t }\n\t }\n\t\n\t next += base64VLQ.encode(mapping.generatedColumn\n\t - previousGeneratedColumn);\n\t previousGeneratedColumn = mapping.generatedColumn;\n\t\n\t if (mapping.source != null) {\n\t sourceIdx = this._sources.indexOf(mapping.source);\n\t next += base64VLQ.encode(sourceIdx - previousSource);\n\t previousSource = sourceIdx;\n\t\n\t // lines are stored 0-based in SourceMap spec version 3\n\t next += base64VLQ.encode(mapping.originalLine - 1\n\t - previousOriginalLine);\n\t previousOriginalLine = mapping.originalLine - 1;\n\t\n\t next += base64VLQ.encode(mapping.originalColumn\n\t - previousOriginalColumn);\n\t previousOriginalColumn = mapping.originalColumn;\n\t\n\t if (mapping.name != null) {\n\t nameIdx = this._names.indexOf(mapping.name);\n\t next += base64VLQ.encode(nameIdx - previousName);\n\t previousName = nameIdx;\n\t }\n\t }\n\t\n\t result += next;\n\t }\n\t\n\t return result;\n\t };\n\t\n\tSourceMapGenerator.prototype._generateSourcesContent =\n\t function SourceMapGenerator_generateSourcesContent(aSources, aSourceRoot) {\n\t return aSources.map(function (source) {\n\t if (!this._sourcesContents) {\n\t return null;\n\t }\n\t if (aSourceRoot 
!= null) {\n\t source = util.relative(aSourceRoot, source);\n\t }\n\t var key = util.toSetString(source);\n\t return Object.prototype.hasOwnProperty.call(this._sourcesContents, key)\n\t ? this._sourcesContents[key]\n\t : null;\n\t }, this);\n\t };\n\t\n\t/**\n\t * Externalize the source map.\n\t */\n\tSourceMapGenerator.prototype.toJSON =\n\t function SourceMapGenerator_toJSON() {\n\t var map = {\n\t version: this._version,\n\t sources: this._sources.toArray(),\n\t names: this._names.toArray(),\n\t mappings: this._serializeMappings()\n\t };\n\t if (this._file != null) {\n\t map.file = this._file;\n\t }\n\t if (this._sourceRoot != null) {\n\t map.sourceRoot = this._sourceRoot;\n\t }\n\t if (this._sourcesContents) {\n\t map.sourcesContent = this._generateSourcesContent(map.sources, map.sourceRoot);\n\t }\n\t\n\t return map;\n\t };\n\t\n\t/**\n\t * Render the source map being generated to a string.\n\t */\n\tSourceMapGenerator.prototype.toString =\n\t function SourceMapGenerator_toString() {\n\t return JSON.stringify(this.toJSON());\n\t };\n\t\n\texports.SourceMapGenerator = SourceMapGenerator;\n\n\n/***/ }),\n/* 2 */\n/***/ (function(module, exports, __webpack_require__) {\n\n\t/* -*- Mode: js; js-indent-level: 2; -*- */\n\t/*\n\t * Copyright 2011 Mozilla Foundation and contributors\n\t * Licensed under the New BSD license. See LICENSE or:\n\t * http://opensource.org/licenses/BSD-3-Clause\n\t *\n\t * Based on the Base 64 VLQ implementation in Closure Compiler:\n\t * https://code.google.com/p/closure-compiler/source/browse/trunk/src/com/google/debugging/sourcemap/Base64VLQ.java\n\t *\n\t * Copyright 2011 The Closure Compiler Authors. All rights reserved.\n\t * Redistribution and use in source and binary forms, with or without\n\t * modification, are permitted provided that the following conditions are\n\t * met:\n\t *\n\t * * Redistributions of source code must retain the above copyright\n\t * notice, this list of conditions and the following disclaimer.\n\t * * Redistributions in binary form must reproduce the above\n\t * copyright notice, this list of conditions and the following\n\t * disclaimer in the documentation and/or other materials provided\n\t * with the distribution.\n\t * * Neither the name of Google Inc. nor the names of its\n\t * contributors may be used to endorse or promote products derived\n\t * from this software without specific prior written permission.\n\t *\n\t * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n\t * \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n\t * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n\t * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n\t * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n\t * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n\t * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n\t * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n\t * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n\t * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n\t * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\t */\n\t\n\tvar base64 = __webpack_require__(3);\n\t\n\t// A single base 64 digit can contain 6 bits of data. 
For the base 64 variable\n\t// length quantities we use in the source map spec, the first bit is the sign,\n\t// the next four bits are the actual value, and the 6th bit is the\n\t// continuation bit. The continuation bit tells us whether there are more\n\t// digits in this value following this digit.\n\t//\n\t// Continuation\n\t// | Sign\n\t// | |\n\t// V V\n\t// 101011\n\t\n\tvar VLQ_BASE_SHIFT = 5;\n\t\n\t// binary: 100000\n\tvar VLQ_BASE = 1 << VLQ_BASE_SHIFT;\n\t\n\t// binary: 011111\n\tvar VLQ_BASE_MASK = VLQ_BASE - 1;\n\t\n\t// binary: 100000\n\tvar VLQ_CONTINUATION_BIT = VLQ_BASE;\n\t\n\t/**\n\t * Converts from a two-complement value to a value where the sign bit is\n\t * placed in the least significant bit. For example, as decimals:\n\t * 1 becomes 2 (10 binary), -1 becomes 3 (11 binary)\n\t * 2 becomes 4 (100 binary), -2 becomes 5 (101 binary)\n\t */\n\tfunction toVLQSigned(aValue) {\n\t return aValue < 0\n\t ? ((-aValue) << 1) + 1\n\t : (aValue << 1) + 0;\n\t}\n\t\n\t/**\n\t * Converts to a two-complement value from a value where the sign bit is\n\t * placed in the least significant bit. For example, as decimals:\n\t * 2 (10 binary) becomes 1, 3 (11 binary) becomes -1\n\t * 4 (100 binary) becomes 2, 5 (101 binary) becomes -2\n\t */\n\tfunction fromVLQSigned(aValue) {\n\t var isNegative = (aValue & 1) === 1;\n\t var shifted = aValue >> 1;\n\t return isNegative\n\t ? -shifted\n\t : shifted;\n\t}\n\t\n\t/**\n\t * Returns the base 64 VLQ encoded value.\n\t */\n\texports.encode = function base64VLQ_encode(aValue) {\n\t var encoded = \"\";\n\t var digit;\n\t\n\t var vlq = toVLQSigned(aValue);\n\t\n\t do {\n\t digit = vlq & VLQ_BASE_MASK;\n\t vlq >>>= VLQ_BASE_SHIFT;\n\t if (vlq > 0) {\n\t // There are still more digits in this value, so we must make sure the\n\t // continuation bit is marked.\n\t digit |= VLQ_CONTINUATION_BIT;\n\t }\n\t encoded += base64.encode(digit);\n\t } while (vlq > 0);\n\t\n\t return encoded;\n\t};\n\t\n\t/**\n\t * Decodes the next base 64 VLQ value from the given string and returns the\n\t * value and the rest of the string via the out parameter.\n\t */\n\texports.decode = function base64VLQ_decode(aStr, aIndex, aOutParam) {\n\t var strLen = aStr.length;\n\t var result = 0;\n\t var shift = 0;\n\t var continuation, digit;\n\t\n\t do {\n\t if (aIndex >= strLen) {\n\t throw new Error(\"Expected more digits in base 64 VLQ value.\");\n\t }\n\t\n\t digit = base64.decode(aStr.charCodeAt(aIndex++));\n\t if (digit === -1) {\n\t throw new Error(\"Invalid base64 digit: \" + aStr.charAt(aIndex - 1));\n\t }\n\t\n\t continuation = !!(digit & VLQ_CONTINUATION_BIT);\n\t digit &= VLQ_BASE_MASK;\n\t result = result + (digit << shift);\n\t shift += VLQ_BASE_SHIFT;\n\t } while (continuation);\n\t\n\t aOutParam.value = fromVLQSigned(result);\n\t aOutParam.rest = aIndex;\n\t};\n\n\n/***/ }),\n/* 3 */\n/***/ (function(module, exports) {\n\n\t/* -*- Mode: js; js-indent-level: 2; -*- */\n\t/*\n\t * Copyright 2011 Mozilla Foundation and contributors\n\t * Licensed under the New BSD license. 
See LICENSE or:\n\t * http://opensource.org/licenses/BSD-3-Clause\n\t */\n\t\n\tvar intToCharMap = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'.split('');\n\t\n\t/**\n\t * Encode an integer in the range of 0 to 63 to a single base 64 digit.\n\t */\n\texports.encode = function (number) {\n\t if (0 <= number && number < intToCharMap.length) {\n\t return intToCharMap[number];\n\t }\n\t throw new TypeError(\"Must be between 0 and 63: \" + number);\n\t};\n\t\n\t/**\n\t * Decode a single base 64 character code digit to an integer. Returns -1 on\n\t * failure.\n\t */\n\texports.decode = function (charCode) {\n\t var bigA = 65; // 'A'\n\t var bigZ = 90; // 'Z'\n\t\n\t var littleA = 97; // 'a'\n\t var littleZ = 122; // 'z'\n\t\n\t var zero = 48; // '0'\n\t var nine = 57; // '9'\n\t\n\t var plus = 43; // '+'\n\t var slash = 47; // '/'\n\t\n\t var littleOffset = 26;\n\t var numberOffset = 52;\n\t\n\t // 0 - 25: ABCDEFGHIJKLMNOPQRSTUVWXYZ\n\t if (bigA <= charCode && charCode <= bigZ) {\n\t return (charCode - bigA);\n\t }\n\t\n\t // 26 - 51: abcdefghijklmnopqrstuvwxyz\n\t if (littleA <= charCode && charCode <= littleZ) {\n\t return (charCode - littleA + littleOffset);\n\t }\n\t\n\t // 52 - 61: 0123456789\n\t if (zero <= charCode && charCode <= nine) {\n\t return (charCode - zero + numberOffset);\n\t }\n\t\n\t // 62: +\n\t if (charCode == plus) {\n\t return 62;\n\t }\n\t\n\t // 63: /\n\t if (charCode == slash) {\n\t return 63;\n\t }\n\t\n\t // Invalid base64 digit.\n\t return -1;\n\t};\n\n\n/***/ }),\n/* 4 */\n/***/ (function(module, exports) {\n\n\t/* -*- Mode: js; js-indent-level: 2; -*- */\n\t/*\n\t * Copyright 2011 Mozilla Foundation and contributors\n\t * Licensed under the New BSD license. See LICENSE or:\n\t * http://opensource.org/licenses/BSD-3-Clause\n\t */\n\t\n\t/**\n\t * This is a helper function for getting values from parameter/options\n\t * objects.\n\t *\n\t * @param args The object we are extracting values from\n\t * @param name The name of the property we are getting.\n\t * @param defaultValue An optional value to return if the property is missing\n\t * from the object. If this is not specified and the property is missing, an\n\t * error will be thrown.\n\t */\n\tfunction getArg(aArgs, aName, aDefaultValue) {\n\t if (aName in aArgs) {\n\t return aArgs[aName];\n\t } else if (arguments.length === 3) {\n\t return aDefaultValue;\n\t } else {\n\t throw new Error('\"' + aName + '\" is a required argument.');\n\t }\n\t}\n\texports.getArg = getArg;\n\t\n\tvar urlRegexp = /^(?:([\\w+\\-.]+):)?\\/\\/(?:(\\w+:\\w+)@)?([\\w.-]*)(?::(\\d+))?(.*)$/;\n\tvar dataUrlRegexp = /^data:.+\\,.+$/;\n\t\n\tfunction urlParse(aUrl) {\n\t var match = aUrl.match(urlRegexp);\n\t if (!match) {\n\t return null;\n\t }\n\t return {\n\t scheme: match[1],\n\t auth: match[2],\n\t host: match[3],\n\t port: match[4],\n\t path: match[5]\n\t };\n\t}\n\texports.urlParse = urlParse;\n\t\n\tfunction urlGenerate(aParsedUrl) {\n\t var url = '';\n\t if (aParsedUrl.scheme) {\n\t url += aParsedUrl.scheme + ':';\n\t }\n\t url += '//';\n\t if (aParsedUrl.auth) {\n\t url += aParsedUrl.auth + '@';\n\t }\n\t if (aParsedUrl.host) {\n\t url += aParsedUrl.host;\n\t }\n\t if (aParsedUrl.port) {\n\t url += \":\" + aParsedUrl.port\n\t }\n\t if (aParsedUrl.path) {\n\t url += aParsedUrl.path;\n\t }\n\t return url;\n\t}\n\texports.urlGenerate = urlGenerate;\n\t\n\t/**\n\t * Normalizes a path, or the path portion of a URL:\n\t *\n\t * - Replaces consecutive slashes with one slash.\n\t * - Removes unnecessary '.' 
parts.\n\t * - Removes unnecessary '/..' parts.\n\t *\n\t * Based on code in the Node.js 'path' core module.\n\t *\n\t * @param aPath The path or url to normalize.\n\t */\n\tfunction normalize(aPath) {\n\t var path = aPath;\n\t var url = urlParse(aPath);\n\t if (url) {\n\t if (!url.path) {\n\t return aPath;\n\t }\n\t path = url.path;\n\t }\n\t var isAbsolute = exports.isAbsolute(path);\n\t\n\t var parts = path.split(/\\/+/);\n\t for (var part, up = 0, i = parts.length - 1; i >= 0; i--) {\n\t part = parts[i];\n\t if (part === '.') {\n\t parts.splice(i, 1);\n\t } else if (part === '..') {\n\t up++;\n\t } else if (up > 0) {\n\t if (part === '') {\n\t // The first part is blank if the path is absolute. Trying to go\n\t // above the root is a no-op. Therefore we can remove all '..' parts\n\t // directly after the root.\n\t parts.splice(i + 1, up);\n\t up = 0;\n\t } else {\n\t parts.splice(i, 2);\n\t up--;\n\t }\n\t }\n\t }\n\t path = parts.join('/');\n\t\n\t if (path === '') {\n\t path = isAbsolute ? '/' : '.';\n\t }\n\t\n\t if (url) {\n\t url.path = path;\n\t return urlGenerate(url);\n\t }\n\t return path;\n\t}\n\texports.normalize = normalize;\n\t\n\t/**\n\t * Joins two paths/URLs.\n\t *\n\t * @param aRoot The root path or URL.\n\t * @param aPath The path or URL to be joined with the root.\n\t *\n\t * - If aPath is a URL or a data URI, aPath is returned, unless aPath is a\n\t * scheme-relative URL: Then the scheme of aRoot, if any, is prepended\n\t * first.\n\t * - Otherwise aPath is a path. If aRoot is a URL, then its path portion\n\t * is updated with the result and aRoot is returned. Otherwise the result\n\t * is returned.\n\t * - If aPath is absolute, the result is aPath.\n\t * - Otherwise the two paths are joined with a slash.\n\t * - Joining for example 'http://' and 'www.example.com' is also supported.\n\t */\n\tfunction join(aRoot, aPath) {\n\t if (aRoot === \"\") {\n\t aRoot = \".\";\n\t }\n\t if (aPath === \"\") {\n\t aPath = \".\";\n\t }\n\t var aPathUrl = urlParse(aPath);\n\t var aRootUrl = urlParse(aRoot);\n\t if (aRootUrl) {\n\t aRoot = aRootUrl.path || '/';\n\t }\n\t\n\t // `join(foo, '//www.example.org')`\n\t if (aPathUrl && !aPathUrl.scheme) {\n\t if (aRootUrl) {\n\t aPathUrl.scheme = aRootUrl.scheme;\n\t }\n\t return urlGenerate(aPathUrl);\n\t }\n\t\n\t if (aPathUrl || aPath.match(dataUrlRegexp)) {\n\t return aPath;\n\t }\n\t\n\t // `join('http://', 'www.example.com')`\n\t if (aRootUrl && !aRootUrl.host && !aRootUrl.path) {\n\t aRootUrl.host = aPath;\n\t return urlGenerate(aRootUrl);\n\t }\n\t\n\t var joined = aPath.charAt(0) === '/'\n\t ? aPath\n\t : normalize(aRoot.replace(/\\/+$/, '') + '/' + aPath);\n\t\n\t if (aRootUrl) {\n\t aRootUrl.path = joined;\n\t return urlGenerate(aRootUrl);\n\t }\n\t return joined;\n\t}\n\texports.join = join;\n\t\n\texports.isAbsolute = function (aPath) {\n\t return aPath.charAt(0) === '/' || urlRegexp.test(aPath);\n\t};\n\t\n\t/**\n\t * Make a path relative to a URL or another path.\n\t *\n\t * @param aRoot The root path or URL.\n\t * @param aPath The path or URL to be made relative to aRoot.\n\t */\n\tfunction relative(aRoot, aPath) {\n\t if (aRoot === \"\") {\n\t aRoot = \".\";\n\t }\n\t\n\t aRoot = aRoot.replace(/\\/$/, '');\n\t\n\t // It is possible for the path to be above the root. In this case, simply\n\t // checking whether the root is a prefix of the path won't work. 
Instead, we\n\t // need to remove components from the root one by one, until either we find\n\t // a prefix that fits, or we run out of components to remove.\n\t var level = 0;\n\t while (aPath.indexOf(aRoot + '/') !== 0) {\n\t var index = aRoot.lastIndexOf(\"/\");\n\t if (index < 0) {\n\t return aPath;\n\t }\n\t\n\t // If the only part of the root that is left is the scheme (i.e. http://,\n\t // file:///, etc.), one or more slashes (/), or simply nothing at all, we\n\t // have exhausted all components, so the path is not relative to the root.\n\t aRoot = aRoot.slice(0, index);\n\t if (aRoot.match(/^([^\\/]+:\\/)?\\/*$/)) {\n\t return aPath;\n\t }\n\t\n\t ++level;\n\t }\n\t\n\t // Make sure we add a \"../\" for each component we removed from the root.\n\t return Array(level + 1).join(\"../\") + aPath.substr(aRoot.length + 1);\n\t}\n\texports.relative = relative;\n\t\n\tvar supportsNullProto = (function () {\n\t var obj = Object.create(null);\n\t return !('__proto__' in obj);\n\t}());\n\t\n\tfunction identity (s) {\n\t return s;\n\t}\n\t\n\t/**\n\t * Because behavior goes wacky when you set `__proto__` on objects, we\n\t * have to prefix all the strings in our set with an arbitrary character.\n\t *\n\t * See https://github.com/mozilla/source-map/pull/31 and\n\t * https://github.com/mozilla/source-map/issues/30\n\t *\n\t * @param String aStr\n\t */\n\tfunction toSetString(aStr) {\n\t if (isProtoString(aStr)) {\n\t return '$' + aStr;\n\t }\n\t\n\t return aStr;\n\t}\n\texports.toSetString = supportsNullProto ? identity : toSetString;\n\t\n\tfunction fromSetString(aStr) {\n\t if (isProtoString(aStr)) {\n\t return aStr.slice(1);\n\t }\n\t\n\t return aStr;\n\t}\n\texports.fromSetString = supportsNullProto ? identity : fromSetString;\n\t\n\tfunction isProtoString(s) {\n\t if (!s) {\n\t return false;\n\t }\n\t\n\t var length = s.length;\n\t\n\t if (length < 9 /* \"__proto__\".length */) {\n\t return false;\n\t }\n\t\n\t if (s.charCodeAt(length - 1) !== 95 /* '_' */ ||\n\t s.charCodeAt(length - 2) !== 95 /* '_' */ ||\n\t s.charCodeAt(length - 3) !== 111 /* 'o' */ ||\n\t s.charCodeAt(length - 4) !== 116 /* 't' */ ||\n\t s.charCodeAt(length - 5) !== 111 /* 'o' */ ||\n\t s.charCodeAt(length - 6) !== 114 /* 'r' */ ||\n\t s.charCodeAt(length - 7) !== 112 /* 'p' */ ||\n\t s.charCodeAt(length - 8) !== 95 /* '_' */ ||\n\t s.charCodeAt(length - 9) !== 95 /* '_' */) {\n\t return false;\n\t }\n\t\n\t for (var i = length - 10; i >= 0; i--) {\n\t if (s.charCodeAt(i) !== 36 /* '$' */) {\n\t return false;\n\t }\n\t }\n\t\n\t return true;\n\t}\n\t\n\t/**\n\t * Comparator between two mappings where the original positions are compared.\n\t *\n\t * Optionally pass in `true` as `onlyCompareGenerated` to consider two\n\t * mappings with the same original source/line/column, but different generated\n\t * line and column the same. 
Useful when searching for a mapping with a\n\t * stubbed out mapping.\n\t */\n\tfunction compareByOriginalPositions(mappingA, mappingB, onlyCompareOriginal) {\n\t var cmp = strcmp(mappingA.source, mappingB.source);\n\t if (cmp !== 0) {\n\t return cmp;\n\t }\n\t\n\t cmp = mappingA.originalLine - mappingB.originalLine;\n\t if (cmp !== 0) {\n\t return cmp;\n\t }\n\t\n\t cmp = mappingA.originalColumn - mappingB.originalColumn;\n\t if (cmp !== 0 || onlyCompareOriginal) {\n\t return cmp;\n\t }\n\t\n\t cmp = mappingA.generatedColumn - mappingB.generatedColumn;\n\t if (cmp !== 0) {\n\t return cmp;\n\t }\n\t\n\t cmp = mappingA.generatedLine - mappingB.generatedLine;\n\t if (cmp !== 0) {\n\t return cmp;\n\t }\n\t\n\t return strcmp(mappingA.name, mappingB.name);\n\t}\n\texports.compareByOriginalPositions = compareByOriginalPositions;\n\t\n\t/**\n\t * Comparator between two mappings with deflated source and name indices where\n\t * the generated positions are compared.\n\t *\n\t * Optionally pass in `true` as `onlyCompareGenerated` to consider two\n\t * mappings with the same generated line and column, but different\n\t * source/name/original line and column the same. Useful when searching for a\n\t * mapping with a stubbed out mapping.\n\t */\n\tfunction compareByGeneratedPositionsDeflated(mappingA, mappingB, onlyCompareGenerated) {\n\t var cmp = mappingA.generatedLine - mappingB.generatedLine;\n\t if (cmp !== 0) {\n\t return cmp;\n\t }\n\t\n\t cmp = mappingA.generatedColumn - mappingB.generatedColumn;\n\t if (cmp !== 0 || onlyCompareGenerated) {\n\t return cmp;\n\t }\n\t\n\t cmp = strcmp(mappingA.source, mappingB.source);\n\t if (cmp !== 0) {\n\t return cmp;\n\t }\n\t\n\t cmp = mappingA.originalLine - mappingB.originalLine;\n\t if (cmp !== 0) {\n\t return cmp;\n\t }\n\t\n\t cmp = mappingA.originalColumn - mappingB.originalColumn;\n\t if (cmp !== 0) {\n\t return cmp;\n\t }\n\t\n\t return strcmp(mappingA.name, mappingB.name);\n\t}\n\texports.compareByGeneratedPositionsDeflated = compareByGeneratedPositionsDeflated;\n\t\n\tfunction strcmp(aStr1, aStr2) {\n\t if (aStr1 === aStr2) {\n\t return 0;\n\t }\n\t\n\t if (aStr1 === null) {\n\t return 1; // aStr2 !== null\n\t }\n\t\n\t if (aStr2 === null) {\n\t return -1; // aStr1 !== null\n\t }\n\t\n\t if (aStr1 > aStr2) {\n\t return 1;\n\t }\n\t\n\t return -1;\n\t}\n\t\n\t/**\n\t * Comparator between two mappings with inflated source and name strings where\n\t * the generated positions are compared.\n\t */\n\tfunction compareByGeneratedPositionsInflated(mappingA, mappingB) {\n\t var cmp = mappingA.generatedLine - mappingB.generatedLine;\n\t if (cmp !== 0) {\n\t return cmp;\n\t }\n\t\n\t cmp = mappingA.generatedColumn - mappingB.generatedColumn;\n\t if (cmp !== 0) {\n\t return cmp;\n\t }\n\t\n\t cmp = strcmp(mappingA.source, mappingB.source);\n\t if (cmp !== 0) {\n\t return cmp;\n\t }\n\t\n\t cmp = mappingA.originalLine - mappingB.originalLine;\n\t if (cmp !== 0) {\n\t return cmp;\n\t }\n\t\n\t cmp = mappingA.originalColumn - mappingB.originalColumn;\n\t if (cmp !== 0) {\n\t return cmp;\n\t }\n\t\n\t return strcmp(mappingA.name, mappingB.name);\n\t}\n\texports.compareByGeneratedPositionsInflated = compareByGeneratedPositionsInflated;\n\t\n\t/**\n\t * Strip any JSON XSSI avoidance prefix from the string (as documented\n\t * in the source maps specification), and then parse the string as\n\t * JSON.\n\t */\n\tfunction parseSourceMapInput(str) {\n\t return JSON.parse(str.replace(/^\\)]}'[^\\n]*\\n/, ''));\n\t}\n\texports.parseSourceMapInput = 
parseSourceMapInput;\n\t\n\t/**\n\t * Compute the URL of a source given the the source root, the source's\n\t * URL, and the source map's URL.\n\t */\n\tfunction computeSourceURL(sourceRoot, sourceURL, sourceMapURL) {\n\t sourceURL = sourceURL || '';\n\t\n\t if (sourceRoot) {\n\t // This follows what Chrome does.\n\t if (sourceRoot[sourceRoot.length - 1] !== '/' && sourceURL[0] !== '/') {\n\t sourceRoot += '/';\n\t }\n\t // The spec says:\n\t // Line 4: An optional source root, useful for relocating source\n\t // files on a server or removing repeated values in the\n\t // “sources” entry. This value is prepended to the individual\n\t // entries in the “source” field.\n\t sourceURL = sourceRoot + sourceURL;\n\t }\n\t\n\t // Historically, SourceMapConsumer did not take the sourceMapURL as\n\t // a parameter. This mode is still somewhat supported, which is why\n\t // this code block is conditional. However, it's preferable to pass\n\t // the source map URL to SourceMapConsumer, so that this function\n\t // can implement the source URL resolution algorithm as outlined in\n\t // the spec. This block is basically the equivalent of:\n\t // new URL(sourceURL, sourceMapURL).toString()\n\t // ... except it avoids using URL, which wasn't available in the\n\t // older releases of node still supported by this library.\n\t //\n\t // The spec says:\n\t // If the sources are not absolute URLs after prepending of the\n\t // “sourceRoot”, the sources are resolved relative to the\n\t // SourceMap (like resolving script src in a html document).\n\t if (sourceMapURL) {\n\t var parsed = urlParse(sourceMapURL);\n\t if (!parsed) {\n\t throw new Error(\"sourceMapURL could not be parsed\");\n\t }\n\t if (parsed.path) {\n\t // Strip the last path component, but keep the \"/\".\n\t var index = parsed.path.lastIndexOf('/');\n\t if (index >= 0) {\n\t parsed.path = parsed.path.substring(0, index + 1);\n\t }\n\t }\n\t sourceURL = join(urlGenerate(parsed), sourceURL);\n\t }\n\t\n\t return normalize(sourceURL);\n\t}\n\texports.computeSourceURL = computeSourceURL;\n\n\n/***/ }),\n/* 5 */\n/***/ (function(module, exports, __webpack_require__) {\n\n\t/* -*- Mode: js; js-indent-level: 2; -*- */\n\t/*\n\t * Copyright 2011 Mozilla Foundation and contributors\n\t * Licensed under the New BSD license. See LICENSE or:\n\t * http://opensource.org/licenses/BSD-3-Clause\n\t */\n\t\n\tvar util = __webpack_require__(4);\n\tvar has = Object.prototype.hasOwnProperty;\n\tvar hasNativeMap = typeof Map !== \"undefined\";\n\t\n\t/**\n\t * A data structure which is a combination of an array and a set. Adding a new\n\t * member is O(1), testing for membership is O(1), and finding the index of an\n\t * element is O(1). Removing elements from the set is not supported. Only\n\t * strings are supported for membership.\n\t */\n\tfunction ArraySet() {\n\t this._array = [];\n\t this._set = hasNativeMap ? new Map() : Object.create(null);\n\t}\n\t\n\t/**\n\t * Static method for creating ArraySet instances from an existing array.\n\t */\n\tArraySet.fromArray = function ArraySet_fromArray(aArray, aAllowDuplicates) {\n\t var set = new ArraySet();\n\t for (var i = 0, len = aArray.length; i < len; i++) {\n\t set.add(aArray[i], aAllowDuplicates);\n\t }\n\t return set;\n\t};\n\t\n\t/**\n\t * Return how many unique items are in this ArraySet. If duplicates have been\n\t * added, than those do not count towards the size.\n\t *\n\t * @returns Number\n\t */\n\tArraySet.prototype.size = function ArraySet_size() {\n\t return hasNativeMap ? 
this._set.size : Object.getOwnPropertyNames(this._set).length;\n\t};\n\t\n\t/**\n\t * Add the given string to this set.\n\t *\n\t * @param String aStr\n\t */\n\tArraySet.prototype.add = function ArraySet_add(aStr, aAllowDuplicates) {\n\t var sStr = hasNativeMap ? aStr : util.toSetString(aStr);\n\t var isDuplicate = hasNativeMap ? this.has(aStr) : has.call(this._set, sStr);\n\t var idx = this._array.length;\n\t if (!isDuplicate || aAllowDuplicates) {\n\t this._array.push(aStr);\n\t }\n\t if (!isDuplicate) {\n\t if (hasNativeMap) {\n\t this._set.set(aStr, idx);\n\t } else {\n\t this._set[sStr] = idx;\n\t }\n\t }\n\t};\n\t\n\t/**\n\t * Is the given string a member of this set?\n\t *\n\t * @param String aStr\n\t */\n\tArraySet.prototype.has = function ArraySet_has(aStr) {\n\t if (hasNativeMap) {\n\t return this._set.has(aStr);\n\t } else {\n\t var sStr = util.toSetString(aStr);\n\t return has.call(this._set, sStr);\n\t }\n\t};\n\t\n\t/**\n\t * What is the index of the given string in the array?\n\t *\n\t * @param String aStr\n\t */\n\tArraySet.prototype.indexOf = function ArraySet_indexOf(aStr) {\n\t if (hasNativeMap) {\n\t var idx = this._set.get(aStr);\n\t if (idx >= 0) {\n\t return idx;\n\t }\n\t } else {\n\t var sStr = util.toSetString(aStr);\n\t if (has.call(this._set, sStr)) {\n\t return this._set[sStr];\n\t }\n\t }\n\t\n\t throw new Error('\"' + aStr + '\" is not in the set.');\n\t};\n\t\n\t/**\n\t * What is the element at the given index?\n\t *\n\t * @param Number aIdx\n\t */\n\tArraySet.prototype.at = function ArraySet_at(aIdx) {\n\t if (aIdx >= 0 && aIdx < this._array.length) {\n\t return this._array[aIdx];\n\t }\n\t throw new Error('No element indexed by ' + aIdx);\n\t};\n\t\n\t/**\n\t * Returns the array representation of this set (which has the proper indices\n\t * indicated by indexOf). Note that this is a copy of the internal array used\n\t * for storing the members so that no one can mess with internal state.\n\t */\n\tArraySet.prototype.toArray = function ArraySet_toArray() {\n\t return this._array.slice();\n\t};\n\t\n\texports.ArraySet = ArraySet;\n\n\n/***/ }),\n/* 6 */\n/***/ (function(module, exports, __webpack_require__) {\n\n\t/* -*- Mode: js; js-indent-level: 2; -*- */\n\t/*\n\t * Copyright 2014 Mozilla Foundation and contributors\n\t * Licensed under the New BSD license. See LICENSE or:\n\t * http://opensource.org/licenses/BSD-3-Clause\n\t */\n\t\n\tvar util = __webpack_require__(4);\n\t\n\t/**\n\t * Determine whether mappingB is after mappingA with respect to generated\n\t * position.\n\t */\n\tfunction generatedPositionAfter(mappingA, mappingB) {\n\t // Optimized for most common case\n\t var lineA = mappingA.generatedLine;\n\t var lineB = mappingB.generatedLine;\n\t var columnA = mappingA.generatedColumn;\n\t var columnB = mappingB.generatedColumn;\n\t return lineB > lineA || lineB == lineA && columnB >= columnA ||\n\t util.compareByGeneratedPositionsInflated(mappingA, mappingB) <= 0;\n\t}\n\t\n\t/**\n\t * A data structure to provide a sorted view of accumulated mappings in a\n\t * performance conscious manner. It trades a neglibable overhead in general\n\t * case for a large speedup in case of mappings being added in order.\n\t */\n\tfunction MappingList() {\n\t this._array = [];\n\t this._sorted = true;\n\t // Serves as infimum\n\t this._last = {generatedLine: -1, generatedColumn: 0};\n\t}\n\t\n\t/**\n\t * Iterate through internal items. 
This method takes the same arguments that\n\t * `Array.prototype.forEach` takes.\n\t *\n\t * NOTE: The order of the mappings is NOT guaranteed.\n\t */\n\tMappingList.prototype.unsortedForEach =\n\t function MappingList_forEach(aCallback, aThisArg) {\n\t this._array.forEach(aCallback, aThisArg);\n\t };\n\t\n\t/**\n\t * Add the given source mapping.\n\t *\n\t * @param Object aMapping\n\t */\n\tMappingList.prototype.add = function MappingList_add(aMapping) {\n\t if (generatedPositionAfter(this._last, aMapping)) {\n\t this._last = aMapping;\n\t this._array.push(aMapping);\n\t } else {\n\t this._sorted = false;\n\t this._array.push(aMapping);\n\t }\n\t};\n\t\n\t/**\n\t * Returns the flat, sorted array of mappings. The mappings are sorted by\n\t * generated position.\n\t *\n\t * WARNING: This method returns internal data without copying, for\n\t * performance. The return value must NOT be mutated, and should be treated as\n\t * an immutable borrow. If you want to take ownership, you must make your own\n\t * copy.\n\t */\n\tMappingList.prototype.toArray = function MappingList_toArray() {\n\t if (!this._sorted) {\n\t this._array.sort(util.compareByGeneratedPositionsInflated);\n\t this._sorted = true;\n\t }\n\t return this._array;\n\t};\n\t\n\texports.MappingList = MappingList;\n\n\n/***/ }),\n/* 7 */\n/***/ (function(module, exports, __webpack_require__) {\n\n\t/* -*- Mode: js; js-indent-level: 2; -*- */\n\t/*\n\t * Copyright 2011 Mozilla Foundation and contributors\n\t * Licensed under the New BSD license. See LICENSE or:\n\t * http://opensource.org/licenses/BSD-3-Clause\n\t */\n\t\n\tvar util = __webpack_require__(4);\n\tvar binarySearch = __webpack_require__(8);\n\tvar ArraySet = __webpack_require__(5).ArraySet;\n\tvar base64VLQ = __webpack_require__(2);\n\tvar quickSort = __webpack_require__(9).quickSort;\n\t\n\tfunction SourceMapConsumer(aSourceMap, aSourceMapURL) {\n\t var sourceMap = aSourceMap;\n\t if (typeof aSourceMap === 'string') {\n\t sourceMap = util.parseSourceMapInput(aSourceMap);\n\t }\n\t\n\t return sourceMap.sections != null\n\t ? new IndexedSourceMapConsumer(sourceMap, aSourceMapURL)\n\t : new BasicSourceMapConsumer(sourceMap, aSourceMapURL);\n\t}\n\t\n\tSourceMapConsumer.fromSourceMap = function(aSourceMap, aSourceMapURL) {\n\t return BasicSourceMapConsumer.fromSourceMap(aSourceMap, aSourceMapURL);\n\t}\n\t\n\t/**\n\t * The version of the source mapping spec that we are consuming.\n\t */\n\tSourceMapConsumer.prototype._version = 3;\n\t\n\t// `__generatedMappings` and `__originalMappings` are arrays that hold the\n\t// parsed mapping coordinates from the source map's \"mappings\" attribute. They\n\t// are lazily instantiated, accessed via the `_generatedMappings` and\n\t// `_originalMappings` getters respectively, and we only parse the mappings\n\t// and create these arrays once queried for a source location. 
We jump through\n\t// these hoops because there can be many thousands of mappings, and parsing\n\t// them is expensive, so we only want to do it if we must.\n\t//\n\t// Each object in the arrays is of the form:\n\t//\n\t// {\n\t// generatedLine: The line number in the generated code,\n\t// generatedColumn: The column number in the generated code,\n\t// source: The path to the original source file that generated this\n\t// chunk of code,\n\t// originalLine: The line number in the original source that\n\t// corresponds to this chunk of generated code,\n\t// originalColumn: The column number in the original source that\n\t// corresponds to this chunk of generated code,\n\t// name: The name of the original symbol which generated this chunk of\n\t// code.\n\t// }\n\t//\n\t// All properties except for `generatedLine` and `generatedColumn` can be\n\t// `null`.\n\t//\n\t// `_generatedMappings` is ordered by the generated positions.\n\t//\n\t// `_originalMappings` is ordered by the original positions.\n\t\n\tSourceMapConsumer.prototype.__generatedMappings = null;\n\tObject.defineProperty(SourceMapConsumer.prototype, '_generatedMappings', {\n\t configurable: true,\n\t enumerable: true,\n\t get: function () {\n\t if (!this.__generatedMappings) {\n\t this._parseMappings(this._mappings, this.sourceRoot);\n\t }\n\t\n\t return this.__generatedMappings;\n\t }\n\t});\n\t\n\tSourceMapConsumer.prototype.__originalMappings = null;\n\tObject.defineProperty(SourceMapConsumer.prototype, '_originalMappings', {\n\t configurable: true,\n\t enumerable: true,\n\t get: function () {\n\t if (!this.__originalMappings) {\n\t this._parseMappings(this._mappings, this.sourceRoot);\n\t }\n\t\n\t return this.__originalMappings;\n\t }\n\t});\n\t\n\tSourceMapConsumer.prototype._charIsMappingSeparator =\n\t function SourceMapConsumer_charIsMappingSeparator(aStr, index) {\n\t var c = aStr.charAt(index);\n\t return c === \";\" || c === \",\";\n\t };\n\t\n\t/**\n\t * Parse the mappings in a string in to a data structure which we can easily\n\t * query (the ordered arrays in the `this.__generatedMappings` and\n\t * `this.__originalMappings` properties).\n\t */\n\tSourceMapConsumer.prototype._parseMappings =\n\t function SourceMapConsumer_parseMappings(aStr, aSourceRoot) {\n\t throw new Error(\"Subclasses must implement _parseMappings\");\n\t };\n\t\n\tSourceMapConsumer.GENERATED_ORDER = 1;\n\tSourceMapConsumer.ORIGINAL_ORDER = 2;\n\t\n\tSourceMapConsumer.GREATEST_LOWER_BOUND = 1;\n\tSourceMapConsumer.LEAST_UPPER_BOUND = 2;\n\t\n\t/**\n\t * Iterate over each mapping between an original source/line/column and a\n\t * generated line/column in this source map.\n\t *\n\t * @param Function aCallback\n\t * The function that is called with each mapping.\n\t * @param Object aContext\n\t * Optional. If specified, this object will be the value of `this` every\n\t * time that `aCallback` is called.\n\t * @param aOrder\n\t * Either `SourceMapConsumer.GENERATED_ORDER` or\n\t * `SourceMapConsumer.ORIGINAL_ORDER`. Specifies whether you want to\n\t * iterate over the mappings sorted by the generated file's line/column\n\t * order or the original's source/line/column order, respectively. 
Defaults to\n\t * `SourceMapConsumer.GENERATED_ORDER`.\n\t */\n\tSourceMapConsumer.prototype.eachMapping =\n\t function SourceMapConsumer_eachMapping(aCallback, aContext, aOrder) {\n\t var context = aContext || null;\n\t var order = aOrder || SourceMapConsumer.GENERATED_ORDER;\n\t\n\t var mappings;\n\t switch (order) {\n\t case SourceMapConsumer.GENERATED_ORDER:\n\t mappings = this._generatedMappings;\n\t break;\n\t case SourceMapConsumer.ORIGINAL_ORDER:\n\t mappings = this._originalMappings;\n\t break;\n\t default:\n\t throw new Error(\"Unknown order of iteration.\");\n\t }\n\t\n\t var sourceRoot = this.sourceRoot;\n\t mappings.map(function (mapping) {\n\t var source = mapping.source === null ? null : this._sources.at(mapping.source);\n\t source = util.computeSourceURL(sourceRoot, source, this._sourceMapURL);\n\t return {\n\t source: source,\n\t generatedLine: mapping.generatedLine,\n\t generatedColumn: mapping.generatedColumn,\n\t originalLine: mapping.originalLine,\n\t originalColumn: mapping.originalColumn,\n\t name: mapping.name === null ? null : this._names.at(mapping.name)\n\t };\n\t }, this).forEach(aCallback, context);\n\t };\n\t\n\t/**\n\t * Returns all generated line and column information for the original source,\n\t * line, and column provided. If no column is provided, returns all mappings\n\t * corresponding to a either the line we are searching for or the next\n\t * closest line that has any mappings. Otherwise, returns all mappings\n\t * corresponding to the given line and either the column we are searching for\n\t * or the next closest column that has any offsets.\n\t *\n\t * The only argument is an object with the following properties:\n\t *\n\t * - source: The filename of the original source.\n\t * - line: The line number in the original source. The line number is 1-based.\n\t * - column: Optional. the column number in the original source.\n\t * The column number is 0-based.\n\t *\n\t * and an array of objects is returned, each with the following properties:\n\t *\n\t * - line: The line number in the generated source, or null. The\n\t * line number is 1-based.\n\t * - column: The column number in the generated source, or null.\n\t * The column number is 0-based.\n\t */\n\tSourceMapConsumer.prototype.allGeneratedPositionsFor =\n\t function SourceMapConsumer_allGeneratedPositionsFor(aArgs) {\n\t var line = util.getArg(aArgs, 'line');\n\t\n\t // When there is no exact match, BasicSourceMapConsumer.prototype._findMapping\n\t // returns the index of the closest mapping less than the needle. By\n\t // setting needle.originalColumn to 0, we thus find the last mapping for\n\t // the given line, provided such a mapping exists.\n\t var needle = {\n\t source: util.getArg(aArgs, 'source'),\n\t originalLine: line,\n\t originalColumn: util.getArg(aArgs, 'column', 0)\n\t };\n\t\n\t needle.source = this._findSourceIndex(needle.source);\n\t if (needle.source < 0) {\n\t return [];\n\t }\n\t\n\t var mappings = [];\n\t\n\t var index = this._findMapping(needle,\n\t this._originalMappings,\n\t \"originalLine\",\n\t \"originalColumn\",\n\t util.compareByOriginalPositions,\n\t binarySearch.LEAST_UPPER_BOUND);\n\t if (index >= 0) {\n\t var mapping = this._originalMappings[index];\n\t\n\t if (aArgs.column === undefined) {\n\t var originalLine = mapping.originalLine;\n\t\n\t // Iterate until either we run out of mappings, or we run into\n\t // a mapping for a different line than the one we found. 
Since\n\t // mappings are sorted, this is guaranteed to find all mappings for\n\t // the line we found.\n\t while (mapping && mapping.originalLine === originalLine) {\n\t mappings.push({\n\t line: util.getArg(mapping, 'generatedLine', null),\n\t column: util.getArg(mapping, 'generatedColumn', null),\n\t lastColumn: util.getArg(mapping, 'lastGeneratedColumn', null)\n\t });\n\t\n\t mapping = this._originalMappings[++index];\n\t }\n\t } else {\n\t var originalColumn = mapping.originalColumn;\n\t\n\t // Iterate until either we run out of mappings, or we run into\n\t // a mapping for a different line than the one we were searching for.\n\t // Since mappings are sorted, this is guaranteed to find all mappings for\n\t // the line we are searching for.\n\t while (mapping &&\n\t mapping.originalLine === line &&\n\t mapping.originalColumn == originalColumn) {\n\t mappings.push({\n\t line: util.getArg(mapping, 'generatedLine', null),\n\t column: util.getArg(mapping, 'generatedColumn', null),\n\t lastColumn: util.getArg(mapping, 'lastGeneratedColumn', null)\n\t });\n\t\n\t mapping = this._originalMappings[++index];\n\t }\n\t }\n\t }\n\t\n\t return mappings;\n\t };\n\t\n\texports.SourceMapConsumer = SourceMapConsumer;\n\t\n\t/**\n\t * A BasicSourceMapConsumer instance represents a parsed source map which we can\n\t * query for information about the original file positions by giving it a file\n\t * position in the generated source.\n\t *\n\t * The first parameter is the raw source map (either as a JSON string, or\n\t * already parsed to an object). According to the spec, source maps have the\n\t * following attributes:\n\t *\n\t * - version: Which version of the source map spec this map is following.\n\t * - sources: An array of URLs to the original source files.\n\t * - names: An array of identifiers which can be referrenced by individual mappings.\n\t * - sourceRoot: Optional. The URL root from which all sources are relative.\n\t * - sourcesContent: Optional. An array of contents of the original source files.\n\t * - mappings: A string of base64 VLQs which contain the actual mappings.\n\t * - file: Optional. The generated file this source map is associated with.\n\t *\n\t * Here is an example source map, taken from the source map spec[0]:\n\t *\n\t * {\n\t * version : 3,\n\t * file: \"out.js\",\n\t * sourceRoot : \"\",\n\t * sources: [\"foo.js\", \"bar.js\"],\n\t * names: [\"src\", \"maps\", \"are\", \"fun\"],\n\t * mappings: \"AA,AB;;ABCDE;\"\n\t * }\n\t *\n\t * The second parameter, if given, is a string whose value is the URL\n\t * at which the source map was found. 
This URL is used to compute the\n\t * sources array.\n\t *\n\t * [0]: https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit?pli=1#\n\t */\n\tfunction BasicSourceMapConsumer(aSourceMap, aSourceMapURL) {\n\t var sourceMap = aSourceMap;\n\t if (typeof aSourceMap === 'string') {\n\t sourceMap = util.parseSourceMapInput(aSourceMap);\n\t }\n\t\n\t var version = util.getArg(sourceMap, 'version');\n\t var sources = util.getArg(sourceMap, 'sources');\n\t // Sass 3.3 leaves out the 'names' array, so we deviate from the spec (which\n\t // requires the array) to play nice here.\n\t var names = util.getArg(sourceMap, 'names', []);\n\t var sourceRoot = util.getArg(sourceMap, 'sourceRoot', null);\n\t var sourcesContent = util.getArg(sourceMap, 'sourcesContent', null);\n\t var mappings = util.getArg(sourceMap, 'mappings');\n\t var file = util.getArg(sourceMap, 'file', null);\n\t\n\t // Once again, Sass deviates from the spec and supplies the version as a\n\t // string rather than a number, so we use loose equality checking here.\n\t if (version != this._version) {\n\t throw new Error('Unsupported version: ' + version);\n\t }\n\t\n\t if (sourceRoot) {\n\t sourceRoot = util.normalize(sourceRoot);\n\t }\n\t\n\t sources = sources\n\t .map(String)\n\t // Some source maps produce relative source paths like \"./foo.js\" instead of\n\t // \"foo.js\". Normalize these first so that future comparisons will succeed.\n\t // See bugzil.la/1090768.\n\t .map(util.normalize)\n\t // Always ensure that absolute sources are internally stored relative to\n\t // the source root, if the source root is absolute. Not doing this would\n\t // be particularly problematic when the source root is a prefix of the\n\t // source (valid, but why??). See github issue #199 and bugzil.la/1188982.\n\t .map(function (source) {\n\t return sourceRoot && util.isAbsolute(sourceRoot) && util.isAbsolute(source)\n\t ? util.relative(sourceRoot, source)\n\t : source;\n\t });\n\t\n\t // Pass `true` below to allow duplicate names and sources. While source maps\n\t // are intended to be compressed and deduplicated, the TypeScript compiler\n\t // sometimes generates source maps with duplicates in them. See Github issue\n\t // #72 and bugzil.la/889492.\n\t this._names = ArraySet.fromArray(names.map(String), true);\n\t this._sources = ArraySet.fromArray(sources, true);\n\t\n\t this._absoluteSources = this._sources.toArray().map(function (s) {\n\t return util.computeSourceURL(sourceRoot, s, aSourceMapURL);\n\t });\n\t\n\t this.sourceRoot = sourceRoot;\n\t this.sourcesContent = sourcesContent;\n\t this._mappings = mappings;\n\t this._sourceMapURL = aSourceMapURL;\n\t this.file = file;\n\t}\n\t\n\tBasicSourceMapConsumer.prototype = Object.create(SourceMapConsumer.prototype);\n\tBasicSourceMapConsumer.prototype.consumer = SourceMapConsumer;\n\t\n\t/**\n\t * Utility function to find the index of a source. Returns -1 if not\n\t * found.\n\t */\n\tBasicSourceMapConsumer.prototype._findSourceIndex = function(aSource) {\n\t var relativeSource = aSource;\n\t if (this.sourceRoot != null) {\n\t relativeSource = util.relative(this.sourceRoot, relativeSource);\n\t }\n\t\n\t if (this._sources.has(relativeSource)) {\n\t return this._sources.indexOf(relativeSource);\n\t }\n\t\n\t // Maybe aSource is an absolute URL as returned by |sources|. 
In\n\t // this case we can't simply undo the transform.\n\t var i;\n\t for (i = 0; i < this._absoluteSources.length; ++i) {\n\t if (this._absoluteSources[i] == aSource) {\n\t return i;\n\t }\n\t }\n\t\n\t return -1;\n\t};\n\t\n\t/**\n\t * Create a BasicSourceMapConsumer from a SourceMapGenerator.\n\t *\n\t * @param SourceMapGenerator aSourceMap\n\t * The source map that will be consumed.\n\t * @param String aSourceMapURL\n\t * The URL at which the source map can be found (optional)\n\t * @returns BasicSourceMapConsumer\n\t */\n\tBasicSourceMapConsumer.fromSourceMap =\n\t function SourceMapConsumer_fromSourceMap(aSourceMap, aSourceMapURL) {\n\t var smc = Object.create(BasicSourceMapConsumer.prototype);\n\t\n\t var names = smc._names = ArraySet.fromArray(aSourceMap._names.toArray(), true);\n\t var sources = smc._sources = ArraySet.fromArray(aSourceMap._sources.toArray(), true);\n\t smc.sourceRoot = aSourceMap._sourceRoot;\n\t smc.sourcesContent = aSourceMap._generateSourcesContent(smc._sources.toArray(),\n\t smc.sourceRoot);\n\t smc.file = aSourceMap._file;\n\t smc._sourceMapURL = aSourceMapURL;\n\t smc._absoluteSources = smc._sources.toArray().map(function (s) {\n\t return util.computeSourceURL(smc.sourceRoot, s, aSourceMapURL);\n\t });\n\t\n\t // Because we are modifying the entries (by converting string sources and\n\t // names to indices into the sources and names ArraySets), we have to make\n\t // a copy of the entry or else bad things happen. Shared mutable state\n\t // strikes again! See github issue #191.\n\t\n\t var generatedMappings = aSourceMap._mappings.toArray().slice();\n\t var destGeneratedMappings = smc.__generatedMappings = [];\n\t var destOriginalMappings = smc.__originalMappings = [];\n\t\n\t for (var i = 0, length = generatedMappings.length; i < length; i++) {\n\t var srcMapping = generatedMappings[i];\n\t var destMapping = new Mapping;\n\t destMapping.generatedLine = srcMapping.generatedLine;\n\t destMapping.generatedColumn = srcMapping.generatedColumn;\n\t\n\t if (srcMapping.source) {\n\t destMapping.source = sources.indexOf(srcMapping.source);\n\t destMapping.originalLine = srcMapping.originalLine;\n\t destMapping.originalColumn = srcMapping.originalColumn;\n\t\n\t if (srcMapping.name) {\n\t destMapping.name = names.indexOf(srcMapping.name);\n\t }\n\t\n\t destOriginalMappings.push(destMapping);\n\t }\n\t\n\t destGeneratedMappings.push(destMapping);\n\t }\n\t\n\t quickSort(smc.__originalMappings, util.compareByOriginalPositions);\n\t\n\t return smc;\n\t };\n\t\n\t/**\n\t * The version of the source mapping spec that we are consuming.\n\t */\n\tBasicSourceMapConsumer.prototype._version = 3;\n\t\n\t/**\n\t * The list of original sources.\n\t */\n\tObject.defineProperty(BasicSourceMapConsumer.prototype, 'sources', {\n\t get: function () {\n\t return this._absoluteSources.slice();\n\t }\n\t});\n\t\n\t/**\n\t * Provide the JIT with a nice shape / hidden class.\n\t */\n\tfunction Mapping() {\n\t this.generatedLine = 0;\n\t this.generatedColumn = 0;\n\t this.source = null;\n\t this.originalLine = null;\n\t this.originalColumn = null;\n\t this.name = null;\n\t}\n\t\n\t/**\n\t * Parse the mappings in a string in to a data structure which we can easily\n\t * query (the ordered arrays in the `this.__generatedMappings` and\n\t * `this.__originalMappings` properties).\n\t */\n\tBasicSourceMapConsumer.prototype._parseMappings =\n\t function SourceMapConsumer_parseMappings(aStr, aSourceRoot) {\n\t var generatedLine = 1;\n\t var previousGeneratedColumn = 0;\n\t var previousOriginalLine 
= 0;\n\t var previousOriginalColumn = 0;\n\t var previousSource = 0;\n\t var previousName = 0;\n\t var length = aStr.length;\n\t var index = 0;\n\t var cachedSegments = {};\n\t var temp = {};\n\t var originalMappings = [];\n\t var generatedMappings = [];\n\t var mapping, str, segment, end, value;\n\t\n\t while (index < length) {\n\t if (aStr.charAt(index) === ';') {\n\t generatedLine++;\n\t index++;\n\t previousGeneratedColumn = 0;\n\t }\n\t else if (aStr.charAt(index) === ',') {\n\t index++;\n\t }\n\t else {\n\t mapping = new Mapping();\n\t mapping.generatedLine = generatedLine;\n\t\n\t // Because each offset is encoded relative to the previous one,\n\t // many segments often have the same encoding. We can exploit this\n\t // fact by caching the parsed variable length fields of each segment,\n\t // allowing us to avoid a second parse if we encounter the same\n\t // segment again.\n\t for (end = index; end < length; end++) {\n\t if (this._charIsMappingSeparator(aStr, end)) {\n\t break;\n\t }\n\t }\n\t str = aStr.slice(index, end);\n\t\n\t segment = cachedSegments[str];\n\t if (segment) {\n\t index += str.length;\n\t } else {\n\t segment = [];\n\t while (index < end) {\n\t base64VLQ.decode(aStr, index, temp);\n\t value = temp.value;\n\t index = temp.rest;\n\t segment.push(value);\n\t }\n\t\n\t if (segment.length === 2) {\n\t throw new Error('Found a source, but no line and column');\n\t }\n\t\n\t if (segment.length === 3) {\n\t throw new Error('Found a source and line, but no column');\n\t }\n\t\n\t cachedSegments[str] = segment;\n\t }\n\t\n\t // Generated column.\n\t mapping.generatedColumn = previousGeneratedColumn + segment[0];\n\t previousGeneratedColumn = mapping.generatedColumn;\n\t\n\t if (segment.length > 1) {\n\t // Original source.\n\t mapping.source = previousSource + segment[1];\n\t previousSource += segment[1];\n\t\n\t // Original line.\n\t mapping.originalLine = previousOriginalLine + segment[2];\n\t previousOriginalLine = mapping.originalLine;\n\t // Lines are stored 0-based\n\t mapping.originalLine += 1;\n\t\n\t // Original column.\n\t mapping.originalColumn = previousOriginalColumn + segment[3];\n\t previousOriginalColumn = mapping.originalColumn;\n\t\n\t if (segment.length > 4) {\n\t // Original name.\n\t mapping.name = previousName + segment[4];\n\t previousName += segment[4];\n\t }\n\t }\n\t\n\t generatedMappings.push(mapping);\n\t if (typeof mapping.originalLine === 'number') {\n\t originalMappings.push(mapping);\n\t }\n\t }\n\t }\n\t\n\t quickSort(generatedMappings, util.compareByGeneratedPositionsDeflated);\n\t this.__generatedMappings = generatedMappings;\n\t\n\t quickSort(originalMappings, util.compareByOriginalPositions);\n\t this.__originalMappings = originalMappings;\n\t };\n\t\n\t/**\n\t * Find the mapping that best matches the hypothetical \"needle\" mapping that\n\t * we are searching for in the given \"haystack\" of mappings.\n\t */\n\tBasicSourceMapConsumer.prototype._findMapping =\n\t function SourceMapConsumer_findMapping(aNeedle, aMappings, aLineName,\n\t aColumnName, aComparator, aBias) {\n\t // To return the position we are searching for, we must first find the\n\t // mapping for the given position and then return the opposite position it\n\t // points to. 
Because the mappings are sorted, we can use binary search to\n\t // find the best mapping.\n\t\n\t if (aNeedle[aLineName] <= 0) {\n\t throw new TypeError('Line must be greater than or equal to 1, got '\n\t + aNeedle[aLineName]);\n\t }\n\t if (aNeedle[aColumnName] < 0) {\n\t throw new TypeError('Column must be greater than or equal to 0, got '\n\t + aNeedle[aColumnName]);\n\t }\n\t\n\t return binarySearch.search(aNeedle, aMappings, aComparator, aBias);\n\t };\n\t\n\t/**\n\t * Compute the last column for each generated mapping. The last column is\n\t * inclusive.\n\t */\n\tBasicSourceMapConsumer.prototype.computeColumnSpans =\n\t function SourceMapConsumer_computeColumnSpans() {\n\t for (var index = 0; index < this._generatedMappings.length; ++index) {\n\t var mapping = this._generatedMappings[index];\n\t\n\t // Mappings do not contain a field for the last generated columnt. We\n\t // can come up with an optimistic estimate, however, by assuming that\n\t // mappings are contiguous (i.e. given two consecutive mappings, the\n\t // first mapping ends where the second one starts).\n\t if (index + 1 < this._generatedMappings.length) {\n\t var nextMapping = this._generatedMappings[index + 1];\n\t\n\t if (mapping.generatedLine === nextMapping.generatedLine) {\n\t mapping.lastGeneratedColumn = nextMapping.generatedColumn - 1;\n\t continue;\n\t }\n\t }\n\t\n\t // The last mapping for each line spans the entire line.\n\t mapping.lastGeneratedColumn = Infinity;\n\t }\n\t };\n\t\n\t/**\n\t * Returns the original source, line, and column information for the generated\n\t * source's line and column positions provided. The only argument is an object\n\t * with the following properties:\n\t *\n\t * - line: The line number in the generated source. The line number\n\t * is 1-based.\n\t * - column: The column number in the generated source. The column\n\t * number is 0-based.\n\t * - bias: Either 'SourceMapConsumer.GREATEST_LOWER_BOUND' or\n\t * 'SourceMapConsumer.LEAST_UPPER_BOUND'. Specifies whether to return the\n\t * closest element that is smaller than or greater than the one we are\n\t * searching for, respectively, if the exact element cannot be found.\n\t * Defaults to 'SourceMapConsumer.GREATEST_LOWER_BOUND'.\n\t *\n\t * and an object is returned with the following properties:\n\t *\n\t * - source: The original source file, or null.\n\t * - line: The line number in the original source, or null. The\n\t * line number is 1-based.\n\t * - column: The column number in the original source, or null. 
The\n\t * column number is 0-based.\n\t * - name: The original identifier, or null.\n\t */\n\tBasicSourceMapConsumer.prototype.originalPositionFor =\n\t function SourceMapConsumer_originalPositionFor(aArgs) {\n\t var needle = {\n\t generatedLine: util.getArg(aArgs, 'line'),\n\t generatedColumn: util.getArg(aArgs, 'column')\n\t };\n\t\n\t var index = this._findMapping(\n\t needle,\n\t this._generatedMappings,\n\t \"generatedLine\",\n\t \"generatedColumn\",\n\t util.compareByGeneratedPositionsDeflated,\n\t util.getArg(aArgs, 'bias', SourceMapConsumer.GREATEST_LOWER_BOUND)\n\t );\n\t\n\t if (index >= 0) {\n\t var mapping = this._generatedMappings[index];\n\t\n\t if (mapping.generatedLine === needle.generatedLine) {\n\t var source = util.getArg(mapping, 'source', null);\n\t if (source !== null) {\n\t source = this._sources.at(source);\n\t source = util.computeSourceURL(this.sourceRoot, source, this._sourceMapURL);\n\t }\n\t var name = util.getArg(mapping, 'name', null);\n\t if (name !== null) {\n\t name = this._names.at(name);\n\t }\n\t return {\n\t source: source,\n\t line: util.getArg(mapping, 'originalLine', null),\n\t column: util.getArg(mapping, 'originalColumn', null),\n\t name: name\n\t };\n\t }\n\t }\n\t\n\t return {\n\t source: null,\n\t line: null,\n\t column: null,\n\t name: null\n\t };\n\t };\n\t\n\t/**\n\t * Return true if we have the source content for every source in the source\n\t * map, false otherwise.\n\t */\n\tBasicSourceMapConsumer.prototype.hasContentsOfAllSources =\n\t function BasicSourceMapConsumer_hasContentsOfAllSources() {\n\t if (!this.sourcesContent) {\n\t return false;\n\t }\n\t return this.sourcesContent.length >= this._sources.size() &&\n\t !this.sourcesContent.some(function (sc) { return sc == null; });\n\t };\n\t\n\t/**\n\t * Returns the original source content. The only argument is the url of the\n\t * original source file. Returns null if no original source content is\n\t * available.\n\t */\n\tBasicSourceMapConsumer.prototype.sourceContentFor =\n\t function SourceMapConsumer_sourceContentFor(aSource, nullOnMissing) {\n\t if (!this.sourcesContent) {\n\t return null;\n\t }\n\t\n\t var index = this._findSourceIndex(aSource);\n\t if (index >= 0) {\n\t return this.sourcesContent[index];\n\t }\n\t\n\t var relativeSource = aSource;\n\t if (this.sourceRoot != null) {\n\t relativeSource = util.relative(this.sourceRoot, relativeSource);\n\t }\n\t\n\t var url;\n\t if (this.sourceRoot != null\n\t && (url = util.urlParse(this.sourceRoot))) {\n\t // XXX: file:// URIs and absolute paths lead to unexpected behavior for\n\t // many users. We can help them out when they expect file:// URIs to\n\t // behave like it would if they were running a local HTTP server. See\n\t // https://bugzilla.mozilla.org/show_bug.cgi?id=885597.\n\t var fileUriAbsPath = relativeSource.replace(/^file:\\/\\//, \"\");\n\t if (url.scheme == \"file\"\n\t && this._sources.has(fileUriAbsPath)) {\n\t return this.sourcesContent[this._sources.indexOf(fileUriAbsPath)]\n\t }\n\t\n\t if ((!url.path || url.path == \"/\")\n\t && this._sources.has(\"/\" + relativeSource)) {\n\t return this.sourcesContent[this._sources.indexOf(\"/\" + relativeSource)];\n\t }\n\t }\n\t\n\t // This function is used recursively from\n\t // IndexedSourceMapConsumer.prototype.sourceContentFor. 
In that case, we\n\t // don't want to throw if we can't find the source - we just want to\n\t // return null, so we provide a flag to exit gracefully.\n\t if (nullOnMissing) {\n\t return null;\n\t }\n\t else {\n\t throw new Error('\"' + relativeSource + '\" is not in the SourceMap.');\n\t }\n\t };\n\t\n\t/**\n\t * Returns the generated line and column information for the original source,\n\t * line, and column positions provided. The only argument is an object with\n\t * the following properties:\n\t *\n\t * - source: The filename of the original source.\n\t * - line: The line number in the original source. The line number\n\t * is 1-based.\n\t * - column: The column number in the original source. The column\n\t * number is 0-based.\n\t * - bias: Either 'SourceMapConsumer.GREATEST_LOWER_BOUND' or\n\t * 'SourceMapConsumer.LEAST_UPPER_BOUND'. Specifies whether to return the\n\t * closest element that is smaller than or greater than the one we are\n\t * searching for, respectively, if the exact element cannot be found.\n\t * Defaults to 'SourceMapConsumer.GREATEST_LOWER_BOUND'.\n\t *\n\t * and an object is returned with the following properties:\n\t *\n\t * - line: The line number in the generated source, or null. The\n\t * line number is 1-based.\n\t * - column: The column number in the generated source, or null.\n\t * The column number is 0-based.\n\t */\n\tBasicSourceMapConsumer.prototype.generatedPositionFor =\n\t function SourceMapConsumer_generatedPositionFor(aArgs) {\n\t var source = util.getArg(aArgs, 'source');\n\t source = this._findSourceIndex(source);\n\t if (source < 0) {\n\t return {\n\t line: null,\n\t column: null,\n\t lastColumn: null\n\t };\n\t }\n\t\n\t var needle = {\n\t source: source,\n\t originalLine: util.getArg(aArgs, 'line'),\n\t originalColumn: util.getArg(aArgs, 'column')\n\t };\n\t\n\t var index = this._findMapping(\n\t needle,\n\t this._originalMappings,\n\t \"originalLine\",\n\t \"originalColumn\",\n\t util.compareByOriginalPositions,\n\t util.getArg(aArgs, 'bias', SourceMapConsumer.GREATEST_LOWER_BOUND)\n\t );\n\t\n\t if (index >= 0) {\n\t var mapping = this._originalMappings[index];\n\t\n\t if (mapping.source === needle.source) {\n\t return {\n\t line: util.getArg(mapping, 'generatedLine', null),\n\t column: util.getArg(mapping, 'generatedColumn', null),\n\t lastColumn: util.getArg(mapping, 'lastGeneratedColumn', null)\n\t };\n\t }\n\t }\n\t\n\t return {\n\t line: null,\n\t column: null,\n\t lastColumn: null\n\t };\n\t };\n\t\n\texports.BasicSourceMapConsumer = BasicSourceMapConsumer;\n\t\n\t/**\n\t * An IndexedSourceMapConsumer instance represents a parsed source map which\n\t * we can query for information. It differs from BasicSourceMapConsumer in\n\t * that it takes \"indexed\" source maps (i.e. ones with a \"sections\" field) as\n\t * input.\n\t *\n\t * The first parameter is a raw source map (either as a JSON string, or already\n\t * parsed to an object). According to the spec for indexed source maps, they\n\t * have the following attributes:\n\t *\n\t * - version: Which version of the source map spec this map is following.\n\t * - file: Optional. The generated file this source map is associated with.\n\t * - sections: A list of section definitions.\n\t *\n\t * Each value under the \"sections\" field has two fields:\n\t * - offset: The offset into the original specified at which this section\n\t * begins to apply, defined as an object with a \"line\" and \"column\"\n\t * field.\n\t * - map: A source map definition. 
This source map could also be indexed,\n\t * but doesn't have to be.\n\t *\n\t * Instead of the \"map\" field, it's also possible to have a \"url\" field\n\t * specifying a URL to retrieve a source map from, but that's currently\n\t * unsupported.\n\t *\n\t * Here's an example source map, taken from the source map spec[0], but\n\t * modified to omit a section which uses the \"url\" field.\n\t *\n\t * {\n\t * version : 3,\n\t * file: \"app.js\",\n\t * sections: [{\n\t * offset: {line:100, column:10},\n\t * map: {\n\t * version : 3,\n\t * file: \"section.js\",\n\t * sources: [\"foo.js\", \"bar.js\"],\n\t * names: [\"src\", \"maps\", \"are\", \"fun\"],\n\t * mappings: \"AAAA,E;;ABCDE;\"\n\t * }\n\t * }],\n\t * }\n\t *\n\t * The second parameter, if given, is a string whose value is the URL\n\t * at which the source map was found. This URL is used to compute the\n\t * sources array.\n\t *\n\t * [0]: https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit#heading=h.535es3xeprgt\n\t */\n\tfunction IndexedSourceMapConsumer(aSourceMap, aSourceMapURL) {\n\t var sourceMap = aSourceMap;\n\t if (typeof aSourceMap === 'string') {\n\t sourceMap = util.parseSourceMapInput(aSourceMap);\n\t }\n\t\n\t var version = util.getArg(sourceMap, 'version');\n\t var sections = util.getArg(sourceMap, 'sections');\n\t\n\t if (version != this._version) {\n\t throw new Error('Unsupported version: ' + version);\n\t }\n\t\n\t this._sources = new ArraySet();\n\t this._names = new ArraySet();\n\t\n\t var lastOffset = {\n\t line: -1,\n\t column: 0\n\t };\n\t this._sections = sections.map(function (s) {\n\t if (s.url) {\n\t // The url field will require support for asynchronicity.\n\t // See https://github.com/mozilla/source-map/issues/16\n\t throw new Error('Support for url field in sections not implemented.');\n\t }\n\t var offset = util.getArg(s, 'offset');\n\t var offsetLine = util.getArg(offset, 'line');\n\t var offsetColumn = util.getArg(offset, 'column');\n\t\n\t if (offsetLine < lastOffset.line ||\n\t (offsetLine === lastOffset.line && offsetColumn < lastOffset.column)) {\n\t throw new Error('Section offsets must be ordered and non-overlapping.');\n\t }\n\t lastOffset = offset;\n\t\n\t return {\n\t generatedOffset: {\n\t // The offset fields are 0-based, but we use 1-based indices when\n\t // encoding/decoding from VLQ.\n\t generatedLine: offsetLine + 1,\n\t generatedColumn: offsetColumn + 1\n\t },\n\t consumer: new SourceMapConsumer(util.getArg(s, 'map'), aSourceMapURL)\n\t }\n\t });\n\t}\n\t\n\tIndexedSourceMapConsumer.prototype = Object.create(SourceMapConsumer.prototype);\n\tIndexedSourceMapConsumer.prototype.constructor = SourceMapConsumer;\n\t\n\t/**\n\t * The version of the source mapping spec that we are consuming.\n\t */\n\tIndexedSourceMapConsumer.prototype._version = 3;\n\t\n\t/**\n\t * The list of original sources.\n\t */\n\tObject.defineProperty(IndexedSourceMapConsumer.prototype, 'sources', {\n\t get: function () {\n\t var sources = [];\n\t for (var i = 0; i < this._sections.length; i++) {\n\t for (var j = 0; j < this._sections[i].consumer.sources.length; j++) {\n\t sources.push(this._sections[i].consumer.sources[j]);\n\t }\n\t }\n\t return sources;\n\t }\n\t});\n\t\n\t/**\n\t * Returns the original source, line, and column information for the generated\n\t * source's line and column positions provided. The only argument is an object\n\t * with the following properties:\n\t *\n\t * - line: The line number in the generated source. 
The line number\n\t * is 1-based.\n\t * - column: The column number in the generated source. The column\n\t * number is 0-based.\n\t *\n\t * and an object is returned with the following properties:\n\t *\n\t * - source: The original source file, or null.\n\t * - line: The line number in the original source, or null. The\n\t * line number is 1-based.\n\t * - column: The column number in the original source, or null. The\n\t * column number is 0-based.\n\t * - name: The original identifier, or null.\n\t */\n\tIndexedSourceMapConsumer.prototype.originalPositionFor =\n\t function IndexedSourceMapConsumer_originalPositionFor(aArgs) {\n\t var needle = {\n\t generatedLine: util.getArg(aArgs, 'line'),\n\t generatedColumn: util.getArg(aArgs, 'column')\n\t };\n\t\n\t // Find the section containing the generated position we're trying to map\n\t // to an original position.\n\t var sectionIndex = binarySearch.search(needle, this._sections,\n\t function(needle, section) {\n\t var cmp = needle.generatedLine - section.generatedOffset.generatedLine;\n\t if (cmp) {\n\t return cmp;\n\t }\n\t\n\t return (needle.generatedColumn -\n\t section.generatedOffset.generatedColumn);\n\t });\n\t var section = this._sections[sectionIndex];\n\t\n\t if (!section) {\n\t return {\n\t source: null,\n\t line: null,\n\t column: null,\n\t name: null\n\t };\n\t }\n\t\n\t return section.consumer.originalPositionFor({\n\t line: needle.generatedLine -\n\t (section.generatedOffset.generatedLine - 1),\n\t column: needle.generatedColumn -\n\t (section.generatedOffset.generatedLine === needle.generatedLine\n\t ? section.generatedOffset.generatedColumn - 1\n\t : 0),\n\t bias: aArgs.bias\n\t });\n\t };\n\t\n\t/**\n\t * Return true if we have the source content for every source in the source\n\t * map, false otherwise.\n\t */\n\tIndexedSourceMapConsumer.prototype.hasContentsOfAllSources =\n\t function IndexedSourceMapConsumer_hasContentsOfAllSources() {\n\t return this._sections.every(function (s) {\n\t return s.consumer.hasContentsOfAllSources();\n\t });\n\t };\n\t\n\t/**\n\t * Returns the original source content. The only argument is the url of the\n\t * original source file. Returns null if no original source content is\n\t * available.\n\t */\n\tIndexedSourceMapConsumer.prototype.sourceContentFor =\n\t function IndexedSourceMapConsumer_sourceContentFor(aSource, nullOnMissing) {\n\t for (var i = 0; i < this._sections.length; i++) {\n\t var section = this._sections[i];\n\t\n\t var content = section.consumer.sourceContentFor(aSource, true);\n\t if (content) {\n\t return content;\n\t }\n\t }\n\t if (nullOnMissing) {\n\t return null;\n\t }\n\t else {\n\t throw new Error('\"' + aSource + '\" is not in the SourceMap.');\n\t }\n\t };\n\t\n\t/**\n\t * Returns the generated line and column information for the original source,\n\t * line, and column positions provided. The only argument is an object with\n\t * the following properties:\n\t *\n\t * - source: The filename of the original source.\n\t * - line: The line number in the original source. The line number\n\t * is 1-based.\n\t * - column: The column number in the original source. The column\n\t * number is 0-based.\n\t *\n\t * and an object is returned with the following properties:\n\t *\n\t * - line: The line number in the generated source, or null. The\n\t * line number is 1-based. 
\n\t * - column: The column number in the generated source, or null.\n\t * The column number is 0-based.\n\t */\n\tIndexedSourceMapConsumer.prototype.generatedPositionFor =\n\t function IndexedSourceMapConsumer_generatedPositionFor(aArgs) {\n\t for (var i = 0; i < this._sections.length; i++) {\n\t var section = this._sections[i];\n\t\n\t // Only consider this section if the requested source is in the list of\n\t // sources of the consumer.\n\t if (section.consumer._findSourceIndex(util.getArg(aArgs, 'source')) === -1) {\n\t continue;\n\t }\n\t var generatedPosition = section.consumer.generatedPositionFor(aArgs);\n\t if (generatedPosition) {\n\t var ret = {\n\t line: generatedPosition.line +\n\t (section.generatedOffset.generatedLine - 1),\n\t column: generatedPosition.column +\n\t (section.generatedOffset.generatedLine === generatedPosition.line\n\t ? section.generatedOffset.generatedColumn - 1\n\t : 0)\n\t };\n\t return ret;\n\t }\n\t }\n\t\n\t return {\n\t line: null,\n\t column: null\n\t };\n\t };\n\t\n\t/**\n\t * Parse the mappings in a string in to a data structure which we can easily\n\t * query (the ordered arrays in the `this.__generatedMappings` and\n\t * `this.__originalMappings` properties).\n\t */\n\tIndexedSourceMapConsumer.prototype._parseMappings =\n\t function IndexedSourceMapConsumer_parseMappings(aStr, aSourceRoot) {\n\t this.__generatedMappings = [];\n\t this.__originalMappings = [];\n\t for (var i = 0; i < this._sections.length; i++) {\n\t var section = this._sections[i];\n\t var sectionMappings = section.consumer._generatedMappings;\n\t for (var j = 0; j < sectionMappings.length; j++) {\n\t var mapping = sectionMappings[j];\n\t\n\t var source = section.consumer._sources.at(mapping.source);\n\t source = util.computeSourceURL(section.consumer.sourceRoot, source, this._sourceMapURL);\n\t this._sources.add(source);\n\t source = this._sources.indexOf(source);\n\t\n\t var name = null;\n\t if (mapping.name) {\n\t name = section.consumer._names.at(mapping.name);\n\t this._names.add(name);\n\t name = this._names.indexOf(name);\n\t }\n\t\n\t // The mappings coming from the consumer for the section have\n\t // generated positions relative to the start of the section, so we\n\t // need to offset them to be relative to the start of the concatenated\n\t // generated file.\n\t var adjustedMapping = {\n\t source: source,\n\t generatedLine: mapping.generatedLine +\n\t (section.generatedOffset.generatedLine - 1),\n\t generatedColumn: mapping.generatedColumn +\n\t (section.generatedOffset.generatedLine === mapping.generatedLine\n\t ? section.generatedOffset.generatedColumn - 1\n\t : 0),\n\t originalLine: mapping.originalLine,\n\t originalColumn: mapping.originalColumn,\n\t name: name\n\t };\n\t\n\t this.__generatedMappings.push(adjustedMapping);\n\t if (typeof adjustedMapping.originalLine === 'number') {\n\t this.__originalMappings.push(adjustedMapping);\n\t }\n\t }\n\t }\n\t\n\t quickSort(this.__generatedMappings, util.compareByGeneratedPositionsDeflated);\n\t quickSort(this.__originalMappings, util.compareByOriginalPositions);\n\t };\n\t\n\texports.IndexedSourceMapConsumer = IndexedSourceMapConsumer;\n\n\n/***/ }),\n/* 8 */\n/***/ (function(module, exports) {\n\n\t/* -*- Mode: js; js-indent-level: 2; -*- */\n\t/*\n\t * Copyright 2011 Mozilla Foundation and contributors\n\t * Licensed under the New BSD license. 
See LICENSE or:\n\t * http://opensource.org/licenses/BSD-3-Clause\n\t */\n\t\n\texports.GREATEST_LOWER_BOUND = 1;\n\texports.LEAST_UPPER_BOUND = 2;\n\t\n\t/**\n\t * Recursive implementation of binary search.\n\t *\n\t * @param aLow Indices here and lower do not contain the needle.\n\t * @param aHigh Indices here and higher do not contain the needle.\n\t * @param aNeedle The element being searched for.\n\t * @param aHaystack The non-empty array being searched.\n\t * @param aCompare Function which takes two elements and returns -1, 0, or 1.\n\t * @param aBias Either 'binarySearch.GREATEST_LOWER_BOUND' or\n\t * 'binarySearch.LEAST_UPPER_BOUND'. Specifies whether to return the\n\t * closest element that is smaller than or greater than the one we are\n\t * searching for, respectively, if the exact element cannot be found.\n\t */\n\tfunction recursiveSearch(aLow, aHigh, aNeedle, aHaystack, aCompare, aBias) {\n\t // This function terminates when one of the following is true:\n\t //\n\t // 1. We find the exact element we are looking for.\n\t //\n\t // 2. We did not find the exact element, but we can return the index of\n\t // the next-closest element.\n\t //\n\t // 3. We did not find the exact element, and there is no next-closest\n\t // element than the one we are searching for, so we return -1.\n\t var mid = Math.floor((aHigh - aLow) / 2) + aLow;\n\t var cmp = aCompare(aNeedle, aHaystack[mid], true);\n\t if (cmp === 0) {\n\t // Found the element we are looking for.\n\t return mid;\n\t }\n\t else if (cmp > 0) {\n\t // Our needle is greater than aHaystack[mid].\n\t if (aHigh - mid > 1) {\n\t // The element is in the upper half.\n\t return recursiveSearch(mid, aHigh, aNeedle, aHaystack, aCompare, aBias);\n\t }\n\t\n\t // The exact needle element was not found in this haystack. Determine if\n\t // we are in termination case (3) or (2) and return the appropriate thing.\n\t if (aBias == exports.LEAST_UPPER_BOUND) {\n\t return aHigh < aHaystack.length ? aHigh : -1;\n\t } else {\n\t return mid;\n\t }\n\t }\n\t else {\n\t // Our needle is less than aHaystack[mid].\n\t if (mid - aLow > 1) {\n\t // The element is in the lower half.\n\t return recursiveSearch(aLow, mid, aNeedle, aHaystack, aCompare, aBias);\n\t }\n\t\n\t // we are in termination case (3) or (2) and return the appropriate thing.\n\t if (aBias == exports.LEAST_UPPER_BOUND) {\n\t return mid;\n\t } else {\n\t return aLow < 0 ? -1 : aLow;\n\t }\n\t }\n\t}\n\t\n\t/**\n\t * This is an implementation of binary search which will always try and return\n\t * the index of the closest element if there is no exact hit. This is because\n\t * mappings between original and generated line/col pairs are single points,\n\t * and there is an implicit region between each of them, so a miss just means\n\t * that you aren't on the very start of a region.\n\t *\n\t * @param aNeedle The element you are looking for.\n\t * @param aHaystack The array that is being searched.\n\t * @param aCompare A function which takes the needle and an element in the\n\t * array and returns -1, 0, or 1 depending on whether the needle is less\n\t * than, equal to, or greater than the element, respectively.\n\t * @param aBias Either 'binarySearch.GREATEST_LOWER_BOUND' or\n\t * 'binarySearch.LEAST_UPPER_BOUND'. 
Specifies whether to return the\n\t * closest element that is smaller than or greater than the one we are\n\t * searching for, respectively, if the exact element cannot be found.\n\t * Defaults to 'binarySearch.GREATEST_LOWER_BOUND'.\n\t */\n\texports.search = function search(aNeedle, aHaystack, aCompare, aBias) {\n\t if (aHaystack.length === 0) {\n\t return -1;\n\t }\n\t\n\t var index = recursiveSearch(-1, aHaystack.length, aNeedle, aHaystack,\n\t aCompare, aBias || exports.GREATEST_LOWER_BOUND);\n\t if (index < 0) {\n\t return -1;\n\t }\n\t\n\t // We have found either the exact element, or the next-closest element than\n\t // the one we are searching for. However, there may be more than one such\n\t // element. Make sure we always return the smallest of these.\n\t while (index - 1 >= 0) {\n\t if (aCompare(aHaystack[index], aHaystack[index - 1], true) !== 0) {\n\t break;\n\t }\n\t --index;\n\t }\n\t\n\t return index;\n\t};\n\n\n/***/ }),\n/* 9 */\n/***/ (function(module, exports) {\n\n\t/* -*- Mode: js; js-indent-level: 2; -*- */\n\t/*\n\t * Copyright 2011 Mozilla Foundation and contributors\n\t * Licensed under the New BSD license. See LICENSE or:\n\t * http://opensource.org/licenses/BSD-3-Clause\n\t */\n\t\n\t// It turns out that some (most?) JavaScript engines don't self-host\n\t// `Array.prototype.sort`. This makes sense because C++ will likely remain\n\t// faster than JS when doing raw CPU-intensive sorting. However, when using a\n\t// custom comparator function, calling back and forth between the VM's C++ and\n\t// JIT'd JS is rather slow *and* loses JIT type information, resulting in\n\t// worse generated code for the comparator function than would be optimal. In\n\t// fact, when sorting with a comparator, these costs outweigh the benefits of\n\t// sorting in C++. By using our own JS-implemented Quick Sort (below), we get\n\t// a ~3500ms mean speed-up in `bench/bench.html`.\n\t\n\t/**\n\t * Swap the elements indexed by `x` and `y` in the array `ary`.\n\t *\n\t * @param {Array} ary\n\t * The array.\n\t * @param {Number} x\n\t * The index of the first item.\n\t * @param {Number} y\n\t * The index of the second item.\n\t */\n\tfunction swap(ary, x, y) {\n\t var temp = ary[x];\n\t ary[x] = ary[y];\n\t ary[y] = temp;\n\t}\n\t\n\t/**\n\t * Returns a random integer within the range `low .. high` inclusive.\n\t *\n\t * @param {Number} low\n\t * The lower bound on the range.\n\t * @param {Number} high\n\t * The upper bound on the range.\n\t */\n\tfunction randomIntInRange(low, high) {\n\t return Math.round(low + (Math.random() * (high - low)));\n\t}\n\t\n\t/**\n\t * The Quick Sort algorithm.\n\t *\n\t * @param {Array} ary\n\t * An array to sort.\n\t * @param {function} comparator\n\t * Function to use to compare two items.\n\t * @param {Number} p\n\t * Start index of the array\n\t * @param {Number} r\n\t * End index of the array\n\t */\n\tfunction doQuickSort(ary, comparator, p, r) {\n\t // If our lower bound is less than our upper bound, we (1) partition the\n\t // array into two pieces and (2) recurse on each half. If it is not, this is\n\t // the empty array and our base case.\n\t\n\t if (p < r) {\n\t // (1) Partitioning.\n\t //\n\t // The partitioning chooses a pivot between `p` and `r` and moves all\n\t // elements that are less than or equal to the pivot to the before it, and\n\t // all the elements that are greater than it after it. 
The effect is that\n\t // once partition is done, the pivot is in the exact place it will be when\n\t // the array is put in sorted order, and it will not need to be moved\n\t // again. This runs in O(n) time.\n\t\n\t // Always choose a random pivot so that an input array which is reverse\n\t // sorted does not cause O(n^2) running time.\n\t var pivotIndex = randomIntInRange(p, r);\n\t var i = p - 1;\n\t\n\t swap(ary, pivotIndex, r);\n\t var pivot = ary[r];\n\t\n\t // Immediately after `j` is incremented in this loop, the following hold\n\t // true:\n\t //\n\t // * Every element in `ary[p .. i]` is less than or equal to the pivot.\n\t //\n\t // * Every element in `ary[i+1 .. j-1]` is greater than the pivot.\n\t for (var j = p; j < r; j++) {\n\t if (comparator(ary[j], pivot) <= 0) {\n\t i += 1;\n\t swap(ary, i, j);\n\t }\n\t }\n\t\n\t swap(ary, i + 1, j);\n\t var q = i + 1;\n\t\n\t // (2) Recurse on each half.\n\t\n\t doQuickSort(ary, comparator, p, q - 1);\n\t doQuickSort(ary, comparator, q + 1, r);\n\t }\n\t}\n\t\n\t/**\n\t * Sort the given array in-place with the given comparator function.\n\t *\n\t * @param {Array} ary\n\t * An array to sort.\n\t * @param {function} comparator\n\t * Function to use to compare two items.\n\t */\n\texports.quickSort = function (ary, comparator) {\n\t doQuickSort(ary, comparator, 0, ary.length - 1);\n\t};\n\n\n/***/ }),\n/* 10 */\n/***/ (function(module, exports, __webpack_require__) {\n\n\t/* -*- Mode: js; js-indent-level: 2; -*- */\n\t/*\n\t * Copyright 2011 Mozilla Foundation and contributors\n\t * Licensed under the New BSD license. See LICENSE or:\n\t * http://opensource.org/licenses/BSD-3-Clause\n\t */\n\t\n\tvar SourceMapGenerator = __webpack_require__(1).SourceMapGenerator;\n\tvar util = __webpack_require__(4);\n\t\n\t// Matches a Windows-style `\\r\\n` newline or a `\\n` newline used by all other\n\t// operating systems these days (capturing the result).\n\tvar REGEX_NEWLINE = /(\\r?\\n)/;\n\t\n\t// Newline character code for charCodeAt() comparisons\n\tvar NEWLINE_CODE = 10;\n\t\n\t// Private symbol for identifying `SourceNode`s when multiple versions of\n\t// the source-map library are loaded. This MUST NOT CHANGE across\n\t// versions!\n\tvar isSourceNode = \"$$$isSourceNode$$$\";\n\t\n\t/**\n\t * SourceNodes provide a way to abstract over interpolating/concatenating\n\t * snippets of generated JavaScript source code while maintaining the line and\n\t * column information associated with the original source code.\n\t *\n\t * @param aLine The original line number.\n\t * @param aColumn The original column number.\n\t * @param aSource The original source's filename.\n\t * @param aChunks Optional. An array of strings which are snippets of\n\t * generated JS, or other SourceNodes.\n\t * @param aName The original identifier.\n\t */\n\tfunction SourceNode(aLine, aColumn, aSource, aChunks, aName) {\n\t this.children = [];\n\t this.sourceContents = {};\n\t this.line = aLine == null ? null : aLine;\n\t this.column = aColumn == null ? null : aColumn;\n\t this.source = aSource == null ? null : aSource;\n\t this.name = aName == null ? null : aName;\n\t this[isSourceNode] = true;\n\t if (aChunks != null) this.add(aChunks);\n\t}\n\t\n\t/**\n\t * Creates a SourceNode from generated code and a SourceMapConsumer.\n\t *\n\t * @param aGeneratedCode The generated code\n\t * @param aSourceMapConsumer The SourceMap for the generated code\n\t * @param aRelativePath Optional. 
The path that relative sources in the\n\t * SourceMapConsumer should be relative to.\n\t */\n\tSourceNode.fromStringWithSourceMap =\n\t function SourceNode_fromStringWithSourceMap(aGeneratedCode, aSourceMapConsumer, aRelativePath) {\n\t // The SourceNode we want to fill with the generated code\n\t // and the SourceMap\n\t var node = new SourceNode();\n\t\n\t // All even indices of this array are one line of the generated code,\n\t // while all odd indices are the newlines between two adjacent lines\n\t // (since `REGEX_NEWLINE` captures its match).\n\t // Processed fragments are accessed by calling `shiftNextLine`.\n\t var remainingLines = aGeneratedCode.split(REGEX_NEWLINE);\n\t var remainingLinesIndex = 0;\n\t var shiftNextLine = function() {\n\t var lineContents = getNextLine();\n\t // The last line of a file might not have a newline.\n\t var newLine = getNextLine() || \"\";\n\t return lineContents + newLine;\n\t\n\t function getNextLine() {\n\t return remainingLinesIndex < remainingLines.length ?\n\t remainingLines[remainingLinesIndex++] : undefined;\n\t }\n\t };\n\t\n\t // We need to remember the position of \"remainingLines\"\n\t var lastGeneratedLine = 1, lastGeneratedColumn = 0;\n\t\n\t // The generate SourceNodes we need a code range.\n\t // To extract it current and last mapping is used.\n\t // Here we store the last mapping.\n\t var lastMapping = null;\n\t\n\t aSourceMapConsumer.eachMapping(function (mapping) {\n\t if (lastMapping !== null) {\n\t // We add the code from \"lastMapping\" to \"mapping\":\n\t // First check if there is a new line in between.\n\t if (lastGeneratedLine < mapping.generatedLine) {\n\t // Associate first line with \"lastMapping\"\n\t addMappingWithCode(lastMapping, shiftNextLine());\n\t lastGeneratedLine++;\n\t lastGeneratedColumn = 0;\n\t // The remaining code is added without mapping\n\t } else {\n\t // There is no new line in between.\n\t // Associate the code between \"lastGeneratedColumn\" and\n\t // \"mapping.generatedColumn\" with \"lastMapping\"\n\t var nextLine = remainingLines[remainingLinesIndex] || '';\n\t var code = nextLine.substr(0, mapping.generatedColumn -\n\t lastGeneratedColumn);\n\t remainingLines[remainingLinesIndex] = nextLine.substr(mapping.generatedColumn -\n\t lastGeneratedColumn);\n\t lastGeneratedColumn = mapping.generatedColumn;\n\t addMappingWithCode(lastMapping, code);\n\t // No more remaining code, continue\n\t lastMapping = mapping;\n\t return;\n\t }\n\t }\n\t // We add the generated code until the first mapping\n\t // to the SourceNode without any mapping.\n\t // Each line is added as separate string.\n\t while (lastGeneratedLine < mapping.generatedLine) {\n\t node.add(shiftNextLine());\n\t lastGeneratedLine++;\n\t }\n\t if (lastGeneratedColumn < mapping.generatedColumn) {\n\t var nextLine = remainingLines[remainingLinesIndex] || '';\n\t node.add(nextLine.substr(0, mapping.generatedColumn));\n\t remainingLines[remainingLinesIndex] = nextLine.substr(mapping.generatedColumn);\n\t lastGeneratedColumn = mapping.generatedColumn;\n\t }\n\t lastMapping = mapping;\n\t }, this);\n\t // We have processed all mappings.\n\t if (remainingLinesIndex < remainingLines.length) {\n\t if (lastMapping) {\n\t // Associate the remaining code in the current line with \"lastMapping\"\n\t addMappingWithCode(lastMapping, shiftNextLine());\n\t }\n\t // and add the remaining lines without any mapping\n\t node.add(remainingLines.splice(remainingLinesIndex).join(\"\"));\n\t }\n\t\n\t // Copy sourcesContent into SourceNode\n\t 
aSourceMapConsumer.sources.forEach(function (sourceFile) {\n\t var content = aSourceMapConsumer.sourceContentFor(sourceFile);\n\t if (content != null) {\n\t if (aRelativePath != null) {\n\t sourceFile = util.join(aRelativePath, sourceFile);\n\t }\n\t node.setSourceContent(sourceFile, content);\n\t }\n\t });\n\t\n\t return node;\n\t\n\t function addMappingWithCode(mapping, code) {\n\t if (mapping === null || mapping.source === undefined) {\n\t node.add(code);\n\t } else {\n\t var source = aRelativePath\n\t ? util.join(aRelativePath, mapping.source)\n\t : mapping.source;\n\t node.add(new SourceNode(mapping.originalLine,\n\t mapping.originalColumn,\n\t source,\n\t code,\n\t mapping.name));\n\t }\n\t }\n\t };\n\t\n\t/**\n\t * Add a chunk of generated JS to this source node.\n\t *\n\t * @param aChunk A string snippet of generated JS code, another instance of\n\t * SourceNode, or an array where each member is one of those things.\n\t */\n\tSourceNode.prototype.add = function SourceNode_add(aChunk) {\n\t if (Array.isArray(aChunk)) {\n\t aChunk.forEach(function (chunk) {\n\t this.add(chunk);\n\t }, this);\n\t }\n\t else if (aChunk[isSourceNode] || typeof aChunk === \"string\") {\n\t if (aChunk) {\n\t this.children.push(aChunk);\n\t }\n\t }\n\t else {\n\t throw new TypeError(\n\t \"Expected a SourceNode, string, or an array of SourceNodes and strings. Got \" + aChunk\n\t );\n\t }\n\t return this;\n\t};\n\t\n\t/**\n\t * Add a chunk of generated JS to the beginning of this source node.\n\t *\n\t * @param aChunk A string snippet of generated JS code, another instance of\n\t * SourceNode, or an array where each member is one of those things.\n\t */\n\tSourceNode.prototype.prepend = function SourceNode_prepend(aChunk) {\n\t if (Array.isArray(aChunk)) {\n\t for (var i = aChunk.length-1; i >= 0; i--) {\n\t this.prepend(aChunk[i]);\n\t }\n\t }\n\t else if (aChunk[isSourceNode] || typeof aChunk === \"string\") {\n\t this.children.unshift(aChunk);\n\t }\n\t else {\n\t throw new TypeError(\n\t \"Expected a SourceNode, string, or an array of SourceNodes and strings. Got \" + aChunk\n\t );\n\t }\n\t return this;\n\t};\n\t\n\t/**\n\t * Walk over the tree of JS snippets in this node and its children. The\n\t * walking function is called once for each snippet of JS and is passed that\n\t * snippet and the its original associated source's line/column location.\n\t *\n\t * @param aFn The traversal function.\n\t */\n\tSourceNode.prototype.walk = function SourceNode_walk(aFn) {\n\t var chunk;\n\t for (var i = 0, len = this.children.length; i < len; i++) {\n\t chunk = this.children[i];\n\t if (chunk[isSourceNode]) {\n\t chunk.walk(aFn);\n\t }\n\t else {\n\t if (chunk !== '') {\n\t aFn(chunk, { source: this.source,\n\t line: this.line,\n\t column: this.column,\n\t name: this.name });\n\t }\n\t }\n\t }\n\t};\n\t\n\t/**\n\t * Like `String.prototype.join` except for SourceNodes. Inserts `aStr` between\n\t * each of `this.children`.\n\t *\n\t * @param aSep The separator.\n\t */\n\tSourceNode.prototype.join = function SourceNode_join(aSep) {\n\t var newChildren;\n\t var i;\n\t var len = this.children.length;\n\t if (len > 0) {\n\t newChildren = [];\n\t for (i = 0; i < len-1; i++) {\n\t newChildren.push(this.children[i]);\n\t newChildren.push(aSep);\n\t }\n\t newChildren.push(this.children[i]);\n\t this.children = newChildren;\n\t }\n\t return this;\n\t};\n\t\n\t/**\n\t * Call String.prototype.replace on the very right-most source snippet. 
Useful\n\t * for trimming whitespace from the end of a source node, etc.\n\t *\n\t * @param aPattern The pattern to replace.\n\t * @param aReplacement The thing to replace the pattern with.\n\t */\n\tSourceNode.prototype.replaceRight = function SourceNode_replaceRight(aPattern, aReplacement) {\n\t var lastChild = this.children[this.children.length - 1];\n\t if (lastChild[isSourceNode]) {\n\t lastChild.replaceRight(aPattern, aReplacement);\n\t }\n\t else if (typeof lastChild === 'string') {\n\t this.children[this.children.length - 1] = lastChild.replace(aPattern, aReplacement);\n\t }\n\t else {\n\t this.children.push(''.replace(aPattern, aReplacement));\n\t }\n\t return this;\n\t};\n\t\n\t/**\n\t * Set the source content for a source file. This will be added to the SourceMapGenerator\n\t * in the sourcesContent field.\n\t *\n\t * @param aSourceFile The filename of the source file\n\t * @param aSourceContent The content of the source file\n\t */\n\tSourceNode.prototype.setSourceContent =\n\t function SourceNode_setSourceContent(aSourceFile, aSourceContent) {\n\t this.sourceContents[util.toSetString(aSourceFile)] = aSourceContent;\n\t };\n\t\n\t/**\n\t * Walk over the tree of SourceNodes. The walking function is called for each\n\t * source file content and is passed the filename and source content.\n\t *\n\t * @param aFn The traversal function.\n\t */\n\tSourceNode.prototype.walkSourceContents =\n\t function SourceNode_walkSourceContents(aFn) {\n\t for (var i = 0, len = this.children.length; i < len; i++) {\n\t if (this.children[i][isSourceNode]) {\n\t this.children[i].walkSourceContents(aFn);\n\t }\n\t }\n\t\n\t var sources = Object.keys(this.sourceContents);\n\t for (var i = 0, len = sources.length; i < len; i++) {\n\t aFn(util.fromSetString(sources[i]), this.sourceContents[sources[i]]);\n\t }\n\t };\n\t\n\t/**\n\t * Return the string representation of this source node. 
Walks over the tree\n\t * and concatenates all the various snippets together to one string.\n\t */\n\tSourceNode.prototype.toString = function SourceNode_toString() {\n\t var str = \"\";\n\t this.walk(function (chunk) {\n\t str += chunk;\n\t });\n\t return str;\n\t};\n\t\n\t/**\n\t * Returns the string representation of this source node along with a source\n\t * map.\n\t */\n\tSourceNode.prototype.toStringWithSourceMap = function SourceNode_toStringWithSourceMap(aArgs) {\n\t var generated = {\n\t code: \"\",\n\t line: 1,\n\t column: 0\n\t };\n\t var map = new SourceMapGenerator(aArgs);\n\t var sourceMappingActive = false;\n\t var lastOriginalSource = null;\n\t var lastOriginalLine = null;\n\t var lastOriginalColumn = null;\n\t var lastOriginalName = null;\n\t this.walk(function (chunk, original) {\n\t generated.code += chunk;\n\t if (original.source !== null\n\t && original.line !== null\n\t && original.column !== null) {\n\t if(lastOriginalSource !== original.source\n\t || lastOriginalLine !== original.line\n\t || lastOriginalColumn !== original.column\n\t || lastOriginalName !== original.name) {\n\t map.addMapping({\n\t source: original.source,\n\t original: {\n\t line: original.line,\n\t column: original.column\n\t },\n\t generated: {\n\t line: generated.line,\n\t column: generated.column\n\t },\n\t name: original.name\n\t });\n\t }\n\t lastOriginalSource = original.source;\n\t lastOriginalLine = original.line;\n\t lastOriginalColumn = original.column;\n\t lastOriginalName = original.name;\n\t sourceMappingActive = true;\n\t } else if (sourceMappingActive) {\n\t map.addMapping({\n\t generated: {\n\t line: generated.line,\n\t column: generated.column\n\t }\n\t });\n\t lastOriginalSource = null;\n\t sourceMappingActive = false;\n\t }\n\t for (var idx = 0, length = chunk.length; idx < length; idx++) {\n\t if (chunk.charCodeAt(idx) === NEWLINE_CODE) {\n\t generated.line++;\n\t generated.column = 0;\n\t // Mappings end at eol\n\t if (idx + 1 === length) {\n\t lastOriginalSource = null;\n\t sourceMappingActive = false;\n\t } else if (sourceMappingActive) {\n\t map.addMapping({\n\t source: original.source,\n\t original: {\n\t line: original.line,\n\t column: original.column\n\t },\n\t generated: {\n\t line: generated.line,\n\t column: generated.column\n\t },\n\t name: original.name\n\t });\n\t }\n\t } else {\n\t generated.column++;\n\t }\n\t }\n\t });\n\t this.walkSourceContents(function (sourceFile, sourceContent) {\n\t map.setSourceContent(sourceFile, sourceContent);\n\t });\n\t\n\t return { code: generated.code, map: map };\n\t};\n\t\n\texports.SourceNode = SourceNode;\n\n\n/***/ })\n/******/ ])\n});\n;\n\n\n// WEBPACK FOOTER //\n// source-map.min.js"," \t// The module cache\n \tvar installedModules = {};\n\n \t// The require function\n \tfunction __webpack_require__(moduleId) {\n\n \t\t// Check if module is in cache\n \t\tif(installedModules[moduleId])\n \t\t\treturn installedModules[moduleId].exports;\n\n \t\t// Create a new module (and put it into the cache)\n \t\tvar module = installedModules[moduleId] = {\n \t\t\texports: {},\n \t\t\tid: moduleId,\n \t\t\tloaded: false\n \t\t};\n\n \t\t// Execute the module function\n \t\tmodules[moduleId].call(module.exports, module, module.exports, __webpack_require__);\n\n \t\t// Flag the module as loaded\n \t\tmodule.loaded = true;\n\n \t\t// Return the exports of the module\n \t\treturn module.exports;\n \t}\n\n\n \t// expose the modules object (__webpack_modules__)\n \t__webpack_require__.m = modules;\n\n \t// expose the module cache\n 
\t__webpack_require__.c = installedModules;\n\n \t// __webpack_public_path__\n \t__webpack_require__.p = \"\";\n\n \t// Load entry module and return exports\n \treturn __webpack_require__(0);\n\n\n\n// WEBPACK FOOTER //\n// webpack/bootstrap 0fd5815da764db5fb9fe","/*\n * Copyright 2009-2011 Mozilla Foundation and contributors\n * Licensed under the New BSD license. See LICENSE.txt or:\n * http://opensource.org/licenses/BSD-3-Clause\n */\nexports.SourceMapGenerator = require('./lib/source-map-generator').SourceMapGenerator;\nexports.SourceMapConsumer = require('./lib/source-map-consumer').SourceMapConsumer;\nexports.SourceNode = require('./lib/source-node').SourceNode;\n\n\n\n//////////////////\n// WEBPACK FOOTER\n// ./source-map.js\n// module id = 0\n// module chunks = 0","/* -*- Mode: js; js-indent-level: 2; -*- */\n/*\n * Copyright 2011 Mozilla Foundation and contributors\n * Licensed under the New BSD license. See LICENSE or:\n * http://opensource.org/licenses/BSD-3-Clause\n */\n\nvar base64VLQ = require('./base64-vlq');\nvar util = require('./util');\nvar ArraySet = require('./array-set').ArraySet;\nvar MappingList = require('./mapping-list').MappingList;\n\n/**\n * An instance of the SourceMapGenerator represents a source map which is\n * being built incrementally. You may pass an object with the following\n * properties:\n *\n * - file: The filename of the generated source.\n * - sourceRoot: A root for all relative URLs in this source map.\n */\nfunction SourceMapGenerator(aArgs) {\n if (!aArgs) {\n aArgs = {};\n }\n this._file = util.getArg(aArgs, 'file', null);\n this._sourceRoot = util.getArg(aArgs, 'sourceRoot', null);\n this._skipValidation = util.getArg(aArgs, 'skipValidation', false);\n this._sources = new ArraySet();\n this._names = new ArraySet();\n this._mappings = new MappingList();\n this._sourcesContents = null;\n}\n\nSourceMapGenerator.prototype._version = 3;\n\n/**\n * Creates a new SourceMapGenerator based on a SourceMapConsumer\n *\n * @param aSourceMapConsumer The SourceMap.\n */\nSourceMapGenerator.fromSourceMap =\n function SourceMapGenerator_fromSourceMap(aSourceMapConsumer) {\n var sourceRoot = aSourceMapConsumer.sourceRoot;\n var generator = new SourceMapGenerator({\n file: aSourceMapConsumer.file,\n sourceRoot: sourceRoot\n });\n aSourceMapConsumer.eachMapping(function (mapping) {\n var newMapping = {\n generated: {\n line: mapping.generatedLine,\n column: mapping.generatedColumn\n }\n };\n\n if (mapping.source != null) {\n newMapping.source = mapping.source;\n if (sourceRoot != null) {\n newMapping.source = util.relative(sourceRoot, newMapping.source);\n }\n\n newMapping.original = {\n line: mapping.originalLine,\n column: mapping.originalColumn\n };\n\n if (mapping.name != null) {\n newMapping.name = mapping.name;\n }\n }\n\n generator.addMapping(newMapping);\n });\n aSourceMapConsumer.sources.forEach(function (sourceFile) {\n var sourceRelative = sourceFile;\n if (sourceRoot !== null) {\n sourceRelative = util.relative(sourceRoot, sourceFile);\n }\n\n if (!generator._sources.has(sourceRelative)) {\n generator._sources.add(sourceRelative);\n }\n\n var content = aSourceMapConsumer.sourceContentFor(sourceFile);\n if (content != null) {\n generator.setSourceContent(sourceFile, content);\n }\n });\n return generator;\n };\n\n/**\n * Add a single mapping from original source line and column to the generated\n * source's line and column for this source map being created. 
The mapping\n * object should have the following properties:\n *\n * - generated: An object with the generated line and column positions.\n * - original: An object with the original line and column positions.\n * - source: The original source file (relative to the sourceRoot).\n * - name: An optional original token name for this mapping.\n */\nSourceMapGenerator.prototype.addMapping =\n function SourceMapGenerator_addMapping(aArgs) {\n var generated = util.getArg(aArgs, 'generated');\n var original = util.getArg(aArgs, 'original', null);\n var source = util.getArg(aArgs, 'source', null);\n var name = util.getArg(aArgs, 'name', null);\n\n if (!this._skipValidation) {\n this._validateMapping(generated, original, source, name);\n }\n\n if (source != null) {\n source = String(source);\n if (!this._sources.has(source)) {\n this._sources.add(source);\n }\n }\n\n if (name != null) {\n name = String(name);\n if (!this._names.has(name)) {\n this._names.add(name);\n }\n }\n\n this._mappings.add({\n generatedLine: generated.line,\n generatedColumn: generated.column,\n originalLine: original != null && original.line,\n originalColumn: original != null && original.column,\n source: source,\n name: name\n });\n };\n\n/**\n * Set the source content for a source file.\n */\nSourceMapGenerator.prototype.setSourceContent =\n function SourceMapGenerator_setSourceContent(aSourceFile, aSourceContent) {\n var source = aSourceFile;\n if (this._sourceRoot != null) {\n source = util.relative(this._sourceRoot, source);\n }\n\n if (aSourceContent != null) {\n // Add the source content to the _sourcesContents map.\n // Create a new _sourcesContents map if the property is null.\n if (!this._sourcesContents) {\n this._sourcesContents = Object.create(null);\n }\n this._sourcesContents[util.toSetString(source)] = aSourceContent;\n } else if (this._sourcesContents) {\n // Remove the source file from the _sourcesContents map.\n // If the _sourcesContents map is empty, set the property to null.\n delete this._sourcesContents[util.toSetString(source)];\n if (Object.keys(this._sourcesContents).length === 0) {\n this._sourcesContents = null;\n }\n }\n };\n\n/**\n * Applies the mappings of a sub-source-map for a specific source file to the\n * source map being generated. Each mapping to the supplied source file is\n * rewritten using the supplied source map. Note: The resolution for the\n * resulting mappings is the minimium of this map and the supplied map.\n *\n * @param aSourceMapConsumer The source map to be applied.\n * @param aSourceFile Optional. The filename of the source file.\n * If omitted, SourceMapConsumer's file property will be used.\n * @param aSourceMapPath Optional. The dirname of the path to the source map\n * to be applied. If relative, it is relative to the SourceMapConsumer.\n * This parameter is needed when the two source maps aren't in the same\n * directory, and the source map to be applied contains relative source\n * paths. If so, those relative source paths need to be rewritten\n * relative to the SourceMapGenerator.\n */\nSourceMapGenerator.prototype.applySourceMap =\n function SourceMapGenerator_applySourceMap(aSourceMapConsumer, aSourceFile, aSourceMapPath) {\n var sourceFile = aSourceFile;\n // If aSourceFile is omitted, we will use the file property of the SourceMap\n if (aSourceFile == null) {\n if (aSourceMapConsumer.file == null) {\n throw new Error(\n 'SourceMapGenerator.prototype.applySourceMap requires either an explicit source file, ' +\n 'or the source map\\'s \"file\" property. 
Both were omitted.'\n );\n }\n sourceFile = aSourceMapConsumer.file;\n }\n var sourceRoot = this._sourceRoot;\n // Make \"sourceFile\" relative if an absolute Url is passed.\n if (sourceRoot != null) {\n sourceFile = util.relative(sourceRoot, sourceFile);\n }\n // Applying the SourceMap can add and remove items from the sources and\n // the names array.\n var newSources = new ArraySet();\n var newNames = new ArraySet();\n\n // Find mappings for the \"sourceFile\"\n this._mappings.unsortedForEach(function (mapping) {\n if (mapping.source === sourceFile && mapping.originalLine != null) {\n // Check if it can be mapped by the source map, then update the mapping.\n var original = aSourceMapConsumer.originalPositionFor({\n line: mapping.originalLine,\n column: mapping.originalColumn\n });\n if (original.source != null) {\n // Copy mapping\n mapping.source = original.source;\n if (aSourceMapPath != null) {\n mapping.source = util.join(aSourceMapPath, mapping.source)\n }\n if (sourceRoot != null) {\n mapping.source = util.relative(sourceRoot, mapping.source);\n }\n mapping.originalLine = original.line;\n mapping.originalColumn = original.column;\n if (original.name != null) {\n mapping.name = original.name;\n }\n }\n }\n\n var source = mapping.source;\n if (source != null && !newSources.has(source)) {\n newSources.add(source);\n }\n\n var name = mapping.name;\n if (name != null && !newNames.has(name)) {\n newNames.add(name);\n }\n\n }, this);\n this._sources = newSources;\n this._names = newNames;\n\n // Copy sourcesContents of applied map.\n aSourceMapConsumer.sources.forEach(function (sourceFile) {\n var content = aSourceMapConsumer.sourceContentFor(sourceFile);\n if (content != null) {\n if (aSourceMapPath != null) {\n sourceFile = util.join(aSourceMapPath, sourceFile);\n }\n if (sourceRoot != null) {\n sourceFile = util.relative(sourceRoot, sourceFile);\n }\n this.setSourceContent(sourceFile, content);\n }\n }, this);\n };\n\n/**\n * A mapping can have one of the three levels of data:\n *\n * 1. Just the generated position.\n * 2. The Generated position, original position, and original source.\n * 3. Generated and original position, original source, as well as a name\n * token.\n *\n * To maintain consistency, we validate that any new mapping being added falls\n * in to one of these categories.\n */\nSourceMapGenerator.prototype._validateMapping =\n function SourceMapGenerator_validateMapping(aGenerated, aOriginal, aSource,\n aName) {\n // When aOriginal is truthy but has empty values for .line and .column,\n // it is most likely a programmer error. In this case we throw a very\n // specific error message to try to guide them the right way.\n // For example: https://github.com/Polymer/polymer-bundler/pull/519\n if (aOriginal && typeof aOriginal.line !== 'number' && typeof aOriginal.column !== 'number') {\n throw new Error(\n 'original.line and original.column are not numbers -- you probably meant to omit ' +\n 'the original mapping entirely and only map the generated position. 
If so, pass ' +\n 'null for the original mapping instead of an object with empty or null values.'\n );\n }\n\n if (aGenerated && 'line' in aGenerated && 'column' in aGenerated\n && aGenerated.line > 0 && aGenerated.column >= 0\n && !aOriginal && !aSource && !aName) {\n // Case 1.\n return;\n }\n else if (aGenerated && 'line' in aGenerated && 'column' in aGenerated\n && aOriginal && 'line' in aOriginal && 'column' in aOriginal\n && aGenerated.line > 0 && aGenerated.column >= 0\n && aOriginal.line > 0 && aOriginal.column >= 0\n && aSource) {\n // Cases 2 and 3.\n return;\n }\n else {\n throw new Error('Invalid mapping: ' + JSON.stringify({\n generated: aGenerated,\n source: aSource,\n original: aOriginal,\n name: aName\n }));\n }\n };\n\n/**\n * Serialize the accumulated mappings in to the stream of base 64 VLQs\n * specified by the source map format.\n */\nSourceMapGenerator.prototype._serializeMappings =\n function SourceMapGenerator_serializeMappings() {\n var previousGeneratedColumn = 0;\n var previousGeneratedLine = 1;\n var previousOriginalColumn = 0;\n var previousOriginalLine = 0;\n var previousName = 0;\n var previousSource = 0;\n var result = '';\n var next;\n var mapping;\n var nameIdx;\n var sourceIdx;\n\n var mappings = this._mappings.toArray();\n for (var i = 0, len = mappings.length; i < len; i++) {\n mapping = mappings[i];\n next = ''\n\n if (mapping.generatedLine !== previousGeneratedLine) {\n previousGeneratedColumn = 0;\n while (mapping.generatedLine !== previousGeneratedLine) {\n next += ';';\n previousGeneratedLine++;\n }\n }\n else {\n if (i > 0) {\n if (!util.compareByGeneratedPositionsInflated(mapping, mappings[i - 1])) {\n continue;\n }\n next += ',';\n }\n }\n\n next += base64VLQ.encode(mapping.generatedColumn\n - previousGeneratedColumn);\n previousGeneratedColumn = mapping.generatedColumn;\n\n if (mapping.source != null) {\n sourceIdx = this._sources.indexOf(mapping.source);\n next += base64VLQ.encode(sourceIdx - previousSource);\n previousSource = sourceIdx;\n\n // lines are stored 0-based in SourceMap spec version 3\n next += base64VLQ.encode(mapping.originalLine - 1\n - previousOriginalLine);\n previousOriginalLine = mapping.originalLine - 1;\n\n next += base64VLQ.encode(mapping.originalColumn\n - previousOriginalColumn);\n previousOriginalColumn = mapping.originalColumn;\n\n if (mapping.name != null) {\n nameIdx = this._names.indexOf(mapping.name);\n next += base64VLQ.encode(nameIdx - previousName);\n previousName = nameIdx;\n }\n }\n\n result += next;\n }\n\n return result;\n };\n\nSourceMapGenerator.prototype._generateSourcesContent =\n function SourceMapGenerator_generateSourcesContent(aSources, aSourceRoot) {\n return aSources.map(function (source) {\n if (!this._sourcesContents) {\n return null;\n }\n if (aSourceRoot != null) {\n source = util.relative(aSourceRoot, source);\n }\n var key = util.toSetString(source);\n return Object.prototype.hasOwnProperty.call(this._sourcesContents, key)\n ? 
this._sourcesContents[key]\n : null;\n }, this);\n };\n\n/**\n * Externalize the source map.\n */\nSourceMapGenerator.prototype.toJSON =\n function SourceMapGenerator_toJSON() {\n var map = {\n version: this._version,\n sources: this._sources.toArray(),\n names: this._names.toArray(),\n mappings: this._serializeMappings()\n };\n if (this._file != null) {\n map.file = this._file;\n }\n if (this._sourceRoot != null) {\n map.sourceRoot = this._sourceRoot;\n }\n if (this._sourcesContents) {\n map.sourcesContent = this._generateSourcesContent(map.sources, map.sourceRoot);\n }\n\n return map;\n };\n\n/**\n * Render the source map being generated to a string.\n */\nSourceMapGenerator.prototype.toString =\n function SourceMapGenerator_toString() {\n return JSON.stringify(this.toJSON());\n };\n\nexports.SourceMapGenerator = SourceMapGenerator;\n\n\n\n//////////////////\n// WEBPACK FOOTER\n// ./lib/source-map-generator.js\n// module id = 1\n// module chunks = 0","/* -*- Mode: js; js-indent-level: 2; -*- */\n/*\n * Copyright 2011 Mozilla Foundation and contributors\n * Licensed under the New BSD license. See LICENSE or:\n * http://opensource.org/licenses/BSD-3-Clause\n *\n * Based on the Base 64 VLQ implementation in Closure Compiler:\n * https://code.google.com/p/closure-compiler/source/browse/trunk/src/com/google/debugging/sourcemap/Base64VLQ.java\n *\n * Copyright 2011 The Closure Compiler Authors. All rights reserved.\n * Redistribution and use in source and binary forms, with or without\n * modification, are permitted provided that the following conditions are\n * met:\n *\n * * Redistributions of source code must retain the above copyright\n * notice, this list of conditions and the following disclaimer.\n * * Redistributions in binary form must reproduce the above\n * copyright notice, this list of conditions and the following\n * disclaimer in the documentation and/or other materials provided\n * with the distribution.\n * * Neither the name of Google Inc. nor the names of its\n * contributors may be used to endorse or promote products derived\n * from this software without specific prior written permission.\n *\n * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n * \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n */\n\nvar base64 = require('./base64');\n\n// A single base 64 digit can contain 6 bits of data. For the base 64 variable\n// length quantities we use in the source map spec, the first bit is the sign,\n// the next four bits are the actual value, and the 6th bit is the\n// continuation bit. 
The continuation bit tells us whether there are more\n// digits in this value following this digit.\n//\n// Continuation\n// | Sign\n// | |\n// V V\n// 101011\n\nvar VLQ_BASE_SHIFT = 5;\n\n// binary: 100000\nvar VLQ_BASE = 1 << VLQ_BASE_SHIFT;\n\n// binary: 011111\nvar VLQ_BASE_MASK = VLQ_BASE - 1;\n\n// binary: 100000\nvar VLQ_CONTINUATION_BIT = VLQ_BASE;\n\n/**\n * Converts from a two-complement value to a value where the sign bit is\n * placed in the least significant bit. For example, as decimals:\n * 1 becomes 2 (10 binary), -1 becomes 3 (11 binary)\n * 2 becomes 4 (100 binary), -2 becomes 5 (101 binary)\n */\nfunction toVLQSigned(aValue) {\n return aValue < 0\n ? ((-aValue) << 1) + 1\n : (aValue << 1) + 0;\n}\n\n/**\n * Converts to a two-complement value from a value where the sign bit is\n * placed in the least significant bit. For example, as decimals:\n * 2 (10 binary) becomes 1, 3 (11 binary) becomes -1\n * 4 (100 binary) becomes 2, 5 (101 binary) becomes -2\n */\nfunction fromVLQSigned(aValue) {\n var isNegative = (aValue & 1) === 1;\n var shifted = aValue >> 1;\n return isNegative\n ? -shifted\n : shifted;\n}\n\n/**\n * Returns the base 64 VLQ encoded value.\n */\nexports.encode = function base64VLQ_encode(aValue) {\n var encoded = \"\";\n var digit;\n\n var vlq = toVLQSigned(aValue);\n\n do {\n digit = vlq & VLQ_BASE_MASK;\n vlq >>>= VLQ_BASE_SHIFT;\n if (vlq > 0) {\n // There are still more digits in this value, so we must make sure the\n // continuation bit is marked.\n digit |= VLQ_CONTINUATION_BIT;\n }\n encoded += base64.encode(digit);\n } while (vlq > 0);\n\n return encoded;\n};\n\n/**\n * Decodes the next base 64 VLQ value from the given string and returns the\n * value and the rest of the string via the out parameter.\n */\nexports.decode = function base64VLQ_decode(aStr, aIndex, aOutParam) {\n var strLen = aStr.length;\n var result = 0;\n var shift = 0;\n var continuation, digit;\n\n do {\n if (aIndex >= strLen) {\n throw new Error(\"Expected more digits in base 64 VLQ value.\");\n }\n\n digit = base64.decode(aStr.charCodeAt(aIndex++));\n if (digit === -1) {\n throw new Error(\"Invalid base64 digit: \" + aStr.charAt(aIndex - 1));\n }\n\n continuation = !!(digit & VLQ_CONTINUATION_BIT);\n digit &= VLQ_BASE_MASK;\n result = result + (digit << shift);\n shift += VLQ_BASE_SHIFT;\n } while (continuation);\n\n aOutParam.value = fromVLQSigned(result);\n aOutParam.rest = aIndex;\n};\n\n\n\n//////////////////\n// WEBPACK FOOTER\n// ./lib/base64-vlq.js\n// module id = 2\n// module chunks = 0","/* -*- Mode: js; js-indent-level: 2; -*- */\n/*\n * Copyright 2011 Mozilla Foundation and contributors\n * Licensed under the New BSD license. See LICENSE or:\n * http://opensource.org/licenses/BSD-3-Clause\n */\n\nvar intToCharMap = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'.split('');\n\n/**\n * Encode an integer in the range of 0 to 63 to a single base 64 digit.\n */\nexports.encode = function (number) {\n if (0 <= number && number < intToCharMap.length) {\n return intToCharMap[number];\n }\n throw new TypeError(\"Must be between 0 and 63: \" + number);\n};\n\n/**\n * Decode a single base 64 character code digit to an integer. 
Returns -1 on\n * failure.\n */\nexports.decode = function (charCode) {\n var bigA = 65; // 'A'\n var bigZ = 90; // 'Z'\n\n var littleA = 97; // 'a'\n var littleZ = 122; // 'z'\n\n var zero = 48; // '0'\n var nine = 57; // '9'\n\n var plus = 43; // '+'\n var slash = 47; // '/'\n\n var littleOffset = 26;\n var numberOffset = 52;\n\n // 0 - 25: ABCDEFGHIJKLMNOPQRSTUVWXYZ\n if (bigA <= charCode && charCode <= bigZ) {\n return (charCode - bigA);\n }\n\n // 26 - 51: abcdefghijklmnopqrstuvwxyz\n if (littleA <= charCode && charCode <= littleZ) {\n return (charCode - littleA + littleOffset);\n }\n\n // 52 - 61: 0123456789\n if (zero <= charCode && charCode <= nine) {\n return (charCode - zero + numberOffset);\n }\n\n // 62: +\n if (charCode == plus) {\n return 62;\n }\n\n // 63: /\n if (charCode == slash) {\n return 63;\n }\n\n // Invalid base64 digit.\n return -1;\n};\n\n\n\n//////////////////\n// WEBPACK FOOTER\n// ./lib/base64.js\n// module id = 3\n// module chunks = 0","/* -*- Mode: js; js-indent-level: 2; -*- */\n/*\n * Copyright 2011 Mozilla Foundation and contributors\n * Licensed under the New BSD license. See LICENSE or:\n * http://opensource.org/licenses/BSD-3-Clause\n */\n\n/**\n * This is a helper function for getting values from parameter/options\n * objects.\n *\n * @param args The object we are extracting values from\n * @param name The name of the property we are getting.\n * @param defaultValue An optional value to return if the property is missing\n * from the object. If this is not specified and the property is missing, an\n * error will be thrown.\n */\nfunction getArg(aArgs, aName, aDefaultValue) {\n if (aName in aArgs) {\n return aArgs[aName];\n } else if (arguments.length === 3) {\n return aDefaultValue;\n } else {\n throw new Error('\"' + aName + '\" is a required argument.');\n }\n}\nexports.getArg = getArg;\n\nvar urlRegexp = /^(?:([\\w+\\-.]+):)?\\/\\/(?:(\\w+:\\w+)@)?([\\w.-]*)(?::(\\d+))?(.*)$/;\nvar dataUrlRegexp = /^data:.+\\,.+$/;\n\nfunction urlParse(aUrl) {\n var match = aUrl.match(urlRegexp);\n if (!match) {\n return null;\n }\n return {\n scheme: match[1],\n auth: match[2],\n host: match[3],\n port: match[4],\n path: match[5]\n };\n}\nexports.urlParse = urlParse;\n\nfunction urlGenerate(aParsedUrl) {\n var url = '';\n if (aParsedUrl.scheme) {\n url += aParsedUrl.scheme + ':';\n }\n url += '//';\n if (aParsedUrl.auth) {\n url += aParsedUrl.auth + '@';\n }\n if (aParsedUrl.host) {\n url += aParsedUrl.host;\n }\n if (aParsedUrl.port) {\n url += \":\" + aParsedUrl.port\n }\n if (aParsedUrl.path) {\n url += aParsedUrl.path;\n }\n return url;\n}\nexports.urlGenerate = urlGenerate;\n\n/**\n * Normalizes a path, or the path portion of a URL:\n *\n * - Replaces consecutive slashes with one slash.\n * - Removes unnecessary '.' parts.\n * - Removes unnecessary '/..' parts.\n *\n * Based on code in the Node.js 'path' core module.\n *\n * @param aPath The path or url to normalize.\n */\nfunction normalize(aPath) {\n var path = aPath;\n var url = urlParse(aPath);\n if (url) {\n if (!url.path) {\n return aPath;\n }\n path = url.path;\n }\n var isAbsolute = exports.isAbsolute(path);\n\n var parts = path.split(/\\/+/);\n for (var part, up = 0, i = parts.length - 1; i >= 0; i--) {\n part = parts[i];\n if (part === '.') {\n parts.splice(i, 1);\n } else if (part === '..') {\n up++;\n } else if (up > 0) {\n if (part === '') {\n // The first part is blank if the path is absolute. Trying to go\n // above the root is a no-op. Therefore we can remove all '..' 
parts\n // directly after the root.\n parts.splice(i + 1, up);\n up = 0;\n } else {\n parts.splice(i, 2);\n up--;\n }\n }\n }\n path = parts.join('/');\n\n if (path === '') {\n path = isAbsolute ? '/' : '.';\n }\n\n if (url) {\n url.path = path;\n return urlGenerate(url);\n }\n return path;\n}\nexports.normalize = normalize;\n\n/**\n * Joins two paths/URLs.\n *\n * @param aRoot The root path or URL.\n * @param aPath The path or URL to be joined with the root.\n *\n * - If aPath is a URL or a data URI, aPath is returned, unless aPath is a\n * scheme-relative URL: Then the scheme of aRoot, if any, is prepended\n * first.\n * - Otherwise aPath is a path. If aRoot is a URL, then its path portion\n * is updated with the result and aRoot is returned. Otherwise the result\n * is returned.\n * - If aPath is absolute, the result is aPath.\n * - Otherwise the two paths are joined with a slash.\n * - Joining for example 'http://' and 'www.example.com' is also supported.\n */\nfunction join(aRoot, aPath) {\n if (aRoot === \"\") {\n aRoot = \".\";\n }\n if (aPath === \"\") {\n aPath = \".\";\n }\n var aPathUrl = urlParse(aPath);\n var aRootUrl = urlParse(aRoot);\n if (aRootUrl) {\n aRoot = aRootUrl.path || '/';\n }\n\n // `join(foo, '//www.example.org')`\n if (aPathUrl && !aPathUrl.scheme) {\n if (aRootUrl) {\n aPathUrl.scheme = aRootUrl.scheme;\n }\n return urlGenerate(aPathUrl);\n }\n\n if (aPathUrl || aPath.match(dataUrlRegexp)) {\n return aPath;\n }\n\n // `join('http://', 'www.example.com')`\n if (aRootUrl && !aRootUrl.host && !aRootUrl.path) {\n aRootUrl.host = aPath;\n return urlGenerate(aRootUrl);\n }\n\n var joined = aPath.charAt(0) === '/'\n ? aPath\n : normalize(aRoot.replace(/\\/+$/, '') + '/' + aPath);\n\n if (aRootUrl) {\n aRootUrl.path = joined;\n return urlGenerate(aRootUrl);\n }\n return joined;\n}\nexports.join = join;\n\nexports.isAbsolute = function (aPath) {\n return aPath.charAt(0) === '/' || urlRegexp.test(aPath);\n};\n\n/**\n * Make a path relative to a URL or another path.\n *\n * @param aRoot The root path or URL.\n * @param aPath The path or URL to be made relative to aRoot.\n */\nfunction relative(aRoot, aPath) {\n if (aRoot === \"\") {\n aRoot = \".\";\n }\n\n aRoot = aRoot.replace(/\\/$/, '');\n\n // It is possible for the path to be above the root. In this case, simply\n // checking whether the root is a prefix of the path won't work. Instead, we\n // need to remove components from the root one by one, until either we find\n // a prefix that fits, or we run out of components to remove.\n var level = 0;\n while (aPath.indexOf(aRoot + '/') !== 0) {\n var index = aRoot.lastIndexOf(\"/\");\n if (index < 0) {\n return aPath;\n }\n\n // If the only part of the root that is left is the scheme (i.e. 
http://,\n // file:///, etc.), one or more slashes (/), or simply nothing at all, we\n // have exhausted all components, so the path is not relative to the root.\n aRoot = aRoot.slice(0, index);\n if (aRoot.match(/^([^\\/]+:\\/)?\\/*$/)) {\n return aPath;\n }\n\n ++level;\n }\n\n // Make sure we add a \"../\" for each component we removed from the root.\n return Array(level + 1).join(\"../\") + aPath.substr(aRoot.length + 1);\n}\nexports.relative = relative;\n\nvar supportsNullProto = (function () {\n var obj = Object.create(null);\n return !('__proto__' in obj);\n}());\n\nfunction identity (s) {\n return s;\n}\n\n/**\n * Because behavior goes wacky when you set `__proto__` on objects, we\n * have to prefix all the strings in our set with an arbitrary character.\n *\n * See https://github.com/mozilla/source-map/pull/31 and\n * https://github.com/mozilla/source-map/issues/30\n *\n * @param String aStr\n */\nfunction toSetString(aStr) {\n if (isProtoString(aStr)) {\n return '$' + aStr;\n }\n\n return aStr;\n}\nexports.toSetString = supportsNullProto ? identity : toSetString;\n\nfunction fromSetString(aStr) {\n if (isProtoString(aStr)) {\n return aStr.slice(1);\n }\n\n return aStr;\n}\nexports.fromSetString = supportsNullProto ? identity : fromSetString;\n\nfunction isProtoString(s) {\n if (!s) {\n return false;\n }\n\n var length = s.length;\n\n if (length < 9 /* \"__proto__\".length */) {\n return false;\n }\n\n if (s.charCodeAt(length - 1) !== 95 /* '_' */ ||\n s.charCodeAt(length - 2) !== 95 /* '_' */ ||\n s.charCodeAt(length - 3) !== 111 /* 'o' */ ||\n s.charCodeAt(length - 4) !== 116 /* 't' */ ||\n s.charCodeAt(length - 5) !== 111 /* 'o' */ ||\n s.charCodeAt(length - 6) !== 114 /* 'r' */ ||\n s.charCodeAt(length - 7) !== 112 /* 'p' */ ||\n s.charCodeAt(length - 8) !== 95 /* '_' */ ||\n s.charCodeAt(length - 9) !== 95 /* '_' */) {\n return false;\n }\n\n for (var i = length - 10; i >= 0; i--) {\n if (s.charCodeAt(i) !== 36 /* '$' */) {\n return false;\n }\n }\n\n return true;\n}\n\n/**\n * Comparator between two mappings where the original positions are compared.\n *\n * Optionally pass in `true` as `onlyCompareGenerated` to consider two\n * mappings with the same original source/line/column, but different generated\n * line and column the same. Useful when searching for a mapping with a\n * stubbed out mapping.\n */\nfunction compareByOriginalPositions(mappingA, mappingB, onlyCompareOriginal) {\n var cmp = strcmp(mappingA.source, mappingB.source);\n if (cmp !== 0) {\n return cmp;\n }\n\n cmp = mappingA.originalLine - mappingB.originalLine;\n if (cmp !== 0) {\n return cmp;\n }\n\n cmp = mappingA.originalColumn - mappingB.originalColumn;\n if (cmp !== 0 || onlyCompareOriginal) {\n return cmp;\n }\n\n cmp = mappingA.generatedColumn - mappingB.generatedColumn;\n if (cmp !== 0) {\n return cmp;\n }\n\n cmp = mappingA.generatedLine - mappingB.generatedLine;\n if (cmp !== 0) {\n return cmp;\n }\n\n return strcmp(mappingA.name, mappingB.name);\n}\nexports.compareByOriginalPositions = compareByOriginalPositions;\n\n/**\n * Comparator between two mappings with deflated source and name indices where\n * the generated positions are compared.\n *\n * Optionally pass in `true` as `onlyCompareGenerated` to consider two\n * mappings with the same generated line and column, but different\n * source/name/original line and column the same. 
Useful when searching for a\n * mapping with a stubbed out mapping.\n */\nfunction compareByGeneratedPositionsDeflated(mappingA, mappingB, onlyCompareGenerated) {\n var cmp = mappingA.generatedLine - mappingB.generatedLine;\n if (cmp !== 0) {\n return cmp;\n }\n\n cmp = mappingA.generatedColumn - mappingB.generatedColumn;\n if (cmp !== 0 || onlyCompareGenerated) {\n return cmp;\n }\n\n cmp = strcmp(mappingA.source, mappingB.source);\n if (cmp !== 0) {\n return cmp;\n }\n\n cmp = mappingA.originalLine - mappingB.originalLine;\n if (cmp !== 0) {\n return cmp;\n }\n\n cmp = mappingA.originalColumn - mappingB.originalColumn;\n if (cmp !== 0) {\n return cmp;\n }\n\n return strcmp(mappingA.name, mappingB.name);\n}\nexports.compareByGeneratedPositionsDeflated = compareByGeneratedPositionsDeflated;\n\nfunction strcmp(aStr1, aStr2) {\n if (aStr1 === aStr2) {\n return 0;\n }\n\n if (aStr1 === null) {\n return 1; // aStr2 !== null\n }\n\n if (aStr2 === null) {\n return -1; // aStr1 !== null\n }\n\n if (aStr1 > aStr2) {\n return 1;\n }\n\n return -1;\n}\n\n/**\n * Comparator between two mappings with inflated source and name strings where\n * the generated positions are compared.\n */\nfunction compareByGeneratedPositionsInflated(mappingA, mappingB) {\n var cmp = mappingA.generatedLine - mappingB.generatedLine;\n if (cmp !== 0) {\n return cmp;\n }\n\n cmp = mappingA.generatedColumn - mappingB.generatedColumn;\n if (cmp !== 0) {\n return cmp;\n }\n\n cmp = strcmp(mappingA.source, mappingB.source);\n if (cmp !== 0) {\n return cmp;\n }\n\n cmp = mappingA.originalLine - mappingB.originalLine;\n if (cmp !== 0) {\n return cmp;\n }\n\n cmp = mappingA.originalColumn - mappingB.originalColumn;\n if (cmp !== 0) {\n return cmp;\n }\n\n return strcmp(mappingA.name, mappingB.name);\n}\nexports.compareByGeneratedPositionsInflated = compareByGeneratedPositionsInflated;\n\n/**\n * Strip any JSON XSSI avoidance prefix from the string (as documented\n * in the source maps specification), and then parse the string as\n * JSON.\n */\nfunction parseSourceMapInput(str) {\n return JSON.parse(str.replace(/^\\)]}'[^\\n]*\\n/, ''));\n}\nexports.parseSourceMapInput = parseSourceMapInput;\n\n/**\n * Compute the URL of a source given the the source root, the source's\n * URL, and the source map's URL.\n */\nfunction computeSourceURL(sourceRoot, sourceURL, sourceMapURL) {\n sourceURL = sourceURL || '';\n\n if (sourceRoot) {\n // This follows what Chrome does.\n if (sourceRoot[sourceRoot.length - 1] !== '/' && sourceURL[0] !== '/') {\n sourceRoot += '/';\n }\n // The spec says:\n // Line 4: An optional source root, useful for relocating source\n // files on a server or removing repeated values in the\n // “sources” entry. This value is prepended to the individual\n // entries in the “source” field.\n sourceURL = sourceRoot + sourceURL;\n }\n\n // Historically, SourceMapConsumer did not take the sourceMapURL as\n // a parameter. This mode is still somewhat supported, which is why\n // this code block is conditional. However, it's preferable to pass\n // the source map URL to SourceMapConsumer, so that this function\n // can implement the source URL resolution algorithm as outlined in\n // the spec. This block is basically the equivalent of:\n // new URL(sourceURL, sourceMapURL).toString()\n // ... 
except it avoids using URL, which wasn't available in the\n // older releases of node still supported by this library.\n //\n // The spec says:\n // If the sources are not absolute URLs after prepending of the\n // “sourceRoot”, the sources are resolved relative to the\n // SourceMap (like resolving script src in a html document).\n if (sourceMapURL) {\n var parsed = urlParse(sourceMapURL);\n if (!parsed) {\n throw new Error(\"sourceMapURL could not be parsed\");\n }\n if (parsed.path) {\n // Strip the last path component, but keep the \"/\".\n var index = parsed.path.lastIndexOf('/');\n if (index >= 0) {\n parsed.path = parsed.path.substring(0, index + 1);\n }\n }\n sourceURL = join(urlGenerate(parsed), sourceURL);\n }\n\n return normalize(sourceURL);\n}\nexports.computeSourceURL = computeSourceURL;\n\n\n\n//////////////////\n// WEBPACK FOOTER\n// ./lib/util.js\n// module id = 4\n// module chunks = 0","/* -*- Mode: js; js-indent-level: 2; -*- */\n/*\n * Copyright 2011 Mozilla Foundation and contributors\n * Licensed under the New BSD license. See LICENSE or:\n * http://opensource.org/licenses/BSD-3-Clause\n */\n\nvar util = require('./util');\nvar has = Object.prototype.hasOwnProperty;\nvar hasNativeMap = typeof Map !== \"undefined\";\n\n/**\n * A data structure which is a combination of an array and a set. Adding a new\n * member is O(1), testing for membership is O(1), and finding the index of an\n * element is O(1). Removing elements from the set is not supported. Only\n * strings are supported for membership.\n */\nfunction ArraySet() {\n this._array = [];\n this._set = hasNativeMap ? new Map() : Object.create(null);\n}\n\n/**\n * Static method for creating ArraySet instances from an existing array.\n */\nArraySet.fromArray = function ArraySet_fromArray(aArray, aAllowDuplicates) {\n var set = new ArraySet();\n for (var i = 0, len = aArray.length; i < len; i++) {\n set.add(aArray[i], aAllowDuplicates);\n }\n return set;\n};\n\n/**\n * Return how many unique items are in this ArraySet. If duplicates have been\n * added, than those do not count towards the size.\n *\n * @returns Number\n */\nArraySet.prototype.size = function ArraySet_size() {\n return hasNativeMap ? this._set.size : Object.getOwnPropertyNames(this._set).length;\n};\n\n/**\n * Add the given string to this set.\n *\n * @param String aStr\n */\nArraySet.prototype.add = function ArraySet_add(aStr, aAllowDuplicates) {\n var sStr = hasNativeMap ? aStr : util.toSetString(aStr);\n var isDuplicate = hasNativeMap ? 
this.has(aStr) : has.call(this._set, sStr);\n var idx = this._array.length;\n if (!isDuplicate || aAllowDuplicates) {\n this._array.push(aStr);\n }\n if (!isDuplicate) {\n if (hasNativeMap) {\n this._set.set(aStr, idx);\n } else {\n this._set[sStr] = idx;\n }\n }\n};\n\n/**\n * Is the given string a member of this set?\n *\n * @param String aStr\n */\nArraySet.prototype.has = function ArraySet_has(aStr) {\n if (hasNativeMap) {\n return this._set.has(aStr);\n } else {\n var sStr = util.toSetString(aStr);\n return has.call(this._set, sStr);\n }\n};\n\n/**\n * What is the index of the given string in the array?\n *\n * @param String aStr\n */\nArraySet.prototype.indexOf = function ArraySet_indexOf(aStr) {\n if (hasNativeMap) {\n var idx = this._set.get(aStr);\n if (idx >= 0) {\n return idx;\n }\n } else {\n var sStr = util.toSetString(aStr);\n if (has.call(this._set, sStr)) {\n return this._set[sStr];\n }\n }\n\n throw new Error('\"' + aStr + '\" is not in the set.');\n};\n\n/**\n * What is the element at the given index?\n *\n * @param Number aIdx\n */\nArraySet.prototype.at = function ArraySet_at(aIdx) {\n if (aIdx >= 0 && aIdx < this._array.length) {\n return this._array[aIdx];\n }\n throw new Error('No element indexed by ' + aIdx);\n};\n\n/**\n * Returns the array representation of this set (which has the proper indices\n * indicated by indexOf). Note that this is a copy of the internal array used\n * for storing the members so that no one can mess with internal state.\n */\nArraySet.prototype.toArray = function ArraySet_toArray() {\n return this._array.slice();\n};\n\nexports.ArraySet = ArraySet;\n\n\n\n//////////////////\n// WEBPACK FOOTER\n// ./lib/array-set.js\n// module id = 5\n// module chunks = 0","/* -*- Mode: js; js-indent-level: 2; -*- */\n/*\n * Copyright 2014 Mozilla Foundation and contributors\n * Licensed under the New BSD license. See LICENSE or:\n * http://opensource.org/licenses/BSD-3-Clause\n */\n\nvar util = require('./util');\n\n/**\n * Determine whether mappingB is after mappingA with respect to generated\n * position.\n */\nfunction generatedPositionAfter(mappingA, mappingB) {\n // Optimized for most common case\n var lineA = mappingA.generatedLine;\n var lineB = mappingB.generatedLine;\n var columnA = mappingA.generatedColumn;\n var columnB = mappingB.generatedColumn;\n return lineB > lineA || lineB == lineA && columnB >= columnA ||\n util.compareByGeneratedPositionsInflated(mappingA, mappingB) <= 0;\n}\n\n/**\n * A data structure to provide a sorted view of accumulated mappings in a\n * performance conscious manner. It trades a neglibable overhead in general\n * case for a large speedup in case of mappings being added in order.\n */\nfunction MappingList() {\n this._array = [];\n this._sorted = true;\n // Serves as infimum\n this._last = {generatedLine: -1, generatedColumn: 0};\n}\n\n/**\n * Iterate through internal items. 
This method takes the same arguments that\n * `Array.prototype.forEach` takes.\n *\n * NOTE: The order of the mappings is NOT guaranteed.\n */\nMappingList.prototype.unsortedForEach =\n function MappingList_forEach(aCallback, aThisArg) {\n this._array.forEach(aCallback, aThisArg);\n };\n\n/**\n * Add the given source mapping.\n *\n * @param Object aMapping\n */\nMappingList.prototype.add = function MappingList_add(aMapping) {\n if (generatedPositionAfter(this._last, aMapping)) {\n this._last = aMapping;\n this._array.push(aMapping);\n } else {\n this._sorted = false;\n this._array.push(aMapping);\n }\n};\n\n/**\n * Returns the flat, sorted array of mappings. The mappings are sorted by\n * generated position.\n *\n * WARNING: This method returns internal data without copying, for\n * performance. The return value must NOT be mutated, and should be treated as\n * an immutable borrow. If you want to take ownership, you must make your own\n * copy.\n */\nMappingList.prototype.toArray = function MappingList_toArray() {\n if (!this._sorted) {\n this._array.sort(util.compareByGeneratedPositionsInflated);\n this._sorted = true;\n }\n return this._array;\n};\n\nexports.MappingList = MappingList;\n\n\n\n//////////////////\n// WEBPACK FOOTER\n// ./lib/mapping-list.js\n// module id = 6\n// module chunks = 0","/* -*- Mode: js; js-indent-level: 2; -*- */\n/*\n * Copyright 2011 Mozilla Foundation and contributors\n * Licensed under the New BSD license. See LICENSE or:\n * http://opensource.org/licenses/BSD-3-Clause\n */\n\nvar util = require('./util');\nvar binarySearch = require('./binary-search');\nvar ArraySet = require('./array-set').ArraySet;\nvar base64VLQ = require('./base64-vlq');\nvar quickSort = require('./quick-sort').quickSort;\n\nfunction SourceMapConsumer(aSourceMap, aSourceMapURL) {\n var sourceMap = aSourceMap;\n if (typeof aSourceMap === 'string') {\n sourceMap = util.parseSourceMapInput(aSourceMap);\n }\n\n return sourceMap.sections != null\n ? new IndexedSourceMapConsumer(sourceMap, aSourceMapURL)\n : new BasicSourceMapConsumer(sourceMap, aSourceMapURL);\n}\n\nSourceMapConsumer.fromSourceMap = function(aSourceMap, aSourceMapURL) {\n return BasicSourceMapConsumer.fromSourceMap(aSourceMap, aSourceMapURL);\n}\n\n/**\n * The version of the source mapping spec that we are consuming.\n */\nSourceMapConsumer.prototype._version = 3;\n\n// `__generatedMappings` and `__originalMappings` are arrays that hold the\n// parsed mapping coordinates from the source map's \"mappings\" attribute. They\n// are lazily instantiated, accessed via the `_generatedMappings` and\n// `_originalMappings` getters respectively, and we only parse the mappings\n// and create these arrays once queried for a source location. 
We jump through\n// these hoops because there can be many thousands of mappings, and parsing\n// them is expensive, so we only want to do it if we must.\n//\n// Each object in the arrays is of the form:\n//\n// {\n// generatedLine: The line number in the generated code,\n// generatedColumn: The column number in the generated code,\n// source: The path to the original source file that generated this\n// chunk of code,\n// originalLine: The line number in the original source that\n// corresponds to this chunk of generated code,\n// originalColumn: The column number in the original source that\n// corresponds to this chunk of generated code,\n// name: The name of the original symbol which generated this chunk of\n// code.\n// }\n//\n// All properties except for `generatedLine` and `generatedColumn` can be\n// `null`.\n//\n// `_generatedMappings` is ordered by the generated positions.\n//\n// `_originalMappings` is ordered by the original positions.\n\nSourceMapConsumer.prototype.__generatedMappings = null;\nObject.defineProperty(SourceMapConsumer.prototype, '_generatedMappings', {\n configurable: true,\n enumerable: true,\n get: function () {\n if (!this.__generatedMappings) {\n this._parseMappings(this._mappings, this.sourceRoot);\n }\n\n return this.__generatedMappings;\n }\n});\n\nSourceMapConsumer.prototype.__originalMappings = null;\nObject.defineProperty(SourceMapConsumer.prototype, '_originalMappings', {\n configurable: true,\n enumerable: true,\n get: function () {\n if (!this.__originalMappings) {\n this._parseMappings(this._mappings, this.sourceRoot);\n }\n\n return this.__originalMappings;\n }\n});\n\nSourceMapConsumer.prototype._charIsMappingSeparator =\n function SourceMapConsumer_charIsMappingSeparator(aStr, index) {\n var c = aStr.charAt(index);\n return c === \";\" || c === \",\";\n };\n\n/**\n * Parse the mappings in a string in to a data structure which we can easily\n * query (the ordered arrays in the `this.__generatedMappings` and\n * `this.__originalMappings` properties).\n */\nSourceMapConsumer.prototype._parseMappings =\n function SourceMapConsumer_parseMappings(aStr, aSourceRoot) {\n throw new Error(\"Subclasses must implement _parseMappings\");\n };\n\nSourceMapConsumer.GENERATED_ORDER = 1;\nSourceMapConsumer.ORIGINAL_ORDER = 2;\n\nSourceMapConsumer.GREATEST_LOWER_BOUND = 1;\nSourceMapConsumer.LEAST_UPPER_BOUND = 2;\n\n/**\n * Iterate over each mapping between an original source/line/column and a\n * generated line/column in this source map.\n *\n * @param Function aCallback\n * The function that is called with each mapping.\n * @param Object aContext\n * Optional. If specified, this object will be the value of `this` every\n * time that `aCallback` is called.\n * @param aOrder\n * Either `SourceMapConsumer.GENERATED_ORDER` or\n * `SourceMapConsumer.ORIGINAL_ORDER`. Specifies whether you want to\n * iterate over the mappings sorted by the generated file's line/column\n * order or the original's source/line/column order, respectively. 
Defaults to\n * `SourceMapConsumer.GENERATED_ORDER`.\n */\nSourceMapConsumer.prototype.eachMapping =\n function SourceMapConsumer_eachMapping(aCallback, aContext, aOrder) {\n var context = aContext || null;\n var order = aOrder || SourceMapConsumer.GENERATED_ORDER;\n\n var mappings;\n switch (order) {\n case SourceMapConsumer.GENERATED_ORDER:\n mappings = this._generatedMappings;\n break;\n case SourceMapConsumer.ORIGINAL_ORDER:\n mappings = this._originalMappings;\n break;\n default:\n throw new Error(\"Unknown order of iteration.\");\n }\n\n var sourceRoot = this.sourceRoot;\n mappings.map(function (mapping) {\n var source = mapping.source === null ? null : this._sources.at(mapping.source);\n source = util.computeSourceURL(sourceRoot, source, this._sourceMapURL);\n return {\n source: source,\n generatedLine: mapping.generatedLine,\n generatedColumn: mapping.generatedColumn,\n originalLine: mapping.originalLine,\n originalColumn: mapping.originalColumn,\n name: mapping.name === null ? null : this._names.at(mapping.name)\n };\n }, this).forEach(aCallback, context);\n };\n\n/**\n * Returns all generated line and column information for the original source,\n * line, and column provided. If no column is provided, returns all mappings\n * corresponding to a either the line we are searching for or the next\n * closest line that has any mappings. Otherwise, returns all mappings\n * corresponding to the given line and either the column we are searching for\n * or the next closest column that has any offsets.\n *\n * The only argument is an object with the following properties:\n *\n * - source: The filename of the original source.\n * - line: The line number in the original source. The line number is 1-based.\n * - column: Optional. the column number in the original source.\n * The column number is 0-based.\n *\n * and an array of objects is returned, each with the following properties:\n *\n * - line: The line number in the generated source, or null. The\n * line number is 1-based.\n * - column: The column number in the generated source, or null.\n * The column number is 0-based.\n */\nSourceMapConsumer.prototype.allGeneratedPositionsFor =\n function SourceMapConsumer_allGeneratedPositionsFor(aArgs) {\n var line = util.getArg(aArgs, 'line');\n\n // When there is no exact match, BasicSourceMapConsumer.prototype._findMapping\n // returns the index of the closest mapping less than the needle. By\n // setting needle.originalColumn to 0, we thus find the last mapping for\n // the given line, provided such a mapping exists.\n var needle = {\n source: util.getArg(aArgs, 'source'),\n originalLine: line,\n originalColumn: util.getArg(aArgs, 'column', 0)\n };\n\n needle.source = this._findSourceIndex(needle.source);\n if (needle.source < 0) {\n return [];\n }\n\n var mappings = [];\n\n var index = this._findMapping(needle,\n this._originalMappings,\n \"originalLine\",\n \"originalColumn\",\n util.compareByOriginalPositions,\n binarySearch.LEAST_UPPER_BOUND);\n if (index >= 0) {\n var mapping = this._originalMappings[index];\n\n if (aArgs.column === undefined) {\n var originalLine = mapping.originalLine;\n\n // Iterate until either we run out of mappings, or we run into\n // a mapping for a different line than the one we found. 
Since\n // mappings are sorted, this is guaranteed to find all mappings for\n // the line we found.\n while (mapping && mapping.originalLine === originalLine) {\n mappings.push({\n line: util.getArg(mapping, 'generatedLine', null),\n column: util.getArg(mapping, 'generatedColumn', null),\n lastColumn: util.getArg(mapping, 'lastGeneratedColumn', null)\n });\n\n mapping = this._originalMappings[++index];\n }\n } else {\n var originalColumn = mapping.originalColumn;\n\n // Iterate until either we run out of mappings, or we run into\n // a mapping for a different line than the one we were searching for.\n // Since mappings are sorted, this is guaranteed to find all mappings for\n // the line we are searching for.\n while (mapping &&\n mapping.originalLine === line &&\n mapping.originalColumn == originalColumn) {\n mappings.push({\n line: util.getArg(mapping, 'generatedLine', null),\n column: util.getArg(mapping, 'generatedColumn', null),\n lastColumn: util.getArg(mapping, 'lastGeneratedColumn', null)\n });\n\n mapping = this._originalMappings[++index];\n }\n }\n }\n\n return mappings;\n };\n\nexports.SourceMapConsumer = SourceMapConsumer;\n\n/**\n * A BasicSourceMapConsumer instance represents a parsed source map which we can\n * query for information about the original file positions by giving it a file\n * position in the generated source.\n *\n * The first parameter is the raw source map (either as a JSON string, or\n * already parsed to an object). According to the spec, source maps have the\n * following attributes:\n *\n * - version: Which version of the source map spec this map is following.\n * - sources: An array of URLs to the original source files.\n * - names: An array of identifiers which can be referrenced by individual mappings.\n * - sourceRoot: Optional. The URL root from which all sources are relative.\n * - sourcesContent: Optional. An array of contents of the original source files.\n * - mappings: A string of base64 VLQs which contain the actual mappings.\n * - file: Optional. The generated file this source map is associated with.\n *\n * Here is an example source map, taken from the source map spec[0]:\n *\n * {\n * version : 3,\n * file: \"out.js\",\n * sourceRoot : \"\",\n * sources: [\"foo.js\", \"bar.js\"],\n * names: [\"src\", \"maps\", \"are\", \"fun\"],\n * mappings: \"AA,AB;;ABCDE;\"\n * }\n *\n * The second parameter, if given, is a string whose value is the URL\n * at which the source map was found. 
This URL is used to compute the\n * sources array.\n *\n * [0]: https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit?pli=1#\n */\nfunction BasicSourceMapConsumer(aSourceMap, aSourceMapURL) {\n var sourceMap = aSourceMap;\n if (typeof aSourceMap === 'string') {\n sourceMap = util.parseSourceMapInput(aSourceMap);\n }\n\n var version = util.getArg(sourceMap, 'version');\n var sources = util.getArg(sourceMap, 'sources');\n // Sass 3.3 leaves out the 'names' array, so we deviate from the spec (which\n // requires the array) to play nice here.\n var names = util.getArg(sourceMap, 'names', []);\n var sourceRoot = util.getArg(sourceMap, 'sourceRoot', null);\n var sourcesContent = util.getArg(sourceMap, 'sourcesContent', null);\n var mappings = util.getArg(sourceMap, 'mappings');\n var file = util.getArg(sourceMap, 'file', null);\n\n // Once again, Sass deviates from the spec and supplies the version as a\n // string rather than a number, so we use loose equality checking here.\n if (version != this._version) {\n throw new Error('Unsupported version: ' + version);\n }\n\n if (sourceRoot) {\n sourceRoot = util.normalize(sourceRoot);\n }\n\n sources = sources\n .map(String)\n // Some source maps produce relative source paths like \"./foo.js\" instead of\n // \"foo.js\". Normalize these first so that future comparisons will succeed.\n // See bugzil.la/1090768.\n .map(util.normalize)\n // Always ensure that absolute sources are internally stored relative to\n // the source root, if the source root is absolute. Not doing this would\n // be particularly problematic when the source root is a prefix of the\n // source (valid, but why??). See github issue #199 and bugzil.la/1188982.\n .map(function (source) {\n return sourceRoot && util.isAbsolute(sourceRoot) && util.isAbsolute(source)\n ? util.relative(sourceRoot, source)\n : source;\n });\n\n // Pass `true` below to allow duplicate names and sources. While source maps\n // are intended to be compressed and deduplicated, the TypeScript compiler\n // sometimes generates source maps with duplicates in them. See Github issue\n // #72 and bugzil.la/889492.\n this._names = ArraySet.fromArray(names.map(String), true);\n this._sources = ArraySet.fromArray(sources, true);\n\n this._absoluteSources = this._sources.toArray().map(function (s) {\n return util.computeSourceURL(sourceRoot, s, aSourceMapURL);\n });\n\n this.sourceRoot = sourceRoot;\n this.sourcesContent = sourcesContent;\n this._mappings = mappings;\n this._sourceMapURL = aSourceMapURL;\n this.file = file;\n}\n\nBasicSourceMapConsumer.prototype = Object.create(SourceMapConsumer.prototype);\nBasicSourceMapConsumer.prototype.consumer = SourceMapConsumer;\n\n/**\n * Utility function to find the index of a source. Returns -1 if not\n * found.\n */\nBasicSourceMapConsumer.prototype._findSourceIndex = function(aSource) {\n var relativeSource = aSource;\n if (this.sourceRoot != null) {\n relativeSource = util.relative(this.sourceRoot, relativeSource);\n }\n\n if (this._sources.has(relativeSource)) {\n return this._sources.indexOf(relativeSource);\n }\n\n // Maybe aSource is an absolute URL as returned by |sources|. 
In\n // this case we can't simply undo the transform.\n var i;\n for (i = 0; i < this._absoluteSources.length; ++i) {\n if (this._absoluteSources[i] == aSource) {\n return i;\n }\n }\n\n return -1;\n};\n\n/**\n * Create a BasicSourceMapConsumer from a SourceMapGenerator.\n *\n * @param SourceMapGenerator aSourceMap\n * The source map that will be consumed.\n * @param String aSourceMapURL\n * The URL at which the source map can be found (optional)\n * @returns BasicSourceMapConsumer\n */\nBasicSourceMapConsumer.fromSourceMap =\n function SourceMapConsumer_fromSourceMap(aSourceMap, aSourceMapURL) {\n var smc = Object.create(BasicSourceMapConsumer.prototype);\n\n var names = smc._names = ArraySet.fromArray(aSourceMap._names.toArray(), true);\n var sources = smc._sources = ArraySet.fromArray(aSourceMap._sources.toArray(), true);\n smc.sourceRoot = aSourceMap._sourceRoot;\n smc.sourcesContent = aSourceMap._generateSourcesContent(smc._sources.toArray(),\n smc.sourceRoot);\n smc.file = aSourceMap._file;\n smc._sourceMapURL = aSourceMapURL;\n smc._absoluteSources = smc._sources.toArray().map(function (s) {\n return util.computeSourceURL(smc.sourceRoot, s, aSourceMapURL);\n });\n\n // Because we are modifying the entries (by converting string sources and\n // names to indices into the sources and names ArraySets), we have to make\n // a copy of the entry or else bad things happen. Shared mutable state\n // strikes again! See github issue #191.\n\n var generatedMappings = aSourceMap._mappings.toArray().slice();\n var destGeneratedMappings = smc.__generatedMappings = [];\n var destOriginalMappings = smc.__originalMappings = [];\n\n for (var i = 0, length = generatedMappings.length; i < length; i++) {\n var srcMapping = generatedMappings[i];\n var destMapping = new Mapping;\n destMapping.generatedLine = srcMapping.generatedLine;\n destMapping.generatedColumn = srcMapping.generatedColumn;\n\n if (srcMapping.source) {\n destMapping.source = sources.indexOf(srcMapping.source);\n destMapping.originalLine = srcMapping.originalLine;\n destMapping.originalColumn = srcMapping.originalColumn;\n\n if (srcMapping.name) {\n destMapping.name = names.indexOf(srcMapping.name);\n }\n\n destOriginalMappings.push(destMapping);\n }\n\n destGeneratedMappings.push(destMapping);\n }\n\n quickSort(smc.__originalMappings, util.compareByOriginalPositions);\n\n return smc;\n };\n\n/**\n * The version of the source mapping spec that we are consuming.\n */\nBasicSourceMapConsumer.prototype._version = 3;\n\n/**\n * The list of original sources.\n */\nObject.defineProperty(BasicSourceMapConsumer.prototype, 'sources', {\n get: function () {\n return this._absoluteSources.slice();\n }\n});\n\n/**\n * Provide the JIT with a nice shape / hidden class.\n */\nfunction Mapping() {\n this.generatedLine = 0;\n this.generatedColumn = 0;\n this.source = null;\n this.originalLine = null;\n this.originalColumn = null;\n this.name = null;\n}\n\n/**\n * Parse the mappings in a string in to a data structure which we can easily\n * query (the ordered arrays in the `this.__generatedMappings` and\n * `this.__originalMappings` properties).\n */\nBasicSourceMapConsumer.prototype._parseMappings =\n function SourceMapConsumer_parseMappings(aStr, aSourceRoot) {\n var generatedLine = 1;\n var previousGeneratedColumn = 0;\n var previousOriginalLine = 0;\n var previousOriginalColumn = 0;\n var previousSource = 0;\n var previousName = 0;\n var length = aStr.length;\n var index = 0;\n var cachedSegments = {};\n var temp = {};\n var originalMappings = [];\n 
var generatedMappings = [];\n var mapping, str, segment, end, value;\n\n while (index < length) {\n if (aStr.charAt(index) === ';') {\n generatedLine++;\n index++;\n previousGeneratedColumn = 0;\n }\n else if (aStr.charAt(index) === ',') {\n index++;\n }\n else {\n mapping = new Mapping();\n mapping.generatedLine = generatedLine;\n\n // Because each offset is encoded relative to the previous one,\n // many segments often have the same encoding. We can exploit this\n // fact by caching the parsed variable length fields of each segment,\n // allowing us to avoid a second parse if we encounter the same\n // segment again.\n for (end = index; end < length; end++) {\n if (this._charIsMappingSeparator(aStr, end)) {\n break;\n }\n }\n str = aStr.slice(index, end);\n\n segment = cachedSegments[str];\n if (segment) {\n index += str.length;\n } else {\n segment = [];\n while (index < end) {\n base64VLQ.decode(aStr, index, temp);\n value = temp.value;\n index = temp.rest;\n segment.push(value);\n }\n\n if (segment.length === 2) {\n throw new Error('Found a source, but no line and column');\n }\n\n if (segment.length === 3) {\n throw new Error('Found a source and line, but no column');\n }\n\n cachedSegments[str] = segment;\n }\n\n // Generated column.\n mapping.generatedColumn = previousGeneratedColumn + segment[0];\n previousGeneratedColumn = mapping.generatedColumn;\n\n if (segment.length > 1) {\n // Original source.\n mapping.source = previousSource + segment[1];\n previousSource += segment[1];\n\n // Original line.\n mapping.originalLine = previousOriginalLine + segment[2];\n previousOriginalLine = mapping.originalLine;\n // Lines are stored 0-based\n mapping.originalLine += 1;\n\n // Original column.\n mapping.originalColumn = previousOriginalColumn + segment[3];\n previousOriginalColumn = mapping.originalColumn;\n\n if (segment.length > 4) {\n // Original name.\n mapping.name = previousName + segment[4];\n previousName += segment[4];\n }\n }\n\n generatedMappings.push(mapping);\n if (typeof mapping.originalLine === 'number') {\n originalMappings.push(mapping);\n }\n }\n }\n\n quickSort(generatedMappings, util.compareByGeneratedPositionsDeflated);\n this.__generatedMappings = generatedMappings;\n\n quickSort(originalMappings, util.compareByOriginalPositions);\n this.__originalMappings = originalMappings;\n };\n\n/**\n * Find the mapping that best matches the hypothetical \"needle\" mapping that\n * we are searching for in the given \"haystack\" of mappings.\n */\nBasicSourceMapConsumer.prototype._findMapping =\n function SourceMapConsumer_findMapping(aNeedle, aMappings, aLineName,\n aColumnName, aComparator, aBias) {\n // To return the position we are searching for, we must first find the\n // mapping for the given position and then return the opposite position it\n // points to. Because the mappings are sorted, we can use binary search to\n // find the best mapping.\n\n if (aNeedle[aLineName] <= 0) {\n throw new TypeError('Line must be greater than or equal to 1, got '\n + aNeedle[aLineName]);\n }\n if (aNeedle[aColumnName] < 0) {\n throw new TypeError('Column must be greater than or equal to 0, got '\n + aNeedle[aColumnName]);\n }\n\n return binarySearch.search(aNeedle, aMappings, aComparator, aBias);\n };\n\n/**\n * Compute the last column for each generated mapping. 
The last column is\n * inclusive.\n */\nBasicSourceMapConsumer.prototype.computeColumnSpans =\n function SourceMapConsumer_computeColumnSpans() {\n for (var index = 0; index < this._generatedMappings.length; ++index) {\n var mapping = this._generatedMappings[index];\n\n // Mappings do not contain a field for the last generated columnt. We\n // can come up with an optimistic estimate, however, by assuming that\n // mappings are contiguous (i.e. given two consecutive mappings, the\n // first mapping ends where the second one starts).\n if (index + 1 < this._generatedMappings.length) {\n var nextMapping = this._generatedMappings[index + 1];\n\n if (mapping.generatedLine === nextMapping.generatedLine) {\n mapping.lastGeneratedColumn = nextMapping.generatedColumn - 1;\n continue;\n }\n }\n\n // The last mapping for each line spans the entire line.\n mapping.lastGeneratedColumn = Infinity;\n }\n };\n\n/**\n * Returns the original source, line, and column information for the generated\n * source's line and column positions provided. The only argument is an object\n * with the following properties:\n *\n * - line: The line number in the generated source. The line number\n * is 1-based.\n * - column: The column number in the generated source. The column\n * number is 0-based.\n * - bias: Either 'SourceMapConsumer.GREATEST_LOWER_BOUND' or\n * 'SourceMapConsumer.LEAST_UPPER_BOUND'. Specifies whether to return the\n * closest element that is smaller than or greater than the one we are\n * searching for, respectively, if the exact element cannot be found.\n * Defaults to 'SourceMapConsumer.GREATEST_LOWER_BOUND'.\n *\n * and an object is returned with the following properties:\n *\n * - source: The original source file, or null.\n * - line: The line number in the original source, or null. The\n * line number is 1-based.\n * - column: The column number in the original source, or null. 
The\n * column number is 0-based.\n * - name: The original identifier, or null.\n */\nBasicSourceMapConsumer.prototype.originalPositionFor =\n function SourceMapConsumer_originalPositionFor(aArgs) {\n var needle = {\n generatedLine: util.getArg(aArgs, 'line'),\n generatedColumn: util.getArg(aArgs, 'column')\n };\n\n var index = this._findMapping(\n needle,\n this._generatedMappings,\n \"generatedLine\",\n \"generatedColumn\",\n util.compareByGeneratedPositionsDeflated,\n util.getArg(aArgs, 'bias', SourceMapConsumer.GREATEST_LOWER_BOUND)\n );\n\n if (index >= 0) {\n var mapping = this._generatedMappings[index];\n\n if (mapping.generatedLine === needle.generatedLine) {\n var source = util.getArg(mapping, 'source', null);\n if (source !== null) {\n source = this._sources.at(source);\n source = util.computeSourceURL(this.sourceRoot, source, this._sourceMapURL);\n }\n var name = util.getArg(mapping, 'name', null);\n if (name !== null) {\n name = this._names.at(name);\n }\n return {\n source: source,\n line: util.getArg(mapping, 'originalLine', null),\n column: util.getArg(mapping, 'originalColumn', null),\n name: name\n };\n }\n }\n\n return {\n source: null,\n line: null,\n column: null,\n name: null\n };\n };\n\n/**\n * Return true if we have the source content for every source in the source\n * map, false otherwise.\n */\nBasicSourceMapConsumer.prototype.hasContentsOfAllSources =\n function BasicSourceMapConsumer_hasContentsOfAllSources() {\n if (!this.sourcesContent) {\n return false;\n }\n return this.sourcesContent.length >= this._sources.size() &&\n !this.sourcesContent.some(function (sc) { return sc == null; });\n };\n\n/**\n * Returns the original source content. The only argument is the url of the\n * original source file. Returns null if no original source content is\n * available.\n */\nBasicSourceMapConsumer.prototype.sourceContentFor =\n function SourceMapConsumer_sourceContentFor(aSource, nullOnMissing) {\n if (!this.sourcesContent) {\n return null;\n }\n\n var index = this._findSourceIndex(aSource);\n if (index >= 0) {\n return this.sourcesContent[index];\n }\n\n var relativeSource = aSource;\n if (this.sourceRoot != null) {\n relativeSource = util.relative(this.sourceRoot, relativeSource);\n }\n\n var url;\n if (this.sourceRoot != null\n && (url = util.urlParse(this.sourceRoot))) {\n // XXX: file:// URIs and absolute paths lead to unexpected behavior for\n // many users. We can help them out when they expect file:// URIs to\n // behave like it would if they were running a local HTTP server. See\n // https://bugzilla.mozilla.org/show_bug.cgi?id=885597.\n var fileUriAbsPath = relativeSource.replace(/^file:\\/\\//, \"\");\n if (url.scheme == \"file\"\n && this._sources.has(fileUriAbsPath)) {\n return this.sourcesContent[this._sources.indexOf(fileUriAbsPath)]\n }\n\n if ((!url.path || url.path == \"/\")\n && this._sources.has(\"/\" + relativeSource)) {\n return this.sourcesContent[this._sources.indexOf(\"/\" + relativeSource)];\n }\n }\n\n // This function is used recursively from\n // IndexedSourceMapConsumer.prototype.sourceContentFor. In that case, we\n // don't want to throw if we can't find the source - we just want to\n // return null, so we provide a flag to exit gracefully.\n if (nullOnMissing) {\n return null;\n }\n else {\n throw new Error('\"' + relativeSource + '\" is not in the SourceMap.');\n }\n };\n\n/**\n * Returns the generated line and column information for the original source,\n * line, and column positions provided. 
The only argument is an object with\n * the following properties:\n *\n * - source: The filename of the original source.\n * - line: The line number in the original source. The line number\n * is 1-based.\n * - column: The column number in the original source. The column\n * number is 0-based.\n * - bias: Either 'SourceMapConsumer.GREATEST_LOWER_BOUND' or\n * 'SourceMapConsumer.LEAST_UPPER_BOUND'. Specifies whether to return the\n * closest element that is smaller than or greater than the one we are\n * searching for, respectively, if the exact element cannot be found.\n * Defaults to 'SourceMapConsumer.GREATEST_LOWER_BOUND'.\n *\n * and an object is returned with the following properties:\n *\n * - line: The line number in the generated source, or null. The\n * line number is 1-based.\n * - column: The column number in the generated source, or null.\n * The column number is 0-based.\n */\nBasicSourceMapConsumer.prototype.generatedPositionFor =\n function SourceMapConsumer_generatedPositionFor(aArgs) {\n var source = util.getArg(aArgs, 'source');\n source = this._findSourceIndex(source);\n if (source < 0) {\n return {\n line: null,\n column: null,\n lastColumn: null\n };\n }\n\n var needle = {\n source: source,\n originalLine: util.getArg(aArgs, 'line'),\n originalColumn: util.getArg(aArgs, 'column')\n };\n\n var index = this._findMapping(\n needle,\n this._originalMappings,\n \"originalLine\",\n \"originalColumn\",\n util.compareByOriginalPositions,\n util.getArg(aArgs, 'bias', SourceMapConsumer.GREATEST_LOWER_BOUND)\n );\n\n if (index >= 0) {\n var mapping = this._originalMappings[index];\n\n if (mapping.source === needle.source) {\n return {\n line: util.getArg(mapping, 'generatedLine', null),\n column: util.getArg(mapping, 'generatedColumn', null),\n lastColumn: util.getArg(mapping, 'lastGeneratedColumn', null)\n };\n }\n }\n\n return {\n line: null,\n column: null,\n lastColumn: null\n };\n };\n\nexports.BasicSourceMapConsumer = BasicSourceMapConsumer;\n\n/**\n * An IndexedSourceMapConsumer instance represents a parsed source map which\n * we can query for information. It differs from BasicSourceMapConsumer in\n * that it takes \"indexed\" source maps (i.e. ones with a \"sections\" field) as\n * input.\n *\n * The first parameter is a raw source map (either as a JSON string, or already\n * parsed to an object). According to the spec for indexed source maps, they\n * have the following attributes:\n *\n * - version: Which version of the source map spec this map is following.\n * - file: Optional. The generated file this source map is associated with.\n * - sections: A list of section definitions.\n *\n * Each value under the \"sections\" field has two fields:\n * - offset: The offset into the original specified at which this section\n * begins to apply, defined as an object with a \"line\" and \"column\"\n * field.\n * - map: A source map definition. 
This source map could also be indexed,\n * but doesn't have to be.\n *\n * Instead of the \"map\" field, it's also possible to have a \"url\" field\n * specifying a URL to retrieve a source map from, but that's currently\n * unsupported.\n *\n * Here's an example source map, taken from the source map spec[0], but\n * modified to omit a section which uses the \"url\" field.\n *\n * {\n * version : 3,\n * file: \"app.js\",\n * sections: [{\n * offset: {line:100, column:10},\n * map: {\n * version : 3,\n * file: \"section.js\",\n * sources: [\"foo.js\", \"bar.js\"],\n * names: [\"src\", \"maps\", \"are\", \"fun\"],\n * mappings: \"AAAA,E;;ABCDE;\"\n * }\n * }],\n * }\n *\n * The second parameter, if given, is a string whose value is the URL\n * at which the source map was found. This URL is used to compute the\n * sources array.\n *\n * [0]: https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit#heading=h.535es3xeprgt\n */\nfunction IndexedSourceMapConsumer(aSourceMap, aSourceMapURL) {\n var sourceMap = aSourceMap;\n if (typeof aSourceMap === 'string') {\n sourceMap = util.parseSourceMapInput(aSourceMap);\n }\n\n var version = util.getArg(sourceMap, 'version');\n var sections = util.getArg(sourceMap, 'sections');\n\n if (version != this._version) {\n throw new Error('Unsupported version: ' + version);\n }\n\n this._sources = new ArraySet();\n this._names = new ArraySet();\n\n var lastOffset = {\n line: -1,\n column: 0\n };\n this._sections = sections.map(function (s) {\n if (s.url) {\n // The url field will require support for asynchronicity.\n // See https://github.com/mozilla/source-map/issues/16\n throw new Error('Support for url field in sections not implemented.');\n }\n var offset = util.getArg(s, 'offset');\n var offsetLine = util.getArg(offset, 'line');\n var offsetColumn = util.getArg(offset, 'column');\n\n if (offsetLine < lastOffset.line ||\n (offsetLine === lastOffset.line && offsetColumn < lastOffset.column)) {\n throw new Error('Section offsets must be ordered and non-overlapping.');\n }\n lastOffset = offset;\n\n return {\n generatedOffset: {\n // The offset fields are 0-based, but we use 1-based indices when\n // encoding/decoding from VLQ.\n generatedLine: offsetLine + 1,\n generatedColumn: offsetColumn + 1\n },\n consumer: new SourceMapConsumer(util.getArg(s, 'map'), aSourceMapURL)\n }\n });\n}\n\nIndexedSourceMapConsumer.prototype = Object.create(SourceMapConsumer.prototype);\nIndexedSourceMapConsumer.prototype.constructor = SourceMapConsumer;\n\n/**\n * The version of the source mapping spec that we are consuming.\n */\nIndexedSourceMapConsumer.prototype._version = 3;\n\n/**\n * The list of original sources.\n */\nObject.defineProperty(IndexedSourceMapConsumer.prototype, 'sources', {\n get: function () {\n var sources = [];\n for (var i = 0; i < this._sections.length; i++) {\n for (var j = 0; j < this._sections[i].consumer.sources.length; j++) {\n sources.push(this._sections[i].consumer.sources[j]);\n }\n }\n return sources;\n }\n});\n\n/**\n * Returns the original source, line, and column information for the generated\n * source's line and column positions provided. The only argument is an object\n * with the following properties:\n *\n * - line: The line number in the generated source. The line number\n * is 1-based.\n * - column: The column number in the generated source. 
The column\n * number is 0-based.\n *\n * and an object is returned with the following properties:\n *\n * - source: The original source file, or null.\n * - line: The line number in the original source, or null. The\n * line number is 1-based.\n * - column: The column number in the original source, or null. The\n * column number is 0-based.\n * - name: The original identifier, or null.\n */\nIndexedSourceMapConsumer.prototype.originalPositionFor =\n function IndexedSourceMapConsumer_originalPositionFor(aArgs) {\n var needle = {\n generatedLine: util.getArg(aArgs, 'line'),\n generatedColumn: util.getArg(aArgs, 'column')\n };\n\n // Find the section containing the generated position we're trying to map\n // to an original position.\n var sectionIndex = binarySearch.search(needle, this._sections,\n function(needle, section) {\n var cmp = needle.generatedLine - section.generatedOffset.generatedLine;\n if (cmp) {\n return cmp;\n }\n\n return (needle.generatedColumn -\n section.generatedOffset.generatedColumn);\n });\n var section = this._sections[sectionIndex];\n\n if (!section) {\n return {\n source: null,\n line: null,\n column: null,\n name: null\n };\n }\n\n return section.consumer.originalPositionFor({\n line: needle.generatedLine -\n (section.generatedOffset.generatedLine - 1),\n column: needle.generatedColumn -\n (section.generatedOffset.generatedLine === needle.generatedLine\n ? section.generatedOffset.generatedColumn - 1\n : 0),\n bias: aArgs.bias\n });\n };\n\n/**\n * Return true if we have the source content for every source in the source\n * map, false otherwise.\n */\nIndexedSourceMapConsumer.prototype.hasContentsOfAllSources =\n function IndexedSourceMapConsumer_hasContentsOfAllSources() {\n return this._sections.every(function (s) {\n return s.consumer.hasContentsOfAllSources();\n });\n };\n\n/**\n * Returns the original source content. The only argument is the url of the\n * original source file. Returns null if no original source content is\n * available.\n */\nIndexedSourceMapConsumer.prototype.sourceContentFor =\n function IndexedSourceMapConsumer_sourceContentFor(aSource, nullOnMissing) {\n for (var i = 0; i < this._sections.length; i++) {\n var section = this._sections[i];\n\n var content = section.consumer.sourceContentFor(aSource, true);\n if (content) {\n return content;\n }\n }\n if (nullOnMissing) {\n return null;\n }\n else {\n throw new Error('\"' + aSource + '\" is not in the SourceMap.');\n }\n };\n\n/**\n * Returns the generated line and column information for the original source,\n * line, and column positions provided. The only argument is an object with\n * the following properties:\n *\n * - source: The filename of the original source.\n * - line: The line number in the original source. The line number\n * is 1-based.\n * - column: The column number in the original source. The column\n * number is 0-based.\n *\n * and an object is returned with the following properties:\n *\n * - line: The line number in the generated source, or null. The\n * line number is 1-based. 
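Continuing the indexed-map sketch above: queries against the concatenated generated file are routed to the section that owns the position, then answered by that section's own map. The expected values assume the example indexed map shown earlier, which carries no sourcesContent.

// 1-based line 101 is 0-based line 100; column 11 sits just inside the section's start column.
consumer.originalPositionFor({ line: 101, column: 11 });
// -> { source: 'foo.js', line: 1, column: 0, name: null }

consumer.hasContentsOfAllSources();          // false: no sourcesContent in the example map
consumer.sourceContentFor('foo.js', true);   // null instead of throwing, thanks to nullOnMissing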
\n * - column: The column number in the generated source, or null.\n * The column number is 0-based.\n */\nIndexedSourceMapConsumer.prototype.generatedPositionFor =\n function IndexedSourceMapConsumer_generatedPositionFor(aArgs) {\n for (var i = 0; i < this._sections.length; i++) {\n var section = this._sections[i];\n\n // Only consider this section if the requested source is in the list of\n // sources of the consumer.\n if (section.consumer._findSourceIndex(util.getArg(aArgs, 'source')) === -1) {\n continue;\n }\n var generatedPosition = section.consumer.generatedPositionFor(aArgs);\n if (generatedPosition) {\n var ret = {\n line: generatedPosition.line +\n (section.generatedOffset.generatedLine - 1),\n column: generatedPosition.column +\n (section.generatedOffset.generatedLine === generatedPosition.line\n ? section.generatedOffset.generatedColumn - 1\n : 0)\n };\n return ret;\n }\n }\n\n return {\n line: null,\n column: null\n };\n };\n\n/**\n * Parse the mappings in a string in to a data structure which we can easily\n * query (the ordered arrays in the `this.__generatedMappings` and\n * `this.__originalMappings` properties).\n */\nIndexedSourceMapConsumer.prototype._parseMappings =\n function IndexedSourceMapConsumer_parseMappings(aStr, aSourceRoot) {\n this.__generatedMappings = [];\n this.__originalMappings = [];\n for (var i = 0; i < this._sections.length; i++) {\n var section = this._sections[i];\n var sectionMappings = section.consumer._generatedMappings;\n for (var j = 0; j < sectionMappings.length; j++) {\n var mapping = sectionMappings[j];\n\n var source = section.consumer._sources.at(mapping.source);\n source = util.computeSourceURL(section.consumer.sourceRoot, source, this._sourceMapURL);\n this._sources.add(source);\n source = this._sources.indexOf(source);\n\n var name = null;\n if (mapping.name) {\n name = section.consumer._names.at(mapping.name);\n this._names.add(name);\n name = this._names.indexOf(name);\n }\n\n // The mappings coming from the consumer for the section have\n // generated positions relative to the start of the section, so we\n // need to offset them to be relative to the start of the concatenated\n // generated file.\n var adjustedMapping = {\n source: source,\n generatedLine: mapping.generatedLine +\n (section.generatedOffset.generatedLine - 1),\n generatedColumn: mapping.generatedColumn +\n (section.generatedOffset.generatedLine === mapping.generatedLine\n ? section.generatedOffset.generatedColumn - 1\n : 0),\n originalLine: mapping.originalLine,\n originalColumn: mapping.originalColumn,\n name: name\n };\n\n this.__generatedMappings.push(adjustedMapping);\n if (typeof adjustedMapping.originalLine === 'number') {\n this.__originalMappings.push(adjustedMapping);\n }\n }\n }\n\n quickSort(this.__generatedMappings, util.compareByGeneratedPositionsDeflated);\n quickSort(this.__originalMappings, util.compareByOriginalPositions);\n };\n\nexports.IndexedSourceMapConsumer = IndexedSourceMapConsumer;\n\n\n\n//////////////////\n// WEBPACK FOOTER\n// ./lib/source-map-consumer.js\n// module id = 7\n// module chunks = 0","/* -*- Mode: js; js-indent-level: 2; -*- */\n/*\n * Copyright 2011 Mozilla Foundation and contributors\n * Licensed under the New BSD license. 
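The indexed generatedPositionFor above asks each section that knows the requested source and shifts the hit back into the concatenated file's coordinates. A sketch continuing the example indexed map, whose third section line carries a mapping for foo.js line 2; the expected numbers are derived from that example, not asserted by the library docs.

consumer.generatedPositionFor({ source: 'foo.js', line: 2, column: 0 });
// -> { line: 103, column: 0 }
// Section line 3 becomes line 103 of app.js because the section starts at
// 0-based line 100; the column shift of 10 only applies on the section's
// opening line, so column 0 is left alone here.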
See LICENSE or:\n * http://opensource.org/licenses/BSD-3-Clause\n */\n\nexports.GREATEST_LOWER_BOUND = 1;\nexports.LEAST_UPPER_BOUND = 2;\n\n/**\n * Recursive implementation of binary search.\n *\n * @param aLow Indices here and lower do not contain the needle.\n * @param aHigh Indices here and higher do not contain the needle.\n * @param aNeedle The element being searched for.\n * @param aHaystack The non-empty array being searched.\n * @param aCompare Function which takes two elements and returns -1, 0, or 1.\n * @param aBias Either 'binarySearch.GREATEST_LOWER_BOUND' or\n * 'binarySearch.LEAST_UPPER_BOUND'. Specifies whether to return the\n * closest element that is smaller than or greater than the one we are\n * searching for, respectively, if the exact element cannot be found.\n */\nfunction recursiveSearch(aLow, aHigh, aNeedle, aHaystack, aCompare, aBias) {\n // This function terminates when one of the following is true:\n //\n // 1. We find the exact element we are looking for.\n //\n // 2. We did not find the exact element, but we can return the index of\n // the next-closest element.\n //\n // 3. We did not find the exact element, and there is no next-closest\n // element than the one we are searching for, so we return -1.\n var mid = Math.floor((aHigh - aLow) / 2) + aLow;\n var cmp = aCompare(aNeedle, aHaystack[mid], true);\n if (cmp === 0) {\n // Found the element we are looking for.\n return mid;\n }\n else if (cmp > 0) {\n // Our needle is greater than aHaystack[mid].\n if (aHigh - mid > 1) {\n // The element is in the upper half.\n return recursiveSearch(mid, aHigh, aNeedle, aHaystack, aCompare, aBias);\n }\n\n // The exact needle element was not found in this haystack. Determine if\n // we are in termination case (3) or (2) and return the appropriate thing.\n if (aBias == exports.LEAST_UPPER_BOUND) {\n return aHigh < aHaystack.length ? aHigh : -1;\n } else {\n return mid;\n }\n }\n else {\n // Our needle is less than aHaystack[mid].\n if (mid - aLow > 1) {\n // The element is in the lower half.\n return recursiveSearch(aLow, mid, aNeedle, aHaystack, aCompare, aBias);\n }\n\n // we are in termination case (3) or (2) and return the appropriate thing.\n if (aBias == exports.LEAST_UPPER_BOUND) {\n return mid;\n } else {\n return aLow < 0 ? -1 : aLow;\n }\n }\n}\n\n/**\n * This is an implementation of binary search which will always try and return\n * the index of the closest element if there is no exact hit. This is because\n * mappings between original and generated line/col pairs are single points,\n * and there is an implicit region between each of them, so a miss just means\n * that you aren't on the very start of a region.\n *\n * @param aNeedle The element you are looking for.\n * @param aHaystack The array that is being searched.\n * @param aCompare A function which takes the needle and an element in the\n * array and returns -1, 0, or 1 depending on whether the needle is less\n * than, equal to, or greater than the element, respectively.\n * @param aBias Either 'binarySearch.GREATEST_LOWER_BOUND' or\n * 'binarySearch.LEAST_UPPER_BOUND'. 
Specifies whether to return the\n * closest element that is smaller than or greater than the one we are\n * searching for, respectively, if the exact element cannot be found.\n * Defaults to 'binarySearch.GREATEST_LOWER_BOUND'.\n */\nexports.search = function search(aNeedle, aHaystack, aCompare, aBias) {\n if (aHaystack.length === 0) {\n return -1;\n }\n\n var index = recursiveSearch(-1, aHaystack.length, aNeedle, aHaystack,\n aCompare, aBias || exports.GREATEST_LOWER_BOUND);\n if (index < 0) {\n return -1;\n }\n\n // We have found either the exact element, or the next-closest element than\n // the one we are searching for. However, there may be more than one such\n // element. Make sure we always return the smallest of these.\n while (index - 1 >= 0) {\n if (aCompare(aHaystack[index], aHaystack[index - 1], true) !== 0) {\n break;\n }\n --index;\n }\n\n return index;\n};\n\n\n\n//////////////////\n// WEBPACK FOOTER\n// ./lib/binary-search.js\n// module id = 8\n// module chunks = 0","/* -*- Mode: js; js-indent-level: 2; -*- */\n/*\n * Copyright 2011 Mozilla Foundation and contributors\n * Licensed under the New BSD license. See LICENSE or:\n * http://opensource.org/licenses/BSD-3-Clause\n */\n\n// It turns out that some (most?) JavaScript engines don't self-host\n// `Array.prototype.sort`. This makes sense because C++ will likely remain\n// faster than JS when doing raw CPU-intensive sorting. However, when using a\n// custom comparator function, calling back and forth between the VM's C++ and\n// JIT'd JS is rather slow *and* loses JIT type information, resulting in\n// worse generated code for the comparator function than would be optimal. In\n// fact, when sorting with a comparator, these costs outweigh the benefits of\n// sorting in C++. By using our own JS-implemented Quick Sort (below), we get\n// a ~3500ms mean speed-up in `bench/bench.html`.\n\n/**\n * Swap the elements indexed by `x` and `y` in the array `ary`.\n *\n * @param {Array} ary\n * The array.\n * @param {Number} x\n * The index of the first item.\n * @param {Number} y\n * The index of the second item.\n */\nfunction swap(ary, x, y) {\n var temp = ary[x];\n ary[x] = ary[y];\n ary[y] = temp;\n}\n\n/**\n * Returns a random integer within the range `low .. high` inclusive.\n *\n * @param {Number} low\n * The lower bound on the range.\n * @param {Number} high\n * The upper bound on the range.\n */\nfunction randomIntInRange(low, high) {\n return Math.round(low + (Math.random() * (high - low)));\n}\n\n/**\n * The Quick Sort algorithm.\n *\n * @param {Array} ary\n * An array to sort.\n * @param {function} comparator\n * Function to use to compare two items.\n * @param {Number} p\n * Start index of the array\n * @param {Number} r\n * End index of the array\n */\nfunction doQuickSort(ary, comparator, p, r) {\n // If our lower bound is less than our upper bound, we (1) partition the\n // array into two pieces and (2) recurse on each half. If it is not, this is\n // the empty array and our base case.\n\n if (p < r) {\n // (1) Partitioning.\n //\n // The partitioning chooses a pivot between `p` and `r` and moves all\n // elements that are less than or equal to the pivot to the before it, and\n // all the elements that are greater than it after it. The effect is that\n // once partition is done, the pivot is in the exact place it will be when\n // the array is put in sorted order, and it will not need to be moved\n // again. 
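A usage sketch for the binary-search helper shown above. The deep require path mirrors the lib/ layout added by this diff and is an assumption; the comparator receives the needle first, exactly as the search code calls it.

var binarySearch = require('source-map/lib/binary-search');

var haystack = [1, 3, 5, 7];
var cmp = function (needle, element) { return needle - element; };

binarySearch.search(4, haystack, cmp);                                   // 1: index of 3 (greatest lower bound, the default)
binarySearch.search(4, haystack, cmp, binarySearch.LEAST_UPPER_BOUND);   // 2: index of 5
binarySearch.search(0, haystack, cmp);                                   // -1: nothing smaller exists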
This runs in O(n) time.\n\n // Always choose a random pivot so that an input array which is reverse\n // sorted does not cause O(n^2) running time.\n var pivotIndex = randomIntInRange(p, r);\n var i = p - 1;\n\n swap(ary, pivotIndex, r);\n var pivot = ary[r];\n\n // Immediately after `j` is incremented in this loop, the following hold\n // true:\n //\n // * Every element in `ary[p .. i]` is less than or equal to the pivot.\n //\n // * Every element in `ary[i+1 .. j-1]` is greater than the pivot.\n for (var j = p; j < r; j++) {\n if (comparator(ary[j], pivot) <= 0) {\n i += 1;\n swap(ary, i, j);\n }\n }\n\n swap(ary, i + 1, j);\n var q = i + 1;\n\n // (2) Recurse on each half.\n\n doQuickSort(ary, comparator, p, q - 1);\n doQuickSort(ary, comparator, q + 1, r);\n }\n}\n\n/**\n * Sort the given array in-place with the given comparator function.\n *\n * @param {Array} ary\n * An array to sort.\n * @param {function} comparator\n * Function to use to compare two items.\n */\nexports.quickSort = function (ary, comparator) {\n doQuickSort(ary, comparator, 0, ary.length - 1);\n};\n\n\n\n//////////////////\n// WEBPACK FOOTER\n// ./lib/quick-sort.js\n// module id = 9\n// module chunks = 0","/* -*- Mode: js; js-indent-level: 2; -*- */\n/*\n * Copyright 2011 Mozilla Foundation and contributors\n * Licensed under the New BSD license. See LICENSE or:\n * http://opensource.org/licenses/BSD-3-Clause\n */\n\nvar SourceMapGenerator = require('./source-map-generator').SourceMapGenerator;\nvar util = require('./util');\n\n// Matches a Windows-style `\\r\\n` newline or a `\\n` newline used by all other\n// operating systems these days (capturing the result).\nvar REGEX_NEWLINE = /(\\r?\\n)/;\n\n// Newline character code for charCodeAt() comparisons\nvar NEWLINE_CODE = 10;\n\n// Private symbol for identifying `SourceNode`s when multiple versions of\n// the source-map library are loaded. This MUST NOT CHANGE across\n// versions!\nvar isSourceNode = \"$$$isSourceNode$$$\";\n\n/**\n * SourceNodes provide a way to abstract over interpolating/concatenating\n * snippets of generated JavaScript source code while maintaining the line and\n * column information associated with the original source code.\n *\n * @param aLine The original line number.\n * @param aColumn The original column number.\n * @param aSource The original source's filename.\n * @param aChunks Optional. An array of strings which are snippets of\n * generated JS, or other SourceNodes.\n * @param aName The original identifier.\n */\nfunction SourceNode(aLine, aColumn, aSource, aChunks, aName) {\n this.children = [];\n this.sourceContents = {};\n this.line = aLine == null ? null : aLine;\n this.column = aColumn == null ? null : aColumn;\n this.source = aSource == null ? null : aSource;\n this.name = aName == null ? null : aName;\n this[isSourceNode] = true;\n if (aChunks != null) this.add(aChunks);\n}\n\n/**\n * Creates a SourceNode from generated code and a SourceMapConsumer.\n *\n * @param aGeneratedCode The generated code\n * @param aSourceMapConsumer The SourceMap for the generated code\n * @param aRelativePath Optional. 
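A small sketch of the quickSort export described above, sorting mappings in place with a comparator, much as the compareByGeneratedPositions* helpers are used elsewhere in this package. The deep require path is an assumption based on the lib/ layout in this diff.

var quickSort = require('source-map/lib/quick-sort').quickSort;

var mappings = [
  { generatedLine: 2, generatedColumn: 0 },
  { generatedLine: 1, generatedColumn: 4 },
  { generatedLine: 1, generatedColumn: 1 }
];

// Sorts in place; the random pivot keeps already-sorted input from degrading to O(n^2).
quickSort(mappings, function (a, b) {
  return (a.generatedLine - b.generatedLine) || (a.generatedColumn - b.generatedColumn);
});
// mappings -> [{1,1}, {1,4}, {2,0}] by generated position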
The path that relative sources in the\n * SourceMapConsumer should be relative to.\n */\nSourceNode.fromStringWithSourceMap =\n function SourceNode_fromStringWithSourceMap(aGeneratedCode, aSourceMapConsumer, aRelativePath) {\n // The SourceNode we want to fill with the generated code\n // and the SourceMap\n var node = new SourceNode();\n\n // All even indices of this array are one line of the generated code,\n // while all odd indices are the newlines between two adjacent lines\n // (since `REGEX_NEWLINE` captures its match).\n // Processed fragments are accessed by calling `shiftNextLine`.\n var remainingLines = aGeneratedCode.split(REGEX_NEWLINE);\n var remainingLinesIndex = 0;\n var shiftNextLine = function() {\n var lineContents = getNextLine();\n // The last line of a file might not have a newline.\n var newLine = getNextLine() || \"\";\n return lineContents + newLine;\n\n function getNextLine() {\n return remainingLinesIndex < remainingLines.length ?\n remainingLines[remainingLinesIndex++] : undefined;\n }\n };\n\n // We need to remember the position of \"remainingLines\"\n var lastGeneratedLine = 1, lastGeneratedColumn = 0;\n\n // The generate SourceNodes we need a code range.\n // To extract it current and last mapping is used.\n // Here we store the last mapping.\n var lastMapping = null;\n\n aSourceMapConsumer.eachMapping(function (mapping) {\n if (lastMapping !== null) {\n // We add the code from \"lastMapping\" to \"mapping\":\n // First check if there is a new line in between.\n if (lastGeneratedLine < mapping.generatedLine) {\n // Associate first line with \"lastMapping\"\n addMappingWithCode(lastMapping, shiftNextLine());\n lastGeneratedLine++;\n lastGeneratedColumn = 0;\n // The remaining code is added without mapping\n } else {\n // There is no new line in between.\n // Associate the code between \"lastGeneratedColumn\" and\n // \"mapping.generatedColumn\" with \"lastMapping\"\n var nextLine = remainingLines[remainingLinesIndex] || '';\n var code = nextLine.substr(0, mapping.generatedColumn -\n lastGeneratedColumn);\n remainingLines[remainingLinesIndex] = nextLine.substr(mapping.generatedColumn -\n lastGeneratedColumn);\n lastGeneratedColumn = mapping.generatedColumn;\n addMappingWithCode(lastMapping, code);\n // No more remaining code, continue\n lastMapping = mapping;\n return;\n }\n }\n // We add the generated code until the first mapping\n // to the SourceNode without any mapping.\n // Each line is added as separate string.\n while (lastGeneratedLine < mapping.generatedLine) {\n node.add(shiftNextLine());\n lastGeneratedLine++;\n }\n if (lastGeneratedColumn < mapping.generatedColumn) {\n var nextLine = remainingLines[remainingLinesIndex] || '';\n node.add(nextLine.substr(0, mapping.generatedColumn));\n remainingLines[remainingLinesIndex] = nextLine.substr(mapping.generatedColumn);\n lastGeneratedColumn = mapping.generatedColumn;\n }\n lastMapping = mapping;\n }, this);\n // We have processed all mappings.\n if (remainingLinesIndex < remainingLines.length) {\n if (lastMapping) {\n // Associate the remaining code in the current line with \"lastMapping\"\n addMappingWithCode(lastMapping, shiftNextLine());\n }\n // and add the remaining lines without any mapping\n node.add(remainingLines.splice(remainingLinesIndex).join(\"\"));\n }\n\n // Copy sourcesContent into SourceNode\n aSourceMapConsumer.sources.forEach(function (sourceFile) {\n var content = aSourceMapConsumer.sourceContentFor(sourceFile);\n if (content != null) {\n if (aRelativePath != null) {\n sourceFile = 
util.join(aRelativePath, sourceFile);\n }\n node.setSourceContent(sourceFile, content);\n }\n });\n\n return node;\n\n function addMappingWithCode(mapping, code) {\n if (mapping === null || mapping.source === undefined) {\n node.add(code);\n } else {\n var source = aRelativePath\n ? util.join(aRelativePath, mapping.source)\n : mapping.source;\n node.add(new SourceNode(mapping.originalLine,\n mapping.originalColumn,\n source,\n code,\n mapping.name));\n }\n }\n };\n\n/**\n * Add a chunk of generated JS to this source node.\n *\n * @param aChunk A string snippet of generated JS code, another instance of\n * SourceNode, or an array where each member is one of those things.\n */\nSourceNode.prototype.add = function SourceNode_add(aChunk) {\n if (Array.isArray(aChunk)) {\n aChunk.forEach(function (chunk) {\n this.add(chunk);\n }, this);\n }\n else if (aChunk[isSourceNode] || typeof aChunk === \"string\") {\n if (aChunk) {\n this.children.push(aChunk);\n }\n }\n else {\n throw new TypeError(\n \"Expected a SourceNode, string, or an array of SourceNodes and strings. Got \" + aChunk\n );\n }\n return this;\n};\n\n/**\n * Add a chunk of generated JS to the beginning of this source node.\n *\n * @param aChunk A string snippet of generated JS code, another instance of\n * SourceNode, or an array where each member is one of those things.\n */\nSourceNode.prototype.prepend = function SourceNode_prepend(aChunk) {\n if (Array.isArray(aChunk)) {\n for (var i = aChunk.length-1; i >= 0; i--) {\n this.prepend(aChunk[i]);\n }\n }\n else if (aChunk[isSourceNode] || typeof aChunk === \"string\") {\n this.children.unshift(aChunk);\n }\n else {\n throw new TypeError(\n \"Expected a SourceNode, string, or an array of SourceNodes and strings. Got \" + aChunk\n );\n }\n return this;\n};\n\n/**\n * Walk over the tree of JS snippets in this node and its children. The\n * walking function is called once for each snippet of JS and is passed that\n * snippet and the its original associated source's line/column location.\n *\n * @param aFn The traversal function.\n */\nSourceNode.prototype.walk = function SourceNode_walk(aFn) {\n var chunk;\n for (var i = 0, len = this.children.length; i < len; i++) {\n chunk = this.children[i];\n if (chunk[isSourceNode]) {\n chunk.walk(aFn);\n }\n else {\n if (chunk !== '') {\n aFn(chunk, { source: this.source,\n line: this.line,\n column: this.column,\n name: this.name });\n }\n }\n }\n};\n\n/**\n * Like `String.prototype.join` except for SourceNodes. Inserts `aStr` between\n * each of `this.children`.\n *\n * @param aSep The separator.\n */\nSourceNode.prototype.join = function SourceNode_join(aSep) {\n var newChildren;\n var i;\n var len = this.children.length;\n if (len > 0) {\n newChildren = [];\n for (i = 0; i < len-1; i++) {\n newChildren.push(this.children[i]);\n newChildren.push(aSep);\n }\n newChildren.push(this.children[i]);\n this.children = newChildren;\n }\n return this;\n};\n\n/**\n * Call String.prototype.replace on the very right-most source snippet. 
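A brief sketch of fromStringWithSourceMap as documented above. Here generatedCode and consumer are placeholders for previously generated output and a SourceMapConsumer over its map, and 'src/' is an assumed relative path.

var SourceNode = require('source-map').SourceNode;

var node = SourceNode.fromStringWithSourceMap(generatedCode, consumer, 'src/');
// node now reproduces generatedCode chunk by chunk, each chunk tagged with the
// original source/line/column reported by the consumer, plus any sourcesContent.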
Useful\n * for trimming whitespace from the end of a source node, etc.\n *\n * @param aPattern The pattern to replace.\n * @param aReplacement The thing to replace the pattern with.\n */\nSourceNode.prototype.replaceRight = function SourceNode_replaceRight(aPattern, aReplacement) {\n var lastChild = this.children[this.children.length - 1];\n if (lastChild[isSourceNode]) {\n lastChild.replaceRight(aPattern, aReplacement);\n }\n else if (typeof lastChild === 'string') {\n this.children[this.children.length - 1] = lastChild.replace(aPattern, aReplacement);\n }\n else {\n this.children.push(''.replace(aPattern, aReplacement));\n }\n return this;\n};\n\n/**\n * Set the source content for a source file. This will be added to the SourceMapGenerator\n * in the sourcesContent field.\n *\n * @param aSourceFile The filename of the source file\n * @param aSourceContent The content of the source file\n */\nSourceNode.prototype.setSourceContent =\n function SourceNode_setSourceContent(aSourceFile, aSourceContent) {\n this.sourceContents[util.toSetString(aSourceFile)] = aSourceContent;\n };\n\n/**\n * Walk over the tree of SourceNodes. The walking function is called for each\n * source file content and is passed the filename and source content.\n *\n * @param aFn The traversal function.\n */\nSourceNode.prototype.walkSourceContents =\n function SourceNode_walkSourceContents(aFn) {\n for (var i = 0, len = this.children.length; i < len; i++) {\n if (this.children[i][isSourceNode]) {\n this.children[i].walkSourceContents(aFn);\n }\n }\n\n var sources = Object.keys(this.sourceContents);\n for (var i = 0, len = sources.length; i < len; i++) {\n aFn(util.fromSetString(sources[i]), this.sourceContents[sources[i]]);\n }\n };\n\n/**\n * Return the string representation of this source node. 
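A sketch of composing output with the SourceNode APIs documented above (toString and toStringWithSourceMap are implemented just below). The file names and the top-level require are assumptions for illustration.

var SourceNode = require('source-map').SourceNode;

var node = new SourceNode(null, null, null, [
  new SourceNode(1, 0, 'a.js', 'var a = 1;\n', 'a'),
  new SourceNode(1, 0, 'b.js', 'var b = 2;\n')
]);
node.prepend('// generated\n');
node.setSourceContent('a.js', 'let a = 1;\n');
node.replaceRight(/\n$/, '');   // trims the final newline; only the right-most string chunk is touched

var out = node.toStringWithSourceMap({ file: 'bundle.js' });
// out.code -> the concatenated text, out.map -> a SourceMapGenerator for it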
Walks over the tree\n * and concatenates all the various snippets together to one string.\n */\nSourceNode.prototype.toString = function SourceNode_toString() {\n var str = \"\";\n this.walk(function (chunk) {\n str += chunk;\n });\n return str;\n};\n\n/**\n * Returns the string representation of this source node along with a source\n * map.\n */\nSourceNode.prototype.toStringWithSourceMap = function SourceNode_toStringWithSourceMap(aArgs) {\n var generated = {\n code: \"\",\n line: 1,\n column: 0\n };\n var map = new SourceMapGenerator(aArgs);\n var sourceMappingActive = false;\n var lastOriginalSource = null;\n var lastOriginalLine = null;\n var lastOriginalColumn = null;\n var lastOriginalName = null;\n this.walk(function (chunk, original) {\n generated.code += chunk;\n if (original.source !== null\n && original.line !== null\n && original.column !== null) {\n if(lastOriginalSource !== original.source\n || lastOriginalLine !== original.line\n || lastOriginalColumn !== original.column\n || lastOriginalName !== original.name) {\n map.addMapping({\n source: original.source,\n original: {\n line: original.line,\n column: original.column\n },\n generated: {\n line: generated.line,\n column: generated.column\n },\n name: original.name\n });\n }\n lastOriginalSource = original.source;\n lastOriginalLine = original.line;\n lastOriginalColumn = original.column;\n lastOriginalName = original.name;\n sourceMappingActive = true;\n } else if (sourceMappingActive) {\n map.addMapping({\n generated: {\n line: generated.line,\n column: generated.column\n }\n });\n lastOriginalSource = null;\n sourceMappingActive = false;\n }\n for (var idx = 0, length = chunk.length; idx < length; idx++) {\n if (chunk.charCodeAt(idx) === NEWLINE_CODE) {\n generated.line++;\n generated.column = 0;\n // Mappings end at eol\n if (idx + 1 === length) {\n lastOriginalSource = null;\n sourceMappingActive = false;\n } else if (sourceMappingActive) {\n map.addMapping({\n source: original.source,\n original: {\n line: original.line,\n column: original.column\n },\n generated: {\n line: generated.line,\n column: generated.column\n },\n name: original.name\n });\n }\n } else {\n generated.column++;\n }\n }\n });\n this.walkSourceContents(function (sourceFile, sourceContent) {\n map.setSourceContent(sourceFile, sourceContent);\n });\n\n return { code: generated.code, map: map };\n};\n\nexports.SourceNode = SourceNode;\n\n\n\n//////////////////\n// WEBPACK FOOTER\n// ./lib/source-node.js\n// module id = 10\n// module chunks = 0"],"sourceRoot":""} \ No newline at end of file diff --git a/node_modules/source-map/lib/array-set.js b/node_modules/source-map/lib/array-set.js new file mode 100644 index 0000000..fbd5c81 --- /dev/null +++ b/node_modules/source-map/lib/array-set.js @@ -0,0 +1,121 @@ +/* -*- Mode: js; js-indent-level: 2; -*- */ +/* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + +var util = require('./util'); +var has = Object.prototype.hasOwnProperty; +var hasNativeMap = typeof Map !== "undefined"; + +/** + * A data structure which is a combination of an array and a set. Adding a new + * member is O(1), testing for membership is O(1), and finding the index of an + * element is O(1). Removing elements from the set is not supported. Only + * strings are supported for membership. + */ +function ArraySet() { + this._array = []; + this._set = hasNativeMap ? 
new Map() : Object.create(null); +} + +/** + * Static method for creating ArraySet instances from an existing array. + */ +ArraySet.fromArray = function ArraySet_fromArray(aArray, aAllowDuplicates) { + var set = new ArraySet(); + for (var i = 0, len = aArray.length; i < len; i++) { + set.add(aArray[i], aAllowDuplicates); + } + return set; +}; + +/** + * Return how many unique items are in this ArraySet. If duplicates have been + * added, than those do not count towards the size. + * + * @returns Number + */ +ArraySet.prototype.size = function ArraySet_size() { + return hasNativeMap ? this._set.size : Object.getOwnPropertyNames(this._set).length; +}; + +/** + * Add the given string to this set. + * + * @param String aStr + */ +ArraySet.prototype.add = function ArraySet_add(aStr, aAllowDuplicates) { + var sStr = hasNativeMap ? aStr : util.toSetString(aStr); + var isDuplicate = hasNativeMap ? this.has(aStr) : has.call(this._set, sStr); + var idx = this._array.length; + if (!isDuplicate || aAllowDuplicates) { + this._array.push(aStr); + } + if (!isDuplicate) { + if (hasNativeMap) { + this._set.set(aStr, idx); + } else { + this._set[sStr] = idx; + } + } +}; + +/** + * Is the given string a member of this set? + * + * @param String aStr + */ +ArraySet.prototype.has = function ArraySet_has(aStr) { + if (hasNativeMap) { + return this._set.has(aStr); + } else { + var sStr = util.toSetString(aStr); + return has.call(this._set, sStr); + } +}; + +/** + * What is the index of the given string in the array? + * + * @param String aStr + */ +ArraySet.prototype.indexOf = function ArraySet_indexOf(aStr) { + if (hasNativeMap) { + var idx = this._set.get(aStr); + if (idx >= 0) { + return idx; + } + } else { + var sStr = util.toSetString(aStr); + if (has.call(this._set, sStr)) { + return this._set[sStr]; + } + } + + throw new Error('"' + aStr + '" is not in the set.'); +}; + +/** + * What is the element at the given index? + * + * @param Number aIdx + */ +ArraySet.prototype.at = function ArraySet_at(aIdx) { + if (aIdx >= 0 && aIdx < this._array.length) { + return this._array[aIdx]; + } + throw new Error('No element indexed by ' + aIdx); +}; + +/** + * Returns the array representation of this set (which has the proper indices + * indicated by indexOf). Note that this is a copy of the internal array used + * for storing the members so that no one can mess with internal state. + */ +ArraySet.prototype.toArray = function ArraySet_toArray() { + return this._array.slice(); +}; + +exports.ArraySet = ArraySet; diff --git a/node_modules/source-map/lib/base64-vlq.js b/node_modules/source-map/lib/base64-vlq.js new file mode 100644 index 0000000..612b404 --- /dev/null +++ b/node_modules/source-map/lib/base64-vlq.js @@ -0,0 +1,140 @@ +/* -*- Mode: js; js-indent-level: 2; -*- */ +/* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + * + * Based on the Base 64 VLQ implementation in Closure Compiler: + * https://code.google.com/p/closure-compiler/source/browse/trunk/src/com/google/debugging/sourcemap/Base64VLQ.java + * + * Copyright 2011 The Closure Compiler Authors. All rights reserved. + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are + * met: + * + * * Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. 
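A usage sketch for the ArraySet added above. It is an internal module, so the deep require path is an assumption based on this diff's lib/ layout.

var ArraySet = require('source-map/lib/array-set').ArraySet;

var set = ArraySet.fromArray(['foo.js', 'bar.js']);
set.add('baz.js');
set.has('foo.js');      // true
set.indexOf('bar.js');  // 1
set.at(2);              // 'baz.js'
set.toArray();          // a copy: ['foo.js', 'bar.js', 'baz.js']
set.size();             // 3 (duplicates added with aAllowDuplicates would not count)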
+ * * Redistributions in binary form must reproduce the above + * copyright notice, this list of conditions and the following + * disclaimer in the documentation and/or other materials provided + * with the distribution. + * * Neither the name of Google Inc. nor the names of its + * contributors may be used to endorse or promote products derived + * from this software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ + +var base64 = require('./base64'); + +// A single base 64 digit can contain 6 bits of data. For the base 64 variable +// length quantities we use in the source map spec, the first bit is the sign, +// the next four bits are the actual value, and the 6th bit is the +// continuation bit. The continuation bit tells us whether there are more +// digits in this value following this digit. +// +// Continuation +// | Sign +// | | +// V V +// 101011 + +var VLQ_BASE_SHIFT = 5; + +// binary: 100000 +var VLQ_BASE = 1 << VLQ_BASE_SHIFT; + +// binary: 011111 +var VLQ_BASE_MASK = VLQ_BASE - 1; + +// binary: 100000 +var VLQ_CONTINUATION_BIT = VLQ_BASE; + +/** + * Converts from a two-complement value to a value where the sign bit is + * placed in the least significant bit. For example, as decimals: + * 1 becomes 2 (10 binary), -1 becomes 3 (11 binary) + * 2 becomes 4 (100 binary), -2 becomes 5 (101 binary) + */ +function toVLQSigned(aValue) { + return aValue < 0 + ? ((-aValue) << 1) + 1 + : (aValue << 1) + 0; +} + +/** + * Converts to a two-complement value from a value where the sign bit is + * placed in the least significant bit. For example, as decimals: + * 2 (10 binary) becomes 1, 3 (11 binary) becomes -1 + * 4 (100 binary) becomes 2, 5 (101 binary) becomes -2 + */ +function fromVLQSigned(aValue) { + var isNegative = (aValue & 1) === 1; + var shifted = aValue >> 1; + return isNegative + ? -shifted + : shifted; +} + +/** + * Returns the base 64 VLQ encoded value. + */ +exports.encode = function base64VLQ_encode(aValue) { + var encoded = ""; + var digit; + + var vlq = toVLQSigned(aValue); + + do { + digit = vlq & VLQ_BASE_MASK; + vlq >>>= VLQ_BASE_SHIFT; + if (vlq > 0) { + // There are still more digits in this value, so we must make sure the + // continuation bit is marked. + digit |= VLQ_CONTINUATION_BIT; + } + encoded += base64.encode(digit); + } while (vlq > 0); + + return encoded; +}; + +/** + * Decodes the next base 64 VLQ value from the given string and returns the + * value and the rest of the string via the out parameter. 
+ */ +exports.decode = function base64VLQ_decode(aStr, aIndex, aOutParam) { + var strLen = aStr.length; + var result = 0; + var shift = 0; + var continuation, digit; + + do { + if (aIndex >= strLen) { + throw new Error("Expected more digits in base 64 VLQ value."); + } + + digit = base64.decode(aStr.charCodeAt(aIndex++)); + if (digit === -1) { + throw new Error("Invalid base64 digit: " + aStr.charAt(aIndex - 1)); + } + + continuation = !!(digit & VLQ_CONTINUATION_BIT); + digit &= VLQ_BASE_MASK; + result = result + (digit << shift); + shift += VLQ_BASE_SHIFT; + } while (continuation); + + aOutParam.value = fromVLQSigned(result); + aOutParam.rest = aIndex; +}; diff --git a/node_modules/source-map/lib/base64.js b/node_modules/source-map/lib/base64.js new file mode 100644 index 0000000..8aa86b3 --- /dev/null +++ b/node_modules/source-map/lib/base64.js @@ -0,0 +1,67 @@ +/* -*- Mode: js; js-indent-level: 2; -*- */ +/* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + +var intToCharMap = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'.split(''); + +/** + * Encode an integer in the range of 0 to 63 to a single base 64 digit. + */ +exports.encode = function (number) { + if (0 <= number && number < intToCharMap.length) { + return intToCharMap[number]; + } + throw new TypeError("Must be between 0 and 63: " + number); +}; + +/** + * Decode a single base 64 character code digit to an integer. Returns -1 on + * failure. + */ +exports.decode = function (charCode) { + var bigA = 65; // 'A' + var bigZ = 90; // 'Z' + + var littleA = 97; // 'a' + var littleZ = 122; // 'z' + + var zero = 48; // '0' + var nine = 57; // '9' + + var plus = 43; // '+' + var slash = 47; // '/' + + var littleOffset = 26; + var numberOffset = 52; + + // 0 - 25: ABCDEFGHIJKLMNOPQRSTUVWXYZ + if (bigA <= charCode && charCode <= bigZ) { + return (charCode - bigA); + } + + // 26 - 51: abcdefghijklmnopqrstuvwxyz + if (littleA <= charCode && charCode <= littleZ) { + return (charCode - littleA + littleOffset); + } + + // 52 - 61: 0123456789 + if (zero <= charCode && charCode <= nine) { + return (charCode - zero + numberOffset); + } + + // 62: + + if (charCode == plus) { + return 62; + } + + // 63: / + if (charCode == slash) { + return 63; + } + + // Invalid base64 digit. + return -1; +}; diff --git a/node_modules/source-map/lib/binary-search.js b/node_modules/source-map/lib/binary-search.js new file mode 100644 index 0000000..010ac94 --- /dev/null +++ b/node_modules/source-map/lib/binary-search.js @@ -0,0 +1,111 @@ +/* -*- Mode: js; js-indent-level: 2; -*- */ +/* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + +exports.GREATEST_LOWER_BOUND = 1; +exports.LEAST_UPPER_BOUND = 2; + +/** + * Recursive implementation of binary search. + * + * @param aLow Indices here and lower do not contain the needle. + * @param aHigh Indices here and higher do not contain the needle. + * @param aNeedle The element being searched for. + * @param aHaystack The non-empty array being searched. + * @param aCompare Function which takes two elements and returns -1, 0, or 1. + * @param aBias Either 'binarySearch.GREATEST_LOWER_BOUND' or + * 'binarySearch.LEAST_UPPER_BOUND'. 
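A small sketch of the base64 VLQ codec defined above. The deep require path is an assumption based on this diff's lib/ layout; the encoded digits follow from the sign-bit-in-LSB scheme described in the comments.

var base64VLQ = require('source-map/lib/base64-vlq');

base64VLQ.encode(16);    // 'gB'  (16 -> VLQ-signed 32 -> two base64 digits)
base64VLQ.encode(-1);    // 'D'   (-1 -> VLQ-signed 3)

var out = {};
base64VLQ.decode('gB', 0, out);
// out.value -> 16, out.rest -> 2 (index just past the digits consumed)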
Specifies whether to return the + * closest element that is smaller than or greater than the one we are + * searching for, respectively, if the exact element cannot be found. + */ +function recursiveSearch(aLow, aHigh, aNeedle, aHaystack, aCompare, aBias) { + // This function terminates when one of the following is true: + // + // 1. We find the exact element we are looking for. + // + // 2. We did not find the exact element, but we can return the index of + // the next-closest element. + // + // 3. We did not find the exact element, and there is no next-closest + // element than the one we are searching for, so we return -1. + var mid = Math.floor((aHigh - aLow) / 2) + aLow; + var cmp = aCompare(aNeedle, aHaystack[mid], true); + if (cmp === 0) { + // Found the element we are looking for. + return mid; + } + else if (cmp > 0) { + // Our needle is greater than aHaystack[mid]. + if (aHigh - mid > 1) { + // The element is in the upper half. + return recursiveSearch(mid, aHigh, aNeedle, aHaystack, aCompare, aBias); + } + + // The exact needle element was not found in this haystack. Determine if + // we are in termination case (3) or (2) and return the appropriate thing. + if (aBias == exports.LEAST_UPPER_BOUND) { + return aHigh < aHaystack.length ? aHigh : -1; + } else { + return mid; + } + } + else { + // Our needle is less than aHaystack[mid]. + if (mid - aLow > 1) { + // The element is in the lower half. + return recursiveSearch(aLow, mid, aNeedle, aHaystack, aCompare, aBias); + } + + // we are in termination case (3) or (2) and return the appropriate thing. + if (aBias == exports.LEAST_UPPER_BOUND) { + return mid; + } else { + return aLow < 0 ? -1 : aLow; + } + } +} + +/** + * This is an implementation of binary search which will always try and return + * the index of the closest element if there is no exact hit. This is because + * mappings between original and generated line/col pairs are single points, + * and there is an implicit region between each of them, so a miss just means + * that you aren't on the very start of a region. + * + * @param aNeedle The element you are looking for. + * @param aHaystack The array that is being searched. + * @param aCompare A function which takes the needle and an element in the + * array and returns -1, 0, or 1 depending on whether the needle is less + * than, equal to, or greater than the element, respectively. + * @param aBias Either 'binarySearch.GREATEST_LOWER_BOUND' or + * 'binarySearch.LEAST_UPPER_BOUND'. Specifies whether to return the + * closest element that is smaller than or greater than the one we are + * searching for, respectively, if the exact element cannot be found. + * Defaults to 'binarySearch.GREATEST_LOWER_BOUND'. + */ +exports.search = function search(aNeedle, aHaystack, aCompare, aBias) { + if (aHaystack.length === 0) { + return -1; + } + + var index = recursiveSearch(-1, aHaystack.length, aNeedle, aHaystack, + aCompare, aBias || exports.GREATEST_LOWER_BOUND); + if (index < 0) { + return -1; + } + + // We have found either the exact element, or the next-closest element than + // the one we are searching for. However, there may be more than one such + // element. Make sure we always return the smallest of these. 
+ while (index - 1 >= 0) { + if (aCompare(aHaystack[index], aHaystack[index - 1], true) !== 0) { + break; + } + --index; + } + + return index; +}; diff --git a/node_modules/source-map/lib/mapping-list.js b/node_modules/source-map/lib/mapping-list.js new file mode 100644 index 0000000..06d1274 --- /dev/null +++ b/node_modules/source-map/lib/mapping-list.js @@ -0,0 +1,79 @@ +/* -*- Mode: js; js-indent-level: 2; -*- */ +/* + * Copyright 2014 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + +var util = require('./util'); + +/** + * Determine whether mappingB is after mappingA with respect to generated + * position. + */ +function generatedPositionAfter(mappingA, mappingB) { + // Optimized for most common case + var lineA = mappingA.generatedLine; + var lineB = mappingB.generatedLine; + var columnA = mappingA.generatedColumn; + var columnB = mappingB.generatedColumn; + return lineB > lineA || lineB == lineA && columnB >= columnA || + util.compareByGeneratedPositionsInflated(mappingA, mappingB) <= 0; +} + +/** + * A data structure to provide a sorted view of accumulated mappings in a + * performance conscious manner. It trades a neglibable overhead in general + * case for a large speedup in case of mappings being added in order. + */ +function MappingList() { + this._array = []; + this._sorted = true; + // Serves as infimum + this._last = {generatedLine: -1, generatedColumn: 0}; +} + +/** + * Iterate through internal items. This method takes the same arguments that + * `Array.prototype.forEach` takes. + * + * NOTE: The order of the mappings is NOT guaranteed. + */ +MappingList.prototype.unsortedForEach = + function MappingList_forEach(aCallback, aThisArg) { + this._array.forEach(aCallback, aThisArg); + }; + +/** + * Add the given source mapping. + * + * @param Object aMapping + */ +MappingList.prototype.add = function MappingList_add(aMapping) { + if (generatedPositionAfter(this._last, aMapping)) { + this._last = aMapping; + this._array.push(aMapping); + } else { + this._sorted = false; + this._array.push(aMapping); + } +}; + +/** + * Returns the flat, sorted array of mappings. The mappings are sorted by + * generated position. + * + * WARNING: This method returns internal data without copying, for + * performance. The return value must NOT be mutated, and should be treated as + * an immutable borrow. If you want to take ownership, you must make your own + * copy. + */ +MappingList.prototype.toArray = function MappingList_toArray() { + if (!this._sorted) { + this._array.sort(util.compareByGeneratedPositionsInflated); + this._sorted = true; + } + return this._array; +}; + +exports.MappingList = MappingList; diff --git a/node_modules/source-map/lib/quick-sort.js b/node_modules/source-map/lib/quick-sort.js new file mode 100644 index 0000000..6a7caad --- /dev/null +++ b/node_modules/source-map/lib/quick-sort.js @@ -0,0 +1,114 @@ +/* -*- Mode: js; js-indent-level: 2; -*- */ +/* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + +// It turns out that some (most?) JavaScript engines don't self-host +// `Array.prototype.sort`. This makes sense because C++ will likely remain +// faster than JS when doing raw CPU-intensive sorting. 
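A sketch of the MappingList added above: in-order adds take the fast path and keep the list sorted, out-of-order adds flag a deferred sort that toArray performs. The deep require path is an assumption.

var MappingList = require('source-map/lib/mapping-list').MappingList;

var list = new MappingList();
list.add({ generatedLine: 1, generatedColumn: 0 });
list.add({ generatedLine: 2, generatedColumn: 4 });   // in order: fast path
list.add({ generatedLine: 1, generatedColumn: 2 });   // out of order: sorted lazily later

list.toArray();                                        // sorted by generated position; do NOT mutate the result
list.unsortedForEach(function (m) { /* order not guaranteed */ });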
However, when using a +// custom comparator function, calling back and forth between the VM's C++ and +// JIT'd JS is rather slow *and* loses JIT type information, resulting in +// worse generated code for the comparator function than would be optimal. In +// fact, when sorting with a comparator, these costs outweigh the benefits of +// sorting in C++. By using our own JS-implemented Quick Sort (below), we get +// a ~3500ms mean speed-up in `bench/bench.html`. + +/** + * Swap the elements indexed by `x` and `y` in the array `ary`. + * + * @param {Array} ary + * The array. + * @param {Number} x + * The index of the first item. + * @param {Number} y + * The index of the second item. + */ +function swap(ary, x, y) { + var temp = ary[x]; + ary[x] = ary[y]; + ary[y] = temp; +} + +/** + * Returns a random integer within the range `low .. high` inclusive. + * + * @param {Number} low + * The lower bound on the range. + * @param {Number} high + * The upper bound on the range. + */ +function randomIntInRange(low, high) { + return Math.round(low + (Math.random() * (high - low))); +} + +/** + * The Quick Sort algorithm. + * + * @param {Array} ary + * An array to sort. + * @param {function} comparator + * Function to use to compare two items. + * @param {Number} p + * Start index of the array + * @param {Number} r + * End index of the array + */ +function doQuickSort(ary, comparator, p, r) { + // If our lower bound is less than our upper bound, we (1) partition the + // array into two pieces and (2) recurse on each half. If it is not, this is + // the empty array and our base case. + + if (p < r) { + // (1) Partitioning. + // + // The partitioning chooses a pivot between `p` and `r` and moves all + // elements that are less than or equal to the pivot to the before it, and + // all the elements that are greater than it after it. The effect is that + // once partition is done, the pivot is in the exact place it will be when + // the array is put in sorted order, and it will not need to be moved + // again. This runs in O(n) time. + + // Always choose a random pivot so that an input array which is reverse + // sorted does not cause O(n^2) running time. + var pivotIndex = randomIntInRange(p, r); + var i = p - 1; + + swap(ary, pivotIndex, r); + var pivot = ary[r]; + + // Immediately after `j` is incremented in this loop, the following hold + // true: + // + // * Every element in `ary[p .. i]` is less than or equal to the pivot. + // + // * Every element in `ary[i+1 .. j-1]` is greater than the pivot. + for (var j = p; j < r; j++) { + if (comparator(ary[j], pivot) <= 0) { + i += 1; + swap(ary, i, j); + } + } + + swap(ary, i + 1, j); + var q = i + 1; + + // (2) Recurse on each half. + + doQuickSort(ary, comparator, p, q - 1); + doQuickSort(ary, comparator, q + 1, r); + } +} + +/** + * Sort the given array in-place with the given comparator function. + * + * @param {Array} ary + * An array to sort. + * @param {function} comparator + * Function to use to compare two items. + */ +exports.quickSort = function (ary, comparator) { + doQuickSort(ary, comparator, 0, ary.length - 1); +}; diff --git a/node_modules/source-map/lib/source-map-consumer.js b/node_modules/source-map/lib/source-map-consumer.js new file mode 100644 index 0000000..7b99d1d --- /dev/null +++ b/node_modules/source-map/lib/source-map-consumer.js @@ -0,0 +1,1145 @@ +/* -*- Mode: js; js-indent-level: 2; -*- */ +/* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. 
See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + +var util = require('./util'); +var binarySearch = require('./binary-search'); +var ArraySet = require('./array-set').ArraySet; +var base64VLQ = require('./base64-vlq'); +var quickSort = require('./quick-sort').quickSort; + +function SourceMapConsumer(aSourceMap, aSourceMapURL) { + var sourceMap = aSourceMap; + if (typeof aSourceMap === 'string') { + sourceMap = util.parseSourceMapInput(aSourceMap); + } + + return sourceMap.sections != null + ? new IndexedSourceMapConsumer(sourceMap, aSourceMapURL) + : new BasicSourceMapConsumer(sourceMap, aSourceMapURL); +} + +SourceMapConsumer.fromSourceMap = function(aSourceMap, aSourceMapURL) { + return BasicSourceMapConsumer.fromSourceMap(aSourceMap, aSourceMapURL); +} + +/** + * The version of the source mapping spec that we are consuming. + */ +SourceMapConsumer.prototype._version = 3; + +// `__generatedMappings` and `__originalMappings` are arrays that hold the +// parsed mapping coordinates from the source map's "mappings" attribute. They +// are lazily instantiated, accessed via the `_generatedMappings` and +// `_originalMappings` getters respectively, and we only parse the mappings +// and create these arrays once queried for a source location. We jump through +// these hoops because there can be many thousands of mappings, and parsing +// them is expensive, so we only want to do it if we must. +// +// Each object in the arrays is of the form: +// +// { +// generatedLine: The line number in the generated code, +// generatedColumn: The column number in the generated code, +// source: The path to the original source file that generated this +// chunk of code, +// originalLine: The line number in the original source that +// corresponds to this chunk of generated code, +// originalColumn: The column number in the original source that +// corresponds to this chunk of generated code, +// name: The name of the original symbol which generated this chunk of +// code. +// } +// +// All properties except for `generatedLine` and `generatedColumn` can be +// `null`. +// +// `_generatedMappings` is ordered by the generated positions. +// +// `_originalMappings` is ordered by the original positions. + +SourceMapConsumer.prototype.__generatedMappings = null; +Object.defineProperty(SourceMapConsumer.prototype, '_generatedMappings', { + configurable: true, + enumerable: true, + get: function () { + if (!this.__generatedMappings) { + this._parseMappings(this._mappings, this.sourceRoot); + } + + return this.__generatedMappings; + } +}); + +SourceMapConsumer.prototype.__originalMappings = null; +Object.defineProperty(SourceMapConsumer.prototype, '_originalMappings', { + configurable: true, + enumerable: true, + get: function () { + if (!this.__originalMappings) { + this._parseMappings(this._mappings, this.sourceRoot); + } + + return this.__originalMappings; + } +}); + +SourceMapConsumer.prototype._charIsMappingSeparator = + function SourceMapConsumer_charIsMappingSeparator(aStr, index) { + var c = aStr.charAt(index); + return c === ";" || c === ","; + }; + +/** + * Parse the mappings in a string in to a data structure which we can easily + * query (the ordered arrays in the `this.__generatedMappings` and + * `this.__originalMappings` properties). 
+ */ +SourceMapConsumer.prototype._parseMappings = + function SourceMapConsumer_parseMappings(aStr, aSourceRoot) { + throw new Error("Subclasses must implement _parseMappings"); + }; + +SourceMapConsumer.GENERATED_ORDER = 1; +SourceMapConsumer.ORIGINAL_ORDER = 2; + +SourceMapConsumer.GREATEST_LOWER_BOUND = 1; +SourceMapConsumer.LEAST_UPPER_BOUND = 2; + +/** + * Iterate over each mapping between an original source/line/column and a + * generated line/column in this source map. + * + * @param Function aCallback + * The function that is called with each mapping. + * @param Object aContext + * Optional. If specified, this object will be the value of `this` every + * time that `aCallback` is called. + * @param aOrder + * Either `SourceMapConsumer.GENERATED_ORDER` or + * `SourceMapConsumer.ORIGINAL_ORDER`. Specifies whether you want to + * iterate over the mappings sorted by the generated file's line/column + * order or the original's source/line/column order, respectively. Defaults to + * `SourceMapConsumer.GENERATED_ORDER`. + */ +SourceMapConsumer.prototype.eachMapping = + function SourceMapConsumer_eachMapping(aCallback, aContext, aOrder) { + var context = aContext || null; + var order = aOrder || SourceMapConsumer.GENERATED_ORDER; + + var mappings; + switch (order) { + case SourceMapConsumer.GENERATED_ORDER: + mappings = this._generatedMappings; + break; + case SourceMapConsumer.ORIGINAL_ORDER: + mappings = this._originalMappings; + break; + default: + throw new Error("Unknown order of iteration."); + } + + var sourceRoot = this.sourceRoot; + mappings.map(function (mapping) { + var source = mapping.source === null ? null : this._sources.at(mapping.source); + source = util.computeSourceURL(sourceRoot, source, this._sourceMapURL); + return { + source: source, + generatedLine: mapping.generatedLine, + generatedColumn: mapping.generatedColumn, + originalLine: mapping.originalLine, + originalColumn: mapping.originalColumn, + name: mapping.name === null ? null : this._names.at(mapping.name) + }; + }, this).forEach(aCallback, context); + }; + +/** + * Returns all generated line and column information for the original source, + * line, and column provided. If no column is provided, returns all mappings + * corresponding to a either the line we are searching for or the next + * closest line that has any mappings. Otherwise, returns all mappings + * corresponding to the given line and either the column we are searching for + * or the next closest column that has any offsets. + * + * The only argument is an object with the following properties: + * + * - source: The filename of the original source. + * - line: The line number in the original source. The line number is 1-based. + * - column: Optional. the column number in the original source. + * The column number is 0-based. + * + * and an array of objects is returned, each with the following properties: + * + * - line: The line number in the generated source, or null. The + * line number is 1-based. + * - column: The column number in the generated source, or null. + * The column number is 0-based. + */ +SourceMapConsumer.prototype.allGeneratedPositionsFor = + function SourceMapConsumer_allGeneratedPositionsFor(aArgs) { + var line = util.getArg(aArgs, 'line'); + + // When there is no exact match, BasicSourceMapConsumer.prototype._findMapping + // returns the index of the closest mapping less than the needle. By + // setting needle.originalColumn to 0, we thus find the last mapping for + // the given line, provided such a mapping exists. 
+ var needle = { + source: util.getArg(aArgs, 'source'), + originalLine: line, + originalColumn: util.getArg(aArgs, 'column', 0) + }; + + needle.source = this._findSourceIndex(needle.source); + if (needle.source < 0) { + return []; + } + + var mappings = []; + + var index = this._findMapping(needle, + this._originalMappings, + "originalLine", + "originalColumn", + util.compareByOriginalPositions, + binarySearch.LEAST_UPPER_BOUND); + if (index >= 0) { + var mapping = this._originalMappings[index]; + + if (aArgs.column === undefined) { + var originalLine = mapping.originalLine; + + // Iterate until either we run out of mappings, or we run into + // a mapping for a different line than the one we found. Since + // mappings are sorted, this is guaranteed to find all mappings for + // the line we found. + while (mapping && mapping.originalLine === originalLine) { + mappings.push({ + line: util.getArg(mapping, 'generatedLine', null), + column: util.getArg(mapping, 'generatedColumn', null), + lastColumn: util.getArg(mapping, 'lastGeneratedColumn', null) + }); + + mapping = this._originalMappings[++index]; + } + } else { + var originalColumn = mapping.originalColumn; + + // Iterate until either we run out of mappings, or we run into + // a mapping for a different line than the one we were searching for. + // Since mappings are sorted, this is guaranteed to find all mappings for + // the line we are searching for. + while (mapping && + mapping.originalLine === line && + mapping.originalColumn == originalColumn) { + mappings.push({ + line: util.getArg(mapping, 'generatedLine', null), + column: util.getArg(mapping, 'generatedColumn', null), + lastColumn: util.getArg(mapping, 'lastGeneratedColumn', null) + }); + + mapping = this._originalMappings[++index]; + } + } + } + + return mappings; + }; + +exports.SourceMapConsumer = SourceMapConsumer; + +/** + * A BasicSourceMapConsumer instance represents a parsed source map which we can + * query for information about the original file positions by giving it a file + * position in the generated source. + * + * The first parameter is the raw source map (either as a JSON string, or + * already parsed to an object). According to the spec, source maps have the + * following attributes: + * + * - version: Which version of the source map spec this map is following. + * - sources: An array of URLs to the original source files. + * - names: An array of identifiers which can be referrenced by individual mappings. + * - sourceRoot: Optional. The URL root from which all sources are relative. + * - sourcesContent: Optional. An array of contents of the original source files. + * - mappings: A string of base64 VLQs which contain the actual mappings. + * - file: Optional. The generated file this source map is associated with. + * + * Here is an example source map, taken from the source map spec[0]: + * + * { + * version : 3, + * file: "out.js", + * sourceRoot : "", + * sources: ["foo.js", "bar.js"], + * names: ["src", "maps", "are", "fun"], + * mappings: "AA,AB;;ABCDE;" + * } + * + * The second parameter, if given, is a string whose value is the URL + * at which the source map was found. This URL is used to compute the + * sources array. 
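Typical iteration and reverse-lookup calls for the consumer APIs documented above; rawSourceMap is the same kind of placeholder used in the earlier sketches.

var SourceMapConsumer = require('source-map').SourceMapConsumer;
var consumer = new SourceMapConsumer(rawSourceMap);

consumer.eachMapping(function (m) {
  // m.source, m.generatedLine, m.generatedColumn, m.originalLine, m.originalColumn, m.name
}, null, SourceMapConsumer.GENERATED_ORDER);   // ORIGINAL_ORDER walks by source position instead

// Every generated position for original line 1 of foo.js; omitting `column`
// returns the whole line (or the next line that has any mappings at all).
consumer.allGeneratedPositionsFor({ source: 'foo.js', line: 1 });
// -> [{ line: ..., column: ..., lastColumn: ... }, ...]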
+ * + * [0]: https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit?pli=1# + */ +function BasicSourceMapConsumer(aSourceMap, aSourceMapURL) { + var sourceMap = aSourceMap; + if (typeof aSourceMap === 'string') { + sourceMap = util.parseSourceMapInput(aSourceMap); + } + + var version = util.getArg(sourceMap, 'version'); + var sources = util.getArg(sourceMap, 'sources'); + // Sass 3.3 leaves out the 'names' array, so we deviate from the spec (which + // requires the array) to play nice here. + var names = util.getArg(sourceMap, 'names', []); + var sourceRoot = util.getArg(sourceMap, 'sourceRoot', null); + var sourcesContent = util.getArg(sourceMap, 'sourcesContent', null); + var mappings = util.getArg(sourceMap, 'mappings'); + var file = util.getArg(sourceMap, 'file', null); + + // Once again, Sass deviates from the spec and supplies the version as a + // string rather than a number, so we use loose equality checking here. + if (version != this._version) { + throw new Error('Unsupported version: ' + version); + } + + if (sourceRoot) { + sourceRoot = util.normalize(sourceRoot); + } + + sources = sources + .map(String) + // Some source maps produce relative source paths like "./foo.js" instead of + // "foo.js". Normalize these first so that future comparisons will succeed. + // See bugzil.la/1090768. + .map(util.normalize) + // Always ensure that absolute sources are internally stored relative to + // the source root, if the source root is absolute. Not doing this would + // be particularly problematic when the source root is a prefix of the + // source (valid, but why??). See github issue #199 and bugzil.la/1188982. + .map(function (source) { + return sourceRoot && util.isAbsolute(sourceRoot) && util.isAbsolute(source) + ? util.relative(sourceRoot, source) + : source; + }); + + // Pass `true` below to allow duplicate names and sources. While source maps + // are intended to be compressed and deduplicated, the TypeScript compiler + // sometimes generates source maps with duplicates in them. See Github issue + // #72 and bugzil.la/889492. + this._names = ArraySet.fromArray(names.map(String), true); + this._sources = ArraySet.fromArray(sources, true); + + this._absoluteSources = this._sources.toArray().map(function (s) { + return util.computeSourceURL(sourceRoot, s, aSourceMapURL); + }); + + this.sourceRoot = sourceRoot; + this.sourcesContent = sourcesContent; + this._mappings = mappings; + this._sourceMapURL = aSourceMapURL; + this.file = file; +} + +BasicSourceMapConsumer.prototype = Object.create(SourceMapConsumer.prototype); +BasicSourceMapConsumer.prototype.consumer = SourceMapConsumer; + +/** + * Utility function to find the index of a source. Returns -1 if not + * found. + */ +BasicSourceMapConsumer.prototype._findSourceIndex = function(aSource) { + var relativeSource = aSource; + if (this.sourceRoot != null) { + relativeSource = util.relative(this.sourceRoot, relativeSource); + } + + if (this._sources.has(relativeSource)) { + return this._sources.indexOf(relativeSource); + } + + // Maybe aSource is an absolute URL as returned by |sources|. In + // this case we can't simply undo the transform. + var i; + for (i = 0; i < this._absoluteSources.length; ++i) { + if (this._absoluteSources[i] == aSource) { + return i; + } + } + + return -1; +}; + +/** + * Create a BasicSourceMapConsumer from a SourceMapGenerator. + * + * @param SourceMapGenerator aSourceMap + * The source map that will be consumed. 
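// --- Illustrative usage sketch (editor's note, not part of the vendored file). ---
// The constructor above accepts either a JSON string or an already-parsed object and
// normalizes sourceRoot/sources as described. A hedged example with a made-up map:
var SourceMapConsumer = require('source-map').SourceMapConsumer;
var consumer = new SourceMapConsumer(JSON.stringify({
  version: 3,
  file: 'out.js',
  sourceRoot: 'http://example.com/www/js/',
  sources: ['./one.js', 'two.js'],   // "./one.js" is normalized to "one.js"
  names: [],
  mappings: 'AAAA,CACC'              // two made-up segments, both pointing into one.js
}));
// The `sources` getter resolves each entry against sourceRoot:
console.log(consumer.sources);
// -> [ 'http://example.com/www/js/one.js', 'http://example.com/www/js/two.js' ]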
+ * @param String aSourceMapURL + * The URL at which the source map can be found (optional) + * @returns BasicSourceMapConsumer + */ +BasicSourceMapConsumer.fromSourceMap = + function SourceMapConsumer_fromSourceMap(aSourceMap, aSourceMapURL) { + var smc = Object.create(BasicSourceMapConsumer.prototype); + + var names = smc._names = ArraySet.fromArray(aSourceMap._names.toArray(), true); + var sources = smc._sources = ArraySet.fromArray(aSourceMap._sources.toArray(), true); + smc.sourceRoot = aSourceMap._sourceRoot; + smc.sourcesContent = aSourceMap._generateSourcesContent(smc._sources.toArray(), + smc.sourceRoot); + smc.file = aSourceMap._file; + smc._sourceMapURL = aSourceMapURL; + smc._absoluteSources = smc._sources.toArray().map(function (s) { + return util.computeSourceURL(smc.sourceRoot, s, aSourceMapURL); + }); + + // Because we are modifying the entries (by converting string sources and + // names to indices into the sources and names ArraySets), we have to make + // a copy of the entry or else bad things happen. Shared mutable state + // strikes again! See github issue #191. + + var generatedMappings = aSourceMap._mappings.toArray().slice(); + var destGeneratedMappings = smc.__generatedMappings = []; + var destOriginalMappings = smc.__originalMappings = []; + + for (var i = 0, length = generatedMappings.length; i < length; i++) { + var srcMapping = generatedMappings[i]; + var destMapping = new Mapping; + destMapping.generatedLine = srcMapping.generatedLine; + destMapping.generatedColumn = srcMapping.generatedColumn; + + if (srcMapping.source) { + destMapping.source = sources.indexOf(srcMapping.source); + destMapping.originalLine = srcMapping.originalLine; + destMapping.originalColumn = srcMapping.originalColumn; + + if (srcMapping.name) { + destMapping.name = names.indexOf(srcMapping.name); + } + + destOriginalMappings.push(destMapping); + } + + destGeneratedMappings.push(destMapping); + } + + quickSort(smc.__originalMappings, util.compareByOriginalPositions); + + return smc; + }; + +/** + * The version of the source mapping spec that we are consuming. + */ +BasicSourceMapConsumer.prototype._version = 3; + +/** + * The list of original sources. + */ +Object.defineProperty(BasicSourceMapConsumer.prototype, 'sources', { + get: function () { + return this._absoluteSources.slice(); + } +}); + +/** + * Provide the JIT with a nice shape / hidden class. + */ +function Mapping() { + this.generatedLine = 0; + this.generatedColumn = 0; + this.source = null; + this.originalLine = null; + this.originalColumn = null; + this.name = null; +} + +/** + * Parse the mappings in a string in to a data structure which we can easily + * query (the ordered arrays in the `this.__generatedMappings` and + * `this.__originalMappings` properties). 
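// --- Illustrative usage sketch (editor's note, not part of the vendored file). ---
// fromSourceMap() above builds a consumer straight from a SourceMapGenerator, copying
// its entries into the Mapping shape defined just above. A hedged round-trip example
// (in this 0.6.x-style code, SourceMapConsumer.fromSourceMap delegates here):
var sourceMap = require('source-map');
var gen = new sourceMap.SourceMapGenerator({ file: 'bundle.js' });
gen.addMapping({
  generated: { line: 1, column: 0 },
  original:  { line: 1, column: 0 },
  source: 'a.js'
});
var consumer = sourceMap.SourceMapConsumer.fromSourceMap(gen);
console.log(consumer.originalPositionFor({ line: 1, column: 0 }));
// -> { source: 'a.js', line: 1, column: 0, name: null }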
+ */ +BasicSourceMapConsumer.prototype._parseMappings = + function SourceMapConsumer_parseMappings(aStr, aSourceRoot) { + var generatedLine = 1; + var previousGeneratedColumn = 0; + var previousOriginalLine = 0; + var previousOriginalColumn = 0; + var previousSource = 0; + var previousName = 0; + var length = aStr.length; + var index = 0; + var cachedSegments = {}; + var temp = {}; + var originalMappings = []; + var generatedMappings = []; + var mapping, str, segment, end, value; + + while (index < length) { + if (aStr.charAt(index) === ';') { + generatedLine++; + index++; + previousGeneratedColumn = 0; + } + else if (aStr.charAt(index) === ',') { + index++; + } + else { + mapping = new Mapping(); + mapping.generatedLine = generatedLine; + + // Because each offset is encoded relative to the previous one, + // many segments often have the same encoding. We can exploit this + // fact by caching the parsed variable length fields of each segment, + // allowing us to avoid a second parse if we encounter the same + // segment again. + for (end = index; end < length; end++) { + if (this._charIsMappingSeparator(aStr, end)) { + break; + } + } + str = aStr.slice(index, end); + + segment = cachedSegments[str]; + if (segment) { + index += str.length; + } else { + segment = []; + while (index < end) { + base64VLQ.decode(aStr, index, temp); + value = temp.value; + index = temp.rest; + segment.push(value); + } + + if (segment.length === 2) { + throw new Error('Found a source, but no line and column'); + } + + if (segment.length === 3) { + throw new Error('Found a source and line, but no column'); + } + + cachedSegments[str] = segment; + } + + // Generated column. + mapping.generatedColumn = previousGeneratedColumn + segment[0]; + previousGeneratedColumn = mapping.generatedColumn; + + if (segment.length > 1) { + // Original source. + mapping.source = previousSource + segment[1]; + previousSource += segment[1]; + + // Original line. + mapping.originalLine = previousOriginalLine + segment[2]; + previousOriginalLine = mapping.originalLine; + // Lines are stored 0-based + mapping.originalLine += 1; + + // Original column. + mapping.originalColumn = previousOriginalColumn + segment[3]; + previousOriginalColumn = mapping.originalColumn; + + if (segment.length > 4) { + // Original name. + mapping.name = previousName + segment[4]; + previousName += segment[4]; + } + } + + generatedMappings.push(mapping); + if (typeof mapping.originalLine === 'number') { + originalMappings.push(mapping); + } + } + } + + quickSort(generatedMappings, util.compareByGeneratedPositionsDeflated); + this.__generatedMappings = generatedMappings; + + quickSort(originalMappings, util.compareByOriginalPositions); + this.__originalMappings = originalMappings; + }; + +/** + * Find the mapping that best matches the hypothetical "needle" mapping that + * we are searching for in the given "haystack" of mappings. + */ +BasicSourceMapConsumer.prototype._findMapping = + function SourceMapConsumer_findMapping(aNeedle, aMappings, aLineName, + aColumnName, aComparator, aBias) { + // To return the position we are searching for, we must first find the + // mapping for the given position and then return the opposite position it + // points to. Because the mappings are sorted, we can use binary search to + // find the best mapping. 
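// --- Illustrative sketch (editor's note, not part of the vendored file). ---
// The parser above walks the "mappings" string character by character: ';' starts a new
// generated line, ',' separates segments within a line, and each segment is 1, 4 or 5
// base64 VLQ fields encoded relative to the previous segment. A rough structural view
// of a made-up string (this split ignores the VLQ decoding done by the real parser):
var mappings = 'AAAA,SAAS;;AACA';
mappings.split(';').forEach(function (line, i) {
  var segments = line === '' ? [] : line.split(',');
  console.log('generated line', i + 1, 'has', segments.length, 'segment(s):', segments);
});
// -> generated line 1 has 2 segment(s): [ 'AAAA', 'SAAS' ]
// -> generated line 2 has 0 segment(s): []
// -> generated line 3 has 1 segment(s): [ 'AACA' ]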
+ + if (aNeedle[aLineName] <= 0) { + throw new TypeError('Line must be greater than or equal to 1, got ' + + aNeedle[aLineName]); + } + if (aNeedle[aColumnName] < 0) { + throw new TypeError('Column must be greater than or equal to 0, got ' + + aNeedle[aColumnName]); + } + + return binarySearch.search(aNeedle, aMappings, aComparator, aBias); + }; + +/** + * Compute the last column for each generated mapping. The last column is + * inclusive. + */ +BasicSourceMapConsumer.prototype.computeColumnSpans = + function SourceMapConsumer_computeColumnSpans() { + for (var index = 0; index < this._generatedMappings.length; ++index) { + var mapping = this._generatedMappings[index]; + + // Mappings do not contain a field for the last generated columnt. We + // can come up with an optimistic estimate, however, by assuming that + // mappings are contiguous (i.e. given two consecutive mappings, the + // first mapping ends where the second one starts). + if (index + 1 < this._generatedMappings.length) { + var nextMapping = this._generatedMappings[index + 1]; + + if (mapping.generatedLine === nextMapping.generatedLine) { + mapping.lastGeneratedColumn = nextMapping.generatedColumn - 1; + continue; + } + } + + // The last mapping for each line spans the entire line. + mapping.lastGeneratedColumn = Infinity; + } + }; + +/** + * Returns the original source, line, and column information for the generated + * source's line and column positions provided. The only argument is an object + * with the following properties: + * + * - line: The line number in the generated source. The line number + * is 1-based. + * - column: The column number in the generated source. The column + * number is 0-based. + * - bias: Either 'SourceMapConsumer.GREATEST_LOWER_BOUND' or + * 'SourceMapConsumer.LEAST_UPPER_BOUND'. Specifies whether to return the + * closest element that is smaller than or greater than the one we are + * searching for, respectively, if the exact element cannot be found. + * Defaults to 'SourceMapConsumer.GREATEST_LOWER_BOUND'. + * + * and an object is returned with the following properties: + * + * - source: The original source file, or null. + * - line: The line number in the original source, or null. The + * line number is 1-based. + * - column: The column number in the original source, or null. The + * column number is 0-based. + * - name: The original identifier, or null. 
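// --- Illustrative usage sketch (editor's note, not part of the vendored file). ---
// originalPositionFor() (implemented just below) is the everyday lookup: generated
// line/column in, original source/line/column/name out, with `bias` defaulting to
// GREATEST_LOWER_BOUND. computeColumnSpans() above fills in lastColumn values.
// A hedged example against a made-up one-segment map:
var SourceMapConsumer = require('source-map').SourceMapConsumer;
var consumer = new SourceMapConsumer({
  version: 3, file: 'min.js', sources: ['one.js'], names: [], mappings: 'AAAA'
});
console.log(consumer.originalPositionFor({ line: 1, column: 0 }));
// -> { source: 'one.js', line: 1, column: 0, name: null }
consumer.computeColumnSpans();
console.log(consumer.allGeneratedPositionsFor({ source: 'one.js', line: 1 }));
// -> [ { line: 1, column: 0, lastColumn: Infinity } ]  (last mapping spans the line)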
+ */ +BasicSourceMapConsumer.prototype.originalPositionFor = + function SourceMapConsumer_originalPositionFor(aArgs) { + var needle = { + generatedLine: util.getArg(aArgs, 'line'), + generatedColumn: util.getArg(aArgs, 'column') + }; + + var index = this._findMapping( + needle, + this._generatedMappings, + "generatedLine", + "generatedColumn", + util.compareByGeneratedPositionsDeflated, + util.getArg(aArgs, 'bias', SourceMapConsumer.GREATEST_LOWER_BOUND) + ); + + if (index >= 0) { + var mapping = this._generatedMappings[index]; + + if (mapping.generatedLine === needle.generatedLine) { + var source = util.getArg(mapping, 'source', null); + if (source !== null) { + source = this._sources.at(source); + source = util.computeSourceURL(this.sourceRoot, source, this._sourceMapURL); + } + var name = util.getArg(mapping, 'name', null); + if (name !== null) { + name = this._names.at(name); + } + return { + source: source, + line: util.getArg(mapping, 'originalLine', null), + column: util.getArg(mapping, 'originalColumn', null), + name: name + }; + } + } + + return { + source: null, + line: null, + column: null, + name: null + }; + }; + +/** + * Return true if we have the source content for every source in the source + * map, false otherwise. + */ +BasicSourceMapConsumer.prototype.hasContentsOfAllSources = + function BasicSourceMapConsumer_hasContentsOfAllSources() { + if (!this.sourcesContent) { + return false; + } + return this.sourcesContent.length >= this._sources.size() && + !this.sourcesContent.some(function (sc) { return sc == null; }); + }; + +/** + * Returns the original source content. The only argument is the url of the + * original source file. Returns null if no original source content is + * available. + */ +BasicSourceMapConsumer.prototype.sourceContentFor = + function SourceMapConsumer_sourceContentFor(aSource, nullOnMissing) { + if (!this.sourcesContent) { + return null; + } + + var index = this._findSourceIndex(aSource); + if (index >= 0) { + return this.sourcesContent[index]; + } + + var relativeSource = aSource; + if (this.sourceRoot != null) { + relativeSource = util.relative(this.sourceRoot, relativeSource); + } + + var url; + if (this.sourceRoot != null + && (url = util.urlParse(this.sourceRoot))) { + // XXX: file:// URIs and absolute paths lead to unexpected behavior for + // many users. We can help them out when they expect file:// URIs to + // behave like it would if they were running a local HTTP server. See + // https://bugzilla.mozilla.org/show_bug.cgi?id=885597. + var fileUriAbsPath = relativeSource.replace(/^file:\/\//, ""); + if (url.scheme == "file" + && this._sources.has(fileUriAbsPath)) { + return this.sourcesContent[this._sources.indexOf(fileUriAbsPath)] + } + + if ((!url.path || url.path == "/") + && this._sources.has("/" + relativeSource)) { + return this.sourcesContent[this._sources.indexOf("/" + relativeSource)]; + } + } + + // This function is used recursively from + // IndexedSourceMapConsumer.prototype.sourceContentFor. In that case, we + // don't want to throw if we can't find the source - we just want to + // return null, so we provide a flag to exit gracefully. + if (nullOnMissing) { + return null; + } + else { + throw new Error('"' + relativeSource + '" is not in the SourceMap.'); + } + }; + +/** + * Returns the generated line and column information for the original source, + * line, and column positions provided. The only argument is an object with + * the following properties: + * + * - source: The filename of the original source. 
+ * - line: The line number in the original source. The line number + * is 1-based. + * - column: The column number in the original source. The column + * number is 0-based. + * - bias: Either 'SourceMapConsumer.GREATEST_LOWER_BOUND' or + * 'SourceMapConsumer.LEAST_UPPER_BOUND'. Specifies whether to return the + * closest element that is smaller than or greater than the one we are + * searching for, respectively, if the exact element cannot be found. + * Defaults to 'SourceMapConsumer.GREATEST_LOWER_BOUND'. + * + * and an object is returned with the following properties: + * + * - line: The line number in the generated source, or null. The + * line number is 1-based. + * - column: The column number in the generated source, or null. + * The column number is 0-based. + */ +BasicSourceMapConsumer.prototype.generatedPositionFor = + function SourceMapConsumer_generatedPositionFor(aArgs) { + var source = util.getArg(aArgs, 'source'); + source = this._findSourceIndex(source); + if (source < 0) { + return { + line: null, + column: null, + lastColumn: null + }; + } + + var needle = { + source: source, + originalLine: util.getArg(aArgs, 'line'), + originalColumn: util.getArg(aArgs, 'column') + }; + + var index = this._findMapping( + needle, + this._originalMappings, + "originalLine", + "originalColumn", + util.compareByOriginalPositions, + util.getArg(aArgs, 'bias', SourceMapConsumer.GREATEST_LOWER_BOUND) + ); + + if (index >= 0) { + var mapping = this._originalMappings[index]; + + if (mapping.source === needle.source) { + return { + line: util.getArg(mapping, 'generatedLine', null), + column: util.getArg(mapping, 'generatedColumn', null), + lastColumn: util.getArg(mapping, 'lastGeneratedColumn', null) + }; + } + } + + return { + line: null, + column: null, + lastColumn: null + }; + }; + +exports.BasicSourceMapConsumer = BasicSourceMapConsumer; + +/** + * An IndexedSourceMapConsumer instance represents a parsed source map which + * we can query for information. It differs from BasicSourceMapConsumer in + * that it takes "indexed" source maps (i.e. ones with a "sections" field) as + * input. + * + * The first parameter is a raw source map (either as a JSON string, or already + * parsed to an object). According to the spec for indexed source maps, they + * have the following attributes: + * + * - version: Which version of the source map spec this map is following. + * - file: Optional. The generated file this source map is associated with. + * - sections: A list of section definitions. + * + * Each value under the "sections" field has two fields: + * - offset: The offset into the original specified at which this section + * begins to apply, defined as an object with a "line" and "column" + * field. + * - map: A source map definition. This source map could also be indexed, + * but doesn't have to be. + * + * Instead of the "map" field, it's also possible to have a "url" field + * specifying a URL to retrieve a source map from, but that's currently + * unsupported. + * + * Here's an example source map, taken from the source map spec[0], but + * modified to omit a section which uses the "url" field. + * + * { + * version : 3, + * file: "app.js", + * sections: [{ + * offset: {line:100, column:10}, + * map: { + * version : 3, + * file: "section.js", + * sources: ["foo.js", "bar.js"], + * names: ["src", "maps", "are", "fun"], + * mappings: "AAAA,E;;ABCDE;" + * } + * }], + * } + * + * The second parameter, if given, is a string whose value is the URL + * at which the source map was found. 
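// --- Illustrative usage sketch (editor's note, not part of the vendored file). ---
// generatedPositionFor() above is the reverse lookup: original source/line/column in,
// generated line/column out. Hedged example, reusing a made-up one-segment map:
var SourceMapConsumer = require('source-map').SourceMapConsumer;
var consumer = new SourceMapConsumer({
  version: 3, file: 'min.js', sources: ['one.js'], names: [], mappings: 'AAAA'
});
console.log(consumer.generatedPositionFor({ source: 'one.js', line: 1, column: 0 }));
// -> { line: 1, column: 0, lastColumn: null }
// An unknown source short-circuits to nulls via _findSourceIndex():
console.log(consumer.generatedPositionFor({ source: 'missing.js', line: 1, column: 0 }));
// -> { line: null, column: null, lastColumn: null }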
This URL is used to compute the + * sources array. + * + * [0]: https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit#heading=h.535es3xeprgt + */ +function IndexedSourceMapConsumer(aSourceMap, aSourceMapURL) { + var sourceMap = aSourceMap; + if (typeof aSourceMap === 'string') { + sourceMap = util.parseSourceMapInput(aSourceMap); + } + + var version = util.getArg(sourceMap, 'version'); + var sections = util.getArg(sourceMap, 'sections'); + + if (version != this._version) { + throw new Error('Unsupported version: ' + version); + } + + this._sources = new ArraySet(); + this._names = new ArraySet(); + + var lastOffset = { + line: -1, + column: 0 + }; + this._sections = sections.map(function (s) { + if (s.url) { + // The url field will require support for asynchronicity. + // See https://github.com/mozilla/source-map/issues/16 + throw new Error('Support for url field in sections not implemented.'); + } + var offset = util.getArg(s, 'offset'); + var offsetLine = util.getArg(offset, 'line'); + var offsetColumn = util.getArg(offset, 'column'); + + if (offsetLine < lastOffset.line || + (offsetLine === lastOffset.line && offsetColumn < lastOffset.column)) { + throw new Error('Section offsets must be ordered and non-overlapping.'); + } + lastOffset = offset; + + return { + generatedOffset: { + // The offset fields are 0-based, but we use 1-based indices when + // encoding/decoding from VLQ. + generatedLine: offsetLine + 1, + generatedColumn: offsetColumn + 1 + }, + consumer: new SourceMapConsumer(util.getArg(s, 'map'), aSourceMapURL) + } + }); +} + +IndexedSourceMapConsumer.prototype = Object.create(SourceMapConsumer.prototype); +IndexedSourceMapConsumer.prototype.constructor = SourceMapConsumer; + +/** + * The version of the source mapping spec that we are consuming. + */ +IndexedSourceMapConsumer.prototype._version = 3; + +/** + * The list of original sources. + */ +Object.defineProperty(IndexedSourceMapConsumer.prototype, 'sources', { + get: function () { + var sources = []; + for (var i = 0; i < this._sections.length; i++) { + for (var j = 0; j < this._sections[i].consumer.sources.length; j++) { + sources.push(this._sections[i].consumer.sources[j]); + } + } + return sources; + } +}); + +/** + * Returns the original source, line, and column information for the generated + * source's line and column positions provided. The only argument is an object + * with the following properties: + * + * - line: The line number in the generated source. The line number + * is 1-based. + * - column: The column number in the generated source. The column + * number is 0-based. + * + * and an object is returned with the following properties: + * + * - source: The original source file, or null. + * - line: The line number in the original source, or null. The + * line number is 1-based. + * - column: The column number in the original source, or null. The + * column number is 0-based. + * - name: The original identifier, or null. + */ +IndexedSourceMapConsumer.prototype.originalPositionFor = + function IndexedSourceMapConsumer_originalPositionFor(aArgs) { + var needle = { + generatedLine: util.getArg(aArgs, 'line'), + generatedColumn: util.getArg(aArgs, 'column') + }; + + // Find the section containing the generated position we're trying to map + // to an original position. 
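// --- Illustrative usage sketch (editor's note, not part of the vendored file). ---
// An indexed ("sections") map, as described above, nests whole sub-maps at generated
// offsets. A hedged example with one made-up section; in this 0.6.x-style code the
// SourceMapConsumer entry point dispatches to IndexedSourceMapConsumer when it sees
// a `sections` field:
var SourceMapConsumer = require('source-map').SourceMapConsumer;
var indexed = new SourceMapConsumer({
  version: 3,
  file: 'bundle.js',
  sections: [{
    offset: { line: 0, column: 0 },   // 0-based offsets, per the section format
    map: { version: 3, file: 'part.js', sources: ['one.js'], names: [], mappings: 'AAAA' }
  }]
});
// Queries are routed to the owning section and re-offset around the sub-lookup:
console.log(indexed.originalPositionFor({ line: 1, column: 1 }));
// -> { source: 'one.js', line: 1, column: 0, name: null }  (nearest mapping at/before col 1)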
+ var sectionIndex = binarySearch.search(needle, this._sections, + function(needle, section) { + var cmp = needle.generatedLine - section.generatedOffset.generatedLine; + if (cmp) { + return cmp; + } + + return (needle.generatedColumn - + section.generatedOffset.generatedColumn); + }); + var section = this._sections[sectionIndex]; + + if (!section) { + return { + source: null, + line: null, + column: null, + name: null + }; + } + + return section.consumer.originalPositionFor({ + line: needle.generatedLine - + (section.generatedOffset.generatedLine - 1), + column: needle.generatedColumn - + (section.generatedOffset.generatedLine === needle.generatedLine + ? section.generatedOffset.generatedColumn - 1 + : 0), + bias: aArgs.bias + }); + }; + +/** + * Return true if we have the source content for every source in the source + * map, false otherwise. + */ +IndexedSourceMapConsumer.prototype.hasContentsOfAllSources = + function IndexedSourceMapConsumer_hasContentsOfAllSources() { + return this._sections.every(function (s) { + return s.consumer.hasContentsOfAllSources(); + }); + }; + +/** + * Returns the original source content. The only argument is the url of the + * original source file. Returns null if no original source content is + * available. + */ +IndexedSourceMapConsumer.prototype.sourceContentFor = + function IndexedSourceMapConsumer_sourceContentFor(aSource, nullOnMissing) { + for (var i = 0; i < this._sections.length; i++) { + var section = this._sections[i]; + + var content = section.consumer.sourceContentFor(aSource, true); + if (content) { + return content; + } + } + if (nullOnMissing) { + return null; + } + else { + throw new Error('"' + aSource + '" is not in the SourceMap.'); + } + }; + +/** + * Returns the generated line and column information for the original source, + * line, and column positions provided. The only argument is an object with + * the following properties: + * + * - source: The filename of the original source. + * - line: The line number in the original source. The line number + * is 1-based. + * - column: The column number in the original source. The column + * number is 0-based. + * + * and an object is returned with the following properties: + * + * - line: The line number in the generated source, or null. The + * line number is 1-based. + * - column: The column number in the generated source, or null. + * The column number is 0-based. + */ +IndexedSourceMapConsumer.prototype.generatedPositionFor = + function IndexedSourceMapConsumer_generatedPositionFor(aArgs) { + for (var i = 0; i < this._sections.length; i++) { + var section = this._sections[i]; + + // Only consider this section if the requested source is in the list of + // sources of the consumer. + if (section.consumer._findSourceIndex(util.getArg(aArgs, 'source')) === -1) { + continue; + } + var generatedPosition = section.consumer.generatedPositionFor(aArgs); + if (generatedPosition) { + var ret = { + line: generatedPosition.line + + (section.generatedOffset.generatedLine - 1), + column: generatedPosition.column + + (section.generatedOffset.generatedLine === generatedPosition.line + ? section.generatedOffset.generatedColumn - 1 + : 0) + }; + return ret; + } + } + + return { + line: null, + column: null + }; + }; + +/** + * Parse the mappings in a string in to a data structure which we can easily + * query (the ordered arrays in the `this.__generatedMappings` and + * `this.__originalMappings` properties). 
+ */ +IndexedSourceMapConsumer.prototype._parseMappings = + function IndexedSourceMapConsumer_parseMappings(aStr, aSourceRoot) { + this.__generatedMappings = []; + this.__originalMappings = []; + for (var i = 0; i < this._sections.length; i++) { + var section = this._sections[i]; + var sectionMappings = section.consumer._generatedMappings; + for (var j = 0; j < sectionMappings.length; j++) { + var mapping = sectionMappings[j]; + + var source = section.consumer._sources.at(mapping.source); + source = util.computeSourceURL(section.consumer.sourceRoot, source, this._sourceMapURL); + this._sources.add(source); + source = this._sources.indexOf(source); + + var name = null; + if (mapping.name) { + name = section.consumer._names.at(mapping.name); + this._names.add(name); + name = this._names.indexOf(name); + } + + // The mappings coming from the consumer for the section have + // generated positions relative to the start of the section, so we + // need to offset them to be relative to the start of the concatenated + // generated file. + var adjustedMapping = { + source: source, + generatedLine: mapping.generatedLine + + (section.generatedOffset.generatedLine - 1), + generatedColumn: mapping.generatedColumn + + (section.generatedOffset.generatedLine === mapping.generatedLine + ? section.generatedOffset.generatedColumn - 1 + : 0), + originalLine: mapping.originalLine, + originalColumn: mapping.originalColumn, + name: name + }; + + this.__generatedMappings.push(adjustedMapping); + if (typeof adjustedMapping.originalLine === 'number') { + this.__originalMappings.push(adjustedMapping); + } + } + } + + quickSort(this.__generatedMappings, util.compareByGeneratedPositionsDeflated); + quickSort(this.__originalMappings, util.compareByOriginalPositions); + }; + +exports.IndexedSourceMapConsumer = IndexedSourceMapConsumer; diff --git a/node_modules/source-map/lib/source-map-generator.js b/node_modules/source-map/lib/source-map-generator.js new file mode 100644 index 0000000..508bcfb --- /dev/null +++ b/node_modules/source-map/lib/source-map-generator.js @@ -0,0 +1,425 @@ +/* -*- Mode: js; js-indent-level: 2; -*- */ +/* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + +var base64VLQ = require('./base64-vlq'); +var util = require('./util'); +var ArraySet = require('./array-set').ArraySet; +var MappingList = require('./mapping-list').MappingList; + +/** + * An instance of the SourceMapGenerator represents a source map which is + * being built incrementally. You may pass an object with the following + * properties: + * + * - file: The filename of the generated source. + * - sourceRoot: A root for all relative URLs in this source map. + */ +function SourceMapGenerator(aArgs) { + if (!aArgs) { + aArgs = {}; + } + this._file = util.getArg(aArgs, 'file', null); + this._sourceRoot = util.getArg(aArgs, 'sourceRoot', null); + this._skipValidation = util.getArg(aArgs, 'skipValidation', false); + this._sources = new ArraySet(); + this._names = new ArraySet(); + this._mappings = new MappingList(); + this._sourcesContents = null; +} + +SourceMapGenerator.prototype._version = 3; + +/** + * Creates a new SourceMapGenerator based on a SourceMapConsumer + * + * @param aSourceMapConsumer The SourceMap. 
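// --- Illustrative usage sketch (editor's note, not part of the vendored file). ---
// The generator above is the write side: it accumulates mappings and serializes them on
// demand (toJSON/toString appear further down). A minimal hedged example:
var SourceMapGenerator = require('source-map').SourceMapGenerator;
var generator = new SourceMapGenerator({
  file: 'bundle.min.js',                    // becomes the "file" field of the output map
  sourceRoot: 'http://example.com/app/js/'
  // skipValidation: true would bypass _validateMapping() below; default is false
});
console.log(generator.toString());
// -> roughly '{"version":3,"sources":[],"names":[],"mappings":"",
//             "file":"bundle.min.js","sourceRoot":"http://example.com/app/js/"}'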
+ */ +SourceMapGenerator.fromSourceMap = + function SourceMapGenerator_fromSourceMap(aSourceMapConsumer) { + var sourceRoot = aSourceMapConsumer.sourceRoot; + var generator = new SourceMapGenerator({ + file: aSourceMapConsumer.file, + sourceRoot: sourceRoot + }); + aSourceMapConsumer.eachMapping(function (mapping) { + var newMapping = { + generated: { + line: mapping.generatedLine, + column: mapping.generatedColumn + } + }; + + if (mapping.source != null) { + newMapping.source = mapping.source; + if (sourceRoot != null) { + newMapping.source = util.relative(sourceRoot, newMapping.source); + } + + newMapping.original = { + line: mapping.originalLine, + column: mapping.originalColumn + }; + + if (mapping.name != null) { + newMapping.name = mapping.name; + } + } + + generator.addMapping(newMapping); + }); + aSourceMapConsumer.sources.forEach(function (sourceFile) { + var sourceRelative = sourceFile; + if (sourceRoot !== null) { + sourceRelative = util.relative(sourceRoot, sourceFile); + } + + if (!generator._sources.has(sourceRelative)) { + generator._sources.add(sourceRelative); + } + + var content = aSourceMapConsumer.sourceContentFor(sourceFile); + if (content != null) { + generator.setSourceContent(sourceFile, content); + } + }); + return generator; + }; + +/** + * Add a single mapping from original source line and column to the generated + * source's line and column for this source map being created. The mapping + * object should have the following properties: + * + * - generated: An object with the generated line and column positions. + * - original: An object with the original line and column positions. + * - source: The original source file (relative to the sourceRoot). + * - name: An optional original token name for this mapping. + */ +SourceMapGenerator.prototype.addMapping = + function SourceMapGenerator_addMapping(aArgs) { + var generated = util.getArg(aArgs, 'generated'); + var original = util.getArg(aArgs, 'original', null); + var source = util.getArg(aArgs, 'source', null); + var name = util.getArg(aArgs, 'name', null); + + if (!this._skipValidation) { + this._validateMapping(generated, original, source, name); + } + + if (source != null) { + source = String(source); + if (!this._sources.has(source)) { + this._sources.add(source); + } + } + + if (name != null) { + name = String(name); + if (!this._names.has(name)) { + this._names.add(name); + } + } + + this._mappings.add({ + generatedLine: generated.line, + generatedColumn: generated.column, + originalLine: original != null && original.line, + originalColumn: original != null && original.column, + source: source, + name: name + }); + }; + +/** + * Set the source content for a source file. + */ +SourceMapGenerator.prototype.setSourceContent = + function SourceMapGenerator_setSourceContent(aSourceFile, aSourceContent) { + var source = aSourceFile; + if (this._sourceRoot != null) { + source = util.relative(this._sourceRoot, source); + } + + if (aSourceContent != null) { + // Add the source content to the _sourcesContents map. + // Create a new _sourcesContents map if the property is null. + if (!this._sourcesContents) { + this._sourcesContents = Object.create(null); + } + this._sourcesContents[util.toSetString(source)] = aSourceContent; + } else if (this._sourcesContents) { + // Remove the source file from the _sourcesContents map. + // If the _sourcesContents map is empty, set the property to null. 
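// --- Illustrative usage sketch (editor's note, not part of the vendored file). ---
// addMapping() and setSourceContent() above are the two calls a compiler normally makes
// while emitting output. A hedged example with made-up positions and content:
var SourceMapGenerator = require('source-map').SourceMapGenerator;
var generator = new SourceMapGenerator({ file: 'bundle.js' });
generator.addMapping({
  generated: { line: 1, column: 0 },   // 1-based lines, 0-based columns
  original:  { line: 2, column: 4 },
  source: 'a.js',
  name: 'greet'                        // optional original identifier
});
generator.setSourceContent('a.js', 'function greet() {\n  return "hi";\n}\n');
console.log(generator.toJSON().sourcesContent);
// -> [ 'function greet() {\n  return "hi";\n}\n' ]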
+ delete this._sourcesContents[util.toSetString(source)]; + if (Object.keys(this._sourcesContents).length === 0) { + this._sourcesContents = null; + } + } + }; + +/** + * Applies the mappings of a sub-source-map for a specific source file to the + * source map being generated. Each mapping to the supplied source file is + * rewritten using the supplied source map. Note: The resolution for the + * resulting mappings is the minimium of this map and the supplied map. + * + * @param aSourceMapConsumer The source map to be applied. + * @param aSourceFile Optional. The filename of the source file. + * If omitted, SourceMapConsumer's file property will be used. + * @param aSourceMapPath Optional. The dirname of the path to the source map + * to be applied. If relative, it is relative to the SourceMapConsumer. + * This parameter is needed when the two source maps aren't in the same + * directory, and the source map to be applied contains relative source + * paths. If so, those relative source paths need to be rewritten + * relative to the SourceMapGenerator. + */ +SourceMapGenerator.prototype.applySourceMap = + function SourceMapGenerator_applySourceMap(aSourceMapConsumer, aSourceFile, aSourceMapPath) { + var sourceFile = aSourceFile; + // If aSourceFile is omitted, we will use the file property of the SourceMap + if (aSourceFile == null) { + if (aSourceMapConsumer.file == null) { + throw new Error( + 'SourceMapGenerator.prototype.applySourceMap requires either an explicit source file, ' + + 'or the source map\'s "file" property. Both were omitted.' + ); + } + sourceFile = aSourceMapConsumer.file; + } + var sourceRoot = this._sourceRoot; + // Make "sourceFile" relative if an absolute Url is passed. + if (sourceRoot != null) { + sourceFile = util.relative(sourceRoot, sourceFile); + } + // Applying the SourceMap can add and remove items from the sources and + // the names array. + var newSources = new ArraySet(); + var newNames = new ArraySet(); + + // Find mappings for the "sourceFile" + this._mappings.unsortedForEach(function (mapping) { + if (mapping.source === sourceFile && mapping.originalLine != null) { + // Check if it can be mapped by the source map, then update the mapping. + var original = aSourceMapConsumer.originalPositionFor({ + line: mapping.originalLine, + column: mapping.originalColumn + }); + if (original.source != null) { + // Copy mapping + mapping.source = original.source; + if (aSourceMapPath != null) { + mapping.source = util.join(aSourceMapPath, mapping.source) + } + if (sourceRoot != null) { + mapping.source = util.relative(sourceRoot, mapping.source); + } + mapping.originalLine = original.line; + mapping.originalColumn = original.column; + if (original.name != null) { + mapping.name = original.name; + } + } + } + + var source = mapping.source; + if (source != null && !newSources.has(source)) { + newSources.add(source); + } + + var name = mapping.name; + if (name != null && !newNames.has(name)) { + newNames.add(name); + } + + }, this); + this._sources = newSources; + this._names = newNames; + + // Copy sourcesContents of applied map. 
+ aSourceMapConsumer.sources.forEach(function (sourceFile) { + var content = aSourceMapConsumer.sourceContentFor(sourceFile); + if (content != null) { + if (aSourceMapPath != null) { + sourceFile = util.join(aSourceMapPath, sourceFile); + } + if (sourceRoot != null) { + sourceFile = util.relative(sourceRoot, sourceFile); + } + this.setSourceContent(sourceFile, content); + } + }, this); + }; + +/** + * A mapping can have one of the three levels of data: + * + * 1. Just the generated position. + * 2. The Generated position, original position, and original source. + * 3. Generated and original position, original source, as well as a name + * token. + * + * To maintain consistency, we validate that any new mapping being added falls + * in to one of these categories. + */ +SourceMapGenerator.prototype._validateMapping = + function SourceMapGenerator_validateMapping(aGenerated, aOriginal, aSource, + aName) { + // When aOriginal is truthy but has empty values for .line and .column, + // it is most likely a programmer error. In this case we throw a very + // specific error message to try to guide them the right way. + // For example: https://github.com/Polymer/polymer-bundler/pull/519 + if (aOriginal && typeof aOriginal.line !== 'number' && typeof aOriginal.column !== 'number') { + throw new Error( + 'original.line and original.column are not numbers -- you probably meant to omit ' + + 'the original mapping entirely and only map the generated position. If so, pass ' + + 'null for the original mapping instead of an object with empty or null values.' + ); + } + + if (aGenerated && 'line' in aGenerated && 'column' in aGenerated + && aGenerated.line > 0 && aGenerated.column >= 0 + && !aOriginal && !aSource && !aName) { + // Case 1. + return; + } + else if (aGenerated && 'line' in aGenerated && 'column' in aGenerated + && aOriginal && 'line' in aOriginal && 'column' in aOriginal + && aGenerated.line > 0 && aGenerated.column >= 0 + && aOriginal.line > 0 && aOriginal.column >= 0 + && aSource) { + // Cases 2 and 3. + return; + } + else { + throw new Error('Invalid mapping: ' + JSON.stringify({ + generated: aGenerated, + source: aSource, + original: aOriginal, + name: aName + })); + } + }; + +/** + * Serialize the accumulated mappings in to the stream of base 64 VLQs + * specified by the source map format. 
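// --- Illustrative usage sketch (editor's note, not part of the vendored file). ---
// applySourceMap() above composes two maps: mappings that point into an intermediate
// file are rewritten to point at that file's own original sources. A hedged sketch
// using two made-up single-segment maps for the two build steps:
var sourceMap = require('source-map');
var bundleToIntermediate = new sourceMap.SourceMapConsumer({
  version: 3, file: 'bundle.js', sources: ['intermediate.js'], names: [], mappings: 'AAAA'
});
var intermediateToOriginal = new sourceMap.SourceMapConsumer({
  version: 3, file: 'intermediate.js', sources: ['original.js'], names: [], mappings: 'AAAA'
});
var generator = sourceMap.SourceMapGenerator.fromSourceMap(bundleToIntermediate);
generator.applySourceMap(intermediateToOriginal, 'intermediate.js');
console.log(generator.toJSON().sources);   // -> [ 'original.js' ]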
+ */ +SourceMapGenerator.prototype._serializeMappings = + function SourceMapGenerator_serializeMappings() { + var previousGeneratedColumn = 0; + var previousGeneratedLine = 1; + var previousOriginalColumn = 0; + var previousOriginalLine = 0; + var previousName = 0; + var previousSource = 0; + var result = ''; + var next; + var mapping; + var nameIdx; + var sourceIdx; + + var mappings = this._mappings.toArray(); + for (var i = 0, len = mappings.length; i < len; i++) { + mapping = mappings[i]; + next = '' + + if (mapping.generatedLine !== previousGeneratedLine) { + previousGeneratedColumn = 0; + while (mapping.generatedLine !== previousGeneratedLine) { + next += ';'; + previousGeneratedLine++; + } + } + else { + if (i > 0) { + if (!util.compareByGeneratedPositionsInflated(mapping, mappings[i - 1])) { + continue; + } + next += ','; + } + } + + next += base64VLQ.encode(mapping.generatedColumn + - previousGeneratedColumn); + previousGeneratedColumn = mapping.generatedColumn; + + if (mapping.source != null) { + sourceIdx = this._sources.indexOf(mapping.source); + next += base64VLQ.encode(sourceIdx - previousSource); + previousSource = sourceIdx; + + // lines are stored 0-based in SourceMap spec version 3 + next += base64VLQ.encode(mapping.originalLine - 1 + - previousOriginalLine); + previousOriginalLine = mapping.originalLine - 1; + + next += base64VLQ.encode(mapping.originalColumn + - previousOriginalColumn); + previousOriginalColumn = mapping.originalColumn; + + if (mapping.name != null) { + nameIdx = this._names.indexOf(mapping.name); + next += base64VLQ.encode(nameIdx - previousName); + previousName = nameIdx; + } + } + + result += next; + } + + return result; + }; + +SourceMapGenerator.prototype._generateSourcesContent = + function SourceMapGenerator_generateSourcesContent(aSources, aSourceRoot) { + return aSources.map(function (source) { + if (!this._sourcesContents) { + return null; + } + if (aSourceRoot != null) { + source = util.relative(aSourceRoot, source); + } + var key = util.toSetString(source); + return Object.prototype.hasOwnProperty.call(this._sourcesContents, key) + ? this._sourcesContents[key] + : null; + }, this); + }; + +/** + * Externalize the source map. + */ +SourceMapGenerator.prototype.toJSON = + function SourceMapGenerator_toJSON() { + var map = { + version: this._version, + sources: this._sources.toArray(), + names: this._names.toArray(), + mappings: this._serializeMappings() + }; + if (this._file != null) { + map.file = this._file; + } + if (this._sourceRoot != null) { + map.sourceRoot = this._sourceRoot; + } + if (this._sourcesContents) { + map.sourcesContent = this._generateSourcesContent(map.sources, map.sourceRoot); + } + + return map; + }; + +/** + * Render the source map being generated to a string. + */ +SourceMapGenerator.prototype.toString = + function SourceMapGenerator_toString() { + return JSON.stringify(this.toJSON()); + }; + +exports.SourceMapGenerator = SourceMapGenerator; diff --git a/node_modules/source-map/lib/source-node.js b/node_modules/source-map/lib/source-node.js new file mode 100644 index 0000000..8bcdbe3 --- /dev/null +++ b/node_modules/source-map/lib/source-node.js @@ -0,0 +1,413 @@ +/* -*- Mode: js; js-indent-level: 2; -*- */ +/* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. 
See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + +var SourceMapGenerator = require('./source-map-generator').SourceMapGenerator; +var util = require('./util'); + +// Matches a Windows-style `\r\n` newline or a `\n` newline used by all other +// operating systems these days (capturing the result). +var REGEX_NEWLINE = /(\r?\n)/; + +// Newline character code for charCodeAt() comparisons +var NEWLINE_CODE = 10; + +// Private symbol for identifying `SourceNode`s when multiple versions of +// the source-map library are loaded. This MUST NOT CHANGE across +// versions! +var isSourceNode = "$$$isSourceNode$$$"; + +/** + * SourceNodes provide a way to abstract over interpolating/concatenating + * snippets of generated JavaScript source code while maintaining the line and + * column information associated with the original source code. + * + * @param aLine The original line number. + * @param aColumn The original column number. + * @param aSource The original source's filename. + * @param aChunks Optional. An array of strings which are snippets of + * generated JS, or other SourceNodes. + * @param aName The original identifier. + */ +function SourceNode(aLine, aColumn, aSource, aChunks, aName) { + this.children = []; + this.sourceContents = {}; + this.line = aLine == null ? null : aLine; + this.column = aColumn == null ? null : aColumn; + this.source = aSource == null ? null : aSource; + this.name = aName == null ? null : aName; + this[isSourceNode] = true; + if (aChunks != null) this.add(aChunks); +} + +/** + * Creates a SourceNode from generated code and a SourceMapConsumer. + * + * @param aGeneratedCode The generated code + * @param aSourceMapConsumer The SourceMap for the generated code + * @param aRelativePath Optional. The path that relative sources in the + * SourceMapConsumer should be relative to. + */ +SourceNode.fromStringWithSourceMap = + function SourceNode_fromStringWithSourceMap(aGeneratedCode, aSourceMapConsumer, aRelativePath) { + // The SourceNode we want to fill with the generated code + // and the SourceMap + var node = new SourceNode(); + + // All even indices of this array are one line of the generated code, + // while all odd indices are the newlines between two adjacent lines + // (since `REGEX_NEWLINE` captures its match). + // Processed fragments are accessed by calling `shiftNextLine`. + var remainingLines = aGeneratedCode.split(REGEX_NEWLINE); + var remainingLinesIndex = 0; + var shiftNextLine = function() { + var lineContents = getNextLine(); + // The last line of a file might not have a newline. + var newLine = getNextLine() || ""; + return lineContents + newLine; + + function getNextLine() { + return remainingLinesIndex < remainingLines.length ? + remainingLines[remainingLinesIndex++] : undefined; + } + }; + + // We need to remember the position of "remainingLines" + var lastGeneratedLine = 1, lastGeneratedColumn = 0; + + // The generate SourceNodes we need a code range. + // To extract it current and last mapping is used. + // Here we store the last mapping. + var lastMapping = null; + + aSourceMapConsumer.eachMapping(function (mapping) { + if (lastMapping !== null) { + // We add the code from "lastMapping" to "mapping": + // First check if there is a new line in between. 
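// --- Illustrative usage sketch (editor's note, not part of the vendored file). ---
// A SourceNode (above) is a tree of generated-code chunks, each optionally tagged with
// the original position it came from. A small hedged example with made-up positions:
var SourceNode = require('source-map').SourceNode;
var node = new SourceNode(1, 0, 'a.js', [
  new SourceNode(1, 0, 'a.js', 'function '),
  new SourceNode(1, 9, 'a.js', 'greet', 'greet'),   // last argument is the original name
  '() { return "hi"; }\n'
]);
console.log(node.toString());
// -> 'function greet() { return "hi"; }\n'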
+ if (lastGeneratedLine < mapping.generatedLine) { + // Associate first line with "lastMapping" + addMappingWithCode(lastMapping, shiftNextLine()); + lastGeneratedLine++; + lastGeneratedColumn = 0; + // The remaining code is added without mapping + } else { + // There is no new line in between. + // Associate the code between "lastGeneratedColumn" and + // "mapping.generatedColumn" with "lastMapping" + var nextLine = remainingLines[remainingLinesIndex] || ''; + var code = nextLine.substr(0, mapping.generatedColumn - + lastGeneratedColumn); + remainingLines[remainingLinesIndex] = nextLine.substr(mapping.generatedColumn - + lastGeneratedColumn); + lastGeneratedColumn = mapping.generatedColumn; + addMappingWithCode(lastMapping, code); + // No more remaining code, continue + lastMapping = mapping; + return; + } + } + // We add the generated code until the first mapping + // to the SourceNode without any mapping. + // Each line is added as separate string. + while (lastGeneratedLine < mapping.generatedLine) { + node.add(shiftNextLine()); + lastGeneratedLine++; + } + if (lastGeneratedColumn < mapping.generatedColumn) { + var nextLine = remainingLines[remainingLinesIndex] || ''; + node.add(nextLine.substr(0, mapping.generatedColumn)); + remainingLines[remainingLinesIndex] = nextLine.substr(mapping.generatedColumn); + lastGeneratedColumn = mapping.generatedColumn; + } + lastMapping = mapping; + }, this); + // We have processed all mappings. + if (remainingLinesIndex < remainingLines.length) { + if (lastMapping) { + // Associate the remaining code in the current line with "lastMapping" + addMappingWithCode(lastMapping, shiftNextLine()); + } + // and add the remaining lines without any mapping + node.add(remainingLines.splice(remainingLinesIndex).join("")); + } + + // Copy sourcesContent into SourceNode + aSourceMapConsumer.sources.forEach(function (sourceFile) { + var content = aSourceMapConsumer.sourceContentFor(sourceFile); + if (content != null) { + if (aRelativePath != null) { + sourceFile = util.join(aRelativePath, sourceFile); + } + node.setSourceContent(sourceFile, content); + } + }); + + return node; + + function addMappingWithCode(mapping, code) { + if (mapping === null || mapping.source === undefined) { + node.add(code); + } else { + var source = aRelativePath + ? util.join(aRelativePath, mapping.source) + : mapping.source; + node.add(new SourceNode(mapping.originalLine, + mapping.originalColumn, + source, + code, + mapping.name)); + } + } + }; + +/** + * Add a chunk of generated JS to this source node. + * + * @param aChunk A string snippet of generated JS code, another instance of + * SourceNode, or an array where each member is one of those things. + */ +SourceNode.prototype.add = function SourceNode_add(aChunk) { + if (Array.isArray(aChunk)) { + aChunk.forEach(function (chunk) { + this.add(chunk); + }, this); + } + else if (aChunk[isSourceNode] || typeof aChunk === "string") { + if (aChunk) { + this.children.push(aChunk); + } + } + else { + throw new TypeError( + "Expected a SourceNode, string, or an array of SourceNodes and strings. Got " + aChunk + ); + } + return this; +}; + +/** + * Add a chunk of generated JS to the beginning of this source node. + * + * @param aChunk A string snippet of generated JS code, another instance of + * SourceNode, or an array where each member is one of those things. 
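// --- Illustrative usage sketch (editor's note, not part of the vendored file). ---
// add() and prepend() above accept a string, another SourceNode, or an array of either,
// and both return `this`, so calls chain. Hedged example:
var SourceNode = require('source-map').SourceNode;
var body = new SourceNode(null, null, null, 'console.log("hi");\n');
body.prepend('/* banner */\n').add(['\n', '/* footer */\n']);
console.log(body.toString());
// -> '/* banner */\nconsole.log("hi");\n\n/* footer */\n'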
+ */ +SourceNode.prototype.prepend = function SourceNode_prepend(aChunk) { + if (Array.isArray(aChunk)) { + for (var i = aChunk.length-1; i >= 0; i--) { + this.prepend(aChunk[i]); + } + } + else if (aChunk[isSourceNode] || typeof aChunk === "string") { + this.children.unshift(aChunk); + } + else { + throw new TypeError( + "Expected a SourceNode, string, or an array of SourceNodes and strings. Got " + aChunk + ); + } + return this; +}; + +/** + * Walk over the tree of JS snippets in this node and its children. The + * walking function is called once for each snippet of JS and is passed that + * snippet and the its original associated source's line/column location. + * + * @param aFn The traversal function. + */ +SourceNode.prototype.walk = function SourceNode_walk(aFn) { + var chunk; + for (var i = 0, len = this.children.length; i < len; i++) { + chunk = this.children[i]; + if (chunk[isSourceNode]) { + chunk.walk(aFn); + } + else { + if (chunk !== '') { + aFn(chunk, { source: this.source, + line: this.line, + column: this.column, + name: this.name }); + } + } + } +}; + +/** + * Like `String.prototype.join` except for SourceNodes. Inserts `aStr` between + * each of `this.children`. + * + * @param aSep The separator. + */ +SourceNode.prototype.join = function SourceNode_join(aSep) { + var newChildren; + var i; + var len = this.children.length; + if (len > 0) { + newChildren = []; + for (i = 0; i < len-1; i++) { + newChildren.push(this.children[i]); + newChildren.push(aSep); + } + newChildren.push(this.children[i]); + this.children = newChildren; + } + return this; +}; + +/** + * Call String.prototype.replace on the very right-most source snippet. Useful + * for trimming whitespace from the end of a source node, etc. + * + * @param aPattern The pattern to replace. + * @param aReplacement The thing to replace the pattern with. + */ +SourceNode.prototype.replaceRight = function SourceNode_replaceRight(aPattern, aReplacement) { + var lastChild = this.children[this.children.length - 1]; + if (lastChild[isSourceNode]) { + lastChild.replaceRight(aPattern, aReplacement); + } + else if (typeof lastChild === 'string') { + this.children[this.children.length - 1] = lastChild.replace(aPattern, aReplacement); + } + else { + this.children.push(''.replace(aPattern, aReplacement)); + } + return this; +}; + +/** + * Set the source content for a source file. This will be added to the SourceMapGenerator + * in the sourcesContent field. + * + * @param aSourceFile The filename of the source file + * @param aSourceContent The content of the source file + */ +SourceNode.prototype.setSourceContent = + function SourceNode_setSourceContent(aSourceFile, aSourceContent) { + this.sourceContents[util.toSetString(aSourceFile)] = aSourceContent; + }; + +/** + * Walk over the tree of SourceNodes. The walking function is called for each + * source file content and is passed the filename and source content. + * + * @param aFn The traversal function. + */ +SourceNode.prototype.walkSourceContents = + function SourceNode_walkSourceContents(aFn) { + for (var i = 0, len = this.children.length; i < len; i++) { + if (this.children[i][isSourceNode]) { + this.children[i].walkSourceContents(aFn); + } + } + + var sources = Object.keys(this.sourceContents); + for (var i = 0, len = sources.length; i < len; i++) { + aFn(util.fromSetString(sources[i]), this.sourceContents[sources[i]]); + } + }; + +/** + * Return the string representation of this source node. 
Walks over the tree + * and concatenates all the various snippets together to one string. + */ +SourceNode.prototype.toString = function SourceNode_toString() { + var str = ""; + this.walk(function (chunk) { + str += chunk; + }); + return str; +}; + +/** + * Returns the string representation of this source node along with a source + * map. + */ +SourceNode.prototype.toStringWithSourceMap = function SourceNode_toStringWithSourceMap(aArgs) { + var generated = { + code: "", + line: 1, + column: 0 + }; + var map = new SourceMapGenerator(aArgs); + var sourceMappingActive = false; + var lastOriginalSource = null; + var lastOriginalLine = null; + var lastOriginalColumn = null; + var lastOriginalName = null; + this.walk(function (chunk, original) { + generated.code += chunk; + if (original.source !== null + && original.line !== null + && original.column !== null) { + if(lastOriginalSource !== original.source + || lastOriginalLine !== original.line + || lastOriginalColumn !== original.column + || lastOriginalName !== original.name) { + map.addMapping({ + source: original.source, + original: { + line: original.line, + column: original.column + }, + generated: { + line: generated.line, + column: generated.column + }, + name: original.name + }); + } + lastOriginalSource = original.source; + lastOriginalLine = original.line; + lastOriginalColumn = original.column; + lastOriginalName = original.name; + sourceMappingActive = true; + } else if (sourceMappingActive) { + map.addMapping({ + generated: { + line: generated.line, + column: generated.column + } + }); + lastOriginalSource = null; + sourceMappingActive = false; + } + for (var idx = 0, length = chunk.length; idx < length; idx++) { + if (chunk.charCodeAt(idx) === NEWLINE_CODE) { + generated.line++; + generated.column = 0; + // Mappings end at eol + if (idx + 1 === length) { + lastOriginalSource = null; + sourceMappingActive = false; + } else if (sourceMappingActive) { + map.addMapping({ + source: original.source, + original: { + line: original.line, + column: original.column + }, + generated: { + line: generated.line, + column: generated.column + }, + name: original.name + }); + } + } else { + generated.column++; + } + } + }); + this.walkSourceContents(function (sourceFile, sourceContent) { + map.setSourceContent(sourceFile, sourceContent); + }); + + return { code: generated.code, map: map }; +}; + +exports.SourceNode = SourceNode; diff --git a/node_modules/source-map/lib/util.js b/node_modules/source-map/lib/util.js new file mode 100644 index 0000000..3ca92e5 --- /dev/null +++ b/node_modules/source-map/lib/util.js @@ -0,0 +1,488 @@ +/* -*- Mode: js; js-indent-level: 2; -*- */ +/* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + +/** + * This is a helper function for getting values from parameter/options + * objects. + * + * @param args The object we are extracting values from + * @param name The name of the property we are getting. + * @param defaultValue An optional value to return if the property is missing + * from the object. If this is not specified and the property is missing, an + * error will be thrown. 
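// --- Illustrative usage sketch (editor's note, not part of the vendored file). ---
// toStringWithSourceMap() above renders the tree once, producing both the generated code
// and a SourceMapGenerator for it. Hedged example with made-up positions:
var SourceNode = require('source-map').SourceNode;
var node = new SourceNode(null, null, null, [
  new SourceNode(1, 0, 'a.js', 'var x = 1;\n'),
  new SourceNode(2, 0, 'a.js', 'var y = 2;\n')
]);
var out = node.toStringWithSourceMap({ file: 'out.js' });
console.log(out.code);             // -> 'var x = 1;\nvar y = 2;\n'
console.log(out.map.toString());   // out.map is a SourceMapGenerator; this is its JSON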
+ */ +function getArg(aArgs, aName, aDefaultValue) { + if (aName in aArgs) { + return aArgs[aName]; + } else if (arguments.length === 3) { + return aDefaultValue; + } else { + throw new Error('"' + aName + '" is a required argument.'); + } +} +exports.getArg = getArg; + +var urlRegexp = /^(?:([\w+\-.]+):)?\/\/(?:(\w+:\w+)@)?([\w.-]*)(?::(\d+))?(.*)$/; +var dataUrlRegexp = /^data:.+\,.+$/; + +function urlParse(aUrl) { + var match = aUrl.match(urlRegexp); + if (!match) { + return null; + } + return { + scheme: match[1], + auth: match[2], + host: match[3], + port: match[4], + path: match[5] + }; +} +exports.urlParse = urlParse; + +function urlGenerate(aParsedUrl) { + var url = ''; + if (aParsedUrl.scheme) { + url += aParsedUrl.scheme + ':'; + } + url += '//'; + if (aParsedUrl.auth) { + url += aParsedUrl.auth + '@'; + } + if (aParsedUrl.host) { + url += aParsedUrl.host; + } + if (aParsedUrl.port) { + url += ":" + aParsedUrl.port + } + if (aParsedUrl.path) { + url += aParsedUrl.path; + } + return url; +} +exports.urlGenerate = urlGenerate; + +/** + * Normalizes a path, or the path portion of a URL: + * + * - Replaces consecutive slashes with one slash. + * - Removes unnecessary '.' parts. + * - Removes unnecessary '/..' parts. + * + * Based on code in the Node.js 'path' core module. + * + * @param aPath The path or url to normalize. + */ +function normalize(aPath) { + var path = aPath; + var url = urlParse(aPath); + if (url) { + if (!url.path) { + return aPath; + } + path = url.path; + } + var isAbsolute = exports.isAbsolute(path); + + var parts = path.split(/\/+/); + for (var part, up = 0, i = parts.length - 1; i >= 0; i--) { + part = parts[i]; + if (part === '.') { + parts.splice(i, 1); + } else if (part === '..') { + up++; + } else if (up > 0) { + if (part === '') { + // The first part is blank if the path is absolute. Trying to go + // above the root is a no-op. Therefore we can remove all '..' parts + // directly after the root. + parts.splice(i + 1, up); + up = 0; + } else { + parts.splice(i, 2); + up--; + } + } + } + path = parts.join('/'); + + if (path === '') { + path = isAbsolute ? '/' : '.'; + } + + if (url) { + url.path = path; + return urlGenerate(url); + } + return path; +} +exports.normalize = normalize; + +/** + * Joins two paths/URLs. + * + * @param aRoot The root path or URL. + * @param aPath The path or URL to be joined with the root. + * + * - If aPath is a URL or a data URI, aPath is returned, unless aPath is a + * scheme-relative URL: Then the scheme of aRoot, if any, is prepended + * first. + * - Otherwise aPath is a path. If aRoot is a URL, then its path portion + * is updated with the result and aRoot is returned. Otherwise the result + * is returned. + * - If aPath is absolute, the result is aPath. + * - Otherwise the two paths are joined with a slash. + * - Joining for example 'http://' and 'www.example.com' is also supported. 
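// --- Illustrative sketch (editor's note, not part of the vendored file). ---
// The path helpers above are internal, but since the file ships with the package they
// can be required directly for illustration (an assumption about this 0.6.x layout,
// not a documented public API):
var util = require('source-map/lib/util');
console.log(util.normalize('/a/b//c/./d/../e'));            // -> '/a/b/c/e'
console.log(util.join('http://example.com/www', 'one.js'));
// -> 'http://example.com/www/one.js'
console.log(util.join('/a/b', '/abs/one.js'));              // absolute aPath wins -> '/abs/one.js'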
+ */ +function join(aRoot, aPath) { + if (aRoot === "") { + aRoot = "."; + } + if (aPath === "") { + aPath = "."; + } + var aPathUrl = urlParse(aPath); + var aRootUrl = urlParse(aRoot); + if (aRootUrl) { + aRoot = aRootUrl.path || '/'; + } + + // `join(foo, '//www.example.org')` + if (aPathUrl && !aPathUrl.scheme) { + if (aRootUrl) { + aPathUrl.scheme = aRootUrl.scheme; + } + return urlGenerate(aPathUrl); + } + + if (aPathUrl || aPath.match(dataUrlRegexp)) { + return aPath; + } + + // `join('http://', 'www.example.com')` + if (aRootUrl && !aRootUrl.host && !aRootUrl.path) { + aRootUrl.host = aPath; + return urlGenerate(aRootUrl); + } + + var joined = aPath.charAt(0) === '/' + ? aPath + : normalize(aRoot.replace(/\/+$/, '') + '/' + aPath); + + if (aRootUrl) { + aRootUrl.path = joined; + return urlGenerate(aRootUrl); + } + return joined; +} +exports.join = join; + +exports.isAbsolute = function (aPath) { + return aPath.charAt(0) === '/' || urlRegexp.test(aPath); +}; + +/** + * Make a path relative to a URL or another path. + * + * @param aRoot The root path or URL. + * @param aPath The path or URL to be made relative to aRoot. + */ +function relative(aRoot, aPath) { + if (aRoot === "") { + aRoot = "."; + } + + aRoot = aRoot.replace(/\/$/, ''); + + // It is possible for the path to be above the root. In this case, simply + // checking whether the root is a prefix of the path won't work. Instead, we + // need to remove components from the root one by one, until either we find + // a prefix that fits, or we run out of components to remove. + var level = 0; + while (aPath.indexOf(aRoot + '/') !== 0) { + var index = aRoot.lastIndexOf("/"); + if (index < 0) { + return aPath; + } + + // If the only part of the root that is left is the scheme (i.e. http://, + // file:///, etc.), one or more slashes (/), or simply nothing at all, we + // have exhausted all components, so the path is not relative to the root. + aRoot = aRoot.slice(0, index); + if (aRoot.match(/^([^\/]+:\/)?\/*$/)) { + return aPath; + } + + ++level; + } + + // Make sure we add a "../" for each component we removed from the root. + return Array(level + 1).join("../") + aPath.substr(aRoot.length + 1); +} +exports.relative = relative; + +var supportsNullProto = (function () { + var obj = Object.create(null); + return !('__proto__' in obj); +}()); + +function identity (s) { + return s; +} + +/** + * Because behavior goes wacky when you set `__proto__` on objects, we + * have to prefix all the strings in our set with an arbitrary character. + * + * See https://github.com/mozilla/source-map/pull/31 and + * https://github.com/mozilla/source-map/issues/30 + * + * @param String aStr + */ +function toSetString(aStr) { + if (isProtoString(aStr)) { + return '$' + aStr; + } + + return aStr; +} +exports.toSetString = supportsNullProto ? identity : toSetString; + +function fromSetString(aStr) { + if (isProtoString(aStr)) { + return aStr.slice(1); + } + + return aStr; +} +exports.fromSetString = supportsNullProto ? 
identity : fromSetString; + +function isProtoString(s) { + if (!s) { + return false; + } + + var length = s.length; + + if (length < 9 /* "__proto__".length */) { + return false; + } + + if (s.charCodeAt(length - 1) !== 95 /* '_' */ || + s.charCodeAt(length - 2) !== 95 /* '_' */ || + s.charCodeAt(length - 3) !== 111 /* 'o' */ || + s.charCodeAt(length - 4) !== 116 /* 't' */ || + s.charCodeAt(length - 5) !== 111 /* 'o' */ || + s.charCodeAt(length - 6) !== 114 /* 'r' */ || + s.charCodeAt(length - 7) !== 112 /* 'p' */ || + s.charCodeAt(length - 8) !== 95 /* '_' */ || + s.charCodeAt(length - 9) !== 95 /* '_' */) { + return false; + } + + for (var i = length - 10; i >= 0; i--) { + if (s.charCodeAt(i) !== 36 /* '$' */) { + return false; + } + } + + return true; +} + +/** + * Comparator between two mappings where the original positions are compared. + * + * Optionally pass in `true` as `onlyCompareGenerated` to consider two + * mappings with the same original source/line/column, but different generated + * line and column the same. Useful when searching for a mapping with a + * stubbed out mapping. + */ +function compareByOriginalPositions(mappingA, mappingB, onlyCompareOriginal) { + var cmp = strcmp(mappingA.source, mappingB.source); + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.originalLine - mappingB.originalLine; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.originalColumn - mappingB.originalColumn; + if (cmp !== 0 || onlyCompareOriginal) { + return cmp; + } + + cmp = mappingA.generatedColumn - mappingB.generatedColumn; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.generatedLine - mappingB.generatedLine; + if (cmp !== 0) { + return cmp; + } + + return strcmp(mappingA.name, mappingB.name); +} +exports.compareByOriginalPositions = compareByOriginalPositions; + +/** + * Comparator between two mappings with deflated source and name indices where + * the generated positions are compared. + * + * Optionally pass in `true` as `onlyCompareGenerated` to consider two + * mappings with the same generated line and column, but different + * source/name/original line and column the same. Useful when searching for a + * mapping with a stubbed out mapping. + */ +function compareByGeneratedPositionsDeflated(mappingA, mappingB, onlyCompareGenerated) { + var cmp = mappingA.generatedLine - mappingB.generatedLine; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.generatedColumn - mappingB.generatedColumn; + if (cmp !== 0 || onlyCompareGenerated) { + return cmp; + } + + cmp = strcmp(mappingA.source, mappingB.source); + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.originalLine - mappingB.originalLine; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.originalColumn - mappingB.originalColumn; + if (cmp !== 0) { + return cmp; + } + + return strcmp(mappingA.name, mappingB.name); +} +exports.compareByGeneratedPositionsDeflated = compareByGeneratedPositionsDeflated; + +function strcmp(aStr1, aStr2) { + if (aStr1 === aStr2) { + return 0; + } + + if (aStr1 === null) { + return 1; // aStr2 !== null + } + + if (aStr2 === null) { + return -1; // aStr1 !== null + } + + if (aStr1 > aStr2) { + return 1; + } + + return -1; +} + +/** + * Comparator between two mappings with inflated source and name strings where + * the generated positions are compared. 
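 *
 * For example, a mapping at generated line 2, column 0 sorts before a mapping
 * at generated line 2, column 5 regardless of their original positions; ties
 * fall through to source, original line/column, and finally name.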
+ */ +function compareByGeneratedPositionsInflated(mappingA, mappingB) { + var cmp = mappingA.generatedLine - mappingB.generatedLine; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.generatedColumn - mappingB.generatedColumn; + if (cmp !== 0) { + return cmp; + } + + cmp = strcmp(mappingA.source, mappingB.source); + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.originalLine - mappingB.originalLine; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.originalColumn - mappingB.originalColumn; + if (cmp !== 0) { + return cmp; + } + + return strcmp(mappingA.name, mappingB.name); +} +exports.compareByGeneratedPositionsInflated = compareByGeneratedPositionsInflated; + +/** + * Strip any JSON XSSI avoidance prefix from the string (as documented + * in the source maps specification), and then parse the string as + * JSON. + */ +function parseSourceMapInput(str) { + return JSON.parse(str.replace(/^\)]}'[^\n]*\n/, '')); +} +exports.parseSourceMapInput = parseSourceMapInput; + +/** + * Compute the URL of a source given the the source root, the source's + * URL, and the source map's URL. + */ +function computeSourceURL(sourceRoot, sourceURL, sourceMapURL) { + sourceURL = sourceURL || ''; + + if (sourceRoot) { + // This follows what Chrome does. + if (sourceRoot[sourceRoot.length - 1] !== '/' && sourceURL[0] !== '/') { + sourceRoot += '/'; + } + // The spec says: + // Line 4: An optional source root, useful for relocating source + // files on a server or removing repeated values in the + // “sources” entry. This value is prepended to the individual + // entries in the “source” field. + sourceURL = sourceRoot + sourceURL; + } + + // Historically, SourceMapConsumer did not take the sourceMapURL as + // a parameter. This mode is still somewhat supported, which is why + // this code block is conditional. However, it's preferable to pass + // the source map URL to SourceMapConsumer, so that this function + // can implement the source URL resolution algorithm as outlined in + // the spec. This block is basically the equivalent of: + // new URL(sourceURL, sourceMapURL).toString() + // ... except it avoids using URL, which wasn't available in the + // older releases of node still supported by this library. + // + // The spec says: + // If the sources are not absolute URLs after prepending of the + // “sourceRoot”, the sources are resolved relative to the + // SourceMap (like resolving script src in a html document). + if (sourceMapURL) { + var parsed = urlParse(sourceMapURL); + if (!parsed) { + throw new Error("sourceMapURL could not be parsed"); + } + if (parsed.path) { + // Strip the last path component, but keep the "/". 
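      // e.g. a sourceMapURL path of '/assets/app.js.map' leaves '/assets/' to
      // resolve sourceURL against.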
+ var index = parsed.path.lastIndexOf('/'); + if (index >= 0) { + parsed.path = parsed.path.substring(0, index + 1); + } + } + sourceURL = join(urlGenerate(parsed), sourceURL); + } + + return normalize(sourceURL); +} +exports.computeSourceURL = computeSourceURL; diff --git a/node_modules/source-map/package.json b/node_modules/source-map/package.json new file mode 100644 index 0000000..2466341 --- /dev/null +++ b/node_modules/source-map/package.json @@ -0,0 +1,73 @@ +{ + "name": "source-map", + "description": "Generates and consumes source maps", + "version": "0.6.1", + "homepage": "https://github.com/mozilla/source-map", + "author": "Nick Fitzgerald ", + "contributors": [ + "Tobias Koppers ", + "Duncan Beevers ", + "Stephen Crane ", + "Ryan Seddon ", + "Miles Elam ", + "Mihai Bazon ", + "Michael Ficarra ", + "Todd Wolfson ", + "Alexander Solovyov ", + "Felix Gnass ", + "Conrad Irwin ", + "usrbincc ", + "David Glasser ", + "Chase Douglas ", + "Evan Wallace ", + "Heather Arthur ", + "Hugh Kennedy ", + "David Glasser ", + "Simon Lydell ", + "Jmeas Smith ", + "Michael Z Goddard ", + "azu ", + "John Gozde ", + "Adam Kirkton ", + "Chris Montgomery ", + "J. Ryan Stinnett ", + "Jack Herrington ", + "Chris Truter ", + "Daniel Espeset ", + "Jamie Wong ", + "Eddy Bruël ", + "Hawken Rives ", + "Gilad Peleg ", + "djchie ", + "Gary Ye ", + "Nicolas Lalevée " + ], + "repository": { + "type": "git", + "url": "http://github.com/mozilla/source-map.git" + }, + "main": "./source-map.js", + "files": [ + "source-map.js", + "source-map.d.ts", + "lib/", + "dist/source-map.debug.js", + "dist/source-map.js", + "dist/source-map.min.js", + "dist/source-map.min.js.map" + ], + "engines": { + "node": ">=0.10.0" + }, + "license": "BSD-3-Clause", + "scripts": { + "test": "npm run build && node test/run-tests.js", + "build": "webpack --color", + "toc": "doctoc --title '## Table of Contents' README.md && doctoc --title '## Table of Contents' CONTRIBUTING.md" + }, + "devDependencies": { + "doctoc": "^0.15.0", + "webpack": "^1.12.0" + }, + "typings": "source-map" +} diff --git a/node_modules/source-map/source-map.d.ts b/node_modules/source-map/source-map.d.ts new file mode 100644 index 0000000..8f972b0 --- /dev/null +++ b/node_modules/source-map/source-map.d.ts @@ -0,0 +1,98 @@ +export interface StartOfSourceMap { + file?: string; + sourceRoot?: string; +} + +export interface RawSourceMap extends StartOfSourceMap { + version: string; + sources: string[]; + names: string[]; + sourcesContent?: string[]; + mappings: string; +} + +export interface Position { + line: number; + column: number; +} + +export interface LineRange extends Position { + lastColumn: number; +} + +export interface FindPosition extends Position { + // SourceMapConsumer.GREATEST_LOWER_BOUND or SourceMapConsumer.LEAST_UPPER_BOUND + bias?: number; +} + +export interface SourceFindPosition extends FindPosition { + source: string; +} + +export interface MappedPosition extends Position { + source: string; + name?: string; +} + +export interface MappingItem { + source: string; + generatedLine: number; + generatedColumn: number; + originalLine: number; + originalColumn: number; + name: string; +} + +export class SourceMapConsumer { + static GENERATED_ORDER: number; + static ORIGINAL_ORDER: number; + + static GREATEST_LOWER_BOUND: number; + static LEAST_UPPER_BOUND: number; + + constructor(rawSourceMap: RawSourceMap); + computeColumnSpans(): void; + originalPositionFor(generatedPosition: FindPosition): MappedPosition; + generatedPositionFor(originalPosition: 
SourceFindPosition): LineRange; + allGeneratedPositionsFor(originalPosition: MappedPosition): Position[]; + hasContentsOfAllSources(): boolean; + sourceContentFor(source: string, returnNullOnMissing?: boolean): string; + eachMapping(callback: (mapping: MappingItem) => void, context?: any, order?: number): void; +} + +export interface Mapping { + generated: Position; + original: Position; + source: string; + name?: string; +} + +export class SourceMapGenerator { + constructor(startOfSourceMap?: StartOfSourceMap); + static fromSourceMap(sourceMapConsumer: SourceMapConsumer): SourceMapGenerator; + addMapping(mapping: Mapping): void; + setSourceContent(sourceFile: string, sourceContent: string): void; + applySourceMap(sourceMapConsumer: SourceMapConsumer, sourceFile?: string, sourceMapPath?: string): void; + toString(): string; +} + +export interface CodeWithSourceMap { + code: string; + map: SourceMapGenerator; +} + +export class SourceNode { + constructor(); + constructor(line: number, column: number, source: string); + constructor(line: number, column: number, source: string, chunk?: string, name?: string); + static fromStringWithSourceMap(code: string, sourceMapConsumer: SourceMapConsumer, relativePath?: string): SourceNode; + add(chunk: string): void; + prepend(chunk: string): void; + setSourceContent(sourceFile: string, sourceContent: string): void; + walk(fn: (chunk: string, mapping: MappedPosition) => void): void; + walkSourceContents(fn: (file: string, content: string) => void): void; + join(sep: string): SourceNode; + replaceRight(pattern: string, replacement: string): SourceNode; + toString(): string; + toStringWithSourceMap(startOfSourceMap?: StartOfSourceMap): CodeWithSourceMap; +} diff --git a/node_modules/source-map/source-map.js b/node_modules/source-map/source-map.js new file mode 100644 index 0000000..bc88fe8 --- /dev/null +++ b/node_modules/source-map/source-map.js @@ -0,0 +1,8 @@ +/* + * Copyright 2009-2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE.txt or: + * http://opensource.org/licenses/BSD-3-Clause + */ +exports.SourceMapGenerator = require('./lib/source-map-generator').SourceMapGenerator; +exports.SourceMapConsumer = require('./lib/source-map-consumer').SourceMapConsumer; +exports.SourceNode = require('./lib/source-node').SourceNode; diff --git a/node_modules/sucrase/LICENSE b/node_modules/sucrase/LICENSE new file mode 100644 index 0000000..06d77d6 --- /dev/null +++ b/node_modules/sucrase/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2012-2018 various contributors (see AUTHORS) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/sucrase/README.md b/node_modules/sucrase/README.md new file mode 100644 index 0000000..90e13dd --- /dev/null +++ b/node_modules/sucrase/README.md @@ -0,0 +1,290 @@ +# Sucrase + +[![Build Status](https://github.com/alangpierce/sucrase/workflows/All%20tests/badge.svg)](https://github.com/alangpierce/sucrase/actions) +[![npm version](https://img.shields.io/npm/v/sucrase.svg)](https://www.npmjs.com/package/sucrase) +[![Install Size](https://packagephobia.now.sh/badge?p=sucrase)](https://packagephobia.now.sh/result?p=sucrase) +[![MIT License](https://img.shields.io/npm/l/express.svg?maxAge=2592000)](LICENSE) +[![Join the chat at https://gitter.im/sucrasejs](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/sucrasejs/Lobby) + +## [Try it out](https://sucrase.io) + +## Quick usage + +```bash +yarn add --dev sucrase # Or npm install --save-dev sucrase +node -r sucrase/register main.ts +``` + +Using the [ts-node](https://github.com/TypeStrong/ts-node) integration: + +```bash +yarn add --dev sucrase ts-node typescript +./node_modules/.bin/ts-node --transpiler sucrase/ts-node-plugin main.ts +``` + +## Project overview + +Sucrase is an alternative to Babel that allows super-fast development builds. +Instead of compiling a large range of JS features to be able to work in Internet +Explorer, Sucrase assumes that you're developing with a recent browser or recent +Node.js version, so it focuses on compiling non-standard language extensions: +JSX, TypeScript, and Flow. Because of this smaller scope, Sucrase can get away +with an architecture that is much more performant but less extensible and +maintainable. Sucrase's parser is forked from Babel's parser (so Sucrase is +indebted to Babel and wouldn't be possible without it) and trims it down to a +focused subset of what Babel solves. If it fits your use case, hopefully Sucrase +can speed up your development experience! + +**Sucrase has been extensively tested.** It can successfully build +the [Benchling](https://benchling.com/) frontend code, +[Babel](https://github.com/babel/babel), +[React](https://github.com/facebook/react), +[TSLint](https://github.com/palantir/tslint), +[Apollo client](https://github.com/apollographql/apollo-client), and +[decaffeinate](https://github.com/decaffeinate/decaffeinate) +with all tests passing, about 1 million lines of code total. + +**Sucrase is about 20x faster than Babel.** Here's one measurement of how +Sucrase compares with other tools when compiling the Jest codebase 3 times, +about 360k lines of code total: + +```text + Time Speed +Sucrase 0.57 seconds 636975 lines per second +swc 1.19 seconds 304526 lines per second +esbuild 1.45 seconds 248692 lines per second +TypeScript 8.98 seconds 40240 lines per second +Babel 9.18 seconds 39366 lines per second +``` + +Details: Measured on July 2022. Tools run in single-threaded mode without warm-up. See the +[benchmark code](https://github.com/alangpierce/sucrase/blob/main/benchmark/benchmark.ts) +for methodology and caveats. + +## Transforms + +The main configuration option in Sucrase is an array of transform names. These +transforms are available: + +* **jsx**: Enables JSX syntax. 
By default, JSX is transformed to `React.createClass`, + but may be preserved or transformed to `_jsx()` by setting the `jsxRuntime` option. + Also adds `createReactClass` display names and JSX context information. +* **typescript**: Compiles TypeScript code to JavaScript, removing type + annotations and handling features like enums. Does not check types. Sucrase + transforms each file independently, so you should enable the `isolatedModules` + TypeScript flag so that the typechecker will disallow the few features like + `const enum`s that need cross-file compilation. +* **flow**: Removes Flow type annotations. Does not check types. +* **imports**: Transforms ES Modules (`import`/`export`) to CommonJS + (`require`/`module.exports`) using the same approach as Babel and TypeScript + with `--esModuleInterop`. If `preserveDynamicImport` is specified in the Sucrase + options, then dynamic `import` expressions are left alone, which is particularly + useful in Node to load ESM-only libraries. If `preserveDynamicImport` is not + specified, `import` expressions are transformed into a promise-wrapped call to + `require`. +* **react-hot-loader**: Performs the equivalent of the `react-hot-loader/babel` + transform in the [react-hot-loader](https://github.com/gaearon/react-hot-loader) + project. This enables advanced hot reloading use cases such as editing of + bound methods. +* **jest**: Hoist desired [jest](https://jestjs.io/) method calls above imports in + the same way as [babel-plugin-jest-hoist](https://github.com/facebook/jest/tree/master/packages/babel-plugin-jest-hoist). + Does not validate the arguments passed to `jest.mock`, but the same rules still apply. + +When the `imports` transform is *not* specified (i.e. when targeting ESM), the +`injectCreateRequireForImportRequire` option can be specified to transform TS +`import foo = require("foo");` in a way that matches the +[TypeScript 4.7 behavior](https://devblogs.microsoft.com/typescript/announcing-typescript-4-7/#commonjs-interoperability) +with `module: nodenext`. + +These newer JS features are transformed by default: + +* [Optional chaining](https://github.com/tc39/proposal-optional-chaining): `a?.b` +* [Nullish coalescing](https://github.com/tc39/proposal-nullish-coalescing): `a ?? b` +* [Class fields](https://github.com/tc39/proposal-class-fields): `class C { x = 1; }`. + This includes static fields but not the `#x` private field syntax. +* [Numeric separators](https://github.com/tc39/proposal-numeric-separator): + `const n = 1_234;` +* [Optional catch binding](https://github.com/tc39/proposal-optional-catch-binding): + `try { doThing(); } catch { }`. + +If your target runtime supports these features, you can specify +`disableESTransforms: true` so that Sucrase preserves the syntax rather than +trying to transform it. Note that transpiled and standard class fields behave +slightly differently; see the +[TypeScript 3.7 release notes](https://www.typescriptlang.org/docs/handbook/release-notes/typescript-3-7.html#the-usedefineforclassfields-flag-and-the-declare-property-modifier) +for details. If you use TypeScript, you can enable the TypeScript option +`useDefineForClassFields` to enable error checking related to these differences. + +### Unsupported syntax + +All JS syntax not mentioned above will "pass through" and needs to be supported +by your JS runtime. 
For example: + +* Decorators, private fields, `throw` expressions, generator arrow functions, + and `do` expressions are all unsupported in browsers and Node (as of this + writing), and Sucrase doesn't make an attempt to transpile them. +* Object rest/spread, async functions, and async iterators are all recent + features that should work fine, but might cause issues if you use older + versions of tools like webpack. BigInt and newer regex features may or may not + work, based on your tooling. + +### JSX Options + +By default, JSX is compiled to React functions in development mode. This can be +configured with a few options: + +* **jsxRuntime**: A string specifying the transform mode, which can be one of three values: + * `"classic"` (default): The original JSX transform that calls `React.createElement` by default. + To configure for non-React use cases, specify: + * **jsxPragma**: Element creation function, defaults to `React.createElement`. + * **jsxFragmentPragma**: Fragment component, defaults to `React.Fragment`. + * `"automatic"`: The [new JSX transform](https://reactjs.org/blog/2020/09/22/introducing-the-new-jsx-transform.html) + introduced with React 17, which calls `jsx` functions and auto-adds import statements. + To configure for non-React use cases, specify: + * **jsxImportSource**: Package name for auto-generated import statements, defaults to `react`. + * `"preserve"`: Don't transform JSX, and instead emit it as-is in the output code. +* **production**: If `true`, use production version of functions and don't include debugging + information. When using React in production mode with the automatic transform, this *must* be + set to true to avoid an error about `jsxDEV` being missing. + +### Legacy CommonJS interop + +Two legacy modes can be used with the `imports` transform: + +* **enableLegacyTypeScriptModuleInterop**: Use the default TypeScript approach + to CommonJS interop instead of assuming that TypeScript's `--esModuleInterop` + flag is enabled. For example, if a CJS module exports a function, legacy + TypeScript interop requires you to write `import * as add from './add';`, + while Babel, Webpack, Node.js, and TypeScript with `--esModuleInterop` require + you to write `import add from './add';`. As mentioned in the + [docs](https://www.typescriptlang.org/docs/handbook/release-notes/typescript-2-7.html#support-for-import-d-from-cjs-form-commonjs-modules-with---esmoduleinterop), + the TypeScript team recommends you always use `--esModuleInterop`. +* **enableLegacyBabel5ModuleInterop**: Use the Babel 5 approach to CommonJS + interop, so that you can run `require('./MyModule')` instead of + `require('./MyModule').default`. Analogous to + [babel-plugin-add-module-exports](https://github.com/59naga/babel-plugin-add-module-exports). 
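As a rough sketch of how the options above fit together, they can be passed to the `transform` API shown under "Usage from code" below. The input snippet and variable names here are only illustrative:

```js
import {transform} from "sucrase";

const source = `import {Greeting} from "./Greeting";
export const App = () => <Greeting name="world" />;`;

// Classic JSX runtime plus the imports transform: JSX becomes
// React.createElement(...) calls (or the configured jsxPragma), and the ES
// module syntax becomes CommonJS require/exports.
const cjsOutput = transform(source, {
  transforms: ["jsx", "imports"],
  jsxRuntime: "classic",
}).code;

// Automatic JSX runtime in production mode, leaving module syntax alone: JSX
// becomes jsx(...) calls with an auto-added import resolved from
// jsxImportSource ("react" by default).
const esmOutput = transform(source, {
  transforms: ["jsx"],
  jsxRuntime: "automatic",
  production: true,
}).code;
```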
+ +## Usage + +### Tool integrations + +* [Webpack](https://github.com/alangpierce/sucrase/tree/main/integrations/webpack-loader) +* [Gulp](https://github.com/alangpierce/sucrase/tree/main/integrations/gulp-plugin) +* [Jest](https://github.com/alangpierce/sucrase/tree/main/integrations/jest-plugin) +* [Rollup](https://github.com/rollup/plugins/tree/master/packages/sucrase) +* [Broccoli](https://github.com/stefanpenner/broccoli-sucrase) + +### Usage in Node + +The most robust way is to use the Sucrase plugin for [ts-node](https://github.com/TypeStrong/ts-node), +which has various Node integrations and configures Sucrase via `tsconfig.json`: +```bash +ts-node --transpiler sucrase/ts-node-plugin +``` + +For projects that don't target ESM, Sucrase also has a require hook with some +reasonable defaults that can be accessed in a few ways: + +* From code: `require("sucrase/register");` +* When invoking Node: `node -r sucrase/register main.ts` +* As a separate binary: `sucrase-node main.ts` + +### Compiling a project to JS + +For simple use cases, Sucrase comes with a `sucrase` CLI that mirrors your +directory structure to an output directory: +```bash +sucrase ./srcDir -d ./outDir --transforms typescript,imports +``` + +### Usage from code + +For any advanced use cases, Sucrase can be called from JS directly: + +```js +import {transform} from "sucrase"; +const compiledCode = transform(code, {transforms: ["typescript", "imports"]}).code; +``` + +## What Sucrase is not + +Sucrase is intended to be useful for the most common cases, but it does not aim +to have nearly the scope and versatility of Babel. Some specific examples: + +* Sucrase does not check your code for errors. Sucrase's contract is that if you + give it valid code, it will produce valid JS code. If you give it invalid + code, it might produce invalid code, it might produce valid code, or it might + give an error. Always use Sucrase with a linter or typechecker, which is more + suited for error-checking. +* Sucrase is not pluginizable. With the current architecture, transforms need to + be explicitly written to cooperate with each other, so each additional + transform takes significant extra work. +* Sucrase is not good for prototyping language extensions and upcoming language + features. Its faster architecture makes new transforms more difficult to write + and more fragile. +* Sucrase will never produce code for old browsers like IE. Compiling code down + to ES5 is much more complicated than any transformation that Sucrase needs to + do. +* Sucrase is hesitant to implement upcoming JS features, although some of them + make sense to implement for pragmatic reasons. Its main focus is on language + extensions (JSX, TypeScript, Flow) that will never be supported by JS + runtimes. +* Like Babel, Sucrase is not a typechecker, and must process each file in + isolation. For example, TypeScript `const enum`s are treated as regular + `enum`s rather than inlining across files. +* You should think carefully before using Sucrase in production. Sucrase is + mostly beneficial in development, and in many cases, Babel or tsc will be more + suitable for production builds. + +See the [Project Vision](./docs/PROJECT_VISION.md) document for more details on +the philosophy behind Sucrase. + +## Motivation + +As JavaScript implementations mature, it becomes more and more reasonable to +disable Babel transforms, especially in development when you know that you're +targeting a modern runtime. 
You might hope that you could simplify and speed up +the build step by eventually disabling Babel entirely, but this isn't possible +if you're using a non-standard language extension like JSX, TypeScript, or Flow. +Unfortunately, disabling most transforms in Babel doesn't speed it up as much as +you might expect. To understand, let's take a look at how Babel works: + +1. Tokenize the input source code into a token stream. +2. Parse the token stream into an AST. +3. Walk the AST to compute the scope information for each variable. +4. Apply all transform plugins in a single traversal, resulting in a new AST. +5. Print the resulting AST. + +Only step 4 gets faster when disabling plugins, so there's always a fixed cost +to running Babel regardless of how many transforms are enabled. + +Sucrase bypasses most of these steps, and works like this: + +1. Tokenize the input source code into a token stream using a trimmed-down fork + of the Babel parser. This fork does not produce a full AST, but still + produces meaningful token metadata specifically designed for the later + transforms. +2. Scan through the tokens, computing preliminary information like all + imported/exported names. +3. Run the transform by doing a pass through the tokens and performing a number + of careful find-and-replace operations, like replacing ` 0 || + importInfo.namedExports.length > 0 + ) { + continue; + } + const names = [ + ...importInfo.defaultNames, + ...importInfo.wildcardNames, + ...importInfo.namedImports.map(({localName}) => localName), + ]; + if (names.every((name) => this.isTypeName(name))) { + this.importsToReplace.set(path, ""); + } + } + } + + isTypeName(name) { + return this.isTypeScriptTransformEnabled && !this.nonTypeIdentifiers.has(name); + } + + generateImportReplacements() { + for (const [path, importInfo] of this.importInfoByPath.entries()) { + const { + defaultNames, + wildcardNames, + namedImports, + namedExports, + exportStarNames, + hasStarExport, + } = importInfo; + + if ( + defaultNames.length === 0 && + wildcardNames.length === 0 && + namedImports.length === 0 && + namedExports.length === 0 && + exportStarNames.length === 0 && + !hasStarExport + ) { + // Import is never used, so don't even assign a name. + this.importsToReplace.set(path, `require('${path}');`); + continue; + } + + const primaryImportName = this.getFreeIdentifierForPath(path); + let secondaryImportName; + if (this.enableLegacyTypeScriptModuleInterop) { + secondaryImportName = primaryImportName; + } else { + secondaryImportName = + wildcardNames.length > 0 ? wildcardNames[0] : this.getFreeIdentifierForPath(path); + } + let requireCode = `var ${primaryImportName} = require('${path}');`; + if (wildcardNames.length > 0) { + for (const wildcardName of wildcardNames) { + const moduleExpr = this.enableLegacyTypeScriptModuleInterop + ? 
primaryImportName + : `${this.helperManager.getHelperName("interopRequireWildcard")}(${primaryImportName})`; + requireCode += ` var ${wildcardName} = ${moduleExpr};`; + } + } else if (exportStarNames.length > 0 && secondaryImportName !== primaryImportName) { + requireCode += ` var ${secondaryImportName} = ${this.helperManager.getHelperName( + "interopRequireWildcard", + )}(${primaryImportName});`; + } else if (defaultNames.length > 0 && secondaryImportName !== primaryImportName) { + requireCode += ` var ${secondaryImportName} = ${this.helperManager.getHelperName( + "interopRequireDefault", + )}(${primaryImportName});`; + } + + for (const {importedName, localName} of namedExports) { + requireCode += ` ${this.helperManager.getHelperName( + "createNamedExportFrom", + )}(${primaryImportName}, '${localName}', '${importedName}');`; + } + for (const exportStarName of exportStarNames) { + requireCode += ` exports.${exportStarName} = ${secondaryImportName};`; + } + if (hasStarExport) { + requireCode += ` ${this.helperManager.getHelperName( + "createStarExport", + )}(${primaryImportName});`; + } + + this.importsToReplace.set(path, requireCode); + + for (const defaultName of defaultNames) { + this.identifierReplacements.set(defaultName, `${secondaryImportName}.default`); + } + for (const {importedName, localName} of namedImports) { + this.identifierReplacements.set(localName, `${primaryImportName}.${importedName}`); + } + } + } + + getFreeIdentifierForPath(path) { + const components = path.split("/"); + const lastComponent = components[components.length - 1]; + const baseName = lastComponent.replace(/\W/g, ""); + return this.nameManager.claimFreeName(`_${baseName}`); + } + + preprocessImportAtIndex(index) { + const defaultNames = []; + const wildcardNames = []; + const namedImports = []; + + index++; + if ( + (this.tokens.matchesContextualAtIndex(index, _keywords.ContextualKeyword._type) || + this.tokens.matches1AtIndex(index, _types.TokenType._typeof)) && + !this.tokens.matches1AtIndex(index + 1, _types.TokenType.comma) && + !this.tokens.matchesContextualAtIndex(index + 1, _keywords.ContextualKeyword._from) + ) { + // import type declaration, so no need to process anything. 
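      // e.g. `import type {Foo} from "./foo";` or Flow's `import typeof Bar from "./bar";`.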
+ return; + } + + if (this.tokens.matches1AtIndex(index, _types.TokenType.parenL)) { + // Dynamic import, so nothing to do + return; + } + + if (this.tokens.matches1AtIndex(index, _types.TokenType.name)) { + defaultNames.push(this.tokens.identifierNameAtIndex(index)); + index++; + if (this.tokens.matches1AtIndex(index, _types.TokenType.comma)) { + index++; + } + } + + if (this.tokens.matches1AtIndex(index, _types.TokenType.star)) { + // * as + index += 2; + wildcardNames.push(this.tokens.identifierNameAtIndex(index)); + index++; + } + + if (this.tokens.matches1AtIndex(index, _types.TokenType.braceL)) { + const result = this.getNamedImports(index + 1); + index = result.newIndex; + + for (const namedImport of result.namedImports) { + // Treat {default as X} as a default import to ensure usage of require interop helper + if (namedImport.importedName === "default") { + defaultNames.push(namedImport.localName); + } else { + namedImports.push(namedImport); + } + } + } + + if (this.tokens.matchesContextualAtIndex(index, _keywords.ContextualKeyword._from)) { + index++; + } + + if (!this.tokens.matches1AtIndex(index, _types.TokenType.string)) { + throw new Error("Expected string token at the end of import statement."); + } + const path = this.tokens.stringValueAtIndex(index); + const importInfo = this.getImportInfo(path); + importInfo.defaultNames.push(...defaultNames); + importInfo.wildcardNames.push(...wildcardNames); + importInfo.namedImports.push(...namedImports); + if (defaultNames.length === 0 && wildcardNames.length === 0 && namedImports.length === 0) { + importInfo.hasBareImport = true; + } + } + + preprocessExportAtIndex(index) { + if ( + this.tokens.matches2AtIndex(index, _types.TokenType._export, _types.TokenType._var) || + this.tokens.matches2AtIndex(index, _types.TokenType._export, _types.TokenType._let) || + this.tokens.matches2AtIndex(index, _types.TokenType._export, _types.TokenType._const) + ) { + this.preprocessVarExportAtIndex(index); + } else if ( + this.tokens.matches2AtIndex(index, _types.TokenType._export, _types.TokenType._function) || + this.tokens.matches2AtIndex(index, _types.TokenType._export, _types.TokenType._class) + ) { + const exportName = this.tokens.identifierNameAtIndex(index + 2); + this.addExportBinding(exportName, exportName); + } else if (this.tokens.matches3AtIndex(index, _types.TokenType._export, _types.TokenType.name, _types.TokenType._function)) { + const exportName = this.tokens.identifierNameAtIndex(index + 3); + this.addExportBinding(exportName, exportName); + } else if (this.tokens.matches2AtIndex(index, _types.TokenType._export, _types.TokenType.braceL)) { + this.preprocessNamedExportAtIndex(index); + } else if (this.tokens.matches2AtIndex(index, _types.TokenType._export, _types.TokenType.star)) { + this.preprocessExportStarAtIndex(index); + } + } + + preprocessVarExportAtIndex(index) { + let depth = 0; + // Handle cases like `export let {x} = y;`, starting at the open-brace in that case. 
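    // Identifiers at depth 0 that are declarations (e.g. `a` and `b` in
    // `export let a = 1, b = 2;`) are registered so that later uses are
    // rewritten to `exports.a` and `exports.b`.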
+ for (let i = index + 2; ; i++) { + if ( + this.tokens.matches1AtIndex(i, _types.TokenType.braceL) || + this.tokens.matches1AtIndex(i, _types.TokenType.dollarBraceL) || + this.tokens.matches1AtIndex(i, _types.TokenType.bracketL) + ) { + depth++; + } else if ( + this.tokens.matches1AtIndex(i, _types.TokenType.braceR) || + this.tokens.matches1AtIndex(i, _types.TokenType.bracketR) + ) { + depth--; + } else if (depth === 0 && !this.tokens.matches1AtIndex(i, _types.TokenType.name)) { + break; + } else if (this.tokens.matches1AtIndex(1, _types.TokenType.eq)) { + const endIndex = this.tokens.currentToken().rhsEndIndex; + if (endIndex == null) { + throw new Error("Expected = token with an end index."); + } + i = endIndex - 1; + } else { + const token = this.tokens.tokens[i]; + if (_tokenizer.isDeclaration.call(void 0, token)) { + const exportName = this.tokens.identifierNameAtIndex(i); + this.identifierReplacements.set(exportName, `exports.${exportName}`); + } + } + } + } + + /** + * Walk this export statement just in case it's an export...from statement. + * If it is, combine it into the import info for that path. Otherwise, just + * bail out; it'll be handled later. + */ + preprocessNamedExportAtIndex(index) { + // export { + index += 2; + const {newIndex, namedImports} = this.getNamedImports(index); + index = newIndex; + + if (this.tokens.matchesContextualAtIndex(index, _keywords.ContextualKeyword._from)) { + index++; + } else { + // Reinterpret "a as b" to be local/exported rather than imported/local. + for (const {importedName: localName, localName: exportedName} of namedImports) { + this.addExportBinding(localName, exportedName); + } + return; + } + + if (!this.tokens.matches1AtIndex(index, _types.TokenType.string)) { + throw new Error("Expected string token at the end of import statement."); + } + const path = this.tokens.stringValueAtIndex(index); + const importInfo = this.getImportInfo(path); + importInfo.namedExports.push(...namedImports); + } + + preprocessExportStarAtIndex(index) { + let exportedName = null; + if (this.tokens.matches3AtIndex(index, _types.TokenType._export, _types.TokenType.star, _types.TokenType._as)) { + // export * as + index += 3; + exportedName = this.tokens.identifierNameAtIndex(index); + // foo from + index += 2; + } else { + // export * from + index += 3; + } + if (!this.tokens.matches1AtIndex(index, _types.TokenType.string)) { + throw new Error("Expected string token at the end of star export statement."); + } + const path = this.tokens.stringValueAtIndex(index); + const importInfo = this.getImportInfo(path); + if (exportedName !== null) { + importInfo.exportStarNames.push(exportedName); + } else { + importInfo.hasStarExport = true; + } + } + + getNamedImports(index) { + const namedImports = []; + while (true) { + if (this.tokens.matches1AtIndex(index, _types.TokenType.braceR)) { + index++; + break; + } + + const specifierInfo = _getImportExportSpecifierInfo2.default.call(void 0, this.tokens, index); + index = specifierInfo.endIndex; + if (!specifierInfo.isType) { + namedImports.push({ + importedName: specifierInfo.leftName, + localName: specifierInfo.rightName, + }); + } + + if (this.tokens.matches2AtIndex(index, _types.TokenType.comma, _types.TokenType.braceR)) { + index += 2; + break; + } else if (this.tokens.matches1AtIndex(index, _types.TokenType.braceR)) { + index++; + break; + } else if (this.tokens.matches1AtIndex(index, _types.TokenType.comma)) { + index++; + } else { + throw new Error(`Unexpected token: 
${JSON.stringify(this.tokens.tokens[index])}`); + } + } + return {newIndex: index, namedImports}; + } + + /** + * Get a mutable import info object for this path, creating one if it doesn't + * exist yet. + */ + getImportInfo(path) { + const existingInfo = this.importInfoByPath.get(path); + if (existingInfo) { + return existingInfo; + } + const newInfo = { + defaultNames: [], + wildcardNames: [], + namedImports: [], + namedExports: [], + hasBareImport: false, + exportStarNames: [], + hasStarExport: false, + }; + this.importInfoByPath.set(path, newInfo); + return newInfo; + } + + addExportBinding(localName, exportedName) { + if (!this.exportBindingsByLocalName.has(localName)) { + this.exportBindingsByLocalName.set(localName, []); + } + this.exportBindingsByLocalName.get(localName).push(exportedName); + } + + /** + * Return the code to use for the import for this path, or the empty string if + * the code has already been "claimed" by a previous import. + */ + claimImportCode(importPath) { + const result = this.importsToReplace.get(importPath); + this.importsToReplace.set(importPath, ""); + return result || ""; + } + + getIdentifierReplacement(identifierName) { + return this.identifierReplacements.get(identifierName) || null; + } + + /** + * Return a string like `exports.foo = exports.bar`. + */ + resolveExportBinding(assignedName) { + const exportedNames = this.exportBindingsByLocalName.get(assignedName); + if (!exportedNames || exportedNames.length === 0) { + return null; + } + return exportedNames.map((exportedName) => `exports.${exportedName}`).join(" = "); + } + + /** + * Return all imported/exported names where we might be interested in whether usages of those + * names are shadowed. + */ + getGlobalNames() { + return new Set([ + ...this.identifierReplacements.keys(), + ...this.exportBindingsByLocalName.keys(), + ]); + } +} exports.default = CJSImportProcessor; diff --git a/node_modules/sucrase/dist/HelperManager.js b/node_modules/sucrase/dist/HelperManager.js new file mode 100644 index 0000000..6dcc9c9 --- /dev/null +++ b/node_modules/sucrase/dist/HelperManager.js @@ -0,0 +1,176 @@ +"use strict";Object.defineProperty(exports, "__esModule", {value: true}); + +const HELPERS = { + require: ` + import {createRequire as CREATE_REQUIRE_NAME} from "module"; + const require = CREATE_REQUIRE_NAME(import.meta.url); + `, + interopRequireWildcard: ` + function interopRequireWildcard(obj) { + if (obj && obj.__esModule) { + return obj; + } else { + var newObj = {}; + if (obj != null) { + for (var key in obj) { + if (Object.prototype.hasOwnProperty.call(obj, key)) { + newObj[key] = obj[key]; + } + } + } + newObj.default = obj; + return newObj; + } + } + `, + interopRequireDefault: ` + function interopRequireDefault(obj) { + return obj && obj.__esModule ? obj : { default: obj }; + } + `, + createNamedExportFrom: ` + function createNamedExportFrom(obj, localName, importedName) { + Object.defineProperty(exports, localName, {enumerable: true, configurable: true, get: () => obj[importedName]}); + } + `, + // Note that TypeScript and Babel do this differently; TypeScript does a simple existence + // check in the exports object and does a plain assignment, whereas Babel uses + // defineProperty and builds an object of explicitly-exported names so that star exports can + // always take lower precedence. For now, we do the easier TypeScript thing. 
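  // e.g. `export * from "./util";` re-exports every enumerable key of
  // require("./util") except "default" and "__esModule", skipping any name
  // already defined on exports.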
+ createStarExport: ` + function createStarExport(obj) { + Object.keys(obj) + .filter((key) => key !== "default" && key !== "__esModule") + .forEach((key) => { + if (exports.hasOwnProperty(key)) { + return; + } + Object.defineProperty(exports, key, {enumerable: true, configurable: true, get: () => obj[key]}); + }); + } + `, + nullishCoalesce: ` + function nullishCoalesce(lhs, rhsFn) { + if (lhs != null) { + return lhs; + } else { + return rhsFn(); + } + } + `, + asyncNullishCoalesce: ` + async function asyncNullishCoalesce(lhs, rhsFn) { + if (lhs != null) { + return lhs; + } else { + return await rhsFn(); + } + } + `, + optionalChain: ` + function optionalChain(ops) { + let lastAccessLHS = undefined; + let value = ops[0]; + let i = 1; + while (i < ops.length) { + const op = ops[i]; + const fn = ops[i + 1]; + i += 2; + if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { + return undefined; + } + if (op === 'access' || op === 'optionalAccess') { + lastAccessLHS = value; + value = fn(value); + } else if (op === 'call' || op === 'optionalCall') { + value = fn((...args) => value.call(lastAccessLHS, ...args)); + lastAccessLHS = undefined; + } + } + return value; + } + `, + asyncOptionalChain: ` + async function asyncOptionalChain(ops) { + let lastAccessLHS = undefined; + let value = ops[0]; + let i = 1; + while (i < ops.length) { + const op = ops[i]; + const fn = ops[i + 1]; + i += 2; + if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { + return undefined; + } + if (op === 'access' || op === 'optionalAccess') { + lastAccessLHS = value; + value = await fn(value); + } else if (op === 'call' || op === 'optionalCall') { + value = await fn((...args) => value.call(lastAccessLHS, ...args)); + lastAccessLHS = undefined; + } + } + return value; + } + `, + optionalChainDelete: ` + function optionalChainDelete(ops) { + const result = OPTIONAL_CHAIN_NAME(ops); + return result == null ? true : result; + } + `, + asyncOptionalChainDelete: ` + async function asyncOptionalChainDelete(ops) { + const result = await ASYNC_OPTIONAL_CHAIN_NAME(ops); + return result == null ? 
true : result; + } + `, +}; + + class HelperManager { + __init() {this.helperNames = {}} + __init2() {this.createRequireName = null} + constructor( nameManager) {;this.nameManager = nameManager;HelperManager.prototype.__init.call(this);HelperManager.prototype.__init2.call(this);} + + getHelperName(baseName) { + let helperName = this.helperNames[baseName]; + if (helperName) { + return helperName; + } + helperName = this.nameManager.claimFreeName(`_${baseName}`); + this.helperNames[baseName] = helperName; + return helperName; + } + + emitHelpers() { + let resultCode = ""; + if (this.helperNames.optionalChainDelete) { + this.getHelperName("optionalChain"); + } + if (this.helperNames.asyncOptionalChainDelete) { + this.getHelperName("asyncOptionalChain"); + } + for (const [baseName, helperCodeTemplate] of Object.entries(HELPERS)) { + const helperName = this.helperNames[baseName]; + let helperCode = helperCodeTemplate; + if (baseName === "optionalChainDelete") { + helperCode = helperCode.replace("OPTIONAL_CHAIN_NAME", this.helperNames.optionalChain); + } else if (baseName === "asyncOptionalChainDelete") { + helperCode = helperCode.replace( + "ASYNC_OPTIONAL_CHAIN_NAME", + this.helperNames.asyncOptionalChain, + ); + } else if (baseName === "require") { + if (this.createRequireName === null) { + this.createRequireName = this.nameManager.claimFreeName("_createRequire"); + } + helperCode = helperCode.replace(/CREATE_REQUIRE_NAME/g, this.createRequireName); + } + if (helperName) { + resultCode += " "; + resultCode += helperCode.replace(baseName, helperName).replace(/\s+/g, " ").trim(); + } + } + return resultCode; + } +} exports.HelperManager = HelperManager; diff --git a/node_modules/sucrase/dist/NameManager.js b/node_modules/sucrase/dist/NameManager.js new file mode 100644 index 0000000..0ebbe28 --- /dev/null +++ b/node_modules/sucrase/dist/NameManager.js @@ -0,0 +1,27 @@ +"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } +var _getIdentifierNames = require('./util/getIdentifierNames'); var _getIdentifierNames2 = _interopRequireDefault(_getIdentifierNames); + + class NameManager { + __init() {this.usedNames = new Set()} + + constructor(code, tokens) {;NameManager.prototype.__init.call(this); + this.usedNames = new Set(_getIdentifierNames2.default.call(void 0, code, tokens)); + } + + claimFreeName(name) { + const newName = this.findFreeName(name); + this.usedNames.add(newName); + return newName; + } + + findFreeName(name) { + if (!this.usedNames.has(name)) { + return name; + } + let suffixNum = 2; + while (this.usedNames.has(name + String(suffixNum))) { + suffixNum++; + } + return name + String(suffixNum); + } +} exports.default = NameManager; diff --git a/node_modules/sucrase/dist/Options-gen-types.js b/node_modules/sucrase/dist/Options-gen-types.js new file mode 100644 index 0000000..77ef4ea --- /dev/null +++ b/node_modules/sucrase/dist/Options-gen-types.js @@ -0,0 +1,41 @@ +"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { newObj[key] = obj[key]; } } } newObj.default = obj; return newObj; } }/** + * This module was automatically generated by `ts-interface-builder` + */ +var _tsinterfacechecker = require('ts-interface-checker'); var t = _interopRequireWildcard(_tsinterfacechecker); +// tslint:disable:object-literal-key-quotes + + const Transform = t.union( + t.lit("jsx"), + t.lit("typescript"), + t.lit("flow"), + t.lit("imports"), + t.lit("react-hot-loader"), + t.lit("jest"), +); exports.Transform = Transform; + + const SourceMapOptions = t.iface([], { + compiledFilename: "string", +}); exports.SourceMapOptions = SourceMapOptions; + + const Options = t.iface([], { + transforms: t.array("Transform"), + disableESTransforms: t.opt("boolean"), + jsxRuntime: t.opt(t.union(t.lit("classic"), t.lit("automatic"), t.lit("preserve"))), + production: t.opt("boolean"), + jsxImportSource: t.opt("string"), + jsxPragma: t.opt("string"), + jsxFragmentPragma: t.opt("string"), + preserveDynamicImport: t.opt("boolean"), + injectCreateRequireForImportRequire: t.opt("boolean"), + enableLegacyTypeScriptModuleInterop: t.opt("boolean"), + enableLegacyBabel5ModuleInterop: t.opt("boolean"), + sourceMapOptions: t.opt("SourceMapOptions"), + filePath: t.opt("string"), +}); exports.Options = Options; + +const exportedTypeSuite = { + Transform: exports.Transform, + SourceMapOptions: exports.SourceMapOptions, + Options: exports.Options, +}; +exports. default = exportedTypeSuite; diff --git a/node_modules/sucrase/dist/Options.js b/node_modules/sucrase/dist/Options.js new file mode 100644 index 0000000..39fe646 --- /dev/null +++ b/node_modules/sucrase/dist/Options.js @@ -0,0 +1,95 @@ +"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; }var _tsinterfacechecker = require('ts-interface-checker'); + +var _Optionsgentypes = require('./Options-gen-types'); var _Optionsgentypes2 = _interopRequireDefault(_Optionsgentypes); + +const {Options: OptionsChecker} = _tsinterfacechecker.createCheckers.call(void 0, _Optionsgentypes2.default); + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + function validateOptions(options) { + OptionsChecker.strictCheck(options); +} exports.validateOptions = validateOptions; diff --git a/node_modules/sucrase/dist/TokenProcessor.js b/node_modules/sucrase/dist/TokenProcessor.js new file mode 100644 index 0000000..58fdd98 --- /dev/null +++ b/node_modules/sucrase/dist/TokenProcessor.js @@ -0,0 +1,357 @@ +"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + + +var _types = require('./parser/tokenizer/types'); +var _isAsyncOperation = require('./util/isAsyncOperation'); var _isAsyncOperation2 = _interopRequireDefault(_isAsyncOperation); + + + + + + + + + + + + class TokenProcessor { + __init() {this.resultCode = ""} + // Array mapping input token index to optional string index position in the + // output code. + __init2() {this.resultMappings = new Array(this.tokens.length)} + __init3() {this.tokenIndex = 0} + + constructor( + code, + tokens, + isFlowEnabled, + disableESTransforms, + helperManager, + ) {;this.code = code;this.tokens = tokens;this.isFlowEnabled = isFlowEnabled;this.disableESTransforms = disableESTransforms;this.helperManager = helperManager;TokenProcessor.prototype.__init.call(this);TokenProcessor.prototype.__init2.call(this);TokenProcessor.prototype.__init3.call(this);} + + /** + * Snapshot the token state in a way that can be restored later, useful for + * things like lookahead. + * + * resultMappings do not need to be copied since in all use cases, they will + * be overwritten anyway after restore. + */ + snapshot() { + return { + resultCode: this.resultCode, + tokenIndex: this.tokenIndex, + }; + } + + restoreToSnapshot(snapshot) { + this.resultCode = snapshot.resultCode; + this.tokenIndex = snapshot.tokenIndex; + } + + /** + * Remove and return the code generated since the snapshot, leaving the + * current token position in-place. Unlike most TokenProcessor operations, + * this operation can result in input/output line number mismatches because + * the removed code may contain newlines, so this operation should be used + * sparingly. + */ + dangerouslyGetAndRemoveCodeSinceSnapshot(snapshot) { + const result = this.resultCode.slice(snapshot.resultCode.length); + this.resultCode = snapshot.resultCode; + return result; + } + + reset() { + this.resultCode = ""; + this.resultMappings = new Array(this.tokens.length); + this.tokenIndex = 0; + } + + matchesContextualAtIndex(index, contextualKeyword) { + return ( + this.matches1AtIndex(index, _types.TokenType.name) && + this.tokens[index].contextualKeyword === contextualKeyword + ); + } + + identifierNameAtIndex(index) { + // TODO: We need to process escapes since technically you can have unicode escapes in variable + // names. 
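    // Today this is just the raw source slice for the token, so an escaped name
    // such as `\u0041` is not folded to `A`.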
+ return this.identifierNameForToken(this.tokens[index]); + } + + identifierNameAtRelativeIndex(relativeIndex) { + return this.identifierNameForToken(this.tokenAtRelativeIndex(relativeIndex)); + } + + identifierName() { + return this.identifierNameForToken(this.currentToken()); + } + + identifierNameForToken(token) { + return this.code.slice(token.start, token.end); + } + + rawCodeForToken(token) { + return this.code.slice(token.start, token.end); + } + + stringValueAtIndex(index) { + return this.stringValueForToken(this.tokens[index]); + } + + stringValue() { + return this.stringValueForToken(this.currentToken()); + } + + stringValueForToken(token) { + // This is used to identify when two imports are the same and to resolve TypeScript enum keys. + // Ideally we'd process escapes within the strings, but for now we pretty much take the raw + // code. + return this.code.slice(token.start + 1, token.end - 1); + } + + matches1AtIndex(index, t1) { + return this.tokens[index].type === t1; + } + + matches2AtIndex(index, t1, t2) { + return this.tokens[index].type === t1 && this.tokens[index + 1].type === t2; + } + + matches3AtIndex(index, t1, t2, t3) { + return ( + this.tokens[index].type === t1 && + this.tokens[index + 1].type === t2 && + this.tokens[index + 2].type === t3 + ); + } + + matches1(t1) { + return this.tokens[this.tokenIndex].type === t1; + } + + matches2(t1, t2) { + return this.tokens[this.tokenIndex].type === t1 && this.tokens[this.tokenIndex + 1].type === t2; + } + + matches3(t1, t2, t3) { + return ( + this.tokens[this.tokenIndex].type === t1 && + this.tokens[this.tokenIndex + 1].type === t2 && + this.tokens[this.tokenIndex + 2].type === t3 + ); + } + + matches4(t1, t2, t3, t4) { + return ( + this.tokens[this.tokenIndex].type === t1 && + this.tokens[this.tokenIndex + 1].type === t2 && + this.tokens[this.tokenIndex + 2].type === t3 && + this.tokens[this.tokenIndex + 3].type === t4 + ); + } + + matches5(t1, t2, t3, t4, t5) { + return ( + this.tokens[this.tokenIndex].type === t1 && + this.tokens[this.tokenIndex + 1].type === t2 && + this.tokens[this.tokenIndex + 2].type === t3 && + this.tokens[this.tokenIndex + 3].type === t4 && + this.tokens[this.tokenIndex + 4].type === t5 + ); + } + + matchesContextual(contextualKeyword) { + return this.matchesContextualAtIndex(this.tokenIndex, contextualKeyword); + } + + matchesContextIdAndLabel(type, contextId) { + return this.matches1(type) && this.currentToken().contextId === contextId; + } + + previousWhitespaceAndComments() { + let whitespaceAndComments = this.code.slice( + this.tokenIndex > 0 ? this.tokens[this.tokenIndex - 1].end : 0, + this.tokenIndex < this.tokens.length ? 
this.tokens[this.tokenIndex].start : this.code.length, + ); + if (this.isFlowEnabled) { + whitespaceAndComments = whitespaceAndComments.replace(/@flow/g, ""); + } + return whitespaceAndComments; + } + + replaceToken(newCode) { + this.resultCode += this.previousWhitespaceAndComments(); + this.appendTokenPrefix(); + this.resultMappings[this.tokenIndex] = this.resultCode.length; + this.resultCode += newCode; + this.appendTokenSuffix(); + this.tokenIndex++; + } + + replaceTokenTrimmingLeftWhitespace(newCode) { + this.resultCode += this.previousWhitespaceAndComments().replace(/[^\r\n]/g, ""); + this.appendTokenPrefix(); + this.resultMappings[this.tokenIndex] = this.resultCode.length; + this.resultCode += newCode; + this.appendTokenSuffix(); + this.tokenIndex++; + } + + removeInitialToken() { + this.replaceToken(""); + } + + removeToken() { + this.replaceTokenTrimmingLeftWhitespace(""); + } + + /** + * Remove all code until the next }, accounting for balanced braces. + */ + removeBalancedCode() { + let braceDepth = 0; + while (!this.isAtEnd()) { + if (this.matches1(_types.TokenType.braceL)) { + braceDepth++; + } else if (this.matches1(_types.TokenType.braceR)) { + if (braceDepth === 0) { + return; + } + braceDepth--; + } + this.removeToken(); + } + } + + copyExpectedToken(tokenType) { + if (this.tokens[this.tokenIndex].type !== tokenType) { + throw new Error(`Expected token ${tokenType}`); + } + this.copyToken(); + } + + copyToken() { + this.resultCode += this.previousWhitespaceAndComments(); + this.appendTokenPrefix(); + this.resultMappings[this.tokenIndex] = this.resultCode.length; + this.resultCode += this.code.slice( + this.tokens[this.tokenIndex].start, + this.tokens[this.tokenIndex].end, + ); + this.appendTokenSuffix(); + this.tokenIndex++; + } + + copyTokenWithPrefix(prefix) { + this.resultCode += this.previousWhitespaceAndComments(); + this.appendTokenPrefix(); + this.resultCode += prefix; + this.resultMappings[this.tokenIndex] = this.resultCode.length; + this.resultCode += this.code.slice( + this.tokens[this.tokenIndex].start, + this.tokens[this.tokenIndex].end, + ); + this.appendTokenSuffix(); + this.tokenIndex++; + } + + appendTokenPrefix() { + const token = this.currentToken(); + if (token.numNullishCoalesceStarts || token.isOptionalChainStart) { + token.isAsyncOperation = _isAsyncOperation2.default.call(void 0, this); + } + if (this.disableESTransforms) { + return; + } + if (token.numNullishCoalesceStarts) { + for (let i = 0; i < token.numNullishCoalesceStarts; i++) { + if (token.isAsyncOperation) { + this.resultCode += "await "; + this.resultCode += this.helperManager.getHelperName("asyncNullishCoalesce"); + } else { + this.resultCode += this.helperManager.getHelperName("nullishCoalesce"); + } + this.resultCode += "("; + } + } + if (token.isOptionalChainStart) { + if (token.isAsyncOperation) { + this.resultCode += "await "; + } + if (this.tokenIndex > 0 && this.tokenAtRelativeIndex(-1).type === _types.TokenType._delete) { + if (token.isAsyncOperation) { + this.resultCode += this.helperManager.getHelperName("asyncOptionalChainDelete"); + } else { + this.resultCode += this.helperManager.getHelperName("optionalChainDelete"); + } + } else if (token.isAsyncOperation) { + this.resultCode += this.helperManager.getHelperName("asyncOptionalChain"); + } else { + this.resultCode += this.helperManager.getHelperName("optionalChain"); + } + this.resultCode += "(["; + } + } + + appendTokenSuffix() { + const token = this.currentToken(); + if (token.isOptionalChainEnd && !this.disableESTransforms) 
{ + this.resultCode += "])"; + } + if (token.numNullishCoalesceEnds && !this.disableESTransforms) { + for (let i = 0; i < token.numNullishCoalesceEnds; i++) { + this.resultCode += "))"; + } + } + } + + appendCode(code) { + this.resultCode += code; + } + + currentToken() { + return this.tokens[this.tokenIndex]; + } + + currentTokenCode() { + const token = this.currentToken(); + return this.code.slice(token.start, token.end); + } + + tokenAtRelativeIndex(relativeIndex) { + return this.tokens[this.tokenIndex + relativeIndex]; + } + + currentIndex() { + return this.tokenIndex; + } + + /** + * Move to the next token. Only suitable in preprocessing steps. When + * generating new code, you should use copyToken or removeToken. + */ + nextToken() { + if (this.tokenIndex === this.tokens.length) { + throw new Error("Unexpectedly reached end of input."); + } + this.tokenIndex++; + } + + previousToken() { + this.tokenIndex--; + } + + finish() { + if (this.tokenIndex !== this.tokens.length) { + throw new Error("Tried to finish processing tokens before reaching the end."); + } + this.resultCode += this.previousWhitespaceAndComments(); + return {code: this.resultCode, mappings: this.resultMappings}; + } + + isAtEnd() { + return this.tokenIndex === this.tokens.length; + } +} exports.default = TokenProcessor; diff --git a/node_modules/sucrase/dist/cli.js b/node_modules/sucrase/dist/cli.js new file mode 100644 index 0000000..4bb96c2 --- /dev/null +++ b/node_modules/sucrase/dist/cli.js @@ -0,0 +1,296 @@ +"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }/* eslint-disable no-console */ +var _commander = require('commander'); var _commander2 = _interopRequireDefault(_commander); +var _glob = require('glob'); var _glob2 = _interopRequireDefault(_glob); +var _fs = require('mz/fs'); +var _path = require('path'); +var _util = require('util'); + +var _index = require('./index'); + + + + + + + + + + + +const glob = _util.promisify.call(void 0, _glob2.default); + + function run() { + _commander2.default + .description(`Sucrase: super-fast Babel alternative.`) + .usage("[options] ") + .option( + "-d, --out-dir ", + "Compile an input directory of modules into an output directory.", + ) + .option( + "-p, --project ", + "Compile a TypeScript project, will read from tsconfig.json in ", + ) + .option("--out-extension ", "File extension to use for all output files.", "js") + .option("--exclude-dirs ", "Names of directories that should not be traversed.") + .option("-t, --transforms ", "Comma-separated list of transforms to run.") + .option("-q, --quiet", "Don't print the names of converted files.") + .option( + "--enable-legacy-typescript-module-interop", + "Use default TypeScript ESM/CJS interop strategy.", + ) + .option("--enable-legacy-babel5-module-interop", "Use Babel 5 ESM/CJS interop strategy.") + .option("--jsx-pragma ", "Element creation function, defaults to `React.createElement`") + .option("--jsx-fragment-pragma ", "Fragment component, defaults to `React.Fragment`") + .option("--production", "Disable debugging information from JSX in output.") + .parse(process.argv); + + if (_commander2.default.project) { + if ( + _commander2.default.outDir || + _commander2.default.transforms || + _commander2.default.args[0] || + _commander2.default.enableLegacyTypescriptModuleInterop + ) { + console.error( + "If TypeScript project is specified, out directory, transforms, source " + + "directory, and 
--enable-legacy-typescript-module-interop may not be specified.", + ); + process.exit(1); + } + } else { + if (!_commander2.default.outDir) { + console.error("Out directory is required"); + process.exit(1); + } + + if (!_commander2.default.transforms) { + console.error("Transforms option is required."); + process.exit(1); + } + + if (!_commander2.default.args[0]) { + console.error("Source directory is required."); + process.exit(1); + } + } + + const options = { + outDirPath: _commander2.default.outDir, + srcDirPath: _commander2.default.args[0], + project: _commander2.default.project, + outExtension: _commander2.default.outExtension, + excludeDirs: _commander2.default.excludeDirs ? _commander2.default.excludeDirs.split(",") : [], + quiet: _commander2.default.quiet, + sucraseOptions: { + transforms: _commander2.default.transforms ? _commander2.default.transforms.split(",") : [], + enableLegacyTypeScriptModuleInterop: _commander2.default.enableLegacyTypescriptModuleInterop, + enableLegacyBabel5ModuleInterop: _commander2.default.enableLegacyBabel5ModuleInterop, + jsxPragma: _commander2.default.jsxPragma || "React.createElement", + jsxFragmentPragma: _commander2.default.jsxFragmentPragma || "React.Fragment", + production: _commander2.default.production, + }, + }; + + buildDirectory(options).catch((e) => { + process.exitCode = 1; + console.error(e); + }); +} exports.default = run; + + + + + + +async function findFiles(options) { + const outDirPath = options.outDirPath; + const srcDirPath = options.srcDirPath; + + const extensions = options.sucraseOptions.transforms.includes("typescript") + ? [".ts", ".tsx"] + : [".js", ".jsx"]; + + if (!(await _fs.exists.call(void 0, outDirPath))) { + await _fs.mkdir.call(void 0, outDirPath); + } + + const outArr = []; + for (const child of await _fs.readdir.call(void 0, srcDirPath)) { + if (["node_modules", ".git"].includes(child) || options.excludeDirs.includes(child)) { + continue; + } + const srcChildPath = _path.join.call(void 0, srcDirPath, child); + const outChildPath = _path.join.call(void 0, outDirPath, child); + if ((await _fs.stat.call(void 0, srcChildPath)).isDirectory()) { + const innerOptions = {...options}; + innerOptions.srcDirPath = srcChildPath; + innerOptions.outDirPath = outChildPath; + const innerFiles = await findFiles(innerOptions); + outArr.push(...innerFiles); + } else if (extensions.some((ext) => srcChildPath.endsWith(ext))) { + const outPath = outChildPath.replace(/\.\w+$/, `.${options.outExtension}`); + outArr.push({ + srcPath: srcChildPath, + outPath, + }); + } + } + + return outArr; +} + +async function runGlob(options) { + const tsConfigPath = _path.join.call(void 0, options.project, "tsconfig.json"); + + let str; + try { + str = await _fs.readFile.call(void 0, tsConfigPath, "utf8"); + } catch (err) { + console.error("Could not find project tsconfig.json"); + console.error(` --project=${options.project}`); + console.error(err); + process.exit(1); + } + const json = JSON.parse(str); + + const foundFiles = []; + + const files = json.files; + const include = json.include; + + const absProject = _path.join.call(void 0, process.cwd(), options.project); + const outDirs = []; + + if (!(await _fs.exists.call(void 0, options.outDirPath))) { + await _fs.mkdir.call(void 0, options.outDirPath); + } + + if (files) { + for (const file of files) { + if (file.endsWith(".d.ts")) { + continue; + } + if (!file.endsWith(".ts") && !file.endsWith(".js")) { + continue; + } + + const srcFile = _path.join.call(void 0, absProject, file); + const outFile = 
_path.join.call(void 0, options.outDirPath, file); + const outPath = outFile.replace(/\.\w+$/, `.${options.outExtension}`); + + const outDir = _path.dirname.call(void 0, outPath); + if (!outDirs.includes(outDir)) { + outDirs.push(outDir); + } + + foundFiles.push({ + srcPath: srcFile, + outPath, + }); + } + } + if (include) { + for (const pattern of include) { + const globFiles = await glob(_path.join.call(void 0, absProject, pattern)); + for (const file of globFiles) { + if (!file.endsWith(".ts") && !file.endsWith(".js")) { + continue; + } + if (file.endsWith(".d.ts")) { + continue; + } + + const relativeFile = _path.relative.call(void 0, absProject, file); + const outFile = _path.join.call(void 0, options.outDirPath, relativeFile); + const outPath = outFile.replace(/\.\w+$/, `.${options.outExtension}`); + + const outDir = _path.dirname.call(void 0, outPath); + if (!outDirs.includes(outDir)) { + outDirs.push(outDir); + } + + foundFiles.push({ + srcPath: file, + outPath, + }); + } + } + } + + for (const outDirPath of outDirs) { + if (!(await _fs.exists.call(void 0, outDirPath))) { + await _fs.mkdir.call(void 0, outDirPath); + } + } + + // TODO: read exclude + + return foundFiles; +} + +async function updateOptionsFromProject(options) { + /** + * Read the project information and assign the following. + * - outDirPath + * - transform: imports + * - transform: typescript + * - enableLegacyTypescriptModuleInterop: true/false. + */ + + const tsConfigPath = _path.join.call(void 0, options.project, "tsconfig.json"); + + let str; + try { + str = await _fs.readFile.call(void 0, tsConfigPath, "utf8"); + } catch (err) { + console.error("Could not find project tsconfig.json"); + console.error(` --project=${options.project}`); + console.error(err); + process.exit(1); + } + const json = JSON.parse(str); + const sucraseOpts = options.sucraseOptions; + if (!sucraseOpts.transforms.includes("typescript")) { + sucraseOpts.transforms.push("typescript"); + } + + const compilerOpts = json.compilerOptions; + if (compilerOpts.outDir) { + options.outDirPath = _path.join.call(void 0, process.cwd(), options.project, compilerOpts.outDir); + } + if (compilerOpts.esModuleInterop !== true) { + sucraseOpts.enableLegacyTypeScriptModuleInterop = true; + } + if (compilerOpts.module === "commonjs") { + if (!sucraseOpts.transforms.includes("imports")) { + sucraseOpts.transforms.push("imports"); + } + } +} + +async function buildDirectory(options) { + let files; + if (options.outDirPath && options.srcDirPath) { + files = await findFiles(options); + } else if (options.project) { + await updateOptionsFromProject(options); + files = await runGlob(options); + } else { + console.error("Project or Source directory required."); + process.exit(1); + } + + for (const file of files) { + await buildFile(file.srcPath, file.outPath, options); + } +} + +async function buildFile(srcPath, outPath, options) { + if (!options.quiet) { + console.log(`${srcPath} -> ${outPath}`); + } + const code = (await _fs.readFile.call(void 0, srcPath)).toString(); + const transformedCode = _index.transform.call(void 0, code, {...options.sucraseOptions, filePath: srcPath}).code; + await _fs.writeFile.call(void 0, outPath, transformedCode); +} diff --git a/node_modules/sucrase/dist/computeSourceMap.js b/node_modules/sucrase/dist/computeSourceMap.js new file mode 100644 index 0000000..567ddbb --- /dev/null +++ b/node_modules/sucrase/dist/computeSourceMap.js @@ -0,0 +1,89 @@ +"use strict";Object.defineProperty(exports, "__esModule", {value: true});var _genmapping = 
require('@jridgewell/gen-mapping'); + + + +var _charcodes = require('./parser/util/charcodes'); + + + + + + + + + + + + +/** + * Generate a source map indicating that each line maps directly to the original line, + * with the tokens in their new positions. + */ + function computeSourceMap( + {code: generatedCode, mappings: rawMappings}, + filePath, + options, + source, + tokens, +) { + const sourceColumns = computeSourceColumns(source, tokens); + const map = new (0, _genmapping.GenMapping)({file: options.compiledFilename}); + let tokenIndex = 0; + // currentMapping is the output source index for the current input token being + // considered. + let currentMapping = rawMappings[0]; + while (currentMapping === undefined && tokenIndex < rawMappings.length - 1) { + tokenIndex++; + currentMapping = rawMappings[tokenIndex]; + } + let line = 0; + let lineStart = 0; + if (currentMapping !== lineStart) { + _genmapping.maybeAddSegment.call(void 0, map, line, 0, filePath, line, 0); + } + for (let i = 0; i < generatedCode.length; i++) { + if (i === currentMapping) { + const genColumn = currentMapping - lineStart; + const sourceColumn = sourceColumns[tokenIndex]; + _genmapping.maybeAddSegment.call(void 0, map, line, genColumn, filePath, line, sourceColumn); + while ( + (currentMapping === i || currentMapping === undefined) && + tokenIndex < rawMappings.length - 1 + ) { + tokenIndex++; + currentMapping = rawMappings[tokenIndex]; + } + } + if (generatedCode.charCodeAt(i) === _charcodes.charCodes.lineFeed) { + line++; + lineStart = i + 1; + if (currentMapping !== lineStart) { + _genmapping.maybeAddSegment.call(void 0, map, line, 0, filePath, line, 0); + } + } + } + const {sourceRoot, sourcesContent, ...sourceMap} = _genmapping.toEncodedMap.call(void 0, map); + return sourceMap ; +} exports.default = computeSourceMap; + +/** + * Create an array mapping each token index to the 0-based column of the start + * position of the token. + */ +function computeSourceColumns(code, tokens) { + const sourceColumns = new Array(tokens.length); + let tokenIndex = 0; + let currentMapping = tokens[tokenIndex].start; + let lineStart = 0; + for (let i = 0; i < code.length; i++) { + if (i === currentMapping) { + sourceColumns[tokenIndex] = currentMapping - lineStart; + tokenIndex++; + currentMapping = tokens[tokenIndex].start; + } + if (code.charCodeAt(i) === _charcodes.charCodes.lineFeed) { + lineStart = i + 1; + } + } + return sourceColumns; +} diff --git a/node_modules/sucrase/dist/esm/CJSImportProcessor.js b/node_modules/sucrase/dist/esm/CJSImportProcessor.js new file mode 100644 index 0000000..480d541 --- /dev/null +++ b/node_modules/sucrase/dist/esm/CJSImportProcessor.js @@ -0,0 +1,451 @@ + + + +import {isDeclaration} from "./parser/tokenizer"; +import {ContextualKeyword} from "./parser/tokenizer/keywords"; +import {TokenType as tt} from "./parser/tokenizer/types"; + +import getImportExportSpecifierInfo from "./util/getImportExportSpecifierInfo"; +import {getNonTypeIdentifiers} from "./util/getNonTypeIdentifiers"; + + + + + + + + + + + + + + + + +/** + * Class responsible for preprocessing and bookkeeping import and export declarations within the + * file. + * + * TypeScript uses a simpler mechanism that does not use functions like interopRequireDefault and + * interopRequireWildcard, so we also allow that mode for compatibility. 
+ */ +export default class CJSImportProcessor { + __init() {this.nonTypeIdentifiers = new Set()} + __init2() {this.importInfoByPath = new Map()} + __init3() {this.importsToReplace = new Map()} + __init4() {this.identifierReplacements = new Map()} + __init5() {this.exportBindingsByLocalName = new Map()} + + constructor( + nameManager, + tokens, + enableLegacyTypeScriptModuleInterop, + options, + isTypeScriptTransformEnabled, + helperManager, + ) {;this.nameManager = nameManager;this.tokens = tokens;this.enableLegacyTypeScriptModuleInterop = enableLegacyTypeScriptModuleInterop;this.options = options;this.isTypeScriptTransformEnabled = isTypeScriptTransformEnabled;this.helperManager = helperManager;CJSImportProcessor.prototype.__init.call(this);CJSImportProcessor.prototype.__init2.call(this);CJSImportProcessor.prototype.__init3.call(this);CJSImportProcessor.prototype.__init4.call(this);CJSImportProcessor.prototype.__init5.call(this);} + + preprocessTokens() { + for (let i = 0; i < this.tokens.tokens.length; i++) { + if ( + this.tokens.matches1AtIndex(i, tt._import) && + !this.tokens.matches3AtIndex(i, tt._import, tt.name, tt.eq) + ) { + this.preprocessImportAtIndex(i); + } + if ( + this.tokens.matches1AtIndex(i, tt._export) && + !this.tokens.matches2AtIndex(i, tt._export, tt.eq) + ) { + this.preprocessExportAtIndex(i); + } + } + this.generateImportReplacements(); + } + + /** + * In TypeScript, import statements that only import types should be removed. This does not count + * bare imports. + */ + pruneTypeOnlyImports() { + this.nonTypeIdentifiers = getNonTypeIdentifiers(this.tokens, this.options); + for (const [path, importInfo] of this.importInfoByPath.entries()) { + if ( + importInfo.hasBareImport || + importInfo.hasStarExport || + importInfo.exportStarNames.length > 0 || + importInfo.namedExports.length > 0 + ) { + continue; + } + const names = [ + ...importInfo.defaultNames, + ...importInfo.wildcardNames, + ...importInfo.namedImports.map(({localName}) => localName), + ]; + if (names.every((name) => this.isTypeName(name))) { + this.importsToReplace.set(path, ""); + } + } + } + + isTypeName(name) { + return this.isTypeScriptTransformEnabled && !this.nonTypeIdentifiers.has(name); + } + + generateImportReplacements() { + for (const [path, importInfo] of this.importInfoByPath.entries()) { + const { + defaultNames, + wildcardNames, + namedImports, + namedExports, + exportStarNames, + hasStarExport, + } = importInfo; + + if ( + defaultNames.length === 0 && + wildcardNames.length === 0 && + namedImports.length === 0 && + namedExports.length === 0 && + exportStarNames.length === 0 && + !hasStarExport + ) { + // Import is never used, so don't even assign a name. + this.importsToReplace.set(path, `require('${path}');`); + continue; + } + + const primaryImportName = this.getFreeIdentifierForPath(path); + let secondaryImportName; + if (this.enableLegacyTypeScriptModuleInterop) { + secondaryImportName = primaryImportName; + } else { + secondaryImportName = + wildcardNames.length > 0 ? wildcardNames[0] : this.getFreeIdentifierForPath(path); + } + let requireCode = `var ${primaryImportName} = require('${path}');`; + if (wildcardNames.length > 0) { + for (const wildcardName of wildcardNames) { + const moduleExpr = this.enableLegacyTypeScriptModuleInterop + ? 
primaryImportName + : `${this.helperManager.getHelperName("interopRequireWildcard")}(${primaryImportName})`; + requireCode += ` var ${wildcardName} = ${moduleExpr};`; + } + } else if (exportStarNames.length > 0 && secondaryImportName !== primaryImportName) { + requireCode += ` var ${secondaryImportName} = ${this.helperManager.getHelperName( + "interopRequireWildcard", + )}(${primaryImportName});`; + } else if (defaultNames.length > 0 && secondaryImportName !== primaryImportName) { + requireCode += ` var ${secondaryImportName} = ${this.helperManager.getHelperName( + "interopRequireDefault", + )}(${primaryImportName});`; + } + + for (const {importedName, localName} of namedExports) { + requireCode += ` ${this.helperManager.getHelperName( + "createNamedExportFrom", + )}(${primaryImportName}, '${localName}', '${importedName}');`; + } + for (const exportStarName of exportStarNames) { + requireCode += ` exports.${exportStarName} = ${secondaryImportName};`; + } + if (hasStarExport) { + requireCode += ` ${this.helperManager.getHelperName( + "createStarExport", + )}(${primaryImportName});`; + } + + this.importsToReplace.set(path, requireCode); + + for (const defaultName of defaultNames) { + this.identifierReplacements.set(defaultName, `${secondaryImportName}.default`); + } + for (const {importedName, localName} of namedImports) { + this.identifierReplacements.set(localName, `${primaryImportName}.${importedName}`); + } + } + } + + getFreeIdentifierForPath(path) { + const components = path.split("/"); + const lastComponent = components[components.length - 1]; + const baseName = lastComponent.replace(/\W/g, ""); + return this.nameManager.claimFreeName(`_${baseName}`); + } + + preprocessImportAtIndex(index) { + const defaultNames = []; + const wildcardNames = []; + const namedImports = []; + + index++; + if ( + (this.tokens.matchesContextualAtIndex(index, ContextualKeyword._type) || + this.tokens.matches1AtIndex(index, tt._typeof)) && + !this.tokens.matches1AtIndex(index + 1, tt.comma) && + !this.tokens.matchesContextualAtIndex(index + 1, ContextualKeyword._from) + ) { + // import type declaration, so no need to process anything. 
+ return; + } + + if (this.tokens.matches1AtIndex(index, tt.parenL)) { + // Dynamic import, so nothing to do + return; + } + + if (this.tokens.matches1AtIndex(index, tt.name)) { + defaultNames.push(this.tokens.identifierNameAtIndex(index)); + index++; + if (this.tokens.matches1AtIndex(index, tt.comma)) { + index++; + } + } + + if (this.tokens.matches1AtIndex(index, tt.star)) { + // * as + index += 2; + wildcardNames.push(this.tokens.identifierNameAtIndex(index)); + index++; + } + + if (this.tokens.matches1AtIndex(index, tt.braceL)) { + const result = this.getNamedImports(index + 1); + index = result.newIndex; + + for (const namedImport of result.namedImports) { + // Treat {default as X} as a default import to ensure usage of require interop helper + if (namedImport.importedName === "default") { + defaultNames.push(namedImport.localName); + } else { + namedImports.push(namedImport); + } + } + } + + if (this.tokens.matchesContextualAtIndex(index, ContextualKeyword._from)) { + index++; + } + + if (!this.tokens.matches1AtIndex(index, tt.string)) { + throw new Error("Expected string token at the end of import statement."); + } + const path = this.tokens.stringValueAtIndex(index); + const importInfo = this.getImportInfo(path); + importInfo.defaultNames.push(...defaultNames); + importInfo.wildcardNames.push(...wildcardNames); + importInfo.namedImports.push(...namedImports); + if (defaultNames.length === 0 && wildcardNames.length === 0 && namedImports.length === 0) { + importInfo.hasBareImport = true; + } + } + + preprocessExportAtIndex(index) { + if ( + this.tokens.matches2AtIndex(index, tt._export, tt._var) || + this.tokens.matches2AtIndex(index, tt._export, tt._let) || + this.tokens.matches2AtIndex(index, tt._export, tt._const) + ) { + this.preprocessVarExportAtIndex(index); + } else if ( + this.tokens.matches2AtIndex(index, tt._export, tt._function) || + this.tokens.matches2AtIndex(index, tt._export, tt._class) + ) { + const exportName = this.tokens.identifierNameAtIndex(index + 2); + this.addExportBinding(exportName, exportName); + } else if (this.tokens.matches3AtIndex(index, tt._export, tt.name, tt._function)) { + const exportName = this.tokens.identifierNameAtIndex(index + 3); + this.addExportBinding(exportName, exportName); + } else if (this.tokens.matches2AtIndex(index, tt._export, tt.braceL)) { + this.preprocessNamedExportAtIndex(index); + } else if (this.tokens.matches2AtIndex(index, tt._export, tt.star)) { + this.preprocessExportStarAtIndex(index); + } + } + + preprocessVarExportAtIndex(index) { + let depth = 0; + // Handle cases like `export let {x} = y;`, starting at the open-brace in that case. 
+ for (let i = index + 2; ; i++) { + if ( + this.tokens.matches1AtIndex(i, tt.braceL) || + this.tokens.matches1AtIndex(i, tt.dollarBraceL) || + this.tokens.matches1AtIndex(i, tt.bracketL) + ) { + depth++; + } else if ( + this.tokens.matches1AtIndex(i, tt.braceR) || + this.tokens.matches1AtIndex(i, tt.bracketR) + ) { + depth--; + } else if (depth === 0 && !this.tokens.matches1AtIndex(i, tt.name)) { + break; + } else if (this.tokens.matches1AtIndex(1, tt.eq)) { + const endIndex = this.tokens.currentToken().rhsEndIndex; + if (endIndex == null) { + throw new Error("Expected = token with an end index."); + } + i = endIndex - 1; + } else { + const token = this.tokens.tokens[i]; + if (isDeclaration(token)) { + const exportName = this.tokens.identifierNameAtIndex(i); + this.identifierReplacements.set(exportName, `exports.${exportName}`); + } + } + } + } + + /** + * Walk this export statement just in case it's an export...from statement. + * If it is, combine it into the import info for that path. Otherwise, just + * bail out; it'll be handled later. + */ + preprocessNamedExportAtIndex(index) { + // export { + index += 2; + const {newIndex, namedImports} = this.getNamedImports(index); + index = newIndex; + + if (this.tokens.matchesContextualAtIndex(index, ContextualKeyword._from)) { + index++; + } else { + // Reinterpret "a as b" to be local/exported rather than imported/local. + for (const {importedName: localName, localName: exportedName} of namedImports) { + this.addExportBinding(localName, exportedName); + } + return; + } + + if (!this.tokens.matches1AtIndex(index, tt.string)) { + throw new Error("Expected string token at the end of import statement."); + } + const path = this.tokens.stringValueAtIndex(index); + const importInfo = this.getImportInfo(path); + importInfo.namedExports.push(...namedImports); + } + + preprocessExportStarAtIndex(index) { + let exportedName = null; + if (this.tokens.matches3AtIndex(index, tt._export, tt.star, tt._as)) { + // export * as + index += 3; + exportedName = this.tokens.identifierNameAtIndex(index); + // foo from + index += 2; + } else { + // export * from + index += 3; + } + if (!this.tokens.matches1AtIndex(index, tt.string)) { + throw new Error("Expected string token at the end of star export statement."); + } + const path = this.tokens.stringValueAtIndex(index); + const importInfo = this.getImportInfo(path); + if (exportedName !== null) { + importInfo.exportStarNames.push(exportedName); + } else { + importInfo.hasStarExport = true; + } + } + + getNamedImports(index) { + const namedImports = []; + while (true) { + if (this.tokens.matches1AtIndex(index, tt.braceR)) { + index++; + break; + } + + const specifierInfo = getImportExportSpecifierInfo(this.tokens, index); + index = specifierInfo.endIndex; + if (!specifierInfo.isType) { + namedImports.push({ + importedName: specifierInfo.leftName, + localName: specifierInfo.rightName, + }); + } + + if (this.tokens.matches2AtIndex(index, tt.comma, tt.braceR)) { + index += 2; + break; + } else if (this.tokens.matches1AtIndex(index, tt.braceR)) { + index++; + break; + } else if (this.tokens.matches1AtIndex(index, tt.comma)) { + index++; + } else { + throw new Error(`Unexpected token: ${JSON.stringify(this.tokens.tokens[index])}`); + } + } + return {newIndex: index, namedImports}; + } + + /** + * Get a mutable import info object for this path, creating one if it doesn't + * exist yet. 
+ */ + getImportInfo(path) { + const existingInfo = this.importInfoByPath.get(path); + if (existingInfo) { + return existingInfo; + } + const newInfo = { + defaultNames: [], + wildcardNames: [], + namedImports: [], + namedExports: [], + hasBareImport: false, + exportStarNames: [], + hasStarExport: false, + }; + this.importInfoByPath.set(path, newInfo); + return newInfo; + } + + addExportBinding(localName, exportedName) { + if (!this.exportBindingsByLocalName.has(localName)) { + this.exportBindingsByLocalName.set(localName, []); + } + this.exportBindingsByLocalName.get(localName).push(exportedName); + } + + /** + * Return the code to use for the import for this path, or the empty string if + * the code has already been "claimed" by a previous import. + */ + claimImportCode(importPath) { + const result = this.importsToReplace.get(importPath); + this.importsToReplace.set(importPath, ""); + return result || ""; + } + + getIdentifierReplacement(identifierName) { + return this.identifierReplacements.get(identifierName) || null; + } + + /** + * Return a string like `exports.foo = exports.bar`. + */ + resolveExportBinding(assignedName) { + const exportedNames = this.exportBindingsByLocalName.get(assignedName); + if (!exportedNames || exportedNames.length === 0) { + return null; + } + return exportedNames.map((exportedName) => `exports.${exportedName}`).join(" = "); + } + + /** + * Return all imported/exported names where we might be interested in whether usages of those + * names are shadowed. + */ + getGlobalNames() { + return new Set([ + ...this.identifierReplacements.keys(), + ...this.exportBindingsByLocalName.keys(), + ]); + } +} diff --git a/node_modules/sucrase/dist/esm/HelperManager.js b/node_modules/sucrase/dist/esm/HelperManager.js new file mode 100644 index 0000000..7964db3 --- /dev/null +++ b/node_modules/sucrase/dist/esm/HelperManager.js @@ -0,0 +1,176 @@ + + +const HELPERS = { + require: ` + import {createRequire as CREATE_REQUIRE_NAME} from "module"; + const require = CREATE_REQUIRE_NAME(import.meta.url); + `, + interopRequireWildcard: ` + function interopRequireWildcard(obj) { + if (obj && obj.__esModule) { + return obj; + } else { + var newObj = {}; + if (obj != null) { + for (var key in obj) { + if (Object.prototype.hasOwnProperty.call(obj, key)) { + newObj[key] = obj[key]; + } + } + } + newObj.default = obj; + return newObj; + } + } + `, + interopRequireDefault: ` + function interopRequireDefault(obj) { + return obj && obj.__esModule ? obj : { default: obj }; + } + `, + createNamedExportFrom: ` + function createNamedExportFrom(obj, localName, importedName) { + Object.defineProperty(exports, localName, {enumerable: true, configurable: true, get: () => obj[importedName]}); + } + `, + // Note that TypeScript and Babel do this differently; TypeScript does a simple existence + // check in the exports object and does a plain assignment, whereas Babel uses + // defineProperty and builds an object of explicitly-exported names so that star exports can + // always take lower precedence. For now, we do the easier TypeScript thing. 
+ createStarExport: ` + function createStarExport(obj) { + Object.keys(obj) + .filter((key) => key !== "default" && key !== "__esModule") + .forEach((key) => { + if (exports.hasOwnProperty(key)) { + return; + } + Object.defineProperty(exports, key, {enumerable: true, configurable: true, get: () => obj[key]}); + }); + } + `, + nullishCoalesce: ` + function nullishCoalesce(lhs, rhsFn) { + if (lhs != null) { + return lhs; + } else { + return rhsFn(); + } + } + `, + asyncNullishCoalesce: ` + async function asyncNullishCoalesce(lhs, rhsFn) { + if (lhs != null) { + return lhs; + } else { + return await rhsFn(); + } + } + `, + optionalChain: ` + function optionalChain(ops) { + let lastAccessLHS = undefined; + let value = ops[0]; + let i = 1; + while (i < ops.length) { + const op = ops[i]; + const fn = ops[i + 1]; + i += 2; + if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { + return undefined; + } + if (op === 'access' || op === 'optionalAccess') { + lastAccessLHS = value; + value = fn(value); + } else if (op === 'call' || op === 'optionalCall') { + value = fn((...args) => value.call(lastAccessLHS, ...args)); + lastAccessLHS = undefined; + } + } + return value; + } + `, + asyncOptionalChain: ` + async function asyncOptionalChain(ops) { + let lastAccessLHS = undefined; + let value = ops[0]; + let i = 1; + while (i < ops.length) { + const op = ops[i]; + const fn = ops[i + 1]; + i += 2; + if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { + return undefined; + } + if (op === 'access' || op === 'optionalAccess') { + lastAccessLHS = value; + value = await fn(value); + } else if (op === 'call' || op === 'optionalCall') { + value = await fn((...args) => value.call(lastAccessLHS, ...args)); + lastAccessLHS = undefined; + } + } + return value; + } + `, + optionalChainDelete: ` + function optionalChainDelete(ops) { + const result = OPTIONAL_CHAIN_NAME(ops); + return result == null ? true : result; + } + `, + asyncOptionalChainDelete: ` + async function asyncOptionalChainDelete(ops) { + const result = await ASYNC_OPTIONAL_CHAIN_NAME(ops); + return result == null ? 
true : result; + } + `, +}; + +export class HelperManager { + __init() {this.helperNames = {}} + __init2() {this.createRequireName = null} + constructor( nameManager) {;this.nameManager = nameManager;HelperManager.prototype.__init.call(this);HelperManager.prototype.__init2.call(this);} + + getHelperName(baseName) { + let helperName = this.helperNames[baseName]; + if (helperName) { + return helperName; + } + helperName = this.nameManager.claimFreeName(`_${baseName}`); + this.helperNames[baseName] = helperName; + return helperName; + } + + emitHelpers() { + let resultCode = ""; + if (this.helperNames.optionalChainDelete) { + this.getHelperName("optionalChain"); + } + if (this.helperNames.asyncOptionalChainDelete) { + this.getHelperName("asyncOptionalChain"); + } + for (const [baseName, helperCodeTemplate] of Object.entries(HELPERS)) { + const helperName = this.helperNames[baseName]; + let helperCode = helperCodeTemplate; + if (baseName === "optionalChainDelete") { + helperCode = helperCode.replace("OPTIONAL_CHAIN_NAME", this.helperNames.optionalChain); + } else if (baseName === "asyncOptionalChainDelete") { + helperCode = helperCode.replace( + "ASYNC_OPTIONAL_CHAIN_NAME", + this.helperNames.asyncOptionalChain, + ); + } else if (baseName === "require") { + if (this.createRequireName === null) { + this.createRequireName = this.nameManager.claimFreeName("_createRequire"); + } + helperCode = helperCode.replace(/CREATE_REQUIRE_NAME/g, this.createRequireName); + } + if (helperName) { + resultCode += " "; + resultCode += helperCode.replace(baseName, helperName).replace(/\s+/g, " ").trim(); + } + } + return resultCode; + } +} diff --git a/node_modules/sucrase/dist/esm/NameManager.js b/node_modules/sucrase/dist/esm/NameManager.js new file mode 100644 index 0000000..47d2c9f --- /dev/null +++ b/node_modules/sucrase/dist/esm/NameManager.js @@ -0,0 +1,27 @@ + +import getIdentifierNames from "./util/getIdentifierNames"; + +export default class NameManager { + __init() {this.usedNames = new Set()} + + constructor(code, tokens) {;NameManager.prototype.__init.call(this); + this.usedNames = new Set(getIdentifierNames(code, tokens)); + } + + claimFreeName(name) { + const newName = this.findFreeName(name); + this.usedNames.add(newName); + return newName; + } + + findFreeName(name) { + if (!this.usedNames.has(name)) { + return name; + } + let suffixNum = 2; + while (this.usedNames.has(name + String(suffixNum))) { + suffixNum++; + } + return name + String(suffixNum); + } +} diff --git a/node_modules/sucrase/dist/esm/Options-gen-types.js b/node_modules/sucrase/dist/esm/Options-gen-types.js new file mode 100644 index 0000000..cda192d --- /dev/null +++ b/node_modules/sucrase/dist/esm/Options-gen-types.js @@ -0,0 +1,41 @@ +/** + * This module was automatically generated by `ts-interface-builder` + */ +import * as t from "ts-interface-checker"; +// tslint:disable:object-literal-key-quotes + +export const Transform = t.union( + t.lit("jsx"), + t.lit("typescript"), + t.lit("flow"), + t.lit("imports"), + t.lit("react-hot-loader"), + t.lit("jest"), +); + +export const SourceMapOptions = t.iface([], { + compiledFilename: "string", +}); + +export const Options = t.iface([], { + transforms: t.array("Transform"), + disableESTransforms: t.opt("boolean"), + jsxRuntime: t.opt(t.union(t.lit("classic"), t.lit("automatic"), t.lit("preserve"))), + production: t.opt("boolean"), + jsxImportSource: t.opt("string"), + jsxPragma: t.opt("string"), + jsxFragmentPragma: t.opt("string"), + preserveDynamicImport: t.opt("boolean"), + 
injectCreateRequireForImportRequire: t.opt("boolean"), + enableLegacyTypeScriptModuleInterop: t.opt("boolean"), + enableLegacyBabel5ModuleInterop: t.opt("boolean"), + sourceMapOptions: t.opt("SourceMapOptions"), + filePath: t.opt("string"), +}); + +const exportedTypeSuite = { + Transform, + SourceMapOptions, + Options, +}; +export default exportedTypeSuite; diff --git a/node_modules/sucrase/dist/esm/Options.js b/node_modules/sucrase/dist/esm/Options.js new file mode 100644 index 0000000..7b5eabf --- /dev/null +++ b/node_modules/sucrase/dist/esm/Options.js @@ -0,0 +1,95 @@ +import {createCheckers} from "ts-interface-checker"; + +import OptionsGenTypes from "./Options-gen-types"; + +const {Options: OptionsChecker} = createCheckers(OptionsGenTypes); + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +export function validateOptions(options) { + OptionsChecker.strictCheck(options); +} diff --git a/node_modules/sucrase/dist/esm/TokenProcessor.js b/node_modules/sucrase/dist/esm/TokenProcessor.js new file mode 100644 index 0000000..5335f23 --- /dev/null +++ b/node_modules/sucrase/dist/esm/TokenProcessor.js @@ -0,0 +1,357 @@ + + + +import { TokenType as tt} from "./parser/tokenizer/types"; +import isAsyncOperation from "./util/isAsyncOperation"; + + + + + + + + + + + +export default class TokenProcessor { + __init() {this.resultCode = ""} + // Array mapping input token index to optional string index position in the + // output code. + __init2() {this.resultMappings = new Array(this.tokens.length)} + __init3() {this.tokenIndex = 0} + + constructor( + code, + tokens, + isFlowEnabled, + disableESTransforms, + helperManager, + ) {;this.code = code;this.tokens = tokens;this.isFlowEnabled = isFlowEnabled;this.disableESTransforms = disableESTransforms;this.helperManager = helperManager;TokenProcessor.prototype.__init.call(this);TokenProcessor.prototype.__init2.call(this);TokenProcessor.prototype.__init3.call(this);} + + /** + * Snapshot the token state in a way that can be restored later, useful for + * things like lookahead. + * + * resultMappings do not need to be copied since in all use cases, they will + * be overwritten anyway after restore. + */ + snapshot() { + return { + resultCode: this.resultCode, + tokenIndex: this.tokenIndex, + }; + } + + restoreToSnapshot(snapshot) { + this.resultCode = snapshot.resultCode; + this.tokenIndex = snapshot.tokenIndex; + } + + /** + * Remove and return the code generated since the snapshot, leaving the + * current token position in-place. Unlike most TokenProcessor operations, + * this operation can result in input/output line number mismatches because + * the removed code may contain newlines, so this operation should be used + * sparingly. + */ + dangerouslyGetAndRemoveCodeSinceSnapshot(snapshot) { + const result = this.resultCode.slice(snapshot.resultCode.length); + this.resultCode = snapshot.resultCode; + return result; + } + + reset() { + this.resultCode = ""; + this.resultMappings = new Array(this.tokens.length); + this.tokenIndex = 0; + } + + matchesContextualAtIndex(index, contextualKeyword) { + return ( + this.matches1AtIndex(index, tt.name) && + this.tokens[index].contextualKeyword === contextualKeyword + ); + } + + identifierNameAtIndex(index) { + // TODO: We need to process escapes since technically you can have unicode escapes in variable + // names. 
+ return this.identifierNameForToken(this.tokens[index]); + } + + identifierNameAtRelativeIndex(relativeIndex) { + return this.identifierNameForToken(this.tokenAtRelativeIndex(relativeIndex)); + } + + identifierName() { + return this.identifierNameForToken(this.currentToken()); + } + + identifierNameForToken(token) { + return this.code.slice(token.start, token.end); + } + + rawCodeForToken(token) { + return this.code.slice(token.start, token.end); + } + + stringValueAtIndex(index) { + return this.stringValueForToken(this.tokens[index]); + } + + stringValue() { + return this.stringValueForToken(this.currentToken()); + } + + stringValueForToken(token) { + // This is used to identify when two imports are the same and to resolve TypeScript enum keys. + // Ideally we'd process escapes within the strings, but for now we pretty much take the raw + // code. + return this.code.slice(token.start + 1, token.end - 1); + } + + matches1AtIndex(index, t1) { + return this.tokens[index].type === t1; + } + + matches2AtIndex(index, t1, t2) { + return this.tokens[index].type === t1 && this.tokens[index + 1].type === t2; + } + + matches3AtIndex(index, t1, t2, t3) { + return ( + this.tokens[index].type === t1 && + this.tokens[index + 1].type === t2 && + this.tokens[index + 2].type === t3 + ); + } + + matches1(t1) { + return this.tokens[this.tokenIndex].type === t1; + } + + matches2(t1, t2) { + return this.tokens[this.tokenIndex].type === t1 && this.tokens[this.tokenIndex + 1].type === t2; + } + + matches3(t1, t2, t3) { + return ( + this.tokens[this.tokenIndex].type === t1 && + this.tokens[this.tokenIndex + 1].type === t2 && + this.tokens[this.tokenIndex + 2].type === t3 + ); + } + + matches4(t1, t2, t3, t4) { + return ( + this.tokens[this.tokenIndex].type === t1 && + this.tokens[this.tokenIndex + 1].type === t2 && + this.tokens[this.tokenIndex + 2].type === t3 && + this.tokens[this.tokenIndex + 3].type === t4 + ); + } + + matches5(t1, t2, t3, t4, t5) { + return ( + this.tokens[this.tokenIndex].type === t1 && + this.tokens[this.tokenIndex + 1].type === t2 && + this.tokens[this.tokenIndex + 2].type === t3 && + this.tokens[this.tokenIndex + 3].type === t4 && + this.tokens[this.tokenIndex + 4].type === t5 + ); + } + + matchesContextual(contextualKeyword) { + return this.matchesContextualAtIndex(this.tokenIndex, contextualKeyword); + } + + matchesContextIdAndLabel(type, contextId) { + return this.matches1(type) && this.currentToken().contextId === contextId; + } + + previousWhitespaceAndComments() { + let whitespaceAndComments = this.code.slice( + this.tokenIndex > 0 ? this.tokens[this.tokenIndex - 1].end : 0, + this.tokenIndex < this.tokens.length ? 
this.tokens[this.tokenIndex].start : this.code.length, + ); + if (this.isFlowEnabled) { + whitespaceAndComments = whitespaceAndComments.replace(/@flow/g, ""); + } + return whitespaceAndComments; + } + + replaceToken(newCode) { + this.resultCode += this.previousWhitespaceAndComments(); + this.appendTokenPrefix(); + this.resultMappings[this.tokenIndex] = this.resultCode.length; + this.resultCode += newCode; + this.appendTokenSuffix(); + this.tokenIndex++; + } + + replaceTokenTrimmingLeftWhitespace(newCode) { + this.resultCode += this.previousWhitespaceAndComments().replace(/[^\r\n]/g, ""); + this.appendTokenPrefix(); + this.resultMappings[this.tokenIndex] = this.resultCode.length; + this.resultCode += newCode; + this.appendTokenSuffix(); + this.tokenIndex++; + } + + removeInitialToken() { + this.replaceToken(""); + } + + removeToken() { + this.replaceTokenTrimmingLeftWhitespace(""); + } + + /** + * Remove all code until the next }, accounting for balanced braces. + */ + removeBalancedCode() { + let braceDepth = 0; + while (!this.isAtEnd()) { + if (this.matches1(tt.braceL)) { + braceDepth++; + } else if (this.matches1(tt.braceR)) { + if (braceDepth === 0) { + return; + } + braceDepth--; + } + this.removeToken(); + } + } + + copyExpectedToken(tokenType) { + if (this.tokens[this.tokenIndex].type !== tokenType) { + throw new Error(`Expected token ${tokenType}`); + } + this.copyToken(); + } + + copyToken() { + this.resultCode += this.previousWhitespaceAndComments(); + this.appendTokenPrefix(); + this.resultMappings[this.tokenIndex] = this.resultCode.length; + this.resultCode += this.code.slice( + this.tokens[this.tokenIndex].start, + this.tokens[this.tokenIndex].end, + ); + this.appendTokenSuffix(); + this.tokenIndex++; + } + + copyTokenWithPrefix(prefix) { + this.resultCode += this.previousWhitespaceAndComments(); + this.appendTokenPrefix(); + this.resultCode += prefix; + this.resultMappings[this.tokenIndex] = this.resultCode.length; + this.resultCode += this.code.slice( + this.tokens[this.tokenIndex].start, + this.tokens[this.tokenIndex].end, + ); + this.appendTokenSuffix(); + this.tokenIndex++; + } + + appendTokenPrefix() { + const token = this.currentToken(); + if (token.numNullishCoalesceStarts || token.isOptionalChainStart) { + token.isAsyncOperation = isAsyncOperation(this); + } + if (this.disableESTransforms) { + return; + } + if (token.numNullishCoalesceStarts) { + for (let i = 0; i < token.numNullishCoalesceStarts; i++) { + if (token.isAsyncOperation) { + this.resultCode += "await "; + this.resultCode += this.helperManager.getHelperName("asyncNullishCoalesce"); + } else { + this.resultCode += this.helperManager.getHelperName("nullishCoalesce"); + } + this.resultCode += "("; + } + } + if (token.isOptionalChainStart) { + if (token.isAsyncOperation) { + this.resultCode += "await "; + } + if (this.tokenIndex > 0 && this.tokenAtRelativeIndex(-1).type === tt._delete) { + if (token.isAsyncOperation) { + this.resultCode += this.helperManager.getHelperName("asyncOptionalChainDelete"); + } else { + this.resultCode += this.helperManager.getHelperName("optionalChainDelete"); + } + } else if (token.isAsyncOperation) { + this.resultCode += this.helperManager.getHelperName("asyncOptionalChain"); + } else { + this.resultCode += this.helperManager.getHelperName("optionalChain"); + } + this.resultCode += "(["; + } + } + + appendTokenSuffix() { + const token = this.currentToken(); + if (token.isOptionalChainEnd && !this.disableESTransforms) { + this.resultCode += "])"; + } + if 
(token.numNullishCoalesceEnds && !this.disableESTransforms) { + for (let i = 0; i < token.numNullishCoalesceEnds; i++) { + this.resultCode += "))"; + } + } + } + + appendCode(code) { + this.resultCode += code; + } + + currentToken() { + return this.tokens[this.tokenIndex]; + } + + currentTokenCode() { + const token = this.currentToken(); + return this.code.slice(token.start, token.end); + } + + tokenAtRelativeIndex(relativeIndex) { + return this.tokens[this.tokenIndex + relativeIndex]; + } + + currentIndex() { + return this.tokenIndex; + } + + /** + * Move to the next token. Only suitable in preprocessing steps. When + * generating new code, you should use copyToken or removeToken. + */ + nextToken() { + if (this.tokenIndex === this.tokens.length) { + throw new Error("Unexpectedly reached end of input."); + } + this.tokenIndex++; + } + + previousToken() { + this.tokenIndex--; + } + + finish() { + if (this.tokenIndex !== this.tokens.length) { + throw new Error("Tried to finish processing tokens before reaching the end."); + } + this.resultCode += this.previousWhitespaceAndComments(); + return {code: this.resultCode, mappings: this.resultMappings}; + } + + isAtEnd() { + return this.tokenIndex === this.tokens.length; + } +} diff --git a/node_modules/sucrase/dist/esm/cli.js b/node_modules/sucrase/dist/esm/cli.js new file mode 100644 index 0000000..8256872 --- /dev/null +++ b/node_modules/sucrase/dist/esm/cli.js @@ -0,0 +1,296 @@ +/* eslint-disable no-console */ +import commander from "commander"; +import globCb from "glob"; +import {exists, mkdir, readdir, readFile, stat, writeFile} from "mz/fs"; +import {dirname, join, relative} from "path"; +import {promisify} from "util"; + +import { transform} from "./index"; + + + + + + + + + + + +const glob = promisify(globCb); + +export default function run() { + commander + .description(`Sucrase: super-fast Babel alternative.`) + .usage("[options] ") + .option( + "-d, --out-dir ", + "Compile an input directory of modules into an output directory.", + ) + .option( + "-p, --project ", + "Compile a TypeScript project, will read from tsconfig.json in ", + ) + .option("--out-extension ", "File extension to use for all output files.", "js") + .option("--exclude-dirs ", "Names of directories that should not be traversed.") + .option("-t, --transforms ", "Comma-separated list of transforms to run.") + .option("-q, --quiet", "Don't print the names of converted files.") + .option( + "--enable-legacy-typescript-module-interop", + "Use default TypeScript ESM/CJS interop strategy.", + ) + .option("--enable-legacy-babel5-module-interop", "Use Babel 5 ESM/CJS interop strategy.") + .option("--jsx-pragma ", "Element creation function, defaults to `React.createElement`") + .option("--jsx-fragment-pragma ", "Fragment component, defaults to `React.Fragment`") + .option("--production", "Disable debugging information from JSX in output.") + .parse(process.argv); + + if (commander.project) { + if ( + commander.outDir || + commander.transforms || + commander.args[0] || + commander.enableLegacyTypescriptModuleInterop + ) { + console.error( + "If TypeScript project is specified, out directory, transforms, source " + + "directory, and --enable-legacy-typescript-module-interop may not be specified.", + ); + process.exit(1); + } + } else { + if (!commander.outDir) { + console.error("Out directory is required"); + process.exit(1); + } + + if (!commander.transforms) { + console.error("Transforms option is required."); + process.exit(1); + } + + if (!commander.args[0]) { + 
console.error("Source directory is required."); + process.exit(1); + } + } + + const options = { + outDirPath: commander.outDir, + srcDirPath: commander.args[0], + project: commander.project, + outExtension: commander.outExtension, + excludeDirs: commander.excludeDirs ? commander.excludeDirs.split(",") : [], + quiet: commander.quiet, + sucraseOptions: { + transforms: commander.transforms ? commander.transforms.split(",") : [], + enableLegacyTypeScriptModuleInterop: commander.enableLegacyTypescriptModuleInterop, + enableLegacyBabel5ModuleInterop: commander.enableLegacyBabel5ModuleInterop, + jsxPragma: commander.jsxPragma || "React.createElement", + jsxFragmentPragma: commander.jsxFragmentPragma || "React.Fragment", + production: commander.production, + }, + }; + + buildDirectory(options).catch((e) => { + process.exitCode = 1; + console.error(e); + }); +} + + + + + + +async function findFiles(options) { + const outDirPath = options.outDirPath; + const srcDirPath = options.srcDirPath; + + const extensions = options.sucraseOptions.transforms.includes("typescript") + ? [".ts", ".tsx"] + : [".js", ".jsx"]; + + if (!(await exists(outDirPath))) { + await mkdir(outDirPath); + } + + const outArr = []; + for (const child of await readdir(srcDirPath)) { + if (["node_modules", ".git"].includes(child) || options.excludeDirs.includes(child)) { + continue; + } + const srcChildPath = join(srcDirPath, child); + const outChildPath = join(outDirPath, child); + if ((await stat(srcChildPath)).isDirectory()) { + const innerOptions = {...options}; + innerOptions.srcDirPath = srcChildPath; + innerOptions.outDirPath = outChildPath; + const innerFiles = await findFiles(innerOptions); + outArr.push(...innerFiles); + } else if (extensions.some((ext) => srcChildPath.endsWith(ext))) { + const outPath = outChildPath.replace(/\.\w+$/, `.${options.outExtension}`); + outArr.push({ + srcPath: srcChildPath, + outPath, + }); + } + } + + return outArr; +} + +async function runGlob(options) { + const tsConfigPath = join(options.project, "tsconfig.json"); + + let str; + try { + str = await readFile(tsConfigPath, "utf8"); + } catch (err) { + console.error("Could not find project tsconfig.json"); + console.error(` --project=${options.project}`); + console.error(err); + process.exit(1); + } + const json = JSON.parse(str); + + const foundFiles = []; + + const files = json.files; + const include = json.include; + + const absProject = join(process.cwd(), options.project); + const outDirs = []; + + if (!(await exists(options.outDirPath))) { + await mkdir(options.outDirPath); + } + + if (files) { + for (const file of files) { + if (file.endsWith(".d.ts")) { + continue; + } + if (!file.endsWith(".ts") && !file.endsWith(".js")) { + continue; + } + + const srcFile = join(absProject, file); + const outFile = join(options.outDirPath, file); + const outPath = outFile.replace(/\.\w+$/, `.${options.outExtension}`); + + const outDir = dirname(outPath); + if (!outDirs.includes(outDir)) { + outDirs.push(outDir); + } + + foundFiles.push({ + srcPath: srcFile, + outPath, + }); + } + } + if (include) { + for (const pattern of include) { + const globFiles = await glob(join(absProject, pattern)); + for (const file of globFiles) { + if (!file.endsWith(".ts") && !file.endsWith(".js")) { + continue; + } + if (file.endsWith(".d.ts")) { + continue; + } + + const relativeFile = relative(absProject, file); + const outFile = join(options.outDirPath, relativeFile); + const outPath = outFile.replace(/\.\w+$/, `.${options.outExtension}`); + + const outDir = 
dirname(outPath); + if (!outDirs.includes(outDir)) { + outDirs.push(outDir); + } + + foundFiles.push({ + srcPath: file, + outPath, + }); + } + } + } + + for (const outDirPath of outDirs) { + if (!(await exists(outDirPath))) { + await mkdir(outDirPath); + } + } + + // TODO: read exclude + + return foundFiles; +} + +async function updateOptionsFromProject(options) { + /** + * Read the project information and assign the following. + * - outDirPath + * - transform: imports + * - transform: typescript + * - enableLegacyTypescriptModuleInterop: true/false. + */ + + const tsConfigPath = join(options.project, "tsconfig.json"); + + let str; + try { + str = await readFile(tsConfigPath, "utf8"); + } catch (err) { + console.error("Could not find project tsconfig.json"); + console.error(` --project=${options.project}`); + console.error(err); + process.exit(1); + } + const json = JSON.parse(str); + const sucraseOpts = options.sucraseOptions; + if (!sucraseOpts.transforms.includes("typescript")) { + sucraseOpts.transforms.push("typescript"); + } + + const compilerOpts = json.compilerOptions; + if (compilerOpts.outDir) { + options.outDirPath = join(process.cwd(), options.project, compilerOpts.outDir); + } + if (compilerOpts.esModuleInterop !== true) { + sucraseOpts.enableLegacyTypeScriptModuleInterop = true; + } + if (compilerOpts.module === "commonjs") { + if (!sucraseOpts.transforms.includes("imports")) { + sucraseOpts.transforms.push("imports"); + } + } +} + +async function buildDirectory(options) { + let files; + if (options.outDirPath && options.srcDirPath) { + files = await findFiles(options); + } else if (options.project) { + await updateOptionsFromProject(options); + files = await runGlob(options); + } else { + console.error("Project or Source directory required."); + process.exit(1); + } + + for (const file of files) { + await buildFile(file.srcPath, file.outPath, options); + } +} + +async function buildFile(srcPath, outPath, options) { + if (!options.quiet) { + console.log(`${srcPath} -> ${outPath}`); + } + const code = (await readFile(srcPath)).toString(); + const transformedCode = transform(code, {...options.sucraseOptions, filePath: srcPath}).code; + await writeFile(outPath, transformedCode); +} diff --git a/node_modules/sucrase/dist/esm/computeSourceMap.js b/node_modules/sucrase/dist/esm/computeSourceMap.js new file mode 100644 index 0000000..699ea78 --- /dev/null +++ b/node_modules/sucrase/dist/esm/computeSourceMap.js @@ -0,0 +1,89 @@ +import {GenMapping, maybeAddSegment, toEncodedMap} from "@jridgewell/gen-mapping"; + + + +import {charCodes} from "./parser/util/charcodes"; + + + + + + + + + + + + +/** + * Generate a source map indicating that each line maps directly to the original line, + * with the tokens in their new positions. + */ +export default function computeSourceMap( + {code: generatedCode, mappings: rawMappings}, + filePath, + options, + source, + tokens, +) { + const sourceColumns = computeSourceColumns(source, tokens); + const map = new GenMapping({file: options.compiledFilename}); + let tokenIndex = 0; + // currentMapping is the output source index for the current input token being + // considered. 
+ let currentMapping = rawMappings[0]; + while (currentMapping === undefined && tokenIndex < rawMappings.length - 1) { + tokenIndex++; + currentMapping = rawMappings[tokenIndex]; + } + let line = 0; + let lineStart = 0; + if (currentMapping !== lineStart) { + maybeAddSegment(map, line, 0, filePath, line, 0); + } + for (let i = 0; i < generatedCode.length; i++) { + if (i === currentMapping) { + const genColumn = currentMapping - lineStart; + const sourceColumn = sourceColumns[tokenIndex]; + maybeAddSegment(map, line, genColumn, filePath, line, sourceColumn); + while ( + (currentMapping === i || currentMapping === undefined) && + tokenIndex < rawMappings.length - 1 + ) { + tokenIndex++; + currentMapping = rawMappings[tokenIndex]; + } + } + if (generatedCode.charCodeAt(i) === charCodes.lineFeed) { + line++; + lineStart = i + 1; + if (currentMapping !== lineStart) { + maybeAddSegment(map, line, 0, filePath, line, 0); + } + } + } + const {sourceRoot, sourcesContent, ...sourceMap} = toEncodedMap(map); + return sourceMap ; +} + +/** + * Create an array mapping each token index to the 0-based column of the start + * position of the token. + */ +function computeSourceColumns(code, tokens) { + const sourceColumns = new Array(tokens.length); + let tokenIndex = 0; + let currentMapping = tokens[tokenIndex].start; + let lineStart = 0; + for (let i = 0; i < code.length; i++) { + if (i === currentMapping) { + sourceColumns[tokenIndex] = currentMapping - lineStart; + tokenIndex++; + currentMapping = tokens[tokenIndex].start; + } + if (code.charCodeAt(i) === charCodes.lineFeed) { + lineStart = i + 1; + } + } + return sourceColumns; +} diff --git a/node_modules/sucrase/dist/esm/identifyShadowedGlobals.js b/node_modules/sucrase/dist/esm/identifyShadowedGlobals.js new file mode 100644 index 0000000..47ef038 --- /dev/null +++ b/node_modules/sucrase/dist/esm/identifyShadowedGlobals.js @@ -0,0 +1,97 @@ +import { + isBlockScopedDeclaration, + isFunctionScopedDeclaration, + isNonTopLevelDeclaration, +} from "./parser/tokenizer"; + +import {TokenType as tt} from "./parser/tokenizer/types"; + + +/** + * Traverse the given tokens and modify them if necessary to indicate that some names shadow global + * variables. + */ +export default function identifyShadowedGlobals( + tokens, + scopes, + globalNames, +) { + if (!hasShadowedGlobals(tokens, globalNames)) { + return; + } + markShadowedGlobals(tokens, scopes, globalNames); +} + +/** + * We can do a fast up-front check to see if there are any declarations to global names. If not, + * then there's no point in computing scope assignments. + */ +// Exported for testing. +export function hasShadowedGlobals(tokens, globalNames) { + for (const token of tokens.tokens) { + if ( + token.type === tt.name && + isNonTopLevelDeclaration(token) && + globalNames.has(tokens.identifierNameForToken(token)) + ) { + return true; + } + } + return false; +} + +function markShadowedGlobals( + tokens, + scopes, + globalNames, +) { + const scopeStack = []; + let scopeIndex = scopes.length - 1; + // Scopes were generated at completion time, so they're sorted by end index, so we can maintain a + // good stack by going backwards through them. 
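// A hedged illustration (hypothetical module and names) of the shadowing this pass
// detects: `foo` is imported at top level, but a nested declaration of `foo` hides it,
// so only the outer reference should be rewritten by the CommonJS import transform.
const shadowingExample = `
import {foo} from "./mod";
export function outer() {
  return foo();            // refers to the import
}
export function inner() {
  const foo = () => 1;     // block-scoped declaration shadows the imported name
  return foo();            // must be left untouched by the import transform
}
`;
// Passing this string through transform(..., {transforms: ["imports"]}) exercises
// identifyShadowedGlobals via the CJSImportProcessor path shown in index.js below.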
+ for (let i = tokens.tokens.length - 1; ; i--) { + while (scopeStack.length > 0 && scopeStack[scopeStack.length - 1].startTokenIndex === i + 1) { + scopeStack.pop(); + } + while (scopeIndex >= 0 && scopes[scopeIndex].endTokenIndex === i + 1) { + scopeStack.push(scopes[scopeIndex]); + scopeIndex--; + } + // Process scopes after the last iteration so we can make sure we pop all of them. + if (i < 0) { + break; + } + + const token = tokens.tokens[i]; + const name = tokens.identifierNameForToken(token); + if (scopeStack.length > 1 && token.type === tt.name && globalNames.has(name)) { + if (isBlockScopedDeclaration(token)) { + markShadowedForScope(scopeStack[scopeStack.length - 1], tokens, name); + } else if (isFunctionScopedDeclaration(token)) { + let stackIndex = scopeStack.length - 1; + while (stackIndex > 0 && !scopeStack[stackIndex].isFunctionScope) { + stackIndex--; + } + if (stackIndex < 0) { + throw new Error("Did not find parent function scope."); + } + markShadowedForScope(scopeStack[stackIndex], tokens, name); + } + } + } + if (scopeStack.length > 0) { + throw new Error("Expected empty scope stack after processing file."); + } +} + +function markShadowedForScope(scope, tokens, name) { + for (let i = scope.startTokenIndex; i < scope.endTokenIndex; i++) { + const token = tokens.tokens[i]; + if ( + (token.type === tt.name || token.type === tt.jsxName) && + tokens.identifierNameForToken(token) === name + ) { + token.shadowsGlobal = true; + } + } +} diff --git a/node_modules/sucrase/dist/esm/index.js b/node_modules/sucrase/dist/esm/index.js new file mode 100644 index 0000000..dbbef7f --- /dev/null +++ b/node_modules/sucrase/dist/esm/index.js @@ -0,0 +1,133 @@ +import CJSImportProcessor from "./CJSImportProcessor"; +import computeSourceMap, {} from "./computeSourceMap"; +import {HelperManager} from "./HelperManager"; +import identifyShadowedGlobals from "./identifyShadowedGlobals"; +import NameManager from "./NameManager"; +import {validateOptions} from "./Options"; +import {parse} from "./parser"; + +import TokenProcessor from "./TokenProcessor"; +import RootTransformer from "./transformers/RootTransformer"; +import formatTokens from "./util/formatTokens"; +import getTSImportedNames from "./util/getTSImportedNames"; + + + + + + + + + + + + + + + + + + + +export function getVersion() { + /* istanbul ignore next */ + return "3.32.0"; +} + +export function transform(code, options) { + validateOptions(options); + try { + const sucraseContext = getSucraseContext(code, options); + const transformer = new RootTransformer( + sucraseContext, + options.transforms, + Boolean(options.enableLegacyBabel5ModuleInterop), + options, + ); + const transformerResult = transformer.transform(); + let result = {code: transformerResult.code}; + if (options.sourceMapOptions) { + if (!options.filePath) { + throw new Error("filePath must be specified when generating a source map."); + } + result = { + ...result, + sourceMap: computeSourceMap( + transformerResult, + options.filePath, + options.sourceMapOptions, + code, + sucraseContext.tokenProcessor.tokens, + ), + }; + } + return result; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + } catch (e) { + if (options.filePath) { + e.message = `Error transforming ${options.filePath}: ${e.message}`; + } + throw e; + } +} + +/** + * Return a string representation of the sucrase tokens, mostly useful for + * diagnostic purposes. 
+ */ +export function getFormattedTokens(code, options) { + const tokens = getSucraseContext(code, options).tokenProcessor.tokens; + return formatTokens(code, tokens); +} + +/** + * Call into the parser/tokenizer and do some further preprocessing: + * - Come up with a set of used names so that we can assign new names. + * - Preprocess all import/export statements so we know which globals we are interested in. + * - Compute situations where any of those globals are shadowed. + * + * In the future, some of these preprocessing steps can be skipped based on what actual work is + * being done. + */ +function getSucraseContext(code, options) { + const isJSXEnabled = options.transforms.includes("jsx"); + const isTypeScriptEnabled = options.transforms.includes("typescript"); + const isFlowEnabled = options.transforms.includes("flow"); + const disableESTransforms = options.disableESTransforms === true; + const file = parse(code, isJSXEnabled, isTypeScriptEnabled, isFlowEnabled); + const tokens = file.tokens; + const scopes = file.scopes; + + const nameManager = new NameManager(code, tokens); + const helperManager = new HelperManager(nameManager); + const tokenProcessor = new TokenProcessor( + code, + tokens, + isFlowEnabled, + disableESTransforms, + helperManager, + ); + const enableLegacyTypeScriptModuleInterop = Boolean(options.enableLegacyTypeScriptModuleInterop); + + let importProcessor = null; + if (options.transforms.includes("imports")) { + importProcessor = new CJSImportProcessor( + nameManager, + tokenProcessor, + enableLegacyTypeScriptModuleInterop, + options, + options.transforms.includes("typescript"), + helperManager, + ); + importProcessor.preprocessTokens(); + // We need to mark shadowed globals after processing imports so we know that the globals are, + // but before type-only import pruning, since that relies on shadowing information. 
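// A minimal usage sketch of the transform() entry point defined above, including the
// source map path that flows through computeSourceMap (file names are illustrative).
import {transform} from "sucrase";

const result = transform("const n: number = 1;\nexport default n;\n", {
  transforms: ["typescript", "imports"],
  filePath: "example.ts",
  sourceMapOptions: {compiledFilename: "example.js"},
});
// result.code is the compiled JavaScript; result.sourceMap is the encoded map
// produced by computeSourceMap with one segment per line.
console.log(result.code);
console.log(result.sourceMap);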
+ identifyShadowedGlobals(tokenProcessor, scopes, importProcessor.getGlobalNames()); + if (options.transforms.includes("typescript")) { + importProcessor.pruneTypeOnlyImports(); + } + } else if (options.transforms.includes("typescript")) { + identifyShadowedGlobals(tokenProcessor, scopes, getTSImportedNames(tokenProcessor)); + } + return {tokenProcessor, scopes, nameManager, importProcessor, helperManager}; +} diff --git a/node_modules/sucrase/dist/esm/parser/index.js b/node_modules/sucrase/dist/esm/parser/index.js new file mode 100644 index 0000000..5074ae4 --- /dev/null +++ b/node_modules/sucrase/dist/esm/parser/index.js @@ -0,0 +1,31 @@ + + +import {augmentError, initParser, state} from "./traverser/base"; +import {parseFile} from "./traverser/index"; + +export class File { + + + + constructor(tokens, scopes) { + this.tokens = tokens; + this.scopes = scopes; + } +} + +export function parse( + input, + isJSXEnabled, + isTypeScriptEnabled, + isFlowEnabled, +) { + if (isFlowEnabled && isTypeScriptEnabled) { + throw new Error("Cannot combine flow and typescript plugins."); + } + initParser(input, isJSXEnabled, isTypeScriptEnabled, isFlowEnabled); + const result = parseFile(); + if (state.error) { + throw augmentError(state.error); + } + return result; +} diff --git a/node_modules/sucrase/dist/esm/parser/plugins/flow.js b/node_modules/sucrase/dist/esm/parser/plugins/flow.js new file mode 100644 index 0000000..66295d1 --- /dev/null +++ b/node_modules/sucrase/dist/esm/parser/plugins/flow.js @@ -0,0 +1,1105 @@ +/* eslint max-len: 0 */ + +import { + eat, + lookaheadType, + lookaheadTypeAndKeyword, + match, + next, + popTypeContext, + pushTypeContext, + +} from "../tokenizer/index"; +import {ContextualKeyword} from "../tokenizer/keywords"; +import {TokenType, TokenType as tt} from "../tokenizer/types"; +import {input, state} from "../traverser/base"; +import { + baseParseMaybeAssign, + baseParseSubscript, + baseParseSubscripts, + parseArrow, + parseArrowExpression, + parseCallExpressionArguments, + parseExprAtom, + parseExpression, + parseFunctionBody, + parseIdentifier, + parseLiteral, + +} from "../traverser/expression"; +import { + baseParseExportStar, + parseExport, + parseExportFrom, + parseExportSpecifiers, + parseFunctionParams, + parseImport, + parseStatement, +} from "../traverser/statement"; +import { + canInsertSemicolon, + eatContextual, + expect, + expectContextual, + isContextual, + isLookaheadContextual, + semicolon, + unexpected, +} from "../traverser/util"; + +function isMaybeDefaultImport(lookahead) { + return ( + (lookahead.type === tt.name || !!(lookahead.type & TokenType.IS_KEYWORD)) && + lookahead.contextualKeyword !== ContextualKeyword._from + ); +} + +function flowParseTypeInitialiser(tok) { + const oldIsType = pushTypeContext(0); + expect(tok || tt.colon); + flowParseType(); + popTypeContext(oldIsType); +} + +function flowParsePredicate() { + expect(tt.modulo); + expectContextual(ContextualKeyword._checks); + if (eat(tt.parenL)) { + parseExpression(); + expect(tt.parenR); + } +} + +function flowParseTypeAndPredicateInitialiser() { + const oldIsType = pushTypeContext(0); + expect(tt.colon); + if (match(tt.modulo)) { + flowParsePredicate(); + } else { + flowParseType(); + if (match(tt.modulo)) { + flowParsePredicate(); + } + } + popTypeContext(oldIsType); +} + +function flowParseDeclareClass() { + next(); + flowParseInterfaceish(/* isClass */ true); +} + +function flowParseDeclareFunction() { + next(); + parseIdentifier(); + + if (match(tt.lessThan)) { + 
flowParseTypeParameterDeclaration(); + } + + expect(tt.parenL); + flowParseFunctionTypeParams(); + expect(tt.parenR); + + flowParseTypeAndPredicateInitialiser(); + + semicolon(); +} + +function flowParseDeclare() { + if (match(tt._class)) { + flowParseDeclareClass(); + } else if (match(tt._function)) { + flowParseDeclareFunction(); + } else if (match(tt._var)) { + flowParseDeclareVariable(); + } else if (eatContextual(ContextualKeyword._module)) { + if (eat(tt.dot)) { + flowParseDeclareModuleExports(); + } else { + flowParseDeclareModule(); + } + } else if (isContextual(ContextualKeyword._type)) { + flowParseDeclareTypeAlias(); + } else if (isContextual(ContextualKeyword._opaque)) { + flowParseDeclareOpaqueType(); + } else if (isContextual(ContextualKeyword._interface)) { + flowParseDeclareInterface(); + } else if (match(tt._export)) { + flowParseDeclareExportDeclaration(); + } else { + unexpected(); + } +} + +function flowParseDeclareVariable() { + next(); + flowParseTypeAnnotatableIdentifier(); + semicolon(); +} + +function flowParseDeclareModule() { + if (match(tt.string)) { + parseExprAtom(); + } else { + parseIdentifier(); + } + + expect(tt.braceL); + while (!match(tt.braceR) && !state.error) { + if (match(tt._import)) { + next(); + parseImport(); + } else { + unexpected(); + } + } + expect(tt.braceR); +} + +function flowParseDeclareExportDeclaration() { + expect(tt._export); + + if (eat(tt._default)) { + if (match(tt._function) || match(tt._class)) { + // declare export default class ... + // declare export default function ... + flowParseDeclare(); + } else { + // declare export default [type]; + flowParseType(); + semicolon(); + } + } else if ( + match(tt._var) || // declare export var ... + match(tt._function) || // declare export function ... + match(tt._class) || // declare export class ... + isContextual(ContextualKeyword._opaque) // declare export opaque .. + ) { + flowParseDeclare(); + } else if ( + match(tt.star) || // declare export * from '' + match(tt.braceL) || // declare export {} ... + isContextual(ContextualKeyword._interface) || // declare export interface ... + isContextual(ContextualKeyword._type) || // declare export type ... + isContextual(ContextualKeyword._opaque) // declare export opaque type ... 
+ ) { + parseExport(); + } else { + unexpected(); + } +} + +function flowParseDeclareModuleExports() { + expectContextual(ContextualKeyword._exports); + flowParseTypeAnnotation(); + semicolon(); +} + +function flowParseDeclareTypeAlias() { + next(); + flowParseTypeAlias(); +} + +function flowParseDeclareOpaqueType() { + next(); + flowParseOpaqueType(true); +} + +function flowParseDeclareInterface() { + next(); + flowParseInterfaceish(); +} + +// Interfaces + +function flowParseInterfaceish(isClass = false) { + flowParseRestrictedIdentifier(); + + if (match(tt.lessThan)) { + flowParseTypeParameterDeclaration(); + } + + if (eat(tt._extends)) { + do { + flowParseInterfaceExtends(); + } while (!isClass && eat(tt.comma)); + } + + if (isContextual(ContextualKeyword._mixins)) { + next(); + do { + flowParseInterfaceExtends(); + } while (eat(tt.comma)); + } + + if (isContextual(ContextualKeyword._implements)) { + next(); + do { + flowParseInterfaceExtends(); + } while (eat(tt.comma)); + } + + flowParseObjectType(isClass, false, isClass); +} + +function flowParseInterfaceExtends() { + flowParseQualifiedTypeIdentifier(false); + if (match(tt.lessThan)) { + flowParseTypeParameterInstantiation(); + } +} + +function flowParseInterface() { + flowParseInterfaceish(); +} + +function flowParseRestrictedIdentifier() { + parseIdentifier(); +} + +function flowParseTypeAlias() { + flowParseRestrictedIdentifier(); + + if (match(tt.lessThan)) { + flowParseTypeParameterDeclaration(); + } + + flowParseTypeInitialiser(tt.eq); + semicolon(); +} + +function flowParseOpaqueType(declare) { + expectContextual(ContextualKeyword._type); + flowParseRestrictedIdentifier(); + + if (match(tt.lessThan)) { + flowParseTypeParameterDeclaration(); + } + + // Parse the supertype + if (match(tt.colon)) { + flowParseTypeInitialiser(tt.colon); + } + + if (!declare) { + flowParseTypeInitialiser(tt.eq); + } + semicolon(); +} + +function flowParseTypeParameter() { + flowParseVariance(); + flowParseTypeAnnotatableIdentifier(); + + if (eat(tt.eq)) { + flowParseType(); + } +} + +export function flowParseTypeParameterDeclaration() { + const oldIsType = pushTypeContext(0); + // istanbul ignore else: this condition is already checked at all call sites + if (match(tt.lessThan) || match(tt.typeParameterStart)) { + next(); + } else { + unexpected(); + } + + do { + flowParseTypeParameter(); + if (!match(tt.greaterThan)) { + expect(tt.comma); + } + } while (!match(tt.greaterThan) && !state.error); + expect(tt.greaterThan); + popTypeContext(oldIsType); +} + +function flowParseTypeParameterInstantiation() { + const oldIsType = pushTypeContext(0); + expect(tt.lessThan); + while (!match(tt.greaterThan) && !state.error) { + flowParseType(); + if (!match(tt.greaterThan)) { + expect(tt.comma); + } + } + expect(tt.greaterThan); + popTypeContext(oldIsType); +} + +function flowParseInterfaceType() { + expectContextual(ContextualKeyword._interface); + if (eat(tt._extends)) { + do { + flowParseInterfaceExtends(); + } while (eat(tt.comma)); + } + flowParseObjectType(false, false, false); +} + +function flowParseObjectPropertyKey() { + if (match(tt.num) || match(tt.string)) { + parseExprAtom(); + } else { + parseIdentifier(); + } +} + +function flowParseObjectTypeIndexer() { + // Note: bracketL has already been consumed + if (lookaheadType() === tt.colon) { + flowParseObjectPropertyKey(); + flowParseTypeInitialiser(); + } else { + flowParseType(); + } + expect(tt.bracketR); + flowParseTypeInitialiser(); +} + +function flowParseObjectTypeInternalSlot() { + // Note: 
both bracketL have already been consumed + flowParseObjectPropertyKey(); + expect(tt.bracketR); + expect(tt.bracketR); + if (match(tt.lessThan) || match(tt.parenL)) { + flowParseObjectTypeMethodish(); + } else { + eat(tt.question); + flowParseTypeInitialiser(); + } +} + +function flowParseObjectTypeMethodish() { + if (match(tt.lessThan)) { + flowParseTypeParameterDeclaration(); + } + + expect(tt.parenL); + while (!match(tt.parenR) && !match(tt.ellipsis) && !state.error) { + flowParseFunctionTypeParam(); + if (!match(tt.parenR)) { + expect(tt.comma); + } + } + + if (eat(tt.ellipsis)) { + flowParseFunctionTypeParam(); + } + expect(tt.parenR); + flowParseTypeInitialiser(); +} + +function flowParseObjectTypeCallProperty() { + flowParseObjectTypeMethodish(); +} + +function flowParseObjectType(allowStatic, allowExact, allowProto) { + let endDelim; + if (allowExact && match(tt.braceBarL)) { + expect(tt.braceBarL); + endDelim = tt.braceBarR; + } else { + expect(tt.braceL); + endDelim = tt.braceR; + } + + while (!match(endDelim) && !state.error) { + if (allowProto && isContextual(ContextualKeyword._proto)) { + const lookahead = lookaheadType(); + if (lookahead !== tt.colon && lookahead !== tt.question) { + next(); + allowStatic = false; + } + } + if (allowStatic && isContextual(ContextualKeyword._static)) { + const lookahead = lookaheadType(); + if (lookahead !== tt.colon && lookahead !== tt.question) { + next(); + } + } + + flowParseVariance(); + + if (eat(tt.bracketL)) { + if (eat(tt.bracketL)) { + flowParseObjectTypeInternalSlot(); + } else { + flowParseObjectTypeIndexer(); + } + } else if (match(tt.parenL) || match(tt.lessThan)) { + flowParseObjectTypeCallProperty(); + } else { + if (isContextual(ContextualKeyword._get) || isContextual(ContextualKeyword._set)) { + const lookahead = lookaheadType(); + if (lookahead === tt.name || lookahead === tt.string || lookahead === tt.num) { + next(); + } + } + + flowParseObjectTypeProperty(); + } + + flowObjectTypeSemicolon(); + } + + expect(endDelim); +} + +function flowParseObjectTypeProperty() { + if (match(tt.ellipsis)) { + expect(tt.ellipsis); + if (!eat(tt.comma)) { + eat(tt.semi); + } + // Explicit inexact object syntax. 
+ if (match(tt.braceR)) { + return; + } + flowParseType(); + } else { + flowParseObjectPropertyKey(); + if (match(tt.lessThan) || match(tt.parenL)) { + // This is a method property + flowParseObjectTypeMethodish(); + } else { + eat(tt.question); + flowParseTypeInitialiser(); + } + } +} + +function flowObjectTypeSemicolon() { + if (!eat(tt.semi) && !eat(tt.comma) && !match(tt.braceR) && !match(tt.braceBarR)) { + unexpected(); + } +} + +function flowParseQualifiedTypeIdentifier(initialIdAlreadyParsed) { + if (!initialIdAlreadyParsed) { + parseIdentifier(); + } + while (eat(tt.dot)) { + parseIdentifier(); + } +} + +function flowParseGenericType() { + flowParseQualifiedTypeIdentifier(true); + if (match(tt.lessThan)) { + flowParseTypeParameterInstantiation(); + } +} + +function flowParseTypeofType() { + expect(tt._typeof); + flowParsePrimaryType(); +} + +function flowParseTupleType() { + expect(tt.bracketL); + // We allow trailing commas + while (state.pos < input.length && !match(tt.bracketR)) { + flowParseType(); + if (match(tt.bracketR)) { + break; + } + expect(tt.comma); + } + expect(tt.bracketR); +} + +function flowParseFunctionTypeParam() { + const lookahead = lookaheadType(); + if (lookahead === tt.colon || lookahead === tt.question) { + parseIdentifier(); + eat(tt.question); + flowParseTypeInitialiser(); + } else { + flowParseType(); + } +} + +function flowParseFunctionTypeParams() { + while (!match(tt.parenR) && !match(tt.ellipsis) && !state.error) { + flowParseFunctionTypeParam(); + if (!match(tt.parenR)) { + expect(tt.comma); + } + } + if (eat(tt.ellipsis)) { + flowParseFunctionTypeParam(); + } +} + +// The parsing of types roughly parallels the parsing of expressions, and +// primary types are kind of like primary expressions...they're the +// primitives with which other types are constructed. 
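// A hedged illustration (hypothetical type names) of the Flow-only syntax the
// functions above and below parse; the "flow" transform strips it all away.
import {transform} from "sucrase";

const flowSource = `
declare var DEBUG: boolean;
opaque type Id = string;
interface HasId { id: Id }
const record: {|id: Id|} = {id: "abc"};
`;
// Only the runtime code (the const declaration) should survive the transform.
console.log(transform(flowSource, {transforms: ["flow"], filePath: "example.js"}).code);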
+function flowParsePrimaryType() { + let isGroupedType = false; + const oldNoAnonFunctionType = state.noAnonFunctionType; + + switch (state.type) { + case tt.name: { + if (isContextual(ContextualKeyword._interface)) { + flowParseInterfaceType(); + return; + } + parseIdentifier(); + flowParseGenericType(); + return; + } + + case tt.braceL: + flowParseObjectType(false, false, false); + return; + + case tt.braceBarL: + flowParseObjectType(false, true, false); + return; + + case tt.bracketL: + flowParseTupleType(); + return; + + case tt.lessThan: + flowParseTypeParameterDeclaration(); + expect(tt.parenL); + flowParseFunctionTypeParams(); + expect(tt.parenR); + expect(tt.arrow); + flowParseType(); + return; + + case tt.parenL: + next(); + + // Check to see if this is actually a grouped type + if (!match(tt.parenR) && !match(tt.ellipsis)) { + if (match(tt.name)) { + const token = lookaheadType(); + isGroupedType = token !== tt.question && token !== tt.colon; + } else { + isGroupedType = true; + } + } + + if (isGroupedType) { + state.noAnonFunctionType = false; + flowParseType(); + state.noAnonFunctionType = oldNoAnonFunctionType; + + // A `,` or a `) =>` means this is an anonymous function type + if ( + state.noAnonFunctionType || + !(match(tt.comma) || (match(tt.parenR) && lookaheadType() === tt.arrow)) + ) { + expect(tt.parenR); + return; + } else { + // Eat a comma if there is one + eat(tt.comma); + } + } + + flowParseFunctionTypeParams(); + + expect(tt.parenR); + expect(tt.arrow); + flowParseType(); + return; + + case tt.minus: + next(); + parseLiteral(); + return; + + case tt.string: + case tt.num: + case tt._true: + case tt._false: + case tt._null: + case tt._this: + case tt._void: + case tt.star: + next(); + return; + + default: + if (state.type === tt._typeof) { + flowParseTypeofType(); + return; + } else if (state.type & TokenType.IS_KEYWORD) { + next(); + state.tokens[state.tokens.length - 1].type = tt.name; + return; + } + } + + unexpected(); +} + +function flowParsePostfixType() { + flowParsePrimaryType(); + while (!canInsertSemicolon() && (match(tt.bracketL) || match(tt.questionDot))) { + eat(tt.questionDot); + expect(tt.bracketL); + if (eat(tt.bracketR)) { + // Array type + } else { + // Indexed access type + flowParseType(); + expect(tt.bracketR); + } + } +} + +function flowParsePrefixType() { + if (eat(tt.question)) { + flowParsePrefixType(); + } else { + flowParsePostfixType(); + } +} + +function flowParseAnonFunctionWithoutParens() { + flowParsePrefixType(); + if (!state.noAnonFunctionType && eat(tt.arrow)) { + flowParseType(); + } +} + +function flowParseIntersectionType() { + eat(tt.bitwiseAND); + flowParseAnonFunctionWithoutParens(); + while (eat(tt.bitwiseAND)) { + flowParseAnonFunctionWithoutParens(); + } +} + +function flowParseUnionType() { + eat(tt.bitwiseOR); + flowParseIntersectionType(); + while (eat(tt.bitwiseOR)) { + flowParseIntersectionType(); + } +} + +function flowParseType() { + flowParseUnionType(); +} + +export function flowParseTypeAnnotation() { + flowParseTypeInitialiser(); +} + +function flowParseTypeAnnotatableIdentifier() { + parseIdentifier(); + if (match(tt.colon)) { + flowParseTypeAnnotation(); + } +} + +export function flowParseVariance() { + if (match(tt.plus) || match(tt.minus)) { + next(); + state.tokens[state.tokens.length - 1].isType = true; + } +} + +// ================================== +// Overrides +// ================================== + +export function flowParseFunctionBodyAndFinish(funcContextId) { + // For arrow functions, 
`parseArrow` handles the return type itself. + if (match(tt.colon)) { + flowParseTypeAndPredicateInitialiser(); + } + + parseFunctionBody(false, funcContextId); +} + +export function flowParseSubscript( + startTokenIndex, + noCalls, + stopState, +) { + if (match(tt.questionDot) && lookaheadType() === tt.lessThan) { + if (noCalls) { + stopState.stop = true; + return; + } + next(); + flowParseTypeParameterInstantiation(); + expect(tt.parenL); + parseCallExpressionArguments(); + return; + } else if (!noCalls && match(tt.lessThan)) { + const snapshot = state.snapshot(); + flowParseTypeParameterInstantiation(); + expect(tt.parenL); + parseCallExpressionArguments(); + if (state.error) { + state.restoreFromSnapshot(snapshot); + } else { + return; + } + } + baseParseSubscript(startTokenIndex, noCalls, stopState); +} + +export function flowStartParseNewArguments() { + if (match(tt.lessThan)) { + const snapshot = state.snapshot(); + flowParseTypeParameterInstantiation(); + if (state.error) { + state.restoreFromSnapshot(snapshot); + } + } +} + +// interfaces +export function flowTryParseStatement() { + if (match(tt.name) && state.contextualKeyword === ContextualKeyword._interface) { + const oldIsType = pushTypeContext(0); + next(); + flowParseInterface(); + popTypeContext(oldIsType); + return true; + } else if (isContextual(ContextualKeyword._enum)) { + flowParseEnumDeclaration(); + return true; + } + return false; +} + +export function flowTryParseExportDefaultExpression() { + if (isContextual(ContextualKeyword._enum)) { + flowParseEnumDeclaration(); + return true; + } + return false; +} + +// declares, interfaces and type aliases +export function flowParseIdentifierStatement(contextualKeyword) { + if (contextualKeyword === ContextualKeyword._declare) { + if ( + match(tt._class) || + match(tt.name) || + match(tt._function) || + match(tt._var) || + match(tt._export) + ) { + const oldIsType = pushTypeContext(1); + flowParseDeclare(); + popTypeContext(oldIsType); + } + } else if (match(tt.name)) { + if (contextualKeyword === ContextualKeyword._interface) { + const oldIsType = pushTypeContext(1); + flowParseInterface(); + popTypeContext(oldIsType); + } else if (contextualKeyword === ContextualKeyword._type) { + const oldIsType = pushTypeContext(1); + flowParseTypeAlias(); + popTypeContext(oldIsType); + } else if (contextualKeyword === ContextualKeyword._opaque) { + const oldIsType = pushTypeContext(1); + flowParseOpaqueType(false); + popTypeContext(oldIsType); + } + } + semicolon(); +} + +// export type +export function flowShouldParseExportDeclaration() { + return ( + isContextual(ContextualKeyword._type) || + isContextual(ContextualKeyword._interface) || + isContextual(ContextualKeyword._opaque) || + isContextual(ContextualKeyword._enum) + ); +} + +export function flowShouldDisallowExportDefaultSpecifier() { + return ( + match(tt.name) && + (state.contextualKeyword === ContextualKeyword._type || + state.contextualKeyword === ContextualKeyword._interface || + state.contextualKeyword === ContextualKeyword._opaque || + state.contextualKeyword === ContextualKeyword._enum) + ); +} + +export function flowParseExportDeclaration() { + if (isContextual(ContextualKeyword._type)) { + const oldIsType = pushTypeContext(1); + next(); + + if (match(tt.braceL)) { + // export type { foo, bar }; + parseExportSpecifiers(); + parseExportFrom(); + } else { + // export type Foo = Bar; + flowParseTypeAlias(); + } + popTypeContext(oldIsType); + } else if (isContextual(ContextualKeyword._opaque)) { + const oldIsType = 
pushTypeContext(1); + next(); + // export opaque type Foo = Bar; + flowParseOpaqueType(false); + popTypeContext(oldIsType); + } else if (isContextual(ContextualKeyword._interface)) { + const oldIsType = pushTypeContext(1); + next(); + flowParseInterface(); + popTypeContext(oldIsType); + } else { + parseStatement(true); + } +} + +export function flowShouldParseExportStar() { + return match(tt.star) || (isContextual(ContextualKeyword._type) && lookaheadType() === tt.star); +} + +export function flowParseExportStar() { + if (eatContextual(ContextualKeyword._type)) { + const oldIsType = pushTypeContext(2); + baseParseExportStar(); + popTypeContext(oldIsType); + } else { + baseParseExportStar(); + } +} + +// parse a the super class type parameters and implements +export function flowAfterParseClassSuper(hasSuper) { + if (hasSuper && match(tt.lessThan)) { + flowParseTypeParameterInstantiation(); + } + if (isContextual(ContextualKeyword._implements)) { + const oldIsType = pushTypeContext(0); + next(); + state.tokens[state.tokens.length - 1].type = tt._implements; + do { + flowParseRestrictedIdentifier(); + if (match(tt.lessThan)) { + flowParseTypeParameterInstantiation(); + } + } while (eat(tt.comma)); + popTypeContext(oldIsType); + } +} + +// parse type parameters for object method shorthand +export function flowStartParseObjPropValue() { + // method shorthand + if (match(tt.lessThan)) { + flowParseTypeParameterDeclaration(); + if (!match(tt.parenL)) unexpected(); + } +} + +export function flowParseAssignableListItemTypes() { + const oldIsType = pushTypeContext(0); + eat(tt.question); + if (match(tt.colon)) { + flowParseTypeAnnotation(); + } + popTypeContext(oldIsType); +} + +// parse typeof and type imports +export function flowStartParseImportSpecifiers() { + if (match(tt._typeof) || isContextual(ContextualKeyword._type)) { + const lh = lookaheadTypeAndKeyword(); + if (isMaybeDefaultImport(lh) || lh.type === tt.braceL || lh.type === tt.star) { + next(); + } + } +} + +// parse import-type/typeof shorthand +export function flowParseImportSpecifier() { + const isTypeKeyword = + state.contextualKeyword === ContextualKeyword._type || state.type === tt._typeof; + if (isTypeKeyword) { + next(); + } else { + parseIdentifier(); + } + + if (isContextual(ContextualKeyword._as) && !isLookaheadContextual(ContextualKeyword._as)) { + parseIdentifier(); + if (isTypeKeyword && !match(tt.name) && !(state.type & TokenType.IS_KEYWORD)) { + // `import {type as ,` or `import {type as }` + } else { + // `import {type as foo` + parseIdentifier(); + } + } else { + if (isTypeKeyword && (match(tt.name) || !!(state.type & TokenType.IS_KEYWORD))) { + // `import {type foo` + parseIdentifier(); + } + if (eatContextual(ContextualKeyword._as)) { + parseIdentifier(); + } + } +} + +// parse function type parameters - function foo() {} +export function flowStartParseFunctionParams() { + // Originally this checked if the method is a getter/setter, but if it was, we'd crash soon + // anyway, so don't try to propagate that information. 
+ if (match(tt.lessThan)) { + const oldIsType = pushTypeContext(0); + flowParseTypeParameterDeclaration(); + popTypeContext(oldIsType); + } +} + +// parse flow type annotations on variable declarator heads - let foo: string = bar +export function flowAfterParseVarHead() { + if (match(tt.colon)) { + flowParseTypeAnnotation(); + } +} + +// parse the return type of an async arrow function - let foo = (async (): number => {}); +export function flowStartParseAsyncArrowFromCallExpression() { + if (match(tt.colon)) { + const oldNoAnonFunctionType = state.noAnonFunctionType; + state.noAnonFunctionType = true; + flowParseTypeAnnotation(); + state.noAnonFunctionType = oldNoAnonFunctionType; + } +} + +// We need to support type parameter declarations for arrow functions. This +// is tricky. There are three situations we need to handle +// +// 1. This is either JSX or an arrow function. We'll try JSX first. If that +// fails, we'll try an arrow function. If that fails, we'll throw the JSX +// error. +// 2. This is an arrow function. We'll parse the type parameter declaration, +// parse the rest, make sure the rest is an arrow function, and go from +// there +// 3. This is neither. Just call the super method +export function flowParseMaybeAssign(noIn, isWithinParens) { + if (match(tt.lessThan)) { + const snapshot = state.snapshot(); + let wasArrow = baseParseMaybeAssign(noIn, isWithinParens); + if (state.error) { + state.restoreFromSnapshot(snapshot); + state.type = tt.typeParameterStart; + } else { + return wasArrow; + } + + const oldIsType = pushTypeContext(0); + flowParseTypeParameterDeclaration(); + popTypeContext(oldIsType); + wasArrow = baseParseMaybeAssign(noIn, isWithinParens); + if (wasArrow) { + return true; + } + unexpected(); + } + + return baseParseMaybeAssign(noIn, isWithinParens); +} + +// handle return types for arrow functions +export function flowParseArrow() { + if (match(tt.colon)) { + const oldIsType = pushTypeContext(0); + const snapshot = state.snapshot(); + + const oldNoAnonFunctionType = state.noAnonFunctionType; + state.noAnonFunctionType = true; + flowParseTypeAndPredicateInitialiser(); + state.noAnonFunctionType = oldNoAnonFunctionType; + + if (canInsertSemicolon()) unexpected(); + if (!match(tt.arrow)) unexpected(); + + if (state.error) { + state.restoreFromSnapshot(snapshot); + } + popTypeContext(oldIsType); + } + return eat(tt.arrow); +} + +export function flowParseSubscripts(startTokenIndex, noCalls = false) { + if ( + state.tokens[state.tokens.length - 1].contextualKeyword === ContextualKeyword._async && + match(tt.lessThan) + ) { + const snapshot = state.snapshot(); + const wasArrow = parseAsyncArrowWithTypeParameters(); + if (wasArrow && !state.error) { + return; + } + state.restoreFromSnapshot(snapshot); + } + + baseParseSubscripts(startTokenIndex, noCalls); +} + +// Returns true if there was an arrow function here. 
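// A brief illustration of the `<` ambiguity handled by flowParseMaybeAssign above
// (identifiers are hypothetical): the same leading token can open JSX or a generic
// arrow function, so the parser tries one interpretation and falls back via snapshots.
const looksLikeJSX = "<T>hello</T>";          // parsed as a JSX element
const genericArrow = "<T>(x: T): T => x";     // parsed as a type-parameterized arrow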
+function parseAsyncArrowWithTypeParameters() { + state.scopeDepth++; + const startTokenIndex = state.tokens.length; + parseFunctionParams(); + if (!parseArrow()) { + return false; + } + parseArrowExpression(startTokenIndex); + return true; +} + +function flowParseEnumDeclaration() { + expectContextual(ContextualKeyword._enum); + state.tokens[state.tokens.length - 1].type = tt._enum; + parseIdentifier(); + flowParseEnumBody(); +} + +function flowParseEnumBody() { + if (eatContextual(ContextualKeyword._of)) { + next(); + } + expect(tt.braceL); + flowParseEnumMembers(); + expect(tt.braceR); +} + +function flowParseEnumMembers() { + while (!match(tt.braceR) && !state.error) { + if (eat(tt.ellipsis)) { + break; + } + flowParseEnumMember(); + if (!match(tt.braceR)) { + expect(tt.comma); + } + } +} + +function flowParseEnumMember() { + parseIdentifier(); + if (eat(tt.eq)) { + // Flow enum values are always just one token (a string, number, or boolean literal). + next(); + } +} diff --git a/node_modules/sucrase/dist/esm/parser/plugins/jsx/index.js b/node_modules/sucrase/dist/esm/parser/plugins/jsx/index.js new file mode 100644 index 0000000..83f3983 --- /dev/null +++ b/node_modules/sucrase/dist/esm/parser/plugins/jsx/index.js @@ -0,0 +1,367 @@ +import { + eat, + finishToken, + getTokenFromCode, + IdentifierRole, + JSXRole, + match, + next, + skipSpace, + Token, +} from "../../tokenizer/index"; +import {TokenType as tt} from "../../tokenizer/types"; +import {input, isTypeScriptEnabled, state} from "../../traverser/base"; +import {parseExpression, parseMaybeAssign} from "../../traverser/expression"; +import {expect, unexpected} from "../../traverser/util"; +import {charCodes} from "../../util/charcodes"; +import {IS_IDENTIFIER_CHAR, IS_IDENTIFIER_START} from "../../util/identifier"; +import {tsTryParseJSXTypeArgument} from "../typescript"; + +/** + * Read token with JSX contents. + * + * In addition to detecting jsxTagStart and also regular tokens that might be + * part of an expression, this code detects the start and end of text ranges + * within JSX children. In order to properly count the number of children, we + * distinguish jsxText from jsxEmptyText, which is a text range that simplifies + * to the empty string after JSX whitespace trimming. + * + * It turns out that a JSX text range will simplify to the empty string if and + * only if both of these conditions hold: + * - The range consists entirely of whitespace characters (only counting space, + * tab, \r, and \n). + * - The range has at least one newline. + * This can be proven by analyzing any implementation of whitespace trimming, + * e.g. formatJSXTextLiteral in Sucrase or cleanJSXElementLiteralChild in Babel. + */ +function jsxReadToken() { + let sawNewline = false; + let sawNonWhitespace = false; + while (true) { + if (state.pos >= input.length) { + unexpected("Unterminated JSX contents"); + return; + } + + const ch = input.charCodeAt(state.pos); + if (ch === charCodes.lessThan || ch === charCodes.leftCurlyBrace) { + if (state.pos === state.start) { + if (ch === charCodes.lessThan) { + state.pos++; + finishToken(tt.jsxTagStart); + return; + } + getTokenFromCode(ch); + return; + } + if (sawNewline && !sawNonWhitespace) { + finishToken(tt.jsxEmptyText); + } else { + finishToken(tt.jsxText); + } + return; + } + + // This is part of JSX text. 
+ if (ch === charCodes.lineFeed) { + sawNewline = true; + } else if (ch !== charCodes.space && ch !== charCodes.carriageReturn && ch !== charCodes.tab) { + sawNonWhitespace = true; + } + state.pos++; + } +} + +function jsxReadString(quote) { + state.pos++; + for (;;) { + if (state.pos >= input.length) { + unexpected("Unterminated string constant"); + return; + } + + const ch = input.charCodeAt(state.pos); + if (ch === quote) { + state.pos++; + break; + } + state.pos++; + } + finishToken(tt.string); +} + +// Read a JSX identifier (valid tag or attribute name). +// +// Optimized version since JSX identifiers can't contain +// escape characters and so can be read as single slice. +// Also assumes that first character was already checked +// by isIdentifierStart in readToken. + +function jsxReadWord() { + let ch; + do { + if (state.pos > input.length) { + unexpected("Unexpectedly reached the end of input."); + return; + } + ch = input.charCodeAt(++state.pos); + } while (IS_IDENTIFIER_CHAR[ch] || ch === charCodes.dash); + finishToken(tt.jsxName); +} + +// Parse next token as JSX identifier +function jsxParseIdentifier() { + nextJSXTagToken(); +} + +// Parse namespaced identifier. +function jsxParseNamespacedName(identifierRole) { + jsxParseIdentifier(); + if (!eat(tt.colon)) { + // Plain identifier, so this is an access. + state.tokens[state.tokens.length - 1].identifierRole = identifierRole; + return; + } + // Process the second half of the namespaced name. + jsxParseIdentifier(); +} + +// Parses element name in any form - namespaced, member +// or single identifier. +function jsxParseElementName() { + const firstTokenIndex = state.tokens.length; + jsxParseNamespacedName(IdentifierRole.Access); + let hadDot = false; + while (match(tt.dot)) { + hadDot = true; + nextJSXTagToken(); + jsxParseIdentifier(); + } + // For tags like
with a lowercase letter and no dots, the name is + // actually *not* an identifier access, since it's referring to a built-in + // tag name. Remove the identifier role in this case so that it's not + // accidentally transformed by the imports transform when preserving JSX. + if (!hadDot) { + const firstToken = state.tokens[firstTokenIndex]; + const firstChar = input.charCodeAt(firstToken.start); + if (firstChar >= charCodes.lowercaseA && firstChar <= charCodes.lowercaseZ) { + firstToken.identifierRole = null; + } + } +} + +// Parses any type of JSX attribute value. +function jsxParseAttributeValue() { + switch (state.type) { + case tt.braceL: + next(); + parseExpression(); + nextJSXTagToken(); + return; + + case tt.jsxTagStart: + jsxParseElement(); + nextJSXTagToken(); + return; + + case tt.string: + nextJSXTagToken(); + return; + + default: + unexpected("JSX value should be either an expression or a quoted JSX text"); + } +} + +// Parse JSX spread child, after already processing the { +// Does not parse the closing } +function jsxParseSpreadChild() { + expect(tt.ellipsis); + parseExpression(); +} + +// Parses JSX opening tag starting after "<". +// Returns true if the tag was self-closing. +// Does not parse the last token. +function jsxParseOpeningElement(initialTokenIndex) { + if (match(tt.jsxTagEnd)) { + // This is an open-fragment. + return false; + } + jsxParseElementName(); + if (isTypeScriptEnabled) { + tsTryParseJSXTypeArgument(); + } + let hasSeenPropSpread = false; + while (!match(tt.slash) && !match(tt.jsxTagEnd) && !state.error) { + if (eat(tt.braceL)) { + hasSeenPropSpread = true; + expect(tt.ellipsis); + parseMaybeAssign(); + // } + nextJSXTagToken(); + continue; + } + if ( + hasSeenPropSpread && + state.end - state.start === 3 && + input.charCodeAt(state.start) === charCodes.lowercaseK && + input.charCodeAt(state.start + 1) === charCodes.lowercaseE && + input.charCodeAt(state.start + 2) === charCodes.lowercaseY + ) { + state.tokens[initialTokenIndex].jsxRole = JSXRole.KeyAfterPropSpread; + } + jsxParseNamespacedName(IdentifierRole.ObjectKey); + if (match(tt.eq)) { + nextJSXTagToken(); + jsxParseAttributeValue(); + } + } + const isSelfClosing = match(tt.slash); + if (isSelfClosing) { + // / + nextJSXTagToken(); + } + return isSelfClosing; +} + +// Parses JSX closing tag starting after " 1) { + state.tokens[initialTokenIndex].jsxRole = JSXRole.StaticChildren; + } + } + return; + } + numExplicitChildren++; + jsxParseElementAt(); + nextJSXExprToken(); + break; + + case tt.jsxText: + numExplicitChildren++; + nextJSXExprToken(); + break; + + case tt.jsxEmptyText: + nextJSXExprToken(); + break; + + case tt.braceL: + next(); + if (match(tt.ellipsis)) { + jsxParseSpreadChild(); + nextJSXExprToken(); + // Spread children are a mechanism to explicitly mark children as + // static, so count it as 2 children to satisfy the "more than one + // child" condition. + numExplicitChildren += 2; + } else { + // If we see {}, this is an empty pseudo-expression that doesn't + // count as a child. + if (!match(tt.braceR)) { + numExplicitChildren++; + parseExpression(); + } + nextJSXExprToken(); + } + + break; + + // istanbul ignore next - should never happen + default: + unexpected(); + return; + } + } + } +} + +// Parses entire JSX element from current position. +// Does not parse the last token. 
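// An illustration of the whitespace-trimming rule documented at the top of this file:
// a text range made only of whitespace that includes a newline becomes jsxEmptyText
// and produces no child, while other whitespace is kept as a text child.
import {transform} from "sucrase";

const keepsSpaceChild = transform("const a = <div> </div>;", {transforms: ["jsx"]}).code;
const dropsEmptyText = transform("const b = <div>\n  \n</div>;", {transforms: ["jsx"]}).code;
// keepsSpaceChild should pass a " " child to the JSX factory; dropsEmptyText should not.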
+export function jsxParseElement() { + nextJSXTagToken(); + jsxParseElementAt(); +} + +// ================================== +// Overrides +// ================================== + +export function nextJSXTagToken() { + state.tokens.push(new Token()); + skipSpace(); + state.start = state.pos; + const code = input.charCodeAt(state.pos); + + if (IS_IDENTIFIER_START[code]) { + jsxReadWord(); + } else if (code === charCodes.quotationMark || code === charCodes.apostrophe) { + jsxReadString(code); + } else { + // The following tokens are just one character each. + ++state.pos; + switch (code) { + case charCodes.greaterThan: + finishToken(tt.jsxTagEnd); + break; + case charCodes.lessThan: + finishToken(tt.jsxTagStart); + break; + case charCodes.slash: + finishToken(tt.slash); + break; + case charCodes.equalsTo: + finishToken(tt.eq); + break; + case charCodes.leftCurlyBrace: + finishToken(tt.braceL); + break; + case charCodes.dot: + finishToken(tt.dot); + break; + case charCodes.colon: + finishToken(tt.colon); + break; + default: + unexpected(); + } + } +} + +function nextJSXExprToken() { + state.tokens.push(new Token()); + state.start = state.pos; + jsxReadToken(); +} diff --git a/node_modules/sucrase/dist/esm/parser/plugins/jsx/xhtml.js b/node_modules/sucrase/dist/esm/parser/plugins/jsx/xhtml.js new file mode 100644 index 0000000..c6a0741 --- /dev/null +++ b/node_modules/sucrase/dist/esm/parser/plugins/jsx/xhtml.js @@ -0,0 +1,256 @@ +// Use a Map rather than object to avoid unexpected __proto__ access. +export default new Map([ + ["quot", "\u0022"], + ["amp", "&"], + ["apos", "\u0027"], + ["lt", "<"], + ["gt", ">"], + ["nbsp", "\u00A0"], + ["iexcl", "\u00A1"], + ["cent", "\u00A2"], + ["pound", "\u00A3"], + ["curren", "\u00A4"], + ["yen", "\u00A5"], + ["brvbar", "\u00A6"], + ["sect", "\u00A7"], + ["uml", "\u00A8"], + ["copy", "\u00A9"], + ["ordf", "\u00AA"], + ["laquo", "\u00AB"], + ["not", "\u00AC"], + ["shy", "\u00AD"], + ["reg", "\u00AE"], + ["macr", "\u00AF"], + ["deg", "\u00B0"], + ["plusmn", "\u00B1"], + ["sup2", "\u00B2"], + ["sup3", "\u00B3"], + ["acute", "\u00B4"], + ["micro", "\u00B5"], + ["para", "\u00B6"], + ["middot", "\u00B7"], + ["cedil", "\u00B8"], + ["sup1", "\u00B9"], + ["ordm", "\u00BA"], + ["raquo", "\u00BB"], + ["frac14", "\u00BC"], + ["frac12", "\u00BD"], + ["frac34", "\u00BE"], + ["iquest", "\u00BF"], + ["Agrave", "\u00C0"], + ["Aacute", "\u00C1"], + ["Acirc", "\u00C2"], + ["Atilde", "\u00C3"], + ["Auml", "\u00C4"], + ["Aring", "\u00C5"], + ["AElig", "\u00C6"], + ["Ccedil", "\u00C7"], + ["Egrave", "\u00C8"], + ["Eacute", "\u00C9"], + ["Ecirc", "\u00CA"], + ["Euml", "\u00CB"], + ["Igrave", "\u00CC"], + ["Iacute", "\u00CD"], + ["Icirc", "\u00CE"], + ["Iuml", "\u00CF"], + ["ETH", "\u00D0"], + ["Ntilde", "\u00D1"], + ["Ograve", "\u00D2"], + ["Oacute", "\u00D3"], + ["Ocirc", "\u00D4"], + ["Otilde", "\u00D5"], + ["Ouml", "\u00D6"], + ["times", "\u00D7"], + ["Oslash", "\u00D8"], + ["Ugrave", "\u00D9"], + ["Uacute", "\u00DA"], + ["Ucirc", "\u00DB"], + ["Uuml", "\u00DC"], + ["Yacute", "\u00DD"], + ["THORN", "\u00DE"], + ["szlig", "\u00DF"], + ["agrave", "\u00E0"], + ["aacute", "\u00E1"], + ["acirc", "\u00E2"], + ["atilde", "\u00E3"], + ["auml", "\u00E4"], + ["aring", "\u00E5"], + ["aelig", "\u00E6"], + ["ccedil", "\u00E7"], + ["egrave", "\u00E8"], + ["eacute", "\u00E9"], + ["ecirc", "\u00EA"], + ["euml", "\u00EB"], + ["igrave", "\u00EC"], + ["iacute", "\u00ED"], + ["icirc", "\u00EE"], + ["iuml", "\u00EF"], + ["eth", "\u00F0"], + ["ntilde", "\u00F1"], + ["ograve", "\u00F2"], + 
["oacute", "\u00F3"], + ["ocirc", "\u00F4"], + ["otilde", "\u00F5"], + ["ouml", "\u00F6"], + ["divide", "\u00F7"], + ["oslash", "\u00F8"], + ["ugrave", "\u00F9"], + ["uacute", "\u00FA"], + ["ucirc", "\u00FB"], + ["uuml", "\u00FC"], + ["yacute", "\u00FD"], + ["thorn", "\u00FE"], + ["yuml", "\u00FF"], + ["OElig", "\u0152"], + ["oelig", "\u0153"], + ["Scaron", "\u0160"], + ["scaron", "\u0161"], + ["Yuml", "\u0178"], + ["fnof", "\u0192"], + ["circ", "\u02C6"], + ["tilde", "\u02DC"], + ["Alpha", "\u0391"], + ["Beta", "\u0392"], + ["Gamma", "\u0393"], + ["Delta", "\u0394"], + ["Epsilon", "\u0395"], + ["Zeta", "\u0396"], + ["Eta", "\u0397"], + ["Theta", "\u0398"], + ["Iota", "\u0399"], + ["Kappa", "\u039A"], + ["Lambda", "\u039B"], + ["Mu", "\u039C"], + ["Nu", "\u039D"], + ["Xi", "\u039E"], + ["Omicron", "\u039F"], + ["Pi", "\u03A0"], + ["Rho", "\u03A1"], + ["Sigma", "\u03A3"], + ["Tau", "\u03A4"], + ["Upsilon", "\u03A5"], + ["Phi", "\u03A6"], + ["Chi", "\u03A7"], + ["Psi", "\u03A8"], + ["Omega", "\u03A9"], + ["alpha", "\u03B1"], + ["beta", "\u03B2"], + ["gamma", "\u03B3"], + ["delta", "\u03B4"], + ["epsilon", "\u03B5"], + ["zeta", "\u03B6"], + ["eta", "\u03B7"], + ["theta", "\u03B8"], + ["iota", "\u03B9"], + ["kappa", "\u03BA"], + ["lambda", "\u03BB"], + ["mu", "\u03BC"], + ["nu", "\u03BD"], + ["xi", "\u03BE"], + ["omicron", "\u03BF"], + ["pi", "\u03C0"], + ["rho", "\u03C1"], + ["sigmaf", "\u03C2"], + ["sigma", "\u03C3"], + ["tau", "\u03C4"], + ["upsilon", "\u03C5"], + ["phi", "\u03C6"], + ["chi", "\u03C7"], + ["psi", "\u03C8"], + ["omega", "\u03C9"], + ["thetasym", "\u03D1"], + ["upsih", "\u03D2"], + ["piv", "\u03D6"], + ["ensp", "\u2002"], + ["emsp", "\u2003"], + ["thinsp", "\u2009"], + ["zwnj", "\u200C"], + ["zwj", "\u200D"], + ["lrm", "\u200E"], + ["rlm", "\u200F"], + ["ndash", "\u2013"], + ["mdash", "\u2014"], + ["lsquo", "\u2018"], + ["rsquo", "\u2019"], + ["sbquo", "\u201A"], + ["ldquo", "\u201C"], + ["rdquo", "\u201D"], + ["bdquo", "\u201E"], + ["dagger", "\u2020"], + ["Dagger", "\u2021"], + ["bull", "\u2022"], + ["hellip", "\u2026"], + ["permil", "\u2030"], + ["prime", "\u2032"], + ["Prime", "\u2033"], + ["lsaquo", "\u2039"], + ["rsaquo", "\u203A"], + ["oline", "\u203E"], + ["frasl", "\u2044"], + ["euro", "\u20AC"], + ["image", "\u2111"], + ["weierp", "\u2118"], + ["real", "\u211C"], + ["trade", "\u2122"], + ["alefsym", "\u2135"], + ["larr", "\u2190"], + ["uarr", "\u2191"], + ["rarr", "\u2192"], + ["darr", "\u2193"], + ["harr", "\u2194"], + ["crarr", "\u21B5"], + ["lArr", "\u21D0"], + ["uArr", "\u21D1"], + ["rArr", "\u21D2"], + ["dArr", "\u21D3"], + ["hArr", "\u21D4"], + ["forall", "\u2200"], + ["part", "\u2202"], + ["exist", "\u2203"], + ["empty", "\u2205"], + ["nabla", "\u2207"], + ["isin", "\u2208"], + ["notin", "\u2209"], + ["ni", "\u220B"], + ["prod", "\u220F"], + ["sum", "\u2211"], + ["minus", "\u2212"], + ["lowast", "\u2217"], + ["radic", "\u221A"], + ["prop", "\u221D"], + ["infin", "\u221E"], + ["ang", "\u2220"], + ["and", "\u2227"], + ["or", "\u2228"], + ["cap", "\u2229"], + ["cup", "\u222A"], + ["int", "\u222B"], + ["there4", "\u2234"], + ["sim", "\u223C"], + ["cong", "\u2245"], + ["asymp", "\u2248"], + ["ne", "\u2260"], + ["equiv", "\u2261"], + ["le", "\u2264"], + ["ge", "\u2265"], + ["sub", "\u2282"], + ["sup", "\u2283"], + ["nsub", "\u2284"], + ["sube", "\u2286"], + ["supe", "\u2287"], + ["oplus", "\u2295"], + ["otimes", "\u2297"], + ["perp", "\u22A5"], + ["sdot", "\u22C5"], + ["lceil", "\u2308"], + ["rceil", "\u2309"], + ["lfloor", "\u230A"], + ["rfloor", "\u230B"], + 
["lang", "\u2329"], + ["rang", "\u232A"], + ["loz", "\u25CA"], + ["spades", "\u2660"], + ["clubs", "\u2663"], + ["hearts", "\u2665"], + ["diams", "\u2666"], +]); diff --git a/node_modules/sucrase/dist/esm/parser/plugins/types.js b/node_modules/sucrase/dist/esm/parser/plugins/types.js new file mode 100644 index 0000000..78e4af4 --- /dev/null +++ b/node_modules/sucrase/dist/esm/parser/plugins/types.js @@ -0,0 +1,37 @@ +import {eatTypeToken, lookaheadType, match} from "../tokenizer/index"; +import {TokenType as tt} from "../tokenizer/types"; +import {isFlowEnabled, isTypeScriptEnabled} from "../traverser/base"; +import {baseParseConditional} from "../traverser/expression"; +import {flowParseTypeAnnotation} from "./flow"; +import {tsParseTypeAnnotation} from "./typescript"; + +/** + * Common parser code for TypeScript and Flow. + */ + +// An apparent conditional expression could actually be an optional parameter in an arrow function. +export function typedParseConditional(noIn) { + // If we see ?:, this can't possibly be a valid conditional. typedParseParenItem will be called + // later to finish off the arrow parameter. We also need to handle bare ? tokens for optional + // parameters without type annotations, i.e. ?, and ?) . + if (match(tt.question)) { + const nextType = lookaheadType(); + if (nextType === tt.colon || nextType === tt.comma || nextType === tt.parenR) { + return; + } + } + baseParseConditional(noIn); +} + +// Note: These "type casts" are *not* valid TS expressions. +// But we parse them here and change them when completing the arrow function. +export function typedParseParenItem() { + eatTypeToken(tt.question); + if (match(tt.colon)) { + if (isTypeScriptEnabled) { + tsParseTypeAnnotation(); + } else if (isFlowEnabled) { + flowParseTypeAnnotation(); + } + } +} diff --git a/node_modules/sucrase/dist/esm/parser/plugins/typescript.js b/node_modules/sucrase/dist/esm/parser/plugins/typescript.js new file mode 100644 index 0000000..ca873c0 --- /dev/null +++ b/node_modules/sucrase/dist/esm/parser/plugins/typescript.js @@ -0,0 +1,1616 @@ +import { + eat, + finishToken, + IdentifierRole, + lookaheadType, + lookaheadTypeAndKeyword, + match, + next, + nextTemplateToken, + popTypeContext, + pushTypeContext, +} from "../tokenizer/index"; +import {ContextualKeyword} from "../tokenizer/keywords"; +import {TokenType, TokenType as tt} from "../tokenizer/types"; +import {isJSXEnabled, state} from "../traverser/base"; +import { + atPossibleAsync, + baseParseMaybeAssign, + baseParseSubscript, + parseCallExpressionArguments, + parseExprAtom, + parseExpression, + parseFunctionBody, + parseIdentifier, + parseLiteral, + parseMaybeAssign, + parseMaybeUnary, + parsePropertyName, + parseTemplate, + +} from "../traverser/expression"; +import {parseBindingIdentifier, parseBindingList, parseImportedIdentifier} from "../traverser/lval"; +import { + baseParseMaybeDecoratorArguments, + parseBlockBody, + parseClass, + parseFunction, + parseFunctionParams, + parseStatement, + parseVarStatement, +} from "../traverser/statement"; +import { + canInsertSemicolon, + eatContextual, + expect, + expectContextual, + hasPrecedingLineBreak, + isContextual, + isLineTerminator, + isLookaheadContextual, + semicolon, + unexpected, +} from "../traverser/util"; +import {nextJSXTagToken} from "./jsx"; + +function tsIsIdentifier() { + // TODO: actually a bit more complex in TypeScript, but shouldn't matter. 
+ // See https://github.com/Microsoft/TypeScript/issues/15008 + return match(tt.name); +} + +function isLiteralPropertyName() { + return ( + match(tt.name) || + Boolean(state.type & TokenType.IS_KEYWORD) || + match(tt.string) || + match(tt.num) || + match(tt.bigint) || + match(tt.decimal) + ); +} + +function tsNextTokenCanFollowModifier() { + // Note: TypeScript's implementation is much more complicated because + // more things are considered modifiers there. + // This implementation only handles modifiers not handled by babylon itself. And "static". + // TODO: Would be nice to avoid lookahead. Want a hasLineBreakUpNext() method... + const snapshot = state.snapshot(); + + next(); + const canFollowModifier = + (match(tt.bracketL) || + match(tt.braceL) || + match(tt.star) || + match(tt.ellipsis) || + match(tt.hash) || + isLiteralPropertyName()) && + !hasPrecedingLineBreak(); + + if (canFollowModifier) { + return true; + } else { + state.restoreFromSnapshot(snapshot); + return false; + } +} + +export function tsParseModifiers(allowedModifiers) { + while (true) { + const modifier = tsParseModifier(allowedModifiers); + if (modifier === null) { + break; + } + } +} + +/** Parses a modifier matching one the given modifier names. */ +export function tsParseModifier( + allowedModifiers, +) { + if (!match(tt.name)) { + return null; + } + + const modifier = state.contextualKeyword; + if (allowedModifiers.indexOf(modifier) !== -1 && tsNextTokenCanFollowModifier()) { + switch (modifier) { + case ContextualKeyword._readonly: + state.tokens[state.tokens.length - 1].type = tt._readonly; + break; + case ContextualKeyword._abstract: + state.tokens[state.tokens.length - 1].type = tt._abstract; + break; + case ContextualKeyword._static: + state.tokens[state.tokens.length - 1].type = tt._static; + break; + case ContextualKeyword._public: + state.tokens[state.tokens.length - 1].type = tt._public; + break; + case ContextualKeyword._private: + state.tokens[state.tokens.length - 1].type = tt._private; + break; + case ContextualKeyword._protected: + state.tokens[state.tokens.length - 1].type = tt._protected; + break; + case ContextualKeyword._override: + state.tokens[state.tokens.length - 1].type = tt._override; + break; + case ContextualKeyword._declare: + state.tokens[state.tokens.length - 1].type = tt._declare; + break; + default: + break; + } + return modifier; + } + return null; +} + +function tsParseEntityName() { + parseIdentifier(); + while (eat(tt.dot)) { + parseIdentifier(); + } +} + +function tsParseTypeReference() { + tsParseEntityName(); + if (!hasPrecedingLineBreak() && match(tt.lessThan)) { + tsParseTypeArguments(); + } +} + +function tsParseThisTypePredicate() { + next(); + tsParseTypeAnnotation(); +} + +function tsParseThisTypeNode() { + next(); +} + +function tsParseTypeQuery() { + expect(tt._typeof); + if (match(tt._import)) { + tsParseImportType(); + } else { + tsParseEntityName(); + } + if (!hasPrecedingLineBreak() && match(tt.lessThan)) { + tsParseTypeArguments(); + } +} + +function tsParseImportType() { + expect(tt._import); + expect(tt.parenL); + expect(tt.string); + expect(tt.parenR); + if (eat(tt.dot)) { + tsParseEntityName(); + } + if (match(tt.lessThan)) { + tsParseTypeArguments(); + } +} + +function tsParseTypeParameter() { + eat(tt._const); + const hadIn = eat(tt._in); + const hadOut = eatContextual(ContextualKeyword._out); + eat(tt._const); + if ((hadIn || hadOut) && !match(tt.name)) { + // The "in" or "out" keyword must have actually been the type parameter + // name, so set it as the 
name. + state.tokens[state.tokens.length - 1].type = tt.name; + } else { + parseIdentifier(); + } + + if (eat(tt._extends)) { + tsParseType(); + } + if (eat(tt.eq)) { + tsParseType(); + } +} + +export function tsTryParseTypeParameters() { + if (match(tt.lessThan)) { + tsParseTypeParameters(); + } +} + +function tsParseTypeParameters() { + const oldIsType = pushTypeContext(0); + if (match(tt.lessThan) || match(tt.typeParameterStart)) { + next(); + } else { + unexpected(); + } + + while (!eat(tt.greaterThan) && !state.error) { + tsParseTypeParameter(); + eat(tt.comma); + } + popTypeContext(oldIsType); +} + +// Note: In TypeScript implementation we must provide `yieldContext` and `awaitContext`, +// but here it's always false, because this is only used for types. +function tsFillSignature(returnToken) { + // Arrow fns *must* have return token (`=>`). Normal functions can omit it. + const returnTokenRequired = returnToken === tt.arrow; + tsTryParseTypeParameters(); + expect(tt.parenL); + // Create a scope even though we're doing type parsing so we don't accidentally + // treat params as top-level bindings. + state.scopeDepth++; + tsParseBindingListForSignature(false /* isBlockScope */); + state.scopeDepth--; + if (returnTokenRequired) { + tsParseTypeOrTypePredicateAnnotation(returnToken); + } else if (match(returnToken)) { + tsParseTypeOrTypePredicateAnnotation(returnToken); + } +} + +function tsParseBindingListForSignature(isBlockScope) { + parseBindingList(tt.parenR, isBlockScope); +} + +function tsParseTypeMemberSemicolon() { + if (!eat(tt.comma)) { + semicolon(); + } +} + +function tsParseSignatureMember() { + tsFillSignature(tt.colon); + tsParseTypeMemberSemicolon(); +} + +function tsIsUnambiguouslyIndexSignature() { + const snapshot = state.snapshot(); + next(); // Skip '{' + const isIndexSignature = eat(tt.name) && match(tt.colon); + state.restoreFromSnapshot(snapshot); + return isIndexSignature; +} + +function tsTryParseIndexSignature() { + if (!(match(tt.bracketL) && tsIsUnambiguouslyIndexSignature())) { + return false; + } + + const oldIsType = pushTypeContext(0); + + expect(tt.bracketL); + parseIdentifier(); + tsParseTypeAnnotation(); + expect(tt.bracketR); + + tsTryParseTypeAnnotation(); + tsParseTypeMemberSemicolon(); + + popTypeContext(oldIsType); + return true; +} + +function tsParsePropertyOrMethodSignature(isReadonly) { + eat(tt.question); + + if (!isReadonly && (match(tt.parenL) || match(tt.lessThan))) { + tsFillSignature(tt.colon); + tsParseTypeMemberSemicolon(); + } else { + tsTryParseTypeAnnotation(); + tsParseTypeMemberSemicolon(); + } +} + +function tsParseTypeMember() { + if (match(tt.parenL) || match(tt.lessThan)) { + // call signature + tsParseSignatureMember(); + return; + } + if (match(tt._new)) { + next(); + if (match(tt.parenL) || match(tt.lessThan)) { + // constructor signature + tsParseSignatureMember(); + } else { + tsParsePropertyOrMethodSignature(false); + } + return; + } + const readonly = !!tsParseModifier([ContextualKeyword._readonly]); + + const found = tsTryParseIndexSignature(); + if (found) { + return; + } + if ( + (isContextual(ContextualKeyword._get) || isContextual(ContextualKeyword._set)) && + tsNextTokenCanFollowModifier() + ) { + // This is a getter/setter on a type. The tsNextTokenCanFollowModifier + // function already called next() for us, so continue parsing the name. + } + parsePropertyName(-1 /* Types don't need context IDs. 
*/); + tsParsePropertyOrMethodSignature(readonly); +} + +function tsParseTypeLiteral() { + tsParseObjectTypeMembers(); +} + +function tsParseObjectTypeMembers() { + expect(tt.braceL); + while (!eat(tt.braceR) && !state.error) { + tsParseTypeMember(); + } +} + +function tsLookaheadIsStartOfMappedType() { + const snapshot = state.snapshot(); + const isStartOfMappedType = tsIsStartOfMappedType(); + state.restoreFromSnapshot(snapshot); + return isStartOfMappedType; +} + +function tsIsStartOfMappedType() { + next(); + if (eat(tt.plus) || eat(tt.minus)) { + return isContextual(ContextualKeyword._readonly); + } + if (isContextual(ContextualKeyword._readonly)) { + next(); + } + if (!match(tt.bracketL)) { + return false; + } + next(); + if (!tsIsIdentifier()) { + return false; + } + next(); + return match(tt._in); +} + +function tsParseMappedTypeParameter() { + parseIdentifier(); + expect(tt._in); + tsParseType(); +} + +function tsParseMappedType() { + expect(tt.braceL); + if (match(tt.plus) || match(tt.minus)) { + next(); + expectContextual(ContextualKeyword._readonly); + } else { + eatContextual(ContextualKeyword._readonly); + } + expect(tt.bracketL); + tsParseMappedTypeParameter(); + if (eatContextual(ContextualKeyword._as)) { + tsParseType(); + } + expect(tt.bracketR); + if (match(tt.plus) || match(tt.minus)) { + next(); + expect(tt.question); + } else { + eat(tt.question); + } + tsTryParseType(); + semicolon(); + expect(tt.braceR); +} + +function tsParseTupleType() { + expect(tt.bracketL); + while (!eat(tt.bracketR) && !state.error) { + // Do not validate presence of either none or only labeled elements + tsParseTupleElementType(); + eat(tt.comma); + } +} + +function tsParseTupleElementType() { + // parses `...TsType[]` + if (eat(tt.ellipsis)) { + tsParseType(); + } else { + // parses `TsType?` + tsParseType(); + eat(tt.question); + } + + // The type we parsed above was actually a label + if (eat(tt.colon)) { + // Labeled tuple types must affix the label with `...` or `?`, so no need to handle those here + tsParseType(); + } +} + +function tsParseParenthesizedType() { + expect(tt.parenL); + tsParseType(); + expect(tt.parenR); +} + +function tsParseTemplateLiteralType() { + // Finish `, read quasi + nextTemplateToken(); + // Finish quasi, read ${ + nextTemplateToken(); + while (!match(tt.backQuote) && !state.error) { + expect(tt.dollarBraceL); + tsParseType(); + // Finish }, read quasi + nextTemplateToken(); + // Finish quasi, read either ${ or ` + nextTemplateToken(); + } + next(); +} + +var FunctionType; (function (FunctionType) { + const TSFunctionType = 0; FunctionType[FunctionType["TSFunctionType"] = TSFunctionType] = "TSFunctionType"; + const TSConstructorType = TSFunctionType + 1; FunctionType[FunctionType["TSConstructorType"] = TSConstructorType] = "TSConstructorType"; + const TSAbstractConstructorType = TSConstructorType + 1; FunctionType[FunctionType["TSAbstractConstructorType"] = TSAbstractConstructorType] = "TSAbstractConstructorType"; +})(FunctionType || (FunctionType = {})); + +function tsParseFunctionOrConstructorType(type) { + if (type === FunctionType.TSAbstractConstructorType) { + expectContextual(ContextualKeyword._abstract); + } + if (type === FunctionType.TSConstructorType || type === FunctionType.TSAbstractConstructorType) { + expect(tt._new); + } + const oldInDisallowConditionalTypesContext = state.inDisallowConditionalTypesContext; + state.inDisallowConditionalTypesContext = false; + tsFillSignature(tt.arrow); + state.inDisallowConditionalTypesContext = 
oldInDisallowConditionalTypesContext; +} + +function tsParseNonArrayType() { + switch (state.type) { + case tt.name: + tsParseTypeReference(); + return; + case tt._void: + case tt._null: + next(); + return; + case tt.string: + case tt.num: + case tt.bigint: + case tt.decimal: + case tt._true: + case tt._false: + parseLiteral(); + return; + case tt.minus: + next(); + parseLiteral(); + return; + case tt._this: { + tsParseThisTypeNode(); + if (isContextual(ContextualKeyword._is) && !hasPrecedingLineBreak()) { + tsParseThisTypePredicate(); + } + return; + } + case tt._typeof: + tsParseTypeQuery(); + return; + case tt._import: + tsParseImportType(); + return; + case tt.braceL: + if (tsLookaheadIsStartOfMappedType()) { + tsParseMappedType(); + } else { + tsParseTypeLiteral(); + } + return; + case tt.bracketL: + tsParseTupleType(); + return; + case tt.parenL: + tsParseParenthesizedType(); + return; + case tt.backQuote: + tsParseTemplateLiteralType(); + return; + default: + if (state.type & TokenType.IS_KEYWORD) { + next(); + state.tokens[state.tokens.length - 1].type = tt.name; + return; + } + break; + } + + unexpected(); +} + +function tsParseArrayTypeOrHigher() { + tsParseNonArrayType(); + while (!hasPrecedingLineBreak() && eat(tt.bracketL)) { + if (!eat(tt.bracketR)) { + // If we hit ] immediately, this is an array type, otherwise it's an indexed access type. + tsParseType(); + expect(tt.bracketR); + } + } +} + +function tsParseInferType() { + expectContextual(ContextualKeyword._infer); + parseIdentifier(); + if (match(tt._extends)) { + // Infer type constraints introduce an ambiguity about whether the "extends" + // is a constraint for this infer type or is another conditional type. + const snapshot = state.snapshot(); + expect(tt._extends); + const oldInDisallowConditionalTypesContext = state.inDisallowConditionalTypesContext; + state.inDisallowConditionalTypesContext = true; + tsParseType(); + state.inDisallowConditionalTypesContext = oldInDisallowConditionalTypesContext; + if (state.error || (!state.inDisallowConditionalTypesContext && match(tt.question))) { + state.restoreFromSnapshot(snapshot); + } + } +} + +function tsParseTypeOperatorOrHigher() { + if ( + isContextual(ContextualKeyword._keyof) || + isContextual(ContextualKeyword._unique) || + isContextual(ContextualKeyword._readonly) + ) { + next(); + tsParseTypeOperatorOrHigher(); + } else if (isContextual(ContextualKeyword._infer)) { + tsParseInferType(); + } else { + const oldInDisallowConditionalTypesContext = state.inDisallowConditionalTypesContext; + state.inDisallowConditionalTypesContext = false; + tsParseArrayTypeOrHigher(); + state.inDisallowConditionalTypesContext = oldInDisallowConditionalTypesContext; + } +} + +function tsParseIntersectionTypeOrHigher() { + eat(tt.bitwiseAND); + tsParseTypeOperatorOrHigher(); + if (match(tt.bitwiseAND)) { + while (eat(tt.bitwiseAND)) { + tsParseTypeOperatorOrHigher(); + } + } +} + +function tsParseUnionTypeOrHigher() { + eat(tt.bitwiseOR); + tsParseIntersectionTypeOrHigher(); + if (match(tt.bitwiseOR)) { + while (eat(tt.bitwiseOR)) { + tsParseIntersectionTypeOrHigher(); + } + } +} + +function tsIsStartOfFunctionType() { + if (match(tt.lessThan)) { + return true; + } + return match(tt.parenL) && tsLookaheadIsUnambiguouslyStartOfFunctionType(); +} + +function tsSkipParameterStart() { + if (match(tt.name) || match(tt._this)) { + next(); + return true; + } + // If this is a possible array/object destructure, walk to the matching bracket/brace. 
+ // The next token after will tell us definitively whether this is a function param. + if (match(tt.braceL) || match(tt.bracketL)) { + let depth = 1; + next(); + while (depth > 0 && !state.error) { + if (match(tt.braceL) || match(tt.bracketL)) { + depth++; + } else if (match(tt.braceR) || match(tt.bracketR)) { + depth--; + } + next(); + } + return true; + } + return false; +} + +function tsLookaheadIsUnambiguouslyStartOfFunctionType() { + const snapshot = state.snapshot(); + const isUnambiguouslyStartOfFunctionType = tsIsUnambiguouslyStartOfFunctionType(); + state.restoreFromSnapshot(snapshot); + return isUnambiguouslyStartOfFunctionType; +} + +function tsIsUnambiguouslyStartOfFunctionType() { + next(); + if (match(tt.parenR) || match(tt.ellipsis)) { + // ( ) + // ( ... + return true; + } + if (tsSkipParameterStart()) { + if (match(tt.colon) || match(tt.comma) || match(tt.question) || match(tt.eq)) { + // ( xxx : + // ( xxx , + // ( xxx ? + // ( xxx = + return true; + } + if (match(tt.parenR)) { + next(); + if (match(tt.arrow)) { + // ( xxx ) => + return true; + } + } + } + return false; +} + +function tsParseTypeOrTypePredicateAnnotation(returnToken) { + const oldIsType = pushTypeContext(0); + expect(returnToken); + const finishedReturn = tsParseTypePredicateOrAssertsPrefix(); + if (!finishedReturn) { + tsParseType(); + } + popTypeContext(oldIsType); +} + +function tsTryParseTypeOrTypePredicateAnnotation() { + if (match(tt.colon)) { + tsParseTypeOrTypePredicateAnnotation(tt.colon); + } +} + +export function tsTryParseTypeAnnotation() { + if (match(tt.colon)) { + tsParseTypeAnnotation(); + } +} + +function tsTryParseType() { + if (eat(tt.colon)) { + tsParseType(); + } +} + +/** + * Detect a few special return syntax cases: `x is T`, `asserts x`, `asserts x is T`, + * `asserts this is T`. + * + * Returns true if we parsed the return type, false if there's still a type to be parsed. + */ +function tsParseTypePredicateOrAssertsPrefix() { + const snapshot = state.snapshot(); + if (isContextual(ContextualKeyword._asserts)) { + // Normally this is `asserts x is T`, but at this point, it might be `asserts is T` (a user- + // defined type guard on the `asserts` variable) or just a type called `asserts`. + next(); + if (eatContextual(ContextualKeyword._is)) { + // If we see `asserts is`, then this must be of the form `asserts is T`, since + // `asserts is is T` isn't valid. + tsParseType(); + return true; + } else if (tsIsIdentifier() || match(tt._this)) { + next(); + if (eatContextual(ContextualKeyword._is)) { + // If we see `is`, then this is `asserts x is T`. Otherwise, it's `asserts x`. + tsParseType(); + } + return true; + } else { + // Regular type, so bail out and start type parsing from scratch. + state.restoreFromSnapshot(snapshot); + return false; + } + } else if (tsIsIdentifier() || match(tt._this)) { + // This is a regular identifier, which may or may not have "is" after it. + next(); + if (isContextual(ContextualKeyword._is) && !hasPrecedingLineBreak()) { + next(); + tsParseType(); + return true; + } else { + // Regular type, so bail out and start type parsing from scratch. 
+ state.restoreFromSnapshot(snapshot); + return false; + } + } + return false; +} + +export function tsParseTypeAnnotation() { + const oldIsType = pushTypeContext(0); + expect(tt.colon); + tsParseType(); + popTypeContext(oldIsType); +} + +export function tsParseType() { + tsParseNonConditionalType(); + if (state.inDisallowConditionalTypesContext || hasPrecedingLineBreak() || !eat(tt._extends)) { + return; + } + // extends type + const oldInDisallowConditionalTypesContext = state.inDisallowConditionalTypesContext; + state.inDisallowConditionalTypesContext = true; + tsParseNonConditionalType(); + state.inDisallowConditionalTypesContext = oldInDisallowConditionalTypesContext; + + expect(tt.question); + // true type + tsParseType(); + expect(tt.colon); + // false type + tsParseType(); +} + +function isAbstractConstructorSignature() { + return isContextual(ContextualKeyword._abstract) && lookaheadType() === tt._new; +} + +export function tsParseNonConditionalType() { + if (tsIsStartOfFunctionType()) { + tsParseFunctionOrConstructorType(FunctionType.TSFunctionType); + return; + } + if (match(tt._new)) { + // As in `new () => Date` + tsParseFunctionOrConstructorType(FunctionType.TSConstructorType); + return; + } else if (isAbstractConstructorSignature()) { + // As in `abstract new () => Date` + tsParseFunctionOrConstructorType(FunctionType.TSAbstractConstructorType); + return; + } + tsParseUnionTypeOrHigher(); +} + +export function tsParseTypeAssertion() { + const oldIsType = pushTypeContext(1); + tsParseType(); + expect(tt.greaterThan); + popTypeContext(oldIsType); + parseMaybeUnary(); +} + +export function tsTryParseJSXTypeArgument() { + if (eat(tt.jsxTagStart)) { + state.tokens[state.tokens.length - 1].type = tt.typeParameterStart; + const oldIsType = pushTypeContext(1); + while (!match(tt.greaterThan) && !state.error) { + tsParseType(); + eat(tt.comma); + } + // Process >, but the one after needs to be parsed JSX-style. + nextJSXTagToken(); + popTypeContext(oldIsType); + } +} + +function tsParseHeritageClause() { + while (!match(tt.braceL) && !state.error) { + tsParseExpressionWithTypeArguments(); + eat(tt.comma); + } +} + +function tsParseExpressionWithTypeArguments() { + // Note: TS uses parseLeftHandSideExpressionOrHigher, + // then has grammar errors later if it's not an EntityName. + tsParseEntityName(); + if (match(tt.lessThan)) { + tsParseTypeArguments(); + } +} + +function tsParseInterfaceDeclaration() { + parseBindingIdentifier(false); + tsTryParseTypeParameters(); + if (eat(tt._extends)) { + tsParseHeritageClause(); + } + tsParseObjectTypeMembers(); +} + +function tsParseTypeAliasDeclaration() { + parseBindingIdentifier(false); + tsTryParseTypeParameters(); + expect(tt.eq); + tsParseType(); + semicolon(); +} + +function tsParseEnumMember() { + // Computed property names are grammar errors in an enum, so accept just string literal or identifier. 
+ if (match(tt.string)) { + parseLiteral(); + } else { + parseIdentifier(); + } + if (eat(tt.eq)) { + const eqIndex = state.tokens.length - 1; + parseMaybeAssign(); + state.tokens[eqIndex].rhsEndIndex = state.tokens.length; + } +} + +function tsParseEnumDeclaration() { + parseBindingIdentifier(false); + expect(tt.braceL); + while (!eat(tt.braceR) && !state.error) { + tsParseEnumMember(); + eat(tt.comma); + } +} + +function tsParseModuleBlock() { + expect(tt.braceL); + parseBlockBody(/* end */ tt.braceR); +} + +function tsParseModuleOrNamespaceDeclaration() { + parseBindingIdentifier(false); + if (eat(tt.dot)) { + tsParseModuleOrNamespaceDeclaration(); + } else { + tsParseModuleBlock(); + } +} + +function tsParseAmbientExternalModuleDeclaration() { + if (isContextual(ContextualKeyword._global)) { + parseIdentifier(); + } else if (match(tt.string)) { + parseExprAtom(); + } else { + unexpected(); + } + + if (match(tt.braceL)) { + tsParseModuleBlock(); + } else { + semicolon(); + } +} + +export function tsParseImportEqualsDeclaration() { + parseImportedIdentifier(); + expect(tt.eq); + tsParseModuleReference(); + semicolon(); +} + +function tsIsExternalModuleReference() { + return isContextual(ContextualKeyword._require) && lookaheadType() === tt.parenL; +} + +function tsParseModuleReference() { + if (tsIsExternalModuleReference()) { + tsParseExternalModuleReference(); + } else { + tsParseEntityName(); + } +} + +function tsParseExternalModuleReference() { + expectContextual(ContextualKeyword._require); + expect(tt.parenL); + if (!match(tt.string)) { + unexpected(); + } + parseLiteral(); + expect(tt.parenR); +} + +// Utilities + +// Returns true if a statement matched. +function tsTryParseDeclare() { + if (isLineTerminator()) { + return false; + } + switch (state.type) { + case tt._function: { + const oldIsType = pushTypeContext(1); + next(); + // We don't need to precisely get the function start here, since it's only used to mark + // the function as a type if it's bodiless, and it's already a type here. + const functionStart = state.start; + parseFunction(functionStart, /* isStatement */ true); + popTypeContext(oldIsType); + return true; + } + case tt._class: { + const oldIsType = pushTypeContext(1); + parseClass(/* isStatement */ true, /* optionalId */ false); + popTypeContext(oldIsType); + return true; + } + case tt._const: { + if (match(tt._const) && isLookaheadContextual(ContextualKeyword._enum)) { + const oldIsType = pushTypeContext(1); + // `const enum = 0;` not allowed because "enum" is a strict mode reserved word. + expect(tt._const); + expectContextual(ContextualKeyword._enum); + state.tokens[state.tokens.length - 1].type = tt._enum; + tsParseEnumDeclaration(); + popTypeContext(oldIsType); + return true; + } + } + // falls through + case tt._var: + case tt._let: { + const oldIsType = pushTypeContext(1); + parseVarStatement(state.type !== tt._var); + popTypeContext(oldIsType); + return true; + } + case tt.name: { + const oldIsType = pushTypeContext(1); + const contextualKeyword = state.contextualKeyword; + let matched = false; + if (contextualKeyword === ContextualKeyword._global) { + tsParseAmbientExternalModuleDeclaration(); + matched = true; + } else { + matched = tsParseDeclaration(contextualKeyword, /* isBeforeToken */ true); + } + popTypeContext(oldIsType); + return matched; + } + default: + return false; + } +} + +// Note: this won't be called unless the keyword is allowed in `shouldParseExportDeclaration`. +// Returns true if it matched a declaration. 
+function tsTryParseExportDeclaration() { + return tsParseDeclaration(state.contextualKeyword, /* isBeforeToken */ true); +} + +// Returns true if it matched a statement. +function tsParseExpressionStatement(contextualKeyword) { + switch (contextualKeyword) { + case ContextualKeyword._declare: { + const declareTokenIndex = state.tokens.length - 1; + const matched = tsTryParseDeclare(); + if (matched) { + state.tokens[declareTokenIndex].type = tt._declare; + return true; + } + break; + } + case ContextualKeyword._global: + // `global { }` (with no `declare`) may appear inside an ambient module declaration. + // Would like to use tsParseAmbientExternalModuleDeclaration here, but already ran past "global". + if (match(tt.braceL)) { + tsParseModuleBlock(); + return true; + } + break; + + default: + return tsParseDeclaration(contextualKeyword, /* isBeforeToken */ false); + } + return false; +} + +/** + * Common code for parsing a declaration. + * + * isBeforeToken indicates that the current parser state is at the contextual + * keyword (and that it is not yet emitted) rather than reading the token after + * it. When isBeforeToken is true, we may be preceded by an `export` token and + * should include that token in a type context we create, e.g. to handle + * `export interface` or `export type`. (This is a bit of a hack and should be + * cleaned up at some point.) + * + * Returns true if it matched a declaration. + */ +function tsParseDeclaration(contextualKeyword, isBeforeToken) { + switch (contextualKeyword) { + case ContextualKeyword._abstract: + if (tsCheckLineTerminator(isBeforeToken) && match(tt._class)) { + state.tokens[state.tokens.length - 1].type = tt._abstract; + parseClass(/* isStatement */ true, /* optionalId */ false); + return true; + } + break; + + case ContextualKeyword._enum: + if (tsCheckLineTerminator(isBeforeToken) && match(tt.name)) { + state.tokens[state.tokens.length - 1].type = tt._enum; + tsParseEnumDeclaration(); + return true; + } + break; + + case ContextualKeyword._interface: + if (tsCheckLineTerminator(isBeforeToken) && match(tt.name)) { + // `next` is true in "export" and "declare" contexts, so we want to remove that token + // as well. + const oldIsType = pushTypeContext(isBeforeToken ? 2 : 1); + tsParseInterfaceDeclaration(); + popTypeContext(oldIsType); + return true; + } + break; + + case ContextualKeyword._module: + if (tsCheckLineTerminator(isBeforeToken)) { + if (match(tt.string)) { + const oldIsType = pushTypeContext(isBeforeToken ? 2 : 1); + tsParseAmbientExternalModuleDeclaration(); + popTypeContext(oldIsType); + return true; + } else if (match(tt.name)) { + const oldIsType = pushTypeContext(isBeforeToken ? 2 : 1); + tsParseModuleOrNamespaceDeclaration(); + popTypeContext(oldIsType); + return true; + } + } + break; + + case ContextualKeyword._namespace: + if (tsCheckLineTerminator(isBeforeToken) && match(tt.name)) { + const oldIsType = pushTypeContext(isBeforeToken ? 2 : 1); + tsParseModuleOrNamespaceDeclaration(); + popTypeContext(oldIsType); + return true; + } + break; + + case ContextualKeyword._type: + if (tsCheckLineTerminator(isBeforeToken) && match(tt.name)) { + const oldIsType = pushTypeContext(isBeforeToken ? 2 : 1); + tsParseTypeAliasDeclaration(); + popTypeContext(oldIsType); + return true; + } + break; + + default: + break; + } + return false; +} + +function tsCheckLineTerminator(isBeforeToken) { + if (isBeforeToken) { + // Babel checks hasFollowingLineBreak here and returns false, but this + // doesn't actually come up, e.g. 
`export interface` can never be on its own + // line in valid code. + next(); + return true; + } else { + return !isLineTerminator(); + } +} + +// Returns true if there was a generic async arrow function. +function tsTryParseGenericAsyncArrowFunction() { + const snapshot = state.snapshot(); + + tsParseTypeParameters(); + parseFunctionParams(); + tsTryParseTypeOrTypePredicateAnnotation(); + expect(tt.arrow); + + if (state.error) { + state.restoreFromSnapshot(snapshot); + return false; + } + + parseFunctionBody(true); + return true; +} + +/** + * If necessary, hack the tokenizer state so that this bitshift was actually a + * less-than token, then keep parsing. This should only be used in situations + * where we restore from snapshot on error (which reverts this change) or + * where bitshift would be illegal anyway (e.g. in a class "extends" clause). + * + * This hack is useful to handle situations like foo<() => void>() where + * there can legitimately be two open-angle-brackets in a row in TS. + */ +function tsParseTypeArgumentsWithPossibleBitshift() { + if (state.type === tt.bitShiftL) { + state.pos -= 1; + finishToken(tt.lessThan); + } + tsParseTypeArguments(); +} + +function tsParseTypeArguments() { + const oldIsType = pushTypeContext(0); + expect(tt.lessThan); + while (!eat(tt.greaterThan) && !state.error) { + tsParseType(); + eat(tt.comma); + } + popTypeContext(oldIsType); +} + +export function tsIsDeclarationStart() { + if (match(tt.name)) { + switch (state.contextualKeyword) { + case ContextualKeyword._abstract: + case ContextualKeyword._declare: + case ContextualKeyword._enum: + case ContextualKeyword._interface: + case ContextualKeyword._module: + case ContextualKeyword._namespace: + case ContextualKeyword._type: + return true; + default: + break; + } + } + + return false; +} + +// ====================================================== +// OVERRIDES +// ====================================================== + +export function tsParseFunctionBodyAndFinish(functionStart, funcContextId) { + // For arrow functions, `parseArrow` handles the return type itself. + if (match(tt.colon)) { + tsParseTypeOrTypePredicateAnnotation(tt.colon); + } + + // The original code checked the node type to make sure this function type allows a missing + // body, but we skip that to avoid sending around the node type. We instead just use the + // allowExpressionBody boolean to make sure it's not an arrow function. + if (!match(tt.braceL) && isLineTerminator()) { + // Retroactively mark the function declaration as a type. + let i = state.tokens.length - 1; + while ( + i >= 0 && + (state.tokens[i].start >= functionStart || + state.tokens[i].type === tt._default || + state.tokens[i].type === tt._export) + ) { + state.tokens[i].isType = true; + i--; + } + return; + } + + parseFunctionBody(false, funcContextId); +} + +export function tsParseSubscript( + startTokenIndex, + noCalls, + stopState, +) { + if (!hasPrecedingLineBreak() && eat(tt.bang)) { + state.tokens[state.tokens.length - 1].type = tt.nonNullAssertion; + return; + } + + if (match(tt.lessThan) || match(tt.bitShiftL)) { + // There are number of things we are going to "maybe" parse, like type arguments on + // tagged template expressions. If any of them fail, walk it back and continue. + const snapshot = state.snapshot(); + + if (!noCalls && atPossibleAsync()) { + // Almost certainly this is a generic async function `async () => ... 
+ // But it might be a call with a type argument `async<T>();` + const asyncArrowFn = tsTryParseGenericAsyncArrowFunction(); + if (asyncArrowFn) { + return; + } + } + tsParseTypeArgumentsWithPossibleBitshift(); + if (!noCalls && eat(tt.parenL)) { + // With f(), the subscriptStartIndex marker is on the ( token. + state.tokens[state.tokens.length - 1].subscriptStartIndex = startTokenIndex; + parseCallExpressionArguments(); + } else if (match(tt.backQuote)) { + // Tagged template with a type argument. + parseTemplate(); + } else if ( + // The remaining possible case is an instantiation expression, e.g. + // Array<number> . Check for a few cases that would disqualify it and + // cause us to bail out. + // a<b>>c is not (a<b>)>c, but a<(b>>c) + state.type === tt.greaterThan || + // a<b>c is (a<b)>c + (state.type !== tt.parenL && + Boolean(state.type & TokenType.IS_EXPRESSION_START) && + !hasPrecedingLineBreak()) + ) { + // Bail out. We have something like a<b>c, which is not an expression with + // type arguments but an (a < b) > c comparison. + unexpected(); + } + + if (state.error) { + state.restoreFromSnapshot(snapshot); + } else { + return; + } + } else if (!noCalls && match(tt.questionDot) && lookaheadType() === tt.lessThan) { + // If we see f?.<, then this must be an optional call with a type argument. + next(); + state.tokens[startTokenIndex].isOptionalChainStart = true; + // With f?.(), the subscriptStartIndex marker is on the ?. token. + state.tokens[state.tokens.length - 1].subscriptStartIndex = startTokenIndex; + + tsParseTypeArguments(); + expect(tt.parenL); + parseCallExpressionArguments(); + } + baseParseSubscript(startTokenIndex, noCalls, stopState); +} + +export function tsTryParseExport() { + if (eat(tt._import)) { + // One of these cases: + // export import A = B; + // export import type A = require("A"); + if (isContextual(ContextualKeyword._type) && lookaheadType() !== tt.eq) { + // Eat a `type` token, unless it's actually an identifier name. + expectContextual(ContextualKeyword._type); + } + tsParseImportEqualsDeclaration(); + return true; + } else if (eat(tt.eq)) { + // `export = x;` + parseExpression(); + semicolon(); + return true; + } else if (eatContextual(ContextualKeyword._as)) { + // `export as namespace A;` + // See `parseNamespaceExportDeclaration` in TypeScript's own parser + expectContextual(ContextualKeyword._namespace); + parseIdentifier(); + semicolon(); + return true; + } else { + if (isContextual(ContextualKeyword._type)) { + const nextType = lookaheadType(); + // export type {foo} from 'a'; + // export type * from 'a';' + // export type * as ns from 'a';' + if (nextType === tt.braceL || nextType === tt.star) { + next(); + } + } + return false; + } +} + +/** + * Parse a TS import specifier, which may be prefixed with "type" and may be of + * the form `foo as bar`. + * + * The number of identifier-like tokens we see happens to be enough to uniquely + * identify the form, so simply count the number of identifiers rather than + * matching the words `type` or `as`. This is particularly important because + * `type` and `as` could each actually be plain identifiers rather than + * keywords. 
+ */ +export function tsParseImportSpecifier() { + parseIdentifier(); + if (match(tt.comma) || match(tt.braceR)) { + // import {foo} + state.tokens[state.tokens.length - 1].identifierRole = IdentifierRole.ImportDeclaration; + return; + } + parseIdentifier(); + if (match(tt.comma) || match(tt.braceR)) { + // import {type foo} + state.tokens[state.tokens.length - 1].identifierRole = IdentifierRole.ImportDeclaration; + state.tokens[state.tokens.length - 2].isType = true; + state.tokens[state.tokens.length - 1].isType = true; + return; + } + parseIdentifier(); + if (match(tt.comma) || match(tt.braceR)) { + // import {foo as bar} + state.tokens[state.tokens.length - 3].identifierRole = IdentifierRole.ImportAccess; + state.tokens[state.tokens.length - 1].identifierRole = IdentifierRole.ImportDeclaration; + return; + } + parseIdentifier(); + // import {type foo as bar} + state.tokens[state.tokens.length - 3].identifierRole = IdentifierRole.ImportAccess; + state.tokens[state.tokens.length - 1].identifierRole = IdentifierRole.ImportDeclaration; + state.tokens[state.tokens.length - 4].isType = true; + state.tokens[state.tokens.length - 3].isType = true; + state.tokens[state.tokens.length - 2].isType = true; + state.tokens[state.tokens.length - 1].isType = true; +} + +/** + * Just like named import specifiers, export specifiers can have from 1 to 4 + * tokens, inclusive, and the number of tokens determines the role of each token. + */ +export function tsParseExportSpecifier() { + parseIdentifier(); + if (match(tt.comma) || match(tt.braceR)) { + // export {foo} + state.tokens[state.tokens.length - 1].identifierRole = IdentifierRole.ExportAccess; + return; + } + parseIdentifier(); + if (match(tt.comma) || match(tt.braceR)) { + // export {type foo} + state.tokens[state.tokens.length - 1].identifierRole = IdentifierRole.ExportAccess; + state.tokens[state.tokens.length - 2].isType = true; + state.tokens[state.tokens.length - 1].isType = true; + return; + } + parseIdentifier(); + if (match(tt.comma) || match(tt.braceR)) { + // export {foo as bar} + state.tokens[state.tokens.length - 3].identifierRole = IdentifierRole.ExportAccess; + return; + } + parseIdentifier(); + // export {type foo as bar} + state.tokens[state.tokens.length - 3].identifierRole = IdentifierRole.ExportAccess; + state.tokens[state.tokens.length - 4].isType = true; + state.tokens[state.tokens.length - 3].isType = true; + state.tokens[state.tokens.length - 2].isType = true; + state.tokens[state.tokens.length - 1].isType = true; +} + +export function tsTryParseExportDefaultExpression() { + if (isContextual(ContextualKeyword._abstract) && lookaheadType() === tt._class) { + state.type = tt._abstract; + next(); // Skip "abstract" + parseClass(true, true); + return true; + } + if (isContextual(ContextualKeyword._interface)) { + // Make sure "export default" are considered type tokens so the whole thing is removed. 
+ const oldIsType = pushTypeContext(2); + tsParseDeclaration(ContextualKeyword._interface, true); + popTypeContext(oldIsType); + return true; + } + return false; +} + +export function tsTryParseStatementContent() { + if (state.type === tt._const) { + const ahead = lookaheadTypeAndKeyword(); + if (ahead.type === tt.name && ahead.contextualKeyword === ContextualKeyword._enum) { + expect(tt._const); + expectContextual(ContextualKeyword._enum); + state.tokens[state.tokens.length - 1].type = tt._enum; + tsParseEnumDeclaration(); + return true; + } + } + return false; +} + +export function tsTryParseClassMemberWithIsStatic(isStatic) { + const memberStartIndexAfterStatic = state.tokens.length; + tsParseModifiers([ + ContextualKeyword._abstract, + ContextualKeyword._readonly, + ContextualKeyword._declare, + ContextualKeyword._static, + ContextualKeyword._override, + ]); + + const modifiersEndIndex = state.tokens.length; + const found = tsTryParseIndexSignature(); + if (found) { + // Index signatures are type declarations, so set the modifier tokens as + // type tokens. Most tokens could be assumed to be type tokens, but `static` + // is ambiguous unless we set it explicitly here. + const memberStartIndex = isStatic + ? memberStartIndexAfterStatic - 1 + : memberStartIndexAfterStatic; + for (let i = memberStartIndex; i < modifiersEndIndex; i++) { + state.tokens[i].isType = true; + } + return true; + } + return false; +} + +// Note: The reason we do this in `parseIdentifierStatement` and not `parseStatement` +// is that e.g. `type()` is valid JS, so we must try parsing that first. +// If it's really a type, we will parse `type` as the statement, and can correct it here +// by parsing the rest. +export function tsParseIdentifierStatement(contextualKeyword) { + const matched = tsParseExpressionStatement(contextualKeyword); + if (!matched) { + semicolon(); + } +} + +export function tsParseExportDeclaration() { + // "export declare" is equivalent to just "export". 
+ const isDeclare = eatContextual(ContextualKeyword._declare); + if (isDeclare) { + state.tokens[state.tokens.length - 1].type = tt._declare; + } + + let matchedDeclaration = false; + if (match(tt.name)) { + if (isDeclare) { + const oldIsType = pushTypeContext(2); + matchedDeclaration = tsTryParseExportDeclaration(); + popTypeContext(oldIsType); + } else { + matchedDeclaration = tsTryParseExportDeclaration(); + } + } + if (!matchedDeclaration) { + if (isDeclare) { + const oldIsType = pushTypeContext(2); + parseStatement(true); + popTypeContext(oldIsType); + } else { + parseStatement(true); + } + } +} + +export function tsAfterParseClassSuper(hasSuper) { + if (hasSuper && (match(tt.lessThan) || match(tt.bitShiftL))) { + tsParseTypeArgumentsWithPossibleBitshift(); + } + if (eatContextual(ContextualKeyword._implements)) { + state.tokens[state.tokens.length - 1].type = tt._implements; + const oldIsType = pushTypeContext(1); + tsParseHeritageClause(); + popTypeContext(oldIsType); + } +} + +export function tsStartParseObjPropValue() { + tsTryParseTypeParameters(); +} + +export function tsStartParseFunctionParams() { + tsTryParseTypeParameters(); +} + +// `let x: number;` +export function tsAfterParseVarHead() { + const oldIsType = pushTypeContext(0); + if (!hasPrecedingLineBreak()) { + eat(tt.bang); + } + tsTryParseTypeAnnotation(); + popTypeContext(oldIsType); +} + +// parse the return type of an async arrow function - let foo = (async (): number => {}); +export function tsStartParseAsyncArrowFromCallExpression() { + if (match(tt.colon)) { + tsParseTypeAnnotation(); + } +} + +// Returns true if the expression was an arrow function. +export function tsParseMaybeAssign(noIn, isWithinParens) { + // Note: When the JSX plugin is on, type assertions (`<T> x`) aren't valid syntax. + if (isJSXEnabled) { + return tsParseMaybeAssignWithJSX(noIn, isWithinParens); + } else { + return tsParseMaybeAssignWithoutJSX(noIn, isWithinParens); + } +} + +export function tsParseMaybeAssignWithJSX(noIn, isWithinParens) { + if (!match(tt.lessThan)) { + return baseParseMaybeAssign(noIn, isWithinParens); + } + + // Prefer to parse JSX if possible. But may be an arrow fn. + const snapshot = state.snapshot(); + let wasArrow = baseParseMaybeAssign(noIn, isWithinParens); + if (state.error) { + state.restoreFromSnapshot(snapshot); + } else { + return wasArrow; + } + + // Otherwise, try as type-parameterized arrow function. + state.type = tt.typeParameterStart; + // This is similar to TypeScript's `tryParseParenthesizedArrowFunctionExpression`. + tsParseTypeParameters(); + wasArrow = baseParseMaybeAssign(noIn, isWithinParens); + if (!wasArrow) { + unexpected(); + } + + return wasArrow; +} + +export function tsParseMaybeAssignWithoutJSX(noIn, isWithinParens) { + if (!match(tt.lessThan)) { + return baseParseMaybeAssign(noIn, isWithinParens); + } + + const snapshot = state.snapshot(); + // This is similar to TypeScript's `tryParseParenthesizedArrowFunctionExpression`. + tsParseTypeParameters(); + const wasArrow = baseParseMaybeAssign(noIn, isWithinParens); + if (!wasArrow) { + unexpected(); + } + if (state.error) { + state.restoreFromSnapshot(snapshot); + } else { + return wasArrow; + } + + // Try parsing a type cast instead of an arrow function. + // This will start with a type assertion (via parseMaybeUnary). + // But don't directly call `tsParseTypeAssertion` because we want to handle any binary after it. 
+ return baseParseMaybeAssign(noIn, isWithinParens); +} + +export function tsParseArrow() { + if (match(tt.colon)) { + // This is different from how the TS parser does it. + // TS uses lookahead. Babylon parses it as a parenthesized expression and converts. + const snapshot = state.snapshot(); + + tsParseTypeOrTypePredicateAnnotation(tt.colon); + if (canInsertSemicolon()) unexpected(); + if (!match(tt.arrow)) unexpected(); + + if (state.error) { + state.restoreFromSnapshot(snapshot); + } + } + return eat(tt.arrow); +} + +// Allow type annotations inside of a parameter list. +export function tsParseAssignableListItemTypes() { + const oldIsType = pushTypeContext(0); + eat(tt.question); + tsTryParseTypeAnnotation(); + popTypeContext(oldIsType); +} + +export function tsParseMaybeDecoratorArguments() { + if (match(tt.lessThan) || match(tt.bitShiftL)) { + tsParseTypeArgumentsWithPossibleBitshift(); + } + baseParseMaybeDecoratorArguments(); +} diff --git a/node_modules/sucrase/dist/esm/parser/tokenizer/index.js b/node_modules/sucrase/dist/esm/parser/tokenizer/index.js new file mode 100644 index 0000000..b7c532d --- /dev/null +++ b/node_modules/sucrase/dist/esm/parser/tokenizer/index.js @@ -0,0 +1,1001 @@ +/* eslint max-len: 0 */ + +import {input, isFlowEnabled, state} from "../traverser/base"; +import {unexpected} from "../traverser/util"; +import {charCodes} from "../util/charcodes"; +import {IS_IDENTIFIER_CHAR, IS_IDENTIFIER_START} from "../util/identifier"; +import {IS_WHITESPACE, skipWhiteSpace} from "../util/whitespace"; +import {ContextualKeyword} from "./keywords"; +import readWord from "./readWord"; +import { TokenType as tt} from "./types"; + +export var IdentifierRole; (function (IdentifierRole) { + const Access = 0; IdentifierRole[IdentifierRole["Access"] = Access] = "Access"; + const ExportAccess = Access + 1; IdentifierRole[IdentifierRole["ExportAccess"] = ExportAccess] = "ExportAccess"; + const TopLevelDeclaration = ExportAccess + 1; IdentifierRole[IdentifierRole["TopLevelDeclaration"] = TopLevelDeclaration] = "TopLevelDeclaration"; + const FunctionScopedDeclaration = TopLevelDeclaration + 1; IdentifierRole[IdentifierRole["FunctionScopedDeclaration"] = FunctionScopedDeclaration] = "FunctionScopedDeclaration"; + const BlockScopedDeclaration = FunctionScopedDeclaration + 1; IdentifierRole[IdentifierRole["BlockScopedDeclaration"] = BlockScopedDeclaration] = "BlockScopedDeclaration"; + const ObjectShorthandTopLevelDeclaration = BlockScopedDeclaration + 1; IdentifierRole[IdentifierRole["ObjectShorthandTopLevelDeclaration"] = ObjectShorthandTopLevelDeclaration] = "ObjectShorthandTopLevelDeclaration"; + const ObjectShorthandFunctionScopedDeclaration = ObjectShorthandTopLevelDeclaration + 1; IdentifierRole[IdentifierRole["ObjectShorthandFunctionScopedDeclaration"] = ObjectShorthandFunctionScopedDeclaration] = "ObjectShorthandFunctionScopedDeclaration"; + const ObjectShorthandBlockScopedDeclaration = ObjectShorthandFunctionScopedDeclaration + 1; IdentifierRole[IdentifierRole["ObjectShorthandBlockScopedDeclaration"] = ObjectShorthandBlockScopedDeclaration] = "ObjectShorthandBlockScopedDeclaration"; + const ObjectShorthand = ObjectShorthandBlockScopedDeclaration + 1; IdentifierRole[IdentifierRole["ObjectShorthand"] = ObjectShorthand] = "ObjectShorthand"; + // Any identifier bound in an import statement, e.g. 
both A and b from + // `import A, * as b from 'A';` + const ImportDeclaration = ObjectShorthand + 1; IdentifierRole[IdentifierRole["ImportDeclaration"] = ImportDeclaration] = "ImportDeclaration"; + const ObjectKey = ImportDeclaration + 1; IdentifierRole[IdentifierRole["ObjectKey"] = ObjectKey] = "ObjectKey"; + // The `foo` in `import {foo as bar} from "./abc";`. + const ImportAccess = ObjectKey + 1; IdentifierRole[IdentifierRole["ImportAccess"] = ImportAccess] = "ImportAccess"; +})(IdentifierRole || (IdentifierRole = {})); + +/** + * Extra information on jsxTagStart tokens, used to determine which of the three + * jsx functions are called in the automatic transform. + */ +export var JSXRole; (function (JSXRole) { + // The element is self-closing or has a body that resolves to empty. We + // shouldn't emit children at all in this case. + const NoChildren = 0; JSXRole[JSXRole["NoChildren"] = NoChildren] = "NoChildren"; + // The element has a single explicit child, which might still be an arbitrary + // expression like an array. We should emit that expression as the children. + const OneChild = NoChildren + 1; JSXRole[JSXRole["OneChild"] = OneChild] = "OneChild"; + // The element has at least two explicitly-specified children or has spread + // children, so child positions are assumed to be "static". We should wrap + // these children in an array. + const StaticChildren = OneChild + 1; JSXRole[JSXRole["StaticChildren"] = StaticChildren] = "StaticChildren"; + // The element has a prop named "key" after a prop spread, so we should fall + // back to the createElement function. + const KeyAfterPropSpread = StaticChildren + 1; JSXRole[JSXRole["KeyAfterPropSpread"] = KeyAfterPropSpread] = "KeyAfterPropSpread"; +})(JSXRole || (JSXRole = {})); + +export function isDeclaration(token) { + const role = token.identifierRole; + return ( + role === IdentifierRole.TopLevelDeclaration || + role === IdentifierRole.FunctionScopedDeclaration || + role === IdentifierRole.BlockScopedDeclaration || + role === IdentifierRole.ObjectShorthandTopLevelDeclaration || + role === IdentifierRole.ObjectShorthandFunctionScopedDeclaration || + role === IdentifierRole.ObjectShorthandBlockScopedDeclaration + ); +} + +export function isNonTopLevelDeclaration(token) { + const role = token.identifierRole; + return ( + role === IdentifierRole.FunctionScopedDeclaration || + role === IdentifierRole.BlockScopedDeclaration || + role === IdentifierRole.ObjectShorthandFunctionScopedDeclaration || + role === IdentifierRole.ObjectShorthandBlockScopedDeclaration + ); +} + +export function isTopLevelDeclaration(token) { + const role = token.identifierRole; + return ( + role === IdentifierRole.TopLevelDeclaration || + role === IdentifierRole.ObjectShorthandTopLevelDeclaration || + role === IdentifierRole.ImportDeclaration + ); +} + +export function isBlockScopedDeclaration(token) { + const role = token.identifierRole; + // Treat top-level declarations as block scope since the distinction doesn't matter here. 
+ return ( + role === IdentifierRole.TopLevelDeclaration || + role === IdentifierRole.BlockScopedDeclaration || + role === IdentifierRole.ObjectShorthandTopLevelDeclaration || + role === IdentifierRole.ObjectShorthandBlockScopedDeclaration + ); +} + +export function isFunctionScopedDeclaration(token) { + const role = token.identifierRole; + return ( + role === IdentifierRole.FunctionScopedDeclaration || + role === IdentifierRole.ObjectShorthandFunctionScopedDeclaration + ); +} + +export function isObjectShorthandDeclaration(token) { + return ( + token.identifierRole === IdentifierRole.ObjectShorthandTopLevelDeclaration || + token.identifierRole === IdentifierRole.ObjectShorthandBlockScopedDeclaration || + token.identifierRole === IdentifierRole.ObjectShorthandFunctionScopedDeclaration + ); +} + +// Object type used to represent tokens. Note that normally, tokens +// simply exist as properties on the parser object. This is only +// used for the onToken callback and the external tokenizer. +export class Token { + constructor() { + this.type = state.type; + this.contextualKeyword = state.contextualKeyword; + this.start = state.start; + this.end = state.end; + this.scopeDepth = state.scopeDepth; + this.isType = state.isType; + this.identifierRole = null; + this.jsxRole = null; + this.shadowsGlobal = false; + this.isAsyncOperation = false; + this.contextId = null; + this.rhsEndIndex = null; + this.isExpression = false; + this.numNullishCoalesceStarts = 0; + this.numNullishCoalesceEnds = 0; + this.isOptionalChainStart = false; + this.isOptionalChainEnd = false; + this.subscriptStartIndex = null; + this.nullishStartIndex = null; + } + + + + + + + + + + // Initially false for all tokens, then may be computed in a follow-up step that does scope + // analysis. + + // Initially false for all tokens, but may be set during transform to mark it as containing an + // await operation. + + + // For assignments, the index of the RHS. For export tokens, the end of the export. + + // For class tokens, records if the class is a class expression or a class statement. + + // Number of times to insert a `nullishCoalesce(` snippet before this token. + + // Number of times to insert a `)` snippet after this token. + + // If true, insert an `optionalChain([` snippet before this token. + + // If true, insert a `])` snippet after this token. + + // Tag for `.`, `?.`, `[`, `?.[`, `(`, and `?.(` to denote the "root" token for this + // subscript chain. This can be used to determine if this chain is an optional chain. + + // Tag for `??` operators to denote the root token for this nullish coalescing call. + +} + +// ## Tokenizer + +// Move to the next token +export function next() { + state.tokens.push(new Token()); + nextToken(); +} + +// Call instead of next when inside a template, since that needs to be handled differently. +export function nextTemplateToken() { + state.tokens.push(new Token()); + state.start = state.pos; + readTmplToken(); +} + +// The tokenizer never parses regexes by default. Instead, the parser is responsible for +// instructing it to parse a regex when we see a slash at the start of an expression. 
+export function retokenizeSlashAsRegex() { + if (state.type === tt.assign) { + --state.pos; + } + readRegexp(); +} + +export function pushTypeContext(existingTokensInType) { + for (let i = state.tokens.length - existingTokensInType; i < state.tokens.length; i++) { + state.tokens[i].isType = true; + } + const oldIsType = state.isType; + state.isType = true; + return oldIsType; +} + +export function popTypeContext(oldIsType) { + state.isType = oldIsType; +} + +export function eat(type) { + if (match(type)) { + next(); + return true; + } else { + return false; + } +} + +export function eatTypeToken(tokenType) { + const oldIsType = state.isType; + state.isType = true; + eat(tokenType); + state.isType = oldIsType; +} + +export function match(type) { + return state.type === type; +} + +export function lookaheadType() { + const snapshot = state.snapshot(); + next(); + const type = state.type; + state.restoreFromSnapshot(snapshot); + return type; +} + +export class TypeAndKeyword { + + + constructor(type, contextualKeyword) { + this.type = type; + this.contextualKeyword = contextualKeyword; + } +} + +export function lookaheadTypeAndKeyword() { + const snapshot = state.snapshot(); + next(); + const type = state.type; + const contextualKeyword = state.contextualKeyword; + state.restoreFromSnapshot(snapshot); + return new TypeAndKeyword(type, contextualKeyword); +} + +export function nextTokenStart() { + return nextTokenStartSince(state.pos); +} + +export function nextTokenStartSince(pos) { + skipWhiteSpace.lastIndex = pos; + const skip = skipWhiteSpace.exec(input); + return pos + skip[0].length; +} + +export function lookaheadCharCode() { + return input.charCodeAt(nextTokenStart()); +} + +// Read a single token, updating the parser object's token-related +// properties. +export function nextToken() { + skipSpace(); + state.start = state.pos; + if (state.pos >= input.length) { + const tokens = state.tokens; + // We normally run past the end a bit, but if we're way past the end, avoid an infinite loop. + // Also check the token positions rather than the types since sometimes we rewrite the token + // type to something else. + if ( + tokens.length >= 2 && + tokens[tokens.length - 1].start >= input.length && + tokens[tokens.length - 2].start >= input.length + ) { + unexpected("Unexpectedly reached the end of input."); + } + finishToken(tt.eof); + return; + } + readToken(input.charCodeAt(state.pos)); +} + +function readToken(code) { + // Identifier or keyword. '\uXXXX' sequences are allowed in + // identifiers, so '\' also dispatches to that. + if ( + IS_IDENTIFIER_START[code] || + code === charCodes.backslash || + (code === charCodes.atSign && input.charCodeAt(state.pos + 1) === charCodes.atSign) + ) { + readWord(); + } else { + getTokenFromCode(code); + } +} + +function skipBlockComment() { + while ( + input.charCodeAt(state.pos) !== charCodes.asterisk || + input.charCodeAt(state.pos + 1) !== charCodes.slash + ) { + state.pos++; + if (state.pos > input.length) { + unexpected("Unterminated comment", state.pos - 2); + return; + } + } + state.pos += 2; +} + +export function skipLineComment(startSkip) { + let ch = input.charCodeAt((state.pos += startSkip)); + if (state.pos < input.length) { + while ( + ch !== charCodes.lineFeed && + ch !== charCodes.carriageReturn && + ch !== charCodes.lineSeparator && + ch !== charCodes.paragraphSeparator && + ++state.pos < input.length + ) { + ch = input.charCodeAt(state.pos); + } + } +} + +// Called at the start of the parse and after every token. 
Skips +// whitespace and comments. +export function skipSpace() { + while (state.pos < input.length) { + const ch = input.charCodeAt(state.pos); + switch (ch) { + case charCodes.carriageReturn: + if (input.charCodeAt(state.pos + 1) === charCodes.lineFeed) { + ++state.pos; + } + + case charCodes.lineFeed: + case charCodes.lineSeparator: + case charCodes.paragraphSeparator: + ++state.pos; + break; + + case charCodes.slash: + switch (input.charCodeAt(state.pos + 1)) { + case charCodes.asterisk: + state.pos += 2; + skipBlockComment(); + break; + + case charCodes.slash: + skipLineComment(2); + break; + + default: + return; + } + break; + + default: + if (IS_WHITESPACE[ch]) { + ++state.pos; + } else { + return; + } + } + } +} + +// Called at the end of every token. Sets various fields, and skips the space after the token, so +// that the next one's `start` will point at the right position. +export function finishToken( + type, + contextualKeyword = ContextualKeyword.NONE, +) { + state.end = state.pos; + state.type = type; + state.contextualKeyword = contextualKeyword; +} + +// ### Token reading + +// This is the function that is called to fetch the next token. It +// is somewhat obscure, because it works in character codes rather +// than characters, and because operator parsing has been inlined +// into it. +// +// All in the name of speed. +function readToken_dot() { + const nextChar = input.charCodeAt(state.pos + 1); + if (nextChar >= charCodes.digit0 && nextChar <= charCodes.digit9) { + readNumber(true); + return; + } + + if (nextChar === charCodes.dot && input.charCodeAt(state.pos + 2) === charCodes.dot) { + state.pos += 3; + finishToken(tt.ellipsis); + } else { + ++state.pos; + finishToken(tt.dot); + } +} + +function readToken_slash() { + const nextChar = input.charCodeAt(state.pos + 1); + if (nextChar === charCodes.equalsTo) { + finishOp(tt.assign, 2); + } else { + finishOp(tt.slash, 1); + } +} + +function readToken_mult_modulo(code) { + // '%*' + let tokenType = code === charCodes.asterisk ? tt.star : tt.modulo; + let width = 1; + let nextChar = input.charCodeAt(state.pos + 1); + + // Exponentiation operator ** + if (code === charCodes.asterisk && nextChar === charCodes.asterisk) { + width++; + nextChar = input.charCodeAt(state.pos + 2); + tokenType = tt.exponent; + } + + // Match *= or %=, disallowing *=> which can be valid in flow. + if ( + nextChar === charCodes.equalsTo && + input.charCodeAt(state.pos + 2) !== charCodes.greaterThan + ) { + width++; + tokenType = tt.assign; + } + + finishOp(tokenType, width); +} + +function readToken_pipe_amp(code) { + // '|&' + const nextChar = input.charCodeAt(state.pos + 1); + + if (nextChar === code) { + if (input.charCodeAt(state.pos + 2) === charCodes.equalsTo) { + // ||= or &&= + finishOp(tt.assign, 3); + } else { + // || or && + finishOp(code === charCodes.verticalBar ? tt.logicalOR : tt.logicalAND, 2); + } + return; + } + + if (code === charCodes.verticalBar) { + // '|>' + if (nextChar === charCodes.greaterThan) { + finishOp(tt.pipeline, 2); + return; + } else if (nextChar === charCodes.rightCurlyBrace && isFlowEnabled) { + // '|}' + finishOp(tt.braceBarR, 2); + return; + } + } + + if (nextChar === charCodes.equalsTo) { + finishOp(tt.assign, 2); + return; + } + + finishOp(code === charCodes.verticalBar ? 
tt.bitwiseOR : tt.bitwiseAND, 1); +} + +function readToken_caret() { + // '^' + const nextChar = input.charCodeAt(state.pos + 1); + if (nextChar === charCodes.equalsTo) { + finishOp(tt.assign, 2); + } else { + finishOp(tt.bitwiseXOR, 1); + } +} + +function readToken_plus_min(code) { + // '+-' + const nextChar = input.charCodeAt(state.pos + 1); + + if (nextChar === code) { + // Tentatively call this a prefix operator, but it might be changed to postfix later. + finishOp(tt.preIncDec, 2); + return; + } + + if (nextChar === charCodes.equalsTo) { + finishOp(tt.assign, 2); + } else if (code === charCodes.plusSign) { + finishOp(tt.plus, 1); + } else { + finishOp(tt.minus, 1); + } +} + +function readToken_lt() { + const nextChar = input.charCodeAt(state.pos + 1); + + if (nextChar === charCodes.lessThan) { + if (input.charCodeAt(state.pos + 2) === charCodes.equalsTo) { + finishOp(tt.assign, 3); + return; + } + // We see <<, but need to be really careful about whether to treat it as a + // true left-shift or as two < tokens. + if (state.isType) { + // Within a type, << might come up in a snippet like `Array<() => void>`, + // so treat it as two < tokens. Importantly, this should only override << + // rather than other tokens like <= . If we treated <= as < in a type + // context, then the snippet `a as T <= 1` would incorrectly start parsing + // a type argument on T. We don't need to worry about `a as T << 1` + // because TypeScript disallows that syntax. + finishOp(tt.lessThan, 1); + } else { + // Outside a type, this might be a true left-shift operator, or it might + // still be two open-type-arg tokens, such as in `f<() => void>()`. We + // look at the token while considering the `f`, so we don't yet know that + // we're in a type context. In this case, we initially tokenize as a + // left-shift and correct after-the-fact as necessary in + // tsParseTypeArgumentsWithPossibleBitshift . + finishOp(tt.bitShiftL, 2); + } + return; + } + + if (nextChar === charCodes.equalsTo) { + // <= + finishOp(tt.relationalOrEqual, 2); + } else { + finishOp(tt.lessThan, 1); + } +} + +function readToken_gt() { + if (state.isType) { + // Avoid right-shift for things like `Array<Array<string>>` and + // greater-than-or-equal for things like `const a: Array<number>=[];`. + finishOp(tt.greaterThan, 1); + return; + } + + const nextChar = input.charCodeAt(state.pos + 1); + + if (nextChar === charCodes.greaterThan) { + const size = input.charCodeAt(state.pos + 2) === charCodes.greaterThan ? 3 : 2; + if (input.charCodeAt(state.pos + size) === charCodes.equalsTo) { + finishOp(tt.assign, size + 1); + return; + } + finishOp(tt.bitShiftR, size); + return; + } + + if (nextChar === charCodes.equalsTo) { + // >= + finishOp(tt.relationalOrEqual, 2); + } else { + finishOp(tt.greaterThan, 1); + } +} + +/** + * Called after `as` expressions in TS; we're switching from a type to a + * non-type context, so a > token may actually be >= . This is needed because >= + * must be tokenized as a > in a type context because of code like + * `const x: Array<number>=[];`, but `a as T >= 1` is a code example where it must be + * treated as >=. + * + * Notably, this only applies to >, not <. In a code snippet like `a as T <= 1`, + * we must NOT tokenize as <, or else the type parser will start parsing a type + * argument and fail. + */ +export function rescan_gt() { + if (state.type === tt.greaterThan) { + state.pos -= 1; + readToken_gt(); + } +} + +function readToken_eq_excl(code) { + // '=!' 
+ const nextChar = input.charCodeAt(state.pos + 1); + if (nextChar === charCodes.equalsTo) { + finishOp(tt.equality, input.charCodeAt(state.pos + 2) === charCodes.equalsTo ? 3 : 2); + return; + } + if (code === charCodes.equalsTo && nextChar === charCodes.greaterThan) { + // '=>' + state.pos += 2; + finishToken(tt.arrow); + return; + } + finishOp(code === charCodes.equalsTo ? tt.eq : tt.bang, 1); +} + +function readToken_question() { + // '?' + const nextChar = input.charCodeAt(state.pos + 1); + const nextChar2 = input.charCodeAt(state.pos + 2); + if ( + nextChar === charCodes.questionMark && + // In Flow (but not TypeScript), ??string is a valid type that should be + // tokenized as two individual ? tokens. + !(isFlowEnabled && state.isType) + ) { + if (nextChar2 === charCodes.equalsTo) { + // '??=' + finishOp(tt.assign, 3); + } else { + // '??' + finishOp(tt.nullishCoalescing, 2); + } + } else if ( + nextChar === charCodes.dot && + !(nextChar2 >= charCodes.digit0 && nextChar2 <= charCodes.digit9) + ) { + // '.' not followed by a number + state.pos += 2; + finishToken(tt.questionDot); + } else { + ++state.pos; + finishToken(tt.question); + } +} + +export function getTokenFromCode(code) { + switch (code) { + case charCodes.numberSign: + ++state.pos; + finishToken(tt.hash); + return; + + // The interpretation of a dot depends on whether it is followed + // by a digit or another two dots. + + case charCodes.dot: + readToken_dot(); + return; + + // Punctuation tokens. + case charCodes.leftParenthesis: + ++state.pos; + finishToken(tt.parenL); + return; + case charCodes.rightParenthesis: + ++state.pos; + finishToken(tt.parenR); + return; + case charCodes.semicolon: + ++state.pos; + finishToken(tt.semi); + return; + case charCodes.comma: + ++state.pos; + finishToken(tt.comma); + return; + case charCodes.leftSquareBracket: + ++state.pos; + finishToken(tt.bracketL); + return; + case charCodes.rightSquareBracket: + ++state.pos; + finishToken(tt.bracketR); + return; + + case charCodes.leftCurlyBrace: + if (isFlowEnabled && input.charCodeAt(state.pos + 1) === charCodes.verticalBar) { + finishOp(tt.braceBarL, 2); + } else { + ++state.pos; + finishToken(tt.braceL); + } + return; + + case charCodes.rightCurlyBrace: + ++state.pos; + finishToken(tt.braceR); + return; + + case charCodes.colon: + if (input.charCodeAt(state.pos + 1) === charCodes.colon) { + finishOp(tt.doubleColon, 2); + } else { + ++state.pos; + finishToken(tt.colon); + } + return; + + case charCodes.questionMark: + readToken_question(); + return; + case charCodes.atSign: + ++state.pos; + finishToken(tt.at); + return; + + case charCodes.graveAccent: + ++state.pos; + finishToken(tt.backQuote); + return; + + case charCodes.digit0: { + const nextChar = input.charCodeAt(state.pos + 1); + // '0x', '0X', '0o', '0O', '0b', '0B' + if ( + nextChar === charCodes.lowercaseX || + nextChar === charCodes.uppercaseX || + nextChar === charCodes.lowercaseO || + nextChar === charCodes.uppercaseO || + nextChar === charCodes.lowercaseB || + nextChar === charCodes.uppercaseB + ) { + readRadixNumber(); + return; + } + } + // Anything else beginning with a digit is an integer, octal + // number, or float. + case charCodes.digit1: + case charCodes.digit2: + case charCodes.digit3: + case charCodes.digit4: + case charCodes.digit5: + case charCodes.digit6: + case charCodes.digit7: + case charCodes.digit8: + case charCodes.digit9: + readNumber(false); + return; + + // Quotes produce strings. 
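+ // Both quote styles go through readString, which only finds the matching closing quote
+ // (stepping over backslash-escaped characters) and emits tt.string; escapes are not decoded here.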
+ case charCodes.quotationMark: + case charCodes.apostrophe: + readString(code); + return; + + // Operators are parsed inline in tiny state machines. '=' (charCodes.equalsTo) is + // often referred to. `finishOp` simply skips the amount of + // characters it is given as second argument, and returns a token + // of the type given by its first argument. + + case charCodes.slash: + readToken_slash(); + return; + + case charCodes.percentSign: + case charCodes.asterisk: + readToken_mult_modulo(code); + return; + + case charCodes.verticalBar: + case charCodes.ampersand: + readToken_pipe_amp(code); + return; + + case charCodes.caret: + readToken_caret(); + return; + + case charCodes.plusSign: + case charCodes.dash: + readToken_plus_min(code); + return; + + case charCodes.lessThan: + readToken_lt(); + return; + + case charCodes.greaterThan: + readToken_gt(); + return; + + case charCodes.equalsTo: + case charCodes.exclamationMark: + readToken_eq_excl(code); + return; + + case charCodes.tilde: + finishOp(tt.tilde, 1); + return; + + default: + break; + } + + unexpected(`Unexpected character '${String.fromCharCode(code)}'`, state.pos); +} + +function finishOp(type, size) { + state.pos += size; + finishToken(type); +} + +function readRegexp() { + const start = state.pos; + let escaped = false; + let inClass = false; + for (;;) { + if (state.pos >= input.length) { + unexpected("Unterminated regular expression", start); + return; + } + const code = input.charCodeAt(state.pos); + if (escaped) { + escaped = false; + } else { + if (code === charCodes.leftSquareBracket) { + inClass = true; + } else if (code === charCodes.rightSquareBracket && inClass) { + inClass = false; + } else if (code === charCodes.slash && !inClass) { + break; + } + escaped = code === charCodes.backslash; + } + ++state.pos; + } + ++state.pos; + // Need to use `skipWord` because '\uXXXX' sequences are allowed here (don't ask). + skipWord(); + + finishToken(tt.regexp); +} + +/** + * Read a decimal integer. Note that this can't be unified with the similar code + * in readRadixNumber (which also handles hex digits) because "e" needs to be + * the end of the integer so that we can properly handle scientific notation. + */ +function readInt() { + while (true) { + const code = input.charCodeAt(state.pos); + if ((code >= charCodes.digit0 && code <= charCodes.digit9) || code === charCodes.underscore) { + state.pos++; + } else { + break; + } + } +} + +function readRadixNumber() { + state.pos += 2; // 0x + + // Walk to the end of the number, allowing hex digits. + while (true) { + const code = input.charCodeAt(state.pos); + if ( + (code >= charCodes.digit0 && code <= charCodes.digit9) || + (code >= charCodes.lowercaseA && code <= charCodes.lowercaseF) || + (code >= charCodes.uppercaseA && code <= charCodes.uppercaseF) || + code === charCodes.underscore + ) { + state.pos++; + } else { + break; + } + } + + const nextChar = input.charCodeAt(state.pos); + if (nextChar === charCodes.lowercaseN) { + ++state.pos; + finishToken(tt.bigint); + } else { + finishToken(tt.num); + } +} + +// Read an integer, octal integer, or floating-point number. 
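+// Illustrative sketch (not part of sucrase): readNumber below only delimits the literal and
+// classifies it by its trailing suffix character; it never computes the numeric value. The
+// suffix handling amounts to this standalone helper (the name is hypothetical):
+function sketchClassifyNumericSuffix(lastChar) {
+  if (lastChar === "n") return "bigint";  // e.g. 123n       -> tt.bigint
+  if (lastChar === "m") return "decimal"; // e.g. 1.5m       -> tt.decimal
+  return "num";                           // e.g. 1_000.5e-3 -> tt.num
+}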
+function readNumber(startsWithDot) { + let isBigInt = false; + let isDecimal = false; + + if (!startsWithDot) { + readInt(); + } + + let nextChar = input.charCodeAt(state.pos); + if (nextChar === charCodes.dot) { + ++state.pos; + readInt(); + nextChar = input.charCodeAt(state.pos); + } + + if (nextChar === charCodes.uppercaseE || nextChar === charCodes.lowercaseE) { + nextChar = input.charCodeAt(++state.pos); + if (nextChar === charCodes.plusSign || nextChar === charCodes.dash) { + ++state.pos; + } + readInt(); + nextChar = input.charCodeAt(state.pos); + } + + if (nextChar === charCodes.lowercaseN) { + ++state.pos; + isBigInt = true; + } else if (nextChar === charCodes.lowercaseM) { + ++state.pos; + isDecimal = true; + } + + if (isBigInt) { + finishToken(tt.bigint); + return; + } + + if (isDecimal) { + finishToken(tt.decimal); + return; + } + + finishToken(tt.num); +} + +function readString(quote) { + state.pos++; + for (;;) { + if (state.pos >= input.length) { + unexpected("Unterminated string constant"); + return; + } + const ch = input.charCodeAt(state.pos); + if (ch === charCodes.backslash) { + state.pos++; + } else if (ch === quote) { + break; + } + state.pos++; + } + state.pos++; + finishToken(tt.string); +} + +// Reads template string tokens. +function readTmplToken() { + for (;;) { + if (state.pos >= input.length) { + unexpected("Unterminated template"); + return; + } + const ch = input.charCodeAt(state.pos); + if ( + ch === charCodes.graveAccent || + (ch === charCodes.dollarSign && input.charCodeAt(state.pos + 1) === charCodes.leftCurlyBrace) + ) { + if (state.pos === state.start && match(tt.template)) { + if (ch === charCodes.dollarSign) { + state.pos += 2; + finishToken(tt.dollarBraceL); + return; + } else { + ++state.pos; + finishToken(tt.backQuote); + return; + } + } + finishToken(tt.template); + return; + } + if (ch === charCodes.backslash) { + state.pos++; + } + state.pos++; + } +} + +// Skip to the end of the current word. Note that this is the same as the snippet at the end of +// readWord, but calling skipWord from readWord seems to slightly hurt performance from some rough +// measurements. 
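+// Illustrative sketch (hypothetical helper, not part of sucrase): for plain ASCII input, the
+// IS_IDENTIFIER_CHAR test that skipWord relies on below is roughly equivalent to:
+function sketchIsAsciiIdentifierChar(ch) {
+  return /[A-Za-z0-9$_]/.test(ch);
+}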
+export function skipWord() { + while (state.pos < input.length) { + const ch = input.charCodeAt(state.pos); + if (IS_IDENTIFIER_CHAR[ch]) { + state.pos++; + } else if (ch === charCodes.backslash) { + // \u + state.pos += 2; + if (input.charCodeAt(state.pos) === charCodes.leftCurlyBrace) { + while ( + state.pos < input.length && + input.charCodeAt(state.pos) !== charCodes.rightCurlyBrace + ) { + state.pos++; + } + state.pos++; + } + } else { + break; + } + } +} diff --git a/node_modules/sucrase/dist/esm/parser/tokenizer/keywords.js b/node_modules/sucrase/dist/esm/parser/tokenizer/keywords.js new file mode 100644 index 0000000..0dcf1b0 --- /dev/null +++ b/node_modules/sucrase/dist/esm/parser/tokenizer/keywords.js @@ -0,0 +1,43 @@ +export var ContextualKeyword; (function (ContextualKeyword) { + const NONE = 0; ContextualKeyword[ContextualKeyword["NONE"] = NONE] = "NONE"; + const _abstract = NONE + 1; ContextualKeyword[ContextualKeyword["_abstract"] = _abstract] = "_abstract"; + const _accessor = _abstract + 1; ContextualKeyword[ContextualKeyword["_accessor"] = _accessor] = "_accessor"; + const _as = _accessor + 1; ContextualKeyword[ContextualKeyword["_as"] = _as] = "_as"; + const _assert = _as + 1; ContextualKeyword[ContextualKeyword["_assert"] = _assert] = "_assert"; + const _asserts = _assert + 1; ContextualKeyword[ContextualKeyword["_asserts"] = _asserts] = "_asserts"; + const _async = _asserts + 1; ContextualKeyword[ContextualKeyword["_async"] = _async] = "_async"; + const _await = _async + 1; ContextualKeyword[ContextualKeyword["_await"] = _await] = "_await"; + const _checks = _await + 1; ContextualKeyword[ContextualKeyword["_checks"] = _checks] = "_checks"; + const _constructor = _checks + 1; ContextualKeyword[ContextualKeyword["_constructor"] = _constructor] = "_constructor"; + const _declare = _constructor + 1; ContextualKeyword[ContextualKeyword["_declare"] = _declare] = "_declare"; + const _enum = _declare + 1; ContextualKeyword[ContextualKeyword["_enum"] = _enum] = "_enum"; + const _exports = _enum + 1; ContextualKeyword[ContextualKeyword["_exports"] = _exports] = "_exports"; + const _from = _exports + 1; ContextualKeyword[ContextualKeyword["_from"] = _from] = "_from"; + const _get = _from + 1; ContextualKeyword[ContextualKeyword["_get"] = _get] = "_get"; + const _global = _get + 1; ContextualKeyword[ContextualKeyword["_global"] = _global] = "_global"; + const _implements = _global + 1; ContextualKeyword[ContextualKeyword["_implements"] = _implements] = "_implements"; + const _infer = _implements + 1; ContextualKeyword[ContextualKeyword["_infer"] = _infer] = "_infer"; + const _interface = _infer + 1; ContextualKeyword[ContextualKeyword["_interface"] = _interface] = "_interface"; + const _is = _interface + 1; ContextualKeyword[ContextualKeyword["_is"] = _is] = "_is"; + const _keyof = _is + 1; ContextualKeyword[ContextualKeyword["_keyof"] = _keyof] = "_keyof"; + const _mixins = _keyof + 1; ContextualKeyword[ContextualKeyword["_mixins"] = _mixins] = "_mixins"; + const _module = _mixins + 1; ContextualKeyword[ContextualKeyword["_module"] = _module] = "_module"; + const _namespace = _module + 1; ContextualKeyword[ContextualKeyword["_namespace"] = _namespace] = "_namespace"; + const _of = _namespace + 1; ContextualKeyword[ContextualKeyword["_of"] = _of] = "_of"; + const _opaque = _of + 1; ContextualKeyword[ContextualKeyword["_opaque"] = _opaque] = "_opaque"; + const _out = _opaque + 1; ContextualKeyword[ContextualKeyword["_out"] = _out] = "_out"; + const _override = _out + 1; 
ContextualKeyword[ContextualKeyword["_override"] = _override] = "_override"; + const _private = _override + 1; ContextualKeyword[ContextualKeyword["_private"] = _private] = "_private"; + const _protected = _private + 1; ContextualKeyword[ContextualKeyword["_protected"] = _protected] = "_protected"; + const _proto = _protected + 1; ContextualKeyword[ContextualKeyword["_proto"] = _proto] = "_proto"; + const _public = _proto + 1; ContextualKeyword[ContextualKeyword["_public"] = _public] = "_public"; + const _readonly = _public + 1; ContextualKeyword[ContextualKeyword["_readonly"] = _readonly] = "_readonly"; + const _require = _readonly + 1; ContextualKeyword[ContextualKeyword["_require"] = _require] = "_require"; + const _satisfies = _require + 1; ContextualKeyword[ContextualKeyword["_satisfies"] = _satisfies] = "_satisfies"; + const _set = _satisfies + 1; ContextualKeyword[ContextualKeyword["_set"] = _set] = "_set"; + const _static = _set + 1; ContextualKeyword[ContextualKeyword["_static"] = _static] = "_static"; + const _symbol = _static + 1; ContextualKeyword[ContextualKeyword["_symbol"] = _symbol] = "_symbol"; + const _type = _symbol + 1; ContextualKeyword[ContextualKeyword["_type"] = _type] = "_type"; + const _unique = _type + 1; ContextualKeyword[ContextualKeyword["_unique"] = _unique] = "_unique"; + const _using = _unique + 1; ContextualKeyword[ContextualKeyword["_using"] = _using] = "_using"; +})(ContextualKeyword || (ContextualKeyword = {})); diff --git a/node_modules/sucrase/dist/esm/parser/tokenizer/readWord.js b/node_modules/sucrase/dist/esm/parser/tokenizer/readWord.js new file mode 100644 index 0000000..cf3df89 --- /dev/null +++ b/node_modules/sucrase/dist/esm/parser/tokenizer/readWord.js @@ -0,0 +1,64 @@ +import {input, state} from "../traverser/base"; +import {charCodes} from "../util/charcodes"; +import {IS_IDENTIFIER_CHAR} from "../util/identifier"; +import {finishToken} from "./index"; +import {READ_WORD_TREE} from "./readWordTree"; +import {TokenType as tt} from "./types"; + +/** + * Read an identifier, producing either a name token or matching on one of the existing keywords. + * For performance, we pre-generate big decision tree that we traverse. Each node represents a + * prefix and has 27 values, where the first value is the token or contextual token, if any (-1 if + * not), and the other 26 values are the transitions to other nodes, or -1 to stop. 
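+ *
+ * The first value packs two encodings into one integer: a real keyword stores
+ * (tokenType << 1) + 1 and a contextual keyword stores (contextualKeyword << 1), so the low
+ * bit tells readWord whether to finish the keyword's own token type or a plain tt.name token
+ * that carries the contextual keyword. For example, the "break" node stores
+ * (tt._break << 1) + 1, while the "as" node stores ContextualKeyword._as << 1.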
+ */ +export default function readWord() { + let treePos = 0; + let code = 0; + let pos = state.pos; + while (pos < input.length) { + code = input.charCodeAt(pos); + if (code < charCodes.lowercaseA || code > charCodes.lowercaseZ) { + break; + } + const next = READ_WORD_TREE[treePos + (code - charCodes.lowercaseA) + 1]; + if (next === -1) { + break; + } else { + treePos = next; + pos++; + } + } + + const keywordValue = READ_WORD_TREE[treePos]; + if (keywordValue > -1 && !IS_IDENTIFIER_CHAR[code]) { + state.pos = pos; + if (keywordValue & 1) { + finishToken(keywordValue >>> 1); + } else { + finishToken(tt.name, keywordValue >>> 1); + } + return; + } + + while (pos < input.length) { + const ch = input.charCodeAt(pos); + if (IS_IDENTIFIER_CHAR[ch]) { + pos++; + } else if (ch === charCodes.backslash) { + // \u + pos += 2; + if (input.charCodeAt(pos) === charCodes.leftCurlyBrace) { + while (pos < input.length && input.charCodeAt(pos) !== charCodes.rightCurlyBrace) { + pos++; + } + pos++; + } + } else if (ch === charCodes.atSign && input.charCodeAt(pos + 1) === charCodes.atSign) { + pos += 2; + } else { + break; + } + } + state.pos = pos; + finishToken(tt.name); +} diff --git a/node_modules/sucrase/dist/esm/parser/tokenizer/readWordTree.js b/node_modules/sucrase/dist/esm/parser/tokenizer/readWordTree.js new file mode 100644 index 0000000..ffb8cac --- /dev/null +++ b/node_modules/sucrase/dist/esm/parser/tokenizer/readWordTree.js @@ -0,0 +1,671 @@ +// Generated file, do not edit! Run "yarn generate" to re-generate this file. +import {ContextualKeyword} from "./keywords"; +import {TokenType as tt} from "./types"; + +// prettier-ignore +export const READ_WORD_TREE = new Int32Array([ + // "" + -1, 27, 783, 918, 1755, 2376, 2862, 3483, -1, 3699, -1, 4617, 4752, 4833, 5130, 5508, 5940, -1, 6480, 6939, 7749, 8181, 8451, 8613, -1, 8829, -1, + // "a" + -1, -1, 54, 243, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 432, -1, -1, -1, 675, -1, -1, -1, + // "ab" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 81, -1, -1, -1, -1, -1, -1, -1, + // "abs" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 108, -1, -1, -1, -1, -1, -1, + // "abst" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 135, -1, -1, -1, -1, -1, -1, -1, -1, + // "abstr" + -1, 162, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "abstra" + -1, -1, -1, 189, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "abstrac" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 216, -1, -1, -1, -1, -1, -1, + // "abstract" + ContextualKeyword._abstract << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "ac" + -1, -1, -1, 270, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "acc" + -1, -1, -1, -1, -1, 297, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "acce" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 324, -1, -1, -1, -1, -1, -1, -1, + // "acces" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 351, -1, -1, -1, -1, -1, -1, -1, + // "access" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 378, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "accesso" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 
-1, -1, -1, -1, -1, -1, 405, -1, -1, -1, -1, -1, -1, -1, -1, + // "accessor" + ContextualKeyword._accessor << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "as" + ContextualKeyword._as << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 459, -1, -1, -1, -1, -1, 594, -1, + // "ass" + -1, -1, -1, -1, -1, 486, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "asse" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 513, -1, -1, -1, -1, -1, -1, -1, -1, + // "asser" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 540, -1, -1, -1, -1, -1, -1, + // "assert" + ContextualKeyword._assert << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 567, -1, -1, -1, -1, -1, -1, -1, + // "asserts" + ContextualKeyword._asserts << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "asy" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 621, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "asyn" + -1, -1, -1, 648, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "async" + ContextualKeyword._async << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "aw" + -1, 702, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "awa" + -1, -1, -1, -1, -1, -1, -1, -1, -1, 729, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "awai" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 756, -1, -1, -1, -1, -1, -1, + // "await" + ContextualKeyword._await << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "b" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 810, -1, -1, -1, -1, -1, -1, -1, -1, + // "br" + -1, -1, -1, -1, -1, 837, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "bre" + -1, 864, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "brea" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 891, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "break" + (tt._break << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "c" + -1, 945, -1, -1, -1, -1, -1, -1, 1107, -1, -1, -1, 1242, -1, -1, 1350, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "ca" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 972, 1026, -1, -1, -1, -1, -1, -1, + // "cas" + -1, -1, -1, -1, -1, 999, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "case" + (tt._case << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "cat" + -1, -1, -1, 1053, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "catc" + -1, -1, -1, -1, -1, -1, -1, -1, 1080, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "catch" + (tt._catch << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "ch" + -1, -1, -1, -1, -1, 1134, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "che" + -1, -1, -1, 1161, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "chec" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1188, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "check" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1215, -1, -1, -1, -1, -1, -1, -1, + // "checks" + ContextualKeyword._checks << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "cl" + -1, 1269, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "cla" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1296, -1, -1, -1, -1, -1, -1, -1, + // "clas" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1323, -1, -1, -1, -1, -1, -1, -1, + // "class" + (tt._class << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "co" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1377, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "con" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1404, 1620, -1, -1, -1, -1, -1, -1, + // "cons" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1431, -1, -1, -1, -1, -1, -1, + // "const" + (tt._const << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1458, -1, -1, -1, -1, -1, -1, -1, -1, + // "constr" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1485, -1, -1, -1, -1, -1, + // "constru" + -1, -1, -1, 1512, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "construc" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1539, -1, -1, -1, -1, -1, -1, + // "construct" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1566, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "constructo" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1593, -1, -1, -1, -1, -1, -1, -1, -1, + // "constructor" + ContextualKeyword._constructor << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "cont" + -1, -1, -1, -1, -1, -1, -1, -1, -1, 1647, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "conti" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1674, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "contin" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1701, -1, -1, -1, -1, -1, + // "continu" + -1, -1, -1, -1, -1, 1728, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "continue" + (tt._continue << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "d" + -1, -1, -1, -1, -1, 1782, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2349, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "de" + -1, -1, 1809, 1971, -1, -1, 2106, -1, -1, -1, -1, -1, 2241, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "deb" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1836, -1, -1, -1, -1, -1, + // "debu" + -1, -1, -1, -1, -1, -1, -1, 1863, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "debug" 
+ -1, -1, -1, -1, -1, -1, -1, 1890, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "debugg" + -1, -1, -1, -1, -1, 1917, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "debugge" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1944, -1, -1, -1, -1, -1, -1, -1, -1, + // "debugger" + (tt._debugger << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "dec" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1998, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "decl" + -1, 2025, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "decla" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2052, -1, -1, -1, -1, -1, -1, -1, -1, + // "declar" + -1, -1, -1, -1, -1, 2079, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "declare" + ContextualKeyword._declare << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "def" + -1, 2133, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "defa" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2160, -1, -1, -1, -1, -1, + // "defau" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2187, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "defaul" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2214, -1, -1, -1, -1, -1, -1, + // "default" + (tt._default << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "del" + -1, -1, -1, -1, -1, 2268, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "dele" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2295, -1, -1, -1, -1, -1, -1, + // "delet" + -1, -1, -1, -1, -1, 2322, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "delete" + (tt._delete << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "do" + (tt._do << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "e" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2403, -1, 2484, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2565, -1, -1, + // "el" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2430, -1, -1, -1, -1, -1, -1, -1, + // "els" + -1, -1, -1, -1, -1, 2457, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "else" + (tt._else << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "en" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2511, -1, -1, -1, -1, -1, + // "enu" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2538, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "enum" + ContextualKeyword._enum << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "ex" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2592, -1, -1, -1, 2727, -1, -1, -1, -1, -1, -1, + // "exp" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 
-1, 2619, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "expo" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2646, -1, -1, -1, -1, -1, -1, -1, -1, + // "expor" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2673, -1, -1, -1, -1, -1, -1, + // "export" + (tt._export << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2700, -1, -1, -1, -1, -1, -1, -1, + // "exports" + ContextualKeyword._exports << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "ext" + -1, -1, -1, -1, -1, 2754, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "exte" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2781, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "exten" + -1, -1, -1, -1, 2808, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "extend" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2835, -1, -1, -1, -1, -1, -1, -1, + // "extends" + (tt._extends << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "f" + -1, 2889, -1, -1, -1, -1, -1, -1, -1, 2997, -1, -1, -1, -1, -1, 3159, -1, -1, 3213, -1, -1, 3294, -1, -1, -1, -1, -1, + // "fa" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2916, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "fal" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2943, -1, -1, -1, -1, -1, -1, -1, + // "fals" + -1, -1, -1, -1, -1, 2970, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "false" + (tt._false << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "fi" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3024, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "fin" + -1, 3051, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "fina" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3078, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "final" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3105, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "finall" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3132, -1, + // "finally" + (tt._finally << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "fo" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3186, -1, -1, -1, -1, -1, -1, -1, -1, + // "for" + (tt._for << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "fr" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3240, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "fro" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3267, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "from" + ContextualKeyword._from << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "fu" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3321, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "fun" + -1, -1, -1, 3348, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // 
"func" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3375, -1, -1, -1, -1, -1, -1, + // "funct" + -1, -1, -1, -1, -1, -1, -1, -1, -1, 3402, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "functi" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3429, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "functio" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3456, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "function" + (tt._function << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "g" + -1, -1, -1, -1, -1, 3510, -1, -1, -1, -1, -1, -1, 3564, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "ge" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3537, -1, -1, -1, -1, -1, -1, + // "get" + ContextualKeyword._get << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "gl" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3591, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "glo" + -1, -1, 3618, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "glob" + -1, 3645, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "globa" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3672, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "global" + ContextualKeyword._global << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "i" + -1, -1, -1, -1, -1, -1, 3726, -1, -1, -1, -1, -1, -1, 3753, 4077, -1, -1, -1, -1, 4590, -1, -1, -1, -1, -1, -1, -1, + // "if" + (tt._if << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "im" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3780, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "imp" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3807, -1, -1, 3996, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "impl" + -1, -1, -1, -1, -1, 3834, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "imple" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3861, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "implem" + -1, -1, -1, -1, -1, 3888, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "impleme" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3915, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "implemen" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3942, -1, -1, -1, -1, -1, -1, + // "implement" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3969, -1, -1, -1, -1, -1, -1, -1, + // "implements" + ContextualKeyword._implements << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "impo" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 4023, -1, -1, -1, -1, -1, -1, -1, -1, + // "impor" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 4050, -1, -1, -1, -1, -1, -1, + // "import" + (tt._import << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "in" + (tt._in << 1) + 1, -1, -1, -1, -1, -1, 
4104, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 4185, 4401, -1, -1, -1, -1, -1, -1, + // "inf" + -1, -1, -1, -1, -1, 4131, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "infe" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 4158, -1, -1, -1, -1, -1, -1, -1, -1, + // "infer" + ContextualKeyword._infer << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "ins" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 4212, -1, -1, -1, -1, -1, -1, + // "inst" + -1, 4239, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "insta" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 4266, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "instan" + -1, -1, -1, 4293, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "instanc" + -1, -1, -1, -1, -1, 4320, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "instance" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 4347, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "instanceo" + -1, -1, -1, -1, -1, -1, 4374, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "instanceof" + (tt._instanceof << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "int" + -1, -1, -1, -1, -1, 4428, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "inte" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 4455, -1, -1, -1, -1, -1, -1, -1, -1, + // "inter" + -1, -1, -1, -1, -1, -1, 4482, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "interf" + -1, 4509, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "interfa" + -1, -1, -1, 4536, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "interfac" + -1, -1, -1, -1, -1, 4563, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "interface" + ContextualKeyword._interface << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "is" + ContextualKeyword._is << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "k" + -1, -1, -1, -1, -1, 4644, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "ke" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 4671, -1, + // "key" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 4698, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "keyo" + -1, -1, -1, -1, -1, -1, 4725, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "keyof" + ContextualKeyword._keyof << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "l" + -1, -1, -1, -1, -1, 4779, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "le" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 4806, -1, -1, -1, -1, -1, -1, + // "let" + (tt._let << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "m" + -1, -1, -1, -1, -1, -1, -1, -1, -1, 4860, -1, -1, -1, -1, -1, 4995, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "mi" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 4887, -1, -1, + // "mix" + -1, -1, -1, -1, -1, -1, -1, -1, -1, 4914, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "mixi" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 4941, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "mixin" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 4968, -1, -1, -1, -1, -1, -1, -1, + // "mixins" + ContextualKeyword._mixins << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "mo" + -1, -1, -1, -1, 5022, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "mod" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 5049, -1, -1, -1, -1, -1, + // "modu" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 5076, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "modul" + -1, -1, -1, -1, -1, 5103, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "module" + ContextualKeyword._module << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "n" + -1, 5157, -1, -1, -1, 5373, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 5427, -1, -1, -1, -1, -1, + // "na" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 5184, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "nam" + -1, -1, -1, -1, -1, 5211, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "name" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 5238, -1, -1, -1, -1, -1, -1, -1, + // "names" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 5265, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "namesp" + -1, 5292, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "namespa" + -1, -1, -1, 5319, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "namespac" + -1, -1, -1, -1, -1, 5346, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "namespace" + ContextualKeyword._namespace << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "ne" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 5400, -1, -1, -1, + // "new" + (tt._new << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "nu" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 5454, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "nul" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 5481, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "null" + (tt._null << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "o" + -1, -1, -1, -1, -1, -1, 5535, -1, -1, -1, -1, -1, -1, -1, -1, -1, 5562, -1, -1, -1, -1, 5697, 5751, -1, -1, -1, -1, + // "of" + ContextualKeyword._of << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "op" + -1, 5589, -1, 
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "opa" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 5616, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "opaq" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 5643, -1, -1, -1, -1, -1, + // "opaqu" + -1, -1, -1, -1, -1, 5670, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "opaque" + ContextualKeyword._opaque << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "ou" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 5724, -1, -1, -1, -1, -1, -1, + // "out" + ContextualKeyword._out << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "ov" + -1, -1, -1, -1, -1, 5778, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "ove" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 5805, -1, -1, -1, -1, -1, -1, -1, -1, + // "over" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 5832, -1, -1, -1, -1, -1, -1, -1, -1, + // "overr" + -1, -1, -1, -1, -1, -1, -1, -1, -1, 5859, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "overri" + -1, -1, -1, -1, 5886, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "overrid" + -1, -1, -1, -1, -1, 5913, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "override" + ContextualKeyword._override << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "p" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 5967, -1, -1, 6345, -1, -1, -1, -1, -1, + // "pr" + -1, -1, -1, -1, -1, -1, -1, -1, -1, 5994, -1, -1, -1, -1, -1, 6129, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "pri" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6021, -1, -1, -1, -1, + // "priv" + -1, 6048, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "priva" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6075, -1, -1, -1, -1, -1, -1, + // "privat" + -1, -1, -1, -1, -1, 6102, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "private" + ContextualKeyword._private << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "pro" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6156, -1, -1, -1, -1, -1, -1, + // "prot" + -1, -1, -1, -1, -1, 6183, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6318, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "prote" + -1, -1, -1, 6210, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "protec" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6237, -1, -1, -1, -1, -1, -1, + // "protect" + -1, -1, -1, -1, -1, 6264, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "protecte" + -1, -1, -1, -1, 6291, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "protected" + ContextualKeyword._protected << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "proto" + ContextualKeyword._proto << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "pu" + -1, -1, 6372, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "pub" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6399, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "publ" + -1, -1, -1, -1, -1, -1, -1, -1, -1, 6426, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "publi" + -1, -1, -1, 6453, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "public" + ContextualKeyword._public << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "r" + -1, -1, -1, -1, -1, 6507, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "re" + -1, 6534, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6696, -1, -1, 6831, -1, -1, -1, -1, -1, -1, + // "rea" + -1, -1, -1, -1, 6561, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "read" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6588, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "reado" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6615, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "readon" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6642, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "readonl" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6669, -1, + // "readonly" + ContextualKeyword._readonly << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "req" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6723, -1, -1, -1, -1, -1, + // "requ" + -1, -1, -1, -1, -1, -1, -1, -1, -1, 6750, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "requi" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6777, -1, -1, -1, -1, -1, -1, -1, -1, + // "requir" + -1, -1, -1, -1, -1, 6804, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "require" + ContextualKeyword._require << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "ret" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6858, -1, -1, -1, -1, -1, + // "retu" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6885, -1, -1, -1, -1, -1, -1, -1, -1, + // "retur" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6912, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "return" + (tt._return << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "s" + -1, 6966, -1, -1, -1, 7182, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 7236, 7371, -1, 7479, -1, 7614, -1, + // "sa" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6993, -1, -1, -1, -1, -1, -1, + // "sat" + -1, -1, -1, -1, -1, -1, -1, -1, -1, 7020, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "sati" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 7047, -1, -1, -1, -1, -1, -1, -1, + // "satis" + 
-1, -1, -1, -1, -1, -1, 7074, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "satisf" + -1, -1, -1, -1, -1, -1, -1, -1, -1, 7101, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "satisfi" + -1, -1, -1, -1, -1, 7128, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "satisfie" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 7155, -1, -1, -1, -1, -1, -1, -1, + // "satisfies" + ContextualKeyword._satisfies << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "se" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 7209, -1, -1, -1, -1, -1, -1, + // "set" + ContextualKeyword._set << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "st" + -1, 7263, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "sta" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 7290, -1, -1, -1, -1, -1, -1, + // "stat" + -1, -1, -1, -1, -1, -1, -1, -1, -1, 7317, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "stati" + -1, -1, -1, 7344, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "static" + ContextualKeyword._static << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "su" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 7398, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "sup" + -1, -1, -1, -1, -1, 7425, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "supe" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 7452, -1, -1, -1, -1, -1, -1, -1, -1, + // "super" + (tt._super << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "sw" + -1, -1, -1, -1, -1, -1, -1, -1, -1, 7506, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "swi" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 7533, -1, -1, -1, -1, -1, -1, + // "swit" + -1, -1, -1, 7560, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "switc" + -1, -1, -1, -1, -1, -1, -1, -1, 7587, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "switch" + (tt._switch << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "sy" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 7641, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "sym" + -1, -1, 7668, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "symb" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 7695, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "symbo" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 7722, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "symbol" + ContextualKeyword._symbol << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "t" + -1, -1, -1, -1, -1, -1, -1, -1, 7776, -1, -1, -1, -1, -1, -1, -1, -1, -1, 7938, -1, -1, -1, -1, -1, -1, 8046, -1, + // "th" + -1, -1, -1, -1, -1, -1, -1, -1, -1, 7803, -1, -1, -1, -1, -1, 
-1, -1, -1, 7857, -1, -1, -1, -1, -1, -1, -1, -1, + // "thi" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 7830, -1, -1, -1, -1, -1, -1, -1, + // "this" + (tt._this << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "thr" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 7884, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "thro" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 7911, -1, -1, -1, + // "throw" + (tt._throw << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "tr" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 7965, -1, -1, -1, 8019, -1, + // "tru" + -1, -1, -1, -1, -1, 7992, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "true" + (tt._true << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "try" + (tt._try << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "ty" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 8073, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "typ" + -1, -1, -1, -1, -1, 8100, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "type" + ContextualKeyword._type << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 8127, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "typeo" + -1, -1, -1, -1, -1, -1, 8154, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "typeof" + (tt._typeof << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "u" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 8208, -1, -1, -1, -1, 8343, -1, -1, -1, -1, -1, -1, -1, + // "un" + -1, -1, -1, -1, -1, -1, -1, -1, -1, 8235, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "uni" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 8262, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "uniq" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 8289, -1, -1, -1, -1, -1, + // "uniqu" + -1, -1, -1, -1, -1, 8316, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "unique" + ContextualKeyword._unique << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "us" + -1, -1, -1, -1, -1, -1, -1, -1, -1, 8370, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "usi" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 8397, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "usin" + -1, -1, -1, -1, -1, -1, -1, 8424, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "using" + ContextualKeyword._using << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "v" + -1, 8478, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 8532, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "va" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 8505, -1, -1, -1, -1, -1, -1, -1, -1, + // "var" + (tt._var << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 
-1, + // "vo" + -1, -1, -1, -1, -1, -1, -1, -1, -1, 8559, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "voi" + -1, -1, -1, -1, 8586, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "void" + (tt._void << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "w" + -1, -1, -1, -1, -1, -1, -1, -1, 8640, 8748, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "wh" + -1, -1, -1, -1, -1, -1, -1, -1, -1, 8667, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "whi" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 8694, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "whil" + -1, -1, -1, -1, -1, 8721, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "while" + (tt._while << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "wi" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 8775, -1, -1, -1, -1, -1, -1, + // "wit" + -1, -1, -1, -1, -1, -1, -1, -1, 8802, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "with" + (tt._with << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "y" + -1, -1, -1, -1, -1, -1, -1, -1, -1, 8856, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "yi" + -1, -1, -1, -1, -1, 8883, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "yie" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 8910, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "yiel" + -1, -1, -1, -1, 8937, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "yield" + (tt._yield << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, +]); diff --git a/node_modules/sucrase/dist/esm/parser/tokenizer/state.js b/node_modules/sucrase/dist/esm/parser/tokenizer/state.js new file mode 100644 index 0000000..940cde0 --- /dev/null +++ b/node_modules/sucrase/dist/esm/parser/tokenizer/state.js @@ -0,0 +1,106 @@ + +import {ContextualKeyword} from "./keywords"; +import { TokenType as tt} from "./types"; + +export class Scope { + + + + + constructor(startTokenIndex, endTokenIndex, isFunctionScope) { + this.startTokenIndex = startTokenIndex; + this.endTokenIndex = endTokenIndex; + this.isFunctionScope = isFunctionScope; + } +} + +export class StateSnapshot { + constructor( + potentialArrowAt, + noAnonFunctionType, + inDisallowConditionalTypesContext, + tokensLength, + scopesLength, + pos, + type, + contextualKeyword, + start, + end, + isType, + scopeDepth, + error, + ) {;this.potentialArrowAt = potentialArrowAt;this.noAnonFunctionType = noAnonFunctionType;this.inDisallowConditionalTypesContext = inDisallowConditionalTypesContext;this.tokensLength = tokensLength;this.scopesLength = scopesLength;this.pos = pos;this.type = type;this.contextualKeyword = contextualKeyword;this.start = start;this.end = end;this.isType = isType;this.scopeDepth = scopeDepth;this.error = error;} +} + +export default class State {constructor() { 
State.prototype.__init.call(this);State.prototype.__init2.call(this);State.prototype.__init3.call(this);State.prototype.__init4.call(this);State.prototype.__init5.call(this);State.prototype.__init6.call(this);State.prototype.__init7.call(this);State.prototype.__init8.call(this);State.prototype.__init9.call(this);State.prototype.__init10.call(this);State.prototype.__init11.call(this);State.prototype.__init12.call(this);State.prototype.__init13.call(this); } + // Used to signify the start of a potential arrow function + __init() {this.potentialArrowAt = -1} + + // Used by Flow to handle an edge case involving function type parsing. + __init2() {this.noAnonFunctionType = false} + + // Used by TypeScript to handle ambiguities when parsing conditional types. + __init3() {this.inDisallowConditionalTypesContext = false} + + // Token store. + __init4() {this.tokens = []} + + // Array of all observed scopes, ordered by their ending position. + __init5() {this.scopes = []} + + // The current position of the tokenizer in the input. + __init6() {this.pos = 0} + + // Information about the current token. + __init7() {this.type = tt.eof} + __init8() {this.contextualKeyword = ContextualKeyword.NONE} + __init9() {this.start = 0} + __init10() {this.end = 0} + + __init11() {this.isType = false} + __init12() {this.scopeDepth = 0} + + /** + * If the parser is in an error state, then the token is always tt.eof and all functions can + * keep executing but should be written so they don't get into an infinite loop in this situation. + * + * This approach, combined with the ability to snapshot and restore state, allows us to implement + * backtracking without exceptions and without needing to explicitly propagate error states + * everywhere. + */ + __init13() {this.error = null} + + snapshot() { + return new StateSnapshot( + this.potentialArrowAt, + this.noAnonFunctionType, + this.inDisallowConditionalTypesContext, + this.tokens.length, + this.scopes.length, + this.pos, + this.type, + this.contextualKeyword, + this.start, + this.end, + this.isType, + this.scopeDepth, + this.error, + ); + } + + restoreFromSnapshot(snapshot) { + this.potentialArrowAt = snapshot.potentialArrowAt; + this.noAnonFunctionType = snapshot.noAnonFunctionType; + this.inDisallowConditionalTypesContext = snapshot.inDisallowConditionalTypesContext; + this.tokens.length = snapshot.tokensLength; + this.scopes.length = snapshot.scopesLength; + this.pos = snapshot.pos; + this.type = snapshot.type; + this.contextualKeyword = snapshot.contextualKeyword; + this.start = snapshot.start; + this.end = snapshot.end; + this.isType = snapshot.isType; + this.scopeDepth = snapshot.scopeDepth; + this.error = snapshot.error; + } +} diff --git a/node_modules/sucrase/dist/esm/parser/tokenizer/types.js b/node_modules/sucrase/dist/esm/parser/tokenizer/types.js new file mode 100644 index 0000000..9746ad6 --- /dev/null +++ b/node_modules/sucrase/dist/esm/parser/tokenizer/types.js @@ -0,0 +1,361 @@ +// Generated file, do not edit! Run "yarn generate" to re-generate this file. +/* istanbul ignore file */ +/** + * Enum of all token types, with bit fields to signify meaningful properties. + */ +export var TokenType; (function (TokenType) { + // Precedence 0 means not an operator; otherwise it is a positive number up to 12. 
+ const PRECEDENCE_MASK = 0xf; TokenType[TokenType["PRECEDENCE_MASK"] = PRECEDENCE_MASK] = "PRECEDENCE_MASK"; + const IS_KEYWORD = 1 << 4; TokenType[TokenType["IS_KEYWORD"] = IS_KEYWORD] = "IS_KEYWORD"; + const IS_ASSIGN = 1 << 5; TokenType[TokenType["IS_ASSIGN"] = IS_ASSIGN] = "IS_ASSIGN"; + const IS_RIGHT_ASSOCIATIVE = 1 << 6; TokenType[TokenType["IS_RIGHT_ASSOCIATIVE"] = IS_RIGHT_ASSOCIATIVE] = "IS_RIGHT_ASSOCIATIVE"; + const IS_PREFIX = 1 << 7; TokenType[TokenType["IS_PREFIX"] = IS_PREFIX] = "IS_PREFIX"; + const IS_POSTFIX = 1 << 8; TokenType[TokenType["IS_POSTFIX"] = IS_POSTFIX] = "IS_POSTFIX"; + const IS_EXPRESSION_START = 1 << 9; TokenType[TokenType["IS_EXPRESSION_START"] = IS_EXPRESSION_START] = "IS_EXPRESSION_START"; + + const num = 512; TokenType[TokenType["num"] = num] = "num"; // num startsExpr + const bigint = 1536; TokenType[TokenType["bigint"] = bigint] = "bigint"; // bigint startsExpr + const decimal = 2560; TokenType[TokenType["decimal"] = decimal] = "decimal"; // decimal startsExpr + const regexp = 3584; TokenType[TokenType["regexp"] = regexp] = "regexp"; // regexp startsExpr + const string = 4608; TokenType[TokenType["string"] = string] = "string"; // string startsExpr + const name = 5632; TokenType[TokenType["name"] = name] = "name"; // name startsExpr + const eof = 6144; TokenType[TokenType["eof"] = eof] = "eof"; // eof + const bracketL = 7680; TokenType[TokenType["bracketL"] = bracketL] = "bracketL"; // [ startsExpr + const bracketR = 8192; TokenType[TokenType["bracketR"] = bracketR] = "bracketR"; // ] + const braceL = 9728; TokenType[TokenType["braceL"] = braceL] = "braceL"; // { startsExpr + const braceBarL = 10752; TokenType[TokenType["braceBarL"] = braceBarL] = "braceBarL"; // {| startsExpr + const braceR = 11264; TokenType[TokenType["braceR"] = braceR] = "braceR"; // } + const braceBarR = 12288; TokenType[TokenType["braceBarR"] = braceBarR] = "braceBarR"; // |} + const parenL = 13824; TokenType[TokenType["parenL"] = parenL] = "parenL"; // ( startsExpr + const parenR = 14336; TokenType[TokenType["parenR"] = parenR] = "parenR"; // ) + const comma = 15360; TokenType[TokenType["comma"] = comma] = "comma"; // , + const semi = 16384; TokenType[TokenType["semi"] = semi] = "semi"; // ; + const colon = 17408; TokenType[TokenType["colon"] = colon] = "colon"; // : + const doubleColon = 18432; TokenType[TokenType["doubleColon"] = doubleColon] = "doubleColon"; // :: + const dot = 19456; TokenType[TokenType["dot"] = dot] = "dot"; // . + const question = 20480; TokenType[TokenType["question"] = question] = "question"; // ? + const questionDot = 21504; TokenType[TokenType["questionDot"] = questionDot] = "questionDot"; // ?. + const arrow = 22528; TokenType[TokenType["arrow"] = arrow] = "arrow"; // => + const template = 23552; TokenType[TokenType["template"] = template] = "template"; // template + const ellipsis = 24576; TokenType[TokenType["ellipsis"] = ellipsis] = "ellipsis"; // ... 
+ const backQuote = 25600; TokenType[TokenType["backQuote"] = backQuote] = "backQuote"; // ` + const dollarBraceL = 27136; TokenType[TokenType["dollarBraceL"] = dollarBraceL] = "dollarBraceL"; // ${ startsExpr + const at = 27648; TokenType[TokenType["at"] = at] = "at"; // @ + const hash = 29184; TokenType[TokenType["hash"] = hash] = "hash"; // # startsExpr + const eq = 29728; TokenType[TokenType["eq"] = eq] = "eq"; // = isAssign + const assign = 30752; TokenType[TokenType["assign"] = assign] = "assign"; // _= isAssign + const preIncDec = 32640; TokenType[TokenType["preIncDec"] = preIncDec] = "preIncDec"; // ++/-- prefix postfix startsExpr + const postIncDec = 33664; TokenType[TokenType["postIncDec"] = postIncDec] = "postIncDec"; // ++/-- prefix postfix startsExpr + const bang = 34432; TokenType[TokenType["bang"] = bang] = "bang"; // ! prefix startsExpr + const tilde = 35456; TokenType[TokenType["tilde"] = tilde] = "tilde"; // ~ prefix startsExpr + const pipeline = 35841; TokenType[TokenType["pipeline"] = pipeline] = "pipeline"; // |> prec:1 + const nullishCoalescing = 36866; TokenType[TokenType["nullishCoalescing"] = nullishCoalescing] = "nullishCoalescing"; // ?? prec:2 + const logicalOR = 37890; TokenType[TokenType["logicalOR"] = logicalOR] = "logicalOR"; // || prec:2 + const logicalAND = 38915; TokenType[TokenType["logicalAND"] = logicalAND] = "logicalAND"; // && prec:3 + const bitwiseOR = 39940; TokenType[TokenType["bitwiseOR"] = bitwiseOR] = "bitwiseOR"; // | prec:4 + const bitwiseXOR = 40965; TokenType[TokenType["bitwiseXOR"] = bitwiseXOR] = "bitwiseXOR"; // ^ prec:5 + const bitwiseAND = 41990; TokenType[TokenType["bitwiseAND"] = bitwiseAND] = "bitwiseAND"; // & prec:6 + const equality = 43015; TokenType[TokenType["equality"] = equality] = "equality"; // ==/!= prec:7 + const lessThan = 44040; TokenType[TokenType["lessThan"] = lessThan] = "lessThan"; // < prec:8 + const greaterThan = 45064; TokenType[TokenType["greaterThan"] = greaterThan] = "greaterThan"; // > prec:8 + const relationalOrEqual = 46088; TokenType[TokenType["relationalOrEqual"] = relationalOrEqual] = "relationalOrEqual"; // <=/>= prec:8 + const bitShiftL = 47113; TokenType[TokenType["bitShiftL"] = bitShiftL] = "bitShiftL"; // << prec:9 + const bitShiftR = 48137; TokenType[TokenType["bitShiftR"] = bitShiftR] = "bitShiftR"; // >>/>>> prec:9 + const plus = 49802; TokenType[TokenType["plus"] = plus] = "plus"; // + prec:10 prefix startsExpr + const minus = 50826; TokenType[TokenType["minus"] = minus] = "minus"; // - prec:10 prefix startsExpr + const modulo = 51723; TokenType[TokenType["modulo"] = modulo] = "modulo"; // % prec:11 startsExpr + const star = 52235; TokenType[TokenType["star"] = star] = "star"; // * prec:11 + const slash = 53259; TokenType[TokenType["slash"] = slash] = "slash"; // / prec:11 + const exponent = 54348; TokenType[TokenType["exponent"] = exponent] = "exponent"; // ** prec:12 rightAssociative + const jsxName = 55296; TokenType[TokenType["jsxName"] = jsxName] = "jsxName"; // jsxName + const jsxText = 56320; TokenType[TokenType["jsxText"] = jsxText] = "jsxText"; // jsxText + const jsxEmptyText = 57344; TokenType[TokenType["jsxEmptyText"] = jsxEmptyText] = "jsxEmptyText"; // jsxEmptyText + const jsxTagStart = 58880; TokenType[TokenType["jsxTagStart"] = jsxTagStart] = "jsxTagStart"; // jsxTagStart startsExpr + const jsxTagEnd = 59392; TokenType[TokenType["jsxTagEnd"] = jsxTagEnd] = "jsxTagEnd"; // jsxTagEnd + const typeParameterStart = 60928; TokenType[TokenType["typeParameterStart"] = typeParameterStart] 
= "typeParameterStart"; // typeParameterStart startsExpr + const nonNullAssertion = 61440; TokenType[TokenType["nonNullAssertion"] = nonNullAssertion] = "nonNullAssertion"; // nonNullAssertion + const _break = 62480; TokenType[TokenType["_break"] = _break] = "_break"; // break keyword + const _case = 63504; TokenType[TokenType["_case"] = _case] = "_case"; // case keyword + const _catch = 64528; TokenType[TokenType["_catch"] = _catch] = "_catch"; // catch keyword + const _continue = 65552; TokenType[TokenType["_continue"] = _continue] = "_continue"; // continue keyword + const _debugger = 66576; TokenType[TokenType["_debugger"] = _debugger] = "_debugger"; // debugger keyword + const _default = 67600; TokenType[TokenType["_default"] = _default] = "_default"; // default keyword + const _do = 68624; TokenType[TokenType["_do"] = _do] = "_do"; // do keyword + const _else = 69648; TokenType[TokenType["_else"] = _else] = "_else"; // else keyword + const _finally = 70672; TokenType[TokenType["_finally"] = _finally] = "_finally"; // finally keyword + const _for = 71696; TokenType[TokenType["_for"] = _for] = "_for"; // for keyword + const _function = 73232; TokenType[TokenType["_function"] = _function] = "_function"; // function keyword startsExpr + const _if = 73744; TokenType[TokenType["_if"] = _if] = "_if"; // if keyword + const _return = 74768; TokenType[TokenType["_return"] = _return] = "_return"; // return keyword + const _switch = 75792; TokenType[TokenType["_switch"] = _switch] = "_switch"; // switch keyword + const _throw = 77456; TokenType[TokenType["_throw"] = _throw] = "_throw"; // throw keyword prefix startsExpr + const _try = 77840; TokenType[TokenType["_try"] = _try] = "_try"; // try keyword + const _var = 78864; TokenType[TokenType["_var"] = _var] = "_var"; // var keyword + const _let = 79888; TokenType[TokenType["_let"] = _let] = "_let"; // let keyword + const _const = 80912; TokenType[TokenType["_const"] = _const] = "_const"; // const keyword + const _while = 81936; TokenType[TokenType["_while"] = _while] = "_while"; // while keyword + const _with = 82960; TokenType[TokenType["_with"] = _with] = "_with"; // with keyword + const _new = 84496; TokenType[TokenType["_new"] = _new] = "_new"; // new keyword startsExpr + const _this = 85520; TokenType[TokenType["_this"] = _this] = "_this"; // this keyword startsExpr + const _super = 86544; TokenType[TokenType["_super"] = _super] = "_super"; // super keyword startsExpr + const _class = 87568; TokenType[TokenType["_class"] = _class] = "_class"; // class keyword startsExpr + const _extends = 88080; TokenType[TokenType["_extends"] = _extends] = "_extends"; // extends keyword + const _export = 89104; TokenType[TokenType["_export"] = _export] = "_export"; // export keyword + const _import = 90640; TokenType[TokenType["_import"] = _import] = "_import"; // import keyword startsExpr + const _yield = 91664; TokenType[TokenType["_yield"] = _yield] = "_yield"; // yield keyword startsExpr + const _null = 92688; TokenType[TokenType["_null"] = _null] = "_null"; // null keyword startsExpr + const _true = 93712; TokenType[TokenType["_true"] = _true] = "_true"; // true keyword startsExpr + const _false = 94736; TokenType[TokenType["_false"] = _false] = "_false"; // false keyword startsExpr + const _in = 95256; TokenType[TokenType["_in"] = _in] = "_in"; // in prec:8 keyword + const _instanceof = 96280; TokenType[TokenType["_instanceof"] = _instanceof] = "_instanceof"; // instanceof prec:8 keyword + const _typeof = 97936; TokenType[TokenType["_typeof"] = 
_typeof] = "_typeof"; // typeof keyword prefix startsExpr + const _void = 98960; TokenType[TokenType["_void"] = _void] = "_void"; // void keyword prefix startsExpr + const _delete = 99984; TokenType[TokenType["_delete"] = _delete] = "_delete"; // delete keyword prefix startsExpr + const _async = 100880; TokenType[TokenType["_async"] = _async] = "_async"; // async keyword startsExpr + const _get = 101904; TokenType[TokenType["_get"] = _get] = "_get"; // get keyword startsExpr + const _set = 102928; TokenType[TokenType["_set"] = _set] = "_set"; // set keyword startsExpr + const _declare = 103952; TokenType[TokenType["_declare"] = _declare] = "_declare"; // declare keyword startsExpr + const _readonly = 104976; TokenType[TokenType["_readonly"] = _readonly] = "_readonly"; // readonly keyword startsExpr + const _abstract = 106000; TokenType[TokenType["_abstract"] = _abstract] = "_abstract"; // abstract keyword startsExpr + const _static = 107024; TokenType[TokenType["_static"] = _static] = "_static"; // static keyword startsExpr + const _public = 107536; TokenType[TokenType["_public"] = _public] = "_public"; // public keyword + const _private = 108560; TokenType[TokenType["_private"] = _private] = "_private"; // private keyword + const _protected = 109584; TokenType[TokenType["_protected"] = _protected] = "_protected"; // protected keyword + const _override = 110608; TokenType[TokenType["_override"] = _override] = "_override"; // override keyword + const _as = 112144; TokenType[TokenType["_as"] = _as] = "_as"; // as keyword startsExpr + const _enum = 113168; TokenType[TokenType["_enum"] = _enum] = "_enum"; // enum keyword startsExpr + const _type = 114192; TokenType[TokenType["_type"] = _type] = "_type"; // type keyword startsExpr + const _implements = 115216; TokenType[TokenType["_implements"] = _implements] = "_implements"; // implements keyword startsExpr +})(TokenType || (TokenType = {})); +export function formatTokenType(tokenType) { + switch (tokenType) { + case TokenType.num: + return "num"; + case TokenType.bigint: + return "bigint"; + case TokenType.decimal: + return "decimal"; + case TokenType.regexp: + return "regexp"; + case TokenType.string: + return "string"; + case TokenType.name: + return "name"; + case TokenType.eof: + return "eof"; + case TokenType.bracketL: + return "["; + case TokenType.bracketR: + return "]"; + case TokenType.braceL: + return "{"; + case TokenType.braceBarL: + return "{|"; + case TokenType.braceR: + return "}"; + case TokenType.braceBarR: + return "|}"; + case TokenType.parenL: + return "("; + case TokenType.parenR: + return ")"; + case TokenType.comma: + return ","; + case TokenType.semi: + return ";"; + case TokenType.colon: + return ":"; + case TokenType.doubleColon: + return "::"; + case TokenType.dot: + return "."; + case TokenType.question: + return "?"; + case TokenType.questionDot: + return "?."; + case TokenType.arrow: + return "=>"; + case TokenType.template: + return "template"; + case TokenType.ellipsis: + return "..."; + case TokenType.backQuote: + return "`"; + case TokenType.dollarBraceL: + return "${"; + case TokenType.at: + return "@"; + case TokenType.hash: + return "#"; + case TokenType.eq: + return "="; + case TokenType.assign: + return "_="; + case TokenType.preIncDec: + return "++/--"; + case TokenType.postIncDec: + return "++/--"; + case TokenType.bang: + return "!"; + case TokenType.tilde: + return "~"; + case TokenType.pipeline: + return "|>"; + case TokenType.nullishCoalescing: + return "??"; + case TokenType.logicalOR: + return 
"||"; + case TokenType.logicalAND: + return "&&"; + case TokenType.bitwiseOR: + return "|"; + case TokenType.bitwiseXOR: + return "^"; + case TokenType.bitwiseAND: + return "&"; + case TokenType.equality: + return "==/!="; + case TokenType.lessThan: + return "<"; + case TokenType.greaterThan: + return ">"; + case TokenType.relationalOrEqual: + return "<=/>="; + case TokenType.bitShiftL: + return "<<"; + case TokenType.bitShiftR: + return ">>/>>>"; + case TokenType.plus: + return "+"; + case TokenType.minus: + return "-"; + case TokenType.modulo: + return "%"; + case TokenType.star: + return "*"; + case TokenType.slash: + return "/"; + case TokenType.exponent: + return "**"; + case TokenType.jsxName: + return "jsxName"; + case TokenType.jsxText: + return "jsxText"; + case TokenType.jsxEmptyText: + return "jsxEmptyText"; + case TokenType.jsxTagStart: + return "jsxTagStart"; + case TokenType.jsxTagEnd: + return "jsxTagEnd"; + case TokenType.typeParameterStart: + return "typeParameterStart"; + case TokenType.nonNullAssertion: + return "nonNullAssertion"; + case TokenType._break: + return "break"; + case TokenType._case: + return "case"; + case TokenType._catch: + return "catch"; + case TokenType._continue: + return "continue"; + case TokenType._debugger: + return "debugger"; + case TokenType._default: + return "default"; + case TokenType._do: + return "do"; + case TokenType._else: + return "else"; + case TokenType._finally: + return "finally"; + case TokenType._for: + return "for"; + case TokenType._function: + return "function"; + case TokenType._if: + return "if"; + case TokenType._return: + return "return"; + case TokenType._switch: + return "switch"; + case TokenType._throw: + return "throw"; + case TokenType._try: + return "try"; + case TokenType._var: + return "var"; + case TokenType._let: + return "let"; + case TokenType._const: + return "const"; + case TokenType._while: + return "while"; + case TokenType._with: + return "with"; + case TokenType._new: + return "new"; + case TokenType._this: + return "this"; + case TokenType._super: + return "super"; + case TokenType._class: + return "class"; + case TokenType._extends: + return "extends"; + case TokenType._export: + return "export"; + case TokenType._import: + return "import"; + case TokenType._yield: + return "yield"; + case TokenType._null: + return "null"; + case TokenType._true: + return "true"; + case TokenType._false: + return "false"; + case TokenType._in: + return "in"; + case TokenType._instanceof: + return "instanceof"; + case TokenType._typeof: + return "typeof"; + case TokenType._void: + return "void"; + case TokenType._delete: + return "delete"; + case TokenType._async: + return "async"; + case TokenType._get: + return "get"; + case TokenType._set: + return "set"; + case TokenType._declare: + return "declare"; + case TokenType._readonly: + return "readonly"; + case TokenType._abstract: + return "abstract"; + case TokenType._static: + return "static"; + case TokenType._public: + return "public"; + case TokenType._private: + return "private"; + case TokenType._protected: + return "protected"; + case TokenType._override: + return "override"; + case TokenType._as: + return "as"; + case TokenType._enum: + return "enum"; + case TokenType._type: + return "type"; + case TokenType._implements: + return "implements"; + default: + return ""; + } +} diff --git a/node_modules/sucrase/dist/esm/parser/traverser/base.js b/node_modules/sucrase/dist/esm/parser/traverser/base.js new file mode 100644 index 0000000..df24ff7 --- /dev/null +++ 
b/node_modules/sucrase/dist/esm/parser/traverser/base.js @@ -0,0 +1,60 @@ +import State from "../tokenizer/state"; +import {charCodes} from "../util/charcodes"; + +export let isJSXEnabled; +export let isTypeScriptEnabled; +export let isFlowEnabled; +export let state; +export let input; +export let nextContextId; + +export function getNextContextId() { + return nextContextId++; +} + +// eslint-disable-next-line @typescript-eslint/no-explicit-any +export function augmentError(error) { + if ("pos" in error) { + const loc = locationForIndex(error.pos); + error.message += ` (${loc.line}:${loc.column})`; + error.loc = loc; + } + return error; +} + +export class Loc { + + + constructor(line, column) { + this.line = line; + this.column = column; + } +} + +export function locationForIndex(pos) { + let line = 1; + let column = 1; + for (let i = 0; i < pos; i++) { + if (input.charCodeAt(i) === charCodes.lineFeed) { + line++; + column = 1; + } else { + column++; + } + } + return new Loc(line, column); +} + +export function initParser( + inputCode, + isJSXEnabledArg, + isTypeScriptEnabledArg, + isFlowEnabledArg, +) { + input = inputCode; + state = new State(); + nextContextId = 1; + isJSXEnabled = isJSXEnabledArg; + isTypeScriptEnabled = isTypeScriptEnabledArg; + isFlowEnabled = isFlowEnabledArg; +} diff --git a/node_modules/sucrase/dist/esm/parser/traverser/expression.js b/node_modules/sucrase/dist/esm/parser/traverser/expression.js new file mode 100644 index 0000000..aa6717f --- /dev/null +++ b/node_modules/sucrase/dist/esm/parser/traverser/expression.js @@ -0,0 +1,1022 @@ +/* eslint max-len: 0 */ + +// A recursive descent parser operates by defining functions for all +// syntactic elements, and recursively calling those, each function +// advancing the input stream and returning an AST node. Precedence +// of constructs (for example, the fact that `!x[1]` means `!(x[1])` +// instead of `(!x)[1]` is handled by the fact that the parser +// function that parses unary prefix operators is called first, and +// in turn calls the function that parses `[]` subscripts — that +// way, it'll receive the node for `x[1]` already parsed, and wraps +// *that* in the unary operator node. +// +// Acorn uses an [operator precedence parser][opp] to handle binary +// operator precedence, because it is much more compact than using +// the technique outlined above, which uses different, nesting +// functions to specify precedence, for all of the ten binary +// precedence levels that JavaScript defines. 
+// +// [opp]: http://en.wikipedia.org/wiki/Operator-precedence_parser + +import { + flowParseArrow, + flowParseFunctionBodyAndFinish, + flowParseMaybeAssign, + flowParseSubscript, + flowParseSubscripts, + flowParseVariance, + flowStartParseAsyncArrowFromCallExpression, + flowStartParseNewArguments, + flowStartParseObjPropValue, +} from "../plugins/flow"; +import {jsxParseElement} from "../plugins/jsx/index"; +import {typedParseConditional, typedParseParenItem} from "../plugins/types"; +import { + tsParseArrow, + tsParseFunctionBodyAndFinish, + tsParseMaybeAssign, + tsParseSubscript, + tsParseType, + tsParseTypeAssertion, + tsStartParseAsyncArrowFromCallExpression, + tsStartParseObjPropValue, +} from "../plugins/typescript"; +import { + eat, + IdentifierRole, + lookaheadCharCode, + lookaheadType, + match, + next, + nextTemplateToken, + popTypeContext, + pushTypeContext, + rescan_gt, + retokenizeSlashAsRegex, +} from "../tokenizer/index"; +import {ContextualKeyword} from "../tokenizer/keywords"; +import {Scope} from "../tokenizer/state"; +import {TokenType, TokenType as tt} from "../tokenizer/types"; +import {charCodes} from "../util/charcodes"; +import {IS_IDENTIFIER_START} from "../util/identifier"; +import {getNextContextId, isFlowEnabled, isJSXEnabled, isTypeScriptEnabled, state} from "./base"; +import { + markPriorBindingIdentifier, + parseBindingIdentifier, + parseMaybeDefault, + parseRest, + parseSpread, +} from "./lval"; +import { + parseBlock, + parseBlockBody, + parseClass, + parseDecorators, + parseFunction, + parseFunctionParams, +} from "./statement"; +import { + canInsertSemicolon, + eatContextual, + expect, + expectContextual, + hasFollowingLineBreak, + hasPrecedingLineBreak, + isContextual, + unexpected, +} from "./util"; + +export class StopState { + + constructor(stop) { + this.stop = stop; + } +} + +// ### Expression parsing + +// These nest, from the most general expression type at the top to +// 'atomic', nondivisible expression types at the bottom. Most of +// the functions will simply let the function (s) below them parse, +// and, *if* the syntactic construct they handle is present, wrap +// the AST node that the inner parser gave them in another node. +export function parseExpression(noIn = false) { + parseMaybeAssign(noIn); + if (match(tt.comma)) { + while (eat(tt.comma)) { + parseMaybeAssign(noIn); + } + } +} + +/** + * noIn is used when parsing a for loop so that we don't interpret a following "in" as the binary + * operatior. + * isWithinParens is used to indicate that we're parsing something that might be a comma expression + * or might be an arrow function or might be a Flow type assertion (which requires explicit parens). + * In these cases, we should allow : and ?: after the initial "left" part. + */ +export function parseMaybeAssign(noIn = false, isWithinParens = false) { + if (isTypeScriptEnabled) { + return tsParseMaybeAssign(noIn, isWithinParens); + } else if (isFlowEnabled) { + return flowParseMaybeAssign(noIn, isWithinParens); + } else { + return baseParseMaybeAssign(noIn, isWithinParens); + } +} + +// Parse an assignment expression. This includes applications of +// operators like `+=`. +// Returns true if the expression was an arrow function. 
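Illustrative aside (not part of the vendored sucrase source): baseParseMaybeAssign and parseExprOp below read operator information straight from the TokenType bit fields declared in tokenizer/types.js above (PRECEDENCE_MASK, IS_KEYWORD, IS_ASSIGN, IS_PREFIX, IS_POSTFIX, IS_EXPRESSION_START). A minimal decoding sketch using two values that appear in the generated enum:

// Sketch only; mirrors the masks declared in tokenizer/types.js.
const PRECEDENCE_MASK = 0xf;        // low 4 bits: 0 = not an operator, else precedence 1..12
const IS_KEYWORD = 1 << 4;
const IS_EXPRESSION_START = 1 << 9;

function describeTokenType(tokenType) {
  return {
    precedence: tokenType & PRECEDENCE_MASK,
    isKeyword: Boolean(tokenType & IS_KEYWORD),
    startsExpr: Boolean(tokenType & IS_EXPRESSION_START),
  };
}

describeTokenType(95256); // tt._in   -> { precedence: 8, isKeyword: true, startsExpr: false }
describeTokenType(85520); // tt._this -> { precedence: 0, isKeyword: true, startsExpr: true }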
+export function baseParseMaybeAssign(noIn, isWithinParens) { + if (match(tt._yield)) { + parseYield(); + return false; + } + + if (match(tt.parenL) || match(tt.name) || match(tt._yield)) { + state.potentialArrowAt = state.start; + } + + const wasArrow = parseMaybeConditional(noIn); + if (isWithinParens) { + parseParenItem(); + } + if (state.type & TokenType.IS_ASSIGN) { + next(); + parseMaybeAssign(noIn); + return false; + } + return wasArrow; +} + +// Parse a ternary conditional (`?:`) operator. +// Returns true if the expression was an arrow function. +function parseMaybeConditional(noIn) { + const wasArrow = parseExprOps(noIn); + if (wasArrow) { + return true; + } + parseConditional(noIn); + return false; +} + +function parseConditional(noIn) { + if (isTypeScriptEnabled || isFlowEnabled) { + typedParseConditional(noIn); + } else { + baseParseConditional(noIn); + } +} + +export function baseParseConditional(noIn) { + if (eat(tt.question)) { + parseMaybeAssign(); + expect(tt.colon); + parseMaybeAssign(noIn); + } +} + +// Start the precedence parser. +// Returns true if this was an arrow function +function parseExprOps(noIn) { + const startTokenIndex = state.tokens.length; + const wasArrow = parseMaybeUnary(); + if (wasArrow) { + return true; + } + parseExprOp(startTokenIndex, -1, noIn); + return false; +} + +// Parse binary operators with the operator precedence parsing +// algorithm. `left` is the left-hand side of the operator. +// `minPrec` provides context that allows the function to stop and +// defer further parser to one of its callers when it encounters an +// operator that has a lower precedence than the set it is parsing. +function parseExprOp(startTokenIndex, minPrec, noIn) { + if ( + isTypeScriptEnabled && + (tt._in & TokenType.PRECEDENCE_MASK) > minPrec && + !hasPrecedingLineBreak() && + (eatContextual(ContextualKeyword._as) || eatContextual(ContextualKeyword._satisfies)) + ) { + const oldIsType = pushTypeContext(1); + tsParseType(); + popTypeContext(oldIsType); + rescan_gt(); + parseExprOp(startTokenIndex, minPrec, noIn); + return; + } + + const prec = state.type & TokenType.PRECEDENCE_MASK; + if (prec > 0 && (!noIn || !match(tt._in))) { + if (prec > minPrec) { + const op = state.type; + next(); + if (op === tt.nullishCoalescing) { + state.tokens[state.tokens.length - 1].nullishStartIndex = startTokenIndex; + } + + const rhsStartTokenIndex = state.tokens.length; + parseMaybeUnary(); + // Extend the right operand of this operator if possible. + parseExprOp(rhsStartTokenIndex, op & TokenType.IS_RIGHT_ASSOCIATIVE ? prec - 1 : prec, noIn); + if (op === tt.nullishCoalescing) { + state.tokens[startTokenIndex].numNullishCoalesceStarts++; + state.tokens[state.tokens.length - 1].numNullishCoalesceEnds++; + } + // Continue with any future operator holding this expression as the left operand. + parseExprOp(startTokenIndex, minPrec, noIn); + } + } +} + +// Parse unary operators, both prefix and postfix. +// Returns true if this was an arrow function. 
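Illustrative aside (not sucrase code): parseExprOp above is the classic precedence-climbing loop the file-header comment describes — compare the next operator's precedence against minPrec, recurse for the right operand with prec (or prec - 1 for right-associative operators), then keep extending the original left operand. A self-contained sketch over a hypothetical token array:

// Tokens alternate operand/operator; operands carry prec 0, matching the
// "precedence 0 means not an operator" convention in tokenizer/types.js.
// The operator precedences below (10 for +, 11 for *) match tt.plus and tt.star.
function parseBinary(tokens, pos, minPrec) {
  let left = tokens[pos].value;   // the "atom"; a real parser would recurse here
  let next = pos + 1;
  while (next < tokens.length && tokens[next].prec > minPrec) {
    const op = tokens[next];
    const rhsMinPrec = op.rightAssoc ? op.prec - 1 : op.prec;
    const [right, after] = parseBinary(tokens, next + 1, rhsMinPrec);
    left = { op: op.value, left, right };
    next = after;
  }
  return [left, next];
}

const tokens = [
  { value: 1, prec: 0 }, { value: "+", prec: 10 },
  { value: 2, prec: 0 }, { value: "*", prec: 11 },
  { value: 3, prec: 0 },
];
// parseBinary(tokens, 0, 0)[0]
//   -> { op: "+", left: 1, right: { op: "*", left: 2, right: 3 } }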
+export function parseMaybeUnary() { + if (isTypeScriptEnabled && !isJSXEnabled && eat(tt.lessThan)) { + tsParseTypeAssertion(); + return false; + } + if ( + isContextual(ContextualKeyword._module) && + lookaheadCharCode() === charCodes.leftCurlyBrace && + !hasFollowingLineBreak() + ) { + parseModuleExpression(); + return false; + } + if (state.type & TokenType.IS_PREFIX) { + next(); + parseMaybeUnary(); + return false; + } + + const wasArrow = parseExprSubscripts(); + if (wasArrow) { + return true; + } + while (state.type & TokenType.IS_POSTFIX && !canInsertSemicolon()) { + // The tokenizer calls everything a preincrement, so make it a postincrement when + // we see it in that context. + if (state.type === tt.preIncDec) { + state.type = tt.postIncDec; + } + next(); + } + return false; +} + +// Parse call, dot, and `[]`-subscript expressions. +// Returns true if this was an arrow function. +export function parseExprSubscripts() { + const startTokenIndex = state.tokens.length; + const wasArrow = parseExprAtom(); + if (wasArrow) { + return true; + } + parseSubscripts(startTokenIndex); + // If there was any optional chain operation, the start token would be marked + // as such, so also mark the end now. + if (state.tokens.length > startTokenIndex && state.tokens[startTokenIndex].isOptionalChainStart) { + state.tokens[state.tokens.length - 1].isOptionalChainEnd = true; + } + return false; +} + +function parseSubscripts(startTokenIndex, noCalls = false) { + if (isFlowEnabled) { + flowParseSubscripts(startTokenIndex, noCalls); + } else { + baseParseSubscripts(startTokenIndex, noCalls); + } +} + +export function baseParseSubscripts(startTokenIndex, noCalls = false) { + const stopState = new StopState(false); + do { + parseSubscript(startTokenIndex, noCalls, stopState); + } while (!stopState.stop && !state.error); +} + +function parseSubscript(startTokenIndex, noCalls, stopState) { + if (isTypeScriptEnabled) { + tsParseSubscript(startTokenIndex, noCalls, stopState); + } else if (isFlowEnabled) { + flowParseSubscript(startTokenIndex, noCalls, stopState); + } else { + baseParseSubscript(startTokenIndex, noCalls, stopState); + } +} + +/** Set 'state.stop = true' to indicate that we should stop parsing subscripts. */ +export function baseParseSubscript( + startTokenIndex, + noCalls, + stopState, +) { + if (!noCalls && eat(tt.doubleColon)) { + parseNoCallExpr(); + stopState.stop = true; + // Propagate startTokenIndex so that `a::b?.()` will keep `a` as the first token. We may want + // to revisit this in the future when fully supporting bind syntax. + parseSubscripts(startTokenIndex, noCalls); + } else if (match(tt.questionDot)) { + state.tokens[startTokenIndex].isOptionalChainStart = true; + if (noCalls && lookaheadType() === tt.parenL) { + stopState.stop = true; + return; + } + next(); + state.tokens[state.tokens.length - 1].subscriptStartIndex = startTokenIndex; + + if (eat(tt.bracketL)) { + parseExpression(); + expect(tt.bracketR); + } else if (eat(tt.parenL)) { + parseCallExpressionArguments(); + } else { + parseMaybePrivateName(); + } + } else if (eat(tt.dot)) { + state.tokens[state.tokens.length - 1].subscriptStartIndex = startTokenIndex; + parseMaybePrivateName(); + } else if (eat(tt.bracketL)) { + state.tokens[state.tokens.length - 1].subscriptStartIndex = startTokenIndex; + parseExpression(); + expect(tt.bracketR); + } else if (!noCalls && match(tt.parenL)) { + if (atPossibleAsync()) { + // We see "async", but it's possible it's a usage of the name "async". 
Parse as if it's a + // function call, and if we see an arrow later, backtrack and re-parse as a parameter list. + const snapshot = state.snapshot(); + const asyncStartTokenIndex = state.tokens.length; + next(); + state.tokens[state.tokens.length - 1].subscriptStartIndex = startTokenIndex; + + const callContextId = getNextContextId(); + + state.tokens[state.tokens.length - 1].contextId = callContextId; + parseCallExpressionArguments(); + state.tokens[state.tokens.length - 1].contextId = callContextId; + + if (shouldParseAsyncArrow()) { + // We hit an arrow, so backtrack and start again parsing function parameters. + state.restoreFromSnapshot(snapshot); + stopState.stop = true; + state.scopeDepth++; + + parseFunctionParams(); + parseAsyncArrowFromCallExpression(asyncStartTokenIndex); + } + } else { + next(); + state.tokens[state.tokens.length - 1].subscriptStartIndex = startTokenIndex; + const callContextId = getNextContextId(); + state.tokens[state.tokens.length - 1].contextId = callContextId; + parseCallExpressionArguments(); + state.tokens[state.tokens.length - 1].contextId = callContextId; + } + } else if (match(tt.backQuote)) { + // Tagged template expression. + parseTemplate(); + } else { + stopState.stop = true; + } +} + +export function atPossibleAsync() { + // This was made less strict than the original version to avoid passing around nodes, but it + // should be safe to have rare false positives here. + return ( + state.tokens[state.tokens.length - 1].contextualKeyword === ContextualKeyword._async && + !canInsertSemicolon() + ); +} + +export function parseCallExpressionArguments() { + let first = true; + while (!eat(tt.parenR) && !state.error) { + if (first) { + first = false; + } else { + expect(tt.comma); + if (eat(tt.parenR)) { + break; + } + } + + parseExprListItem(false); + } +} + +function shouldParseAsyncArrow() { + return match(tt.colon) || match(tt.arrow); +} + +function parseAsyncArrowFromCallExpression(startTokenIndex) { + if (isTypeScriptEnabled) { + tsStartParseAsyncArrowFromCallExpression(); + } else if (isFlowEnabled) { + flowStartParseAsyncArrowFromCallExpression(); + } + expect(tt.arrow); + parseArrowExpression(startTokenIndex); +} + +// Parse a no-call expression (like argument of `new` or `::` operators). + +function parseNoCallExpr() { + const startTokenIndex = state.tokens.length; + parseExprAtom(); + parseSubscripts(startTokenIndex, true); +} + +// Parse an atomic expression — either a single token that is an +// expression, an expression started by a keyword like `function` or +// `new`, or an expression wrapped in punctuation like `()`, `[]`, +// or `{}`. +// Returns true if the parsed expression was an arrow function. +export function parseExprAtom() { + if (eat(tt.modulo)) { + // V8 intrinsic expression. Just parse the identifier, and the function invocation is parsed + // naturally. + parseIdentifier(); + return false; + } + + if (match(tt.jsxText) || match(tt.jsxEmptyText)) { + parseLiteral(); + return false; + } else if (match(tt.lessThan) && isJSXEnabled) { + state.type = tt.jsxTagStart; + jsxParseElement(); + next(); + return false; + } + + const canBeArrow = state.potentialArrowAt === state.start; + switch (state.type) { + case tt.slash: + case tt.assign: + retokenizeSlashAsRegex(); + // Fall through. 
+ + case tt._super: + case tt._this: + case tt.regexp: + case tt.num: + case tt.bigint: + case tt.decimal: + case tt.string: + case tt._null: + case tt._true: + case tt._false: + next(); + return false; + + case tt._import: + next(); + if (match(tt.dot)) { + // import.meta + state.tokens[state.tokens.length - 1].type = tt.name; + next(); + parseIdentifier(); + } + return false; + + case tt.name: { + const startTokenIndex = state.tokens.length; + const functionStart = state.start; + const contextualKeyword = state.contextualKeyword; + parseIdentifier(); + if (contextualKeyword === ContextualKeyword._await) { + parseAwait(); + return false; + } else if ( + contextualKeyword === ContextualKeyword._async && + match(tt._function) && + !canInsertSemicolon() + ) { + next(); + parseFunction(functionStart, false); + return false; + } else if ( + canBeArrow && + contextualKeyword === ContextualKeyword._async && + !canInsertSemicolon() && + match(tt.name) + ) { + state.scopeDepth++; + parseBindingIdentifier(false); + expect(tt.arrow); + // let foo = async bar => {}; + parseArrowExpression(startTokenIndex); + return true; + } else if (match(tt._do) && !canInsertSemicolon()) { + next(); + parseBlock(); + return false; + } + + if (canBeArrow && !canInsertSemicolon() && match(tt.arrow)) { + state.scopeDepth++; + markPriorBindingIdentifier(false); + expect(tt.arrow); + parseArrowExpression(startTokenIndex); + return true; + } + + state.tokens[state.tokens.length - 1].identifierRole = IdentifierRole.Access; + return false; + } + + case tt._do: { + next(); + parseBlock(); + return false; + } + + case tt.parenL: { + const wasArrow = parseParenAndDistinguishExpression(canBeArrow); + return wasArrow; + } + + case tt.bracketL: + next(); + parseExprList(tt.bracketR, true); + return false; + + case tt.braceL: + parseObj(false, false); + return false; + + case tt._function: + parseFunctionExpression(); + return false; + + case tt.at: + parseDecorators(); + // Fall through. + + case tt._class: + parseClass(false); + return false; + + case tt._new: + parseNew(); + return false; + + case tt.backQuote: + parseTemplate(); + return false; + + case tt.doubleColon: { + next(); + parseNoCallExpr(); + return false; + } + + case tt.hash: { + const code = lookaheadCharCode(); + if (IS_IDENTIFIER_START[code] || code === charCodes.backslash) { + parseMaybePrivateName(); + } else { + next(); + } + // Smart pipeline topic reference. + return false; + } + + default: + unexpected(); + return false; + } +} + +function parseMaybePrivateName() { + eat(tt.hash); + parseIdentifier(); +} + +function parseFunctionExpression() { + const functionStart = state.start; + parseIdentifier(); + if (eat(tt.dot)) { + // function.sent + parseIdentifier(); + } + parseFunction(functionStart, false); +} + +export function parseLiteral() { + next(); +} + +export function parseParenExpression() { + expect(tt.parenL); + parseExpression(); + expect(tt.parenR); +} + +// Returns true if this was an arrow expression. +function parseParenAndDistinguishExpression(canBeArrow) { + // Assume this is a normal parenthesized expression, but if we see an arrow, we'll bail and + // start over as a parameter list. 
+ const snapshot = state.snapshot(); + + const startTokenIndex = state.tokens.length; + expect(tt.parenL); + + let first = true; + + while (!match(tt.parenR) && !state.error) { + if (first) { + first = false; + } else { + expect(tt.comma); + if (match(tt.parenR)) { + break; + } + } + + if (match(tt.ellipsis)) { + parseRest(false /* isBlockScope */); + parseParenItem(); + break; + } else { + parseMaybeAssign(false, true); + } + } + + expect(tt.parenR); + + if (canBeArrow && shouldParseArrow()) { + const wasArrow = parseArrow(); + if (wasArrow) { + // It was an arrow function this whole time, so start over and parse it as params so that we + // get proper token annotations. + state.restoreFromSnapshot(snapshot); + state.scopeDepth++; + // Don't specify a context ID because arrow functions don't need a context ID. + parseFunctionParams(); + parseArrow(); + parseArrowExpression(startTokenIndex); + if (state.error) { + // Nevermind! This must have been something that looks very much like an + // arrow function but where its "parameter list" isn't actually a valid + // parameter list. Force non-arrow parsing. + // See https://github.com/alangpierce/sucrase/issues/666 for an example. + state.restoreFromSnapshot(snapshot); + parseParenAndDistinguishExpression(false); + return false; + } + return true; + } + } + + return false; +} + +function shouldParseArrow() { + return match(tt.colon) || !canInsertSemicolon(); +} + +// Returns whether there was an arrow token. +export function parseArrow() { + if (isTypeScriptEnabled) { + return tsParseArrow(); + } else if (isFlowEnabled) { + return flowParseArrow(); + } else { + return eat(tt.arrow); + } +} + +function parseParenItem() { + if (isTypeScriptEnabled || isFlowEnabled) { + typedParseParenItem(); + } +} + +// New's precedence is slightly tricky. It must allow its argument to +// be a `[]` or dot subscript expression, but not a call — at least, +// not without wrapping it in parentheses. Thus, it uses the noCalls +// argument to parseSubscripts to prevent it from consuming the +// argument list. +function parseNew() { + expect(tt._new); + if (eat(tt.dot)) { + // new.target + parseIdentifier(); + return; + } + parseNewCallee(); + if (isFlowEnabled) { + flowStartParseNewArguments(); + } + if (eat(tt.parenL)) { + parseExprList(tt.parenR); + } +} + +function parseNewCallee() { + parseNoCallExpr(); + eat(tt.questionDot); +} + +export function parseTemplate() { + // Finish `, read quasi + nextTemplateToken(); + // Finish quasi, read ${ + nextTemplateToken(); + while (!match(tt.backQuote) && !state.error) { + expect(tt.dollarBraceL); + parseExpression(); + // Finish }, read quasi + nextTemplateToken(); + // Finish quasi, read either ${ or ` + nextTemplateToken(); + } + next(); +} + +// Parse an object literal or binding pattern. +export function parseObj(isPattern, isBlockScope) { + // Attach a context ID to the object open and close brace and each object key. + const contextId = getNextContextId(); + let first = true; + + next(); + state.tokens[state.tokens.length - 1].contextId = contextId; + + while (!eat(tt.braceR) && !state.error) { + if (first) { + first = false; + } else { + expect(tt.comma); + if (eat(tt.braceR)) { + break; + } + } + + let isGenerator = false; + if (match(tt.ellipsis)) { + const previousIndex = state.tokens.length; + parseSpread(); + if (isPattern) { + // Mark role when the only thing being spread over is an identifier. 
+ if (state.tokens.length === previousIndex + 2) { + markPriorBindingIdentifier(isBlockScope); + } + if (eat(tt.braceR)) { + break; + } + } + continue; + } + + if (!isPattern) { + isGenerator = eat(tt.star); + } + + if (!isPattern && isContextual(ContextualKeyword._async)) { + if (isGenerator) unexpected(); + + parseIdentifier(); + if ( + match(tt.colon) || + match(tt.parenL) || + match(tt.braceR) || + match(tt.eq) || + match(tt.comma) + ) { + // This is a key called "async" rather than an async function. + } else { + if (match(tt.star)) { + next(); + isGenerator = true; + } + parsePropertyName(contextId); + } + } else { + parsePropertyName(contextId); + } + + parseObjPropValue(isPattern, isBlockScope, contextId); + } + + state.tokens[state.tokens.length - 1].contextId = contextId; +} + +function isGetterOrSetterMethod(isPattern) { + // We go off of the next and don't bother checking if the node key is actually "get" or "set". + // This lets us avoid generating a node, and should only make the validation worse. + return ( + !isPattern && + (match(tt.string) || // get "string"() {} + match(tt.num) || // get 1() {} + match(tt.bracketL) || // get ["string"]() {} + match(tt.name) || // get foo() {} + !!(state.type & TokenType.IS_KEYWORD)) // get debugger() {} + ); +} + +// Returns true if this was a method. +function parseObjectMethod(isPattern, objectContextId) { + // We don't need to worry about modifiers because object methods can't have optional bodies, so + // the start will never be used. + const functionStart = state.start; + if (match(tt.parenL)) { + if (isPattern) unexpected(); + parseMethod(functionStart, /* isConstructor */ false); + return true; + } + + if (isGetterOrSetterMethod(isPattern)) { + parsePropertyName(objectContextId); + parseMethod(functionStart, /* isConstructor */ false); + return true; + } + return false; +} + +function parseObjectProperty(isPattern, isBlockScope) { + if (eat(tt.colon)) { + if (isPattern) { + parseMaybeDefault(isBlockScope); + } else { + parseMaybeAssign(false); + } + return; + } + + // Since there's no colon, we assume this is an object shorthand. + + // If we're in a destructuring, we've now discovered that the key was actually an assignee, so + // we need to tag it as a declaration with the appropriate scope. Otherwise, we might need to + // transform it on access, so mark it as a normal object shorthand. + let identifierRole; + if (isPattern) { + if (state.scopeDepth === 0) { + identifierRole = IdentifierRole.ObjectShorthandTopLevelDeclaration; + } else if (isBlockScope) { + identifierRole = IdentifierRole.ObjectShorthandBlockScopedDeclaration; + } else { + identifierRole = IdentifierRole.ObjectShorthandFunctionScopedDeclaration; + } + } else { + identifierRole = IdentifierRole.ObjectShorthand; + } + state.tokens[state.tokens.length - 1].identifierRole = identifierRole; + + // Regardless of whether we know this to be a pattern or if we're in an ambiguous context, allow + // parsing as if there's a default value. 
+ parseMaybeDefault(isBlockScope, true); +} + +function parseObjPropValue( + isPattern, + isBlockScope, + objectContextId, +) { + if (isTypeScriptEnabled) { + tsStartParseObjPropValue(); + } else if (isFlowEnabled) { + flowStartParseObjPropValue(); + } + const wasMethod = parseObjectMethod(isPattern, objectContextId); + if (!wasMethod) { + parseObjectProperty(isPattern, isBlockScope); + } +} + +export function parsePropertyName(objectContextId) { + if (isFlowEnabled) { + flowParseVariance(); + } + if (eat(tt.bracketL)) { + state.tokens[state.tokens.length - 1].contextId = objectContextId; + parseMaybeAssign(); + expect(tt.bracketR); + state.tokens[state.tokens.length - 1].contextId = objectContextId; + } else { + if (match(tt.num) || match(tt.string) || match(tt.bigint) || match(tt.decimal)) { + parseExprAtom(); + } else { + parseMaybePrivateName(); + } + + state.tokens[state.tokens.length - 1].identifierRole = IdentifierRole.ObjectKey; + state.tokens[state.tokens.length - 1].contextId = objectContextId; + } +} + +// Parse object or class method. +export function parseMethod(functionStart, isConstructor) { + const funcContextId = getNextContextId(); + + state.scopeDepth++; + const startTokenIndex = state.tokens.length; + const allowModifiers = isConstructor; // For TypeScript parameter properties + parseFunctionParams(allowModifiers, funcContextId); + parseFunctionBodyAndFinish(functionStart, funcContextId); + const endTokenIndex = state.tokens.length; + state.scopes.push(new Scope(startTokenIndex, endTokenIndex, true)); + state.scopeDepth--; +} + +// Parse arrow function expression. +// If the parameters are provided, they will be converted to an +// assignable list. +export function parseArrowExpression(startTokenIndex) { + parseFunctionBody(true); + const endTokenIndex = state.tokens.length; + state.scopes.push(new Scope(startTokenIndex, endTokenIndex, true)); + state.scopeDepth--; +} + +export function parseFunctionBodyAndFinish(functionStart, funcContextId = 0) { + if (isTypeScriptEnabled) { + tsParseFunctionBodyAndFinish(functionStart, funcContextId); + } else if (isFlowEnabled) { + flowParseFunctionBodyAndFinish(funcContextId); + } else { + parseFunctionBody(false, funcContextId); + } +} + +export function parseFunctionBody(allowExpression, funcContextId = 0) { + const isExpression = allowExpression && !match(tt.braceL); + + if (isExpression) { + parseMaybeAssign(); + } else { + parseBlock(true /* isFunctionScope */, funcContextId); + } +} + +// Parses a comma-separated list of expressions, and returns them as +// an array. `close` is the token type that ends the list, and +// `allowEmpty` can be turned on to allow subsequent commas with +// nothing in between them to be parsed as `null` (which is needed +// for array literals). + +function parseExprList(close, allowEmpty = false) { + let first = true; + while (!eat(close) && !state.error) { + if (first) { + first = false; + } else { + expect(tt.comma); + if (eat(close)) break; + } + parseExprListItem(allowEmpty); + } +} + +function parseExprListItem(allowEmpty) { + if (allowEmpty && match(tt.comma)) { + // Empty item; nothing more to parse for this item. + } else if (match(tt.ellipsis)) { + parseSpread(); + parseParenItem(); + } else if (match(tt.question)) { + // Partial function application proposal. + next(); + } else { + parseMaybeAssign(false, true); + } +} + +// Parse the next token as an identifier. 
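Illustrative aside (not part of the vendored sucrase source): the arrow-function disambiguation above — baseParseSubscript's async-arrow handling and parseParenAndDistinguishExpression — speculates with state.snapshot() and state.restoreFromSnapshot() rather than throwing and catching, as described in tokenizer/state.js. A minimal self-contained sketch of that pattern, with a toy state object standing in for the real State class:

// Toy state exposing just enough of State's snapshot surface (pos + token count).
class MiniState {
  constructor() { this.pos = 0; this.tokens = []; }
  snapshot() { return { pos: this.pos, tokensLength: this.tokens.length }; }
  restoreFromSnapshot(s) { this.pos = s.pos; this.tokens.length = s.tokensLength; }
}

// Run `attempt`; if it reports failure, rewind so the caller can re-parse the
// same input a different way, with no exceptions involved.
function tryParse(state, attempt) {
  const snapshot = state.snapshot();
  if (attempt(state)) {
    return true;                       // speculation succeeded; keep the tokens
  }
  state.restoreFromSnapshot(snapshot); // speculation failed; act as if nothing was consumed
  return false;
}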
+export function parseIdentifier() { + next(); + state.tokens[state.tokens.length - 1].type = tt.name; +} + +// Parses await expression inside async function. +function parseAwait() { + parseMaybeUnary(); +} + +// Parses yield expression inside generator. +function parseYield() { + next(); + if (!match(tt.semi) && !canInsertSemicolon()) { + eat(tt.star); + parseMaybeAssign(); + } +} + +// https://github.com/tc39/proposal-js-module-blocks +function parseModuleExpression() { + expectContextual(ContextualKeyword._module); + expect(tt.braceL); + // For now, just call parseBlockBody to parse the block. In the future when we + // implement full support, we'll want to emit scopes and possibly other + // information. + parseBlockBody(tt.braceR); +} diff --git a/node_modules/sucrase/dist/esm/parser/traverser/index.js b/node_modules/sucrase/dist/esm/parser/traverser/index.js new file mode 100644 index 0000000..eb8c990 --- /dev/null +++ b/node_modules/sucrase/dist/esm/parser/traverser/index.js @@ -0,0 +1,18 @@ + +import {nextToken, skipLineComment} from "../tokenizer/index"; +import {charCodes} from "../util/charcodes"; +import {input, state} from "./base"; +import {parseTopLevel} from "./statement"; + +export function parseFile() { + // If enabled, skip leading hashbang line. + if ( + state.pos === 0 && + input.charCodeAt(0) === charCodes.numberSign && + input.charCodeAt(1) === charCodes.exclamationMark + ) { + skipLineComment(2); + } + nextToken(); + return parseTopLevel(); +} diff --git a/node_modules/sucrase/dist/esm/parser/traverser/lval.js b/node_modules/sucrase/dist/esm/parser/traverser/lval.js new file mode 100644 index 0000000..f5c4855 --- /dev/null +++ b/node_modules/sucrase/dist/esm/parser/traverser/lval.js @@ -0,0 +1,159 @@ +import {flowParseAssignableListItemTypes} from "../plugins/flow"; +import {tsParseAssignableListItemTypes, tsParseModifiers} from "../plugins/typescript"; +import { + eat, + IdentifierRole, + match, + next, + popTypeContext, + pushTypeContext, +} from "../tokenizer/index"; +import {ContextualKeyword} from "../tokenizer/keywords"; +import {TokenType, TokenType as tt} from "../tokenizer/types"; +import {isFlowEnabled, isTypeScriptEnabled, state} from "./base"; +import {parseIdentifier, parseMaybeAssign, parseObj} from "./expression"; +import {expect, unexpected} from "./util"; + +export function parseSpread() { + next(); + parseMaybeAssign(false); +} + +export function parseRest(isBlockScope) { + next(); + parseBindingAtom(isBlockScope); +} + +export function parseBindingIdentifier(isBlockScope) { + parseIdentifier(); + markPriorBindingIdentifier(isBlockScope); +} + +export function parseImportedIdentifier() { + parseIdentifier(); + state.tokens[state.tokens.length - 1].identifierRole = IdentifierRole.ImportDeclaration; +} + +export function markPriorBindingIdentifier(isBlockScope) { + let identifierRole; + if (state.scopeDepth === 0) { + identifierRole = IdentifierRole.TopLevelDeclaration; + } else if (isBlockScope) { + identifierRole = IdentifierRole.BlockScopedDeclaration; + } else { + identifierRole = IdentifierRole.FunctionScopedDeclaration; + } + state.tokens[state.tokens.length - 1].identifierRole = identifierRole; +} + +// Parses lvalue (assignable) atom. +export function parseBindingAtom(isBlockScope) { + switch (state.type) { + case tt._this: { + // In TypeScript, "this" may be the name of a parameter, so allow it. 
+ const oldIsType = pushTypeContext(0); + next(); + popTypeContext(oldIsType); + return; + } + + case tt._yield: + case tt.name: { + state.type = tt.name; + parseBindingIdentifier(isBlockScope); + return; + } + + case tt.bracketL: { + next(); + parseBindingList(tt.bracketR, isBlockScope, true /* allowEmpty */); + return; + } + + case tt.braceL: + parseObj(true, isBlockScope); + return; + + default: + unexpected(); + } +} + +export function parseBindingList( + close, + isBlockScope, + allowEmpty = false, + allowModifiers = false, + contextId = 0, +) { + let first = true; + + let hasRemovedComma = false; + const firstItemTokenIndex = state.tokens.length; + + while (!eat(close) && !state.error) { + if (first) { + first = false; + } else { + expect(tt.comma); + state.tokens[state.tokens.length - 1].contextId = contextId; + // After a "this" type in TypeScript, we need to set the following comma (if any) to also be + // a type token so that it will be removed. + if (!hasRemovedComma && state.tokens[firstItemTokenIndex].isType) { + state.tokens[state.tokens.length - 1].isType = true; + hasRemovedComma = true; + } + } + if (allowEmpty && match(tt.comma)) { + // Empty item; nothing further to parse for this item. + } else if (eat(close)) { + break; + } else if (match(tt.ellipsis)) { + parseRest(isBlockScope); + parseAssignableListItemTypes(); + // Support rest element trailing commas allowed by TypeScript <2.9. + eat(TokenType.comma); + expect(close); + break; + } else { + parseAssignableListItem(allowModifiers, isBlockScope); + } + } +} + +function parseAssignableListItem(allowModifiers, isBlockScope) { + if (allowModifiers) { + tsParseModifiers([ + ContextualKeyword._public, + ContextualKeyword._protected, + ContextualKeyword._private, + ContextualKeyword._readonly, + ContextualKeyword._override, + ]); + } + + parseMaybeDefault(isBlockScope); + parseAssignableListItemTypes(); + parseMaybeDefault(isBlockScope, true /* leftAlreadyParsed */); +} + +function parseAssignableListItemTypes() { + if (isFlowEnabled) { + flowParseAssignableListItemTypes(); + } else if (isTypeScriptEnabled) { + tsParseAssignableListItemTypes(); + } +} + +// Parses assignment pattern around given atom if possible. 
+export function parseMaybeDefault(isBlockScope, leftAlreadyParsed = false) { + if (!leftAlreadyParsed) { + parseBindingAtom(isBlockScope); + } + if (!eat(tt.eq)) { + return; + } + const eqIndex = state.tokens.length - 1; + parseMaybeAssign(); + state.tokens[eqIndex].rhsEndIndex = state.tokens.length; +} diff --git a/node_modules/sucrase/dist/esm/parser/traverser/statement.js b/node_modules/sucrase/dist/esm/parser/traverser/statement.js new file mode 100644 index 0000000..325a717 --- /dev/null +++ b/node_modules/sucrase/dist/esm/parser/traverser/statement.js @@ -0,0 +1,1281 @@ +/* eslint max-len: 0 */ + +import {File} from "../index"; +import { + flowAfterParseClassSuper, + flowAfterParseVarHead, + flowParseExportDeclaration, + flowParseExportStar, + flowParseIdentifierStatement, + flowParseImportSpecifier, + flowParseTypeAnnotation, + flowParseTypeParameterDeclaration, + flowShouldDisallowExportDefaultSpecifier, + flowShouldParseExportDeclaration, + flowShouldParseExportStar, + flowStartParseFunctionParams, + flowStartParseImportSpecifiers, + flowTryParseExportDefaultExpression, + flowTryParseStatement, +} from "../plugins/flow"; +import { + tsAfterParseClassSuper, + tsAfterParseVarHead, + tsIsDeclarationStart, + tsParseExportDeclaration, + tsParseExportSpecifier, + tsParseIdentifierStatement, + tsParseImportEqualsDeclaration, + tsParseImportSpecifier, + tsParseMaybeDecoratorArguments, + tsParseModifiers, + tsStartParseFunctionParams, + tsTryParseClassMemberWithIsStatic, + tsTryParseExport, + tsTryParseExportDefaultExpression, + tsTryParseStatementContent, + tsTryParseTypeAnnotation, + tsTryParseTypeParameters, +} from "../plugins/typescript"; +import { + eat, + eatTypeToken, + IdentifierRole, + lookaheadType, + lookaheadTypeAndKeyword, + match, + next, + nextTokenStart, + nextTokenStartSince, + popTypeContext, + pushTypeContext, +} from "../tokenizer"; +import {ContextualKeyword} from "../tokenizer/keywords"; +import {Scope} from "../tokenizer/state"; +import { TokenType as tt} from "../tokenizer/types"; +import {charCodes} from "../util/charcodes"; +import {getNextContextId, input, isFlowEnabled, isTypeScriptEnabled, state} from "./base"; +import { + parseCallExpressionArguments, + parseExprAtom, + parseExpression, + parseExprSubscripts, + parseFunctionBodyAndFinish, + parseIdentifier, + parseMaybeAssign, + parseMethod, + parseObj, + parseParenExpression, + parsePropertyName, +} from "./expression"; +import { + parseBindingAtom, + parseBindingIdentifier, + parseBindingList, + parseImportedIdentifier, +} from "./lval"; +import { + canInsertSemicolon, + eatContextual, + expect, + expectContextual, + hasFollowingLineBreak, + hasPrecedingLineBreak, + isContextual, + isLineTerminator, + isLookaheadContextual, + semicolon, + unexpected, +} from "./util"; + +export function parseTopLevel() { + parseBlockBody(tt.eof); + state.scopes.push(new Scope(0, state.tokens.length, true)); + if (state.scopeDepth !== 0) { + throw new Error(`Invalid scope depth at end of file: ${state.scopeDepth}`); + } + return new File(state.tokens, state.scopes); +} + +// Parse a single statement. +// +// If expecting a statement and finding a slash operator, parse a +// regular expression literal. This is to handle cases like +// `if (foo) /blah/.exec(foo)`, where looking at the previous token +// does not help. 
+ +export function parseStatement(declaration) { + if (isFlowEnabled) { + if (flowTryParseStatement()) { + return; + } + } + if (match(tt.at)) { + parseDecorators(); + } + parseStatementContent(declaration); +} + +function parseStatementContent(declaration) { + if (isTypeScriptEnabled) { + if (tsTryParseStatementContent()) { + return; + } + } + + const starttype = state.type; + + // Most types of statements are recognized by the keyword they + // start with. Many are trivial to parse, some require a bit of + // complexity. + + switch (starttype) { + case tt._break: + case tt._continue: + parseBreakContinueStatement(); + return; + case tt._debugger: + parseDebuggerStatement(); + return; + case tt._do: + parseDoStatement(); + return; + case tt._for: + parseForStatement(); + return; + case tt._function: + if (lookaheadType() === tt.dot) break; + if (!declaration) unexpected(); + parseFunctionStatement(); + return; + + case tt._class: + if (!declaration) unexpected(); + parseClass(true); + return; + + case tt._if: + parseIfStatement(); + return; + case tt._return: + parseReturnStatement(); + return; + case tt._switch: + parseSwitchStatement(); + return; + case tt._throw: + parseThrowStatement(); + return; + case tt._try: + parseTryStatement(); + return; + + case tt._let: + case tt._const: + if (!declaration) unexpected(); // NOTE: falls through to _var + + case tt._var: + parseVarStatement(starttype !== tt._var); + return; + + case tt._while: + parseWhileStatement(); + return; + case tt.braceL: + parseBlock(); + return; + case tt.semi: + parseEmptyStatement(); + return; + case tt._export: + case tt._import: { + const nextType = lookaheadType(); + if (nextType === tt.parenL || nextType === tt.dot) { + break; + } + next(); + if (starttype === tt._import) { + parseImport(); + } else { + parseExport(); + } + return; + } + case tt.name: + if (state.contextualKeyword === ContextualKeyword._async) { + const functionStart = state.start; + // peek ahead and see if next token is a function + const snapshot = state.snapshot(); + next(); + if (match(tt._function) && !canInsertSemicolon()) { + expect(tt._function); + parseFunction(functionStart, true); + return; + } else { + state.restoreFromSnapshot(snapshot); + } + } else if ( + state.contextualKeyword === ContextualKeyword._using && + !hasFollowingLineBreak() && + // Statements like `using[0]` and `using in foo` aren't actual using + // declarations. + lookaheadType() === tt.name + ) { + parseVarStatement(true); + return; + } + default: + // Do nothing. + break; + } + + // If the statement does not start with a statement keyword or a + // brace, it's an ExpressionStatement or LabeledStatement. We + // simply start parsing an expression, and afterwards, if the + // next token is a colon and the expression was a simple + // Identifier node, we switch to interpreting it as a label. + const initialTokensLength = state.tokens.length; + parseExpression(); + let simpleName = null; + if (state.tokens.length === initialTokensLength + 1) { + const token = state.tokens[state.tokens.length - 1]; + if (token.type === tt.name) { + simpleName = token.contextualKeyword; + } + } + if (simpleName == null) { + semicolon(); + return; + } + if (eat(tt.colon)) { + parseLabeledStatement(); + } else { + // This was an identifier, so we might want to handle flow/typescript-specific cases. 
+ parseIdentifierStatement(simpleName); + } +} + +export function parseDecorators() { + while (match(tt.at)) { + parseDecorator(); + } +} + +function parseDecorator() { + next(); + if (eat(tt.parenL)) { + parseExpression(); + expect(tt.parenR); + } else { + parseIdentifier(); + while (eat(tt.dot)) { + parseIdentifier(); + } + parseMaybeDecoratorArguments(); + } +} + +function parseMaybeDecoratorArguments() { + if (isTypeScriptEnabled) { + tsParseMaybeDecoratorArguments(); + } else { + baseParseMaybeDecoratorArguments(); + } +} + +export function baseParseMaybeDecoratorArguments() { + if (eat(tt.parenL)) { + parseCallExpressionArguments(); + } +} + +function parseBreakContinueStatement() { + next(); + if (!isLineTerminator()) { + parseIdentifier(); + semicolon(); + } +} + +function parseDebuggerStatement() { + next(); + semicolon(); +} + +function parseDoStatement() { + next(); + parseStatement(false); + expect(tt._while); + parseParenExpression(); + eat(tt.semi); +} + +function parseForStatement() { + state.scopeDepth++; + const startTokenIndex = state.tokens.length; + parseAmbiguousForStatement(); + const endTokenIndex = state.tokens.length; + state.scopes.push(new Scope(startTokenIndex, endTokenIndex, false)); + state.scopeDepth--; +} + +/** + * Determine if this token is a `using` declaration (explicit resource + * management) as part of a loop. + * https://github.com/tc39/proposal-explicit-resource-management + */ +function isUsingInLoop() { + if (!isContextual(ContextualKeyword._using)) { + return false; + } + // This must be `for (using of`, where `using` is the name of the loop + // variable. + if (isLookaheadContextual(ContextualKeyword._of)) { + return false; + } + return true; +} + +// Disambiguating between a `for` and a `for`/`in` or `for`/`of` +// loop is non-trivial. Basically, we have to parse the init `var` +// statement or expression, disallowing the `in` operator (see +// the second parameter to `parseExpression`), and then check +// whether the next token is `in` or `of`. When there is no init +// part (semicolon immediately after the opening parenthesis), it +// is a regular `for` loop. +function parseAmbiguousForStatement() { + next(); + + let forAwait = false; + if (isContextual(ContextualKeyword._await)) { + forAwait = true; + next(); + } + expect(tt.parenL); + + if (match(tt.semi)) { + if (forAwait) { + unexpected(); + } + parseFor(); + return; + } + + if (match(tt._var) || match(tt._let) || match(tt._const) || isUsingInLoop()) { + next(); + parseVar(true, state.type !== tt._var); + if (match(tt._in) || isContextual(ContextualKeyword._of)) { + parseForIn(forAwait); + return; + } + parseFor(); + return; + } + + parseExpression(true); + if (match(tt._in) || isContextual(ContextualKeyword._of)) { + parseForIn(forAwait); + return; + } + if (forAwait) { + unexpected(); + } + parseFor(); +} + +function parseFunctionStatement() { + const functionStart = state.start; + next(); + parseFunction(functionStart, true); +} + +function parseIfStatement() { + next(); + parseParenExpression(); + parseStatement(false); + if (eat(tt._else)) { + parseStatement(false); + } +} + +function parseReturnStatement() { + next(); + + // In `return` (and `break`/`continue`), the keywords with + // optional arguments, we eagerly look for a semicolon or the + // possibility to insert one. 
+ + if (!isLineTerminator()) { + parseExpression(); + semicolon(); + } +} + +function parseSwitchStatement() { + next(); + parseParenExpression(); + state.scopeDepth++; + const startTokenIndex = state.tokens.length; + expect(tt.braceL); + + // Don't bother validation; just go through any sequence of cases, defaults, and statements. + while (!match(tt.braceR) && !state.error) { + if (match(tt._case) || match(tt._default)) { + const isCase = match(tt._case); + next(); + if (isCase) { + parseExpression(); + } + expect(tt.colon); + } else { + parseStatement(true); + } + } + next(); // Closing brace + const endTokenIndex = state.tokens.length; + state.scopes.push(new Scope(startTokenIndex, endTokenIndex, false)); + state.scopeDepth--; +} + +function parseThrowStatement() { + next(); + parseExpression(); + semicolon(); +} + +function parseCatchClauseParam() { + parseBindingAtom(true /* isBlockScope */); + + if (isTypeScriptEnabled) { + tsTryParseTypeAnnotation(); + } +} + +function parseTryStatement() { + next(); + + parseBlock(); + + if (match(tt._catch)) { + next(); + let catchBindingStartTokenIndex = null; + if (match(tt.parenL)) { + state.scopeDepth++; + catchBindingStartTokenIndex = state.tokens.length; + expect(tt.parenL); + parseCatchClauseParam(); + expect(tt.parenR); + } + parseBlock(); + if (catchBindingStartTokenIndex != null) { + // We need a special scope for the catch binding which includes the binding itself and the + // catch block. + const endTokenIndex = state.tokens.length; + state.scopes.push(new Scope(catchBindingStartTokenIndex, endTokenIndex, false)); + state.scopeDepth--; + } + } + if (eat(tt._finally)) { + parseBlock(); + } +} + +export function parseVarStatement(isBlockScope) { + next(); + parseVar(false, isBlockScope); + semicolon(); +} + +function parseWhileStatement() { + next(); + parseParenExpression(); + parseStatement(false); +} + +function parseEmptyStatement() { + next(); +} + +function parseLabeledStatement() { + parseStatement(true); +} + +/** + * Parse a statement starting with an identifier of the given name. Subclasses match on the name + * to handle statements like "declare". + */ +function parseIdentifierStatement(contextualKeyword) { + if (isTypeScriptEnabled) { + tsParseIdentifierStatement(contextualKeyword); + } else if (isFlowEnabled) { + flowParseIdentifierStatement(contextualKeyword); + } else { + semicolon(); + } +} + +// Parse a semicolon-enclosed block of statements. +export function parseBlock(isFunctionScope = false, contextId = 0) { + const startTokenIndex = state.tokens.length; + state.scopeDepth++; + expect(tt.braceL); + if (contextId) { + state.tokens[state.tokens.length - 1].contextId = contextId; + } + parseBlockBody(tt.braceR); + if (contextId) { + state.tokens[state.tokens.length - 1].contextId = contextId; + } + const endTokenIndex = state.tokens.length; + state.scopes.push(new Scope(startTokenIndex, endTokenIndex, isFunctionScope)); + state.scopeDepth--; +} + +export function parseBlockBody(end) { + while (!eat(end) && !state.error) { + parseStatement(true); + } +} + +// Parse a regular `for` loop. The disambiguation code in +// `parseStatement` will already have parsed the init statement or +// expression. + +function parseFor() { + expect(tt.semi); + if (!match(tt.semi)) { + parseExpression(); + } + expect(tt.semi); + if (!match(tt.parenR)) { + parseExpression(); + } + expect(tt.parenR); + parseStatement(false); +} + +// Parse a `for`/`in` and `for`/`of` loop, which are almost +// same from parser's perspective. 
+ +function parseForIn(forAwait) { + if (forAwait) { + eatContextual(ContextualKeyword._of); + } else { + next(); + } + parseExpression(); + expect(tt.parenR); + parseStatement(false); +} + +// Parse a list of variable declarations. + +function parseVar(isFor, isBlockScope) { + while (true) { + parseVarHead(isBlockScope); + if (eat(tt.eq)) { + const eqIndex = state.tokens.length - 1; + parseMaybeAssign(isFor); + state.tokens[eqIndex].rhsEndIndex = state.tokens.length; + } + if (!eat(tt.comma)) { + break; + } + } +} + +function parseVarHead(isBlockScope) { + parseBindingAtom(isBlockScope); + if (isTypeScriptEnabled) { + tsAfterParseVarHead(); + } else if (isFlowEnabled) { + flowAfterParseVarHead(); + } +} + +// Parse a function declaration or literal (depending on the +// `isStatement` parameter). + +export function parseFunction( + functionStart, + isStatement, + optionalId = false, +) { + if (match(tt.star)) { + next(); + } + + if (isStatement && !optionalId && !match(tt.name) && !match(tt._yield)) { + unexpected(); + } + + let nameScopeStartTokenIndex = null; + + if (match(tt.name)) { + // Expression-style functions should limit their name's scope to the function body, so we make + // a new function scope to enforce that. + if (!isStatement) { + nameScopeStartTokenIndex = state.tokens.length; + state.scopeDepth++; + } + parseBindingIdentifier(false); + } + + const startTokenIndex = state.tokens.length; + state.scopeDepth++; + parseFunctionParams(); + parseFunctionBodyAndFinish(functionStart); + const endTokenIndex = state.tokens.length; + // In addition to the block scope of the function body, we need a separate function-style scope + // that includes the params. + state.scopes.push(new Scope(startTokenIndex, endTokenIndex, true)); + state.scopeDepth--; + if (nameScopeStartTokenIndex !== null) { + state.scopes.push(new Scope(nameScopeStartTokenIndex, endTokenIndex, true)); + state.scopeDepth--; + } +} + +export function parseFunctionParams( + allowModifiers = false, + funcContextId = 0, +) { + if (isTypeScriptEnabled) { + tsStartParseFunctionParams(); + } else if (isFlowEnabled) { + flowStartParseFunctionParams(); + } + + expect(tt.parenL); + if (funcContextId) { + state.tokens[state.tokens.length - 1].contextId = funcContextId; + } + parseBindingList( + tt.parenR, + false /* isBlockScope */, + false /* allowEmpty */, + allowModifiers, + funcContextId, + ); + if (funcContextId) { + state.tokens[state.tokens.length - 1].contextId = funcContextId; + } +} + +// Parse a class declaration or literal (depending on the +// `isStatement` parameter). + +export function parseClass(isStatement, optionalId = false) { + // Put a context ID on the class keyword, the open-brace, and the close-brace, so that later + // code can easily navigate to meaningful points on the class. + const contextId = getNextContextId(); + + next(); + state.tokens[state.tokens.length - 1].contextId = contextId; + state.tokens[state.tokens.length - 1].isExpression = !isStatement; + // Like with functions, we declare a special "name scope" from the start of the name to the end + // of the class, but only with expression-style classes, to represent the fact that the name is + // available to the body of the class but not an outer declaration. 
+ let nameScopeStartTokenIndex = null; + if (!isStatement) { + nameScopeStartTokenIndex = state.tokens.length; + state.scopeDepth++; + } + parseClassId(isStatement, optionalId); + parseClassSuper(); + const openBraceIndex = state.tokens.length; + parseClassBody(contextId); + if (state.error) { + return; + } + state.tokens[openBraceIndex].contextId = contextId; + state.tokens[state.tokens.length - 1].contextId = contextId; + if (nameScopeStartTokenIndex !== null) { + const endTokenIndex = state.tokens.length; + state.scopes.push(new Scope(nameScopeStartTokenIndex, endTokenIndex, false)); + state.scopeDepth--; + } +} + +function isClassProperty() { + return match(tt.eq) || match(tt.semi) || match(tt.braceR) || match(tt.bang) || match(tt.colon); +} + +function isClassMethod() { + return match(tt.parenL) || match(tt.lessThan); +} + +function parseClassBody(classContextId) { + expect(tt.braceL); + + while (!eat(tt.braceR) && !state.error) { + if (eat(tt.semi)) { + continue; + } + + if (match(tt.at)) { + parseDecorator(); + continue; + } + const memberStart = state.start; + parseClassMember(memberStart, classContextId); + } +} + +function parseClassMember(memberStart, classContextId) { + if (isTypeScriptEnabled) { + tsParseModifiers([ + ContextualKeyword._declare, + ContextualKeyword._public, + ContextualKeyword._protected, + ContextualKeyword._private, + ContextualKeyword._override, + ]); + } + let isStatic = false; + if (match(tt.name) && state.contextualKeyword === ContextualKeyword._static) { + parseIdentifier(); // eats 'static' + if (isClassMethod()) { + parseClassMethod(memberStart, /* isConstructor */ false); + return; + } else if (isClassProperty()) { + parseClassProperty(); + return; + } + // otherwise something static + state.tokens[state.tokens.length - 1].type = tt._static; + isStatic = true; + + if (match(tt.braceL)) { + // This is a static block. Mark the word "static" with the class context ID for class element + // detection and parse as a regular block. + state.tokens[state.tokens.length - 1].contextId = classContextId; + parseBlock(); + return; + } + } + + parseClassMemberWithIsStatic(memberStart, isStatic, classContextId); +} + +function parseClassMemberWithIsStatic( + memberStart, + isStatic, + classContextId, +) { + if (isTypeScriptEnabled) { + if (tsTryParseClassMemberWithIsStatic(isStatic)) { + return; + } + } + if (eat(tt.star)) { + // a generator + parseClassPropertyName(classContextId); + parseClassMethod(memberStart, /* isConstructor */ false); + return; + } + + // Get the identifier name so we can tell if it's actually a keyword like "async", "get", or + // "set". + parseClassPropertyName(classContextId); + let isConstructor = false; + const token = state.tokens[state.tokens.length - 1]; + // We allow "constructor" as either an identifier or a string. + if (token.contextualKeyword === ContextualKeyword._constructor) { + isConstructor = true; + } + parsePostMemberNameModifiers(); + + if (isClassMethod()) { + parseClassMethod(memberStart, isConstructor); + } else if (isClassProperty()) { + parseClassProperty(); + } else if (token.contextualKeyword === ContextualKeyword._async && !isLineTerminator()) { + state.tokens[state.tokens.length - 1].type = tt._async; + // an async method + const isGenerator = match(tt.star); + if (isGenerator) { + next(); + } + + // The so-called parsed name would have been "async": get the real name. 
+ parseClassPropertyName(classContextId); + parsePostMemberNameModifiers(); + parseClassMethod(memberStart, false /* isConstructor */); + } else if ( + (token.contextualKeyword === ContextualKeyword._get || + token.contextualKeyword === ContextualKeyword._set) && + !(isLineTerminator() && match(tt.star)) + ) { + if (token.contextualKeyword === ContextualKeyword._get) { + state.tokens[state.tokens.length - 1].type = tt._get; + } else { + state.tokens[state.tokens.length - 1].type = tt._set; + } + // `get\n*` is an uninitialized property named 'get' followed by a generator. + // a getter or setter + // The so-called parsed name would have been "get/set": get the real name. + parseClassPropertyName(classContextId); + parseClassMethod(memberStart, /* isConstructor */ false); + } else if (token.contextualKeyword === ContextualKeyword._accessor && !isLineTerminator()) { + parseClassPropertyName(classContextId); + parseClassProperty(); + } else if (isLineTerminator()) { + // an uninitialized class property (due to ASI, since we don't otherwise recognize the next token) + parseClassProperty(); + } else { + unexpected(); + } +} + +function parseClassMethod(functionStart, isConstructor) { + if (isTypeScriptEnabled) { + tsTryParseTypeParameters(); + } else if (isFlowEnabled) { + if (match(tt.lessThan)) { + flowParseTypeParameterDeclaration(); + } + } + parseMethod(functionStart, isConstructor); +} + +// Return the name of the class property, if it is a simple identifier. +export function parseClassPropertyName(classContextId) { + parsePropertyName(classContextId); +} + +export function parsePostMemberNameModifiers() { + if (isTypeScriptEnabled) { + const oldIsType = pushTypeContext(0); + eat(tt.question); + popTypeContext(oldIsType); + } +} + +export function parseClassProperty() { + if (isTypeScriptEnabled) { + eatTypeToken(tt.bang); + tsTryParseTypeAnnotation(); + } else if (isFlowEnabled) { + if (match(tt.colon)) { + flowParseTypeAnnotation(); + } + } + + if (match(tt.eq)) { + const equalsTokenIndex = state.tokens.length; + next(); + parseMaybeAssign(); + state.tokens[equalsTokenIndex].rhsEndIndex = state.tokens.length; + } + semicolon(); +} + +function parseClassId(isStatement, optionalId = false) { + if ( + isTypeScriptEnabled && + (!isStatement || optionalId) && + isContextual(ContextualKeyword._implements) + ) { + return; + } + + if (match(tt.name)) { + parseBindingIdentifier(true); + } + + if (isTypeScriptEnabled) { + tsTryParseTypeParameters(); + } else if (isFlowEnabled) { + if (match(tt.lessThan)) { + flowParseTypeParameterDeclaration(); + } + } +} + +// Returns true if there was a superclass. +function parseClassSuper() { + let hasSuper = false; + if (eat(tt._extends)) { + parseExprSubscripts(); + hasSuper = true; + } else { + hasSuper = false; + } + if (isTypeScriptEnabled) { + tsAfterParseClassSuper(hasSuper); + } else if (isFlowEnabled) { + flowAfterParseClassSuper(hasSuper); + } +} + +// Parses module export declaration. + +export function parseExport() { + const exportIndex = state.tokens.length - 1; + if (isTypeScriptEnabled) { + if (tsTryParseExport()) { + return; + } + } + // export * from '...' 
+ if (shouldParseExportStar()) { + parseExportStar(); + } else if (isExportDefaultSpecifier()) { + // export default from + parseIdentifier(); + if (match(tt.comma) && lookaheadType() === tt.star) { + expect(tt.comma); + expect(tt.star); + expectContextual(ContextualKeyword._as); + parseIdentifier(); + } else { + parseExportSpecifiersMaybe(); + } + parseExportFrom(); + } else if (eat(tt._default)) { + // export default ... + parseExportDefaultExpression(); + } else if (shouldParseExportDeclaration()) { + parseExportDeclaration(); + } else { + // export { x, y as z } [from '...'] + parseExportSpecifiers(); + parseExportFrom(); + } + state.tokens[exportIndex].rhsEndIndex = state.tokens.length; +} + +function parseExportDefaultExpression() { + if (isTypeScriptEnabled) { + if (tsTryParseExportDefaultExpression()) { + return; + } + } + if (isFlowEnabled) { + if (flowTryParseExportDefaultExpression()) { + return; + } + } + const functionStart = state.start; + if (eat(tt._function)) { + parseFunction(functionStart, true, true); + } else if (isContextual(ContextualKeyword._async) && lookaheadType() === tt._function) { + // async function declaration + eatContextual(ContextualKeyword._async); + eat(tt._function); + parseFunction(functionStart, true, true); + } else if (match(tt._class)) { + parseClass(true, true); + } else if (match(tt.at)) { + parseDecorators(); + parseClass(true, true); + } else { + parseMaybeAssign(); + semicolon(); + } +} + +function parseExportDeclaration() { + if (isTypeScriptEnabled) { + tsParseExportDeclaration(); + } else if (isFlowEnabled) { + flowParseExportDeclaration(); + } else { + parseStatement(true); + } +} + +function isExportDefaultSpecifier() { + if (isTypeScriptEnabled && tsIsDeclarationStart()) { + return false; + } else if (isFlowEnabled && flowShouldDisallowExportDefaultSpecifier()) { + return false; + } + if (match(tt.name)) { + return state.contextualKeyword !== ContextualKeyword._async; + } + + if (!match(tt._default)) { + return false; + } + + const _next = nextTokenStart(); + const lookahead = lookaheadTypeAndKeyword(); + const hasFrom = + lookahead.type === tt.name && lookahead.contextualKeyword === ContextualKeyword._from; + if (lookahead.type === tt.comma) { + return true; + } + // lookahead again when `export default from` is seen + if (hasFrom) { + const nextAfterFrom = input.charCodeAt(nextTokenStartSince(_next + 4)); + return nextAfterFrom === charCodes.quotationMark || nextAfterFrom === charCodes.apostrophe; + } + return false; +} + +function parseExportSpecifiersMaybe() { + if (eat(tt.comma)) { + parseExportSpecifiers(); + } +} + +export function parseExportFrom() { + if (eatContextual(ContextualKeyword._from)) { + parseExprAtom(); + maybeParseImportAssertions(); + } + semicolon(); +} + +function shouldParseExportStar() { + if (isFlowEnabled) { + return flowShouldParseExportStar(); + } else { + return match(tt.star); + } +} + +function parseExportStar() { + if (isFlowEnabled) { + flowParseExportStar(); + } else { + baseParseExportStar(); + } +} + +export function baseParseExportStar() { + expect(tt.star); + + if (isContextual(ContextualKeyword._as)) { + parseExportNamespace(); + } else { + parseExportFrom(); + } +} + +function parseExportNamespace() { + next(); + state.tokens[state.tokens.length - 1].type = tt._as; + parseIdentifier(); + parseExportSpecifiersMaybe(); + parseExportFrom(); +} + +function shouldParseExportDeclaration() { + return ( + (isTypeScriptEnabled && tsIsDeclarationStart()) || + (isFlowEnabled && 
flowShouldParseExportDeclaration()) || + state.type === tt._var || + state.type === tt._const || + state.type === tt._let || + state.type === tt._function || + state.type === tt._class || + isContextual(ContextualKeyword._async) || + match(tt.at) + ); +} + +// Parses a comma-separated list of module exports. +export function parseExportSpecifiers() { + let first = true; + + // export { x, y as z } [from '...'] + expect(tt.braceL); + + while (!eat(tt.braceR) && !state.error) { + if (first) { + first = false; + } else { + expect(tt.comma); + if (eat(tt.braceR)) { + break; + } + } + parseExportSpecifier(); + } +} + +function parseExportSpecifier() { + if (isTypeScriptEnabled) { + tsParseExportSpecifier(); + return; + } + parseIdentifier(); + state.tokens[state.tokens.length - 1].identifierRole = IdentifierRole.ExportAccess; + if (eatContextual(ContextualKeyword._as)) { + parseIdentifier(); + } +} + +/** + * Starting at the `module` token in an import, determine if it was truly an + * import reflection token or just looks like one. + * + * Returns true for: + * import module foo from "foo"; + * import module from from "foo"; + * + * Returns false for: + * import module from "foo"; + * import module, {bar} from "foo"; + */ +function isImportReflection() { + const snapshot = state.snapshot(); + expectContextual(ContextualKeyword._module); + if (eatContextual(ContextualKeyword._from)) { + if (isContextual(ContextualKeyword._from)) { + state.restoreFromSnapshot(snapshot); + return true; + } else { + state.restoreFromSnapshot(snapshot); + return false; + } + } else if (match(tt.comma)) { + state.restoreFromSnapshot(snapshot); + return false; + } else { + state.restoreFromSnapshot(snapshot); + return true; + } +} + +/** + * Eat the "module" token from the import reflection proposal. + * https://github.com/tc39/proposal-import-reflection + */ +function parseMaybeImportReflection() { + // isImportReflection does snapshot/restore, so only run it if we see the word + // "module". + if (isContextual(ContextualKeyword._module) && isImportReflection()) { + next(); + } +} + +// Parses import declaration. + +export function parseImport() { + if (isTypeScriptEnabled && match(tt.name) && lookaheadType() === tt.eq) { + tsParseImportEqualsDeclaration(); + return; + } + if (isTypeScriptEnabled && isContextual(ContextualKeyword._type)) { + const lookahead = lookaheadTypeAndKeyword(); + if (lookahead.type === tt.name && lookahead.contextualKeyword !== ContextualKeyword._from) { + // One of these `import type` cases: + // import type T = require('T'); + // import type A from 'A'; + expectContextual(ContextualKeyword._type); + if (lookaheadType() === tt.eq) { + tsParseImportEqualsDeclaration(); + return; + } + // If this is an `import type...from` statement, then we already ate the + // type token, so proceed to the regular import parser. + } else if (lookahead.type === tt.star || lookahead.type === tt.braceL) { + // One of these `import type` cases, in which case we can eat the type token + // and proceed as normal: + // import type * as A from 'A'; + // import type {a} from 'A'; + expectContextual(ContextualKeyword._type); + } + // Otherwise, we are importing the name "type". + } + + // import '...' 
+ if (match(tt.string)) { + parseExprAtom(); + } else { + parseMaybeImportReflection(); + parseImportSpecifiers(); + expectContextual(ContextualKeyword._from); + parseExprAtom(); + } + maybeParseImportAssertions(); + semicolon(); +} + +// eslint-disable-next-line no-unused-vars +function shouldParseDefaultImport() { + return match(tt.name); +} + +function parseImportSpecifierLocal() { + parseImportedIdentifier(); +} + +// Parses a comma-separated list of module imports. +function parseImportSpecifiers() { + if (isFlowEnabled) { + flowStartParseImportSpecifiers(); + } + + let first = true; + if (shouldParseDefaultImport()) { + // import defaultObj, { x, y as z } from '...' + parseImportSpecifierLocal(); + + if (!eat(tt.comma)) return; + } + + if (match(tt.star)) { + next(); + expectContextual(ContextualKeyword._as); + + parseImportSpecifierLocal(); + + return; + } + + expect(tt.braceL); + while (!eat(tt.braceR) && !state.error) { + if (first) { + first = false; + } else { + // Detect an attempt to deep destructure + if (eat(tt.colon)) { + unexpected( + "ES2015 named imports do not destructure. Use another statement for destructuring after the import.", + ); + } + + expect(tt.comma); + if (eat(tt.braceR)) { + break; + } + } + + parseImportSpecifier(); + } +} + +function parseImportSpecifier() { + if (isTypeScriptEnabled) { + tsParseImportSpecifier(); + return; + } + if (isFlowEnabled) { + flowParseImportSpecifier(); + return; + } + parseImportedIdentifier(); + if (isContextual(ContextualKeyword._as)) { + state.tokens[state.tokens.length - 1].identifierRole = IdentifierRole.ImportAccess; + next(); + parseImportedIdentifier(); + } +} + +/** + * Parse import assertions like `assert {type: "json"}`. + * + * Import assertions technically have their own syntax, but are always parseable + * as a plain JS object, so just do that for simplicity. + */ +function maybeParseImportAssertions() { + if (isContextual(ContextualKeyword._assert) && !hasPrecedingLineBreak()) { + next(); + parseObj(false, false); + } +} diff --git a/node_modules/sucrase/dist/esm/parser/traverser/util.js b/node_modules/sucrase/dist/esm/parser/traverser/util.js new file mode 100644 index 0000000..6a2b2d9 --- /dev/null +++ b/node_modules/sucrase/dist/esm/parser/traverser/util.js @@ -0,0 +1,104 @@ +import {eat, finishToken, lookaheadTypeAndKeyword, match, nextTokenStart} from "../tokenizer/index"; + +import {formatTokenType, TokenType as tt} from "../tokenizer/types"; +import {charCodes} from "../util/charcodes"; +import {input, state} from "./base"; + +// ## Parser utilities + +// Tests whether parsed token is a contextual keyword. +export function isContextual(contextualKeyword) { + return state.contextualKeyword === contextualKeyword; +} + +export function isLookaheadContextual(contextualKeyword) { + const l = lookaheadTypeAndKeyword(); + return l.type === tt.name && l.contextualKeyword === contextualKeyword; +} + +// Consumes contextual keyword if possible. +export function eatContextual(contextualKeyword) { + return state.contextualKeyword === contextualKeyword && eat(tt.name); +} + +// Asserts that following token is given contextual keyword. +export function expectContextual(contextualKeyword) { + if (!eatContextual(contextualKeyword)) { + unexpected(); + } +} + +// Test whether a semicolon can be inserted at the current position. 
+export function canInsertSemicolon() { + return match(tt.eof) || match(tt.braceR) || hasPrecedingLineBreak(); +} + +export function hasPrecedingLineBreak() { + const prevToken = state.tokens[state.tokens.length - 1]; + const lastTokEnd = prevToken ? prevToken.end : 0; + for (let i = lastTokEnd; i < state.start; i++) { + const code = input.charCodeAt(i); + if ( + code === charCodes.lineFeed || + code === charCodes.carriageReturn || + code === 0x2028 || + code === 0x2029 + ) { + return true; + } + } + return false; +} + +export function hasFollowingLineBreak() { + const nextStart = nextTokenStart(); + for (let i = state.end; i < nextStart; i++) { + const code = input.charCodeAt(i); + if ( + code === charCodes.lineFeed || + code === charCodes.carriageReturn || + code === 0x2028 || + code === 0x2029 + ) { + return true; + } + } + return false; +} + +export function isLineTerminator() { + return eat(tt.semi) || canInsertSemicolon(); +} + +// Consume a semicolon, or, failing that, see if we are allowed to +// pretend that there is a semicolon at this position. +export function semicolon() { + if (!isLineTerminator()) { + unexpected('Unexpected token, expected ";"'); + } +} + +// Expect a token of a given type. If found, consume it, otherwise, +// raise an unexpected token error at given pos. +export function expect(type) { + const matched = eat(type); + if (!matched) { + unexpected(`Unexpected token, expected "${formatTokenType(type)}"`); + } +} + +/** + * Transition the parser to an error state. All code needs to be written to naturally unwind in this + * state, which allows us to backtrack without exceptions and without error plumbing everywhere. + */ +export function unexpected(message = "Unexpected token", pos = state.start) { + if (state.error) { + return; + } + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const err = new SyntaxError(message); + err.pos = pos; + state.error = err; + state.pos = input.length; + finishToken(tt.eof); +} diff --git a/node_modules/sucrase/dist/esm/parser/util/charcodes.js b/node_modules/sucrase/dist/esm/parser/util/charcodes.js new file mode 100644 index 0000000..36ea667 --- /dev/null +++ b/node_modules/sucrase/dist/esm/parser/util/charcodes.js @@ -0,0 +1,115 @@ +export var charCodes; (function (charCodes) { + const backSpace = 8; charCodes[charCodes["backSpace"] = backSpace] = "backSpace"; + const lineFeed = 10; charCodes[charCodes["lineFeed"] = lineFeed] = "lineFeed"; // '\n' + const tab = 9; charCodes[charCodes["tab"] = tab] = "tab"; // '\t' + const carriageReturn = 13; charCodes[charCodes["carriageReturn"] = carriageReturn] = "carriageReturn"; // '\r' + const shiftOut = 14; charCodes[charCodes["shiftOut"] = shiftOut] = "shiftOut"; + const space = 32; charCodes[charCodes["space"] = space] = "space"; + const exclamationMark = 33; charCodes[charCodes["exclamationMark"] = exclamationMark] = "exclamationMark"; // '!' 
+ const quotationMark = 34; charCodes[charCodes["quotationMark"] = quotationMark] = "quotationMark"; // '"' + const numberSign = 35; charCodes[charCodes["numberSign"] = numberSign] = "numberSign"; // '#' + const dollarSign = 36; charCodes[charCodes["dollarSign"] = dollarSign] = "dollarSign"; // '$' + const percentSign = 37; charCodes[charCodes["percentSign"] = percentSign] = "percentSign"; // '%' + const ampersand = 38; charCodes[charCodes["ampersand"] = ampersand] = "ampersand"; // '&' + const apostrophe = 39; charCodes[charCodes["apostrophe"] = apostrophe] = "apostrophe"; // ''' + const leftParenthesis = 40; charCodes[charCodes["leftParenthesis"] = leftParenthesis] = "leftParenthesis"; // '(' + const rightParenthesis = 41; charCodes[charCodes["rightParenthesis"] = rightParenthesis] = "rightParenthesis"; // ')' + const asterisk = 42; charCodes[charCodes["asterisk"] = asterisk] = "asterisk"; // '*' + const plusSign = 43; charCodes[charCodes["plusSign"] = plusSign] = "plusSign"; // '+' + const comma = 44; charCodes[charCodes["comma"] = comma] = "comma"; // ',' + const dash = 45; charCodes[charCodes["dash"] = dash] = "dash"; // '-' + const dot = 46; charCodes[charCodes["dot"] = dot] = "dot"; // '.' + const slash = 47; charCodes[charCodes["slash"] = slash] = "slash"; // '/' + const digit0 = 48; charCodes[charCodes["digit0"] = digit0] = "digit0"; // '0' + const digit1 = 49; charCodes[charCodes["digit1"] = digit1] = "digit1"; // '1' + const digit2 = 50; charCodes[charCodes["digit2"] = digit2] = "digit2"; // '2' + const digit3 = 51; charCodes[charCodes["digit3"] = digit3] = "digit3"; // '3' + const digit4 = 52; charCodes[charCodes["digit4"] = digit4] = "digit4"; // '4' + const digit5 = 53; charCodes[charCodes["digit5"] = digit5] = "digit5"; // '5' + const digit6 = 54; charCodes[charCodes["digit6"] = digit6] = "digit6"; // '6' + const digit7 = 55; charCodes[charCodes["digit7"] = digit7] = "digit7"; // '7' + const digit8 = 56; charCodes[charCodes["digit8"] = digit8] = "digit8"; // '8' + const digit9 = 57; charCodes[charCodes["digit9"] = digit9] = "digit9"; // '9' + const colon = 58; charCodes[charCodes["colon"] = colon] = "colon"; // ':' + const semicolon = 59; charCodes[charCodes["semicolon"] = semicolon] = "semicolon"; // ';' + const lessThan = 60; charCodes[charCodes["lessThan"] = lessThan] = "lessThan"; // '<' + const equalsTo = 61; charCodes[charCodes["equalsTo"] = equalsTo] = "equalsTo"; // '=' + const greaterThan = 62; charCodes[charCodes["greaterThan"] = greaterThan] = "greaterThan"; // '>' + const questionMark = 63; charCodes[charCodes["questionMark"] = questionMark] = "questionMark"; // '?' 
+ const atSign = 64; charCodes[charCodes["atSign"] = atSign] = "atSign"; // '@' + const uppercaseA = 65; charCodes[charCodes["uppercaseA"] = uppercaseA] = "uppercaseA"; // 'A' + const uppercaseB = 66; charCodes[charCodes["uppercaseB"] = uppercaseB] = "uppercaseB"; // 'B' + const uppercaseC = 67; charCodes[charCodes["uppercaseC"] = uppercaseC] = "uppercaseC"; // 'C' + const uppercaseD = 68; charCodes[charCodes["uppercaseD"] = uppercaseD] = "uppercaseD"; // 'D' + const uppercaseE = 69; charCodes[charCodes["uppercaseE"] = uppercaseE] = "uppercaseE"; // 'E' + const uppercaseF = 70; charCodes[charCodes["uppercaseF"] = uppercaseF] = "uppercaseF"; // 'F' + const uppercaseG = 71; charCodes[charCodes["uppercaseG"] = uppercaseG] = "uppercaseG"; // 'G' + const uppercaseH = 72; charCodes[charCodes["uppercaseH"] = uppercaseH] = "uppercaseH"; // 'H' + const uppercaseI = 73; charCodes[charCodes["uppercaseI"] = uppercaseI] = "uppercaseI"; // 'I' + const uppercaseJ = 74; charCodes[charCodes["uppercaseJ"] = uppercaseJ] = "uppercaseJ"; // 'J' + const uppercaseK = 75; charCodes[charCodes["uppercaseK"] = uppercaseK] = "uppercaseK"; // 'K' + const uppercaseL = 76; charCodes[charCodes["uppercaseL"] = uppercaseL] = "uppercaseL"; // 'L' + const uppercaseM = 77; charCodes[charCodes["uppercaseM"] = uppercaseM] = "uppercaseM"; // 'M' + const uppercaseN = 78; charCodes[charCodes["uppercaseN"] = uppercaseN] = "uppercaseN"; // 'N' + const uppercaseO = 79; charCodes[charCodes["uppercaseO"] = uppercaseO] = "uppercaseO"; // 'O' + const uppercaseP = 80; charCodes[charCodes["uppercaseP"] = uppercaseP] = "uppercaseP"; // 'P' + const uppercaseQ = 81; charCodes[charCodes["uppercaseQ"] = uppercaseQ] = "uppercaseQ"; // 'Q' + const uppercaseR = 82; charCodes[charCodes["uppercaseR"] = uppercaseR] = "uppercaseR"; // 'R' + const uppercaseS = 83; charCodes[charCodes["uppercaseS"] = uppercaseS] = "uppercaseS"; // 'S' + const uppercaseT = 84; charCodes[charCodes["uppercaseT"] = uppercaseT] = "uppercaseT"; // 'T' + const uppercaseU = 85; charCodes[charCodes["uppercaseU"] = uppercaseU] = "uppercaseU"; // 'U' + const uppercaseV = 86; charCodes[charCodes["uppercaseV"] = uppercaseV] = "uppercaseV"; // 'V' + const uppercaseW = 87; charCodes[charCodes["uppercaseW"] = uppercaseW] = "uppercaseW"; // 'W' + const uppercaseX = 88; charCodes[charCodes["uppercaseX"] = uppercaseX] = "uppercaseX"; // 'X' + const uppercaseY = 89; charCodes[charCodes["uppercaseY"] = uppercaseY] = "uppercaseY"; // 'Y' + const uppercaseZ = 90; charCodes[charCodes["uppercaseZ"] = uppercaseZ] = "uppercaseZ"; // 'Z' + const leftSquareBracket = 91; charCodes[charCodes["leftSquareBracket"] = leftSquareBracket] = "leftSquareBracket"; // '[' + const backslash = 92; charCodes[charCodes["backslash"] = backslash] = "backslash"; // '\ ' + const rightSquareBracket = 93; charCodes[charCodes["rightSquareBracket"] = rightSquareBracket] = "rightSquareBracket"; // ']' + const caret = 94; charCodes[charCodes["caret"] = caret] = "caret"; // '^' + const underscore = 95; charCodes[charCodes["underscore"] = underscore] = "underscore"; // '_' + const graveAccent = 96; charCodes[charCodes["graveAccent"] = graveAccent] = "graveAccent"; // '`' + const lowercaseA = 97; charCodes[charCodes["lowercaseA"] = lowercaseA] = "lowercaseA"; // 'a' + const lowercaseB = 98; charCodes[charCodes["lowercaseB"] = lowercaseB] = "lowercaseB"; // 'b' + const lowercaseC = 99; charCodes[charCodes["lowercaseC"] = lowercaseC] = "lowercaseC"; // 'c' + const lowercaseD = 100; charCodes[charCodes["lowercaseD"] = lowercaseD] 
= "lowercaseD"; // 'd' + const lowercaseE = 101; charCodes[charCodes["lowercaseE"] = lowercaseE] = "lowercaseE"; // 'e' + const lowercaseF = 102; charCodes[charCodes["lowercaseF"] = lowercaseF] = "lowercaseF"; // 'f' + const lowercaseG = 103; charCodes[charCodes["lowercaseG"] = lowercaseG] = "lowercaseG"; // 'g' + const lowercaseH = 104; charCodes[charCodes["lowercaseH"] = lowercaseH] = "lowercaseH"; // 'h' + const lowercaseI = 105; charCodes[charCodes["lowercaseI"] = lowercaseI] = "lowercaseI"; // 'i' + const lowercaseJ = 106; charCodes[charCodes["lowercaseJ"] = lowercaseJ] = "lowercaseJ"; // 'j' + const lowercaseK = 107; charCodes[charCodes["lowercaseK"] = lowercaseK] = "lowercaseK"; // 'k' + const lowercaseL = 108; charCodes[charCodes["lowercaseL"] = lowercaseL] = "lowercaseL"; // 'l' + const lowercaseM = 109; charCodes[charCodes["lowercaseM"] = lowercaseM] = "lowercaseM"; // 'm' + const lowercaseN = 110; charCodes[charCodes["lowercaseN"] = lowercaseN] = "lowercaseN"; // 'n' + const lowercaseO = 111; charCodes[charCodes["lowercaseO"] = lowercaseO] = "lowercaseO"; // 'o' + const lowercaseP = 112; charCodes[charCodes["lowercaseP"] = lowercaseP] = "lowercaseP"; // 'p' + const lowercaseQ = 113; charCodes[charCodes["lowercaseQ"] = lowercaseQ] = "lowercaseQ"; // 'q' + const lowercaseR = 114; charCodes[charCodes["lowercaseR"] = lowercaseR] = "lowercaseR"; // 'r' + const lowercaseS = 115; charCodes[charCodes["lowercaseS"] = lowercaseS] = "lowercaseS"; // 's' + const lowercaseT = 116; charCodes[charCodes["lowercaseT"] = lowercaseT] = "lowercaseT"; // 't' + const lowercaseU = 117; charCodes[charCodes["lowercaseU"] = lowercaseU] = "lowercaseU"; // 'u' + const lowercaseV = 118; charCodes[charCodes["lowercaseV"] = lowercaseV] = "lowercaseV"; // 'v' + const lowercaseW = 119; charCodes[charCodes["lowercaseW"] = lowercaseW] = "lowercaseW"; // 'w' + const lowercaseX = 120; charCodes[charCodes["lowercaseX"] = lowercaseX] = "lowercaseX"; // 'x' + const lowercaseY = 121; charCodes[charCodes["lowercaseY"] = lowercaseY] = "lowercaseY"; // 'y' + const lowercaseZ = 122; charCodes[charCodes["lowercaseZ"] = lowercaseZ] = "lowercaseZ"; // 'z' + const leftCurlyBrace = 123; charCodes[charCodes["leftCurlyBrace"] = leftCurlyBrace] = "leftCurlyBrace"; // '{' + const verticalBar = 124; charCodes[charCodes["verticalBar"] = verticalBar] = "verticalBar"; // '|' + const rightCurlyBrace = 125; charCodes[charCodes["rightCurlyBrace"] = rightCurlyBrace] = "rightCurlyBrace"; // '}' + const tilde = 126; charCodes[charCodes["tilde"] = tilde] = "tilde"; // '~' + const nonBreakingSpace = 160; charCodes[charCodes["nonBreakingSpace"] = nonBreakingSpace] = "nonBreakingSpace"; + // eslint-disable-next-line no-irregular-whitespace + const oghamSpaceMark = 5760; charCodes[charCodes["oghamSpaceMark"] = oghamSpaceMark] = "oghamSpaceMark"; // ' ' + const lineSeparator = 8232; charCodes[charCodes["lineSeparator"] = lineSeparator] = "lineSeparator"; + const paragraphSeparator = 8233; charCodes[charCodes["paragraphSeparator"] = paragraphSeparator] = "paragraphSeparator"; +})(charCodes || (charCodes = {})); + +export function isDigit(code) { + return ( + (code >= charCodes.digit0 && code <= charCodes.digit9) || + (code >= charCodes.lowercaseA && code <= charCodes.lowercaseF) || + (code >= charCodes.uppercaseA && code <= charCodes.uppercaseF) + ); +} diff --git a/node_modules/sucrase/dist/esm/parser/util/identifier.js b/node_modules/sucrase/dist/esm/parser/util/identifier.js new file mode 100644 index 0000000..33a6bb1 --- /dev/null +++ 
b/node_modules/sucrase/dist/esm/parser/util/identifier.js @@ -0,0 +1,34 @@ +import {charCodes} from "./charcodes"; +import {WHITESPACE_CHARS} from "./whitespace"; + +function computeIsIdentifierChar(code) { + if (code < 48) return code === 36; + if (code < 58) return true; + if (code < 65) return false; + if (code < 91) return true; + if (code < 97) return code === 95; + if (code < 123) return true; + if (code < 128) return false; + throw new Error("Should not be called with non-ASCII char code."); +} + +export const IS_IDENTIFIER_CHAR = new Uint8Array(65536); +for (let i = 0; i < 128; i++) { + IS_IDENTIFIER_CHAR[i] = computeIsIdentifierChar(i) ? 1 : 0; +} +for (let i = 128; i < 65536; i++) { + IS_IDENTIFIER_CHAR[i] = 1; +} +// Aside from whitespace and newlines, all characters outside the ASCII space are either +// identifier characters or invalid. Since we're not performing code validation, we can just +// treat all invalid characters as identifier characters. +for (const whitespaceChar of WHITESPACE_CHARS) { + IS_IDENTIFIER_CHAR[whitespaceChar] = 0; +} +IS_IDENTIFIER_CHAR[0x2028] = 0; +IS_IDENTIFIER_CHAR[0x2029] = 0; + +export const IS_IDENTIFIER_START = IS_IDENTIFIER_CHAR.slice(); +for (let numChar = charCodes.digit0; numChar <= charCodes.digit9; numChar++) { + IS_IDENTIFIER_START[numChar] = 0; +} diff --git a/node_modules/sucrase/dist/esm/parser/util/whitespace.js b/node_modules/sucrase/dist/esm/parser/util/whitespace.js new file mode 100644 index 0000000..303b8a6 --- /dev/null +++ b/node_modules/sucrase/dist/esm/parser/util/whitespace.js @@ -0,0 +1,33 @@ +import {charCodes} from "./charcodes"; + +// https://tc39.github.io/ecma262/#sec-white-space +export const WHITESPACE_CHARS = [ + 0x0009, + 0x000b, + 0x000c, + charCodes.space, + charCodes.nonBreakingSpace, + charCodes.oghamSpaceMark, + 0x2000, // EN QUAD + 0x2001, // EM QUAD + 0x2002, // EN SPACE + 0x2003, // EM SPACE + 0x2004, // THREE-PER-EM SPACE + 0x2005, // FOUR-PER-EM SPACE + 0x2006, // SIX-PER-EM SPACE + 0x2007, // FIGURE SPACE + 0x2008, // PUNCTUATION SPACE + 0x2009, // THIN SPACE + 0x200a, // HAIR SPACE + 0x202f, // NARROW NO-BREAK SPACE + 0x205f, // MEDIUM MATHEMATICAL SPACE + 0x3000, // IDEOGRAPHIC SPACE + 0xfeff, // ZERO WIDTH NO-BREAK SPACE +]; + +export const skipWhiteSpace = /(?:\s|\/\/.*|\/\*[^]*?\*\/)*/g; + +export const IS_WHITESPACE = new Uint8Array(65536); +for (const char of WHITESPACE_CHARS) { + IS_WHITESPACE[char] = 1; +} diff --git a/node_modules/sucrase/dist/esm/register.js b/node_modules/sucrase/dist/esm/register.js new file mode 100644 index 0000000..55a00a9 --- /dev/null +++ b/node_modules/sucrase/dist/esm/register.js @@ -0,0 +1,83 @@ +import * as pirates from "pirates"; + +import { transform} from "./index"; + + + + + + + + +export function addHook( + extension, + options, + hookOptions, +) { + return pirates.addHook( + (code, filePath) => { + const {code: transformedCode, sourceMap} = transform(code, { + ...options, + sourceMapOptions: {compiledFilename: filePath}, + filePath, + }); + const mapBase64 = Buffer.from(JSON.stringify(sourceMap)).toString("base64"); + const suffix = `//# sourceMappingURL=data:application/json;charset=utf-8;base64,${mapBase64}`; + return `${transformedCode}\n${suffix}`; + }, + {...hookOptions, exts: [extension]}, + ); +} + +export function registerJS(hookOptions) { + return addHook(".js", {transforms: ["imports", "flow", "jsx"]}, hookOptions); +} + +export function registerJSX(hookOptions) { + return addHook(".jsx", {transforms: ["imports", "flow", "jsx"]}, hookOptions); +} + 
+export function registerTS(hookOptions) { + return addHook(".ts", {transforms: ["imports", "typescript"]}, hookOptions); +} + +export function registerTSX(hookOptions) { + return addHook(".tsx", {transforms: ["imports", "typescript", "jsx"]}, hookOptions); +} + +export function registerTSLegacyModuleInterop(hookOptions) { + return addHook( + ".ts", + { + transforms: ["imports", "typescript"], + enableLegacyTypeScriptModuleInterop: true, + }, + hookOptions, + ); +} + +export function registerTSXLegacyModuleInterop(hookOptions) { + return addHook( + ".tsx", + { + transforms: ["imports", "typescript", "jsx"], + enableLegacyTypeScriptModuleInterop: true, + }, + hookOptions, + ); +} + +export function registerAll(hookOptions) { + const reverts = [ + registerJS(hookOptions), + registerJSX(hookOptions), + registerTS(hookOptions), + registerTSX(hookOptions), + ]; + + return () => { + for (const fn of reverts) { + fn(); + } + }; +} diff --git a/node_modules/sucrase/dist/esm/transformers/CJSImportTransformer.js b/node_modules/sucrase/dist/esm/transformers/CJSImportTransformer.js new file mode 100644 index 0000000..93b65a8 --- /dev/null +++ b/node_modules/sucrase/dist/esm/transformers/CJSImportTransformer.js @@ -0,0 +1,859 @@ + + + +import {IdentifierRole, isDeclaration, isObjectShorthandDeclaration} from "../parser/tokenizer"; +import {ContextualKeyword} from "../parser/tokenizer/keywords"; +import {TokenType as tt} from "../parser/tokenizer/types"; + +import elideImportEquals from "../util/elideImportEquals"; +import getDeclarationInfo, { + + EMPTY_DECLARATION_INFO, +} from "../util/getDeclarationInfo"; +import getImportExportSpecifierInfo from "../util/getImportExportSpecifierInfo"; +import {removeMaybeImportAssertion} from "../util/removeMaybeImportAssertion"; +import shouldElideDefaultExport from "../util/shouldElideDefaultExport"; + + +import Transformer from "./Transformer"; + +/** + * Class for editing import statements when we are transforming to commonjs. + */ +export default class CJSImportTransformer extends Transformer { + __init() {this.hadExport = false} + __init2() {this.hadNamedExport = false} + __init3() {this.hadDefaultExport = false} + + + constructor( + rootTransformer, + tokens, + importProcessor, + nameManager, + helperManager, + reactHotLoaderTransformer, + enableLegacyBabel5ModuleInterop, + enableLegacyTypeScriptModuleInterop, + isTypeScriptTransformEnabled, + preserveDynamicImport, + ) { + super();this.rootTransformer = rootTransformer;this.tokens = tokens;this.importProcessor = importProcessor;this.nameManager = nameManager;this.helperManager = helperManager;this.reactHotLoaderTransformer = reactHotLoaderTransformer;this.enableLegacyBabel5ModuleInterop = enableLegacyBabel5ModuleInterop;this.enableLegacyTypeScriptModuleInterop = enableLegacyTypeScriptModuleInterop;this.isTypeScriptTransformEnabled = isTypeScriptTransformEnabled;this.preserveDynamicImport = preserveDynamicImport;CJSImportTransformer.prototype.__init.call(this);CJSImportTransformer.prototype.__init2.call(this);CJSImportTransformer.prototype.__init3.call(this);; + this.declarationInfo = isTypeScriptTransformEnabled + ? 
getDeclarationInfo(tokens) + : EMPTY_DECLARATION_INFO; + } + + getPrefixCode() { + let prefix = ""; + if (this.hadExport) { + prefix += 'Object.defineProperty(exports, "__esModule", {value: true});'; + } + return prefix; + } + + getSuffixCode() { + if (this.enableLegacyBabel5ModuleInterop && this.hadDefaultExport && !this.hadNamedExport) { + return "\nmodule.exports = exports.default;\n"; + } + return ""; + } + + process() { + // TypeScript `import foo = require('foo');` should always just be translated to plain require. + if (this.tokens.matches3(tt._import, tt.name, tt.eq)) { + return this.processImportEquals(); + } + if (this.tokens.matches1(tt._import)) { + this.processImport(); + return true; + } + if (this.tokens.matches2(tt._export, tt.eq)) { + this.tokens.replaceToken("module.exports"); + return true; + } + if (this.tokens.matches1(tt._export) && !this.tokens.currentToken().isType) { + this.hadExport = true; + return this.processExport(); + } + if (this.tokens.matches2(tt.name, tt.postIncDec)) { + // Fall through to normal identifier matching if this doesn't apply. + if (this.processPostIncDec()) { + return true; + } + } + if (this.tokens.matches1(tt.name) || this.tokens.matches1(tt.jsxName)) { + return this.processIdentifier(); + } + if (this.tokens.matches1(tt.eq)) { + return this.processAssignment(); + } + if (this.tokens.matches1(tt.assign)) { + return this.processComplexAssignment(); + } + if (this.tokens.matches1(tt.preIncDec)) { + return this.processPreIncDec(); + } + return false; + } + + processImportEquals() { + const importName = this.tokens.identifierNameAtIndex(this.tokens.currentIndex() + 1); + if (this.importProcessor.isTypeName(importName)) { + // If this name is only used as a type, elide the whole import. + elideImportEquals(this.tokens); + } else { + // Otherwise, switch `import` to `const`. + this.tokens.replaceToken("const"); + } + return true; + } + + /** + * Transform this: + * import foo, {bar} from 'baz'; + * into + * var _baz = require('baz'); var _baz2 = _interopRequireDefault(_baz); + * + * The import code was already generated in the import preprocessing step, so + * we just need to look it up. + */ + processImport() { + if (this.tokens.matches2(tt._import, tt.parenL)) { + if (this.preserveDynamicImport) { + // Bail out, only making progress for this one token. + this.tokens.copyToken(); + return; + } + const requireWrapper = this.enableLegacyTypeScriptModuleInterop + ? "" + : `${this.helperManager.getHelperName("interopRequireWildcard")}(`; + this.tokens.replaceToken(`Promise.resolve().then(() => ${requireWrapper}require`); + const contextId = this.tokens.currentToken().contextId; + if (contextId == null) { + throw new Error("Expected context ID on dynamic import invocation."); + } + this.tokens.copyToken(); + while (!this.tokens.matchesContextIdAndLabel(tt.parenR, contextId)) { + this.rootTransformer.processToken(); + } + this.tokens.replaceToken(requireWrapper ? 
")))" : "))"); + return; + } + + const wasOnlyTypes = this.removeImportAndDetectIfType(); + + if (wasOnlyTypes) { + this.tokens.removeToken(); + } else { + const path = this.tokens.stringValue(); + this.tokens.replaceTokenTrimmingLeftWhitespace(this.importProcessor.claimImportCode(path)); + this.tokens.appendCode(this.importProcessor.claimImportCode(path)); + } + removeMaybeImportAssertion(this.tokens); + if (this.tokens.matches1(tt.semi)) { + this.tokens.removeToken(); + } + } + + /** + * Erase this import, and return true if it was either of the form "import type" or contained only + * "type" named imports. Such imports should not even do a side-effect import. + * + * The position should end at the import string. + */ + removeImportAndDetectIfType() { + this.tokens.removeInitialToken(); + if ( + this.tokens.matchesContextual(ContextualKeyword._type) && + !this.tokens.matches1AtIndex(this.tokens.currentIndex() + 1, tt.comma) && + !this.tokens.matchesContextualAtIndex(this.tokens.currentIndex() + 1, ContextualKeyword._from) + ) { + // This is an "import type" statement, so exit early. + this.removeRemainingImport(); + return true; + } + + if (this.tokens.matches1(tt.name) || this.tokens.matches1(tt.star)) { + // We have a default import or namespace import, so there must be some + // non-type import. + this.removeRemainingImport(); + return false; + } + + if (this.tokens.matches1(tt.string)) { + // This is a bare import, so we should proceed with the import. + return false; + } + + let foundNonType = false; + while (!this.tokens.matches1(tt.string)) { + // Check if any named imports are of the form "foo" or "foo as bar", with + // no leading "type". + if ((!foundNonType && this.tokens.matches1(tt.braceL)) || this.tokens.matches1(tt.comma)) { + this.tokens.removeToken(); + if ( + this.tokens.matches2(tt.name, tt.comma) || + this.tokens.matches2(tt.name, tt.braceR) || + this.tokens.matches4(tt.name, tt.name, tt.name, tt.comma) || + this.tokens.matches4(tt.name, tt.name, tt.name, tt.braceR) + ) { + foundNonType = true; + } + } + this.tokens.removeToken(); + } + return !foundNonType; + } + + removeRemainingImport() { + while (!this.tokens.matches1(tt.string)) { + this.tokens.removeToken(); + } + } + + processIdentifier() { + const token = this.tokens.currentToken(); + if (token.shadowsGlobal) { + return false; + } + + if (token.identifierRole === IdentifierRole.ObjectShorthand) { + return this.processObjectShorthand(); + } + + if (token.identifierRole !== IdentifierRole.Access) { + return false; + } + const replacement = this.importProcessor.getIdentifierReplacement( + this.tokens.identifierNameForToken(token), + ); + if (!replacement) { + return false; + } + // Tolerate any number of closing parens while looking for an opening paren + // that indicates a function call. + let possibleOpenParenIndex = this.tokens.currentIndex() + 1; + while ( + possibleOpenParenIndex < this.tokens.tokens.length && + this.tokens.tokens[possibleOpenParenIndex].type === tt.parenR + ) { + possibleOpenParenIndex++; + } + // Avoid treating imported functions as methods of their `exports` object + // by using `(0, f)` when the identifier is in a paren expression. Else + // use `Function.prototype.call` when the identifier is a guaranteed + // function call. When using `call`, pass undefined as the context. 
+ if (this.tokens.tokens[possibleOpenParenIndex].type === tt.parenL) { + if ( + this.tokens.tokenAtRelativeIndex(1).type === tt.parenL && + this.tokens.tokenAtRelativeIndex(-1).type !== tt._new + ) { + this.tokens.replaceToken(`${replacement}.call(void 0, `); + // Remove the old paren. + this.tokens.removeToken(); + // Balance out the new paren. + this.rootTransformer.processBalancedCode(); + this.tokens.copyExpectedToken(tt.parenR); + } else { + // See here: http://2ality.com/2015/12/references.html + this.tokens.replaceToken(`(0, ${replacement})`); + } + } else { + this.tokens.replaceToken(replacement); + } + return true; + } + + processObjectShorthand() { + const identifier = this.tokens.identifierName(); + const replacement = this.importProcessor.getIdentifierReplacement(identifier); + if (!replacement) { + return false; + } + this.tokens.replaceToken(`${identifier}: ${replacement}`); + return true; + } + + processExport() { + if ( + this.tokens.matches2(tt._export, tt._enum) || + this.tokens.matches3(tt._export, tt._const, tt._enum) + ) { + // Let the TypeScript transform handle it. + return false; + } + if (this.tokens.matches2(tt._export, tt._default)) { + this.hadDefaultExport = true; + if (this.tokens.matches3(tt._export, tt._default, tt._enum)) { + // Flow export default enums need some special handling, so handle them + // in that tranform rather than this one. + return false; + } + this.processExportDefault(); + return true; + } + this.hadNamedExport = true; + if ( + this.tokens.matches2(tt._export, tt._var) || + this.tokens.matches2(tt._export, tt._let) || + this.tokens.matches2(tt._export, tt._const) + ) { + this.processExportVar(); + return true; + } else if ( + this.tokens.matches2(tt._export, tt._function) || + // export async function + this.tokens.matches3(tt._export, tt.name, tt._function) + ) { + this.processExportFunction(); + return true; + } else if ( + this.tokens.matches2(tt._export, tt._class) || + this.tokens.matches3(tt._export, tt._abstract, tt._class) || + this.tokens.matches2(tt._export, tt.at) + ) { + this.processExportClass(); + return true; + } else if (this.tokens.matches2(tt._export, tt.braceL)) { + this.processExportBindings(); + return true; + } else if (this.tokens.matches2(tt._export, tt.star)) { + this.processExportStar(); + return true; + } else if ( + this.tokens.matches2(tt._export, tt.name) && + this.tokens.matchesContextualAtIndex(this.tokens.currentIndex() + 1, ContextualKeyword._type) + ) { + // export type {a}; + // export type {a as b}; + // export type {a} from './b'; + // export type * from './b'; + // export type * as ns from './b'; + this.tokens.removeInitialToken(); + this.tokens.removeToken(); + if (this.tokens.matches1(tt.braceL)) { + while (!this.tokens.matches1(tt.braceR)) { + this.tokens.removeToken(); + } + this.tokens.removeToken(); + } else { + // * + this.tokens.removeToken(); + if (this.tokens.matches1(tt._as)) { + // as + this.tokens.removeToken(); + // ns + this.tokens.removeToken(); + } + } + // Remove type re-export `... 
} from './T'` + if ( + this.tokens.matchesContextual(ContextualKeyword._from) && + this.tokens.matches1AtIndex(this.tokens.currentIndex() + 1, tt.string) + ) { + this.tokens.removeToken(); + this.tokens.removeToken(); + removeMaybeImportAssertion(this.tokens); + } + return true; + } else { + throw new Error("Unrecognized export syntax."); + } + } + + processAssignment() { + const index = this.tokens.currentIndex(); + const identifierToken = this.tokens.tokens[index - 1]; + // If the LHS is a type identifier, this must be a declaration like `let a: b = c;`, + // with `b` as the identifier, so nothing needs to be done in that case. + if (identifierToken.isType || identifierToken.type !== tt.name) { + return false; + } + if (identifierToken.shadowsGlobal) { + return false; + } + if (index >= 2 && this.tokens.matches1AtIndex(index - 2, tt.dot)) { + return false; + } + if (index >= 2 && [tt._var, tt._let, tt._const].includes(this.tokens.tokens[index - 2].type)) { + // Declarations don't need an extra assignment. This doesn't avoid the + // assignment for comma-separated declarations, but it's still correct + // since the assignment is just redundant. + return false; + } + const assignmentSnippet = this.importProcessor.resolveExportBinding( + this.tokens.identifierNameForToken(identifierToken), + ); + if (!assignmentSnippet) { + return false; + } + this.tokens.copyToken(); + this.tokens.appendCode(` ${assignmentSnippet} =`); + return true; + } + + /** + * Process something like `a += 3`, where `a` might be an exported value. + */ + processComplexAssignment() { + const index = this.tokens.currentIndex(); + const identifierToken = this.tokens.tokens[index - 1]; + if (identifierToken.type !== tt.name) { + return false; + } + if (identifierToken.shadowsGlobal) { + return false; + } + if (index >= 2 && this.tokens.matches1AtIndex(index - 2, tt.dot)) { + return false; + } + const assignmentSnippet = this.importProcessor.resolveExportBinding( + this.tokens.identifierNameForToken(identifierToken), + ); + if (!assignmentSnippet) { + return false; + } + this.tokens.appendCode(` = ${assignmentSnippet}`); + this.tokens.copyToken(); + return true; + } + + /** + * Process something like `++a`, where `a` might be an exported value. + */ + processPreIncDec() { + const index = this.tokens.currentIndex(); + const identifierToken = this.tokens.tokens[index + 1]; + if (identifierToken.type !== tt.name) { + return false; + } + if (identifierToken.shadowsGlobal) { + return false; + } + // Ignore things like ++a.b and ++a[b] and ++a().b. + if ( + index + 2 < this.tokens.tokens.length && + (this.tokens.matches1AtIndex(index + 2, tt.dot) || + this.tokens.matches1AtIndex(index + 2, tt.bracketL) || + this.tokens.matches1AtIndex(index + 2, tt.parenL)) + ) { + return false; + } + const identifierName = this.tokens.identifierNameForToken(identifierToken); + const assignmentSnippet = this.importProcessor.resolveExportBinding(identifierName); + if (!assignmentSnippet) { + return false; + } + this.tokens.appendCode(`${assignmentSnippet} = `); + this.tokens.copyToken(); + return true; + } + + /** + * Process something like `a++`, where `a` might be an exported value. + * This starts at the `a`, not at the `++`. 
+ */ + processPostIncDec() { + const index = this.tokens.currentIndex(); + const identifierToken = this.tokens.tokens[index]; + const operatorToken = this.tokens.tokens[index + 1]; + if (identifierToken.type !== tt.name) { + return false; + } + if (identifierToken.shadowsGlobal) { + return false; + } + if (index >= 1 && this.tokens.matches1AtIndex(index - 1, tt.dot)) { + return false; + } + const identifierName = this.tokens.identifierNameForToken(identifierToken); + const assignmentSnippet = this.importProcessor.resolveExportBinding(identifierName); + if (!assignmentSnippet) { + return false; + } + const operatorCode = this.tokens.rawCodeForToken(operatorToken); + // We might also replace the identifier with something like exports.x, so + // do that replacement here as well. + const base = this.importProcessor.getIdentifierReplacement(identifierName) || identifierName; + if (operatorCode === "++") { + this.tokens.replaceToken(`(${base} = ${assignmentSnippet} = ${base} + 1, ${base} - 1)`); + } else if (operatorCode === "--") { + this.tokens.replaceToken(`(${base} = ${assignmentSnippet} = ${base} - 1, ${base} + 1)`); + } else { + throw new Error(`Unexpected operator: ${operatorCode}`); + } + this.tokens.removeToken(); + return true; + } + + processExportDefault() { + if ( + this.tokens.matches4(tt._export, tt._default, tt._function, tt.name) || + // export default async function + (this.tokens.matches5(tt._export, tt._default, tt.name, tt._function, tt.name) && + this.tokens.matchesContextualAtIndex( + this.tokens.currentIndex() + 2, + ContextualKeyword._async, + )) + ) { + this.tokens.removeInitialToken(); + this.tokens.removeToken(); + // Named function export case: change it to a top-level function + // declaration followed by exports statement. + const name = this.processNamedFunction(); + this.tokens.appendCode(` exports.default = ${name};`); + } else if ( + this.tokens.matches4(tt._export, tt._default, tt._class, tt.name) || + this.tokens.matches5(tt._export, tt._default, tt._abstract, tt._class, tt.name) || + this.tokens.matches3(tt._export, tt._default, tt.at) + ) { + this.tokens.removeInitialToken(); + this.tokens.removeToken(); + this.copyDecorators(); + if (this.tokens.matches1(tt._abstract)) { + this.tokens.removeToken(); + } + const name = this.rootTransformer.processNamedClass(); + this.tokens.appendCode(` exports.default = ${name};`); + // After this point, this is a plain "export default E" statement. + } else if ( + shouldElideDefaultExport(this.isTypeScriptTransformEnabled, this.tokens, this.declarationInfo) + ) { + // If the exported value is just an identifier and should be elided by TypeScript + // rules, then remove it entirely. It will always have the form `export default e`, + // where `e` is an identifier. + this.tokens.removeInitialToken(); + this.tokens.removeToken(); + this.tokens.removeToken(); + } else if (this.reactHotLoaderTransformer) { + // We need to assign E to a variable. 
Change "export default E" to + // "let _default; exports.default = _default = E" + const defaultVarName = this.nameManager.claimFreeName("_default"); + this.tokens.replaceToken(`let ${defaultVarName}; exports.`); + this.tokens.copyToken(); + this.tokens.appendCode(` = ${defaultVarName} =`); + this.reactHotLoaderTransformer.setExtractedDefaultExportName(defaultVarName); + } else { + // Change "export default E" to "exports.default = E" + this.tokens.replaceToken("exports."); + this.tokens.copyToken(); + this.tokens.appendCode(" ="); + } + } + + copyDecorators() { + while (this.tokens.matches1(tt.at)) { + this.tokens.copyToken(); + if (this.tokens.matches1(tt.parenL)) { + this.tokens.copyExpectedToken(tt.parenL); + this.rootTransformer.processBalancedCode(); + this.tokens.copyExpectedToken(tt.parenR); + } else { + this.tokens.copyExpectedToken(tt.name); + while (this.tokens.matches1(tt.dot)) { + this.tokens.copyExpectedToken(tt.dot); + this.tokens.copyExpectedToken(tt.name); + } + if (this.tokens.matches1(tt.parenL)) { + this.tokens.copyExpectedToken(tt.parenL); + this.rootTransformer.processBalancedCode(); + this.tokens.copyExpectedToken(tt.parenR); + } + } + } + } + + /** + * Transform a declaration like `export var`, `export let`, or `export const`. + */ + processExportVar() { + if (this.isSimpleExportVar()) { + this.processSimpleExportVar(); + } else { + this.processComplexExportVar(); + } + } + + /** + * Determine if the export is of the form: + * export var/let/const [varName] = [expr]; + * In other words, determine if function name inference might apply. + */ + isSimpleExportVar() { + let tokenIndex = this.tokens.currentIndex(); + // export + tokenIndex++; + // var/let/const + tokenIndex++; + if (!this.tokens.matches1AtIndex(tokenIndex, tt.name)) { + return false; + } + tokenIndex++; + while (tokenIndex < this.tokens.tokens.length && this.tokens.tokens[tokenIndex].isType) { + tokenIndex++; + } + if (!this.tokens.matches1AtIndex(tokenIndex, tt.eq)) { + return false; + } + return true; + } + + /** + * Transform an `export var` declaration initializing a single variable. + * + * For example, this: + * export const f = () => {}; + * becomes this: + * const f = () => {}; exports.f = f; + * + * The variable is unused (e.g. exports.f has the true value of the export). + * We need to produce an assignment of this form so that the function will + * have an inferred name of "f", which wouldn't happen in the more general + * case below. + */ + processSimpleExportVar() { + // export + this.tokens.removeInitialToken(); + // var/let/const + this.tokens.copyToken(); + const varName = this.tokens.identifierName(); + // x: number -> x + while (!this.tokens.matches1(tt.eq)) { + this.rootTransformer.processToken(); + } + const endIndex = this.tokens.currentToken().rhsEndIndex; + if (endIndex == null) { + throw new Error("Expected = token with an end index."); + } + while (this.tokens.currentIndex() < endIndex) { + this.rootTransformer.processToken(); + } + this.tokens.appendCode(`; exports.${varName} = ${varName}`); + } + + /** + * Transform normal declaration exports, including handling destructuring. 
+ * For example, this: + * export const {x: [a = 2, b], c} = d; + * becomes this: + * ({x: [exports.a = 2, exports.b], c: exports.c} = d;) + */ + processComplexExportVar() { + this.tokens.removeInitialToken(); + this.tokens.removeToken(); + const needsParens = this.tokens.matches1(tt.braceL); + if (needsParens) { + this.tokens.appendCode("("); + } + + let depth = 0; + while (true) { + if ( + this.tokens.matches1(tt.braceL) || + this.tokens.matches1(tt.dollarBraceL) || + this.tokens.matches1(tt.bracketL) + ) { + depth++; + this.tokens.copyToken(); + } else if (this.tokens.matches1(tt.braceR) || this.tokens.matches1(tt.bracketR)) { + depth--; + this.tokens.copyToken(); + } else if ( + depth === 0 && + !this.tokens.matches1(tt.name) && + !this.tokens.currentToken().isType + ) { + break; + } else if (this.tokens.matches1(tt.eq)) { + // Default values might have assignments in the RHS that we want to ignore, so skip past + // them. + const endIndex = this.tokens.currentToken().rhsEndIndex; + if (endIndex == null) { + throw new Error("Expected = token with an end index."); + } + while (this.tokens.currentIndex() < endIndex) { + this.rootTransformer.processToken(); + } + } else { + const token = this.tokens.currentToken(); + if (isDeclaration(token)) { + const name = this.tokens.identifierName(); + let replacement = this.importProcessor.getIdentifierReplacement(name); + if (replacement === null) { + throw new Error(`Expected a replacement for ${name} in \`export var\` syntax.`); + } + if (isObjectShorthandDeclaration(token)) { + replacement = `${name}: ${replacement}`; + } + this.tokens.replaceToken(replacement); + } else { + this.rootTransformer.processToken(); + } + } + } + + if (needsParens) { + // Seek to the end of the RHS. + const endIndex = this.tokens.currentToken().rhsEndIndex; + if (endIndex == null) { + throw new Error("Expected = token with an end index."); + } + while (this.tokens.currentIndex() < endIndex) { + this.rootTransformer.processToken(); + } + this.tokens.appendCode(")"); + } + } + + /** + * Transform this: + * export function foo() {} + * into this: + * function foo() {} exports.foo = foo; + */ + processExportFunction() { + this.tokens.replaceToken(""); + const name = this.processNamedFunction(); + this.tokens.appendCode(` exports.${name} = ${name};`); + } + + /** + * Skip past a function with a name and return that name. 
+ */ + processNamedFunction() { + if (this.tokens.matches1(tt._function)) { + this.tokens.copyToken(); + } else if (this.tokens.matches2(tt.name, tt._function)) { + if (!this.tokens.matchesContextual(ContextualKeyword._async)) { + throw new Error("Expected async keyword in function export."); + } + this.tokens.copyToken(); + this.tokens.copyToken(); + } + if (this.tokens.matches1(tt.star)) { + this.tokens.copyToken(); + } + if (!this.tokens.matches1(tt.name)) { + throw new Error("Expected identifier for exported function name."); + } + const name = this.tokens.identifierName(); + this.tokens.copyToken(); + if (this.tokens.currentToken().isType) { + this.tokens.removeInitialToken(); + while (this.tokens.currentToken().isType) { + this.tokens.removeToken(); + } + } + this.tokens.copyExpectedToken(tt.parenL); + this.rootTransformer.processBalancedCode(); + this.tokens.copyExpectedToken(tt.parenR); + this.rootTransformer.processPossibleTypeRange(); + this.tokens.copyExpectedToken(tt.braceL); + this.rootTransformer.processBalancedCode(); + this.tokens.copyExpectedToken(tt.braceR); + return name; + } + + /** + * Transform this: + * export class A {} + * into this: + * class A {} exports.A = A; + */ + processExportClass() { + this.tokens.removeInitialToken(); + this.copyDecorators(); + if (this.tokens.matches1(tt._abstract)) { + this.tokens.removeToken(); + } + const name = this.rootTransformer.processNamedClass(); + this.tokens.appendCode(` exports.${name} = ${name};`); + } + + /** + * Transform this: + * export {a, b as c}; + * into this: + * exports.a = a; exports.c = b; + * + * OR + * + * Transform this: + * export {a, b as c} from './foo'; + * into the pre-generated Object.defineProperty code from the ImportProcessor. + * + * For the first case, if the TypeScript transform is enabled, we need to skip + * exports that are only defined as types. + */ + processExportBindings() { + this.tokens.removeInitialToken(); + this.tokens.removeToken(); + + const exportStatements = []; + while (true) { + if (this.tokens.matches1(tt.braceR)) { + this.tokens.removeToken(); + break; + } + + const specifierInfo = getImportExportSpecifierInfo(this.tokens); + while (this.tokens.currentIndex() < specifierInfo.endIndex) { + this.tokens.removeToken(); + } + if (!specifierInfo.isType && !this.shouldElideExportedIdentifier(specifierInfo.leftName)) { + const localName = specifierInfo.leftName; + const exportedName = specifierInfo.rightName; + const newLocalName = this.importProcessor.getIdentifierReplacement(localName); + exportStatements.push(`exports.${exportedName} = ${newLocalName || localName};`); + } + + if (this.tokens.matches1(tt.braceR)) { + this.tokens.removeToken(); + break; + } + if (this.tokens.matches2(tt.comma, tt.braceR)) { + this.tokens.removeToken(); + this.tokens.removeToken(); + break; + } else if (this.tokens.matches1(tt.comma)) { + this.tokens.removeToken(); + } else { + throw new Error(`Unexpected token: ${JSON.stringify(this.tokens.currentToken())}`); + } + } + + if (this.tokens.matchesContextual(ContextualKeyword._from)) { + // This is an export...from, so throw away the normal named export code + // and use the Object.defineProperty code from ImportProcessor. + this.tokens.removeToken(); + const path = this.tokens.stringValue(); + this.tokens.replaceTokenTrimmingLeftWhitespace(this.importProcessor.claimImportCode(path)); + removeMaybeImportAssertion(this.tokens); + } else { + // This is a normal named export, so use that. 
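// Illustrative sketch, not part of the sucrase patch ('./a', `a`, `b`, `c` are assumed names).
// For a module such as
//   import {a} from './a';
//   const b = 1;
//   export {a, b as c};
// the loop above collects roughly
//   exportStatements === ['exports.a = _a.a;', 'exports.c = b;']
// using the import replacement (`_a.a`) where one exists and the plain local name otherwise;
// the appendCode call below then emits them where the export braces used to be.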
+ this.tokens.appendCode(exportStatements.join(" ")); + } + + if (this.tokens.matches1(tt.semi)) { + this.tokens.removeToken(); + } + } + + processExportStar() { + this.tokens.removeInitialToken(); + while (!this.tokens.matches1(tt.string)) { + this.tokens.removeToken(); + } + const path = this.tokens.stringValue(); + this.tokens.replaceTokenTrimmingLeftWhitespace(this.importProcessor.claimImportCode(path)); + removeMaybeImportAssertion(this.tokens); + if (this.tokens.matches1(tt.semi)) { + this.tokens.removeToken(); + } + } + + shouldElideExportedIdentifier(name) { + return this.isTypeScriptTransformEnabled && !this.declarationInfo.valueDeclarations.has(name); + } +} diff --git a/node_modules/sucrase/dist/esm/transformers/ESMImportTransformer.js b/node_modules/sucrase/dist/esm/transformers/ESMImportTransformer.js new file mode 100644 index 0000000..4f738f8 --- /dev/null +++ b/node_modules/sucrase/dist/esm/transformers/ESMImportTransformer.js @@ -0,0 +1,363 @@ + + + +import {ContextualKeyword} from "../parser/tokenizer/keywords"; +import {TokenType as tt} from "../parser/tokenizer/types"; + +import elideImportEquals from "../util/elideImportEquals"; +import getDeclarationInfo, { + + EMPTY_DECLARATION_INFO, +} from "../util/getDeclarationInfo"; +import getImportExportSpecifierInfo from "../util/getImportExportSpecifierInfo"; +import {getNonTypeIdentifiers} from "../util/getNonTypeIdentifiers"; +import {removeMaybeImportAssertion} from "../util/removeMaybeImportAssertion"; +import shouldElideDefaultExport from "../util/shouldElideDefaultExport"; + +import Transformer from "./Transformer"; + +/** + * Class for editing import statements when we are keeping the code as ESM. We still need to remove + * type-only imports in TypeScript and Flow. + */ +export default class ESMImportTransformer extends Transformer { + + + + + constructor( + tokens, + nameManager, + helperManager, + reactHotLoaderTransformer, + isTypeScriptTransformEnabled, + options, + ) { + super();this.tokens = tokens;this.nameManager = nameManager;this.helperManager = helperManager;this.reactHotLoaderTransformer = reactHotLoaderTransformer;this.isTypeScriptTransformEnabled = isTypeScriptTransformEnabled;; + this.nonTypeIdentifiers = isTypeScriptTransformEnabled + ? getNonTypeIdentifiers(tokens, options) + : new Set(); + this.declarationInfo = isTypeScriptTransformEnabled + ? getDeclarationInfo(tokens) + : EMPTY_DECLARATION_INFO; + this.injectCreateRequireForImportRequire = Boolean(options.injectCreateRequireForImportRequire); + } + + process() { + // TypeScript `import foo = require('foo');` should always just be translated to plain require. + if (this.tokens.matches3(tt._import, tt.name, tt.eq)) { + return this.processImportEquals(); + } + if ( + this.tokens.matches4(tt._import, tt.name, tt.name, tt.eq) && + this.tokens.matchesContextualAtIndex(this.tokens.currentIndex() + 1, ContextualKeyword._type) + ) { + // import type T = require('T') + this.tokens.removeInitialToken(); + // This construct is always exactly 8 tokens long, so remove the 7 remaining tokens. 
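// Illustrative note, not part of the sucrase patch (the module name 'T' is an assumption).
// For an input of
//   import type T = require('T');
// the construct is the eight tokens `import` `type` `T` `=` `require` `(` `'T'` `)`:
// removeInitialToken() above consumed `import`, and the loop below drops the remaining seven.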
+ for (let i = 0; i < 7; i++) { + this.tokens.removeToken(); + } + return true; + } + if (this.tokens.matches2(tt._export, tt.eq)) { + this.tokens.replaceToken("module.exports"); + return true; + } + if ( + this.tokens.matches5(tt._export, tt._import, tt.name, tt.name, tt.eq) && + this.tokens.matchesContextualAtIndex(this.tokens.currentIndex() + 2, ContextualKeyword._type) + ) { + // export import type T = require('T') + this.tokens.removeInitialToken(); + // This construct is always exactly 9 tokens long, so remove the 8 remaining tokens. + for (let i = 0; i < 8; i++) { + this.tokens.removeToken(); + } + return true; + } + if (this.tokens.matches1(tt._import)) { + return this.processImport(); + } + if (this.tokens.matches2(tt._export, tt._default)) { + return this.processExportDefault(); + } + if (this.tokens.matches2(tt._export, tt.braceL)) { + return this.processNamedExports(); + } + if ( + this.tokens.matches2(tt._export, tt.name) && + this.tokens.matchesContextualAtIndex(this.tokens.currentIndex() + 1, ContextualKeyword._type) + ) { + // export type {a}; + // export type {a as b}; + // export type {a} from './b'; + // export type * from './b'; + // export type * as ns from './b'; + this.tokens.removeInitialToken(); + this.tokens.removeToken(); + if (this.tokens.matches1(tt.braceL)) { + while (!this.tokens.matches1(tt.braceR)) { + this.tokens.removeToken(); + } + this.tokens.removeToken(); + } else { + // * + this.tokens.removeToken(); + if (this.tokens.matches1(tt._as)) { + // as + this.tokens.removeToken(); + // ns + this.tokens.removeToken(); + } + } + // Remove type re-export `... } from './T'` + if ( + this.tokens.matchesContextual(ContextualKeyword._from) && + this.tokens.matches1AtIndex(this.tokens.currentIndex() + 1, tt.string) + ) { + this.tokens.removeToken(); + this.tokens.removeToken(); + removeMaybeImportAssertion(this.tokens); + } + return true; + } + return false; + } + + processImportEquals() { + const importName = this.tokens.identifierNameAtIndex(this.tokens.currentIndex() + 1); + if (this.isTypeName(importName)) { + // If this name is only used as a type, elide the whole import. + elideImportEquals(this.tokens); + } else if (this.injectCreateRequireForImportRequire) { + // We're using require in an environment (Node ESM) that doesn't provide + // it as a global, so generate a helper to import it. + // import -> const + this.tokens.replaceToken("const"); + // Foo + this.tokens.copyToken(); + // = + this.tokens.copyToken(); + // require + this.tokens.replaceToken(this.helperManager.getHelperName("require")); + } else { + // Otherwise, just switch `import` to `const`. + this.tokens.replaceToken("const"); + } + return true; + } + + processImport() { + if (this.tokens.matches2(tt._import, tt.parenL)) { + // Dynamic imports don't need to be transformed. + return false; + } + + const snapshot = this.tokens.snapshot(); + const allImportsRemoved = this.removeImportTypeBindings(); + if (allImportsRemoved) { + this.tokens.restoreToSnapshot(snapshot); + while (!this.tokens.matches1(tt.string)) { + this.tokens.removeToken(); + } + this.tokens.removeToken(); + removeMaybeImportAssertion(this.tokens); + if (this.tokens.matches1(tt.semi)) { + this.tokens.removeToken(); + } + } + return true; + } + + /** + * Remove type bindings from this import, leaving the rest of the import intact. + * + * Return true if this import was ONLY types, and thus is eligible for removal. This will bail out + * of the replacement operation, so we can return early here. 
+ */ + removeImportTypeBindings() { + this.tokens.copyExpectedToken(tt._import); + if ( + this.tokens.matchesContextual(ContextualKeyword._type) && + !this.tokens.matches1AtIndex(this.tokens.currentIndex() + 1, tt.comma) && + !this.tokens.matchesContextualAtIndex(this.tokens.currentIndex() + 1, ContextualKeyword._from) + ) { + // This is an "import type" statement, so exit early. + return true; + } + + if (this.tokens.matches1(tt.string)) { + // This is a bare import, so we should proceed with the import. + this.tokens.copyToken(); + return false; + } + + // Skip the "module" token in import reflection. + if ( + this.tokens.matchesContextual(ContextualKeyword._module) && + this.tokens.matchesContextualAtIndex(this.tokens.currentIndex() + 2, ContextualKeyword._from) + ) { + this.tokens.copyToken(); + } + + let foundNonTypeImport = false; + let needsComma = false; + + if (this.tokens.matches1(tt.name)) { + if (this.isTypeName(this.tokens.identifierName())) { + this.tokens.removeToken(); + if (this.tokens.matches1(tt.comma)) { + this.tokens.removeToken(); + } + } else { + foundNonTypeImport = true; + this.tokens.copyToken(); + if (this.tokens.matches1(tt.comma)) { + // We're in a statement like: + // import A, * as B from './A'; + // or + // import A, {foo} from './A'; + // where the `A` is being kept. The comma should be removed if an only + // if the next part of the import statement is elided, but that's hard + // to determine at this point in the code. Instead, always remove it + // and set a flag to add it back if necessary. + needsComma = true; + this.tokens.removeToken(); + } + } + } + + if (this.tokens.matches1(tt.star)) { + if (this.isTypeName(this.tokens.identifierNameAtRelativeIndex(2))) { + this.tokens.removeToken(); + this.tokens.removeToken(); + this.tokens.removeToken(); + } else { + if (needsComma) { + this.tokens.appendCode(","); + } + foundNonTypeImport = true; + this.tokens.copyExpectedToken(tt.star); + this.tokens.copyExpectedToken(tt.name); + this.tokens.copyExpectedToken(tt.name); + } + } else if (this.tokens.matches1(tt.braceL)) { + if (needsComma) { + this.tokens.appendCode(","); + } + this.tokens.copyToken(); + while (!this.tokens.matches1(tt.braceR)) { + const specifierInfo = getImportExportSpecifierInfo(this.tokens); + if (specifierInfo.isType || this.isTypeName(specifierInfo.rightName)) { + while (this.tokens.currentIndex() < specifierInfo.endIndex) { + this.tokens.removeToken(); + } + if (this.tokens.matches1(tt.comma)) { + this.tokens.removeToken(); + } + } else { + foundNonTypeImport = true; + while (this.tokens.currentIndex() < specifierInfo.endIndex) { + this.tokens.copyToken(); + } + if (this.tokens.matches1(tt.comma)) { + this.tokens.copyToken(); + } + } + } + this.tokens.copyExpectedToken(tt.braceR); + } + + return !foundNonTypeImport; + } + + isTypeName(name) { + return this.isTypeScriptTransformEnabled && !this.nonTypeIdentifiers.has(name); + } + + processExportDefault() { + if ( + shouldElideDefaultExport(this.isTypeScriptTransformEnabled, this.tokens, this.declarationInfo) + ) { + // If the exported value is just an identifier and should be elided by TypeScript + // rules, then remove it entirely. It will always have the form `export default e`, + // where `e` is an identifier. 
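// Illustrative sketch, not part of the sucrase patch (`Config`/`config` are assumed names).
// Given
//   type Config = {debug: boolean};
//   export default Config;
// `Config` has only a type declaration, so the three tokens `export`, `default`, `Config`
// are removed below and nothing is emitted. Given instead
//   const config = {debug: true};
//   export default config;
// the name has a value declaration, shouldElideDefaultExport declines, and the statement is
// left for the normal ESM handling further down.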
+ this.tokens.removeInitialToken(); + this.tokens.removeToken(); + this.tokens.removeToken(); + return true; + } + + const alreadyHasName = + this.tokens.matches4(tt._export, tt._default, tt._function, tt.name) || + // export default async function + (this.tokens.matches5(tt._export, tt._default, tt.name, tt._function, tt.name) && + this.tokens.matchesContextualAtIndex( + this.tokens.currentIndex() + 2, + ContextualKeyword._async, + )) || + this.tokens.matches4(tt._export, tt._default, tt._class, tt.name) || + this.tokens.matches5(tt._export, tt._default, tt._abstract, tt._class, tt.name); + + if (!alreadyHasName && this.reactHotLoaderTransformer) { + // This is a plain "export default E" statement and we need to assign E to a variable. + // Change "export default E" to "let _default; export default _default = E" + const defaultVarName = this.nameManager.claimFreeName("_default"); + this.tokens.replaceToken(`let ${defaultVarName}; export`); + this.tokens.copyToken(); + this.tokens.appendCode(` ${defaultVarName} =`); + this.reactHotLoaderTransformer.setExtractedDefaultExportName(defaultVarName); + return true; + } + return false; + } + + /** + * In TypeScript, we need to remove named exports that were never declared or only declared as a + * type. + */ + processNamedExports() { + if (!this.isTypeScriptTransformEnabled) { + return false; + } + this.tokens.copyExpectedToken(tt._export); + this.tokens.copyExpectedToken(tt.braceL); + + while (!this.tokens.matches1(tt.braceR)) { + const specifierInfo = getImportExportSpecifierInfo(this.tokens); + if (specifierInfo.isType || this.shouldElideExportedName(specifierInfo.leftName)) { + // Type export, so remove all tokens, including any comma. + while (this.tokens.currentIndex() < specifierInfo.endIndex) { + this.tokens.removeToken(); + } + if (this.tokens.matches1(tt.comma)) { + this.tokens.removeToken(); + } + } else { + // Non-type export, so copy all tokens, including any comma. + while (this.tokens.currentIndex() < specifierInfo.endIndex) { + this.tokens.copyToken(); + } + if (this.tokens.matches1(tt.comma)) { + this.tokens.copyToken(); + } + } + } + this.tokens.copyExpectedToken(tt.braceR); + return true; + } + + /** + * ESM elides all imports with the rule that we only elide if we see that it's + * a type and never see it as a value. This is in contrast to CJS, which + * elides imports that are completely unknown. 
+ */ + shouldElideExportedName(name) { + return ( + this.isTypeScriptTransformEnabled && + this.declarationInfo.typeDeclarations.has(name) && + !this.declarationInfo.valueDeclarations.has(name) + ); + } +} diff --git a/node_modules/sucrase/dist/esm/transformers/FlowTransformer.js b/node_modules/sucrase/dist/esm/transformers/FlowTransformer.js new file mode 100644 index 0000000..7df0aca --- /dev/null +++ b/node_modules/sucrase/dist/esm/transformers/FlowTransformer.js @@ -0,0 +1,182 @@ +import {ContextualKeyword} from "../parser/tokenizer/keywords"; +import {TokenType as tt} from "../parser/tokenizer/types"; + + +import Transformer from "./Transformer"; + +export default class FlowTransformer extends Transformer { + constructor( + rootTransformer, + tokens, + isImportsTransformEnabled, + ) { + super();this.rootTransformer = rootTransformer;this.tokens = tokens;this.isImportsTransformEnabled = isImportsTransformEnabled;; + } + + process() { + if ( + this.rootTransformer.processPossibleArrowParamEnd() || + this.rootTransformer.processPossibleAsyncArrowWithTypeParams() || + this.rootTransformer.processPossibleTypeRange() + ) { + return true; + } + if (this.tokens.matches1(tt._enum)) { + this.processEnum(); + return true; + } + if (this.tokens.matches2(tt._export, tt._enum)) { + this.processNamedExportEnum(); + return true; + } + if (this.tokens.matches3(tt._export, tt._default, tt._enum)) { + this.processDefaultExportEnum(); + return true; + } + return false; + } + + /** + * Handle a declaration like: + * export enum E ... + * + * With this imports transform, this becomes: + * const E = [[enum]]; exports.E = E; + * + * otherwise, it becomes: + * export const E = [[enum]]; + */ + processNamedExportEnum() { + if (this.isImportsTransformEnabled) { + // export + this.tokens.removeInitialToken(); + const enumName = this.tokens.identifierNameAtRelativeIndex(1); + this.processEnum(); + this.tokens.appendCode(` exports.${enumName} = ${enumName};`); + } else { + this.tokens.copyToken(); + this.processEnum(); + } + } + + /** + * Handle a declaration like: + * export default enum E + * + * With the imports transform, this becomes: + * const E = [[enum]]; exports.default = E; + * + * otherwise, it becomes: + * const E = [[enum]]; export default E; + */ + processDefaultExportEnum() { + // export + this.tokens.removeInitialToken(); + // default + this.tokens.removeToken(); + const enumName = this.tokens.identifierNameAtRelativeIndex(1); + this.processEnum(); + if (this.isImportsTransformEnabled) { + this.tokens.appendCode(` exports.default = ${enumName};`); + } else { + this.tokens.appendCode(` export default ${enumName};`); + } + } + + /** + * Transpile flow enums to invoke the "flow-enums-runtime" library. + * + * Currently, the transpiled code always uses `require("flow-enums-runtime")`, + * but if future flexibility is needed, we could expose a config option for + * this string (similar to configurable JSX). Even when targeting ESM, the + * default behavior of babel-plugin-transform-flow-enums is to use require + * rather than injecting an import. + * + * Flow enums are quite a bit simpler than TS enums and have some convenient + * constraints: + * - Element initializers must be either always present or always absent. That + * means that we can use fixed lookahead on the first element (if any) and + * assume that all elements are like that. + * - The right-hand side of an element initializer must be a literal value, + * not a complex expression and not referencing other elements. 
That means + * we can simply copy a single token. + * + * Enums can be broken up into three basic cases: + * + * Mirrored enums: + * enum E {A, B} + * -> + * const E = require("flow-enums-runtime").Mirrored(["A", "B"]); + * + * Initializer enums: + * enum E {A = 1, B = 2} + * -> + * const E = require("flow-enums-runtime")({A: 1, B: 2}); + * + * Symbol enums: + * enum E of symbol {A, B} + * -> + * const E = require("flow-enums-runtime")({A: Symbol("A"), B: Symbol("B")}); + * + * We can statically detect which of the three cases this is by looking at the + * "of" declaration (if any) and seeing if the first element has an initializer. + * Since the other transform details are so similar between the three cases, we + * use a single implementation and vary the transform within processEnumElement + * based on case. + */ + processEnum() { + // enum E -> const E + this.tokens.replaceToken("const"); + this.tokens.copyExpectedToken(tt.name); + + let isSymbolEnum = false; + if (this.tokens.matchesContextual(ContextualKeyword._of)) { + this.tokens.removeToken(); + isSymbolEnum = this.tokens.matchesContextual(ContextualKeyword._symbol); + this.tokens.removeToken(); + } + const hasInitializers = this.tokens.matches3(tt.braceL, tt.name, tt.eq); + this.tokens.appendCode(' = require("flow-enums-runtime")'); + + const isMirrored = !isSymbolEnum && !hasInitializers; + this.tokens.replaceTokenTrimmingLeftWhitespace(isMirrored ? ".Mirrored([" : "({"); + + while (!this.tokens.matches1(tt.braceR)) { + // ... is allowed at the end and has no runtime behavior. + if (this.tokens.matches1(tt.ellipsis)) { + this.tokens.removeToken(); + break; + } + this.processEnumElement(isSymbolEnum, hasInitializers); + if (this.tokens.matches1(tt.comma)) { + this.tokens.copyToken(); + } + } + + this.tokens.replaceToken(isMirrored ? "]);" : "});"); + } + + /** + * Process an individual enum element, producing either an array element or an + * object element based on what type of enum this is. + */ + processEnumElement(isSymbolEnum, hasInitializers) { + if (isSymbolEnum) { + // Symbol enums never have initializers and are expanded to object elements. + // A, -> A: Symbol("A"), + const elementName = this.tokens.identifierName(); + this.tokens.copyToken(); + this.tokens.appendCode(`: Symbol("${elementName}")`); + } else if (hasInitializers) { + // Initializers are expanded to object elements. + // A = 1, -> A: 1, + this.tokens.copyToken(); + this.tokens.replaceTokenTrimmingLeftWhitespace(":"); + this.tokens.copyToken(); + } else { + // Enum elements without initializers become string literal array elements. + // A, -> "A", + this.tokens.replaceToken(`"${this.tokens.identifierName()}"`); + } + } +} diff --git a/node_modules/sucrase/dist/esm/transformers/JSXTransformer.js b/node_modules/sucrase/dist/esm/transformers/JSXTransformer.js new file mode 100644 index 0000000..e5f5ae5 --- /dev/null +++ b/node_modules/sucrase/dist/esm/transformers/JSXTransformer.js @@ -0,0 +1,733 @@ + + + +import XHTMLEntities from "../parser/plugins/jsx/xhtml"; +import {JSXRole} from "../parser/tokenizer"; +import {TokenType as tt} from "../parser/tokenizer/types"; +import {charCodes} from "../parser/util/charcodes"; + +import getJSXPragmaInfo, {} from "../util/getJSXPragmaInfo"; + +import Transformer from "./Transformer"; + +export default class JSXTransformer extends Transformer { + + + + + // State for calculating the line number of each JSX tag in development. 
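// Illustrative note, not part of the sucrase patch (file name and line number are assumptions).
// In development mode these counters drive the per-element source info, so a classic-runtime
// element such as
//   <App />
// appearing on line 12 compiles to roughly
//   React.createElement(App, { __self: this, __source: {fileName: _jsxFileName, lineNumber: 12}})
// lastLineNumber/lastIndex below cache the scan position so each tag's line number is computed
// in a single forward pass over the source.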
+ __init() {this.lastLineNumber = 1} + __init2() {this.lastIndex = 0} + + // In development, variable name holding the name of the current file. + __init3() {this.filenameVarName = null} + // Mapping of claimed names for imports in the automatic transform, e,g. + // {jsx: "_jsx"}. This determines which imports to generate in the prefix. + __init4() {this.esmAutomaticImportNameResolutions = {}} + // When automatically adding imports in CJS mode, we store the variable name + // holding the imported CJS module so we can require it in the prefix. + __init5() {this.cjsAutomaticModuleNameResolutions = {}} + + constructor( + rootTransformer, + tokens, + importProcessor, + nameManager, + options, + ) { + super();this.rootTransformer = rootTransformer;this.tokens = tokens;this.importProcessor = importProcessor;this.nameManager = nameManager;this.options = options;JSXTransformer.prototype.__init.call(this);JSXTransformer.prototype.__init2.call(this);JSXTransformer.prototype.__init3.call(this);JSXTransformer.prototype.__init4.call(this);JSXTransformer.prototype.__init5.call(this);; + this.jsxPragmaInfo = getJSXPragmaInfo(options); + this.isAutomaticRuntime = options.jsxRuntime === "automatic"; + this.jsxImportSource = options.jsxImportSource || "react"; + } + + process() { + if (this.tokens.matches1(tt.jsxTagStart)) { + this.processJSXTag(); + return true; + } + return false; + } + + getPrefixCode() { + let prefix = ""; + if (this.filenameVarName) { + prefix += `const ${this.filenameVarName} = ${JSON.stringify(this.options.filePath || "")};`; + } + if (this.isAutomaticRuntime) { + if (this.importProcessor) { + // CJS mode: emit require statements for all modules that were referenced. + for (const [path, resolvedName] of Object.entries(this.cjsAutomaticModuleNameResolutions)) { + prefix += `var ${resolvedName} = require("${path}");`; + } + } else { + // ESM mode: consolidate and emit import statements for referenced names. + const {createElement: createElementResolution, ...otherResolutions} = + this.esmAutomaticImportNameResolutions; + if (createElementResolution) { + prefix += `import {createElement as ${createElementResolution}} from "${this.jsxImportSource}";`; + } + const importSpecifiers = Object.entries(otherResolutions) + .map(([name, resolvedName]) => `${name} as ${resolvedName}`) + .join(", "); + if (importSpecifiers) { + const importPath = + this.jsxImportSource + (this.options.production ? "/jsx-runtime" : "/jsx-dev-runtime"); + prefix += `import {${importSpecifiers}} from "${importPath}";`; + } + } + } + return prefix; + } + + processJSXTag() { + const {jsxRole, start} = this.tokens.currentToken(); + // Calculate line number information at the very start (if in development + // mode) so that the information is guaranteed to be queried in token order. + const elementLocationCode = this.options.production ? null : this.getElementLocationCode(start); + if (this.isAutomaticRuntime && jsxRole !== JSXRole.KeyAfterPropSpread) { + this.transformTagToJSXFunc(elementLocationCode, jsxRole); + } else { + this.transformTagToCreateElement(elementLocationCode); + } + } + + getElementLocationCode(firstTokenStart) { + const lineNumber = this.getLineNumberForIndex(firstTokenStart); + return `lineNumber: ${lineNumber}`; + } + + /** + * Get the line number for this source position. This is calculated lazily and + * must be called in increasing order by index. 
+ */ + getLineNumberForIndex(index) { + const code = this.tokens.code; + while (this.lastIndex < index && this.lastIndex < code.length) { + if (code[this.lastIndex] === "\n") { + this.lastLineNumber++; + } + this.lastIndex++; + } + return this.lastLineNumber; + } + + /** + * Convert the current JSX element to a call to jsx, jsxs, or jsxDEV. This is + * the primary transformation for the automatic transform. + * + * Example: + *
+   *   <div a={1} key={2}>Hello{x}</div>
+ * becomes + * jsxs('div', {a: 1, children: ["Hello", x]}, 2) + */ + transformTagToJSXFunc(elementLocationCode, jsxRole) { + const isStatic = jsxRole === JSXRole.StaticChildren; + // First tag is always jsxTagStart. + this.tokens.replaceToken(this.getJSXFuncInvocationCode(isStatic)); + + let keyCode = null; + if (this.tokens.matches1(tt.jsxTagEnd)) { + // Fragment syntax. + this.tokens.replaceToken(`${this.getFragmentCode()}, {`); + this.processAutomaticChildrenAndEndProps(jsxRole); + } else { + // Normal open tag or self-closing tag. + this.processTagIntro(); + this.tokens.appendCode(", {"); + keyCode = this.processProps(true); + + if (this.tokens.matches2(tt.slash, tt.jsxTagEnd)) { + // Self-closing tag, no children to add, so close the props. + this.tokens.appendCode("}"); + } else if (this.tokens.matches1(tt.jsxTagEnd)) { + // Tag with children. + this.tokens.removeToken(); + this.processAutomaticChildrenAndEndProps(jsxRole); + } else { + throw new Error("Expected either /> or > at the end of the tag."); + } + // If a key was present, move it to its own arg. Note that moving code + // like this will cause line numbers to get out of sync within the JSX + // element if the key expression has a newline in it. This is unfortunate, + // but hopefully should be rare. + if (keyCode) { + this.tokens.appendCode(`, ${keyCode}`); + } + } + if (!this.options.production) { + // If the key wasn't already added, add it now so we can correctly set + // positional args for jsxDEV. + if (keyCode === null) { + this.tokens.appendCode(", void 0"); + } + this.tokens.appendCode(`, ${isStatic}, ${this.getDevSource(elementLocationCode)}, this`); + } + // We're at the close-tag or the end of a self-closing tag, so remove + // everything else and close the function call. + this.tokens.removeInitialToken(); + while (!this.tokens.matches1(tt.jsxTagEnd)) { + this.tokens.removeToken(); + } + this.tokens.replaceToken(")"); + } + + /** + * Convert the current JSX element to a createElement call. In the classic + * runtime, this is the only case. In the automatic runtime, this is called + * as a fallback in some situations. + * + * Example: + *
+   *   <div a={1} key={2}>Hello{x}</div>
+ * becomes + * React.createElement('div', {a: 1, key: 2}, "Hello", x) + */ + transformTagToCreateElement(elementLocationCode) { + // First tag is always jsxTagStart. + this.tokens.replaceToken(this.getCreateElementInvocationCode()); + + if (this.tokens.matches1(tt.jsxTagEnd)) { + // Fragment syntax. + this.tokens.replaceToken(`${this.getFragmentCode()}, null`); + this.processChildren(true); + } else { + // Normal open tag or self-closing tag. + this.processTagIntro(); + this.processPropsObjectWithDevInfo(elementLocationCode); + + if (this.tokens.matches2(tt.slash, tt.jsxTagEnd)) { + // Self-closing tag; no children to process. + } else if (this.tokens.matches1(tt.jsxTagEnd)) { + // Tag with children and a close-tag; process the children as args. + this.tokens.removeToken(); + this.processChildren(true); + } else { + throw new Error("Expected either /> or > at the end of the tag."); + } + } + // We're at the close-tag or the end of a self-closing tag, so remove + // everything else and close the function call. + this.tokens.removeInitialToken(); + while (!this.tokens.matches1(tt.jsxTagEnd)) { + this.tokens.removeToken(); + } + this.tokens.replaceToken(")"); + } + + /** + * Get the code for the relevant function for this context: jsx, jsxs, + * or jsxDEV. The following open-paren is included as well. + * + * These functions are only used for the automatic runtime, so they are always + * auto-imported, but the auto-import will be either CJS or ESM based on the + * target module format. + */ + getJSXFuncInvocationCode(isStatic) { + if (this.options.production) { + if (isStatic) { + return this.claimAutoImportedFuncInvocation("jsxs", "/jsx-runtime"); + } else { + return this.claimAutoImportedFuncInvocation("jsx", "/jsx-runtime"); + } + } else { + return this.claimAutoImportedFuncInvocation("jsxDEV", "/jsx-dev-runtime"); + } + } + + /** + * Return the code to use for the createElement function, e.g. + * `React.createElement`, including the following open-paren. + * + * This is the main function to use for the classic runtime. For the + * automatic runtime, this function is used as a fallback function to + * preserve behavior when there is a prop spread followed by an explicit + * key. In that automatic runtime case, the function should be automatically + * imported. + */ + getCreateElementInvocationCode() { + if (this.isAutomaticRuntime) { + return this.claimAutoImportedFuncInvocation("createElement", ""); + } else { + const {jsxPragmaInfo} = this; + const resolvedPragmaBaseName = this.importProcessor + ? this.importProcessor.getIdentifierReplacement(jsxPragmaInfo.base) || jsxPragmaInfo.base + : jsxPragmaInfo.base; + return `${resolvedPragmaBaseName}${jsxPragmaInfo.suffix}(`; + } + } + + /** + * Return the code to use as the component when compiling a shorthand + * fragment, e.g. `React.Fragment`. + * + * This may be called from either the classic or automatic runtime, and + * the value should be auto-imported for the automatic runtime. + */ + getFragmentCode() { + if (this.isAutomaticRuntime) { + return this.claimAutoImportedName( + "Fragment", + this.options.production ? "/jsx-runtime" : "/jsx-dev-runtime", + ); + } else { + const {jsxPragmaInfo} = this; + const resolvedFragmentPragmaBaseName = this.importProcessor + ? this.importProcessor.getIdentifierReplacement(jsxPragmaInfo.fragmentBase) || + jsxPragmaInfo.fragmentBase + : jsxPragmaInfo.fragmentBase; + return resolvedFragmentPragmaBaseName + jsxPragmaInfo.fragmentSuffix; + } + } + + /** + * Return code that invokes the given function. 
+ * + * When the imports transform is enabled, use the CJSImportTransformer + * strategy of using `.call(void 0, ...` to avoid passing a `this` value in a + * situation that would otherwise look like a method call. + */ + claimAutoImportedFuncInvocation(funcName, importPathSuffix) { + const funcCode = this.claimAutoImportedName(funcName, importPathSuffix); + if (this.importProcessor) { + return `${funcCode}.call(void 0, `; + } else { + return `${funcCode}(`; + } + } + + claimAutoImportedName(funcName, importPathSuffix) { + if (this.importProcessor) { + // CJS mode: claim a name for the module and mark it for import. + const path = this.jsxImportSource + importPathSuffix; + if (!this.cjsAutomaticModuleNameResolutions[path]) { + this.cjsAutomaticModuleNameResolutions[path] = + this.importProcessor.getFreeIdentifierForPath(path); + } + return `${this.cjsAutomaticModuleNameResolutions[path]}.${funcName}`; + } else { + // ESM mode: claim a name for this function and add it to the names that + // should be auto-imported when the prefix is generated. + if (!this.esmAutomaticImportNameResolutions[funcName]) { + this.esmAutomaticImportNameResolutions[funcName] = this.nameManager.claimFreeName( + `_${funcName}`, + ); + } + return this.esmAutomaticImportNameResolutions[funcName]; + } + } + + /** + * Process the first part of a tag, before any props. + */ + processTagIntro() { + // Walk forward until we see one of these patterns: + // jsxName to start the first prop, preceded by another jsxName to end the tag name. + // jsxName to start the first prop, preceded by greaterThan to end the type argument. + // [open brace] to start the first prop. + // [jsxTagEnd] to end the open-tag. + // [slash, jsxTagEnd] to end the self-closing tag. + let introEnd = this.tokens.currentIndex() + 1; + while ( + this.tokens.tokens[introEnd].isType || + (!this.tokens.matches2AtIndex(introEnd - 1, tt.jsxName, tt.jsxName) && + !this.tokens.matches2AtIndex(introEnd - 1, tt.greaterThan, tt.jsxName) && + !this.tokens.matches1AtIndex(introEnd, tt.braceL) && + !this.tokens.matches1AtIndex(introEnd, tt.jsxTagEnd) && + !this.tokens.matches2AtIndex(introEnd, tt.slash, tt.jsxTagEnd)) + ) { + introEnd++; + } + if (introEnd === this.tokens.currentIndex() + 1) { + const tagName = this.tokens.identifierName(); + if (startsWithLowerCase(tagName)) { + this.tokens.replaceToken(`'${tagName}'`); + } + } + while (this.tokens.currentIndex() < introEnd) { + this.rootTransformer.processToken(); + } + } + + /** + * Starting at the beginning of the props, add the props argument to + * React.createElement, including the comma before it. + */ + processPropsObjectWithDevInfo(elementLocationCode) { + const devProps = this.options.production + ? "" + : `__self: this, __source: ${this.getDevSource(elementLocationCode)}`; + if (!this.tokens.matches1(tt.jsxName) && !this.tokens.matches1(tt.braceL)) { + if (devProps) { + this.tokens.appendCode(`, {${devProps}}`); + } else { + this.tokens.appendCode(`, null`); + } + return; + } + this.tokens.appendCode(`, {`); + this.processProps(false); + if (devProps) { + this.tokens.appendCode(` ${devProps}}`); + } else { + this.tokens.appendCode("}"); + } + } + + /** + * Transform the core part of the props, assuming that a { has already been + * inserted before us and that a } will be inserted after us. + * + * If extractKeyCode is true (i.e. when using any jsx... function), any prop + * named "key" has its code captured and returned rather than being emitted to + * the output code. 
This shifts line numbers, and emitting the code later will + * correct line numbers again. If no key is found or if extractKeyCode is + * false, this function returns null. + */ + processProps(extractKeyCode) { + let keyCode = null; + while (true) { + if (this.tokens.matches2(tt.jsxName, tt.eq)) { + // This is a regular key={value} or key="value" prop. + const propName = this.tokens.identifierName(); + if (extractKeyCode && propName === "key") { + if (keyCode !== null) { + // The props list has multiple keys. Different implementations are + // inconsistent about what to do here: as of this writing, Babel and + // swc keep the *last* key and completely remove the rest, while + // TypeScript uses the *first* key and leaves the others as regular + // props. The React team collaborated with Babel on the + // implementation of this behavior, so presumably the Babel behavior + // is the one to use. + // Since we won't ever be emitting the previous key code, we need to + // at least emit its newlines here so that the line numbers match up + // in the long run. + this.tokens.appendCode(keyCode.replace(/[^\n]/g, "")); + } + // key + this.tokens.removeToken(); + // = + this.tokens.removeToken(); + const snapshot = this.tokens.snapshot(); + this.processPropValue(); + keyCode = this.tokens.dangerouslyGetAndRemoveCodeSinceSnapshot(snapshot); + // Don't add a comma + continue; + } else { + this.processPropName(propName); + this.tokens.replaceToken(": "); + this.processPropValue(); + } + } else if (this.tokens.matches1(tt.jsxName)) { + // This is a shorthand prop like . + const propName = this.tokens.identifierName(); + this.processPropName(propName); + this.tokens.appendCode(": true"); + } else if (this.tokens.matches1(tt.braceL)) { + // This is prop spread, like
, which we can pass + // through fairly directly as an object spread. + this.tokens.replaceToken(""); + this.rootTransformer.processBalancedCode(); + this.tokens.replaceToken(""); + } else { + break; + } + this.tokens.appendCode(","); + } + return keyCode; + } + + processPropName(propName) { + if (propName.includes("-")) { + this.tokens.replaceToken(`'${propName}'`); + } else { + this.tokens.copyToken(); + } + } + + processPropValue() { + if (this.tokens.matches1(tt.braceL)) { + this.tokens.replaceToken(""); + this.rootTransformer.processBalancedCode(); + this.tokens.replaceToken(""); + } else if (this.tokens.matches1(tt.jsxTagStart)) { + this.processJSXTag(); + } else { + this.processStringPropValue(); + } + } + + processStringPropValue() { + const token = this.tokens.currentToken(); + const valueCode = this.tokens.code.slice(token.start + 1, token.end - 1); + const replacementCode = formatJSXTextReplacement(valueCode); + const literalCode = formatJSXStringValueLiteral(valueCode); + this.tokens.replaceToken(literalCode + replacementCode); + } + + /** + * Starting in the middle of the props object literal, produce an additional + * prop for the children and close the object literal. + */ + processAutomaticChildrenAndEndProps(jsxRole) { + if (jsxRole === JSXRole.StaticChildren) { + this.tokens.appendCode(" children: ["); + this.processChildren(false); + this.tokens.appendCode("]}"); + } else { + // The parser information tells us whether we will see a real child or if + // all remaining children (if any) will resolve to empty. If there are no + // non-empty children, don't emit a children prop at all, but still + // process children so that we properly transform the code into nothing. + if (jsxRole === JSXRole.OneChild) { + this.tokens.appendCode(" children: "); + } + this.processChildren(false); + this.tokens.appendCode("}"); + } + } + + /** + * Transform children into a comma-separated list, which will be either + * arguments to createElement or array elements of a children prop. + */ + processChildren(needsInitialComma) { + let needsComma = needsInitialComma; + while (true) { + if (this.tokens.matches2(tt.jsxTagStart, tt.slash)) { + // Closing tag, so no more children. + return; + } + let didEmitElement = false; + if (this.tokens.matches1(tt.braceL)) { + if (this.tokens.matches2(tt.braceL, tt.braceR)) { + // Empty interpolations and comment-only interpolations are allowed + // and don't create an extra child arg. + this.tokens.replaceToken(""); + this.tokens.replaceToken(""); + } else { + // Interpolated expression. + this.tokens.replaceToken(needsComma ? ", " : ""); + this.rootTransformer.processBalancedCode(); + this.tokens.replaceToken(""); + didEmitElement = true; + } + } else if (this.tokens.matches1(tt.jsxTagStart)) { + // Child JSX element + this.tokens.appendCode(needsComma ? ", " : ""); + this.processJSXTag(); + didEmitElement = true; + } else if (this.tokens.matches1(tt.jsxText) || this.tokens.matches1(tt.jsxEmptyText)) { + didEmitElement = this.processChildTextElement(needsComma); + } else { + throw new Error("Unexpected token when processing JSX children."); + } + if (didEmitElement) { + needsComma = true; + } + } + } + + /** + * Turn a JSX text element into a string literal, or nothing at all if the JSX + * text resolves to the empty string. + * + * Returns true if a string literal is emitted, false otherwise. 
+ */ + processChildTextElement(needsComma) { + const token = this.tokens.currentToken(); + const valueCode = this.tokens.code.slice(token.start, token.end); + const replacementCode = formatJSXTextReplacement(valueCode); + const literalCode = formatJSXTextLiteral(valueCode); + if (literalCode === '""') { + this.tokens.replaceToken(replacementCode); + return false; + } else { + this.tokens.replaceToken(`${needsComma ? ", " : ""}${literalCode}${replacementCode}`); + return true; + } + } + + getDevSource(elementLocationCode) { + return `{fileName: ${this.getFilenameVarName()}, ${elementLocationCode}}`; + } + + getFilenameVarName() { + if (!this.filenameVarName) { + this.filenameVarName = this.nameManager.claimFreeName("_jsxFileName"); + } + return this.filenameVarName; + } +} + +/** + * Spec for identifiers: https://tc39.github.io/ecma262/#prod-IdentifierStart. + * + * Really only treat anything starting with a-z as tag names. `_`, `$`, `é` + * should be treated as component names + */ +export function startsWithLowerCase(s) { + const firstChar = s.charCodeAt(0); + return firstChar >= charCodes.lowercaseA && firstChar <= charCodes.lowercaseZ; +} + +/** + * Turn the given jsxText string into a JS string literal. Leading and trailing + * whitespace on lines is removed, except immediately after the open-tag and + * before the close-tag. Empty lines are completely removed, and spaces are + * added between lines after that. + * + * We use JSON.stringify to introduce escape characters as necessary, and trim + * the start and end of each line and remove blank lines. + */ +function formatJSXTextLiteral(text) { + let result = ""; + let whitespace = ""; + + let isInInitialLineWhitespace = false; + let seenNonWhitespace = false; + for (let i = 0; i < text.length; i++) { + const c = text[i]; + if (c === " " || c === "\t" || c === "\r") { + if (!isInInitialLineWhitespace) { + whitespace += c; + } + } else if (c === "\n") { + whitespace = ""; + isInInitialLineWhitespace = true; + } else { + if (seenNonWhitespace && isInInitialLineWhitespace) { + result += " "; + } + result += whitespace; + whitespace = ""; + if (c === "&") { + const {entity, newI} = processEntity(text, i + 1); + i = newI - 1; + result += entity; + } else { + result += c; + } + seenNonWhitespace = true; + isInInitialLineWhitespace = false; + } + } + if (!isInInitialLineWhitespace) { + result += whitespace; + } + return JSON.stringify(result); +} + +/** + * Produce the code that should be printed after the JSX text string literal, + * with most content removed, but all newlines preserved and all spacing at the + * end preserved. + */ +function formatJSXTextReplacement(text) { + let numNewlines = 0; + let numSpaces = 0; + for (const c of text) { + if (c === "\n") { + numNewlines++; + numSpaces = 0; + } else if (c === " ") { + numSpaces++; + } + } + return "\n".repeat(numNewlines) + " ".repeat(numSpaces); +} + +/** + * Format a string in the value position of a JSX prop. + * + * Use the same implementation as convertAttribute from + * babel-helper-builder-react-jsx. 
+ */ +function formatJSXStringValueLiteral(text) { + let result = ""; + for (let i = 0; i < text.length; i++) { + const c = text[i]; + if (c === "\n") { + if (/\s/.test(text[i + 1])) { + result += " "; + while (i < text.length && /\s/.test(text[i + 1])) { + i++; + } + } else { + result += "\n"; + } + } else if (c === "&") { + const {entity, newI} = processEntity(text, i + 1); + result += entity; + i = newI - 1; + } else { + result += c; + } + } + return JSON.stringify(result); +} + +/** + * Starting at a &, see if there's an HTML entity (specified by name, decimal + * char code, or hex char code) and return it if so. + * + * Modified from jsxReadString in babel-parser. + */ +function processEntity(text, indexAfterAmpersand) { + let str = ""; + let count = 0; + let entity; + let i = indexAfterAmpersand; + + if (text[i] === "#") { + let radix = 10; + i++; + let numStart; + if (text[i] === "x") { + radix = 16; + i++; + numStart = i; + while (i < text.length && isHexDigit(text.charCodeAt(i))) { + i++; + } + } else { + numStart = i; + while (i < text.length && isDecimalDigit(text.charCodeAt(i))) { + i++; + } + } + if (text[i] === ";") { + const numStr = text.slice(numStart, i); + if (numStr) { + i++; + entity = String.fromCodePoint(parseInt(numStr, radix)); + } + } + } else { + while (i < text.length && count++ < 10) { + const ch = text[i]; + i++; + if (ch === ";") { + entity = XHTMLEntities.get(str); + break; + } + str += ch; + } + } + + if (!entity) { + return {entity: "&", newI: indexAfterAmpersand}; + } + return {entity, newI: i}; +} + +function isDecimalDigit(code) { + return code >= charCodes.digit0 && code <= charCodes.digit9; +} + +function isHexDigit(code) { + return ( + (code >= charCodes.digit0 && code <= charCodes.digit9) || + (code >= charCodes.lowercaseA && code <= charCodes.lowercaseF) || + (code >= charCodes.uppercaseA && code <= charCodes.uppercaseF) + ); +} diff --git a/node_modules/sucrase/dist/esm/transformers/JestHoistTransformer.js b/node_modules/sucrase/dist/esm/transformers/JestHoistTransformer.js new file mode 100644 index 0000000..8f45d06 --- /dev/null +++ b/node_modules/sucrase/dist/esm/transformers/JestHoistTransformer.js @@ -0,0 +1,111 @@ + function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; } + +import {TokenType as tt} from "../parser/tokenizer/types"; + + +import Transformer from "./Transformer"; + +const JEST_GLOBAL_NAME = "jest"; +const HOISTED_METHODS = ["mock", "unmock", "enableAutomock", "disableAutomock"]; + +/** + * Implementation of babel-plugin-jest-hoist, which hoists up some jest method + * calls above the imports to allow them to override other imports. + * + * To preserve line numbers, rather than directly moving the jest.mock code, we + * wrap each invocation in a function statement and then call the function from + * the top of the file. 
+ */ +export default class JestHoistTransformer extends Transformer { + __init() {this.hoistedFunctionNames = []} + + constructor( + rootTransformer, + tokens, + nameManager, + importProcessor, + ) { + super();this.rootTransformer = rootTransformer;this.tokens = tokens;this.nameManager = nameManager;this.importProcessor = importProcessor;JestHoistTransformer.prototype.__init.call(this);; + } + + process() { + if ( + this.tokens.currentToken().scopeDepth === 0 && + this.tokens.matches4(tt.name, tt.dot, tt.name, tt.parenL) && + this.tokens.identifierName() === JEST_GLOBAL_NAME + ) { + // TODO: This only works if imports transform is active, which it will be for jest. + // But if jest adds module support and we no longer need the import transform, this needs fixing. + if (_optionalChain([this, 'access', _ => _.importProcessor, 'optionalAccess', _2 => _2.getGlobalNames, 'call', _3 => _3(), 'optionalAccess', _4 => _4.has, 'call', _5 => _5(JEST_GLOBAL_NAME)])) { + return false; + } + return this.extractHoistedCalls(); + } + + return false; + } + + getHoistedCode() { + if (this.hoistedFunctionNames.length > 0) { + // This will be placed before module interop code, but that's fine since + // imports aren't allowed in module mock factories. + return this.hoistedFunctionNames.map((name) => `${name}();`).join(""); + } + return ""; + } + + /** + * Extracts any methods calls on the jest-object that should be hoisted. + * + * According to the jest docs, https://jestjs.io/docs/en/jest-object#jestmockmodulename-factory-options, + * mock, unmock, enableAutomock, disableAutomock, are the methods that should be hoisted. + * + * We do not apply the same checks of the arguments as babel-plugin-jest-hoist does. + */ + extractHoistedCalls() { + // We're handling a chain of calls where `jest` may or may not need to be inserted for each call + // in the chain, so remove the initial `jest` to make the loop implementation cleaner. + this.tokens.removeToken(); + // Track some state so that multiple non-hoisted chained calls in a row keep their chaining + // syntax. + let followsNonHoistedJestCall = false; + + // Iterate through all chained calls on the jest object. + while (this.tokens.matches3(tt.dot, tt.name, tt.parenL)) { + const methodName = this.tokens.identifierNameAtIndex(this.tokens.currentIndex() + 1); + const shouldHoist = HOISTED_METHODS.includes(methodName); + if (shouldHoist) { + // We've matched e.g. `.mock(...)` or similar call. + // Replace the initial `.` with `function __jestHoist(){jest.` + const hoistedFunctionName = this.nameManager.claimFreeName("__jestHoist"); + this.hoistedFunctionNames.push(hoistedFunctionName); + this.tokens.replaceToken(`function ${hoistedFunctionName}(){${JEST_GLOBAL_NAME}.`); + this.tokens.copyToken(); + this.tokens.copyToken(); + this.rootTransformer.processBalancedCode(); + this.tokens.copyExpectedToken(tt.parenR); + this.tokens.appendCode(";}"); + followsNonHoistedJestCall = false; + } else { + // This is a non-hoisted method, so just transform the code as usual. + if (followsNonHoistedJestCall) { + // If we didn't hoist the previous call, we can leave the code as-is to chain off of the + // previous method call. It's important to preserve the code here because we don't know + // for sure that the method actually returned the jest object for chaining. + this.tokens.copyToken(); + } else { + // If we hoisted the previous call, we know it returns the jest object back, so we insert + // the identifier `jest` to continue the chain. 
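A sketch of the effect of this hoisting (output paraphrased; the __jestHoist name is claimed from the name manager and may be suffixed to stay unique):

import {transform} from "sucrase";

const src = `import {thing} from "./thing";
jest.mock("./thing");
test("works", () => {});`;

const out = transform(src, {transforms: ["imports", "jest"]}).code;
// The jest.mock(...) call is wrapped roughly as
//   function __jestHoist(){jest.mock("./thing");}
// and a __jestHoist(); call is emitted in the prefix near the top of the
// file, ahead of the require() generated for "./thing", so the mock is
// registered before the module is loaded.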
+ this.tokens.replaceToken(`${JEST_GLOBAL_NAME}.`); + } + this.tokens.copyToken(); + this.tokens.copyToken(); + this.rootTransformer.processBalancedCode(); + this.tokens.copyExpectedToken(tt.parenR); + followsNonHoistedJestCall = true; + } + } + + return true; + } +} diff --git a/node_modules/sucrase/dist/esm/transformers/NumericSeparatorTransformer.js b/node_modules/sucrase/dist/esm/transformers/NumericSeparatorTransformer.js new file mode 100644 index 0000000..0cb01a1 --- /dev/null +++ b/node_modules/sucrase/dist/esm/transformers/NumericSeparatorTransformer.js @@ -0,0 +1,20 @@ +import {TokenType as tt} from "../parser/tokenizer/types"; + +import Transformer from "./Transformer"; + +export default class NumericSeparatorTransformer extends Transformer { + constructor( tokens) { + super();this.tokens = tokens;; + } + + process() { + if (this.tokens.matches1(tt.num)) { + const code = this.tokens.currentTokenCode(); + if (code.includes("_")) { + this.tokens.replaceToken(code.replace(/_/g, "")); + return true; + } + } + return false; + } +} diff --git a/node_modules/sucrase/dist/esm/transformers/OptionalCatchBindingTransformer.js b/node_modules/sucrase/dist/esm/transformers/OptionalCatchBindingTransformer.js new file mode 100644 index 0000000..547273b --- /dev/null +++ b/node_modules/sucrase/dist/esm/transformers/OptionalCatchBindingTransformer.js @@ -0,0 +1,19 @@ + +import {TokenType as tt} from "../parser/tokenizer/types"; + +import Transformer from "./Transformer"; + +export default class OptionalCatchBindingTransformer extends Transformer { + constructor( tokens, nameManager) { + super();this.tokens = tokens;this.nameManager = nameManager;; + } + + process() { + if (this.tokens.matches2(tt._catch, tt.braceL)) { + this.tokens.copyToken(); + this.tokens.appendCode(` (${this.nameManager.claimFreeName("e")})`); + return true; + } + return false; + } +} diff --git a/node_modules/sucrase/dist/esm/transformers/OptionalChainingNullishTransformer.js b/node_modules/sucrase/dist/esm/transformers/OptionalChainingNullishTransformer.js new file mode 100644 index 0000000..571d97f --- /dev/null +++ b/node_modules/sucrase/dist/esm/transformers/OptionalChainingNullishTransformer.js @@ -0,0 +1,155 @@ + +import {TokenType as tt} from "../parser/tokenizer/types"; + +import Transformer from "./Transformer"; + +/** + * Transformer supporting the optional chaining and nullish coalescing operators. + * + * Tech plan here: + * https://github.com/alangpierce/sucrase/wiki/Sucrase-Optional-Chaining-and-Nullish-Coalescing-Technical-Plan + * + * The prefix and suffix code snippets are handled by TokenProcessor, and this transformer handles + * the operators themselves. 
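The two small transformers above are plain ES-downleveling helpers; a usage sketch (assuming an empty transforms list is accepted, which still leaves these ES transforms active unless disableESTransforms is set):

import {transform} from "sucrase";

// Numeric separators are stripped from number literals.
transform("const n = 1_000_000;", {transforms: []}).code;
// -> roughly: const n = 1000000;

// A binding is invented for bare `catch {` blocks using a free name.
transform("try { work(); } catch { recover(); }", {transforms: []}).code;
// -> roughly: try { work(); } catch (e) { recover(); }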
+ */ +export default class OptionalChainingNullishTransformer extends Transformer { + constructor( tokens, nameManager) { + super();this.tokens = tokens;this.nameManager = nameManager;; + } + + process() { + if (this.tokens.matches1(tt.nullishCoalescing)) { + const token = this.tokens.currentToken(); + if (this.tokens.tokens[token.nullishStartIndex].isAsyncOperation) { + this.tokens.replaceTokenTrimmingLeftWhitespace(", async () => ("); + } else { + this.tokens.replaceTokenTrimmingLeftWhitespace(", () => ("); + } + return true; + } + if (this.tokens.matches1(tt._delete)) { + const nextToken = this.tokens.tokenAtRelativeIndex(1); + if (nextToken.isOptionalChainStart) { + this.tokens.removeInitialToken(); + return true; + } + } + const token = this.tokens.currentToken(); + const chainStart = token.subscriptStartIndex; + if ( + chainStart != null && + this.tokens.tokens[chainStart].isOptionalChainStart && + // Super subscripts can't be optional (since super is never null/undefined), and the syntax + // relies on the subscript being intact, so leave this token alone. + this.tokens.tokenAtRelativeIndex(-1).type !== tt._super + ) { + const param = this.nameManager.claimFreeName("_"); + let arrowStartSnippet; + if ( + chainStart > 0 && + this.tokens.matches1AtIndex(chainStart - 1, tt._delete) && + this.isLastSubscriptInChain() + ) { + // Delete operations are special: we already removed the delete keyword, and to still + // perform a delete, we need to insert a delete in the very last part of the chain, which + // in correct code will always be a property access. + arrowStartSnippet = `${param} => delete ${param}`; + } else { + arrowStartSnippet = `${param} => ${param}`; + } + if (this.tokens.tokens[chainStart].isAsyncOperation) { + arrowStartSnippet = `async ${arrowStartSnippet}`; + } + if ( + this.tokens.matches2(tt.questionDot, tt.parenL) || + this.tokens.matches2(tt.questionDot, tt.lessThan) + ) { + if (this.justSkippedSuper()) { + this.tokens.appendCode(".bind(this)"); + } + this.tokens.replaceTokenTrimmingLeftWhitespace(`, 'optionalCall', ${arrowStartSnippet}`); + } else if (this.tokens.matches2(tt.questionDot, tt.bracketL)) { + this.tokens.replaceTokenTrimmingLeftWhitespace(`, 'optionalAccess', ${arrowStartSnippet}`); + } else if (this.tokens.matches1(tt.questionDot)) { + this.tokens.replaceTokenTrimmingLeftWhitespace(`, 'optionalAccess', ${arrowStartSnippet}.`); + } else if (this.tokens.matches1(tt.dot)) { + this.tokens.replaceTokenTrimmingLeftWhitespace(`, 'access', ${arrowStartSnippet}.`); + } else if (this.tokens.matches1(tt.bracketL)) { + this.tokens.replaceTokenTrimmingLeftWhitespace(`, 'access', ${arrowStartSnippet}[`); + } else if (this.tokens.matches1(tt.parenL)) { + if (this.justSkippedSuper()) { + this.tokens.appendCode(".bind(this)"); + } + this.tokens.replaceTokenTrimmingLeftWhitespace(`, 'call', ${arrowStartSnippet}(`); + } else { + throw new Error("Unexpected subscript operator in optional chain."); + } + return true; + } + return false; + } + + /** + * Determine if the current token is the last of its chain, so that we know whether it's eligible + * to have a delete op inserted. + * + * We can do this by walking forward until we determine one way or another. Each + * isOptionalChainStart token must be paired with exactly one isOptionalChainEnd token after it in + * a nesting way, so we can track depth and walk to the end of the chain (the point where the + * depth goes negative) and see if any other subscript token is after us in the chain. 
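A sketch of the output shape this produces (paraphrased; the _optionalChain helper seen earlier in this diff is emitted by the helper manager, and the _, _2, _3 parameter names come from the name manager):

import {transform} from "sucrase";

const out = transform("const result = a?.b.c();", {transforms: []}).code;
// Roughly:
//   const result = _optionalChain([a, 'optionalAccess', _ => _.b, 'access', _2 => _2.c, 'call', _3 => _3()]);
// For `delete a?.b`, the delete keyword is dropped and re-inserted in the
// last subscript, e.g. _optionalChain([a, 'optionalAccess', _ => delete _.b]).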
+ */ + isLastSubscriptInChain() { + let depth = 0; + for (let i = this.tokens.currentIndex() + 1; ; i++) { + if (i >= this.tokens.tokens.length) { + throw new Error("Reached the end of the code while finding the end of the access chain."); + } + if (this.tokens.tokens[i].isOptionalChainStart) { + depth++; + } else if (this.tokens.tokens[i].isOptionalChainEnd) { + depth--; + } + if (depth < 0) { + return true; + } + + // This subscript token is a later one in the same chain. + if (depth === 0 && this.tokens.tokens[i].subscriptStartIndex != null) { + return false; + } + } + } + + /** + * Determine if we are the open-paren in an expression like super.a()?.b. + * + * We can do this by walking backward to find the previous subscript. If that subscript was + * preceded by a super, then we must be the subscript after it, so if this is a call expression, + * we'll need to attach the right context. + */ + justSkippedSuper() { + let depth = 0; + let index = this.tokens.currentIndex() - 1; + while (true) { + if (index < 0) { + throw new Error( + "Reached the start of the code while finding the start of the access chain.", + ); + } + if (this.tokens.tokens[index].isOptionalChainStart) { + depth--; + } else if (this.tokens.tokens[index].isOptionalChainEnd) { + depth++; + } + if (depth < 0) { + return false; + } + + // This subscript token is a later one in the same chain. + if (depth === 0 && this.tokens.tokens[index].subscriptStartIndex != null) { + return this.tokens.tokens[index - 1].type === tt._super; + } + index--; + } + } +} diff --git a/node_modules/sucrase/dist/esm/transformers/ReactDisplayNameTransformer.js b/node_modules/sucrase/dist/esm/transformers/ReactDisplayNameTransformer.js new file mode 100644 index 0000000..0c44c81 --- /dev/null +++ b/node_modules/sucrase/dist/esm/transformers/ReactDisplayNameTransformer.js @@ -0,0 +1,160 @@ + + +import {IdentifierRole} from "../parser/tokenizer"; +import {TokenType as tt} from "../parser/tokenizer/types"; + + +import Transformer from "./Transformer"; + +/** + * Implementation of babel-plugin-transform-react-display-name, which adds a + * display name to usages of React.createClass and createReactClass. + */ +export default class ReactDisplayNameTransformer extends Transformer { + constructor( + rootTransformer, + tokens, + importProcessor, + options, + ) { + super();this.rootTransformer = rootTransformer;this.tokens = tokens;this.importProcessor = importProcessor;this.options = options;; + } + + process() { + const startIndex = this.tokens.currentIndex(); + if (this.tokens.identifierName() === "createReactClass") { + const newName = + this.importProcessor && this.importProcessor.getIdentifierReplacement("createReactClass"); + if (newName) { + this.tokens.replaceToken(`(0, ${newName})`); + } else { + this.tokens.copyToken(); + } + this.tryProcessCreateClassCall(startIndex); + return true; + } + if ( + this.tokens.matches3(tt.name, tt.dot, tt.name) && + this.tokens.identifierName() === "React" && + this.tokens.identifierNameAtIndex(this.tokens.currentIndex() + 2) === "createClass" + ) { + const newName = this.importProcessor + ? 
this.importProcessor.getIdentifierReplacement("React") || "React" + : "React"; + if (newName) { + this.tokens.replaceToken(newName); + this.tokens.copyToken(); + this.tokens.copyToken(); + } else { + this.tokens.copyToken(); + this.tokens.copyToken(); + this.tokens.copyToken(); + } + this.tryProcessCreateClassCall(startIndex); + return true; + } + return false; + } + + /** + * This is called with the token position at the open-paren. + */ + tryProcessCreateClassCall(startIndex) { + const displayName = this.findDisplayName(startIndex); + if (!displayName) { + return; + } + + if (this.classNeedsDisplayName()) { + this.tokens.copyExpectedToken(tt.parenL); + this.tokens.copyExpectedToken(tt.braceL); + this.tokens.appendCode(`displayName: '${displayName}',`); + this.rootTransformer.processBalancedCode(); + this.tokens.copyExpectedToken(tt.braceR); + this.tokens.copyExpectedToken(tt.parenR); + } + } + + findDisplayName(startIndex) { + if (startIndex < 2) { + return null; + } + if (this.tokens.matches2AtIndex(startIndex - 2, tt.name, tt.eq)) { + // This is an assignment (or declaration) and the LHS is either an identifier or a member + // expression ending in an identifier, so use that identifier name. + return this.tokens.identifierNameAtIndex(startIndex - 2); + } + if ( + startIndex >= 2 && + this.tokens.tokens[startIndex - 2].identifierRole === IdentifierRole.ObjectKey + ) { + // This is an object literal value. + return this.tokens.identifierNameAtIndex(startIndex - 2); + } + if (this.tokens.matches2AtIndex(startIndex - 2, tt._export, tt._default)) { + return this.getDisplayNameFromFilename(); + } + return null; + } + + getDisplayNameFromFilename() { + const filePath = this.options.filePath || "unknown"; + const pathSegments = filePath.split("/"); + const filename = pathSegments[pathSegments.length - 1]; + const dotIndex = filename.lastIndexOf("."); + const baseFilename = dotIndex === -1 ? filename : filename.slice(0, dotIndex); + if (baseFilename === "index" && pathSegments[pathSegments.length - 2]) { + return pathSegments[pathSegments.length - 2]; + } else { + return baseFilename; + } + } + + /** + * We only want to add a display name when this is a function call containing + * one argument, which is an object literal without `displayName` as an + * existing key. + */ + classNeedsDisplayName() { + let index = this.tokens.currentIndex(); + if (!this.tokens.matches2(tt.parenL, tt.braceL)) { + return false; + } + // The block starts on the {, and we expect any displayName key to be in + // that context. We need to ignore other other contexts to avoid matching + // nested displayName keys. + const objectStartIndex = index + 1; + const objectContextId = this.tokens.tokens[objectStartIndex].contextId; + if (objectContextId == null) { + throw new Error("Expected non-null context ID on object open-brace."); + } + + for (; index < this.tokens.tokens.length; index++) { + const token = this.tokens.tokens[index]; + if (token.type === tt.braceR && token.contextId === objectContextId) { + index++; + break; + } + + if ( + this.tokens.identifierNameAtIndex(index) === "displayName" && + this.tokens.tokens[index].identifierRole === IdentifierRole.ObjectKey && + token.contextId === objectContextId + ) { + // We found a displayName key, so bail out. 
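A sketch of the resulting behavior (output paraphrased):

import {transform} from "sucrase";

const src = "const Widget = createReactClass({ render() { return null; } });";
const out = transform(src, {transforms: ["jsx"]}).code;
// The object literal gains a displayName derived from the assignment target:
//   const Widget = createReactClass({displayName: 'Widget', render() { return null; } });
// For `export default createReactClass({...})`, the name falls back to the
// file name (or the directory name for index files), which requires the
// filePath option to be set.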
+ return false; + } + } + + if (index === this.tokens.tokens.length) { + throw new Error("Unexpected end of input when processing React class."); + } + + // If we got this far, we know we have createClass with an object with no + // display name, so we want to proceed as long as that was the only argument. + return ( + this.tokens.matches1AtIndex(index, tt.parenR) || + this.tokens.matches2AtIndex(index, tt.comma, tt.parenR) + ); + } +} diff --git a/node_modules/sucrase/dist/esm/transformers/ReactHotLoaderTransformer.js b/node_modules/sucrase/dist/esm/transformers/ReactHotLoaderTransformer.js new file mode 100644 index 0000000..873902e --- /dev/null +++ b/node_modules/sucrase/dist/esm/transformers/ReactHotLoaderTransformer.js @@ -0,0 +1,69 @@ +import {IdentifierRole, isTopLevelDeclaration} from "../parser/tokenizer"; + +import Transformer from "./Transformer"; + +export default class ReactHotLoaderTransformer extends Transformer { + __init() {this.extractedDefaultExportName = null} + + constructor( tokens, filePath) { + super();this.tokens = tokens;this.filePath = filePath;ReactHotLoaderTransformer.prototype.__init.call(this);; + } + + setExtractedDefaultExportName(extractedDefaultExportName) { + this.extractedDefaultExportName = extractedDefaultExportName; + } + + getPrefixCode() { + return ` + (function () { + var enterModule = require('react-hot-loader').enterModule; + enterModule && enterModule(module); + })();` + .replace(/\s+/g, " ") + .trim(); + } + + getSuffixCode() { + const topLevelNames = new Set(); + for (const token of this.tokens.tokens) { + if ( + !token.isType && + isTopLevelDeclaration(token) && + token.identifierRole !== IdentifierRole.ImportDeclaration + ) { + topLevelNames.add(this.tokens.identifierNameForToken(token)); + } + } + const namesToRegister = Array.from(topLevelNames).map((name) => ({ + variableName: name, + uniqueLocalName: name, + })); + if (this.extractedDefaultExportName) { + namesToRegister.push({ + variableName: this.extractedDefaultExportName, + uniqueLocalName: "default", + }); + } + return ` +;(function () { + var reactHotLoader = require('react-hot-loader').default; + var leaveModule = require('react-hot-loader').leaveModule; + if (!reactHotLoader) { + return; + } +${namesToRegister + .map( + ({variableName, uniqueLocalName}) => + ` reactHotLoader.register(${variableName}, "${uniqueLocalName}", ${JSON.stringify( + this.filePath || "", + )});`, + ) + .join("\n")} + leaveModule(module); +})();`; + } + + process() { + return false; + } +} diff --git a/node_modules/sucrase/dist/esm/transformers/RootTransformer.js b/node_modules/sucrase/dist/esm/transformers/RootTransformer.js new file mode 100644 index 0000000..35cafe3 --- /dev/null +++ b/node_modules/sucrase/dist/esm/transformers/RootTransformer.js @@ -0,0 +1,458 @@ + + + +import {ContextualKeyword} from "../parser/tokenizer/keywords"; +import {TokenType as tt} from "../parser/tokenizer/types"; + +import getClassInfo, {} from "../util/getClassInfo"; +import CJSImportTransformer from "./CJSImportTransformer"; +import ESMImportTransformer from "./ESMImportTransformer"; +import FlowTransformer from "./FlowTransformer"; +import JestHoistTransformer from "./JestHoistTransformer"; +import JSXTransformer from "./JSXTransformer"; +import NumericSeparatorTransformer from "./NumericSeparatorTransformer"; +import OptionalCatchBindingTransformer from "./OptionalCatchBindingTransformer"; +import OptionalChainingNullishTransformer from "./OptionalChainingNullishTransformer"; +import ReactDisplayNameTransformer from 
"./ReactDisplayNameTransformer"; +import ReactHotLoaderTransformer from "./ReactHotLoaderTransformer"; + +import TypeScriptTransformer from "./TypeScriptTransformer"; + + + + + + + + +export default class RootTransformer { + __init() {this.transformers = []} + + + __init2() {this.generatedVariables = []} + + + + + + constructor( + sucraseContext, + transforms, + enableLegacyBabel5ModuleInterop, + options, + ) {;RootTransformer.prototype.__init.call(this);RootTransformer.prototype.__init2.call(this); + this.nameManager = sucraseContext.nameManager; + this.helperManager = sucraseContext.helperManager; + const {tokenProcessor, importProcessor} = sucraseContext; + this.tokens = tokenProcessor; + this.isImportsTransformEnabled = transforms.includes("imports"); + this.isReactHotLoaderTransformEnabled = transforms.includes("react-hot-loader"); + this.disableESTransforms = Boolean(options.disableESTransforms); + + if (!options.disableESTransforms) { + this.transformers.push( + new OptionalChainingNullishTransformer(tokenProcessor, this.nameManager), + ); + this.transformers.push(new NumericSeparatorTransformer(tokenProcessor)); + this.transformers.push(new OptionalCatchBindingTransformer(tokenProcessor, this.nameManager)); + } + + if (transforms.includes("jsx")) { + if (options.jsxRuntime !== "preserve") { + this.transformers.push( + new JSXTransformer(this, tokenProcessor, importProcessor, this.nameManager, options), + ); + } + this.transformers.push( + new ReactDisplayNameTransformer(this, tokenProcessor, importProcessor, options), + ); + } + + let reactHotLoaderTransformer = null; + if (transforms.includes("react-hot-loader")) { + if (!options.filePath) { + throw new Error("filePath is required when using the react-hot-loader transform."); + } + reactHotLoaderTransformer = new ReactHotLoaderTransformer(tokenProcessor, options.filePath); + this.transformers.push(reactHotLoaderTransformer); + } + + // Note that we always want to enable the imports transformer, even when the import transform + // itself isn't enabled, since we need to do type-only import pruning for both Flow and + // TypeScript. + if (transforms.includes("imports")) { + if (importProcessor === null) { + throw new Error("Expected non-null importProcessor with imports transform enabled."); + } + this.transformers.push( + new CJSImportTransformer( + this, + tokenProcessor, + importProcessor, + this.nameManager, + this.helperManager, + reactHotLoaderTransformer, + enableLegacyBabel5ModuleInterop, + Boolean(options.enableLegacyTypeScriptModuleInterop), + transforms.includes("typescript"), + Boolean(options.preserveDynamicImport), + ), + ); + } else { + this.transformers.push( + new ESMImportTransformer( + tokenProcessor, + this.nameManager, + this.helperManager, + reactHotLoaderTransformer, + transforms.includes("typescript"), + options, + ), + ); + } + + if (transforms.includes("flow")) { + this.transformers.push( + new FlowTransformer(this, tokenProcessor, transforms.includes("imports")), + ); + } + if (transforms.includes("typescript")) { + this.transformers.push( + new TypeScriptTransformer(this, tokenProcessor, transforms.includes("imports")), + ); + } + if (transforms.includes("jest")) { + this.transformers.push( + new JestHoistTransformer(this, tokenProcessor, this.nameManager, importProcessor), + ); + } + } + + transform() { + this.tokens.reset(); + this.processBalancedCode(); + const shouldAddUseStrict = this.isImportsTransformEnabled; + // "use strict" always needs to be first, so override the normal transformer order. 
+ let prefix = shouldAddUseStrict ? '"use strict";' : ""; + for (const transformer of this.transformers) { + prefix += transformer.getPrefixCode(); + } + prefix += this.helperManager.emitHelpers(); + prefix += this.generatedVariables.map((v) => ` var ${v};`).join(""); + for (const transformer of this.transformers) { + prefix += transformer.getHoistedCode(); + } + let suffix = ""; + for (const transformer of this.transformers) { + suffix += transformer.getSuffixCode(); + } + const result = this.tokens.finish(); + let {code} = result; + if (code.startsWith("#!")) { + let newlineIndex = code.indexOf("\n"); + if (newlineIndex === -1) { + newlineIndex = code.length; + code += "\n"; + } + return { + code: code.slice(0, newlineIndex + 1) + prefix + code.slice(newlineIndex + 1) + suffix, + // The hashbang line has no tokens, so shifting the tokens to account + // for prefix can happen normally. + mappings: this.shiftMappings(result.mappings, prefix.length), + }; + } else { + return { + code: prefix + code + suffix, + mappings: this.shiftMappings(result.mappings, prefix.length), + }; + } + } + + processBalancedCode() { + let braceDepth = 0; + let parenDepth = 0; + while (!this.tokens.isAtEnd()) { + if (this.tokens.matches1(tt.braceL) || this.tokens.matches1(tt.dollarBraceL)) { + braceDepth++; + } else if (this.tokens.matches1(tt.braceR)) { + if (braceDepth === 0) { + return; + } + braceDepth--; + } + if (this.tokens.matches1(tt.parenL)) { + parenDepth++; + } else if (this.tokens.matches1(tt.parenR)) { + if (parenDepth === 0) { + return; + } + parenDepth--; + } + this.processToken(); + } + } + + processToken() { + if (this.tokens.matches1(tt._class)) { + this.processClass(); + return; + } + for (const transformer of this.transformers) { + const wasProcessed = transformer.process(); + if (wasProcessed) { + return; + } + } + this.tokens.copyToken(); + } + + /** + * Skip past a class with a name and return that name. + */ + processNamedClass() { + if (!this.tokens.matches2(tt._class, tt.name)) { + throw new Error("Expected identifier for exported class name."); + } + const name = this.tokens.identifierNameAtIndex(this.tokens.currentIndex() + 1); + this.processClass(); + return name; + } + + processClass() { + const classInfo = getClassInfo(this, this.tokens, this.nameManager, this.disableESTransforms); + + // Both static and instance initializers need a class name to use to invoke the initializer, so + // assign to one if necessary. 
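A quick sketch of the prefix/suffix assembly described above (output paraphrased):

import {transform} from "sucrase";

const out = transform(
  '#!/usr/bin/env node\nimport {x} from "./x";\nconsole.log(x);',
  {transforms: ["imports"]},
).code;
// The hashbang line stays first; "use strict", helper code, generated
// variables, and any hoisted code are inserted immediately after it,
// followed by the transformed module body (here, a require() call produced
// by the imports transform).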
+ const needsCommaExpression = + (classInfo.headerInfo.isExpression || !classInfo.headerInfo.className) && + classInfo.staticInitializerNames.length + classInfo.instanceInitializerNames.length > 0; + + let className = classInfo.headerInfo.className; + if (needsCommaExpression) { + className = this.nameManager.claimFreeName("_class"); + this.generatedVariables.push(className); + this.tokens.appendCode(` (${className} =`); + } + + const classToken = this.tokens.currentToken(); + const contextId = classToken.contextId; + if (contextId == null) { + throw new Error("Expected class to have a context ID."); + } + this.tokens.copyExpectedToken(tt._class); + while (!this.tokens.matchesContextIdAndLabel(tt.braceL, contextId)) { + this.processToken(); + } + + this.processClassBody(classInfo, className); + + const staticInitializerStatements = classInfo.staticInitializerNames.map( + (name) => `${className}.${name}()`, + ); + if (needsCommaExpression) { + this.tokens.appendCode( + `, ${staticInitializerStatements.map((s) => `${s}, `).join("")}${className})`, + ); + } else if (classInfo.staticInitializerNames.length > 0) { + this.tokens.appendCode(` ${staticInitializerStatements.map((s) => `${s};`).join(" ")}`); + } + } + + /** + * We want to just handle class fields in all contexts, since TypeScript supports them. Later, + * when some JS implementations support class fields, this should be made optional. + */ + processClassBody(classInfo, className) { + const { + headerInfo, + constructorInsertPos, + constructorInitializerStatements, + fields, + instanceInitializerNames, + rangesToRemove, + } = classInfo; + let fieldIndex = 0; + let rangeToRemoveIndex = 0; + const classContextId = this.tokens.currentToken().contextId; + if (classContextId == null) { + throw new Error("Expected non-null context ID on class."); + } + this.tokens.copyExpectedToken(tt.braceL); + if (this.isReactHotLoaderTransformEnabled) { + this.tokens.appendCode( + "__reactstandin__regenerateByEval(key, code) {this[key] = eval(code);}", + ); + } + + const needsConstructorInit = + constructorInitializerStatements.length + instanceInitializerNames.length > 0; + + if (constructorInsertPos === null && needsConstructorInit) { + const constructorInitializersCode = this.makeConstructorInitCode( + constructorInitializerStatements, + instanceInitializerNames, + className, + ); + if (headerInfo.hasSuperclass) { + const argsName = this.nameManager.claimFreeName("args"); + this.tokens.appendCode( + `constructor(...${argsName}) { super(...${argsName}); ${constructorInitializersCode}; }`, + ); + } else { + this.tokens.appendCode(`constructor() { ${constructorInitializersCode}; }`); + } + } + + while (!this.tokens.matchesContextIdAndLabel(tt.braceR, classContextId)) { + if (fieldIndex < fields.length && this.tokens.currentIndex() === fields[fieldIndex].start) { + let needsCloseBrace = false; + if (this.tokens.matches1(tt.bracketL)) { + this.tokens.copyTokenWithPrefix(`${fields[fieldIndex].initializerName}() {this`); + } else if (this.tokens.matches1(tt.string) || this.tokens.matches1(tt.num)) { + this.tokens.copyTokenWithPrefix(`${fields[fieldIndex].initializerName}() {this[`); + needsCloseBrace = true; + } else { + this.tokens.copyTokenWithPrefix(`${fields[fieldIndex].initializerName}() {this.`); + } + while (this.tokens.currentIndex() < fields[fieldIndex].end) { + if (needsCloseBrace && this.tokens.currentIndex() === fields[fieldIndex].equalsIndex) { + this.tokens.appendCode("]"); + } + this.processToken(); + } + this.tokens.appendCode("}"); + 
fieldIndex++; + } else if ( + rangeToRemoveIndex < rangesToRemove.length && + this.tokens.currentIndex() >= rangesToRemove[rangeToRemoveIndex].start + ) { + if (this.tokens.currentIndex() < rangesToRemove[rangeToRemoveIndex].end) { + this.tokens.removeInitialToken(); + } + while (this.tokens.currentIndex() < rangesToRemove[rangeToRemoveIndex].end) { + this.tokens.removeToken(); + } + rangeToRemoveIndex++; + } else if (this.tokens.currentIndex() === constructorInsertPos) { + this.tokens.copyToken(); + if (needsConstructorInit) { + this.tokens.appendCode( + `;${this.makeConstructorInitCode( + constructorInitializerStatements, + instanceInitializerNames, + className, + )};`, + ); + } + this.processToken(); + } else { + this.processToken(); + } + } + this.tokens.copyExpectedToken(tt.braceR); + } + + makeConstructorInitCode( + constructorInitializerStatements, + instanceInitializerNames, + className, + ) { + return [ + ...constructorInitializerStatements, + ...instanceInitializerNames.map((name) => `${className}.prototype.${name}.call(this)`), + ].join(";"); + } + + /** + * Normally it's ok to simply remove type tokens, but we need to be more careful when dealing with + * arrow function return types since they can confuse the parser. In that case, we want to move + * the close-paren to the same line as the arrow. + * + * See https://github.com/alangpierce/sucrase/issues/391 for more details. + */ + processPossibleArrowParamEnd() { + if (this.tokens.matches2(tt.parenR, tt.colon) && this.tokens.tokenAtRelativeIndex(1).isType) { + let nextNonTypeIndex = this.tokens.currentIndex() + 1; + // Look ahead to see if this is an arrow function or something else. + while (this.tokens.tokens[nextNonTypeIndex].isType) { + nextNonTypeIndex++; + } + if (this.tokens.matches1AtIndex(nextNonTypeIndex, tt.arrow)) { + this.tokens.removeInitialToken(); + while (this.tokens.currentIndex() < nextNonTypeIndex) { + this.tokens.removeToken(); + } + this.tokens.replaceTokenTrimmingLeftWhitespace(") =>"); + return true; + } + } + return false; + } + + /** + * An async arrow function might be of the form: + * + * async < + * T + * >() => {} + * + * in which case, removing the type parameters will cause a syntax error. Detect this case and + * move the open-paren earlier. + */ + processPossibleAsyncArrowWithTypeParams() { + if ( + !this.tokens.matchesContextual(ContextualKeyword._async) && + !this.tokens.matches1(tt._async) + ) { + return false; + } + const nextToken = this.tokens.tokenAtRelativeIndex(1); + if (nextToken.type !== tt.lessThan || !nextToken.isType) { + return false; + } + + let nextNonTypeIndex = this.tokens.currentIndex() + 1; + // Look ahead to see if this is an arrow function or something else. + while (this.tokens.tokens[nextNonTypeIndex].isType) { + nextNonTypeIndex++; + } + if (this.tokens.matches1AtIndex(nextNonTypeIndex, tt.parenL)) { + this.tokens.replaceToken("async ("); + this.tokens.removeInitialToken(); + while (this.tokens.currentIndex() < nextNonTypeIndex) { + this.tokens.removeToken(); + } + this.tokens.removeToken(); + // We ate a ( token, so we need to process the tokens in between and then the ) token so that + // we remain balanced. 
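An illustration of the field-to-initializer-method rewrite performed by processClassBody (paraphrased; __init is a name claimed from the name manager and may be suffixed):

import {transform} from "sucrase";

const out = transform("class Counter { count = 0; }", {transforms: []}).code;
// Roughly:
//   class Counter {constructor() { Counter.prototype.__init.call(this); } __init() {this.count = 0}}
// The field body becomes an __init() method, and because the class has no
// constructor and no superclass, a constructor that invokes it is
// synthesized right after the open brace.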
+ this.processBalancedCode(); + this.processToken(); + return true; + } + return false; + } + + processPossibleTypeRange() { + if (this.tokens.currentToken().isType) { + this.tokens.removeInitialToken(); + while (this.tokens.currentToken().isType) { + this.tokens.removeToken(); + } + return true; + } + return false; + } + + shiftMappings( + mappings, + prefixLength, + ) { + for (let i = 0; i < mappings.length; i++) { + const mapping = mappings[i]; + if (mapping !== undefined) { + mappings[i] = mapping + prefixLength; + } + } + return mappings; + } +} diff --git a/node_modules/sucrase/dist/esm/transformers/Transformer.js b/node_modules/sucrase/dist/esm/transformers/Transformer.js new file mode 100644 index 0000000..5e8e9e7 --- /dev/null +++ b/node_modules/sucrase/dist/esm/transformers/Transformer.js @@ -0,0 +1,16 @@ +export default class Transformer { + // Return true if anything was processed, false otherwise. + + + getPrefixCode() { + return ""; + } + + getHoistedCode() { + return ""; + } + + getSuffixCode() { + return ""; + } +} diff --git a/node_modules/sucrase/dist/esm/transformers/TypeScriptTransformer.js b/node_modules/sucrase/dist/esm/transformers/TypeScriptTransformer.js new file mode 100644 index 0000000..67e1274 --- /dev/null +++ b/node_modules/sucrase/dist/esm/transformers/TypeScriptTransformer.js @@ -0,0 +1,279 @@ + +import {TokenType as tt} from "../parser/tokenizer/types"; + +import isIdentifier from "../util/isIdentifier"; + +import Transformer from "./Transformer"; + +export default class TypeScriptTransformer extends Transformer { + constructor( + rootTransformer, + tokens, + isImportsTransformEnabled, + ) { + super();this.rootTransformer = rootTransformer;this.tokens = tokens;this.isImportsTransformEnabled = isImportsTransformEnabled;; + } + + process() { + if ( + this.rootTransformer.processPossibleArrowParamEnd() || + this.rootTransformer.processPossibleAsyncArrowWithTypeParams() || + this.rootTransformer.processPossibleTypeRange() + ) { + return true; + } + if ( + this.tokens.matches1(tt._public) || + this.tokens.matches1(tt._protected) || + this.tokens.matches1(tt._private) || + this.tokens.matches1(tt._abstract) || + this.tokens.matches1(tt._readonly) || + this.tokens.matches1(tt._override) || + this.tokens.matches1(tt.nonNullAssertion) + ) { + this.tokens.removeInitialToken(); + return true; + } + if (this.tokens.matches1(tt._enum) || this.tokens.matches2(tt._const, tt._enum)) { + this.processEnum(); + return true; + } + if ( + this.tokens.matches2(tt._export, tt._enum) || + this.tokens.matches3(tt._export, tt._const, tt._enum) + ) { + this.processEnum(true); + return true; + } + return false; + } + + processEnum(isExport = false) { + // We might have "export const enum", so just remove all relevant tokens. 
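A sketch of the modifier stripping done in process() above (TS-only keywords and type tokens are simply removed):

import {transform} from "sucrase";

const out = transform(
  "class Point { private readonly x: number = 0; }",
  {transforms: ["typescript"], disableESTransforms: true},
).code;
// Roughly: class Point {  x = 0; }
// `private`, `readonly`, and the type annotation are dropped. With
// disableESTransforms left unset, the remaining field would additionally be
// rewritten into an __init method as shown earlier.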
+ this.tokens.removeInitialToken(); + while (this.tokens.matches1(tt._const) || this.tokens.matches1(tt._enum)) { + this.tokens.removeToken(); + } + const enumName = this.tokens.identifierName(); + this.tokens.removeToken(); + if (isExport && !this.isImportsTransformEnabled) { + this.tokens.appendCode("export "); + } + this.tokens.appendCode(`var ${enumName}; (function (${enumName})`); + this.tokens.copyExpectedToken(tt.braceL); + this.processEnumBody(enumName); + this.tokens.copyExpectedToken(tt.braceR); + if (isExport && this.isImportsTransformEnabled) { + this.tokens.appendCode(`)(${enumName} || (exports.${enumName} = ${enumName} = {}));`); + } else { + this.tokens.appendCode(`)(${enumName} || (${enumName} = {}));`); + } + } + + /** + * Transform an enum into equivalent JS. This has complexity in a few places: + * - TS allows string enums, numeric enums, and a mix of the two styles within an enum. + * - Enum keys are allowed to be referenced in later enum values. + * - Enum keys are allowed to be strings. + * - When enum values are omitted, they should follow an auto-increment behavior. + */ + processEnumBody(enumName) { + // Code that can be used to reference the previous enum member, or null if this is the first + // enum member. + let previousValueCode = null; + while (true) { + if (this.tokens.matches1(tt.braceR)) { + break; + } + const {nameStringCode, variableName} = this.extractEnumKeyInfo(this.tokens.currentToken()); + this.tokens.removeInitialToken(); + + if ( + this.tokens.matches3(tt.eq, tt.string, tt.comma) || + this.tokens.matches3(tt.eq, tt.string, tt.braceR) + ) { + this.processStringLiteralEnumMember(enumName, nameStringCode, variableName); + } else if (this.tokens.matches1(tt.eq)) { + this.processExplicitValueEnumMember(enumName, nameStringCode, variableName); + } else { + this.processImplicitValueEnumMember( + enumName, + nameStringCode, + variableName, + previousValueCode, + ); + } + if (this.tokens.matches1(tt.comma)) { + this.tokens.removeToken(); + } + + if (variableName != null) { + previousValueCode = variableName; + } else { + previousValueCode = `${enumName}[${nameStringCode}]`; + } + } + } + + /** + * Detect name information about this enum key, which will be used to determine which code to emit + * and whether we should declare a variable as part of this declaration. + * + * Some cases to keep in mind: + * - Enum keys can be implicitly referenced later, e.g. `X = 1, Y = X`. In Sucrase, we implement + * this by declaring a variable `X` so that later expressions can use it. + * - In addition to the usual identifier key syntax, enum keys are allowed to be string literals, + * e.g. `"hello world" = 3,`. Template literal syntax is NOT allowed. + * - Even if the enum key is defined as a string literal, it may still be referenced by identifier + * later, e.g. `"X" = 1, Y = X`. That means that we need to detect whether or not a string + * literal is identifier-like and emit a variable if so, even if the declaration did not use an + * identifier. + * - Reserved keywords like `break` are valid enum keys, but are not valid to be referenced later + * and would be a syntax error if we emitted a variable, so we need to skip the variable + * declaration in those cases. + * + * The variableName return value captures these nuances: if non-null, we can and must emit a + * variable declaration, and if null, we can't and shouldn't. 
+ */ + extractEnumKeyInfo(nameToken) { + if (nameToken.type === tt.name) { + const name = this.tokens.identifierNameForToken(nameToken); + return { + nameStringCode: `"${name}"`, + variableName: isIdentifier(name) ? name : null, + }; + } else if (nameToken.type === tt.string) { + const name = this.tokens.stringValueForToken(nameToken); + return { + nameStringCode: this.tokens.code.slice(nameToken.start, nameToken.end), + variableName: isIdentifier(name) ? name : null, + }; + } else { + throw new Error("Expected name or string at beginning of enum element."); + } + } + + /** + * Handle an enum member where the RHS is just a string literal (not omitted, not a number, and + * not a complex expression). This is the typical form for TS string enums, and in this case, we + * do *not* create a reverse mapping. + * + * This is called after deleting the key token, when the token processor is at the equals sign. + * + * Example 1: + * someKey = "some value" + * -> + * const someKey = "some value"; MyEnum["someKey"] = someKey; + * + * Example 2: + * "some key" = "some value" + * -> + * MyEnum["some key"] = "some value"; + */ + processStringLiteralEnumMember( + enumName, + nameStringCode, + variableName, + ) { + if (variableName != null) { + this.tokens.appendCode(`const ${variableName}`); + // = + this.tokens.copyToken(); + // value string + this.tokens.copyToken(); + this.tokens.appendCode(`; ${enumName}[${nameStringCode}] = ${variableName};`); + } else { + this.tokens.appendCode(`${enumName}[${nameStringCode}]`); + // = + this.tokens.copyToken(); + // value string + this.tokens.copyToken(); + this.tokens.appendCode(";"); + } + } + + /** + * Handle an enum member initialized with an expression on the right-hand side (other than a + * string literal). In these cases, we should transform the expression and emit code that sets up + * a reverse mapping. + * + * The TypeScript implementation of this operation distinguishes between expressions that can be + * "constant folded" at compile time (i.e. consist of number literals and simple math operations + * on those numbers) and ones that are dynamic. For constant expressions, it emits the resolved + * numeric value, and auto-incrementing is only allowed in that case. Evaluating expressions at + * compile time would add significant complexity to Sucrase, so Sucrase instead leaves the + * expression as-is, and will later emit something like `MyEnum["previousKey"] + 1` to implement + * auto-incrementing. + * + * This is called after deleting the key token, when the token processor is at the equals sign. 
+ * + * Example 1: + * someKey = 1 + 1 + * -> + * const someKey = 1 + 1; MyEnum[MyEnum["someKey"] = someKey] = "someKey"; + * + * Example 2: + * "some key" = 1 + 1 + * -> + * MyEnum[MyEnum["some key"] = 1 + 1] = "some key"; + */ + processExplicitValueEnumMember( + enumName, + nameStringCode, + variableName, + ) { + const rhsEndIndex = this.tokens.currentToken().rhsEndIndex; + if (rhsEndIndex == null) { + throw new Error("Expected rhsEndIndex on enum assign."); + } + + if (variableName != null) { + this.tokens.appendCode(`const ${variableName}`); + this.tokens.copyToken(); + while (this.tokens.currentIndex() < rhsEndIndex) { + this.rootTransformer.processToken(); + } + this.tokens.appendCode( + `; ${enumName}[${enumName}[${nameStringCode}] = ${variableName}] = ${nameStringCode};`, + ); + } else { + this.tokens.appendCode(`${enumName}[${enumName}[${nameStringCode}]`); + this.tokens.copyToken(); + while (this.tokens.currentIndex() < rhsEndIndex) { + this.rootTransformer.processToken(); + } + this.tokens.appendCode(`] = ${nameStringCode};`); + } + } + + /** + * Handle an enum member with no right-hand side expression. In this case, the value is the + * previous value plus 1, or 0 if there was no previous value. We should also always emit a + * reverse mapping. + * + * Example 1: + * someKey2 + * -> + * const someKey2 = someKey1 + 1; MyEnum[MyEnum["someKey2"] = someKey2] = "someKey2"; + * + * Example 2: + * "some key 2" + * -> + * MyEnum[MyEnum["some key 2"] = someKey1 + 1] = "some key 2"; + */ + processImplicitValueEnumMember( + enumName, + nameStringCode, + variableName, + previousValueCode, + ) { + let valueCode = previousValueCode != null ? `${previousValueCode} + 1` : "0"; + if (variableName != null) { + this.tokens.appendCode(`const ${variableName} = ${valueCode}; `); + valueCode = variableName; + } + this.tokens.appendCode( + `${enumName}[${enumName}[${nameStringCode}] = ${valueCode}] = ${nameStringCode};`, + ); + } +} diff --git a/node_modules/sucrase/dist/esm/util/elideImportEquals.js b/node_modules/sucrase/dist/esm/util/elideImportEquals.js new file mode 100644 index 0000000..6b18a7a --- /dev/null +++ b/node_modules/sucrase/dist/esm/util/elideImportEquals.js @@ -0,0 +1,29 @@ +import {TokenType as tt} from "../parser/tokenizer/types"; + + +export default function elideImportEquals(tokens) { + // import + tokens.removeInitialToken(); + // name + tokens.removeToken(); + // = + tokens.removeToken(); + // name or require + tokens.removeToken(); + // Handle either `import A = require('A')` or `import A = B.C.D`. + if (tokens.matches1(tt.parenL)) { + // ( + tokens.removeToken(); + // path string + tokens.removeToken(); + // ) + tokens.removeToken(); + } else { + while (tokens.matches1(tt.dot)) { + // . 
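Putting the three member cases together, a sketch of the emitted code for a small numeric enum (paraphrased; actual whitespace differs):

import {transform} from "sucrase";

const out = transform(
  "enum Color { Red, Green = 5, Blue }",
  {transforms: ["typescript"]},
).code;
// Roughly:
//   var Color; (function (Color) {
//     const Red = 0; Color[Color["Red"] = Red] = "Red";
//     const Green = 5; Color[Color["Green"] = Green] = "Green";
//     const Blue = Green + 1; Color[Color["Blue"] = Blue] = "Blue";
//   })(Color || (Color = {}));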
+ tokens.removeToken(); + // name + tokens.removeToken(); + } + } +} diff --git a/node_modules/sucrase/dist/esm/util/formatTokens.js b/node_modules/sucrase/dist/esm/util/formatTokens.js new file mode 100644 index 0000000..eea07d2 --- /dev/null +++ b/node_modules/sucrase/dist/esm/util/formatTokens.js @@ -0,0 +1,74 @@ +import LinesAndColumns from "lines-and-columns"; + + +import {formatTokenType} from "../parser/tokenizer/types"; + +export default function formatTokens(code, tokens) { + if (tokens.length === 0) { + return ""; + } + + const tokenKeys = Object.keys(tokens[0]).filter( + (k) => k !== "type" && k !== "value" && k !== "start" && k !== "end" && k !== "loc", + ); + const typeKeys = Object.keys(tokens[0].type).filter((k) => k !== "label" && k !== "keyword"); + + const headings = ["Location", "Label", "Raw", ...tokenKeys, ...typeKeys]; + + const lines = new LinesAndColumns(code); + const rows = [headings, ...tokens.map(getTokenComponents)]; + const padding = headings.map(() => 0); + for (const components of rows) { + for (let i = 0; i < components.length; i++) { + padding[i] = Math.max(padding[i], components[i].length); + } + } + return rows + .map((components) => components.map((component, i) => component.padEnd(padding[i])).join(" ")) + .join("\n"); + + function getTokenComponents(token) { + const raw = code.slice(token.start, token.end); + return [ + formatRange(token.start, token.end), + formatTokenType(token.type), + truncate(String(raw), 14), + // @ts-ignore: Intentional dynamic access by key. + ...tokenKeys.map((key) => formatValue(token[key], key)), + // @ts-ignore: Intentional dynamic access by key. + ...typeKeys.map((key) => formatValue(token.type[key], key)), + ]; + } + + // eslint-disable-next-line @typescript-eslint/no-explicit-any + function formatValue(value, key) { + if (value === true) { + return key; + } else if (value === false || value === null) { + return ""; + } else { + return String(value); + } + } + + function formatRange(start, end) { + return `${formatPos(start)}-${formatPos(end)}`; + } + + function formatPos(pos) { + const location = lines.locationForIndex(pos); + if (!location) { + return "Unknown"; + } else { + return `${location.line + 1}:${location.column + 1}`; + } + } +} + +function truncate(s, length) { + if (s.length > length) { + return `${s.slice(0, length - 3)}...`; + } else { + return s; + } +} diff --git a/node_modules/sucrase/dist/esm/util/getClassInfo.js b/node_modules/sucrase/dist/esm/util/getClassInfo.js new file mode 100644 index 0000000..04de5c5 --- /dev/null +++ b/node_modules/sucrase/dist/esm/util/getClassInfo.js @@ -0,0 +1,347 @@ + + +import {ContextualKeyword} from "../parser/tokenizer/keywords"; +import {TokenType as tt} from "../parser/tokenizer/types"; + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +/** + * Get information about the class fields for this class, given a token processor pointing to the + * open-brace at the start of the class. 
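The formatTokens helper above backs sucrase's token-debugging output. A usage sketch, assuming the package-level getFormattedTokens export wraps it (verify against the installed version before relying on this):

import {getFormattedTokens} from "sucrase";

// Prints one row per token: location range, token label, raw text, and any
// token/type fields that are set.
console.log(getFormattedTokens("const x = 1;", {transforms: []}));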
+ */ +export default function getClassInfo( + rootTransformer, + tokens, + nameManager, + disableESTransforms, +) { + const snapshot = tokens.snapshot(); + + const headerInfo = processClassHeader(tokens); + + let constructorInitializerStatements = []; + const instanceInitializerNames = []; + const staticInitializerNames = []; + let constructorInsertPos = null; + const fields = []; + const rangesToRemove = []; + + const classContextId = tokens.currentToken().contextId; + if (classContextId == null) { + throw new Error("Expected non-null class context ID on class open-brace."); + } + + tokens.nextToken(); + while (!tokens.matchesContextIdAndLabel(tt.braceR, classContextId)) { + if (tokens.matchesContextual(ContextualKeyword._constructor) && !tokens.currentToken().isType) { + ({constructorInitializerStatements, constructorInsertPos} = processConstructor(tokens)); + } else if (tokens.matches1(tt.semi)) { + if (!disableESTransforms) { + rangesToRemove.push({start: tokens.currentIndex(), end: tokens.currentIndex() + 1}); + } + tokens.nextToken(); + } else if (tokens.currentToken().isType) { + tokens.nextToken(); + } else { + // Either a method or a field. Skip to the identifier part. + const statementStartIndex = tokens.currentIndex(); + let isStatic = false; + let isESPrivate = false; + let isDeclareOrAbstract = false; + while (isAccessModifier(tokens.currentToken())) { + if (tokens.matches1(tt._static)) { + isStatic = true; + } + if (tokens.matches1(tt.hash)) { + isESPrivate = true; + } + if (tokens.matches1(tt._declare) || tokens.matches1(tt._abstract)) { + isDeclareOrAbstract = true; + } + tokens.nextToken(); + } + if (isStatic && tokens.matches1(tt.braceL)) { + // This is a static block, so don't process it in any special way. + skipToNextClassElement(tokens, classContextId); + continue; + } + if (isESPrivate) { + // Sucrase doesn't attempt to transpile private fields; just leave them as-is. + skipToNextClassElement(tokens, classContextId); + continue; + } + if ( + tokens.matchesContextual(ContextualKeyword._constructor) && + !tokens.currentToken().isType + ) { + ({constructorInitializerStatements, constructorInsertPos} = processConstructor(tokens)); + continue; + } + + const nameStartIndex = tokens.currentIndex(); + skipFieldName(tokens); + if (tokens.matches1(tt.lessThan) || tokens.matches1(tt.parenL)) { + // This is a method, so nothing to process. + skipToNextClassElement(tokens, classContextId); + continue; + } + // There might be a type annotation that we need to skip. + while (tokens.currentToken().isType) { + tokens.nextToken(); + } + if (tokens.matches1(tt.eq)) { + const equalsIndex = tokens.currentIndex(); + // This is an initializer, so we need to wrap in an initializer method. + const valueEnd = tokens.currentToken().rhsEndIndex; + if (valueEnd == null) { + throw new Error("Expected rhsEndIndex on class field assignment."); + } + tokens.nextToken(); + while (tokens.currentIndex() < valueEnd) { + rootTransformer.processToken(); + } + let initializerName; + if (isStatic) { + initializerName = nameManager.claimFreeName("__initStatic"); + staticInitializerNames.push(initializerName); + } else { + initializerName = nameManager.claimFreeName("__init"); + instanceInitializerNames.push(initializerName); + } + // Fields start at the name, so `static x = 1;` has a field range of `x = 1;`. 
+ fields.push({ + initializerName, + equalsIndex, + start: nameStartIndex, + end: tokens.currentIndex(), + }); + } else if (!disableESTransforms || isDeclareOrAbstract) { + // This is a regular field declaration, like `x;`. With the class transform enabled, we just + // remove the line so that no output is produced. With the class transform disabled, we + // usually want to preserve the declaration (but still strip types), but if the `declare` + // or `abstract` keyword is specified, we should remove the line to avoid initializing the + // value to undefined. + rangesToRemove.push({start: statementStartIndex, end: tokens.currentIndex()}); + } + } + } + + tokens.restoreToSnapshot(snapshot); + if (disableESTransforms) { + // With ES transforms disabled, we don't want to transform regular class + // field declarations, and we don't need to do any additional tricks to + // reference the constructor for static init, but we still need to transform + // TypeScript field initializers defined as constructor parameters and we + // still need to remove `declare` fields. For now, we run the same code + // path but omit any field information, as if the class had no field + // declarations. In the future, when we fully drop the class fields + // transform, we can simplify this code significantly. + return { + headerInfo, + constructorInitializerStatements, + instanceInitializerNames: [], + staticInitializerNames: [], + constructorInsertPos, + fields: [], + rangesToRemove, + }; + } else { + return { + headerInfo, + constructorInitializerStatements, + instanceInitializerNames, + staticInitializerNames, + constructorInsertPos, + fields, + rangesToRemove, + }; + } +} + +/** + * Move the token processor to the next method/field in the class. + * + * To do that, we seek forward to the next start of a class name (either an open + * bracket or an identifier, or the closing curly brace), then seek backward to + * include any access modifiers. + */ +function skipToNextClassElement(tokens, classContextId) { + tokens.nextToken(); + while (tokens.currentToken().contextId !== classContextId) { + tokens.nextToken(); + } + while (isAccessModifier(tokens.tokenAtRelativeIndex(-1))) { + tokens.previousToken(); + } +} + +function processClassHeader(tokens) { + const classToken = tokens.currentToken(); + const contextId = classToken.contextId; + if (contextId == null) { + throw new Error("Expected context ID on class token."); + } + const isExpression = classToken.isExpression; + if (isExpression == null) { + throw new Error("Expected isExpression on class token."); + } + let className = null; + let hasSuperclass = false; + tokens.nextToken(); + if (tokens.matches1(tt.name)) { + className = tokens.identifierName(); + } + while (!tokens.matchesContextIdAndLabel(tt.braceL, contextId)) { + // If this has a superclass, there will always be an `extends` token. If it doesn't have a + // superclass, only type parameters and `implements` clauses can show up here, all of which + // consist only of type tokens. A declaration like `class A {` should *not* count + // as having a superclass. + if (tokens.matches1(tt._extends) && !tokens.currentToken().isType) { + hasSuperclass = true; + } + tokens.nextToken(); + } + return {isExpression, className, hasSuperclass}; +} + +/** + * Extract useful information out of a constructor, starting at the "constructor" name. 
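A sketch of the skip/remove decisions made in the loop above (output paraphrased):

import {transform} from "sucrase";

const out = transform(
  "class A { declare legacy: string; static { setup(); } #secret = 1; }",
  {transforms: ["typescript"]},
).code;
// Roughly: class A {  static { setup(); } #secret = 1; }
// The `declare` field is removed outright (it must not initialize a value),
// the static block is copied through untouched, and the ES private field is
// left as-is because Sucrase does not transpile private fields.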
+ */ +function processConstructor(tokens) + + + { + const constructorInitializerStatements = []; + + tokens.nextToken(); + const constructorContextId = tokens.currentToken().contextId; + if (constructorContextId == null) { + throw new Error("Expected context ID on open-paren starting constructor params."); + } + // Advance through parameters looking for access modifiers. + while (!tokens.matchesContextIdAndLabel(tt.parenR, constructorContextId)) { + if (tokens.currentToken().contextId === constructorContextId) { + // Current token is an open paren or comma just before a param, so check + // that param for access modifiers. + tokens.nextToken(); + if (isAccessModifier(tokens.currentToken())) { + tokens.nextToken(); + while (isAccessModifier(tokens.currentToken())) { + tokens.nextToken(); + } + const token = tokens.currentToken(); + if (token.type !== tt.name) { + throw new Error("Expected identifier after access modifiers in constructor arg."); + } + const name = tokens.identifierNameForToken(token); + constructorInitializerStatements.push(`this.${name} = ${name}`); + } + } else { + tokens.nextToken(); + } + } + // ) + tokens.nextToken(); + let constructorInsertPos = tokens.currentIndex(); + + // Advance through body looking for a super call. + let foundSuperCall = false; + while (!tokens.matchesContextIdAndLabel(tt.braceR, constructorContextId)) { + if (!foundSuperCall && tokens.matches2(tt._super, tt.parenL)) { + tokens.nextToken(); + const superCallContextId = tokens.currentToken().contextId; + if (superCallContextId == null) { + throw new Error("Expected a context ID on the super call"); + } + while (!tokens.matchesContextIdAndLabel(tt.parenR, superCallContextId)) { + tokens.nextToken(); + } + constructorInsertPos = tokens.currentIndex(); + foundSuperCall = true; + } + tokens.nextToken(); + } + // } + tokens.nextToken(); + + return {constructorInitializerStatements, constructorInsertPos}; +} + +/** + * Determine if this is any token that can go before the name in a method/field. + */ +function isAccessModifier(token) { + return [ + tt._async, + tt._get, + tt._set, + tt.plus, + tt.minus, + tt._readonly, + tt._static, + tt._public, + tt._private, + tt._protected, + tt._override, + tt._abstract, + tt.star, + tt._declare, + tt.hash, + ].includes(token.type); +} + +/** + * The next token or set of tokens is either an identifier or an expression in square brackets, for + * a method or field name. + */ +function skipFieldName(tokens) { + if (tokens.matches1(tt.bracketL)) { + const startToken = tokens.currentToken(); + const classContextId = startToken.contextId; + if (classContextId == null) { + throw new Error("Expected class context ID on computed name open bracket."); + } + while (!tokens.matchesContextIdAndLabel(tt.bracketR, classContextId)) { + tokens.nextToken(); + } + tokens.nextToken(); + } else { + tokens.nextToken(); + } +} diff --git a/node_modules/sucrase/dist/esm/util/getDeclarationInfo.js b/node_modules/sucrase/dist/esm/util/getDeclarationInfo.js new file mode 100644 index 0000000..ade9a81 --- /dev/null +++ b/node_modules/sucrase/dist/esm/util/getDeclarationInfo.js @@ -0,0 +1,40 @@ +import {isTopLevelDeclaration} from "../parser/tokenizer"; +import {TokenType as tt} from "../parser/tokenizer/types"; + + + + + + + +export const EMPTY_DECLARATION_INFO = { + typeDeclarations: new Set(), + valueDeclarations: new Set(), +}; + +/** + * Get all top-level identifiers that should be preserved when exported in TypeScript. 
+ * + * Examples: + * - If an identifier is declared as `const x`, then `export {x}` should be preserved. + * - If it's declared as `type x`, then `export {x}` should be removed. + * - If it's declared as both `const x` and `type x`, then the export should be preserved. + * - Classes and enums should be preserved (even though they also introduce types). + * - Imported identifiers should be preserved since we don't have enough information to + * rule them out. --isolatedModules disallows re-exports, which catches errors here. + */ +export default function getDeclarationInfo(tokens) { + const typeDeclarations = new Set(); + const valueDeclarations = new Set(); + for (let i = 0; i < tokens.tokens.length; i++) { + const token = tokens.tokens[i]; + if (token.type === tt.name && isTopLevelDeclaration(token)) { + if (token.isType) { + typeDeclarations.add(tokens.identifierNameForToken(token)); + } else { + valueDeclarations.add(tokens.identifierNameForToken(token)); + } + } + } + return {typeDeclarations, valueDeclarations}; +} diff --git a/node_modules/sucrase/dist/esm/util/getIdentifierNames.js b/node_modules/sucrase/dist/esm/util/getIdentifierNames.js new file mode 100644 index 0000000..5b85901 --- /dev/null +++ b/node_modules/sucrase/dist/esm/util/getIdentifierNames.js @@ -0,0 +1,15 @@ + +import {TokenType as tt} from "../parser/tokenizer/types"; + +/** + * Get all identifier names in the code, in order, including duplicates. + */ +export default function getIdentifierNames(code, tokens) { + const names = []; + for (const token of tokens) { + if (token.type === tt.name) { + names.push(code.slice(token.start, token.end)); + } + } + return names; +} diff --git a/node_modules/sucrase/dist/esm/util/getImportExportSpecifierInfo.js b/node_modules/sucrase/dist/esm/util/getImportExportSpecifierInfo.js new file mode 100644 index 0000000..3dc6d2c --- /dev/null +++ b/node_modules/sucrase/dist/esm/util/getImportExportSpecifierInfo.js @@ -0,0 +1,92 @@ +import {TokenType as tt} from "../parser/tokenizer/types"; + + + + + + + + + + + + + + + + +/** + * Determine information about this named import or named export specifier. + * + * This syntax is the `a` from statements like these: + * import {A} from "./foo"; + * export {A}; + * export {A} from "./foo"; + * + * As it turns out, we can exactly characterize the syntax meaning by simply + * counting the number of tokens, which can be from 1 to 4: + * {A} + * {type A} + * {A as B} + * {type A as B} + * + * In the type case, we never actually need the names in practice, so don't get + * them. + * + * TODO: There's some redundancy with the type detection here and the isType + * flag that's already present on tokens in TS mode. This function could + * potentially be simplified and/or pushed to the call sites to avoid the object + * allocation. 
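+ *
+ * For example, `{A as B}` takes the three-token path below and yields
+ * {isType: false, leftName: "A", rightName: "B"}, with endIndex pointing at the
+ * following comma or closing brace.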
+ */ +export default function getImportExportSpecifierInfo( + tokens, + index = tokens.currentIndex(), +) { + let endIndex = index + 1; + if (isSpecifierEnd(tokens, endIndex)) { + // import {A} + const name = tokens.identifierNameAtIndex(index); + return { + isType: false, + leftName: name, + rightName: name, + endIndex, + }; + } + endIndex++; + if (isSpecifierEnd(tokens, endIndex)) { + // import {type A} + return { + isType: true, + leftName: null, + rightName: null, + endIndex, + }; + } + endIndex++; + if (isSpecifierEnd(tokens, endIndex)) { + // import {A as B} + return { + isType: false, + leftName: tokens.identifierNameAtIndex(index), + rightName: tokens.identifierNameAtIndex(index + 2), + endIndex, + }; + } + endIndex++; + if (isSpecifierEnd(tokens, endIndex)) { + // import {type A as B} + return { + isType: true, + leftName: null, + rightName: null, + endIndex, + }; + } + throw new Error(`Unexpected import/export specifier at ${index}`); +} + +function isSpecifierEnd(tokens, index) { + const token = tokens.tokens[index]; + return token.type === tt.braceR || token.type === tt.comma; +} diff --git a/node_modules/sucrase/dist/esm/util/getJSXPragmaInfo.js b/node_modules/sucrase/dist/esm/util/getJSXPragmaInfo.js new file mode 100644 index 0000000..9972342 --- /dev/null +++ b/node_modules/sucrase/dist/esm/util/getJSXPragmaInfo.js @@ -0,0 +1,22 @@ + + + + + + + + + +export default function getJSXPragmaInfo(options) { + const [base, suffix] = splitPragma(options.jsxPragma || "React.createElement"); + const [fragmentBase, fragmentSuffix] = splitPragma(options.jsxFragmentPragma || "React.Fragment"); + return {base, suffix, fragmentBase, fragmentSuffix}; +} + +function splitPragma(pragma) { + let dotIndex = pragma.indexOf("."); + if (dotIndex === -1) { + dotIndex = pragma.length; + } + return [pragma.slice(0, dotIndex), pragma.slice(dotIndex)]; +} diff --git a/node_modules/sucrase/dist/esm/util/getNonTypeIdentifiers.js b/node_modules/sucrase/dist/esm/util/getNonTypeIdentifiers.js new file mode 100644 index 0000000..24c73dd --- /dev/null +++ b/node_modules/sucrase/dist/esm/util/getNonTypeIdentifiers.js @@ -0,0 +1,43 @@ + +import {IdentifierRole} from "../parser/tokenizer"; +import {TokenType, TokenType as tt} from "../parser/tokenizer/types"; + +import {startsWithLowerCase} from "../transformers/JSXTransformer"; +import getJSXPragmaInfo from "./getJSXPragmaInfo"; + +export function getNonTypeIdentifiers(tokens, options) { + const jsxPragmaInfo = getJSXPragmaInfo(options); + const nonTypeIdentifiers = new Set(); + for (let i = 0; i < tokens.tokens.length; i++) { + const token = tokens.tokens[i]; + if ( + token.type === tt.name && + !token.isType && + (token.identifierRole === IdentifierRole.Access || + token.identifierRole === IdentifierRole.ObjectShorthand || + token.identifierRole === IdentifierRole.ExportAccess) && + !token.shadowsGlobal + ) { + nonTypeIdentifiers.add(tokens.identifierNameForToken(token)); + } + if (token.type === tt.jsxTagStart) { + nonTypeIdentifiers.add(jsxPragmaInfo.base); + } + if ( + token.type === tt.jsxTagStart && + i + 1 < tokens.tokens.length && + tokens.tokens[i + 1].type === tt.jsxTagEnd + ) { + nonTypeIdentifiers.add(jsxPragmaInfo.base); + nonTypeIdentifiers.add(jsxPragmaInfo.fragmentBase); + } + if (token.type === tt.jsxName && token.identifierRole === IdentifierRole.Access) { + const identifierName = tokens.identifierNameForToken(token); + // Lower-case single-component tag names like "div" don't count. 
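+      // e.g. `<div />` adds nothing, `<Foo />` adds "Foo", and `<foo.bar />` still adds "foo"
+      // because of the dot check below.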
+ if (!startsWithLowerCase(identifierName) || tokens.tokens[i + 1].type === TokenType.dot) { + nonTypeIdentifiers.add(tokens.identifierNameForToken(token)); + } + } + } + return nonTypeIdentifiers; +} diff --git a/node_modules/sucrase/dist/esm/util/getTSImportedNames.js b/node_modules/sucrase/dist/esm/util/getTSImportedNames.js new file mode 100644 index 0000000..523181a --- /dev/null +++ b/node_modules/sucrase/dist/esm/util/getTSImportedNames.js @@ -0,0 +1,84 @@ +import {TokenType as tt} from "../parser/tokenizer/types"; + +import getImportExportSpecifierInfo from "./getImportExportSpecifierInfo"; + +/** + * Special case code to scan for imported names in ESM TypeScript. We need to do this so we can + * properly get globals so we can compute shadowed globals. + * + * This is similar to logic in CJSImportProcessor, but trimmed down to avoid logic with CJS + * replacement and flow type imports. + */ +export default function getTSImportedNames(tokens) { + const importedNames = new Set(); + for (let i = 0; i < tokens.tokens.length; i++) { + if ( + tokens.matches1AtIndex(i, tt._import) && + !tokens.matches3AtIndex(i, tt._import, tt.name, tt.eq) + ) { + collectNamesForImport(tokens, i, importedNames); + } + } + return importedNames; +} + +function collectNamesForImport( + tokens, + index, + importedNames, +) { + index++; + + if (tokens.matches1AtIndex(index, tt.parenL)) { + // Dynamic import, so nothing to do + return; + } + + if (tokens.matches1AtIndex(index, tt.name)) { + importedNames.add(tokens.identifierNameAtIndex(index)); + index++; + if (tokens.matches1AtIndex(index, tt.comma)) { + index++; + } + } + + if (tokens.matches1AtIndex(index, tt.star)) { + // * as + index += 2; + importedNames.add(tokens.identifierNameAtIndex(index)); + index++; + } + + if (tokens.matches1AtIndex(index, tt.braceL)) { + index++; + collectNamesForNamedImport(tokens, index, importedNames); + } +} + +function collectNamesForNamedImport( + tokens, + index, + importedNames, +) { + while (true) { + if (tokens.matches1AtIndex(index, tt.braceR)) { + return; + } + + const specifierInfo = getImportExportSpecifierInfo(tokens, index); + index = specifierInfo.endIndex; + if (!specifierInfo.isType) { + importedNames.add(specifierInfo.rightName); + } + + if (tokens.matches2AtIndex(index, tt.comma, tt.braceR)) { + return; + } else if (tokens.matches1AtIndex(index, tt.braceR)) { + return; + } else if (tokens.matches1AtIndex(index, tt.comma)) { + index++; + } else { + throw new Error(`Unexpected token: ${JSON.stringify(tokens.tokens[index])}`); + } + } +} diff --git a/node_modules/sucrase/dist/esm/util/isAsyncOperation.js b/node_modules/sucrase/dist/esm/util/isAsyncOperation.js new file mode 100644 index 0000000..af40e9a --- /dev/null +++ b/node_modules/sucrase/dist/esm/util/isAsyncOperation.js @@ -0,0 +1,38 @@ +import {ContextualKeyword} from "../parser/tokenizer/keywords"; + + +/** + * Determine whether this optional chain or nullish coalescing operation has any await statements in + * it. If so, we'll need to transpile to an async operation. + * + * We compute this by walking the length of the operation and returning true if we see an await + * keyword used as a real await (rather than an object key or property access). Nested optional + * chain/nullish operations need to be tracked but don't silence await, but a nested async function + * (or any other nested scope) will make the await not count. 
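+ *
+ * For example (roughly): `a ?? await b` counts as an async operation, while
+ * `a ?? (async () => await b)` does not, because that await sits at a deeper scope depth.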
+ */ +export default function isAsyncOperation(tokens) { + let index = tokens.currentIndex(); + let depth = 0; + const startToken = tokens.currentToken(); + do { + const token = tokens.tokens[index]; + if (token.isOptionalChainStart) { + depth++; + } + if (token.isOptionalChainEnd) { + depth--; + } + depth += token.numNullishCoalesceStarts; + depth -= token.numNullishCoalesceEnds; + + if ( + token.contextualKeyword === ContextualKeyword._await && + token.identifierRole == null && + token.scopeDepth === startToken.scopeDepth + ) { + return true; + } + index += 1; + } while (depth > 0 && index < tokens.tokens.length); + return false; +} diff --git a/node_modules/sucrase/dist/esm/util/isIdentifier.js b/node_modules/sucrase/dist/esm/util/isIdentifier.js new file mode 100644 index 0000000..4a62ff6 --- /dev/null +++ b/node_modules/sucrase/dist/esm/util/isIdentifier.js @@ -0,0 +1,81 @@ +import {IS_IDENTIFIER_CHAR, IS_IDENTIFIER_START} from "../parser/util/identifier"; + +// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Lexical_grammar +// Hard-code a list of reserved words rather than trying to use keywords or contextual keywords +// from the parser, since currently there are various exceptions, like `package` being reserved +// but unused and various contextual keywords being reserved. Note that we assume that all code +// compiled by Sucrase is in a module, so strict mode words and await are all considered reserved +// here. +const RESERVED_WORDS = new Set([ + // Reserved keywords as of ECMAScript 2015 + "break", + "case", + "catch", + "class", + "const", + "continue", + "debugger", + "default", + "delete", + "do", + "else", + "export", + "extends", + "finally", + "for", + "function", + "if", + "import", + "in", + "instanceof", + "new", + "return", + "super", + "switch", + "this", + "throw", + "try", + "typeof", + "var", + "void", + "while", + "with", + "yield", + // Future reserved keywords + "enum", + "implements", + "interface", + "let", + "package", + "private", + "protected", + "public", + "static", + "await", + // Literals that cannot be used as identifiers + "false", + "null", + "true", +]); + +/** + * Determine if the given name is a legal variable name. + * + * This is needed when transforming TypeScript enums; if an enum key is a valid + * variable name, it might be referenced later in the enum, so we need to + * declare a variable. + */ +export default function isIdentifier(name) { + if (name.length === 0) { + return false; + } + if (!IS_IDENTIFIER_START[name.charCodeAt(0)]) { + return false; + } + for (let i = 1; i < name.length; i++) { + if (!IS_IDENTIFIER_CHAR[name.charCodeAt(i)]) { + return false; + } + } + return !RESERVED_WORDS.has(name); +} diff --git a/node_modules/sucrase/dist/esm/util/removeMaybeImportAssertion.js b/node_modules/sucrase/dist/esm/util/removeMaybeImportAssertion.js new file mode 100644 index 0000000..0be63a5 --- /dev/null +++ b/node_modules/sucrase/dist/esm/util/removeMaybeImportAssertion.js @@ -0,0 +1,19 @@ +import {ContextualKeyword} from "../parser/tokenizer/keywords"; +import {TokenType as tt} from "../parser/tokenizer/types"; + + +/** + * Starting at a potential `assert` token remove the import assertion if there + * is one. 
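+ *
+ * For example, in `import data from "./mod.json" assert {type: "json"};`, the
+ * `assert {type: "json"}` tokens are removed.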
+ */ +export function removeMaybeImportAssertion(tokens) { + if (tokens.matches2(tt.name, tt.braceL) && tokens.matchesContextual(ContextualKeyword._assert)) { + // assert + tokens.removeToken(); + // { + tokens.removeToken(); + tokens.removeBalancedCode(); + // } + tokens.removeToken(); + } +} diff --git a/node_modules/sucrase/dist/esm/util/shouldElideDefaultExport.js b/node_modules/sucrase/dist/esm/util/shouldElideDefaultExport.js new file mode 100644 index 0000000..5eaa6f0 --- /dev/null +++ b/node_modules/sucrase/dist/esm/util/shouldElideDefaultExport.js @@ -0,0 +1,37 @@ +import {TokenType as tt} from "../parser/tokenizer/types"; + + + +/** + * Common method sharing code between CJS and ESM cases, since they're the same here. + */ +export default function shouldElideDefaultExport( + isTypeScriptTransformEnabled, + tokens, + declarationInfo, +) { + if (!isTypeScriptTransformEnabled) { + return false; + } + const exportToken = tokens.currentToken(); + if (exportToken.rhsEndIndex == null) { + throw new Error("Expected non-null rhsEndIndex on export token."); + } + // The export must be of the form `export default a` or `export default a;`. + const numTokens = exportToken.rhsEndIndex - tokens.currentIndex(); + if ( + numTokens !== 3 && + !(numTokens === 4 && tokens.matches1AtIndex(exportToken.rhsEndIndex - 1, tt.semi)) + ) { + return false; + } + const identifierToken = tokens.tokenAtRelativeIndex(2); + if (identifierToken.type !== tt.name) { + return false; + } + const exportedName = tokens.identifierNameForToken(identifierToken); + return ( + declarationInfo.typeDeclarations.has(exportedName) && + !declarationInfo.valueDeclarations.has(exportedName) + ); +} diff --git a/node_modules/sucrase/dist/identifyShadowedGlobals.js b/node_modules/sucrase/dist/identifyShadowedGlobals.js new file mode 100644 index 0000000..4f90407 --- /dev/null +++ b/node_modules/sucrase/dist/identifyShadowedGlobals.js @@ -0,0 +1,97 @@ +"use strict";Object.defineProperty(exports, "__esModule", {value: true}); + + + +var _tokenizer = require('./parser/tokenizer'); + +var _types = require('./parser/tokenizer/types'); + + +/** + * Traverse the given tokens and modify them if necessary to indicate that some names shadow global + * variables. + */ + function identifyShadowedGlobals( + tokens, + scopes, + globalNames, +) { + if (!hasShadowedGlobals(tokens, globalNames)) { + return; + } + markShadowedGlobals(tokens, scopes, globalNames); +} exports.default = identifyShadowedGlobals; + +/** + * We can do a fast up-front check to see if there are any declarations to global names. If not, + * then there's no point in computing scope assignments. + */ +// Exported for testing. + function hasShadowedGlobals(tokens, globalNames) { + for (const token of tokens.tokens) { + if ( + token.type === _types.TokenType.name && + _tokenizer.isNonTopLevelDeclaration.call(void 0, token) && + globalNames.has(tokens.identifierNameForToken(token)) + ) { + return true; + } + } + return false; +} exports.hasShadowedGlobals = hasShadowedGlobals; + +function markShadowedGlobals( + tokens, + scopes, + globalNames, +) { + const scopeStack = []; + let scopeIndex = scopes.length - 1; + // Scopes were generated at completion time, so they're sorted by end index, so we can maintain a + // good stack by going backwards through them. 
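+  // For example, if `x` is one of the global names (say, an import) and a function declares its
+  // own `x` parameter, every `x` token in that function scope is marked with shadowsGlobal so
+  // that later transforms leave it alone.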
+ for (let i = tokens.tokens.length - 1; ; i--) { + while (scopeStack.length > 0 && scopeStack[scopeStack.length - 1].startTokenIndex === i + 1) { + scopeStack.pop(); + } + while (scopeIndex >= 0 && scopes[scopeIndex].endTokenIndex === i + 1) { + scopeStack.push(scopes[scopeIndex]); + scopeIndex--; + } + // Process scopes after the last iteration so we can make sure we pop all of them. + if (i < 0) { + break; + } + + const token = tokens.tokens[i]; + const name = tokens.identifierNameForToken(token); + if (scopeStack.length > 1 && token.type === _types.TokenType.name && globalNames.has(name)) { + if (_tokenizer.isBlockScopedDeclaration.call(void 0, token)) { + markShadowedForScope(scopeStack[scopeStack.length - 1], tokens, name); + } else if (_tokenizer.isFunctionScopedDeclaration.call(void 0, token)) { + let stackIndex = scopeStack.length - 1; + while (stackIndex > 0 && !scopeStack[stackIndex].isFunctionScope) { + stackIndex--; + } + if (stackIndex < 0) { + throw new Error("Did not find parent function scope."); + } + markShadowedForScope(scopeStack[stackIndex], tokens, name); + } + } + } + if (scopeStack.length > 0) { + throw new Error("Expected empty scope stack after processing file."); + } +} + +function markShadowedForScope(scope, tokens, name) { + for (let i = scope.startTokenIndex; i < scope.endTokenIndex; i++) { + const token = tokens.tokens[i]; + if ( + (token.type === _types.TokenType.name || token.type === _types.TokenType.jsxName) && + tokens.identifierNameForToken(token) === name + ) { + token.shadowsGlobal = true; + } + } +} diff --git a/node_modules/sucrase/dist/index.js b/node_modules/sucrase/dist/index.js new file mode 100644 index 0000000..97258ab --- /dev/null +++ b/node_modules/sucrase/dist/index.js @@ -0,0 +1,133 @@ +"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; }var _CJSImportProcessor = require('./CJSImportProcessor'); var _CJSImportProcessor2 = _interopRequireDefault(_CJSImportProcessor); +var _computeSourceMap = require('./computeSourceMap'); var _computeSourceMap2 = _interopRequireDefault(_computeSourceMap); +var _HelperManager = require('./HelperManager'); +var _identifyShadowedGlobals = require('./identifyShadowedGlobals'); var _identifyShadowedGlobals2 = _interopRequireDefault(_identifyShadowedGlobals); +var _NameManager = require('./NameManager'); var _NameManager2 = _interopRequireDefault(_NameManager); +var _Options = require('./Options'); +var _parser = require('./parser'); + +var _TokenProcessor = require('./TokenProcessor'); var _TokenProcessor2 = _interopRequireDefault(_TokenProcessor); +var _RootTransformer = require('./transformers/RootTransformer'); var _RootTransformer2 = _interopRequireDefault(_RootTransformer); +var _formatTokens = require('./util/formatTokens'); var _formatTokens2 = _interopRequireDefault(_formatTokens); +var _getTSImportedNames = require('./util/getTSImportedNames'); var _getTSImportedNames2 = _interopRequireDefault(_getTSImportedNames); + + + + + + + + + + + + + + + + + + + + function getVersion() { + /* istanbul ignore next */ + return "3.32.0"; +} exports.getVersion = getVersion; + + function transform(code, options) { + _Options.validateOptions.call(void 0, options); + try { + const sucraseContext = getSucraseContext(code, options); + const transformer = new (0, _RootTransformer2.default)( + sucraseContext, + options.transforms, + Boolean(options.enableLegacyBabel5ModuleInterop), + options, + ); + const transformerResult = transformer.transform(); + let result = {code: transformerResult.code}; + if (options.sourceMapOptions) { + if (!options.filePath) { + throw new Error("filePath must be specified when generating a source map."); + } + result = { + ...result, + sourceMap: _computeSourceMap2.default.call(void 0, + transformerResult, + options.filePath, + options.sourceMapOptions, + code, + sucraseContext.tokenProcessor.tokens, + ), + }; + } + return result; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + } catch (e) { + if (options.filePath) { + e.message = `Error transforming ${options.filePath}: ${e.message}`; + } + throw e; + } +} exports.transform = transform; + +/** + * Return a string representation of the sucrase tokens, mostly useful for + * diagnostic purposes. + */ + function getFormattedTokens(code, options) { + const tokens = getSucraseContext(code, options).tokenProcessor.tokens; + return _formatTokens2.default.call(void 0, code, tokens); +} exports.getFormattedTokens = getFormattedTokens; + +/** + * Call into the parser/tokenizer and do some further preprocessing: + * - Come up with a set of used names so that we can assign new names. + * - Preprocess all import/export statements so we know which globals we are interested in. + * - Compute situations where any of those globals are shadowed. + * + * In the future, some of these preprocessing steps can be skipped based on what actual work is + * being done. 
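+ *
+ * For reference, the public `transform` entry point above is typically called as, e.g.,
+ * `transform(code, {transforms: ["typescript", "imports"]})`, which returns `{code}` (plus
+ * `sourceMap` when sourceMapOptions is given).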
+ */ +function getSucraseContext(code, options) { + const isJSXEnabled = options.transforms.includes("jsx"); + const isTypeScriptEnabled = options.transforms.includes("typescript"); + const isFlowEnabled = options.transforms.includes("flow"); + const disableESTransforms = options.disableESTransforms === true; + const file = _parser.parse.call(void 0, code, isJSXEnabled, isTypeScriptEnabled, isFlowEnabled); + const tokens = file.tokens; + const scopes = file.scopes; + + const nameManager = new (0, _NameManager2.default)(code, tokens); + const helperManager = new (0, _HelperManager.HelperManager)(nameManager); + const tokenProcessor = new (0, _TokenProcessor2.default)( + code, + tokens, + isFlowEnabled, + disableESTransforms, + helperManager, + ); + const enableLegacyTypeScriptModuleInterop = Boolean(options.enableLegacyTypeScriptModuleInterop); + + let importProcessor = null; + if (options.transforms.includes("imports")) { + importProcessor = new (0, _CJSImportProcessor2.default)( + nameManager, + tokenProcessor, + enableLegacyTypeScriptModuleInterop, + options, + options.transforms.includes("typescript"), + helperManager, + ); + importProcessor.preprocessTokens(); + // We need to mark shadowed globals after processing imports so we know that the globals are, + // but before type-only import pruning, since that relies on shadowing information. + _identifyShadowedGlobals2.default.call(void 0, tokenProcessor, scopes, importProcessor.getGlobalNames()); + if (options.transforms.includes("typescript")) { + importProcessor.pruneTypeOnlyImports(); + } + } else if (options.transforms.includes("typescript")) { + _identifyShadowedGlobals2.default.call(void 0, tokenProcessor, scopes, _getTSImportedNames2.default.call(void 0, tokenProcessor)); + } + return {tokenProcessor, scopes, nameManager, importProcessor, helperManager}; +} diff --git a/node_modules/sucrase/dist/parser/index.js b/node_modules/sucrase/dist/parser/index.js new file mode 100644 index 0000000..35d832a --- /dev/null +++ b/node_modules/sucrase/dist/parser/index.js @@ -0,0 +1,31 @@ +"use strict";Object.defineProperty(exports, "__esModule", {value: true}); + +var _base = require('./traverser/base'); +var _index = require('./traverser/index'); + + class File { + + + + constructor(tokens, scopes) { + this.tokens = tokens; + this.scopes = scopes; + } +} exports.File = File; + + function parse( + input, + isJSXEnabled, + isTypeScriptEnabled, + isFlowEnabled, +) { + if (isFlowEnabled && isTypeScriptEnabled) { + throw new Error("Cannot combine flow and typescript plugins."); + } + _base.initParser.call(void 0, input, isJSXEnabled, isTypeScriptEnabled, isFlowEnabled); + const result = _index.parseFile.call(void 0, ); + if (_base.state.error) { + throw _base.augmentError.call(void 0, _base.state.error); + } + return result; +} exports.parse = parse; diff --git a/node_modules/sucrase/dist/parser/plugins/flow.js b/node_modules/sucrase/dist/parser/plugins/flow.js new file mode 100644 index 0000000..9401895 --- /dev/null +++ b/node_modules/sucrase/dist/parser/plugins/flow.js @@ -0,0 +1,1105 @@ +"use strict";Object.defineProperty(exports, "__esModule", {value: true});/* eslint max-len: 0 */ + + + + + + + + + + +var _index = require('../tokenizer/index'); +var _keywords = require('../tokenizer/keywords'); +var _types = require('../tokenizer/types'); +var _base = require('../traverser/base'); + + + + + + + + + + + + + +var _expression = require('../traverser/expression'); + + + + + + + + +var _statement = require('../traverser/statement'); + + + + + + + 
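+// (The runs of blank lines in this file appear to be left behind by sucrase's own type
+// erasure: type-only imports and annotations are blanked out in place when its TypeScript
+// sources are compiled to JS.)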
+ + +var _util = require('../traverser/util'); + +function isMaybeDefaultImport(lookahead) { + return ( + (lookahead.type === _types.TokenType.name || !!(lookahead.type & _types.TokenType.IS_KEYWORD)) && + lookahead.contextualKeyword !== _keywords.ContextualKeyword._from + ); +} + +function flowParseTypeInitialiser(tok) { + const oldIsType = _index.pushTypeContext.call(void 0, 0); + _util.expect.call(void 0, tok || _types.TokenType.colon); + flowParseType(); + _index.popTypeContext.call(void 0, oldIsType); +} + +function flowParsePredicate() { + _util.expect.call(void 0, _types.TokenType.modulo); + _util.expectContextual.call(void 0, _keywords.ContextualKeyword._checks); + if (_index.eat.call(void 0, _types.TokenType.parenL)) { + _expression.parseExpression.call(void 0, ); + _util.expect.call(void 0, _types.TokenType.parenR); + } +} + +function flowParseTypeAndPredicateInitialiser() { + const oldIsType = _index.pushTypeContext.call(void 0, 0); + _util.expect.call(void 0, _types.TokenType.colon); + if (_index.match.call(void 0, _types.TokenType.modulo)) { + flowParsePredicate(); + } else { + flowParseType(); + if (_index.match.call(void 0, _types.TokenType.modulo)) { + flowParsePredicate(); + } + } + _index.popTypeContext.call(void 0, oldIsType); +} + +function flowParseDeclareClass() { + _index.next.call(void 0, ); + flowParseInterfaceish(/* isClass */ true); +} + +function flowParseDeclareFunction() { + _index.next.call(void 0, ); + _expression.parseIdentifier.call(void 0, ); + + if (_index.match.call(void 0, _types.TokenType.lessThan)) { + flowParseTypeParameterDeclaration(); + } + + _util.expect.call(void 0, _types.TokenType.parenL); + flowParseFunctionTypeParams(); + _util.expect.call(void 0, _types.TokenType.parenR); + + flowParseTypeAndPredicateInitialiser(); + + _util.semicolon.call(void 0, ); +} + +function flowParseDeclare() { + if (_index.match.call(void 0, _types.TokenType._class)) { + flowParseDeclareClass(); + } else if (_index.match.call(void 0, _types.TokenType._function)) { + flowParseDeclareFunction(); + } else if (_index.match.call(void 0, _types.TokenType._var)) { + flowParseDeclareVariable(); + } else if (_util.eatContextual.call(void 0, _keywords.ContextualKeyword._module)) { + if (_index.eat.call(void 0, _types.TokenType.dot)) { + flowParseDeclareModuleExports(); + } else { + flowParseDeclareModule(); + } + } else if (_util.isContextual.call(void 0, _keywords.ContextualKeyword._type)) { + flowParseDeclareTypeAlias(); + } else if (_util.isContextual.call(void 0, _keywords.ContextualKeyword._opaque)) { + flowParseDeclareOpaqueType(); + } else if (_util.isContextual.call(void 0, _keywords.ContextualKeyword._interface)) { + flowParseDeclareInterface(); + } else if (_index.match.call(void 0, _types.TokenType._export)) { + flowParseDeclareExportDeclaration(); + } else { + _util.unexpected.call(void 0, ); + } +} + +function flowParseDeclareVariable() { + _index.next.call(void 0, ); + flowParseTypeAnnotatableIdentifier(); + _util.semicolon.call(void 0, ); +} + +function flowParseDeclareModule() { + if (_index.match.call(void 0, _types.TokenType.string)) { + _expression.parseExprAtom.call(void 0, ); + } else { + _expression.parseIdentifier.call(void 0, ); + } + + _util.expect.call(void 0, _types.TokenType.braceL); + while (!_index.match.call(void 0, _types.TokenType.braceR) && !_base.state.error) { + if (_index.match.call(void 0, _types.TokenType._import)) { + _index.next.call(void 0, ); + _statement.parseImport.call(void 0, ); + } else { + _util.unexpected.call(void 0, ); + } 
+ } + _util.expect.call(void 0, _types.TokenType.braceR); +} + +function flowParseDeclareExportDeclaration() { + _util.expect.call(void 0, _types.TokenType._export); + + if (_index.eat.call(void 0, _types.TokenType._default)) { + if (_index.match.call(void 0, _types.TokenType._function) || _index.match.call(void 0, _types.TokenType._class)) { + // declare export default class ... + // declare export default function ... + flowParseDeclare(); + } else { + // declare export default [type]; + flowParseType(); + _util.semicolon.call(void 0, ); + } + } else if ( + _index.match.call(void 0, _types.TokenType._var) || // declare export var ... + _index.match.call(void 0, _types.TokenType._function) || // declare export function ... + _index.match.call(void 0, _types.TokenType._class) || // declare export class ... + _util.isContextual.call(void 0, _keywords.ContextualKeyword._opaque) // declare export opaque .. + ) { + flowParseDeclare(); + } else if ( + _index.match.call(void 0, _types.TokenType.star) || // declare export * from '' + _index.match.call(void 0, _types.TokenType.braceL) || // declare export {} ... + _util.isContextual.call(void 0, _keywords.ContextualKeyword._interface) || // declare export interface ... + _util.isContextual.call(void 0, _keywords.ContextualKeyword._type) || // declare export type ... + _util.isContextual.call(void 0, _keywords.ContextualKeyword._opaque) // declare export opaque type ... + ) { + _statement.parseExport.call(void 0, ); + } else { + _util.unexpected.call(void 0, ); + } +} + +function flowParseDeclareModuleExports() { + _util.expectContextual.call(void 0, _keywords.ContextualKeyword._exports); + flowParseTypeAnnotation(); + _util.semicolon.call(void 0, ); +} + +function flowParseDeclareTypeAlias() { + _index.next.call(void 0, ); + flowParseTypeAlias(); +} + +function flowParseDeclareOpaqueType() { + _index.next.call(void 0, ); + flowParseOpaqueType(true); +} + +function flowParseDeclareInterface() { + _index.next.call(void 0, ); + flowParseInterfaceish(); +} + +// Interfaces + +function flowParseInterfaceish(isClass = false) { + flowParseRestrictedIdentifier(); + + if (_index.match.call(void 0, _types.TokenType.lessThan)) { + flowParseTypeParameterDeclaration(); + } + + if (_index.eat.call(void 0, _types.TokenType._extends)) { + do { + flowParseInterfaceExtends(); + } while (!isClass && _index.eat.call(void 0, _types.TokenType.comma)); + } + + if (_util.isContextual.call(void 0, _keywords.ContextualKeyword._mixins)) { + _index.next.call(void 0, ); + do { + flowParseInterfaceExtends(); + } while (_index.eat.call(void 0, _types.TokenType.comma)); + } + + if (_util.isContextual.call(void 0, _keywords.ContextualKeyword._implements)) { + _index.next.call(void 0, ); + do { + flowParseInterfaceExtends(); + } while (_index.eat.call(void 0, _types.TokenType.comma)); + } + + flowParseObjectType(isClass, false, isClass); +} + +function flowParseInterfaceExtends() { + flowParseQualifiedTypeIdentifier(false); + if (_index.match.call(void 0, _types.TokenType.lessThan)) { + flowParseTypeParameterInstantiation(); + } +} + +function flowParseInterface() { + flowParseInterfaceish(); +} + +function flowParseRestrictedIdentifier() { + _expression.parseIdentifier.call(void 0, ); +} + +function flowParseTypeAlias() { + flowParseRestrictedIdentifier(); + + if (_index.match.call(void 0, _types.TokenType.lessThan)) { + flowParseTypeParameterDeclaration(); + } + + flowParseTypeInitialiser(_types.TokenType.eq); + _util.semicolon.call(void 0, ); +} + +function 
flowParseOpaqueType(declare) { + _util.expectContextual.call(void 0, _keywords.ContextualKeyword._type); + flowParseRestrictedIdentifier(); + + if (_index.match.call(void 0, _types.TokenType.lessThan)) { + flowParseTypeParameterDeclaration(); + } + + // Parse the supertype + if (_index.match.call(void 0, _types.TokenType.colon)) { + flowParseTypeInitialiser(_types.TokenType.colon); + } + + if (!declare) { + flowParseTypeInitialiser(_types.TokenType.eq); + } + _util.semicolon.call(void 0, ); +} + +function flowParseTypeParameter() { + flowParseVariance(); + flowParseTypeAnnotatableIdentifier(); + + if (_index.eat.call(void 0, _types.TokenType.eq)) { + flowParseType(); + } +} + + function flowParseTypeParameterDeclaration() { + const oldIsType = _index.pushTypeContext.call(void 0, 0); + // istanbul ignore else: this condition is already checked at all call sites + if (_index.match.call(void 0, _types.TokenType.lessThan) || _index.match.call(void 0, _types.TokenType.typeParameterStart)) { + _index.next.call(void 0, ); + } else { + _util.unexpected.call(void 0, ); + } + + do { + flowParseTypeParameter(); + if (!_index.match.call(void 0, _types.TokenType.greaterThan)) { + _util.expect.call(void 0, _types.TokenType.comma); + } + } while (!_index.match.call(void 0, _types.TokenType.greaterThan) && !_base.state.error); + _util.expect.call(void 0, _types.TokenType.greaterThan); + _index.popTypeContext.call(void 0, oldIsType); +} exports.flowParseTypeParameterDeclaration = flowParseTypeParameterDeclaration; + +function flowParseTypeParameterInstantiation() { + const oldIsType = _index.pushTypeContext.call(void 0, 0); + _util.expect.call(void 0, _types.TokenType.lessThan); + while (!_index.match.call(void 0, _types.TokenType.greaterThan) && !_base.state.error) { + flowParseType(); + if (!_index.match.call(void 0, _types.TokenType.greaterThan)) { + _util.expect.call(void 0, _types.TokenType.comma); + } + } + _util.expect.call(void 0, _types.TokenType.greaterThan); + _index.popTypeContext.call(void 0, oldIsType); +} + +function flowParseInterfaceType() { + _util.expectContextual.call(void 0, _keywords.ContextualKeyword._interface); + if (_index.eat.call(void 0, _types.TokenType._extends)) { + do { + flowParseInterfaceExtends(); + } while (_index.eat.call(void 0, _types.TokenType.comma)); + } + flowParseObjectType(false, false, false); +} + +function flowParseObjectPropertyKey() { + if (_index.match.call(void 0, _types.TokenType.num) || _index.match.call(void 0, _types.TokenType.string)) { + _expression.parseExprAtom.call(void 0, ); + } else { + _expression.parseIdentifier.call(void 0, ); + } +} + +function flowParseObjectTypeIndexer() { + // Note: bracketL has already been consumed + if (_index.lookaheadType.call(void 0, ) === _types.TokenType.colon) { + flowParseObjectPropertyKey(); + flowParseTypeInitialiser(); + } else { + flowParseType(); + } + _util.expect.call(void 0, _types.TokenType.bracketR); + flowParseTypeInitialiser(); +} + +function flowParseObjectTypeInternalSlot() { + // Note: both bracketL have already been consumed + flowParseObjectPropertyKey(); + _util.expect.call(void 0, _types.TokenType.bracketR); + _util.expect.call(void 0, _types.TokenType.bracketR); + if (_index.match.call(void 0, _types.TokenType.lessThan) || _index.match.call(void 0, _types.TokenType.parenL)) { + flowParseObjectTypeMethodish(); + } else { + _index.eat.call(void 0, _types.TokenType.question); + flowParseTypeInitialiser(); + } +} + +function flowParseObjectTypeMethodish() { + if (_index.match.call(void 0, 
_types.TokenType.lessThan)) { + flowParseTypeParameterDeclaration(); + } + + _util.expect.call(void 0, _types.TokenType.parenL); + while (!_index.match.call(void 0, _types.TokenType.parenR) && !_index.match.call(void 0, _types.TokenType.ellipsis) && !_base.state.error) { + flowParseFunctionTypeParam(); + if (!_index.match.call(void 0, _types.TokenType.parenR)) { + _util.expect.call(void 0, _types.TokenType.comma); + } + } + + if (_index.eat.call(void 0, _types.TokenType.ellipsis)) { + flowParseFunctionTypeParam(); + } + _util.expect.call(void 0, _types.TokenType.parenR); + flowParseTypeInitialiser(); +} + +function flowParseObjectTypeCallProperty() { + flowParseObjectTypeMethodish(); +} + +function flowParseObjectType(allowStatic, allowExact, allowProto) { + let endDelim; + if (allowExact && _index.match.call(void 0, _types.TokenType.braceBarL)) { + _util.expect.call(void 0, _types.TokenType.braceBarL); + endDelim = _types.TokenType.braceBarR; + } else { + _util.expect.call(void 0, _types.TokenType.braceL); + endDelim = _types.TokenType.braceR; + } + + while (!_index.match.call(void 0, endDelim) && !_base.state.error) { + if (allowProto && _util.isContextual.call(void 0, _keywords.ContextualKeyword._proto)) { + const lookahead = _index.lookaheadType.call(void 0, ); + if (lookahead !== _types.TokenType.colon && lookahead !== _types.TokenType.question) { + _index.next.call(void 0, ); + allowStatic = false; + } + } + if (allowStatic && _util.isContextual.call(void 0, _keywords.ContextualKeyword._static)) { + const lookahead = _index.lookaheadType.call(void 0, ); + if (lookahead !== _types.TokenType.colon && lookahead !== _types.TokenType.question) { + _index.next.call(void 0, ); + } + } + + flowParseVariance(); + + if (_index.eat.call(void 0, _types.TokenType.bracketL)) { + if (_index.eat.call(void 0, _types.TokenType.bracketL)) { + flowParseObjectTypeInternalSlot(); + } else { + flowParseObjectTypeIndexer(); + } + } else if (_index.match.call(void 0, _types.TokenType.parenL) || _index.match.call(void 0, _types.TokenType.lessThan)) { + flowParseObjectTypeCallProperty(); + } else { + if (_util.isContextual.call(void 0, _keywords.ContextualKeyword._get) || _util.isContextual.call(void 0, _keywords.ContextualKeyword._set)) { + const lookahead = _index.lookaheadType.call(void 0, ); + if (lookahead === _types.TokenType.name || lookahead === _types.TokenType.string || lookahead === _types.TokenType.num) { + _index.next.call(void 0, ); + } + } + + flowParseObjectTypeProperty(); + } + + flowObjectTypeSemicolon(); + } + + _util.expect.call(void 0, endDelim); +} + +function flowParseObjectTypeProperty() { + if (_index.match.call(void 0, _types.TokenType.ellipsis)) { + _util.expect.call(void 0, _types.TokenType.ellipsis); + if (!_index.eat.call(void 0, _types.TokenType.comma)) { + _index.eat.call(void 0, _types.TokenType.semi); + } + // Explicit inexact object syntax. 
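+    // e.g. the trailing `...` in `{foo: number, ...}`, with no type following it.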
+ if (_index.match.call(void 0, _types.TokenType.braceR)) { + return; + } + flowParseType(); + } else { + flowParseObjectPropertyKey(); + if (_index.match.call(void 0, _types.TokenType.lessThan) || _index.match.call(void 0, _types.TokenType.parenL)) { + // This is a method property + flowParseObjectTypeMethodish(); + } else { + _index.eat.call(void 0, _types.TokenType.question); + flowParseTypeInitialiser(); + } + } +} + +function flowObjectTypeSemicolon() { + if (!_index.eat.call(void 0, _types.TokenType.semi) && !_index.eat.call(void 0, _types.TokenType.comma) && !_index.match.call(void 0, _types.TokenType.braceR) && !_index.match.call(void 0, _types.TokenType.braceBarR)) { + _util.unexpected.call(void 0, ); + } +} + +function flowParseQualifiedTypeIdentifier(initialIdAlreadyParsed) { + if (!initialIdAlreadyParsed) { + _expression.parseIdentifier.call(void 0, ); + } + while (_index.eat.call(void 0, _types.TokenType.dot)) { + _expression.parseIdentifier.call(void 0, ); + } +} + +function flowParseGenericType() { + flowParseQualifiedTypeIdentifier(true); + if (_index.match.call(void 0, _types.TokenType.lessThan)) { + flowParseTypeParameterInstantiation(); + } +} + +function flowParseTypeofType() { + _util.expect.call(void 0, _types.TokenType._typeof); + flowParsePrimaryType(); +} + +function flowParseTupleType() { + _util.expect.call(void 0, _types.TokenType.bracketL); + // We allow trailing commas + while (_base.state.pos < _base.input.length && !_index.match.call(void 0, _types.TokenType.bracketR)) { + flowParseType(); + if (_index.match.call(void 0, _types.TokenType.bracketR)) { + break; + } + _util.expect.call(void 0, _types.TokenType.comma); + } + _util.expect.call(void 0, _types.TokenType.bracketR); +} + +function flowParseFunctionTypeParam() { + const lookahead = _index.lookaheadType.call(void 0, ); + if (lookahead === _types.TokenType.colon || lookahead === _types.TokenType.question) { + _expression.parseIdentifier.call(void 0, ); + _index.eat.call(void 0, _types.TokenType.question); + flowParseTypeInitialiser(); + } else { + flowParseType(); + } +} + +function flowParseFunctionTypeParams() { + while (!_index.match.call(void 0, _types.TokenType.parenR) && !_index.match.call(void 0, _types.TokenType.ellipsis) && !_base.state.error) { + flowParseFunctionTypeParam(); + if (!_index.match.call(void 0, _types.TokenType.parenR)) { + _util.expect.call(void 0, _types.TokenType.comma); + } + } + if (_index.eat.call(void 0, _types.TokenType.ellipsis)) { + flowParseFunctionTypeParam(); + } +} + +// The parsing of types roughly parallels the parsing of expressions, and +// primary types are kind of like primary expressions...they're the +// primitives with which other types are constructed. 
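+// For example: `Foo<T>`, `{a: number}`, `{| a: number |}`, `[number, string]`,
+// `<T>(x: T) => T`, `-1`, `"literal"`, and `typeof x` are all handled here.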
+function flowParsePrimaryType() { + let isGroupedType = false; + const oldNoAnonFunctionType = _base.state.noAnonFunctionType; + + switch (_base.state.type) { + case _types.TokenType.name: { + if (_util.isContextual.call(void 0, _keywords.ContextualKeyword._interface)) { + flowParseInterfaceType(); + return; + } + _expression.parseIdentifier.call(void 0, ); + flowParseGenericType(); + return; + } + + case _types.TokenType.braceL: + flowParseObjectType(false, false, false); + return; + + case _types.TokenType.braceBarL: + flowParseObjectType(false, true, false); + return; + + case _types.TokenType.bracketL: + flowParseTupleType(); + return; + + case _types.TokenType.lessThan: + flowParseTypeParameterDeclaration(); + _util.expect.call(void 0, _types.TokenType.parenL); + flowParseFunctionTypeParams(); + _util.expect.call(void 0, _types.TokenType.parenR); + _util.expect.call(void 0, _types.TokenType.arrow); + flowParseType(); + return; + + case _types.TokenType.parenL: + _index.next.call(void 0, ); + + // Check to see if this is actually a grouped type + if (!_index.match.call(void 0, _types.TokenType.parenR) && !_index.match.call(void 0, _types.TokenType.ellipsis)) { + if (_index.match.call(void 0, _types.TokenType.name)) { + const token = _index.lookaheadType.call(void 0, ); + isGroupedType = token !== _types.TokenType.question && token !== _types.TokenType.colon; + } else { + isGroupedType = true; + } + } + + if (isGroupedType) { + _base.state.noAnonFunctionType = false; + flowParseType(); + _base.state.noAnonFunctionType = oldNoAnonFunctionType; + + // A `,` or a `) =>` means this is an anonymous function type + if ( + _base.state.noAnonFunctionType || + !(_index.match.call(void 0, _types.TokenType.comma) || (_index.match.call(void 0, _types.TokenType.parenR) && _index.lookaheadType.call(void 0, ) === _types.TokenType.arrow)) + ) { + _util.expect.call(void 0, _types.TokenType.parenR); + return; + } else { + // Eat a comma if there is one + _index.eat.call(void 0, _types.TokenType.comma); + } + } + + flowParseFunctionTypeParams(); + + _util.expect.call(void 0, _types.TokenType.parenR); + _util.expect.call(void 0, _types.TokenType.arrow); + flowParseType(); + return; + + case _types.TokenType.minus: + _index.next.call(void 0, ); + _expression.parseLiteral.call(void 0, ); + return; + + case _types.TokenType.string: + case _types.TokenType.num: + case _types.TokenType._true: + case _types.TokenType._false: + case _types.TokenType._null: + case _types.TokenType._this: + case _types.TokenType._void: + case _types.TokenType.star: + _index.next.call(void 0, ); + return; + + default: + if (_base.state.type === _types.TokenType._typeof) { + flowParseTypeofType(); + return; + } else if (_base.state.type & _types.TokenType.IS_KEYWORD) { + _index.next.call(void 0, ); + _base.state.tokens[_base.state.tokens.length - 1].type = _types.TokenType.name; + return; + } + } + + _util.unexpected.call(void 0, ); +} + +function flowParsePostfixType() { + flowParsePrimaryType(); + while (!_util.canInsertSemicolon.call(void 0, ) && (_index.match.call(void 0, _types.TokenType.bracketL) || _index.match.call(void 0, _types.TokenType.questionDot))) { + _index.eat.call(void 0, _types.TokenType.questionDot); + _util.expect.call(void 0, _types.TokenType.bracketL); + if (_index.eat.call(void 0, _types.TokenType.bracketR)) { + // Array type + } else { + // Indexed access type + flowParseType(); + _util.expect.call(void 0, _types.TokenType.bracketR); + } + } +} + +function flowParsePrefixType() { + if (_index.eat.call(void 
0, _types.TokenType.question)) { + flowParsePrefixType(); + } else { + flowParsePostfixType(); + } +} + +function flowParseAnonFunctionWithoutParens() { + flowParsePrefixType(); + if (!_base.state.noAnonFunctionType && _index.eat.call(void 0, _types.TokenType.arrow)) { + flowParseType(); + } +} + +function flowParseIntersectionType() { + _index.eat.call(void 0, _types.TokenType.bitwiseAND); + flowParseAnonFunctionWithoutParens(); + while (_index.eat.call(void 0, _types.TokenType.bitwiseAND)) { + flowParseAnonFunctionWithoutParens(); + } +} + +function flowParseUnionType() { + _index.eat.call(void 0, _types.TokenType.bitwiseOR); + flowParseIntersectionType(); + while (_index.eat.call(void 0, _types.TokenType.bitwiseOR)) { + flowParseIntersectionType(); + } +} + +function flowParseType() { + flowParseUnionType(); +} + + function flowParseTypeAnnotation() { + flowParseTypeInitialiser(); +} exports.flowParseTypeAnnotation = flowParseTypeAnnotation; + +function flowParseTypeAnnotatableIdentifier() { + _expression.parseIdentifier.call(void 0, ); + if (_index.match.call(void 0, _types.TokenType.colon)) { + flowParseTypeAnnotation(); + } +} + + function flowParseVariance() { + if (_index.match.call(void 0, _types.TokenType.plus) || _index.match.call(void 0, _types.TokenType.minus)) { + _index.next.call(void 0, ); + _base.state.tokens[_base.state.tokens.length - 1].isType = true; + } +} exports.flowParseVariance = flowParseVariance; + +// ================================== +// Overrides +// ================================== + + function flowParseFunctionBodyAndFinish(funcContextId) { + // For arrow functions, `parseArrow` handles the return type itself. + if (_index.match.call(void 0, _types.TokenType.colon)) { + flowParseTypeAndPredicateInitialiser(); + } + + _expression.parseFunctionBody.call(void 0, false, funcContextId); +} exports.flowParseFunctionBodyAndFinish = flowParseFunctionBodyAndFinish; + + function flowParseSubscript( + startTokenIndex, + noCalls, + stopState, +) { + if (_index.match.call(void 0, _types.TokenType.questionDot) && _index.lookaheadType.call(void 0, ) === _types.TokenType.lessThan) { + if (noCalls) { + stopState.stop = true; + return; + } + _index.next.call(void 0, ); + flowParseTypeParameterInstantiation(); + _util.expect.call(void 0, _types.TokenType.parenL); + _expression.parseCallExpressionArguments.call(void 0, ); + return; + } else if (!noCalls && _index.match.call(void 0, _types.TokenType.lessThan)) { + const snapshot = _base.state.snapshot(); + flowParseTypeParameterInstantiation(); + _util.expect.call(void 0, _types.TokenType.parenL); + _expression.parseCallExpressionArguments.call(void 0, ); + if (_base.state.error) { + _base.state.restoreFromSnapshot(snapshot); + } else { + return; + } + } + _expression.baseParseSubscript.call(void 0, startTokenIndex, noCalls, stopState); +} exports.flowParseSubscript = flowParseSubscript; + + function flowStartParseNewArguments() { + if (_index.match.call(void 0, _types.TokenType.lessThan)) { + const snapshot = _base.state.snapshot(); + flowParseTypeParameterInstantiation(); + if (_base.state.error) { + _base.state.restoreFromSnapshot(snapshot); + } + } +} exports.flowStartParseNewArguments = flowStartParseNewArguments; + +// interfaces + function flowTryParseStatement() { + if (_index.match.call(void 0, _types.TokenType.name) && _base.state.contextualKeyword === _keywords.ContextualKeyword._interface) { + const oldIsType = _index.pushTypeContext.call(void 0, 0); + _index.next.call(void 0, ); + flowParseInterface(); + 
_index.popTypeContext.call(void 0, oldIsType); + return true; + } else if (_util.isContextual.call(void 0, _keywords.ContextualKeyword._enum)) { + flowParseEnumDeclaration(); + return true; + } + return false; +} exports.flowTryParseStatement = flowTryParseStatement; + + function flowTryParseExportDefaultExpression() { + if (_util.isContextual.call(void 0, _keywords.ContextualKeyword._enum)) { + flowParseEnumDeclaration(); + return true; + } + return false; +} exports.flowTryParseExportDefaultExpression = flowTryParseExportDefaultExpression; + +// declares, interfaces and type aliases + function flowParseIdentifierStatement(contextualKeyword) { + if (contextualKeyword === _keywords.ContextualKeyword._declare) { + if ( + _index.match.call(void 0, _types.TokenType._class) || + _index.match.call(void 0, _types.TokenType.name) || + _index.match.call(void 0, _types.TokenType._function) || + _index.match.call(void 0, _types.TokenType._var) || + _index.match.call(void 0, _types.TokenType._export) + ) { + const oldIsType = _index.pushTypeContext.call(void 0, 1); + flowParseDeclare(); + _index.popTypeContext.call(void 0, oldIsType); + } + } else if (_index.match.call(void 0, _types.TokenType.name)) { + if (contextualKeyword === _keywords.ContextualKeyword._interface) { + const oldIsType = _index.pushTypeContext.call(void 0, 1); + flowParseInterface(); + _index.popTypeContext.call(void 0, oldIsType); + } else if (contextualKeyword === _keywords.ContextualKeyword._type) { + const oldIsType = _index.pushTypeContext.call(void 0, 1); + flowParseTypeAlias(); + _index.popTypeContext.call(void 0, oldIsType); + } else if (contextualKeyword === _keywords.ContextualKeyword._opaque) { + const oldIsType = _index.pushTypeContext.call(void 0, 1); + flowParseOpaqueType(false); + _index.popTypeContext.call(void 0, oldIsType); + } + } + _util.semicolon.call(void 0, ); +} exports.flowParseIdentifierStatement = flowParseIdentifierStatement; + +// export type + function flowShouldParseExportDeclaration() { + return ( + _util.isContextual.call(void 0, _keywords.ContextualKeyword._type) || + _util.isContextual.call(void 0, _keywords.ContextualKeyword._interface) || + _util.isContextual.call(void 0, _keywords.ContextualKeyword._opaque) || + _util.isContextual.call(void 0, _keywords.ContextualKeyword._enum) + ); +} exports.flowShouldParseExportDeclaration = flowShouldParseExportDeclaration; + + function flowShouldDisallowExportDefaultSpecifier() { + return ( + _index.match.call(void 0, _types.TokenType.name) && + (_base.state.contextualKeyword === _keywords.ContextualKeyword._type || + _base.state.contextualKeyword === _keywords.ContextualKeyword._interface || + _base.state.contextualKeyword === _keywords.ContextualKeyword._opaque || + _base.state.contextualKeyword === _keywords.ContextualKeyword._enum) + ); +} exports.flowShouldDisallowExportDefaultSpecifier = flowShouldDisallowExportDefaultSpecifier; + + function flowParseExportDeclaration() { + if (_util.isContextual.call(void 0, _keywords.ContextualKeyword._type)) { + const oldIsType = _index.pushTypeContext.call(void 0, 1); + _index.next.call(void 0, ); + + if (_index.match.call(void 0, _types.TokenType.braceL)) { + // export type { foo, bar }; + _statement.parseExportSpecifiers.call(void 0, ); + _statement.parseExportFrom.call(void 0, ); + } else { + // export type Foo = Bar; + flowParseTypeAlias(); + } + _index.popTypeContext.call(void 0, oldIsType); + } else if (_util.isContextual.call(void 0, _keywords.ContextualKeyword._opaque)) { + const oldIsType = 
_index.pushTypeContext.call(void 0, 1); + _index.next.call(void 0, ); + // export opaque type Foo = Bar; + flowParseOpaqueType(false); + _index.popTypeContext.call(void 0, oldIsType); + } else if (_util.isContextual.call(void 0, _keywords.ContextualKeyword._interface)) { + const oldIsType = _index.pushTypeContext.call(void 0, 1); + _index.next.call(void 0, ); + flowParseInterface(); + _index.popTypeContext.call(void 0, oldIsType); + } else { + _statement.parseStatement.call(void 0, true); + } +} exports.flowParseExportDeclaration = flowParseExportDeclaration; + + function flowShouldParseExportStar() { + return _index.match.call(void 0, _types.TokenType.star) || (_util.isContextual.call(void 0, _keywords.ContextualKeyword._type) && _index.lookaheadType.call(void 0, ) === _types.TokenType.star); +} exports.flowShouldParseExportStar = flowShouldParseExportStar; + + function flowParseExportStar() { + if (_util.eatContextual.call(void 0, _keywords.ContextualKeyword._type)) { + const oldIsType = _index.pushTypeContext.call(void 0, 2); + _statement.baseParseExportStar.call(void 0, ); + _index.popTypeContext.call(void 0, oldIsType); + } else { + _statement.baseParseExportStar.call(void 0, ); + } +} exports.flowParseExportStar = flowParseExportStar; + +// parse a the super class type parameters and implements + function flowAfterParseClassSuper(hasSuper) { + if (hasSuper && _index.match.call(void 0, _types.TokenType.lessThan)) { + flowParseTypeParameterInstantiation(); + } + if (_util.isContextual.call(void 0, _keywords.ContextualKeyword._implements)) { + const oldIsType = _index.pushTypeContext.call(void 0, 0); + _index.next.call(void 0, ); + _base.state.tokens[_base.state.tokens.length - 1].type = _types.TokenType._implements; + do { + flowParseRestrictedIdentifier(); + if (_index.match.call(void 0, _types.TokenType.lessThan)) { + flowParseTypeParameterInstantiation(); + } + } while (_index.eat.call(void 0, _types.TokenType.comma)); + _index.popTypeContext.call(void 0, oldIsType); + } +} exports.flowAfterParseClassSuper = flowAfterParseClassSuper; + +// parse type parameters for object method shorthand + function flowStartParseObjPropValue() { + // method shorthand + if (_index.match.call(void 0, _types.TokenType.lessThan)) { + flowParseTypeParameterDeclaration(); + if (!_index.match.call(void 0, _types.TokenType.parenL)) _util.unexpected.call(void 0, ); + } +} exports.flowStartParseObjPropValue = flowStartParseObjPropValue; + + function flowParseAssignableListItemTypes() { + const oldIsType = _index.pushTypeContext.call(void 0, 0); + _index.eat.call(void 0, _types.TokenType.question); + if (_index.match.call(void 0, _types.TokenType.colon)) { + flowParseTypeAnnotation(); + } + _index.popTypeContext.call(void 0, oldIsType); +} exports.flowParseAssignableListItemTypes = flowParseAssignableListItemTypes; + +// parse typeof and type imports + function flowStartParseImportSpecifiers() { + if (_index.match.call(void 0, _types.TokenType._typeof) || _util.isContextual.call(void 0, _keywords.ContextualKeyword._type)) { + const lh = _index.lookaheadTypeAndKeyword.call(void 0, ); + if (isMaybeDefaultImport(lh) || lh.type === _types.TokenType.braceL || lh.type === _types.TokenType.star) { + _index.next.call(void 0, ); + } + } +} exports.flowStartParseImportSpecifiers = flowStartParseImportSpecifiers; + +// parse import-type/typeof shorthand + function flowParseImportSpecifier() { + const isTypeKeyword = + _base.state.contextualKeyword === _keywords.ContextualKeyword._type || _base.state.type === 
_types.TokenType._typeof; + if (isTypeKeyword) { + _index.next.call(void 0, ); + } else { + _expression.parseIdentifier.call(void 0, ); + } + + if (_util.isContextual.call(void 0, _keywords.ContextualKeyword._as) && !_util.isLookaheadContextual.call(void 0, _keywords.ContextualKeyword._as)) { + _expression.parseIdentifier.call(void 0, ); + if (isTypeKeyword && !_index.match.call(void 0, _types.TokenType.name) && !(_base.state.type & _types.TokenType.IS_KEYWORD)) { + // `import {type as ,` or `import {type as }` + } else { + // `import {type as foo` + _expression.parseIdentifier.call(void 0, ); + } + } else { + if (isTypeKeyword && (_index.match.call(void 0, _types.TokenType.name) || !!(_base.state.type & _types.TokenType.IS_KEYWORD))) { + // `import {type foo` + _expression.parseIdentifier.call(void 0, ); + } + if (_util.eatContextual.call(void 0, _keywords.ContextualKeyword._as)) { + _expression.parseIdentifier.call(void 0, ); + } + } +} exports.flowParseImportSpecifier = flowParseImportSpecifier; + +// parse function type parameters - function foo() {} + function flowStartParseFunctionParams() { + // Originally this checked if the method is a getter/setter, but if it was, we'd crash soon + // anyway, so don't try to propagate that information. + if (_index.match.call(void 0, _types.TokenType.lessThan)) { + const oldIsType = _index.pushTypeContext.call(void 0, 0); + flowParseTypeParameterDeclaration(); + _index.popTypeContext.call(void 0, oldIsType); + } +} exports.flowStartParseFunctionParams = flowStartParseFunctionParams; + +// parse flow type annotations on variable declarator heads - let foo: string = bar + function flowAfterParseVarHead() { + if (_index.match.call(void 0, _types.TokenType.colon)) { + flowParseTypeAnnotation(); + } +} exports.flowAfterParseVarHead = flowAfterParseVarHead; + +// parse the return type of an async arrow function - let foo = (async (): number => {}); + function flowStartParseAsyncArrowFromCallExpression() { + if (_index.match.call(void 0, _types.TokenType.colon)) { + const oldNoAnonFunctionType = _base.state.noAnonFunctionType; + _base.state.noAnonFunctionType = true; + flowParseTypeAnnotation(); + _base.state.noAnonFunctionType = oldNoAnonFunctionType; + } +} exports.flowStartParseAsyncArrowFromCallExpression = flowStartParseAsyncArrowFromCallExpression; + +// We need to support type parameter declarations for arrow functions. This +// is tricky. There are three situations we need to handle +// +// 1. This is either JSX or an arrow function. We'll try JSX first. If that +// fails, we'll try an arrow function. If that fails, we'll throw the JSX +// error. +// 2. This is an arrow function. We'll parse the type parameter declaration, +// parse the rest, make sure the rest is an arrow function, and go from +// there +// 3. This is neither. 
Just call the super method + function flowParseMaybeAssign(noIn, isWithinParens) { + if (_index.match.call(void 0, _types.TokenType.lessThan)) { + const snapshot = _base.state.snapshot(); + let wasArrow = _expression.baseParseMaybeAssign.call(void 0, noIn, isWithinParens); + if (_base.state.error) { + _base.state.restoreFromSnapshot(snapshot); + _base.state.type = _types.TokenType.typeParameterStart; + } else { + return wasArrow; + } + + const oldIsType = _index.pushTypeContext.call(void 0, 0); + flowParseTypeParameterDeclaration(); + _index.popTypeContext.call(void 0, oldIsType); + wasArrow = _expression.baseParseMaybeAssign.call(void 0, noIn, isWithinParens); + if (wasArrow) { + return true; + } + _util.unexpected.call(void 0, ); + } + + return _expression.baseParseMaybeAssign.call(void 0, noIn, isWithinParens); +} exports.flowParseMaybeAssign = flowParseMaybeAssign; + +// handle return types for arrow functions + function flowParseArrow() { + if (_index.match.call(void 0, _types.TokenType.colon)) { + const oldIsType = _index.pushTypeContext.call(void 0, 0); + const snapshot = _base.state.snapshot(); + + const oldNoAnonFunctionType = _base.state.noAnonFunctionType; + _base.state.noAnonFunctionType = true; + flowParseTypeAndPredicateInitialiser(); + _base.state.noAnonFunctionType = oldNoAnonFunctionType; + + if (_util.canInsertSemicolon.call(void 0, )) _util.unexpected.call(void 0, ); + if (!_index.match.call(void 0, _types.TokenType.arrow)) _util.unexpected.call(void 0, ); + + if (_base.state.error) { + _base.state.restoreFromSnapshot(snapshot); + } + _index.popTypeContext.call(void 0, oldIsType); + } + return _index.eat.call(void 0, _types.TokenType.arrow); +} exports.flowParseArrow = flowParseArrow; + + function flowParseSubscripts(startTokenIndex, noCalls = false) { + if ( + _base.state.tokens[_base.state.tokens.length - 1].contextualKeyword === _keywords.ContextualKeyword._async && + _index.match.call(void 0, _types.TokenType.lessThan) + ) { + const snapshot = _base.state.snapshot(); + const wasArrow = parseAsyncArrowWithTypeParameters(); + if (wasArrow && !_base.state.error) { + return; + } + _base.state.restoreFromSnapshot(snapshot); + } + + _expression.baseParseSubscripts.call(void 0, startTokenIndex, noCalls); +} exports.flowParseSubscripts = flowParseSubscripts; + +// Returns true if there was an arrow function here. 
+function parseAsyncArrowWithTypeParameters() { + _base.state.scopeDepth++; + const startTokenIndex = _base.state.tokens.length; + _statement.parseFunctionParams.call(void 0, ); + if (!_expression.parseArrow.call(void 0, )) { + return false; + } + _expression.parseArrowExpression.call(void 0, startTokenIndex); + return true; +} + +function flowParseEnumDeclaration() { + _util.expectContextual.call(void 0, _keywords.ContextualKeyword._enum); + _base.state.tokens[_base.state.tokens.length - 1].type = _types.TokenType._enum; + _expression.parseIdentifier.call(void 0, ); + flowParseEnumBody(); +} + +function flowParseEnumBody() { + if (_util.eatContextual.call(void 0, _keywords.ContextualKeyword._of)) { + _index.next.call(void 0, ); + } + _util.expect.call(void 0, _types.TokenType.braceL); + flowParseEnumMembers(); + _util.expect.call(void 0, _types.TokenType.braceR); +} + +function flowParseEnumMembers() { + while (!_index.match.call(void 0, _types.TokenType.braceR) && !_base.state.error) { + if (_index.eat.call(void 0, _types.TokenType.ellipsis)) { + break; + } + flowParseEnumMember(); + if (!_index.match.call(void 0, _types.TokenType.braceR)) { + _util.expect.call(void 0, _types.TokenType.comma); + } + } +} + +function flowParseEnumMember() { + _expression.parseIdentifier.call(void 0, ); + if (_index.eat.call(void 0, _types.TokenType.eq)) { + // Flow enum values are always just one token (a string, number, or boolean literal). + _index.next.call(void 0, ); + } +} diff --git a/node_modules/sucrase/dist/parser/plugins/jsx/index.js b/node_modules/sucrase/dist/parser/plugins/jsx/index.js new file mode 100644 index 0000000..41797de --- /dev/null +++ b/node_modules/sucrase/dist/parser/plugins/jsx/index.js @@ -0,0 +1,367 @@ +"use strict";Object.defineProperty(exports, "__esModule", {value: true}); + + + + + + + + + +var _index = require('../../tokenizer/index'); +var _types = require('../../tokenizer/types'); +var _base = require('../../traverser/base'); +var _expression = require('../../traverser/expression'); +var _util = require('../../traverser/util'); +var _charcodes = require('../../util/charcodes'); +var _identifier = require('../../util/identifier'); +var _typescript = require('../typescript'); + +/** + * Read token with JSX contents. + * + * In addition to detecting jsxTagStart and also regular tokens that might be + * part of an expression, this code detects the start and end of text ranges + * within JSX children. In order to properly count the number of children, we + * distinguish jsxText from jsxEmptyText, which is a text range that simplifies + * to the empty string after JSX whitespace trimming. + * + * It turns out that a JSX text range will simplify to the empty string if and + * only if both of these conditions hold: + * - The range consists entirely of whitespace characters (only counting space, + * tab, \r, and \n). + * - The range has at least one newline. + * This can be proven by analyzing any implementation of whitespace trimming, + * e.g. formatJSXTextLiteral in Sucrase or cleanJSXElementLiteralChild in Babel. 
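 *
 * (Editorial illustration, not part of the upstream Sucrase source.) The rule above can
 * be written as a small standalone predicate; the function name is invented here:
 *
 *   // Returns true if a JSX text range would trim to the empty string.
 *   function trimsToEmptyJSXText(text) {
 *     let sawNewline = false;
 *     for (const ch of text) {
 *       if (ch === "\n") {
 *         sawNewline = true;
 *       } else if (ch !== " " && ch !== "\t" && ch !== "\r") {
 *         return false; // any non-whitespace character keeps the range as jsxText
 *       }
 *     }
 *     return sawNewline; // all-whitespace text is dropped only if it spans a newline
 *   }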
+ */ +function jsxReadToken() { + let sawNewline = false; + let sawNonWhitespace = false; + while (true) { + if (_base.state.pos >= _base.input.length) { + _util.unexpected.call(void 0, "Unterminated JSX contents"); + return; + } + + const ch = _base.input.charCodeAt(_base.state.pos); + if (ch === _charcodes.charCodes.lessThan || ch === _charcodes.charCodes.leftCurlyBrace) { + if (_base.state.pos === _base.state.start) { + if (ch === _charcodes.charCodes.lessThan) { + _base.state.pos++; + _index.finishToken.call(void 0, _types.TokenType.jsxTagStart); + return; + } + _index.getTokenFromCode.call(void 0, ch); + return; + } + if (sawNewline && !sawNonWhitespace) { + _index.finishToken.call(void 0, _types.TokenType.jsxEmptyText); + } else { + _index.finishToken.call(void 0, _types.TokenType.jsxText); + } + return; + } + + // This is part of JSX text. + if (ch === _charcodes.charCodes.lineFeed) { + sawNewline = true; + } else if (ch !== _charcodes.charCodes.space && ch !== _charcodes.charCodes.carriageReturn && ch !== _charcodes.charCodes.tab) { + sawNonWhitespace = true; + } + _base.state.pos++; + } +} + +function jsxReadString(quote) { + _base.state.pos++; + for (;;) { + if (_base.state.pos >= _base.input.length) { + _util.unexpected.call(void 0, "Unterminated string constant"); + return; + } + + const ch = _base.input.charCodeAt(_base.state.pos); + if (ch === quote) { + _base.state.pos++; + break; + } + _base.state.pos++; + } + _index.finishToken.call(void 0, _types.TokenType.string); +} + +// Read a JSX identifier (valid tag or attribute name). +// +// Optimized version since JSX identifiers can't contain +// escape characters and so can be read as single slice. +// Also assumes that first character was already checked +// by isIdentifierStart in readToken. + +function jsxReadWord() { + let ch; + do { + if (_base.state.pos > _base.input.length) { + _util.unexpected.call(void 0, "Unexpectedly reached the end of input."); + return; + } + ch = _base.input.charCodeAt(++_base.state.pos); + } while (_identifier.IS_IDENTIFIER_CHAR[ch] || ch === _charcodes.charCodes.dash); + _index.finishToken.call(void 0, _types.TokenType.jsxName); +} + +// Parse next token as JSX identifier +function jsxParseIdentifier() { + nextJSXTagToken(); +} + +// Parse namespaced identifier. +function jsxParseNamespacedName(identifierRole) { + jsxParseIdentifier(); + if (!_index.eat.call(void 0, _types.TokenType.colon)) { + // Plain identifier, so this is an access. + _base.state.tokens[_base.state.tokens.length - 1].identifierRole = identifierRole; + return; + } + // Process the second half of the namespaced name. + jsxParseIdentifier(); +} + +// Parses element name in any form - namespaced, member +// or single identifier. +function jsxParseElementName() { + const firstTokenIndex = _base.state.tokens.length; + jsxParseNamespacedName(_index.IdentifierRole.Access); + let hadDot = false; + while (_index.match.call(void 0, _types.TokenType.dot)) { + hadDot = true; + nextJSXTagToken(); + jsxParseIdentifier(); + } + // For tags like
<div>
with a lowercase letter and no dots, the name is + // actually *not* an identifier access, since it's referring to a built-in + // tag name. Remove the identifier role in this case so that it's not + // accidentally transformed by the imports transform when preserving JSX. + if (!hadDot) { + const firstToken = _base.state.tokens[firstTokenIndex]; + const firstChar = _base.input.charCodeAt(firstToken.start); + if (firstChar >= _charcodes.charCodes.lowercaseA && firstChar <= _charcodes.charCodes.lowercaseZ) { + firstToken.identifierRole = null; + } + } +} + +// Parses any type of JSX attribute value. +function jsxParseAttributeValue() { + switch (_base.state.type) { + case _types.TokenType.braceL: + _index.next.call(void 0, ); + _expression.parseExpression.call(void 0, ); + nextJSXTagToken(); + return; + + case _types.TokenType.jsxTagStart: + jsxParseElement(); + nextJSXTagToken(); + return; + + case _types.TokenType.string: + nextJSXTagToken(); + return; + + default: + _util.unexpected.call(void 0, "JSX value should be either an expression or a quoted JSX text"); + } +} + +// Parse JSX spread child, after already processing the { +// Does not parse the closing } +function jsxParseSpreadChild() { + _util.expect.call(void 0, _types.TokenType.ellipsis); + _expression.parseExpression.call(void 0, ); +} + +// Parses JSX opening tag starting after "<". +// Returns true if the tag was self-closing. +// Does not parse the last token. +function jsxParseOpeningElement(initialTokenIndex) { + if (_index.match.call(void 0, _types.TokenType.jsxTagEnd)) { + // This is an open-fragment. + return false; + } + jsxParseElementName(); + if (_base.isTypeScriptEnabled) { + _typescript.tsTryParseJSXTypeArgument.call(void 0, ); + } + let hasSeenPropSpread = false; + while (!_index.match.call(void 0, _types.TokenType.slash) && !_index.match.call(void 0, _types.TokenType.jsxTagEnd) && !_base.state.error) { + if (_index.eat.call(void 0, _types.TokenType.braceL)) { + hasSeenPropSpread = true; + _util.expect.call(void 0, _types.TokenType.ellipsis); + _expression.parseMaybeAssign.call(void 0, ); + // } + nextJSXTagToken(); + continue; + } + if ( + hasSeenPropSpread && + _base.state.end - _base.state.start === 3 && + _base.input.charCodeAt(_base.state.start) === _charcodes.charCodes.lowercaseK && + _base.input.charCodeAt(_base.state.start + 1) === _charcodes.charCodes.lowercaseE && + _base.input.charCodeAt(_base.state.start + 2) === _charcodes.charCodes.lowercaseY + ) { + _base.state.tokens[initialTokenIndex].jsxRole = _index.JSXRole.KeyAfterPropSpread; + } + jsxParseNamespacedName(_index.IdentifierRole.ObjectKey); + if (_index.match.call(void 0, _types.TokenType.eq)) { + nextJSXTagToken(); + jsxParseAttributeValue(); + } + } + const isSelfClosing = _index.match.call(void 0, _types.TokenType.slash); + if (isSelfClosing) { + // / + nextJSXTagToken(); + } + return isSelfClosing; +} + +// Parses JSX closing tag starting after " 1) { + _base.state.tokens[initialTokenIndex].jsxRole = _index.JSXRole.StaticChildren; + } + } + return; + } + numExplicitChildren++; + jsxParseElementAt(); + nextJSXExprToken(); + break; + + case _types.TokenType.jsxText: + numExplicitChildren++; + nextJSXExprToken(); + break; + + case _types.TokenType.jsxEmptyText: + nextJSXExprToken(); + break; + + case _types.TokenType.braceL: + _index.next.call(void 0, ); + if (_index.match.call(void 0, _types.TokenType.ellipsis)) { + jsxParseSpreadChild(); + nextJSXExprToken(); + // Spread children are a mechanism to explicitly mark children as + // static, so 
count it as 2 children to satisfy the "more than one + // child" condition. + numExplicitChildren += 2; + } else { + // If we see {}, this is an empty pseudo-expression that doesn't + // count as a child. + if (!_index.match.call(void 0, _types.TokenType.braceR)) { + numExplicitChildren++; + _expression.parseExpression.call(void 0, ); + } + nextJSXExprToken(); + } + + break; + + // istanbul ignore next - should never happen + default: + _util.unexpected.call(void 0, ); + return; + } + } + } +} + +// Parses entire JSX element from current position. +// Does not parse the last token. + function jsxParseElement() { + nextJSXTagToken(); + jsxParseElementAt(); +} exports.jsxParseElement = jsxParseElement; + +// ================================== +// Overrides +// ================================== + + function nextJSXTagToken() { + _base.state.tokens.push(new (0, _index.Token)()); + _index.skipSpace.call(void 0, ); + _base.state.start = _base.state.pos; + const code = _base.input.charCodeAt(_base.state.pos); + + if (_identifier.IS_IDENTIFIER_START[code]) { + jsxReadWord(); + } else if (code === _charcodes.charCodes.quotationMark || code === _charcodes.charCodes.apostrophe) { + jsxReadString(code); + } else { + // The following tokens are just one character each. + ++_base.state.pos; + switch (code) { + case _charcodes.charCodes.greaterThan: + _index.finishToken.call(void 0, _types.TokenType.jsxTagEnd); + break; + case _charcodes.charCodes.lessThan: + _index.finishToken.call(void 0, _types.TokenType.jsxTagStart); + break; + case _charcodes.charCodes.slash: + _index.finishToken.call(void 0, _types.TokenType.slash); + break; + case _charcodes.charCodes.equalsTo: + _index.finishToken.call(void 0, _types.TokenType.eq); + break; + case _charcodes.charCodes.leftCurlyBrace: + _index.finishToken.call(void 0, _types.TokenType.braceL); + break; + case _charcodes.charCodes.dot: + _index.finishToken.call(void 0, _types.TokenType.dot); + break; + case _charcodes.charCodes.colon: + _index.finishToken.call(void 0, _types.TokenType.colon); + break; + default: + _util.unexpected.call(void 0, ); + } + } +} exports.nextJSXTagToken = nextJSXTagToken; + +function nextJSXExprToken() { + _base.state.tokens.push(new (0, _index.Token)()); + _base.state.start = _base.state.pos; + jsxReadToken(); +} diff --git a/node_modules/sucrase/dist/parser/plugins/jsx/xhtml.js b/node_modules/sucrase/dist/parser/plugins/jsx/xhtml.js new file mode 100644 index 0000000..d8e91c5 --- /dev/null +++ b/node_modules/sucrase/dist/parser/plugins/jsx/xhtml.js @@ -0,0 +1,256 @@ +"use strict";Object.defineProperty(exports, "__esModule", {value: true});// Use a Map rather than object to avoid unexpected __proto__ access. +exports. 
default = new Map([ + ["quot", "\u0022"], + ["amp", "&"], + ["apos", "\u0027"], + ["lt", "<"], + ["gt", ">"], + ["nbsp", "\u00A0"], + ["iexcl", "\u00A1"], + ["cent", "\u00A2"], + ["pound", "\u00A3"], + ["curren", "\u00A4"], + ["yen", "\u00A5"], + ["brvbar", "\u00A6"], + ["sect", "\u00A7"], + ["uml", "\u00A8"], + ["copy", "\u00A9"], + ["ordf", "\u00AA"], + ["laquo", "\u00AB"], + ["not", "\u00AC"], + ["shy", "\u00AD"], + ["reg", "\u00AE"], + ["macr", "\u00AF"], + ["deg", "\u00B0"], + ["plusmn", "\u00B1"], + ["sup2", "\u00B2"], + ["sup3", "\u00B3"], + ["acute", "\u00B4"], + ["micro", "\u00B5"], + ["para", "\u00B6"], + ["middot", "\u00B7"], + ["cedil", "\u00B8"], + ["sup1", "\u00B9"], + ["ordm", "\u00BA"], + ["raquo", "\u00BB"], + ["frac14", "\u00BC"], + ["frac12", "\u00BD"], + ["frac34", "\u00BE"], + ["iquest", "\u00BF"], + ["Agrave", "\u00C0"], + ["Aacute", "\u00C1"], + ["Acirc", "\u00C2"], + ["Atilde", "\u00C3"], + ["Auml", "\u00C4"], + ["Aring", "\u00C5"], + ["AElig", "\u00C6"], + ["Ccedil", "\u00C7"], + ["Egrave", "\u00C8"], + ["Eacute", "\u00C9"], + ["Ecirc", "\u00CA"], + ["Euml", "\u00CB"], + ["Igrave", "\u00CC"], + ["Iacute", "\u00CD"], + ["Icirc", "\u00CE"], + ["Iuml", "\u00CF"], + ["ETH", "\u00D0"], + ["Ntilde", "\u00D1"], + ["Ograve", "\u00D2"], + ["Oacute", "\u00D3"], + ["Ocirc", "\u00D4"], + ["Otilde", "\u00D5"], + ["Ouml", "\u00D6"], + ["times", "\u00D7"], + ["Oslash", "\u00D8"], + ["Ugrave", "\u00D9"], + ["Uacute", "\u00DA"], + ["Ucirc", "\u00DB"], + ["Uuml", "\u00DC"], + ["Yacute", "\u00DD"], + ["THORN", "\u00DE"], + ["szlig", "\u00DF"], + ["agrave", "\u00E0"], + ["aacute", "\u00E1"], + ["acirc", "\u00E2"], + ["atilde", "\u00E3"], + ["auml", "\u00E4"], + ["aring", "\u00E5"], + ["aelig", "\u00E6"], + ["ccedil", "\u00E7"], + ["egrave", "\u00E8"], + ["eacute", "\u00E9"], + ["ecirc", "\u00EA"], + ["euml", "\u00EB"], + ["igrave", "\u00EC"], + ["iacute", "\u00ED"], + ["icirc", "\u00EE"], + ["iuml", "\u00EF"], + ["eth", "\u00F0"], + ["ntilde", "\u00F1"], + ["ograve", "\u00F2"], + ["oacute", "\u00F3"], + ["ocirc", "\u00F4"], + ["otilde", "\u00F5"], + ["ouml", "\u00F6"], + ["divide", "\u00F7"], + ["oslash", "\u00F8"], + ["ugrave", "\u00F9"], + ["uacute", "\u00FA"], + ["ucirc", "\u00FB"], + ["uuml", "\u00FC"], + ["yacute", "\u00FD"], + ["thorn", "\u00FE"], + ["yuml", "\u00FF"], + ["OElig", "\u0152"], + ["oelig", "\u0153"], + ["Scaron", "\u0160"], + ["scaron", "\u0161"], + ["Yuml", "\u0178"], + ["fnof", "\u0192"], + ["circ", "\u02C6"], + ["tilde", "\u02DC"], + ["Alpha", "\u0391"], + ["Beta", "\u0392"], + ["Gamma", "\u0393"], + ["Delta", "\u0394"], + ["Epsilon", "\u0395"], + ["Zeta", "\u0396"], + ["Eta", "\u0397"], + ["Theta", "\u0398"], + ["Iota", "\u0399"], + ["Kappa", "\u039A"], + ["Lambda", "\u039B"], + ["Mu", "\u039C"], + ["Nu", "\u039D"], + ["Xi", "\u039E"], + ["Omicron", "\u039F"], + ["Pi", "\u03A0"], + ["Rho", "\u03A1"], + ["Sigma", "\u03A3"], + ["Tau", "\u03A4"], + ["Upsilon", "\u03A5"], + ["Phi", "\u03A6"], + ["Chi", "\u03A7"], + ["Psi", "\u03A8"], + ["Omega", "\u03A9"], + ["alpha", "\u03B1"], + ["beta", "\u03B2"], + ["gamma", "\u03B3"], + ["delta", "\u03B4"], + ["epsilon", "\u03B5"], + ["zeta", "\u03B6"], + ["eta", "\u03B7"], + ["theta", "\u03B8"], + ["iota", "\u03B9"], + ["kappa", "\u03BA"], + ["lambda", "\u03BB"], + ["mu", "\u03BC"], + ["nu", "\u03BD"], + ["xi", "\u03BE"], + ["omicron", "\u03BF"], + ["pi", "\u03C0"], + ["rho", "\u03C1"], + ["sigmaf", "\u03C2"], + ["sigma", "\u03C3"], + ["tau", "\u03C4"], + ["upsilon", "\u03C5"], + ["phi", "\u03C6"], + ["chi", "\u03C7"], + 
["psi", "\u03C8"], + ["omega", "\u03C9"], + ["thetasym", "\u03D1"], + ["upsih", "\u03D2"], + ["piv", "\u03D6"], + ["ensp", "\u2002"], + ["emsp", "\u2003"], + ["thinsp", "\u2009"], + ["zwnj", "\u200C"], + ["zwj", "\u200D"], + ["lrm", "\u200E"], + ["rlm", "\u200F"], + ["ndash", "\u2013"], + ["mdash", "\u2014"], + ["lsquo", "\u2018"], + ["rsquo", "\u2019"], + ["sbquo", "\u201A"], + ["ldquo", "\u201C"], + ["rdquo", "\u201D"], + ["bdquo", "\u201E"], + ["dagger", "\u2020"], + ["Dagger", "\u2021"], + ["bull", "\u2022"], + ["hellip", "\u2026"], + ["permil", "\u2030"], + ["prime", "\u2032"], + ["Prime", "\u2033"], + ["lsaquo", "\u2039"], + ["rsaquo", "\u203A"], + ["oline", "\u203E"], + ["frasl", "\u2044"], + ["euro", "\u20AC"], + ["image", "\u2111"], + ["weierp", "\u2118"], + ["real", "\u211C"], + ["trade", "\u2122"], + ["alefsym", "\u2135"], + ["larr", "\u2190"], + ["uarr", "\u2191"], + ["rarr", "\u2192"], + ["darr", "\u2193"], + ["harr", "\u2194"], + ["crarr", "\u21B5"], + ["lArr", "\u21D0"], + ["uArr", "\u21D1"], + ["rArr", "\u21D2"], + ["dArr", "\u21D3"], + ["hArr", "\u21D4"], + ["forall", "\u2200"], + ["part", "\u2202"], + ["exist", "\u2203"], + ["empty", "\u2205"], + ["nabla", "\u2207"], + ["isin", "\u2208"], + ["notin", "\u2209"], + ["ni", "\u220B"], + ["prod", "\u220F"], + ["sum", "\u2211"], + ["minus", "\u2212"], + ["lowast", "\u2217"], + ["radic", "\u221A"], + ["prop", "\u221D"], + ["infin", "\u221E"], + ["ang", "\u2220"], + ["and", "\u2227"], + ["or", "\u2228"], + ["cap", "\u2229"], + ["cup", "\u222A"], + ["int", "\u222B"], + ["there4", "\u2234"], + ["sim", "\u223C"], + ["cong", "\u2245"], + ["asymp", "\u2248"], + ["ne", "\u2260"], + ["equiv", "\u2261"], + ["le", "\u2264"], + ["ge", "\u2265"], + ["sub", "\u2282"], + ["sup", "\u2283"], + ["nsub", "\u2284"], + ["sube", "\u2286"], + ["supe", "\u2287"], + ["oplus", "\u2295"], + ["otimes", "\u2297"], + ["perp", "\u22A5"], + ["sdot", "\u22C5"], + ["lceil", "\u2308"], + ["rceil", "\u2309"], + ["lfloor", "\u230A"], + ["rfloor", "\u230B"], + ["lang", "\u2329"], + ["rang", "\u232A"], + ["loz", "\u25CA"], + ["spades", "\u2660"], + ["clubs", "\u2663"], + ["hearts", "\u2665"], + ["diams", "\u2666"], +]); diff --git a/node_modules/sucrase/dist/parser/plugins/types.js b/node_modules/sucrase/dist/parser/plugins/types.js new file mode 100644 index 0000000..c892994 --- /dev/null +++ b/node_modules/sucrase/dist/parser/plugins/types.js @@ -0,0 +1,37 @@ +"use strict";Object.defineProperty(exports, "__esModule", {value: true});var _index = require('../tokenizer/index'); +var _types = require('../tokenizer/types'); +var _base = require('../traverser/base'); +var _expression = require('../traverser/expression'); +var _flow = require('./flow'); +var _typescript = require('./typescript'); + +/** + * Common parser code for TypeScript and Flow. + */ + +// An apparent conditional expression could actually be an optional parameter in an arrow function. + function typedParseConditional(noIn) { + // If we see ?:, this can't possibly be a valid conditional. typedParseParenItem will be called + // later to finish off the arrow parameter. We also need to handle bare ? tokens for optional + // parameters without type annotations, i.e. ?, and ?) . 
+ if (_index.match.call(void 0, _types.TokenType.question)) { + const nextType = _index.lookaheadType.call(void 0, ); + if (nextType === _types.TokenType.colon || nextType === _types.TokenType.comma || nextType === _types.TokenType.parenR) { + return; + } + } + _expression.baseParseConditional.call(void 0, noIn); +} exports.typedParseConditional = typedParseConditional; + +// Note: These "type casts" are *not* valid TS expressions. +// But we parse them here and change them when completing the arrow function. + function typedParseParenItem() { + _index.eatTypeToken.call(void 0, _types.TokenType.question); + if (_index.match.call(void 0, _types.TokenType.colon)) { + if (_base.isTypeScriptEnabled) { + _typescript.tsParseTypeAnnotation.call(void 0, ); + } else if (_base.isFlowEnabled) { + _flow.flowParseTypeAnnotation.call(void 0, ); + } + } +} exports.typedParseParenItem = typedParseParenItem; diff --git a/node_modules/sucrase/dist/parser/plugins/typescript.js b/node_modules/sucrase/dist/parser/plugins/typescript.js new file mode 100644 index 0000000..6e5907a --- /dev/null +++ b/node_modules/sucrase/dist/parser/plugins/typescript.js @@ -0,0 +1,1616 @@ +"use strict";Object.defineProperty(exports, "__esModule", {value: true}); + + + + + + + + + + +var _index = require('../tokenizer/index'); +var _keywords = require('../tokenizer/keywords'); +var _types = require('../tokenizer/types'); +var _base = require('../traverser/base'); + + + + + + + + + + + + + + + +var _expression = require('../traverser/expression'); +var _lval = require('../traverser/lval'); + + + + + + + + +var _statement = require('../traverser/statement'); + + + + + + + + + + + +var _util = require('../traverser/util'); +var _jsx = require('./jsx'); + +function tsIsIdentifier() { + // TODO: actually a bit more complex in TypeScript, but shouldn't matter. + // See https://github.com/Microsoft/TypeScript/issues/15008 + return _index.match.call(void 0, _types.TokenType.name); +} + +function isLiteralPropertyName() { + return ( + _index.match.call(void 0, _types.TokenType.name) || + Boolean(_base.state.type & _types.TokenType.IS_KEYWORD) || + _index.match.call(void 0, _types.TokenType.string) || + _index.match.call(void 0, _types.TokenType.num) || + _index.match.call(void 0, _types.TokenType.bigint) || + _index.match.call(void 0, _types.TokenType.decimal) + ); +} + +function tsNextTokenCanFollowModifier() { + // Note: TypeScript's implementation is much more complicated because + // more things are considered modifiers there. + // This implementation only handles modifiers not handled by babylon itself. And "static". + // TODO: Would be nice to avoid lookahead. Want a hasLineBreakUpNext() method... 
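Editorial sketch (not upstream code): the modifier check below leans on Sucrase's snapshot/restore mechanism. Reduced to its essentials, the pattern is: capture the tokenizer state, parse speculatively, and roll back on failure. The speculate helper and the state shape here are invented for illustration.

function speculate(state, tryParse) {
  // Capture the pieces of state that speculative parsing may mutate.
  const saved = {pos: state.pos, tokenCount: state.tokens.length};
  if (tryParse()) {
    return true; // Commit: keep everything the speculative parse consumed.
  }
  state.pos = saved.pos;                  // Roll back the input position...
  state.tokens.length = saved.tokenCount; // ...and drop tokens pushed during speculation.
  return false;
}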
+ const snapshot = _base.state.snapshot(); + + _index.next.call(void 0, ); + const canFollowModifier = + (_index.match.call(void 0, _types.TokenType.bracketL) || + _index.match.call(void 0, _types.TokenType.braceL) || + _index.match.call(void 0, _types.TokenType.star) || + _index.match.call(void 0, _types.TokenType.ellipsis) || + _index.match.call(void 0, _types.TokenType.hash) || + isLiteralPropertyName()) && + !_util.hasPrecedingLineBreak.call(void 0, ); + + if (canFollowModifier) { + return true; + } else { + _base.state.restoreFromSnapshot(snapshot); + return false; + } +} + + function tsParseModifiers(allowedModifiers) { + while (true) { + const modifier = tsParseModifier(allowedModifiers); + if (modifier === null) { + break; + } + } +} exports.tsParseModifiers = tsParseModifiers; + +/** Parses a modifier matching one the given modifier names. */ + function tsParseModifier( + allowedModifiers, +) { + if (!_index.match.call(void 0, _types.TokenType.name)) { + return null; + } + + const modifier = _base.state.contextualKeyword; + if (allowedModifiers.indexOf(modifier) !== -1 && tsNextTokenCanFollowModifier()) { + switch (modifier) { + case _keywords.ContextualKeyword._readonly: + _base.state.tokens[_base.state.tokens.length - 1].type = _types.TokenType._readonly; + break; + case _keywords.ContextualKeyword._abstract: + _base.state.tokens[_base.state.tokens.length - 1].type = _types.TokenType._abstract; + break; + case _keywords.ContextualKeyword._static: + _base.state.tokens[_base.state.tokens.length - 1].type = _types.TokenType._static; + break; + case _keywords.ContextualKeyword._public: + _base.state.tokens[_base.state.tokens.length - 1].type = _types.TokenType._public; + break; + case _keywords.ContextualKeyword._private: + _base.state.tokens[_base.state.tokens.length - 1].type = _types.TokenType._private; + break; + case _keywords.ContextualKeyword._protected: + _base.state.tokens[_base.state.tokens.length - 1].type = _types.TokenType._protected; + break; + case _keywords.ContextualKeyword._override: + _base.state.tokens[_base.state.tokens.length - 1].type = _types.TokenType._override; + break; + case _keywords.ContextualKeyword._declare: + _base.state.tokens[_base.state.tokens.length - 1].type = _types.TokenType._declare; + break; + default: + break; + } + return modifier; + } + return null; +} exports.tsParseModifier = tsParseModifier; + +function tsParseEntityName() { + _expression.parseIdentifier.call(void 0, ); + while (_index.eat.call(void 0, _types.TokenType.dot)) { + _expression.parseIdentifier.call(void 0, ); + } +} + +function tsParseTypeReference() { + tsParseEntityName(); + if (!_util.hasPrecedingLineBreak.call(void 0, ) && _index.match.call(void 0, _types.TokenType.lessThan)) { + tsParseTypeArguments(); + } +} + +function tsParseThisTypePredicate() { + _index.next.call(void 0, ); + tsParseTypeAnnotation(); +} + +function tsParseThisTypeNode() { + _index.next.call(void 0, ); +} + +function tsParseTypeQuery() { + _util.expect.call(void 0, _types.TokenType._typeof); + if (_index.match.call(void 0, _types.TokenType._import)) { + tsParseImportType(); + } else { + tsParseEntityName(); + } + if (!_util.hasPrecedingLineBreak.call(void 0, ) && _index.match.call(void 0, _types.TokenType.lessThan)) { + tsParseTypeArguments(); + } +} + +function tsParseImportType() { + _util.expect.call(void 0, _types.TokenType._import); + _util.expect.call(void 0, _types.TokenType.parenL); + _util.expect.call(void 0, _types.TokenType.string); + _util.expect.call(void 0, _types.TokenType.parenR); 
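For orientation (an editorial aside, not part of the upstream patch), the steps above and the tail that follows correspond to TypeScript import types of roughly these shapes; the module and type names are invented:

type LoadedConfig = import("./config").Config;   // entity name after the closing paren and dot
type Boxed<T> = import("./box").Box<T>;          // optionally followed by type arguments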
+ if (_index.eat.call(void 0, _types.TokenType.dot)) { + tsParseEntityName(); + } + if (_index.match.call(void 0, _types.TokenType.lessThan)) { + tsParseTypeArguments(); + } +} + +function tsParseTypeParameter() { + _index.eat.call(void 0, _types.TokenType._const); + const hadIn = _index.eat.call(void 0, _types.TokenType._in); + const hadOut = _util.eatContextual.call(void 0, _keywords.ContextualKeyword._out); + _index.eat.call(void 0, _types.TokenType._const); + if ((hadIn || hadOut) && !_index.match.call(void 0, _types.TokenType.name)) { + // The "in" or "out" keyword must have actually been the type parameter + // name, so set it as the name. + _base.state.tokens[_base.state.tokens.length - 1].type = _types.TokenType.name; + } else { + _expression.parseIdentifier.call(void 0, ); + } + + if (_index.eat.call(void 0, _types.TokenType._extends)) { + tsParseType(); + } + if (_index.eat.call(void 0, _types.TokenType.eq)) { + tsParseType(); + } +} + + function tsTryParseTypeParameters() { + if (_index.match.call(void 0, _types.TokenType.lessThan)) { + tsParseTypeParameters(); + } +} exports.tsTryParseTypeParameters = tsTryParseTypeParameters; + +function tsParseTypeParameters() { + const oldIsType = _index.pushTypeContext.call(void 0, 0); + if (_index.match.call(void 0, _types.TokenType.lessThan) || _index.match.call(void 0, _types.TokenType.typeParameterStart)) { + _index.next.call(void 0, ); + } else { + _util.unexpected.call(void 0, ); + } + + while (!_index.eat.call(void 0, _types.TokenType.greaterThan) && !_base.state.error) { + tsParseTypeParameter(); + _index.eat.call(void 0, _types.TokenType.comma); + } + _index.popTypeContext.call(void 0, oldIsType); +} + +// Note: In TypeScript implementation we must provide `yieldContext` and `awaitContext`, +// but here it's always false, because this is only used for types. +function tsFillSignature(returnToken) { + // Arrow fns *must* have return token (`=>`). Normal functions can omit it. + const returnTokenRequired = returnToken === _types.TokenType.arrow; + tsTryParseTypeParameters(); + _util.expect.call(void 0, _types.TokenType.parenL); + // Create a scope even though we're doing type parsing so we don't accidentally + // treat params as top-level bindings. 
+ _base.state.scopeDepth++; + tsParseBindingListForSignature(false /* isBlockScope */); + _base.state.scopeDepth--; + if (returnTokenRequired) { + tsParseTypeOrTypePredicateAnnotation(returnToken); + } else if (_index.match.call(void 0, returnToken)) { + tsParseTypeOrTypePredicateAnnotation(returnToken); + } +} + +function tsParseBindingListForSignature(isBlockScope) { + _lval.parseBindingList.call(void 0, _types.TokenType.parenR, isBlockScope); +} + +function tsParseTypeMemberSemicolon() { + if (!_index.eat.call(void 0, _types.TokenType.comma)) { + _util.semicolon.call(void 0, ); + } +} + +function tsParseSignatureMember() { + tsFillSignature(_types.TokenType.colon); + tsParseTypeMemberSemicolon(); +} + +function tsIsUnambiguouslyIndexSignature() { + const snapshot = _base.state.snapshot(); + _index.next.call(void 0, ); // Skip '{' + const isIndexSignature = _index.eat.call(void 0, _types.TokenType.name) && _index.match.call(void 0, _types.TokenType.colon); + _base.state.restoreFromSnapshot(snapshot); + return isIndexSignature; +} + +function tsTryParseIndexSignature() { + if (!(_index.match.call(void 0, _types.TokenType.bracketL) && tsIsUnambiguouslyIndexSignature())) { + return false; + } + + const oldIsType = _index.pushTypeContext.call(void 0, 0); + + _util.expect.call(void 0, _types.TokenType.bracketL); + _expression.parseIdentifier.call(void 0, ); + tsParseTypeAnnotation(); + _util.expect.call(void 0, _types.TokenType.bracketR); + + tsTryParseTypeAnnotation(); + tsParseTypeMemberSemicolon(); + + _index.popTypeContext.call(void 0, oldIsType); + return true; +} + +function tsParsePropertyOrMethodSignature(isReadonly) { + _index.eat.call(void 0, _types.TokenType.question); + + if (!isReadonly && (_index.match.call(void 0, _types.TokenType.parenL) || _index.match.call(void 0, _types.TokenType.lessThan))) { + tsFillSignature(_types.TokenType.colon); + tsParseTypeMemberSemicolon(); + } else { + tsTryParseTypeAnnotation(); + tsParseTypeMemberSemicolon(); + } +} + +function tsParseTypeMember() { + if (_index.match.call(void 0, _types.TokenType.parenL) || _index.match.call(void 0, _types.TokenType.lessThan)) { + // call signature + tsParseSignatureMember(); + return; + } + if (_index.match.call(void 0, _types.TokenType._new)) { + _index.next.call(void 0, ); + if (_index.match.call(void 0, _types.TokenType.parenL) || _index.match.call(void 0, _types.TokenType.lessThan)) { + // constructor signature + tsParseSignatureMember(); + } else { + tsParsePropertyOrMethodSignature(false); + } + return; + } + const readonly = !!tsParseModifier([_keywords.ContextualKeyword._readonly]); + + const found = tsTryParseIndexSignature(); + if (found) { + return; + } + if ( + (_util.isContextual.call(void 0, _keywords.ContextualKeyword._get) || _util.isContextual.call(void 0, _keywords.ContextualKeyword._set)) && + tsNextTokenCanFollowModifier() + ) { + // This is a getter/setter on a type. The tsNextTokenCanFollowModifier + // function already called next() for us, so continue parsing the name. + } + _expression.parsePropertyName.call(void 0, -1 /* Types don't need context IDs. 
*/); + tsParsePropertyOrMethodSignature(readonly); +} + +function tsParseTypeLiteral() { + tsParseObjectTypeMembers(); +} + +function tsParseObjectTypeMembers() { + _util.expect.call(void 0, _types.TokenType.braceL); + while (!_index.eat.call(void 0, _types.TokenType.braceR) && !_base.state.error) { + tsParseTypeMember(); + } +} + +function tsLookaheadIsStartOfMappedType() { + const snapshot = _base.state.snapshot(); + const isStartOfMappedType = tsIsStartOfMappedType(); + _base.state.restoreFromSnapshot(snapshot); + return isStartOfMappedType; +} + +function tsIsStartOfMappedType() { + _index.next.call(void 0, ); + if (_index.eat.call(void 0, _types.TokenType.plus) || _index.eat.call(void 0, _types.TokenType.minus)) { + return _util.isContextual.call(void 0, _keywords.ContextualKeyword._readonly); + } + if (_util.isContextual.call(void 0, _keywords.ContextualKeyword._readonly)) { + _index.next.call(void 0, ); + } + if (!_index.match.call(void 0, _types.TokenType.bracketL)) { + return false; + } + _index.next.call(void 0, ); + if (!tsIsIdentifier()) { + return false; + } + _index.next.call(void 0, ); + return _index.match.call(void 0, _types.TokenType._in); +} + +function tsParseMappedTypeParameter() { + _expression.parseIdentifier.call(void 0, ); + _util.expect.call(void 0, _types.TokenType._in); + tsParseType(); +} + +function tsParseMappedType() { + _util.expect.call(void 0, _types.TokenType.braceL); + if (_index.match.call(void 0, _types.TokenType.plus) || _index.match.call(void 0, _types.TokenType.minus)) { + _index.next.call(void 0, ); + _util.expectContextual.call(void 0, _keywords.ContextualKeyword._readonly); + } else { + _util.eatContextual.call(void 0, _keywords.ContextualKeyword._readonly); + } + _util.expect.call(void 0, _types.TokenType.bracketL); + tsParseMappedTypeParameter(); + if (_util.eatContextual.call(void 0, _keywords.ContextualKeyword._as)) { + tsParseType(); + } + _util.expect.call(void 0, _types.TokenType.bracketR); + if (_index.match.call(void 0, _types.TokenType.plus) || _index.match.call(void 0, _types.TokenType.minus)) { + _index.next.call(void 0, ); + _util.expect.call(void 0, _types.TokenType.question); + } else { + _index.eat.call(void 0, _types.TokenType.question); + } + tsTryParseType(); + _util.semicolon.call(void 0, ); + _util.expect.call(void 0, _types.TokenType.braceR); +} + +function tsParseTupleType() { + _util.expect.call(void 0, _types.TokenType.bracketL); + while (!_index.eat.call(void 0, _types.TokenType.bracketR) && !_base.state.error) { + // Do not validate presence of either none or only labeled elements + tsParseTupleElementType(); + _index.eat.call(void 0, _types.TokenType.comma); + } +} + +function tsParseTupleElementType() { + // parses `...TsType[]` + if (_index.eat.call(void 0, _types.TokenType.ellipsis)) { + tsParseType(); + } else { + // parses `TsType?` + tsParseType(); + _index.eat.call(void 0, _types.TokenType.question); + } + + // The type we parsed above was actually a label + if (_index.eat.call(void 0, _types.TokenType.colon)) { + // Labeled tuple types must affix the label with `...` or `?`, so no need to handle those here + tsParseType(); + } +} + +function tsParseParenthesizedType() { + _util.expect.call(void 0, _types.TokenType.parenL); + tsParseType(); + _util.expect.call(void 0, _types.TokenType.parenR); +} + +function tsParseTemplateLiteralType() { + // Finish `, read quasi + _index.nextTemplateToken.call(void 0, ); + // Finish quasi, read ${ + _index.nextTemplateToken.call(void 0, ); + while (!_index.match.call(void 0, 
_types.TokenType.backQuote) && !_base.state.error) { + _util.expect.call(void 0, _types.TokenType.dollarBraceL); + tsParseType(); + // Finish }, read quasi + _index.nextTemplateToken.call(void 0, ); + // Finish quasi, read either ${ or ` + _index.nextTemplateToken.call(void 0, ); + } + _index.next.call(void 0, ); +} + +var FunctionType; (function (FunctionType) { + const TSFunctionType = 0; FunctionType[FunctionType["TSFunctionType"] = TSFunctionType] = "TSFunctionType"; + const TSConstructorType = TSFunctionType + 1; FunctionType[FunctionType["TSConstructorType"] = TSConstructorType] = "TSConstructorType"; + const TSAbstractConstructorType = TSConstructorType + 1; FunctionType[FunctionType["TSAbstractConstructorType"] = TSAbstractConstructorType] = "TSAbstractConstructorType"; +})(FunctionType || (FunctionType = {})); + +function tsParseFunctionOrConstructorType(type) { + if (type === FunctionType.TSAbstractConstructorType) { + _util.expectContextual.call(void 0, _keywords.ContextualKeyword._abstract); + } + if (type === FunctionType.TSConstructorType || type === FunctionType.TSAbstractConstructorType) { + _util.expect.call(void 0, _types.TokenType._new); + } + const oldInDisallowConditionalTypesContext = _base.state.inDisallowConditionalTypesContext; + _base.state.inDisallowConditionalTypesContext = false; + tsFillSignature(_types.TokenType.arrow); + _base.state.inDisallowConditionalTypesContext = oldInDisallowConditionalTypesContext; +} + +function tsParseNonArrayType() { + switch (_base.state.type) { + case _types.TokenType.name: + tsParseTypeReference(); + return; + case _types.TokenType._void: + case _types.TokenType._null: + _index.next.call(void 0, ); + return; + case _types.TokenType.string: + case _types.TokenType.num: + case _types.TokenType.bigint: + case _types.TokenType.decimal: + case _types.TokenType._true: + case _types.TokenType._false: + _expression.parseLiteral.call(void 0, ); + return; + case _types.TokenType.minus: + _index.next.call(void 0, ); + _expression.parseLiteral.call(void 0, ); + return; + case _types.TokenType._this: { + tsParseThisTypeNode(); + if (_util.isContextual.call(void 0, _keywords.ContextualKeyword._is) && !_util.hasPrecedingLineBreak.call(void 0, )) { + tsParseThisTypePredicate(); + } + return; + } + case _types.TokenType._typeof: + tsParseTypeQuery(); + return; + case _types.TokenType._import: + tsParseImportType(); + return; + case _types.TokenType.braceL: + if (tsLookaheadIsStartOfMappedType()) { + tsParseMappedType(); + } else { + tsParseTypeLiteral(); + } + return; + case _types.TokenType.bracketL: + tsParseTupleType(); + return; + case _types.TokenType.parenL: + tsParseParenthesizedType(); + return; + case _types.TokenType.backQuote: + tsParseTemplateLiteralType(); + return; + default: + if (_base.state.type & _types.TokenType.IS_KEYWORD) { + _index.next.call(void 0, ); + _base.state.tokens[_base.state.tokens.length - 1].type = _types.TokenType.name; + return; + } + break; + } + + _util.unexpected.call(void 0, ); +} + +function tsParseArrayTypeOrHigher() { + tsParseNonArrayType(); + while (!_util.hasPrecedingLineBreak.call(void 0, ) && _index.eat.call(void 0, _types.TokenType.bracketL)) { + if (!_index.eat.call(void 0, _types.TokenType.bracketR)) { + // If we hit ] immediately, this is an array type, otherwise it's an indexed access type. 
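A brief editorial illustration (not from the upstream patch) of the two bracket cases distinguished above; the type names are invented:

type Ports = number[];             // "[" immediately closed by "]": an array type
type Port = ServerConfig["port"];  // a type between the brackets: an indexed access type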
+ tsParseType(); + _util.expect.call(void 0, _types.TokenType.bracketR); + } + } +} + +function tsParseInferType() { + _util.expectContextual.call(void 0, _keywords.ContextualKeyword._infer); + _expression.parseIdentifier.call(void 0, ); + if (_index.match.call(void 0, _types.TokenType._extends)) { + // Infer type constraints introduce an ambiguity about whether the "extends" + // is a constraint for this infer type or is another conditional type. + const snapshot = _base.state.snapshot(); + _util.expect.call(void 0, _types.TokenType._extends); + const oldInDisallowConditionalTypesContext = _base.state.inDisallowConditionalTypesContext; + _base.state.inDisallowConditionalTypesContext = true; + tsParseType(); + _base.state.inDisallowConditionalTypesContext = oldInDisallowConditionalTypesContext; + if (_base.state.error || (!_base.state.inDisallowConditionalTypesContext && _index.match.call(void 0, _types.TokenType.question))) { + _base.state.restoreFromSnapshot(snapshot); + } + } +} + +function tsParseTypeOperatorOrHigher() { + if ( + _util.isContextual.call(void 0, _keywords.ContextualKeyword._keyof) || + _util.isContextual.call(void 0, _keywords.ContextualKeyword._unique) || + _util.isContextual.call(void 0, _keywords.ContextualKeyword._readonly) + ) { + _index.next.call(void 0, ); + tsParseTypeOperatorOrHigher(); + } else if (_util.isContextual.call(void 0, _keywords.ContextualKeyword._infer)) { + tsParseInferType(); + } else { + const oldInDisallowConditionalTypesContext = _base.state.inDisallowConditionalTypesContext; + _base.state.inDisallowConditionalTypesContext = false; + tsParseArrayTypeOrHigher(); + _base.state.inDisallowConditionalTypesContext = oldInDisallowConditionalTypesContext; + } +} + +function tsParseIntersectionTypeOrHigher() { + _index.eat.call(void 0, _types.TokenType.bitwiseAND); + tsParseTypeOperatorOrHigher(); + if (_index.match.call(void 0, _types.TokenType.bitwiseAND)) { + while (_index.eat.call(void 0, _types.TokenType.bitwiseAND)) { + tsParseTypeOperatorOrHigher(); + } + } +} + +function tsParseUnionTypeOrHigher() { + _index.eat.call(void 0, _types.TokenType.bitwiseOR); + tsParseIntersectionTypeOrHigher(); + if (_index.match.call(void 0, _types.TokenType.bitwiseOR)) { + while (_index.eat.call(void 0, _types.TokenType.bitwiseOR)) { + tsParseIntersectionTypeOrHigher(); + } + } +} + +function tsIsStartOfFunctionType() { + if (_index.match.call(void 0, _types.TokenType.lessThan)) { + return true; + } + return _index.match.call(void 0, _types.TokenType.parenL) && tsLookaheadIsUnambiguouslyStartOfFunctionType(); +} + +function tsSkipParameterStart() { + if (_index.match.call(void 0, _types.TokenType.name) || _index.match.call(void 0, _types.TokenType._this)) { + _index.next.call(void 0, ); + return true; + } + // If this is a possible array/object destructure, walk to the matching bracket/brace. + // The next token after will tell us definitively whether this is a function param. 
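To make the skip-ahead heuristic concrete (an editorial note, not upstream content), these are the kinds of inputs it is disambiguating; all names are illustrative:

type Handler = (event: string) => void;        // "(" then a name then ":" — a function type
type Render = ({width, height}: Box) => void;  // destructured parameter: skip to ")" and check for "=>"
type Group = (string | number)[];              // just a parenthesized type, not a function type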
+ if (_index.match.call(void 0, _types.TokenType.braceL) || _index.match.call(void 0, _types.TokenType.bracketL)) { + let depth = 1; + _index.next.call(void 0, ); + while (depth > 0 && !_base.state.error) { + if (_index.match.call(void 0, _types.TokenType.braceL) || _index.match.call(void 0, _types.TokenType.bracketL)) { + depth++; + } else if (_index.match.call(void 0, _types.TokenType.braceR) || _index.match.call(void 0, _types.TokenType.bracketR)) { + depth--; + } + _index.next.call(void 0, ); + } + return true; + } + return false; +} + +function tsLookaheadIsUnambiguouslyStartOfFunctionType() { + const snapshot = _base.state.snapshot(); + const isUnambiguouslyStartOfFunctionType = tsIsUnambiguouslyStartOfFunctionType(); + _base.state.restoreFromSnapshot(snapshot); + return isUnambiguouslyStartOfFunctionType; +} + +function tsIsUnambiguouslyStartOfFunctionType() { + _index.next.call(void 0, ); + if (_index.match.call(void 0, _types.TokenType.parenR) || _index.match.call(void 0, _types.TokenType.ellipsis)) { + // ( ) + // ( ... + return true; + } + if (tsSkipParameterStart()) { + if (_index.match.call(void 0, _types.TokenType.colon) || _index.match.call(void 0, _types.TokenType.comma) || _index.match.call(void 0, _types.TokenType.question) || _index.match.call(void 0, _types.TokenType.eq)) { + // ( xxx : + // ( xxx , + // ( xxx ? + // ( xxx = + return true; + } + if (_index.match.call(void 0, _types.TokenType.parenR)) { + _index.next.call(void 0, ); + if (_index.match.call(void 0, _types.TokenType.arrow)) { + // ( xxx ) => + return true; + } + } + } + return false; +} + +function tsParseTypeOrTypePredicateAnnotation(returnToken) { + const oldIsType = _index.pushTypeContext.call(void 0, 0); + _util.expect.call(void 0, returnToken); + const finishedReturn = tsParseTypePredicateOrAssertsPrefix(); + if (!finishedReturn) { + tsParseType(); + } + _index.popTypeContext.call(void 0, oldIsType); +} + +function tsTryParseTypeOrTypePredicateAnnotation() { + if (_index.match.call(void 0, _types.TokenType.colon)) { + tsParseTypeOrTypePredicateAnnotation(_types.TokenType.colon); + } +} + + function tsTryParseTypeAnnotation() { + if (_index.match.call(void 0, _types.TokenType.colon)) { + tsParseTypeAnnotation(); + } +} exports.tsTryParseTypeAnnotation = tsTryParseTypeAnnotation; + +function tsTryParseType() { + if (_index.eat.call(void 0, _types.TokenType.colon)) { + tsParseType(); + } +} + +/** + * Detect a few special return syntax cases: `x is T`, `asserts x`, `asserts x is T`, + * `asserts this is T`. + * + * Returns true if we parsed the return type, false if there's still a type to be parsed. + */ +function tsParseTypePredicateOrAssertsPrefix() { + const snapshot = _base.state.snapshot(); + if (_util.isContextual.call(void 0, _keywords.ContextualKeyword._asserts)) { + // Normally this is `asserts x is T`, but at this point, it might be `asserts is T` (a user- + // defined type guard on the `asserts` variable) or just a type called `asserts`. + _index.next.call(void 0, ); + if (_util.eatContextual.call(void 0, _keywords.ContextualKeyword._is)) { + // If we see `asserts is`, then this must be of the form `asserts is T`, since + // `asserts is is T` isn't valid. + tsParseType(); + return true; + } else if (tsIsIdentifier() || _index.match.call(void 0, _types.TokenType._this)) { + _index.next.call(void 0, ); + if (_util.eatContextual.call(void 0, _keywords.ContextualKeyword._is)) { + // If we see `is`, then this is `asserts x is T`. Otherwise, it's `asserts x`. 
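The return-type forms handled in this branch look like the following (an editorial illustration, not upstream content; the function bodies are stubs):

function isString(x: unknown): x is string {                   // type predicate: `x is T`
  return typeof x === "string";
}
function assertPresent<T>(x: T): asserts x is NonNullable<T> { // assertion: `asserts x is T`
  if (x == null) throw new Error("missing value");
}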
+ tsParseType(); + } + return true; + } else { + // Regular type, so bail out and start type parsing from scratch. + _base.state.restoreFromSnapshot(snapshot); + return false; + } + } else if (tsIsIdentifier() || _index.match.call(void 0, _types.TokenType._this)) { + // This is a regular identifier, which may or may not have "is" after it. + _index.next.call(void 0, ); + if (_util.isContextual.call(void 0, _keywords.ContextualKeyword._is) && !_util.hasPrecedingLineBreak.call(void 0, )) { + _index.next.call(void 0, ); + tsParseType(); + return true; + } else { + // Regular type, so bail out and start type parsing from scratch. + _base.state.restoreFromSnapshot(snapshot); + return false; + } + } + return false; +} + + function tsParseTypeAnnotation() { + const oldIsType = _index.pushTypeContext.call(void 0, 0); + _util.expect.call(void 0, _types.TokenType.colon); + tsParseType(); + _index.popTypeContext.call(void 0, oldIsType); +} exports.tsParseTypeAnnotation = tsParseTypeAnnotation; + + function tsParseType() { + tsParseNonConditionalType(); + if (_base.state.inDisallowConditionalTypesContext || _util.hasPrecedingLineBreak.call(void 0, ) || !_index.eat.call(void 0, _types.TokenType._extends)) { + return; + } + // extends type + const oldInDisallowConditionalTypesContext = _base.state.inDisallowConditionalTypesContext; + _base.state.inDisallowConditionalTypesContext = true; + tsParseNonConditionalType(); + _base.state.inDisallowConditionalTypesContext = oldInDisallowConditionalTypesContext; + + _util.expect.call(void 0, _types.TokenType.question); + // true type + tsParseType(); + _util.expect.call(void 0, _types.TokenType.colon); + // false type + tsParseType(); +} exports.tsParseType = tsParseType; + +function isAbstractConstructorSignature() { + return _util.isContextual.call(void 0, _keywords.ContextualKeyword._abstract) && _index.lookaheadType.call(void 0, ) === _types.TokenType._new; +} + + function tsParseNonConditionalType() { + if (tsIsStartOfFunctionType()) { + tsParseFunctionOrConstructorType(FunctionType.TSFunctionType); + return; + } + if (_index.match.call(void 0, _types.TokenType._new)) { + // As in `new () => Date` + tsParseFunctionOrConstructorType(FunctionType.TSConstructorType); + return; + } else if (isAbstractConstructorSignature()) { + // As in `abstract new () => Date` + tsParseFunctionOrConstructorType(FunctionType.TSAbstractConstructorType); + return; + } + tsParseUnionTypeOrHigher(); +} exports.tsParseNonConditionalType = tsParseNonConditionalType; + + function tsParseTypeAssertion() { + const oldIsType = _index.pushTypeContext.call(void 0, 1); + tsParseType(); + _util.expect.call(void 0, _types.TokenType.greaterThan); + _index.popTypeContext.call(void 0, oldIsType); + _expression.parseMaybeUnary.call(void 0, ); +} exports.tsParseTypeAssertion = tsParseTypeAssertion; + + function tsTryParseJSXTypeArgument() { + if (_index.eat.call(void 0, _types.TokenType.jsxTagStart)) { + _base.state.tokens[_base.state.tokens.length - 1].type = _types.TokenType.typeParameterStart; + const oldIsType = _index.pushTypeContext.call(void 0, 1); + while (!_index.match.call(void 0, _types.TokenType.greaterThan) && !_base.state.error) { + tsParseType(); + _index.eat.call(void 0, _types.TokenType.comma); + } + // Process >, but the one after needs to be parsed JSX-style. 
+ _jsx.nextJSXTagToken.call(void 0, ); + _index.popTypeContext.call(void 0, oldIsType); + } +} exports.tsTryParseJSXTypeArgument = tsTryParseJSXTypeArgument; + +function tsParseHeritageClause() { + while (!_index.match.call(void 0, _types.TokenType.braceL) && !_base.state.error) { + tsParseExpressionWithTypeArguments(); + _index.eat.call(void 0, _types.TokenType.comma); + } +} + +function tsParseExpressionWithTypeArguments() { + // Note: TS uses parseLeftHandSideExpressionOrHigher, + // then has grammar errors later if it's not an EntityName. + tsParseEntityName(); + if (_index.match.call(void 0, _types.TokenType.lessThan)) { + tsParseTypeArguments(); + } +} + +function tsParseInterfaceDeclaration() { + _lval.parseBindingIdentifier.call(void 0, false); + tsTryParseTypeParameters(); + if (_index.eat.call(void 0, _types.TokenType._extends)) { + tsParseHeritageClause(); + } + tsParseObjectTypeMembers(); +} + +function tsParseTypeAliasDeclaration() { + _lval.parseBindingIdentifier.call(void 0, false); + tsTryParseTypeParameters(); + _util.expect.call(void 0, _types.TokenType.eq); + tsParseType(); + _util.semicolon.call(void 0, ); +} + +function tsParseEnumMember() { + // Computed property names are grammar errors in an enum, so accept just string literal or identifier. + if (_index.match.call(void 0, _types.TokenType.string)) { + _expression.parseLiteral.call(void 0, ); + } else { + _expression.parseIdentifier.call(void 0, ); + } + if (_index.eat.call(void 0, _types.TokenType.eq)) { + const eqIndex = _base.state.tokens.length - 1; + _expression.parseMaybeAssign.call(void 0, ); + _base.state.tokens[eqIndex].rhsEndIndex = _base.state.tokens.length; + } +} + +function tsParseEnumDeclaration() { + _lval.parseBindingIdentifier.call(void 0, false); + _util.expect.call(void 0, _types.TokenType.braceL); + while (!_index.eat.call(void 0, _types.TokenType.braceR) && !_base.state.error) { + tsParseEnumMember(); + _index.eat.call(void 0, _types.TokenType.comma); + } +} + +function tsParseModuleBlock() { + _util.expect.call(void 0, _types.TokenType.braceL); + _statement.parseBlockBody.call(void 0, /* end */ _types.TokenType.braceR); +} + +function tsParseModuleOrNamespaceDeclaration() { + _lval.parseBindingIdentifier.call(void 0, false); + if (_index.eat.call(void 0, _types.TokenType.dot)) { + tsParseModuleOrNamespaceDeclaration(); + } else { + tsParseModuleBlock(); + } +} + +function tsParseAmbientExternalModuleDeclaration() { + if (_util.isContextual.call(void 0, _keywords.ContextualKeyword._global)) { + _expression.parseIdentifier.call(void 0, ); + } else if (_index.match.call(void 0, _types.TokenType.string)) { + _expression.parseExprAtom.call(void 0, ); + } else { + _util.unexpected.call(void 0, ); + } + + if (_index.match.call(void 0, _types.TokenType.braceL)) { + tsParseModuleBlock(); + } else { + _util.semicolon.call(void 0, ); + } +} + + function tsParseImportEqualsDeclaration() { + _lval.parseImportedIdentifier.call(void 0, ); + _util.expect.call(void 0, _types.TokenType.eq); + tsParseModuleReference(); + _util.semicolon.call(void 0, ); +} exports.tsParseImportEqualsDeclaration = tsParseImportEqualsDeclaration; + +function tsIsExternalModuleReference() { + return _util.isContextual.call(void 0, _keywords.ContextualKeyword._require) && _index.lookaheadType.call(void 0, ) === _types.TokenType.parenL; +} + +function tsParseModuleReference() { + if (tsIsExternalModuleReference()) { + tsParseExternalModuleReference(); + } else { + tsParseEntityName(); + } +} + +function 
tsParseExternalModuleReference() { + _util.expectContextual.call(void 0, _keywords.ContextualKeyword._require); + _util.expect.call(void 0, _types.TokenType.parenL); + if (!_index.match.call(void 0, _types.TokenType.string)) { + _util.unexpected.call(void 0, ); + } + _expression.parseLiteral.call(void 0, ); + _util.expect.call(void 0, _types.TokenType.parenR); +} + +// Utilities + +// Returns true if a statement matched. +function tsTryParseDeclare() { + if (_util.isLineTerminator.call(void 0, )) { + return false; + } + switch (_base.state.type) { + case _types.TokenType._function: { + const oldIsType = _index.pushTypeContext.call(void 0, 1); + _index.next.call(void 0, ); + // We don't need to precisely get the function start here, since it's only used to mark + // the function as a type if it's bodiless, and it's already a type here. + const functionStart = _base.state.start; + _statement.parseFunction.call(void 0, functionStart, /* isStatement */ true); + _index.popTypeContext.call(void 0, oldIsType); + return true; + } + case _types.TokenType._class: { + const oldIsType = _index.pushTypeContext.call(void 0, 1); + _statement.parseClass.call(void 0, /* isStatement */ true, /* optionalId */ false); + _index.popTypeContext.call(void 0, oldIsType); + return true; + } + case _types.TokenType._const: { + if (_index.match.call(void 0, _types.TokenType._const) && _util.isLookaheadContextual.call(void 0, _keywords.ContextualKeyword._enum)) { + const oldIsType = _index.pushTypeContext.call(void 0, 1); + // `const enum = 0;` not allowed because "enum" is a strict mode reserved word. + _util.expect.call(void 0, _types.TokenType._const); + _util.expectContextual.call(void 0, _keywords.ContextualKeyword._enum); + _base.state.tokens[_base.state.tokens.length - 1].type = _types.TokenType._enum; + tsParseEnumDeclaration(); + _index.popTypeContext.call(void 0, oldIsType); + return true; + } + } + // falls through + case _types.TokenType._var: + case _types.TokenType._let: { + const oldIsType = _index.pushTypeContext.call(void 0, 1); + _statement.parseVarStatement.call(void 0, _base.state.type !== _types.TokenType._var); + _index.popTypeContext.call(void 0, oldIsType); + return true; + } + case _types.TokenType.name: { + const oldIsType = _index.pushTypeContext.call(void 0, 1); + const contextualKeyword = _base.state.contextualKeyword; + let matched = false; + if (contextualKeyword === _keywords.ContextualKeyword._global) { + tsParseAmbientExternalModuleDeclaration(); + matched = true; + } else { + matched = tsParseDeclaration(contextualKeyword, /* isBeforeToken */ true); + } + _index.popTypeContext.call(void 0, oldIsType); + return matched; + } + default: + return false; + } +} + +// Note: this won't be called unless the keyword is allowed in `shouldParseExportDeclaration`. +// Returns true if it matched a declaration. +function tsTryParseExportDeclaration() { + return tsParseDeclaration(_base.state.contextualKeyword, /* isBeforeToken */ true); +} + +// Returns true if it matched a statement. +function tsParseExpressionStatement(contextualKeyword) { + switch (contextualKeyword) { + case _keywords.ContextualKeyword._declare: { + const declareTokenIndex = _base.state.tokens.length - 1; + const matched = tsTryParseDeclare(); + if (matched) { + _base.state.tokens[declareTokenIndex].type = _types.TokenType._declare; + return true; + } + break; + } + case _keywords.ContextualKeyword._global: + // `global { }` (with no `declare`) may appear inside an ambient module declaration. 
+ // Would like to use tsParseAmbientExternalModuleDeclaration here, but already ran past "global". + if (_index.match.call(void 0, _types.TokenType.braceL)) { + tsParseModuleBlock(); + return true; + } + break; + + default: + return tsParseDeclaration(contextualKeyword, /* isBeforeToken */ false); + } + return false; +} + +/** + * Common code for parsing a declaration. + * + * isBeforeToken indicates that the current parser state is at the contextual + * keyword (and that it is not yet emitted) rather than reading the token after + * it. When isBeforeToken is true, we may be preceded by an `export` token and + * should include that token in a type context we create, e.g. to handle + * `export interface` or `export type`. (This is a bit of a hack and should be + * cleaned up at some point.) + * + * Returns true if it matched a declaration. + */ +function tsParseDeclaration(contextualKeyword, isBeforeToken) { + switch (contextualKeyword) { + case _keywords.ContextualKeyword._abstract: + if (tsCheckLineTerminator(isBeforeToken) && _index.match.call(void 0, _types.TokenType._class)) { + _base.state.tokens[_base.state.tokens.length - 1].type = _types.TokenType._abstract; + _statement.parseClass.call(void 0, /* isStatement */ true, /* optionalId */ false); + return true; + } + break; + + case _keywords.ContextualKeyword._enum: + if (tsCheckLineTerminator(isBeforeToken) && _index.match.call(void 0, _types.TokenType.name)) { + _base.state.tokens[_base.state.tokens.length - 1].type = _types.TokenType._enum; + tsParseEnumDeclaration(); + return true; + } + break; + + case _keywords.ContextualKeyword._interface: + if (tsCheckLineTerminator(isBeforeToken) && _index.match.call(void 0, _types.TokenType.name)) { + // `next` is true in "export" and "declare" contexts, so we want to remove that token + // as well. + const oldIsType = _index.pushTypeContext.call(void 0, isBeforeToken ? 2 : 1); + tsParseInterfaceDeclaration(); + _index.popTypeContext.call(void 0, oldIsType); + return true; + } + break; + + case _keywords.ContextualKeyword._module: + if (tsCheckLineTerminator(isBeforeToken)) { + if (_index.match.call(void 0, _types.TokenType.string)) { + const oldIsType = _index.pushTypeContext.call(void 0, isBeforeToken ? 2 : 1); + tsParseAmbientExternalModuleDeclaration(); + _index.popTypeContext.call(void 0, oldIsType); + return true; + } else if (_index.match.call(void 0, _types.TokenType.name)) { + const oldIsType = _index.pushTypeContext.call(void 0, isBeforeToken ? 2 : 1); + tsParseModuleOrNamespaceDeclaration(); + _index.popTypeContext.call(void 0, oldIsType); + return true; + } + } + break; + + case _keywords.ContextualKeyword._namespace: + if (tsCheckLineTerminator(isBeforeToken) && _index.match.call(void 0, _types.TokenType.name)) { + const oldIsType = _index.pushTypeContext.call(void 0, isBeforeToken ? 2 : 1); + tsParseModuleOrNamespaceDeclaration(); + _index.popTypeContext.call(void 0, oldIsType); + return true; + } + break; + + case _keywords.ContextualKeyword._type: + if (tsCheckLineTerminator(isBeforeToken) && _index.match.call(void 0, _types.TokenType.name)) { + const oldIsType = _index.pushTypeContext.call(void 0, isBeforeToken ? 2 : 1); + tsParseTypeAliasDeclaration(); + _index.popTypeContext.call(void 0, oldIsType); + return true; + } + break; + + default: + break; + } + return false; +} + +function tsCheckLineTerminator(isBeforeToken) { + if (isBeforeToken) { + // Babel checks hasFollowingLineBreak here and returns false, but this + // doesn't actually come up, e.g. 
`export interface` can never be on its own
+ // line in valid code.
+ _index.next.call(void 0, );
+ return true;
+ } else {
+ return !_util.isLineTerminator.call(void 0, );
+ }
+}
+
+// Returns true if there was a generic async arrow function.
+function tsTryParseGenericAsyncArrowFunction() {
+ const snapshot = _base.state.snapshot();
+
+ tsParseTypeParameters();
+ _statement.parseFunctionParams.call(void 0, );
+ tsTryParseTypeOrTypePredicateAnnotation();
+ _util.expect.call(void 0, _types.TokenType.arrow);
+
+ if (_base.state.error) {
+ _base.state.restoreFromSnapshot(snapshot);
+ return false;
+ }
+
+ _expression.parseFunctionBody.call(void 0, true);
+ return true;
+}
+
+/**
+ * If necessary, hack the tokenizer state so that this bitshift was actually a
+ * less-than token, then keep parsing. This should only be used in situations
+ * where we restore from snapshot on error (which reverts this change) or
+ * where bitshift would be illegal anyway (e.g. in a class "extends" clause).
+ *
+ * This hack is useful to handle situations like foo<<T>() => void>() where
+ * there can legitimately be two open-angle-brackets in a row in TS.
+ */
+function tsParseTypeArgumentsWithPossibleBitshift() {
+ if (_base.state.type === _types.TokenType.bitShiftL) {
+ _base.state.pos -= 1;
+ _index.finishToken.call(void 0, _types.TokenType.lessThan);
+ }
+ tsParseTypeArguments();
+}
+
+function tsParseTypeArguments() {
+ const oldIsType = _index.pushTypeContext.call(void 0, 0);
+ _util.expect.call(void 0, _types.TokenType.lessThan);
+ while (!_index.eat.call(void 0, _types.TokenType.greaterThan) && !_base.state.error) {
+ tsParseType();
+ _index.eat.call(void 0, _types.TokenType.comma);
+ }
+ _index.popTypeContext.call(void 0, oldIsType);
+}
+
+ function tsIsDeclarationStart() {
+ if (_index.match.call(void 0, _types.TokenType.name)) {
+ switch (_base.state.contextualKeyword) {
+ case _keywords.ContextualKeyword._abstract:
+ case _keywords.ContextualKeyword._declare:
+ case _keywords.ContextualKeyword._enum:
+ case _keywords.ContextualKeyword._interface:
+ case _keywords.ContextualKeyword._module:
+ case _keywords.ContextualKeyword._namespace:
+ case _keywords.ContextualKeyword._type:
+ return true;
+ default:
+ break;
+ }
+ }
+
+ return false;
+} exports.tsIsDeclarationStart = tsIsDeclarationStart;
+
+// ======================================================
+// OVERRIDES
+// ======================================================
+
+ function tsParseFunctionBodyAndFinish(functionStart, funcContextId) {
+ // For arrow functions, `parseArrow` handles the return type itself.
+ if (_index.match.call(void 0, _types.TokenType.colon)) {
+ tsParseTypeOrTypePredicateAnnotation(_types.TokenType.colon);
+ }
+
+ // The original code checked the node type to make sure this function type allows a missing
+ // body, but we skip that to avoid sending around the node type. We instead just use the
+ // allowExpressionBody boolean to make sure it's not an arrow function.
+ if (!_index.match.call(void 0, _types.TokenType.braceL) && _util.isLineTerminator.call(void 0, )) {
+ // Retroactively mark the function declaration as a type.
+ let i = _base.state.tokens.length - 1;
+ while (
+ i >= 0 &&
+ (_base.state.tokens[i].start >= functionStart ||
+ _base.state.tokens[i].type === _types.TokenType._default ||
+ _base.state.tokens[i].type === _types.TokenType._export)
+ ) {
+ _base.state.tokens[i].isType = true;
+ i--;
+ }
+ return;
+ }
+
+ _expression.parseFunctionBody.call(void 0, false, funcContextId);
+} exports.tsParseFunctionBodyAndFinish = tsParseFunctionBodyAndFinish;
+
+ function tsParseSubscript(
+ startTokenIndex,
+ noCalls,
+ stopState,
+) {
+ if (!_util.hasPrecedingLineBreak.call(void 0, ) && _index.eat.call(void 0, _types.TokenType.bang)) {
+ _base.state.tokens[_base.state.tokens.length - 1].type = _types.TokenType.nonNullAssertion;
+ return;
+ }
+
+ if (_index.match.call(void 0, _types.TokenType.lessThan) || _index.match.call(void 0, _types.TokenType.bitShiftL)) {
+ // There are number of things we are going to "maybe" parse, like type arguments on
+ // tagged template expressions. If any of them fail, walk it back and continue.
+ const snapshot = _base.state.snapshot();
+
+ if (!noCalls && _expression.atPossibleAsync.call(void 0, )) {
+ // Almost certainly this is a generic async function `async <T>() => ...
+ // But it might be a call with a type argument `async<T>();`
+ const asyncArrowFn = tsTryParseGenericAsyncArrowFunction();
+ if (asyncArrowFn) {
+ return;
+ }
+ }
+ tsParseTypeArgumentsWithPossibleBitshift();
+ if (!noCalls && _index.eat.call(void 0, _types.TokenType.parenL)) {
+ // With f<T>(), the subscriptStartIndex marker is on the ( token.
+ _base.state.tokens[_base.state.tokens.length - 1].subscriptStartIndex = startTokenIndex;
+ _expression.parseCallExpressionArguments.call(void 0, );
+ } else if (_index.match.call(void 0, _types.TokenType.backQuote)) {
+ // Tagged template with a type argument.
+ _expression.parseTemplate.call(void 0, );
+ } else if (
+ // The remaining possible case is an instantiation expression, e.g.
+ // Array<number> . Check for a few cases that would disqualify it and
+ // cause us to bail out.
+ // a<b>>c is not (a<b>)>c, but a<(b>>c)
+ _base.state.type === _types.TokenType.greaterThan ||
+ // a<b>c is (a<b)>c
+ (_base.state.type !== _types.TokenType.parenL &&
+ Boolean(_base.state.type & _types.TokenType.IS_EXPRESSION_START) &&
+ !_util.hasPrecedingLineBreak.call(void 0, ))
+ ) {
+ // Bail out. We have something like a<b>c, which is not an expression with
+ // type arguments but an (a < b) > c comparison.
+ _util.unexpected.call(void 0, );
+ }
+
+ if (_base.state.error) {
+ _base.state.restoreFromSnapshot(snapshot);
+ } else {
+ return;
+ }
+ } else if (!noCalls && _index.match.call(void 0, _types.TokenType.questionDot) && _index.lookaheadType.call(void 0, ) === _types.TokenType.lessThan) {
+ // If we see f?.<, then this must be an optional call with a type argument.
+ _index.next.call(void 0, );
+ _base.state.tokens[startTokenIndex].isOptionalChainStart = true;
+ // With f?.<T>(), the subscriptStartIndex marker is on the ?. token.
+ _base.state.tokens[_base.state.tokens.length - 1].subscriptStartIndex = startTokenIndex; + + tsParseTypeArguments(); + _util.expect.call(void 0, _types.TokenType.parenL); + _expression.parseCallExpressionArguments.call(void 0, ); + } + _expression.baseParseSubscript.call(void 0, startTokenIndex, noCalls, stopState); +} exports.tsParseSubscript = tsParseSubscript; + + function tsTryParseExport() { + if (_index.eat.call(void 0, _types.TokenType._import)) { + // One of these cases: + // export import A = B; + // export import type A = require("A"); + if (_util.isContextual.call(void 0, _keywords.ContextualKeyword._type) && _index.lookaheadType.call(void 0, ) !== _types.TokenType.eq) { + // Eat a `type` token, unless it's actually an identifier name. + _util.expectContextual.call(void 0, _keywords.ContextualKeyword._type); + } + tsParseImportEqualsDeclaration(); + return true; + } else if (_index.eat.call(void 0, _types.TokenType.eq)) { + // `export = x;` + _expression.parseExpression.call(void 0, ); + _util.semicolon.call(void 0, ); + return true; + } else if (_util.eatContextual.call(void 0, _keywords.ContextualKeyword._as)) { + // `export as namespace A;` + // See `parseNamespaceExportDeclaration` in TypeScript's own parser + _util.expectContextual.call(void 0, _keywords.ContextualKeyword._namespace); + _expression.parseIdentifier.call(void 0, ); + _util.semicolon.call(void 0, ); + return true; + } else { + if (_util.isContextual.call(void 0, _keywords.ContextualKeyword._type)) { + const nextType = _index.lookaheadType.call(void 0, ); + // export type {foo} from 'a'; + // export type * from 'a';' + // export type * as ns from 'a';' + if (nextType === _types.TokenType.braceL || nextType === _types.TokenType.star) { + _index.next.call(void 0, ); + } + } + return false; + } +} exports.tsTryParseExport = tsTryParseExport; + +/** + * Parse a TS import specifier, which may be prefixed with "type" and may be of + * the form `foo as bar`. + * + * The number of identifier-like tokens we see happens to be enough to uniquely + * identify the form, so simply count the number of identifiers rather than + * matching the words `type` or `as`. This is particularly important because + * `type` and `as` could each actually be plain identifiers rather than + * keywords. 
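+ *
+ * (Editor's illustrative addition, not part of the upstream sucrase source:
+ * the four specifier shapes distinguished by this identifier count are,
+ * in order, assuming a placeholder module path "./mod":
+ *   import {foo} from "./mod";              // 1 identifier
+ *   import {type foo} from "./mod";         // 2 identifiers
+ *   import {foo as bar} from "./mod";       // 3 identifiers
+ *   import {type foo as bar} from "./mod";  // 4 identifiers
+ * These match the case comments in the function body below.)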
+ */ + function tsParseImportSpecifier() { + _expression.parseIdentifier.call(void 0, ); + if (_index.match.call(void 0, _types.TokenType.comma) || _index.match.call(void 0, _types.TokenType.braceR)) { + // import {foo} + _base.state.tokens[_base.state.tokens.length - 1].identifierRole = _index.IdentifierRole.ImportDeclaration; + return; + } + _expression.parseIdentifier.call(void 0, ); + if (_index.match.call(void 0, _types.TokenType.comma) || _index.match.call(void 0, _types.TokenType.braceR)) { + // import {type foo} + _base.state.tokens[_base.state.tokens.length - 1].identifierRole = _index.IdentifierRole.ImportDeclaration; + _base.state.tokens[_base.state.tokens.length - 2].isType = true; + _base.state.tokens[_base.state.tokens.length - 1].isType = true; + return; + } + _expression.parseIdentifier.call(void 0, ); + if (_index.match.call(void 0, _types.TokenType.comma) || _index.match.call(void 0, _types.TokenType.braceR)) { + // import {foo as bar} + _base.state.tokens[_base.state.tokens.length - 3].identifierRole = _index.IdentifierRole.ImportAccess; + _base.state.tokens[_base.state.tokens.length - 1].identifierRole = _index.IdentifierRole.ImportDeclaration; + return; + } + _expression.parseIdentifier.call(void 0, ); + // import {type foo as bar} + _base.state.tokens[_base.state.tokens.length - 3].identifierRole = _index.IdentifierRole.ImportAccess; + _base.state.tokens[_base.state.tokens.length - 1].identifierRole = _index.IdentifierRole.ImportDeclaration; + _base.state.tokens[_base.state.tokens.length - 4].isType = true; + _base.state.tokens[_base.state.tokens.length - 3].isType = true; + _base.state.tokens[_base.state.tokens.length - 2].isType = true; + _base.state.tokens[_base.state.tokens.length - 1].isType = true; +} exports.tsParseImportSpecifier = tsParseImportSpecifier; + +/** + * Just like named import specifiers, export specifiers can have from 1 to 4 + * tokens, inclusive, and the number of tokens determines the role of each token. 
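+ *
+ * (Editor's illustrative addition, not part of the upstream sucrase source:
+ * the analogous export specifier shapes are
+ *   export {foo};              // 1 token
+ *   export {type foo};         // 2 tokens
+ *   export {foo as bar};       // 3 tokens
+ *   export {type foo as bar};  // 4 tokens
+ * matching the case comments in the function body below.)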
+ */ + function tsParseExportSpecifier() { + _expression.parseIdentifier.call(void 0, ); + if (_index.match.call(void 0, _types.TokenType.comma) || _index.match.call(void 0, _types.TokenType.braceR)) { + // export {foo} + _base.state.tokens[_base.state.tokens.length - 1].identifierRole = _index.IdentifierRole.ExportAccess; + return; + } + _expression.parseIdentifier.call(void 0, ); + if (_index.match.call(void 0, _types.TokenType.comma) || _index.match.call(void 0, _types.TokenType.braceR)) { + // export {type foo} + _base.state.tokens[_base.state.tokens.length - 1].identifierRole = _index.IdentifierRole.ExportAccess; + _base.state.tokens[_base.state.tokens.length - 2].isType = true; + _base.state.tokens[_base.state.tokens.length - 1].isType = true; + return; + } + _expression.parseIdentifier.call(void 0, ); + if (_index.match.call(void 0, _types.TokenType.comma) || _index.match.call(void 0, _types.TokenType.braceR)) { + // export {foo as bar} + _base.state.tokens[_base.state.tokens.length - 3].identifierRole = _index.IdentifierRole.ExportAccess; + return; + } + _expression.parseIdentifier.call(void 0, ); + // export {type foo as bar} + _base.state.tokens[_base.state.tokens.length - 3].identifierRole = _index.IdentifierRole.ExportAccess; + _base.state.tokens[_base.state.tokens.length - 4].isType = true; + _base.state.tokens[_base.state.tokens.length - 3].isType = true; + _base.state.tokens[_base.state.tokens.length - 2].isType = true; + _base.state.tokens[_base.state.tokens.length - 1].isType = true; +} exports.tsParseExportSpecifier = tsParseExportSpecifier; + + function tsTryParseExportDefaultExpression() { + if (_util.isContextual.call(void 0, _keywords.ContextualKeyword._abstract) && _index.lookaheadType.call(void 0, ) === _types.TokenType._class) { + _base.state.type = _types.TokenType._abstract; + _index.next.call(void 0, ); // Skip "abstract" + _statement.parseClass.call(void 0, true, true); + return true; + } + if (_util.isContextual.call(void 0, _keywords.ContextualKeyword._interface)) { + // Make sure "export default" are considered type tokens so the whole thing is removed. 
+ const oldIsType = _index.pushTypeContext.call(void 0, 2); + tsParseDeclaration(_keywords.ContextualKeyword._interface, true); + _index.popTypeContext.call(void 0, oldIsType); + return true; + } + return false; +} exports.tsTryParseExportDefaultExpression = tsTryParseExportDefaultExpression; + + function tsTryParseStatementContent() { + if (_base.state.type === _types.TokenType._const) { + const ahead = _index.lookaheadTypeAndKeyword.call(void 0, ); + if (ahead.type === _types.TokenType.name && ahead.contextualKeyword === _keywords.ContextualKeyword._enum) { + _util.expect.call(void 0, _types.TokenType._const); + _util.expectContextual.call(void 0, _keywords.ContextualKeyword._enum); + _base.state.tokens[_base.state.tokens.length - 1].type = _types.TokenType._enum; + tsParseEnumDeclaration(); + return true; + } + } + return false; +} exports.tsTryParseStatementContent = tsTryParseStatementContent; + + function tsTryParseClassMemberWithIsStatic(isStatic) { + const memberStartIndexAfterStatic = _base.state.tokens.length; + tsParseModifiers([ + _keywords.ContextualKeyword._abstract, + _keywords.ContextualKeyword._readonly, + _keywords.ContextualKeyword._declare, + _keywords.ContextualKeyword._static, + _keywords.ContextualKeyword._override, + ]); + + const modifiersEndIndex = _base.state.tokens.length; + const found = tsTryParseIndexSignature(); + if (found) { + // Index signatures are type declarations, so set the modifier tokens as + // type tokens. Most tokens could be assumed to be type tokens, but `static` + // is ambiguous unless we set it explicitly here. + const memberStartIndex = isStatic + ? memberStartIndexAfterStatic - 1 + : memberStartIndexAfterStatic; + for (let i = memberStartIndex; i < modifiersEndIndex; i++) { + _base.state.tokens[i].isType = true; + } + return true; + } + return false; +} exports.tsTryParseClassMemberWithIsStatic = tsTryParseClassMemberWithIsStatic; + +// Note: The reason we do this in `parseIdentifierStatement` and not `parseStatement` +// is that e.g. `type()` is valid JS, so we must try parsing that first. +// If it's really a type, we will parse `type` as the statement, and can correct it here +// by parsing the rest. + function tsParseIdentifierStatement(contextualKeyword) { + const matched = tsParseExpressionStatement(contextualKeyword); + if (!matched) { + _util.semicolon.call(void 0, ); + } +} exports.tsParseIdentifierStatement = tsParseIdentifierStatement; + + function tsParseExportDeclaration() { + // "export declare" is equivalent to just "export". 
+ const isDeclare = _util.eatContextual.call(void 0, _keywords.ContextualKeyword._declare);
+ if (isDeclare) {
+ _base.state.tokens[_base.state.tokens.length - 1].type = _types.TokenType._declare;
+ }
+
+ let matchedDeclaration = false;
+ if (_index.match.call(void 0, _types.TokenType.name)) {
+ if (isDeclare) {
+ const oldIsType = _index.pushTypeContext.call(void 0, 2);
+ matchedDeclaration = tsTryParseExportDeclaration();
+ _index.popTypeContext.call(void 0, oldIsType);
+ } else {
+ matchedDeclaration = tsTryParseExportDeclaration();
+ }
+ }
+ if (!matchedDeclaration) {
+ if (isDeclare) {
+ const oldIsType = _index.pushTypeContext.call(void 0, 2);
+ _statement.parseStatement.call(void 0, true);
+ _index.popTypeContext.call(void 0, oldIsType);
+ } else {
+ _statement.parseStatement.call(void 0, true);
+ }
+ }
+} exports.tsParseExportDeclaration = tsParseExportDeclaration;
+
+ function tsAfterParseClassSuper(hasSuper) {
+ if (hasSuper && (_index.match.call(void 0, _types.TokenType.lessThan) || _index.match.call(void 0, _types.TokenType.bitShiftL))) {
+ tsParseTypeArgumentsWithPossibleBitshift();
+ }
+ if (_util.eatContextual.call(void 0, _keywords.ContextualKeyword._implements)) {
+ _base.state.tokens[_base.state.tokens.length - 1].type = _types.TokenType._implements;
+ const oldIsType = _index.pushTypeContext.call(void 0, 1);
+ tsParseHeritageClause();
+ _index.popTypeContext.call(void 0, oldIsType);
+ }
+} exports.tsAfterParseClassSuper = tsAfterParseClassSuper;
+
+ function tsStartParseObjPropValue() {
+ tsTryParseTypeParameters();
+} exports.tsStartParseObjPropValue = tsStartParseObjPropValue;
+
+ function tsStartParseFunctionParams() {
+ tsTryParseTypeParameters();
+} exports.tsStartParseFunctionParams = tsStartParseFunctionParams;
+
+// `let x: number;`
+ function tsAfterParseVarHead() {
+ const oldIsType = _index.pushTypeContext.call(void 0, 0);
+ if (!_util.hasPrecedingLineBreak.call(void 0, )) {
+ _index.eat.call(void 0, _types.TokenType.bang);
+ }
+ tsTryParseTypeAnnotation();
+ _index.popTypeContext.call(void 0, oldIsType);
+} exports.tsAfterParseVarHead = tsAfterParseVarHead;
+
+// parse the return type of an async arrow function - let foo = (async (): number => {});
+ function tsStartParseAsyncArrowFromCallExpression() {
+ if (_index.match.call(void 0, _types.TokenType.colon)) {
+ tsParseTypeAnnotation();
+ }
+} exports.tsStartParseAsyncArrowFromCallExpression = tsStartParseAsyncArrowFromCallExpression;
+
+// Returns true if the expression was an arrow function.
+ function tsParseMaybeAssign(noIn, isWithinParens) {
+ // Note: When the JSX plugin is on, type assertions (`<T> x`) aren't valid syntax.
+ if (_base.isJSXEnabled) {
+ return tsParseMaybeAssignWithJSX(noIn, isWithinParens);
+ } else {
+ return tsParseMaybeAssignWithoutJSX(noIn, isWithinParens);
+ }
+} exports.tsParseMaybeAssign = tsParseMaybeAssign;
+
+ function tsParseMaybeAssignWithJSX(noIn, isWithinParens) {
+ if (!_index.match.call(void 0, _types.TokenType.lessThan)) {
+ return _expression.baseParseMaybeAssign.call(void 0, noIn, isWithinParens);
+ }
+
+ // Prefer to parse JSX if possible. But may be an arrow fn.
+ const snapshot = _base.state.snapshot();
+ let wasArrow = _expression.baseParseMaybeAssign.call(void 0, noIn, isWithinParens);
+ if (_base.state.error) {
+ _base.state.restoreFromSnapshot(snapshot);
+ } else {
+ return wasArrow;
+ }
+
+ // Otherwise, try as type-parameterized arrow function.
+ _base.state.type = _types.TokenType.typeParameterStart; + // This is similar to TypeScript's `tryParseParenthesizedArrowFunctionExpression`. + tsParseTypeParameters(); + wasArrow = _expression.baseParseMaybeAssign.call(void 0, noIn, isWithinParens); + if (!wasArrow) { + _util.unexpected.call(void 0, ); + } + + return wasArrow; +} exports.tsParseMaybeAssignWithJSX = tsParseMaybeAssignWithJSX; + + function tsParseMaybeAssignWithoutJSX(noIn, isWithinParens) { + if (!_index.match.call(void 0, _types.TokenType.lessThan)) { + return _expression.baseParseMaybeAssign.call(void 0, noIn, isWithinParens); + } + + const snapshot = _base.state.snapshot(); + // This is similar to TypeScript's `tryParseParenthesizedArrowFunctionExpression`. + tsParseTypeParameters(); + const wasArrow = _expression.baseParseMaybeAssign.call(void 0, noIn, isWithinParens); + if (!wasArrow) { + _util.unexpected.call(void 0, ); + } + if (_base.state.error) { + _base.state.restoreFromSnapshot(snapshot); + } else { + return wasArrow; + } + + // Try parsing a type cast instead of an arrow function. + // This will start with a type assertion (via parseMaybeUnary). + // But don't directly call `tsParseTypeAssertion` because we want to handle any binary after it. + return _expression.baseParseMaybeAssign.call(void 0, noIn, isWithinParens); +} exports.tsParseMaybeAssignWithoutJSX = tsParseMaybeAssignWithoutJSX; + + function tsParseArrow() { + if (_index.match.call(void 0, _types.TokenType.colon)) { + // This is different from how the TS parser does it. + // TS uses lookahead. Babylon parses it as a parenthesized expression and converts. + const snapshot = _base.state.snapshot(); + + tsParseTypeOrTypePredicateAnnotation(_types.TokenType.colon); + if (_util.canInsertSemicolon.call(void 0, )) _util.unexpected.call(void 0, ); + if (!_index.match.call(void 0, _types.TokenType.arrow)) _util.unexpected.call(void 0, ); + + if (_base.state.error) { + _base.state.restoreFromSnapshot(snapshot); + } + } + return _index.eat.call(void 0, _types.TokenType.arrow); +} exports.tsParseArrow = tsParseArrow; + +// Allow type annotations inside of a parameter list. + function tsParseAssignableListItemTypes() { + const oldIsType = _index.pushTypeContext.call(void 0, 0); + _index.eat.call(void 0, _types.TokenType.question); + tsTryParseTypeAnnotation(); + _index.popTypeContext.call(void 0, oldIsType); +} exports.tsParseAssignableListItemTypes = tsParseAssignableListItemTypes; + + function tsParseMaybeDecoratorArguments() { + if (_index.match.call(void 0, _types.TokenType.lessThan) || _index.match.call(void 0, _types.TokenType.bitShiftL)) { + tsParseTypeArgumentsWithPossibleBitshift(); + } + _statement.baseParseMaybeDecoratorArguments.call(void 0, ); +} exports.tsParseMaybeDecoratorArguments = tsParseMaybeDecoratorArguments; diff --git a/node_modules/sucrase/dist/parser/tokenizer/index.js b/node_modules/sucrase/dist/parser/tokenizer/index.js new file mode 100644 index 0000000..61cb188 --- /dev/null +++ b/node_modules/sucrase/dist/parser/tokenizer/index.js @@ -0,0 +1,1001 @@ +"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; }/* eslint max-len: 0 */ + +var _base = require('../traverser/base'); +var _util = require('../traverser/util'); +var _charcodes = require('../util/charcodes'); +var _identifier = require('../util/identifier'); +var _whitespace = require('../util/whitespace'); +var _keywords = require('./keywords'); +var _readWord = require('./readWord'); var _readWord2 = _interopRequireDefault(_readWord); +var _types = require('./types'); + +var IdentifierRole; (function (IdentifierRole) { + const Access = 0; IdentifierRole[IdentifierRole["Access"] = Access] = "Access"; + const ExportAccess = Access + 1; IdentifierRole[IdentifierRole["ExportAccess"] = ExportAccess] = "ExportAccess"; + const TopLevelDeclaration = ExportAccess + 1; IdentifierRole[IdentifierRole["TopLevelDeclaration"] = TopLevelDeclaration] = "TopLevelDeclaration"; + const FunctionScopedDeclaration = TopLevelDeclaration + 1; IdentifierRole[IdentifierRole["FunctionScopedDeclaration"] = FunctionScopedDeclaration] = "FunctionScopedDeclaration"; + const BlockScopedDeclaration = FunctionScopedDeclaration + 1; IdentifierRole[IdentifierRole["BlockScopedDeclaration"] = BlockScopedDeclaration] = "BlockScopedDeclaration"; + const ObjectShorthandTopLevelDeclaration = BlockScopedDeclaration + 1; IdentifierRole[IdentifierRole["ObjectShorthandTopLevelDeclaration"] = ObjectShorthandTopLevelDeclaration] = "ObjectShorthandTopLevelDeclaration"; + const ObjectShorthandFunctionScopedDeclaration = ObjectShorthandTopLevelDeclaration + 1; IdentifierRole[IdentifierRole["ObjectShorthandFunctionScopedDeclaration"] = ObjectShorthandFunctionScopedDeclaration] = "ObjectShorthandFunctionScopedDeclaration"; + const ObjectShorthandBlockScopedDeclaration = ObjectShorthandFunctionScopedDeclaration + 1; IdentifierRole[IdentifierRole["ObjectShorthandBlockScopedDeclaration"] = ObjectShorthandBlockScopedDeclaration] = "ObjectShorthandBlockScopedDeclaration"; + const ObjectShorthand = ObjectShorthandBlockScopedDeclaration + 1; IdentifierRole[IdentifierRole["ObjectShorthand"] = ObjectShorthand] = "ObjectShorthand"; + // Any identifier bound in an import statement, e.g. both A and b from + // `import A, * as b from 'A';` + const ImportDeclaration = ObjectShorthand + 1; IdentifierRole[IdentifierRole["ImportDeclaration"] = ImportDeclaration] = "ImportDeclaration"; + const ObjectKey = ImportDeclaration + 1; IdentifierRole[IdentifierRole["ObjectKey"] = ObjectKey] = "ObjectKey"; + // The `foo` in `import {foo as bar} from "./abc";`. + const ImportAccess = ObjectKey + 1; IdentifierRole[IdentifierRole["ImportAccess"] = ImportAccess] = "ImportAccess"; +})(IdentifierRole || (exports.IdentifierRole = IdentifierRole = {})); + +/** + * Extra information on jsxTagStart tokens, used to determine which of the three + * jsx functions are called in the automatic transform. + */ +var JSXRole; (function (JSXRole) { + // The element is self-closing or has a body that resolves to empty. We + // shouldn't emit children at all in this case. + const NoChildren = 0; JSXRole[JSXRole["NoChildren"] = NoChildren] = "NoChildren"; + // The element has a single explicit child, which might still be an arbitrary + // expression like an array. We should emit that expression as the children. + const OneChild = NoChildren + 1; JSXRole[JSXRole["OneChild"] = OneChild] = "OneChild"; + // The element has at least two explicitly-specified children or has spread + // children, so child positions are assumed to be "static". We should wrap + // these children in an array. 
+ const StaticChildren = OneChild + 1; JSXRole[JSXRole["StaticChildren"] = StaticChildren] = "StaticChildren"; + // The element has a prop named "key" after a prop spread, so we should fall + // back to the createElement function. + const KeyAfterPropSpread = StaticChildren + 1; JSXRole[JSXRole["KeyAfterPropSpread"] = KeyAfterPropSpread] = "KeyAfterPropSpread"; +})(JSXRole || (exports.JSXRole = JSXRole = {})); + + function isDeclaration(token) { + const role = token.identifierRole; + return ( + role === IdentifierRole.TopLevelDeclaration || + role === IdentifierRole.FunctionScopedDeclaration || + role === IdentifierRole.BlockScopedDeclaration || + role === IdentifierRole.ObjectShorthandTopLevelDeclaration || + role === IdentifierRole.ObjectShorthandFunctionScopedDeclaration || + role === IdentifierRole.ObjectShorthandBlockScopedDeclaration + ); +} exports.isDeclaration = isDeclaration; + + function isNonTopLevelDeclaration(token) { + const role = token.identifierRole; + return ( + role === IdentifierRole.FunctionScopedDeclaration || + role === IdentifierRole.BlockScopedDeclaration || + role === IdentifierRole.ObjectShorthandFunctionScopedDeclaration || + role === IdentifierRole.ObjectShorthandBlockScopedDeclaration + ); +} exports.isNonTopLevelDeclaration = isNonTopLevelDeclaration; + + function isTopLevelDeclaration(token) { + const role = token.identifierRole; + return ( + role === IdentifierRole.TopLevelDeclaration || + role === IdentifierRole.ObjectShorthandTopLevelDeclaration || + role === IdentifierRole.ImportDeclaration + ); +} exports.isTopLevelDeclaration = isTopLevelDeclaration; + + function isBlockScopedDeclaration(token) { + const role = token.identifierRole; + // Treat top-level declarations as block scope since the distinction doesn't matter here. + return ( + role === IdentifierRole.TopLevelDeclaration || + role === IdentifierRole.BlockScopedDeclaration || + role === IdentifierRole.ObjectShorthandTopLevelDeclaration || + role === IdentifierRole.ObjectShorthandBlockScopedDeclaration + ); +} exports.isBlockScopedDeclaration = isBlockScopedDeclaration; + + function isFunctionScopedDeclaration(token) { + const role = token.identifierRole; + return ( + role === IdentifierRole.FunctionScopedDeclaration || + role === IdentifierRole.ObjectShorthandFunctionScopedDeclaration + ); +} exports.isFunctionScopedDeclaration = isFunctionScopedDeclaration; + + function isObjectShorthandDeclaration(token) { + return ( + token.identifierRole === IdentifierRole.ObjectShorthandTopLevelDeclaration || + token.identifierRole === IdentifierRole.ObjectShorthandBlockScopedDeclaration || + token.identifierRole === IdentifierRole.ObjectShorthandFunctionScopedDeclaration + ); +} exports.isObjectShorthandDeclaration = isObjectShorthandDeclaration; + +// Object type used to represent tokens. Note that normally, tokens +// simply exist as properties on the parser object. This is only +// used for the onToken callback and the external tokenizer. 
+ class Token { + constructor() { + this.type = _base.state.type; + this.contextualKeyword = _base.state.contextualKeyword; + this.start = _base.state.start; + this.end = _base.state.end; + this.scopeDepth = _base.state.scopeDepth; + this.isType = _base.state.isType; + this.identifierRole = null; + this.jsxRole = null; + this.shadowsGlobal = false; + this.isAsyncOperation = false; + this.contextId = null; + this.rhsEndIndex = null; + this.isExpression = false; + this.numNullishCoalesceStarts = 0; + this.numNullishCoalesceEnds = 0; + this.isOptionalChainStart = false; + this.isOptionalChainEnd = false; + this.subscriptStartIndex = null; + this.nullishStartIndex = null; + } + + + + + + + + + + // Initially false for all tokens, then may be computed in a follow-up step that does scope + // analysis. + + // Initially false for all tokens, but may be set during transform to mark it as containing an + // await operation. + + + // For assignments, the index of the RHS. For export tokens, the end of the export. + + // For class tokens, records if the class is a class expression or a class statement. + + // Number of times to insert a `nullishCoalesce(` snippet before this token. + + // Number of times to insert a `)` snippet after this token. + + // If true, insert an `optionalChain([` snippet before this token. + + // If true, insert a `])` snippet after this token. + + // Tag for `.`, `?.`, `[`, `?.[`, `(`, and `?.(` to denote the "root" token for this + // subscript chain. This can be used to determine if this chain is an optional chain. + + // Tag for `??` operators to denote the root token for this nullish coalescing call. + +} exports.Token = Token; + +// ## Tokenizer + +// Move to the next token + function next() { + _base.state.tokens.push(new Token()); + nextToken(); +} exports.next = next; + +// Call instead of next when inside a template, since that needs to be handled differently. + function nextTemplateToken() { + _base.state.tokens.push(new Token()); + _base.state.start = _base.state.pos; + readTmplToken(); +} exports.nextTemplateToken = nextTemplateToken; + +// The tokenizer never parses regexes by default. Instead, the parser is responsible for +// instructing it to parse a regex when we see a slash at the start of an expression. 
+ function retokenizeSlashAsRegex() { + if (_base.state.type === _types.TokenType.assign) { + --_base.state.pos; + } + readRegexp(); +} exports.retokenizeSlashAsRegex = retokenizeSlashAsRegex; + + function pushTypeContext(existingTokensInType) { + for (let i = _base.state.tokens.length - existingTokensInType; i < _base.state.tokens.length; i++) { + _base.state.tokens[i].isType = true; + } + const oldIsType = _base.state.isType; + _base.state.isType = true; + return oldIsType; +} exports.pushTypeContext = pushTypeContext; + + function popTypeContext(oldIsType) { + _base.state.isType = oldIsType; +} exports.popTypeContext = popTypeContext; + + function eat(type) { + if (match(type)) { + next(); + return true; + } else { + return false; + } +} exports.eat = eat; + + function eatTypeToken(tokenType) { + const oldIsType = _base.state.isType; + _base.state.isType = true; + eat(tokenType); + _base.state.isType = oldIsType; +} exports.eatTypeToken = eatTypeToken; + + function match(type) { + return _base.state.type === type; +} exports.match = match; + + function lookaheadType() { + const snapshot = _base.state.snapshot(); + next(); + const type = _base.state.type; + _base.state.restoreFromSnapshot(snapshot); + return type; +} exports.lookaheadType = lookaheadType; + + class TypeAndKeyword { + + + constructor(type, contextualKeyword) { + this.type = type; + this.contextualKeyword = contextualKeyword; + } +} exports.TypeAndKeyword = TypeAndKeyword; + + function lookaheadTypeAndKeyword() { + const snapshot = _base.state.snapshot(); + next(); + const type = _base.state.type; + const contextualKeyword = _base.state.contextualKeyword; + _base.state.restoreFromSnapshot(snapshot); + return new TypeAndKeyword(type, contextualKeyword); +} exports.lookaheadTypeAndKeyword = lookaheadTypeAndKeyword; + + function nextTokenStart() { + return nextTokenStartSince(_base.state.pos); +} exports.nextTokenStart = nextTokenStart; + + function nextTokenStartSince(pos) { + _whitespace.skipWhiteSpace.lastIndex = pos; + const skip = _whitespace.skipWhiteSpace.exec(_base.input); + return pos + skip[0].length; +} exports.nextTokenStartSince = nextTokenStartSince; + + function lookaheadCharCode() { + return _base.input.charCodeAt(nextTokenStart()); +} exports.lookaheadCharCode = lookaheadCharCode; + +// Read a single token, updating the parser object's token-related +// properties. + function nextToken() { + skipSpace(); + _base.state.start = _base.state.pos; + if (_base.state.pos >= _base.input.length) { + const tokens = _base.state.tokens; + // We normally run past the end a bit, but if we're way past the end, avoid an infinite loop. + // Also check the token positions rather than the types since sometimes we rewrite the token + // type to something else. + if ( + tokens.length >= 2 && + tokens[tokens.length - 1].start >= _base.input.length && + tokens[tokens.length - 2].start >= _base.input.length + ) { + _util.unexpected.call(void 0, "Unexpectedly reached the end of input."); + } + finishToken(_types.TokenType.eof); + return; + } + readToken(_base.input.charCodeAt(_base.state.pos)); +} exports.nextToken = nextToken; + +function readToken(code) { + // Identifier or keyword. '\uXXXX' sequences are allowed in + // identifiers, so '\' also dispatches to that. 
+ if ( + _identifier.IS_IDENTIFIER_START[code] || + code === _charcodes.charCodes.backslash || + (code === _charcodes.charCodes.atSign && _base.input.charCodeAt(_base.state.pos + 1) === _charcodes.charCodes.atSign) + ) { + _readWord2.default.call(void 0, ); + } else { + getTokenFromCode(code); + } +} + +function skipBlockComment() { + while ( + _base.input.charCodeAt(_base.state.pos) !== _charcodes.charCodes.asterisk || + _base.input.charCodeAt(_base.state.pos + 1) !== _charcodes.charCodes.slash + ) { + _base.state.pos++; + if (_base.state.pos > _base.input.length) { + _util.unexpected.call(void 0, "Unterminated comment", _base.state.pos - 2); + return; + } + } + _base.state.pos += 2; +} + + function skipLineComment(startSkip) { + let ch = _base.input.charCodeAt((_base.state.pos += startSkip)); + if (_base.state.pos < _base.input.length) { + while ( + ch !== _charcodes.charCodes.lineFeed && + ch !== _charcodes.charCodes.carriageReturn && + ch !== _charcodes.charCodes.lineSeparator && + ch !== _charcodes.charCodes.paragraphSeparator && + ++_base.state.pos < _base.input.length + ) { + ch = _base.input.charCodeAt(_base.state.pos); + } + } +} exports.skipLineComment = skipLineComment; + +// Called at the start of the parse and after every token. Skips +// whitespace and comments. + function skipSpace() { + while (_base.state.pos < _base.input.length) { + const ch = _base.input.charCodeAt(_base.state.pos); + switch (ch) { + case _charcodes.charCodes.carriageReturn: + if (_base.input.charCodeAt(_base.state.pos + 1) === _charcodes.charCodes.lineFeed) { + ++_base.state.pos; + } + + case _charcodes.charCodes.lineFeed: + case _charcodes.charCodes.lineSeparator: + case _charcodes.charCodes.paragraphSeparator: + ++_base.state.pos; + break; + + case _charcodes.charCodes.slash: + switch (_base.input.charCodeAt(_base.state.pos + 1)) { + case _charcodes.charCodes.asterisk: + _base.state.pos += 2; + skipBlockComment(); + break; + + case _charcodes.charCodes.slash: + skipLineComment(2); + break; + + default: + return; + } + break; + + default: + if (_whitespace.IS_WHITESPACE[ch]) { + ++_base.state.pos; + } else { + return; + } + } + } +} exports.skipSpace = skipSpace; + +// Called at the end of every token. Sets various fields, and skips the space after the token, so +// that the next one's `start` will point at the right position. + function finishToken( + type, + contextualKeyword = _keywords.ContextualKeyword.NONE, +) { + _base.state.end = _base.state.pos; + _base.state.type = type; + _base.state.contextualKeyword = contextualKeyword; +} exports.finishToken = finishToken; + +// ### Token reading + +// This is the function that is called to fetch the next token. It +// is somewhat obscure, because it works in character codes rather +// than characters, and because operator parsing has been inlined +// into it. +// +// All in the name of speed. 
+function readToken_dot() { + const nextChar = _base.input.charCodeAt(_base.state.pos + 1); + if (nextChar >= _charcodes.charCodes.digit0 && nextChar <= _charcodes.charCodes.digit9) { + readNumber(true); + return; + } + + if (nextChar === _charcodes.charCodes.dot && _base.input.charCodeAt(_base.state.pos + 2) === _charcodes.charCodes.dot) { + _base.state.pos += 3; + finishToken(_types.TokenType.ellipsis); + } else { + ++_base.state.pos; + finishToken(_types.TokenType.dot); + } +} + +function readToken_slash() { + const nextChar = _base.input.charCodeAt(_base.state.pos + 1); + if (nextChar === _charcodes.charCodes.equalsTo) { + finishOp(_types.TokenType.assign, 2); + } else { + finishOp(_types.TokenType.slash, 1); + } +} + +function readToken_mult_modulo(code) { + // '%*' + let tokenType = code === _charcodes.charCodes.asterisk ? _types.TokenType.star : _types.TokenType.modulo; + let width = 1; + let nextChar = _base.input.charCodeAt(_base.state.pos + 1); + + // Exponentiation operator ** + if (code === _charcodes.charCodes.asterisk && nextChar === _charcodes.charCodes.asterisk) { + width++; + nextChar = _base.input.charCodeAt(_base.state.pos + 2); + tokenType = _types.TokenType.exponent; + } + + // Match *= or %=, disallowing *=> which can be valid in flow. + if ( + nextChar === _charcodes.charCodes.equalsTo && + _base.input.charCodeAt(_base.state.pos + 2) !== _charcodes.charCodes.greaterThan + ) { + width++; + tokenType = _types.TokenType.assign; + } + + finishOp(tokenType, width); +} + +function readToken_pipe_amp(code) { + // '|&' + const nextChar = _base.input.charCodeAt(_base.state.pos + 1); + + if (nextChar === code) { + if (_base.input.charCodeAt(_base.state.pos + 2) === _charcodes.charCodes.equalsTo) { + // ||= or &&= + finishOp(_types.TokenType.assign, 3); + } else { + // || or && + finishOp(code === _charcodes.charCodes.verticalBar ? _types.TokenType.logicalOR : _types.TokenType.logicalAND, 2); + } + return; + } + + if (code === _charcodes.charCodes.verticalBar) { + // '|>' + if (nextChar === _charcodes.charCodes.greaterThan) { + finishOp(_types.TokenType.pipeline, 2); + return; + } else if (nextChar === _charcodes.charCodes.rightCurlyBrace && _base.isFlowEnabled) { + // '|}' + finishOp(_types.TokenType.braceBarR, 2); + return; + } + } + + if (nextChar === _charcodes.charCodes.equalsTo) { + finishOp(_types.TokenType.assign, 2); + return; + } + + finishOp(code === _charcodes.charCodes.verticalBar ? _types.TokenType.bitwiseOR : _types.TokenType.bitwiseAND, 1); +} + +function readToken_caret() { + // '^' + const nextChar = _base.input.charCodeAt(_base.state.pos + 1); + if (nextChar === _charcodes.charCodes.equalsTo) { + finishOp(_types.TokenType.assign, 2); + } else { + finishOp(_types.TokenType.bitwiseXOR, 1); + } +} + +function readToken_plus_min(code) { + // '+-' + const nextChar = _base.input.charCodeAt(_base.state.pos + 1); + + if (nextChar === code) { + // Tentatively call this a prefix operator, but it might be changed to postfix later. 
+ finishOp(_types.TokenType.preIncDec, 2);
+ return;
+ }
+
+ if (nextChar === _charcodes.charCodes.equalsTo) {
+ finishOp(_types.TokenType.assign, 2);
+ } else if (code === _charcodes.charCodes.plusSign) {
+ finishOp(_types.TokenType.plus, 1);
+ } else {
+ finishOp(_types.TokenType.minus, 1);
+ }
+}
+
+function readToken_lt() {
+ const nextChar = _base.input.charCodeAt(_base.state.pos + 1);
+
+ if (nextChar === _charcodes.charCodes.lessThan) {
+ if (_base.input.charCodeAt(_base.state.pos + 2) === _charcodes.charCodes.equalsTo) {
+ finishOp(_types.TokenType.assign, 3);
+ return;
+ }
+ // We see <<, but need to be really careful about whether to treat it as a
+ // true left-shift or as two < tokens.
+ if (_base.state.isType) {
+ // Within a type, << might come up in a snippet like `Array<<T>() => void>`,
+ // so treat it as two < tokens. Importantly, this should only override <<
+ // rather than other tokens like <= . If we treated <= as < in a type
+ // context, then the snippet `a as T <= 1` would incorrectly start parsing
+ // a type argument on T. We don't need to worry about `a as T << 1`
+ // because TypeScript disallows that syntax.
+ finishOp(_types.TokenType.lessThan, 1);
+ } else {
+ // Outside a type, this might be a true left-shift operator, or it might
+ // still be two open-type-arg tokens, such as in `f<<T>() => void>()`. We
+ // look at the token while considering the `f`, so we don't yet know that
+ // we're in a type context. In this case, we initially tokenize as a
+ // left-shift and correct after-the-fact as necessary in
+ // tsParseTypeArgumentsWithPossibleBitshift .
+ finishOp(_types.TokenType.bitShiftL, 2);
+ }
+ return;
+ }
+
+ if (nextChar === _charcodes.charCodes.equalsTo) {
+ // <=
+ finishOp(_types.TokenType.relationalOrEqual, 2);
+ } else {
+ finishOp(_types.TokenType.lessThan, 1);
+ }
+}
+
+function readToken_gt() {
+ if (_base.state.isType) {
+ // Avoid right-shift for things like `Array<Array<string>>` and
+ // greater-than-or-equal for things like `const a: Array<number>=[];`.
+ finishOp(_types.TokenType.greaterThan, 1);
+ return;
+ }
+
+ const nextChar = _base.input.charCodeAt(_base.state.pos + 1);
+
+ if (nextChar === _charcodes.charCodes.greaterThan) {
+ const size = _base.input.charCodeAt(_base.state.pos + 2) === _charcodes.charCodes.greaterThan ? 3 : 2;
+ if (_base.input.charCodeAt(_base.state.pos + size) === _charcodes.charCodes.equalsTo) {
+ finishOp(_types.TokenType.assign, size + 1);
+ return;
+ }
+ finishOp(_types.TokenType.bitShiftR, size);
+ return;
+ }
+
+ if (nextChar === _charcodes.charCodes.equalsTo) {
+ // >=
+ finishOp(_types.TokenType.relationalOrEqual, 2);
+ } else {
+ finishOp(_types.TokenType.greaterThan, 1);
+ }
+}
+
+/**
+ * Called after `as` expressions in TS; we're switching from a type to a
+ * non-type context, so a > token may actually be >= . This is needed because >=
+ * must be tokenized as a > in a type context because of code like
+ * `const x: Array<number>=[];`, but `a as T >= 1` is a code example where it must be
+ * treated as >=.
+ *
+ * Notably, this only applies to >, not <. In a code snippet like `a as T <= 1`,
+ * we must NOT tokenize as <, or else the type parser will start parsing a type
+ * argument and fail.
+ */
+ function rescan_gt() {
+ if (_base.state.type === _types.TokenType.greaterThan) {
+ _base.state.pos -= 1;
+ readToken_gt();
+ }
+} exports.rescan_gt = rescan_gt;
+
+function readToken_eq_excl(code) {
+ // '=!'
+ const nextChar = _base.input.charCodeAt(_base.state.pos + 1); + if (nextChar === _charcodes.charCodes.equalsTo) { + finishOp(_types.TokenType.equality, _base.input.charCodeAt(_base.state.pos + 2) === _charcodes.charCodes.equalsTo ? 3 : 2); + return; + } + if (code === _charcodes.charCodes.equalsTo && nextChar === _charcodes.charCodes.greaterThan) { + // '=>' + _base.state.pos += 2; + finishToken(_types.TokenType.arrow); + return; + } + finishOp(code === _charcodes.charCodes.equalsTo ? _types.TokenType.eq : _types.TokenType.bang, 1); +} + +function readToken_question() { + // '?' + const nextChar = _base.input.charCodeAt(_base.state.pos + 1); + const nextChar2 = _base.input.charCodeAt(_base.state.pos + 2); + if ( + nextChar === _charcodes.charCodes.questionMark && + // In Flow (but not TypeScript), ??string is a valid type that should be + // tokenized as two individual ? tokens. + !(_base.isFlowEnabled && _base.state.isType) + ) { + if (nextChar2 === _charcodes.charCodes.equalsTo) { + // '??=' + finishOp(_types.TokenType.assign, 3); + } else { + // '??' + finishOp(_types.TokenType.nullishCoalescing, 2); + } + } else if ( + nextChar === _charcodes.charCodes.dot && + !(nextChar2 >= _charcodes.charCodes.digit0 && nextChar2 <= _charcodes.charCodes.digit9) + ) { + // '.' not followed by a number + _base.state.pos += 2; + finishToken(_types.TokenType.questionDot); + } else { + ++_base.state.pos; + finishToken(_types.TokenType.question); + } +} + + function getTokenFromCode(code) { + switch (code) { + case _charcodes.charCodes.numberSign: + ++_base.state.pos; + finishToken(_types.TokenType.hash); + return; + + // The interpretation of a dot depends on whether it is followed + // by a digit or another two dots. + + case _charcodes.charCodes.dot: + readToken_dot(); + return; + + // Punctuation tokens. 
+ case _charcodes.charCodes.leftParenthesis: + ++_base.state.pos; + finishToken(_types.TokenType.parenL); + return; + case _charcodes.charCodes.rightParenthesis: + ++_base.state.pos; + finishToken(_types.TokenType.parenR); + return; + case _charcodes.charCodes.semicolon: + ++_base.state.pos; + finishToken(_types.TokenType.semi); + return; + case _charcodes.charCodes.comma: + ++_base.state.pos; + finishToken(_types.TokenType.comma); + return; + case _charcodes.charCodes.leftSquareBracket: + ++_base.state.pos; + finishToken(_types.TokenType.bracketL); + return; + case _charcodes.charCodes.rightSquareBracket: + ++_base.state.pos; + finishToken(_types.TokenType.bracketR); + return; + + case _charcodes.charCodes.leftCurlyBrace: + if (_base.isFlowEnabled && _base.input.charCodeAt(_base.state.pos + 1) === _charcodes.charCodes.verticalBar) { + finishOp(_types.TokenType.braceBarL, 2); + } else { + ++_base.state.pos; + finishToken(_types.TokenType.braceL); + } + return; + + case _charcodes.charCodes.rightCurlyBrace: + ++_base.state.pos; + finishToken(_types.TokenType.braceR); + return; + + case _charcodes.charCodes.colon: + if (_base.input.charCodeAt(_base.state.pos + 1) === _charcodes.charCodes.colon) { + finishOp(_types.TokenType.doubleColon, 2); + } else { + ++_base.state.pos; + finishToken(_types.TokenType.colon); + } + return; + + case _charcodes.charCodes.questionMark: + readToken_question(); + return; + case _charcodes.charCodes.atSign: + ++_base.state.pos; + finishToken(_types.TokenType.at); + return; + + case _charcodes.charCodes.graveAccent: + ++_base.state.pos; + finishToken(_types.TokenType.backQuote); + return; + + case _charcodes.charCodes.digit0: { + const nextChar = _base.input.charCodeAt(_base.state.pos + 1); + // '0x', '0X', '0o', '0O', '0b', '0B' + if ( + nextChar === _charcodes.charCodes.lowercaseX || + nextChar === _charcodes.charCodes.uppercaseX || + nextChar === _charcodes.charCodes.lowercaseO || + nextChar === _charcodes.charCodes.uppercaseO || + nextChar === _charcodes.charCodes.lowercaseB || + nextChar === _charcodes.charCodes.uppercaseB + ) { + readRadixNumber(); + return; + } + } + // Anything else beginning with a digit is an integer, octal + // number, or float. + case _charcodes.charCodes.digit1: + case _charcodes.charCodes.digit2: + case _charcodes.charCodes.digit3: + case _charcodes.charCodes.digit4: + case _charcodes.charCodes.digit5: + case _charcodes.charCodes.digit6: + case _charcodes.charCodes.digit7: + case _charcodes.charCodes.digit8: + case _charcodes.charCodes.digit9: + readNumber(false); + return; + + // Quotes produce strings. + case _charcodes.charCodes.quotationMark: + case _charcodes.charCodes.apostrophe: + readString(code); + return; + + // Operators are parsed inline in tiny state machines. '=' (charCodes.equalsTo) is + // often referred to. `finishOp` simply skips the amount of + // characters it is given as second argument, and returns a token + // of the type given by its first argument. 
+ + case _charcodes.charCodes.slash: + readToken_slash(); + return; + + case _charcodes.charCodes.percentSign: + case _charcodes.charCodes.asterisk: + readToken_mult_modulo(code); + return; + + case _charcodes.charCodes.verticalBar: + case _charcodes.charCodes.ampersand: + readToken_pipe_amp(code); + return; + + case _charcodes.charCodes.caret: + readToken_caret(); + return; + + case _charcodes.charCodes.plusSign: + case _charcodes.charCodes.dash: + readToken_plus_min(code); + return; + + case _charcodes.charCodes.lessThan: + readToken_lt(); + return; + + case _charcodes.charCodes.greaterThan: + readToken_gt(); + return; + + case _charcodes.charCodes.equalsTo: + case _charcodes.charCodes.exclamationMark: + readToken_eq_excl(code); + return; + + case _charcodes.charCodes.tilde: + finishOp(_types.TokenType.tilde, 1); + return; + + default: + break; + } + + _util.unexpected.call(void 0, `Unexpected character '${String.fromCharCode(code)}'`, _base.state.pos); +} exports.getTokenFromCode = getTokenFromCode; + +function finishOp(type, size) { + _base.state.pos += size; + finishToken(type); +} + +function readRegexp() { + const start = _base.state.pos; + let escaped = false; + let inClass = false; + for (;;) { + if (_base.state.pos >= _base.input.length) { + _util.unexpected.call(void 0, "Unterminated regular expression", start); + return; + } + const code = _base.input.charCodeAt(_base.state.pos); + if (escaped) { + escaped = false; + } else { + if (code === _charcodes.charCodes.leftSquareBracket) { + inClass = true; + } else if (code === _charcodes.charCodes.rightSquareBracket && inClass) { + inClass = false; + } else if (code === _charcodes.charCodes.slash && !inClass) { + break; + } + escaped = code === _charcodes.charCodes.backslash; + } + ++_base.state.pos; + } + ++_base.state.pos; + // Need to use `skipWord` because '\uXXXX' sequences are allowed here (don't ask). + skipWord(); + + finishToken(_types.TokenType.regexp); +} + +/** + * Read a decimal integer. Note that this can't be unified with the similar code + * in readRadixNumber (which also handles hex digits) because "e" needs to be + * the end of the integer so that we can properly handle scientific notation. + */ +function readInt() { + while (true) { + const code = _base.input.charCodeAt(_base.state.pos); + if ((code >= _charcodes.charCodes.digit0 && code <= _charcodes.charCodes.digit9) || code === _charcodes.charCodes.underscore) { + _base.state.pos++; + } else { + break; + } + } +} + +function readRadixNumber() { + _base.state.pos += 2; // 0x + + // Walk to the end of the number, allowing hex digits. + while (true) { + const code = _base.input.charCodeAt(_base.state.pos); + if ( + (code >= _charcodes.charCodes.digit0 && code <= _charcodes.charCodes.digit9) || + (code >= _charcodes.charCodes.lowercaseA && code <= _charcodes.charCodes.lowercaseF) || + (code >= _charcodes.charCodes.uppercaseA && code <= _charcodes.charCodes.uppercaseF) || + code === _charcodes.charCodes.underscore + ) { + _base.state.pos++; + } else { + break; + } + } + + const nextChar = _base.input.charCodeAt(_base.state.pos); + if (nextChar === _charcodes.charCodes.lowercaseN) { + ++_base.state.pos; + finishToken(_types.TokenType.bigint); + } else { + finishToken(_types.TokenType.num); + } +} + +// Read an integer, octal integer, or floating-point number. 
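+// (Editor's illustrative addition, not part of the upstream sucrase source:
+// examples of literal forms the code below accepts include `123`, `1_000`,
+// `.5`, `1e-10`, `123n` for bigint, and `1.5m` for the proposed decimal
+// syntax; the leading-dot form reaches readNumber via readToken_dot with
+// startsWithDot set to true.)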
+function readNumber(startsWithDot) { + let isBigInt = false; + let isDecimal = false; + + if (!startsWithDot) { + readInt(); + } + + let nextChar = _base.input.charCodeAt(_base.state.pos); + if (nextChar === _charcodes.charCodes.dot) { + ++_base.state.pos; + readInt(); + nextChar = _base.input.charCodeAt(_base.state.pos); + } + + if (nextChar === _charcodes.charCodes.uppercaseE || nextChar === _charcodes.charCodes.lowercaseE) { + nextChar = _base.input.charCodeAt(++_base.state.pos); + if (nextChar === _charcodes.charCodes.plusSign || nextChar === _charcodes.charCodes.dash) { + ++_base.state.pos; + } + readInt(); + nextChar = _base.input.charCodeAt(_base.state.pos); + } + + if (nextChar === _charcodes.charCodes.lowercaseN) { + ++_base.state.pos; + isBigInt = true; + } else if (nextChar === _charcodes.charCodes.lowercaseM) { + ++_base.state.pos; + isDecimal = true; + } + + if (isBigInt) { + finishToken(_types.TokenType.bigint); + return; + } + + if (isDecimal) { + finishToken(_types.TokenType.decimal); + return; + } + + finishToken(_types.TokenType.num); +} + +function readString(quote) { + _base.state.pos++; + for (;;) { + if (_base.state.pos >= _base.input.length) { + _util.unexpected.call(void 0, "Unterminated string constant"); + return; + } + const ch = _base.input.charCodeAt(_base.state.pos); + if (ch === _charcodes.charCodes.backslash) { + _base.state.pos++; + } else if (ch === quote) { + break; + } + _base.state.pos++; + } + _base.state.pos++; + finishToken(_types.TokenType.string); +} + +// Reads template string tokens. +function readTmplToken() { + for (;;) { + if (_base.state.pos >= _base.input.length) { + _util.unexpected.call(void 0, "Unterminated template"); + return; + } + const ch = _base.input.charCodeAt(_base.state.pos); + if ( + ch === _charcodes.charCodes.graveAccent || + (ch === _charcodes.charCodes.dollarSign && _base.input.charCodeAt(_base.state.pos + 1) === _charcodes.charCodes.leftCurlyBrace) + ) { + if (_base.state.pos === _base.state.start && match(_types.TokenType.template)) { + if (ch === _charcodes.charCodes.dollarSign) { + _base.state.pos += 2; + finishToken(_types.TokenType.dollarBraceL); + return; + } else { + ++_base.state.pos; + finishToken(_types.TokenType.backQuote); + return; + } + } + finishToken(_types.TokenType.template); + return; + } + if (ch === _charcodes.charCodes.backslash) { + _base.state.pos++; + } + _base.state.pos++; + } +} + +// Skip to the end of the current word. Note that this is the same as the snippet at the end of +// readWord, but calling skipWord from readWord seems to slightly hurt performance from some rough +// measurements. 
+ function skipWord() { + while (_base.state.pos < _base.input.length) { + const ch = _base.input.charCodeAt(_base.state.pos); + if (_identifier.IS_IDENTIFIER_CHAR[ch]) { + _base.state.pos++; + } else if (ch === _charcodes.charCodes.backslash) { + // \u + _base.state.pos += 2; + if (_base.input.charCodeAt(_base.state.pos) === _charcodes.charCodes.leftCurlyBrace) { + while ( + _base.state.pos < _base.input.length && + _base.input.charCodeAt(_base.state.pos) !== _charcodes.charCodes.rightCurlyBrace + ) { + _base.state.pos++; + } + _base.state.pos++; + } + } else { + break; + } + } +} exports.skipWord = skipWord; diff --git a/node_modules/sucrase/dist/parser/tokenizer/keywords.js b/node_modules/sucrase/dist/parser/tokenizer/keywords.js new file mode 100644 index 0000000..e741926 --- /dev/null +++ b/node_modules/sucrase/dist/parser/tokenizer/keywords.js @@ -0,0 +1,43 @@ +"use strict";Object.defineProperty(exports, "__esModule", {value: true});var ContextualKeyword; (function (ContextualKeyword) { + const NONE = 0; ContextualKeyword[ContextualKeyword["NONE"] = NONE] = "NONE"; + const _abstract = NONE + 1; ContextualKeyword[ContextualKeyword["_abstract"] = _abstract] = "_abstract"; + const _accessor = _abstract + 1; ContextualKeyword[ContextualKeyword["_accessor"] = _accessor] = "_accessor"; + const _as = _accessor + 1; ContextualKeyword[ContextualKeyword["_as"] = _as] = "_as"; + const _assert = _as + 1; ContextualKeyword[ContextualKeyword["_assert"] = _assert] = "_assert"; + const _asserts = _assert + 1; ContextualKeyword[ContextualKeyword["_asserts"] = _asserts] = "_asserts"; + const _async = _asserts + 1; ContextualKeyword[ContextualKeyword["_async"] = _async] = "_async"; + const _await = _async + 1; ContextualKeyword[ContextualKeyword["_await"] = _await] = "_await"; + const _checks = _await + 1; ContextualKeyword[ContextualKeyword["_checks"] = _checks] = "_checks"; + const _constructor = _checks + 1; ContextualKeyword[ContextualKeyword["_constructor"] = _constructor] = "_constructor"; + const _declare = _constructor + 1; ContextualKeyword[ContextualKeyword["_declare"] = _declare] = "_declare"; + const _enum = _declare + 1; ContextualKeyword[ContextualKeyword["_enum"] = _enum] = "_enum"; + const _exports = _enum + 1; ContextualKeyword[ContextualKeyword["_exports"] = _exports] = "_exports"; + const _from = _exports + 1; ContextualKeyword[ContextualKeyword["_from"] = _from] = "_from"; + const _get = _from + 1; ContextualKeyword[ContextualKeyword["_get"] = _get] = "_get"; + const _global = _get + 1; ContextualKeyword[ContextualKeyword["_global"] = _global] = "_global"; + const _implements = _global + 1; ContextualKeyword[ContextualKeyword["_implements"] = _implements] = "_implements"; + const _infer = _implements + 1; ContextualKeyword[ContextualKeyword["_infer"] = _infer] = "_infer"; + const _interface = _infer + 1; ContextualKeyword[ContextualKeyword["_interface"] = _interface] = "_interface"; + const _is = _interface + 1; ContextualKeyword[ContextualKeyword["_is"] = _is] = "_is"; + const _keyof = _is + 1; ContextualKeyword[ContextualKeyword["_keyof"] = _keyof] = "_keyof"; + const _mixins = _keyof + 1; ContextualKeyword[ContextualKeyword["_mixins"] = _mixins] = "_mixins"; + const _module = _mixins + 1; ContextualKeyword[ContextualKeyword["_module"] = _module] = "_module"; + const _namespace = _module + 1; ContextualKeyword[ContextualKeyword["_namespace"] = _namespace] = "_namespace"; + const _of = _namespace + 1; ContextualKeyword[ContextualKeyword["_of"] = _of] = "_of"; + const _opaque = _of 
+ 1; ContextualKeyword[ContextualKeyword["_opaque"] = _opaque] = "_opaque"; + const _out = _opaque + 1; ContextualKeyword[ContextualKeyword["_out"] = _out] = "_out"; + const _override = _out + 1; ContextualKeyword[ContextualKeyword["_override"] = _override] = "_override"; + const _private = _override + 1; ContextualKeyword[ContextualKeyword["_private"] = _private] = "_private"; + const _protected = _private + 1; ContextualKeyword[ContextualKeyword["_protected"] = _protected] = "_protected"; + const _proto = _protected + 1; ContextualKeyword[ContextualKeyword["_proto"] = _proto] = "_proto"; + const _public = _proto + 1; ContextualKeyword[ContextualKeyword["_public"] = _public] = "_public"; + const _readonly = _public + 1; ContextualKeyword[ContextualKeyword["_readonly"] = _readonly] = "_readonly"; + const _require = _readonly + 1; ContextualKeyword[ContextualKeyword["_require"] = _require] = "_require"; + const _satisfies = _require + 1; ContextualKeyword[ContextualKeyword["_satisfies"] = _satisfies] = "_satisfies"; + const _set = _satisfies + 1; ContextualKeyword[ContextualKeyword["_set"] = _set] = "_set"; + const _static = _set + 1; ContextualKeyword[ContextualKeyword["_static"] = _static] = "_static"; + const _symbol = _static + 1; ContextualKeyword[ContextualKeyword["_symbol"] = _symbol] = "_symbol"; + const _type = _symbol + 1; ContextualKeyword[ContextualKeyword["_type"] = _type] = "_type"; + const _unique = _type + 1; ContextualKeyword[ContextualKeyword["_unique"] = _unique] = "_unique"; + const _using = _unique + 1; ContextualKeyword[ContextualKeyword["_using"] = _using] = "_using"; +})(ContextualKeyword || (exports.ContextualKeyword = ContextualKeyword = {})); diff --git a/node_modules/sucrase/dist/parser/tokenizer/readWord.js b/node_modules/sucrase/dist/parser/tokenizer/readWord.js new file mode 100644 index 0000000..69ed5c9 --- /dev/null +++ b/node_modules/sucrase/dist/parser/tokenizer/readWord.js @@ -0,0 +1,64 @@ +"use strict";Object.defineProperty(exports, "__esModule", {value: true});var _base = require('../traverser/base'); +var _charcodes = require('../util/charcodes'); +var _identifier = require('../util/identifier'); +var _index = require('./index'); +var _readWordTree = require('./readWordTree'); +var _types = require('./types'); + +/** + * Read an identifier, producing either a name token or matching on one of the existing keywords. + * For performance, we pre-generate big decision tree that we traverse. Each node represents a + * prefix and has 27 values, where the first value is the token or contextual token, if any (-1 if + * not), and the other 26 values are the transitions to other nodes, or -1 to stop. 
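+ *
+ * Each transition stores the absolute offset of the next 27-value node (the root sits at offset
+ * 0, so the slot for a lowercase letter is READ_WORD_TREE[treePos + (code - charCodes.lowercaseA) + 1]).
+ * When the word ends on a node whose first value is non-negative, bit 0 of that value marks a real
+ * keyword TokenType (1) versus a ContextualKeyword (0), and value >>> 1 is the enum member itself.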
+ */ + function readWord() { + let treePos = 0; + let code = 0; + let pos = _base.state.pos; + while (pos < _base.input.length) { + code = _base.input.charCodeAt(pos); + if (code < _charcodes.charCodes.lowercaseA || code > _charcodes.charCodes.lowercaseZ) { + break; + } + const next = _readWordTree.READ_WORD_TREE[treePos + (code - _charcodes.charCodes.lowercaseA) + 1]; + if (next === -1) { + break; + } else { + treePos = next; + pos++; + } + } + + const keywordValue = _readWordTree.READ_WORD_TREE[treePos]; + if (keywordValue > -1 && !_identifier.IS_IDENTIFIER_CHAR[code]) { + _base.state.pos = pos; + if (keywordValue & 1) { + _index.finishToken.call(void 0, keywordValue >>> 1); + } else { + _index.finishToken.call(void 0, _types.TokenType.name, keywordValue >>> 1); + } + return; + } + + while (pos < _base.input.length) { + const ch = _base.input.charCodeAt(pos); + if (_identifier.IS_IDENTIFIER_CHAR[ch]) { + pos++; + } else if (ch === _charcodes.charCodes.backslash) { + // \u + pos += 2; + if (_base.input.charCodeAt(pos) === _charcodes.charCodes.leftCurlyBrace) { + while (pos < _base.input.length && _base.input.charCodeAt(pos) !== _charcodes.charCodes.rightCurlyBrace) { + pos++; + } + pos++; + } + } else if (ch === _charcodes.charCodes.atSign && _base.input.charCodeAt(pos + 1) === _charcodes.charCodes.atSign) { + pos += 2; + } else { + break; + } + } + _base.state.pos = pos; + _index.finishToken.call(void 0, _types.TokenType.name); +} exports.default = readWord; diff --git a/node_modules/sucrase/dist/parser/tokenizer/readWordTree.js b/node_modules/sucrase/dist/parser/tokenizer/readWordTree.js new file mode 100644 index 0000000..fcc7733 --- /dev/null +++ b/node_modules/sucrase/dist/parser/tokenizer/readWordTree.js @@ -0,0 +1,671 @@ +"use strict";Object.defineProperty(exports, "__esModule", {value: true});// Generated file, do not edit! Run "yarn generate" to re-generate this file. 
+var _keywords = require('./keywords'); +var _types = require('./types'); + +// prettier-ignore + const READ_WORD_TREE = new Int32Array([ + // "" + -1, 27, 783, 918, 1755, 2376, 2862, 3483, -1, 3699, -1, 4617, 4752, 4833, 5130, 5508, 5940, -1, 6480, 6939, 7749, 8181, 8451, 8613, -1, 8829, -1, + // "a" + -1, -1, 54, 243, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 432, -1, -1, -1, 675, -1, -1, -1, + // "ab" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 81, -1, -1, -1, -1, -1, -1, -1, + // "abs" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 108, -1, -1, -1, -1, -1, -1, + // "abst" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 135, -1, -1, -1, -1, -1, -1, -1, -1, + // "abstr" + -1, 162, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "abstra" + -1, -1, -1, 189, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "abstrac" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 216, -1, -1, -1, -1, -1, -1, + // "abstract" + _keywords.ContextualKeyword._abstract << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "ac" + -1, -1, -1, 270, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "acc" + -1, -1, -1, -1, -1, 297, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "acce" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 324, -1, -1, -1, -1, -1, -1, -1, + // "acces" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 351, -1, -1, -1, -1, -1, -1, -1, + // "access" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 378, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "accesso" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 405, -1, -1, -1, -1, -1, -1, -1, -1, + // "accessor" + _keywords.ContextualKeyword._accessor << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "as" + _keywords.ContextualKeyword._as << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 459, -1, -1, -1, -1, -1, 594, -1, + // "ass" + -1, -1, -1, -1, -1, 486, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "asse" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 513, -1, -1, -1, -1, -1, -1, -1, -1, + // "asser" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 540, -1, -1, -1, -1, -1, -1, + // "assert" + _keywords.ContextualKeyword._assert << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 567, -1, -1, -1, -1, -1, -1, -1, + // "asserts" + _keywords.ContextualKeyword._asserts << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "asy" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 621, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "asyn" + -1, -1, -1, 648, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "async" + _keywords.ContextualKeyword._async << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "aw" + -1, 702, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "awa" + -1, -1, -1, -1, -1, -1, -1, -1, -1, 729, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "awai" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 756, -1, -1, -1, -1, -1, -1, + // "await" + _keywords.ContextualKeyword._await << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "b" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 810, -1, -1, -1, -1, -1, -1, -1, -1, + // "br" + -1, -1, -1, -1, -1, 837, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "bre" + -1, 864, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "brea" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 891, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "break" + (_types.TokenType._break << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "c" + -1, 945, -1, -1, -1, -1, -1, -1, 1107, -1, -1, -1, 1242, -1, -1, 1350, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "ca" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 972, 1026, -1, -1, -1, -1, -1, -1, + // "cas" + -1, -1, -1, -1, -1, 999, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "case" + (_types.TokenType._case << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "cat" + -1, -1, -1, 1053, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "catc" + -1, -1, -1, -1, -1, -1, -1, -1, 1080, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "catch" + (_types.TokenType._catch << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "ch" + -1, -1, -1, -1, -1, 1134, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "che" + -1, -1, -1, 1161, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "chec" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1188, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "check" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1215, -1, -1, -1, -1, -1, -1, -1, + // "checks" + _keywords.ContextualKeyword._checks << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "cl" + -1, 1269, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "cla" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1296, -1, -1, -1, -1, -1, -1, -1, + // "clas" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1323, -1, -1, -1, -1, -1, -1, -1, + // "class" + (_types.TokenType._class << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "co" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1377, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "con" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1404, 1620, -1, -1, -1, -1, -1, -1, + // "cons" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1431, -1, 
-1, -1, -1, -1, -1, + // "const" + (_types.TokenType._const << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1458, -1, -1, -1, -1, -1, -1, -1, -1, + // "constr" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1485, -1, -1, -1, -1, -1, + // "constru" + -1, -1, -1, 1512, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "construc" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1539, -1, -1, -1, -1, -1, -1, + // "construct" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1566, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "constructo" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1593, -1, -1, -1, -1, -1, -1, -1, -1, + // "constructor" + _keywords.ContextualKeyword._constructor << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "cont" + -1, -1, -1, -1, -1, -1, -1, -1, -1, 1647, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "conti" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1674, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "contin" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1701, -1, -1, -1, -1, -1, + // "continu" + -1, -1, -1, -1, -1, 1728, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "continue" + (_types.TokenType._continue << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "d" + -1, -1, -1, -1, -1, 1782, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2349, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "de" + -1, -1, 1809, 1971, -1, -1, 2106, -1, -1, -1, -1, -1, 2241, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "deb" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1836, -1, -1, -1, -1, -1, + // "debu" + -1, -1, -1, -1, -1, -1, -1, 1863, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "debug" + -1, -1, -1, -1, -1, -1, -1, 1890, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "debugg" + -1, -1, -1, -1, -1, 1917, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "debugge" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1944, -1, -1, -1, -1, -1, -1, -1, -1, + // "debugger" + (_types.TokenType._debugger << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "dec" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1998, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "decl" + -1, 2025, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "decla" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2052, -1, -1, -1, -1, -1, -1, -1, -1, + // "declar" + -1, -1, -1, -1, -1, 2079, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "declare" + _keywords.ContextualKeyword._declare << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "def" + -1, 2133, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "defa" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 
-1, -1, -1, -1, -1, 2160, -1, -1, -1, -1, -1, + // "defau" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2187, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "defaul" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2214, -1, -1, -1, -1, -1, -1, + // "default" + (_types.TokenType._default << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "del" + -1, -1, -1, -1, -1, 2268, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "dele" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2295, -1, -1, -1, -1, -1, -1, + // "delet" + -1, -1, -1, -1, -1, 2322, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "delete" + (_types.TokenType._delete << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "do" + (_types.TokenType._do << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "e" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2403, -1, 2484, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2565, -1, -1, + // "el" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2430, -1, -1, -1, -1, -1, -1, -1, + // "els" + -1, -1, -1, -1, -1, 2457, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "else" + (_types.TokenType._else << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "en" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2511, -1, -1, -1, -1, -1, + // "enu" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2538, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "enum" + _keywords.ContextualKeyword._enum << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "ex" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2592, -1, -1, -1, 2727, -1, -1, -1, -1, -1, -1, + // "exp" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2619, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "expo" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2646, -1, -1, -1, -1, -1, -1, -1, -1, + // "expor" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2673, -1, -1, -1, -1, -1, -1, + // "export" + (_types.TokenType._export << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2700, -1, -1, -1, -1, -1, -1, -1, + // "exports" + _keywords.ContextualKeyword._exports << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "ext" + -1, -1, -1, -1, -1, 2754, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "exte" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2781, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "exten" + -1, -1, -1, -1, 2808, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "extend" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2835, -1, -1, -1, -1, -1, -1, -1, + // "extends" + (_types.TokenType._extends << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "f" + -1, 
2889, -1, -1, -1, -1, -1, -1, -1, 2997, -1, -1, -1, -1, -1, 3159, -1, -1, 3213, -1, -1, 3294, -1, -1, -1, -1, -1, + // "fa" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2916, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "fal" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2943, -1, -1, -1, -1, -1, -1, -1, + // "fals" + -1, -1, -1, -1, -1, 2970, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "false" + (_types.TokenType._false << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "fi" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3024, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "fin" + -1, 3051, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "fina" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3078, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "final" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3105, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "finall" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3132, -1, + // "finally" + (_types.TokenType._finally << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "fo" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3186, -1, -1, -1, -1, -1, -1, -1, -1, + // "for" + (_types.TokenType._for << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "fr" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3240, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "fro" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3267, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "from" + _keywords.ContextualKeyword._from << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "fu" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3321, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "fun" + -1, -1, -1, 3348, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "func" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3375, -1, -1, -1, -1, -1, -1, + // "funct" + -1, -1, -1, -1, -1, -1, -1, -1, -1, 3402, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "functi" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3429, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "functio" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3456, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "function" + (_types.TokenType._function << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "g" + -1, -1, -1, -1, -1, 3510, -1, -1, -1, -1, -1, -1, 3564, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "ge" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3537, -1, -1, -1, -1, -1, -1, + // "get" + _keywords.ContextualKeyword._get << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "gl" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3591, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "glo" + -1, -1, 3618, -1, 
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "glob" + -1, 3645, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "globa" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3672, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "global" + _keywords.ContextualKeyword._global << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "i" + -1, -1, -1, -1, -1, -1, 3726, -1, -1, -1, -1, -1, -1, 3753, 4077, -1, -1, -1, -1, 4590, -1, -1, -1, -1, -1, -1, -1, + // "if" + (_types.TokenType._if << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "im" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3780, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "imp" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3807, -1, -1, 3996, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "impl" + -1, -1, -1, -1, -1, 3834, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "imple" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3861, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "implem" + -1, -1, -1, -1, -1, 3888, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "impleme" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3915, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "implemen" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3942, -1, -1, -1, -1, -1, -1, + // "implement" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 3969, -1, -1, -1, -1, -1, -1, -1, + // "implements" + _keywords.ContextualKeyword._implements << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "impo" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 4023, -1, -1, -1, -1, -1, -1, -1, -1, + // "impor" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 4050, -1, -1, -1, -1, -1, -1, + // "import" + (_types.TokenType._import << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "in" + (_types.TokenType._in << 1) + 1, -1, -1, -1, -1, -1, 4104, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 4185, 4401, -1, -1, -1, -1, -1, -1, + // "inf" + -1, -1, -1, -1, -1, 4131, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "infe" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 4158, -1, -1, -1, -1, -1, -1, -1, -1, + // "infer" + _keywords.ContextualKeyword._infer << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "ins" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 4212, -1, -1, -1, -1, -1, -1, + // "inst" + -1, 4239, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "insta" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 4266, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "instan" + -1, -1, -1, 4293, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "instanc" + -1, -1, -1, -1, -1, 4320, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // 
"instance" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 4347, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "instanceo" + -1, -1, -1, -1, -1, -1, 4374, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "instanceof" + (_types.TokenType._instanceof << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "int" + -1, -1, -1, -1, -1, 4428, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "inte" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 4455, -1, -1, -1, -1, -1, -1, -1, -1, + // "inter" + -1, -1, -1, -1, -1, -1, 4482, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "interf" + -1, 4509, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "interfa" + -1, -1, -1, 4536, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "interfac" + -1, -1, -1, -1, -1, 4563, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "interface" + _keywords.ContextualKeyword._interface << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "is" + _keywords.ContextualKeyword._is << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "k" + -1, -1, -1, -1, -1, 4644, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "ke" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 4671, -1, + // "key" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 4698, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "keyo" + -1, -1, -1, -1, -1, -1, 4725, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "keyof" + _keywords.ContextualKeyword._keyof << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "l" + -1, -1, -1, -1, -1, 4779, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "le" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 4806, -1, -1, -1, -1, -1, -1, + // "let" + (_types.TokenType._let << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "m" + -1, -1, -1, -1, -1, -1, -1, -1, -1, 4860, -1, -1, -1, -1, -1, 4995, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "mi" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 4887, -1, -1, + // "mix" + -1, -1, -1, -1, -1, -1, -1, -1, -1, 4914, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "mixi" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 4941, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "mixin" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 4968, -1, -1, -1, -1, -1, -1, -1, + // "mixins" + _keywords.ContextualKeyword._mixins << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "mo" + -1, -1, -1, -1, 5022, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "mod" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 5049, -1, -1, -1, 
-1, -1, + // "modu" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 5076, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "modul" + -1, -1, -1, -1, -1, 5103, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "module" + _keywords.ContextualKeyword._module << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "n" + -1, 5157, -1, -1, -1, 5373, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 5427, -1, -1, -1, -1, -1, + // "na" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 5184, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "nam" + -1, -1, -1, -1, -1, 5211, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "name" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 5238, -1, -1, -1, -1, -1, -1, -1, + // "names" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 5265, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "namesp" + -1, 5292, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "namespa" + -1, -1, -1, 5319, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "namespac" + -1, -1, -1, -1, -1, 5346, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "namespace" + _keywords.ContextualKeyword._namespace << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "ne" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 5400, -1, -1, -1, + // "new" + (_types.TokenType._new << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "nu" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 5454, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "nul" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 5481, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "null" + (_types.TokenType._null << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "o" + -1, -1, -1, -1, -1, -1, 5535, -1, -1, -1, -1, -1, -1, -1, -1, -1, 5562, -1, -1, -1, -1, 5697, 5751, -1, -1, -1, -1, + // "of" + _keywords.ContextualKeyword._of << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "op" + -1, 5589, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "opa" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 5616, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "opaq" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 5643, -1, -1, -1, -1, -1, + // "opaqu" + -1, -1, -1, -1, -1, 5670, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "opaque" + _keywords.ContextualKeyword._opaque << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "ou" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 5724, -1, -1, -1, -1, -1, -1, + // "out" + _keywords.ContextualKeyword._out << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "ov" + -1, -1, -1, -1, -1, 5778, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "ove" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 5805, -1, -1, -1, -1, -1, -1, -1, -1, + // "over" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 5832, -1, -1, -1, -1, -1, -1, -1, -1, + // "overr" + -1, -1, -1, -1, -1, -1, -1, -1, -1, 5859, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "overri" + -1, -1, -1, -1, 5886, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "overrid" + -1, -1, -1, -1, -1, 5913, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "override" + _keywords.ContextualKeyword._override << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "p" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 5967, -1, -1, 6345, -1, -1, -1, -1, -1, + // "pr" + -1, -1, -1, -1, -1, -1, -1, -1, -1, 5994, -1, -1, -1, -1, -1, 6129, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "pri" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6021, -1, -1, -1, -1, + // "priv" + -1, 6048, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "priva" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6075, -1, -1, -1, -1, -1, -1, + // "privat" + -1, -1, -1, -1, -1, 6102, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "private" + _keywords.ContextualKeyword._private << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "pro" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6156, -1, -1, -1, -1, -1, -1, + // "prot" + -1, -1, -1, -1, -1, 6183, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6318, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "prote" + -1, -1, -1, 6210, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "protec" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6237, -1, -1, -1, -1, -1, -1, + // "protect" + -1, -1, -1, -1, -1, 6264, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "protecte" + -1, -1, -1, -1, 6291, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "protected" + _keywords.ContextualKeyword._protected << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "proto" + _keywords.ContextualKeyword._proto << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "pu" + -1, -1, 6372, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "pub" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6399, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "publ" + -1, -1, -1, -1, -1, -1, -1, -1, -1, 6426, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "publi" + -1, -1, -1, 6453, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "public" + _keywords.ContextualKeyword._public << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "r" + -1, -1, -1, -1, -1, 6507, -1, -1, -1, -1, -1, -1, -1, -1, -1, 
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "re" + -1, 6534, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6696, -1, -1, 6831, -1, -1, -1, -1, -1, -1, + // "rea" + -1, -1, -1, -1, 6561, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "read" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6588, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "reado" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6615, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "readon" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6642, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "readonl" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6669, -1, + // "readonly" + _keywords.ContextualKeyword._readonly << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "req" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6723, -1, -1, -1, -1, -1, + // "requ" + -1, -1, -1, -1, -1, -1, -1, -1, -1, 6750, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "requi" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6777, -1, -1, -1, -1, -1, -1, -1, -1, + // "requir" + -1, -1, -1, -1, -1, 6804, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "require" + _keywords.ContextualKeyword._require << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "ret" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6858, -1, -1, -1, -1, -1, + // "retu" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6885, -1, -1, -1, -1, -1, -1, -1, -1, + // "retur" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6912, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "return" + (_types.TokenType._return << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "s" + -1, 6966, -1, -1, -1, 7182, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 7236, 7371, -1, 7479, -1, 7614, -1, + // "sa" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6993, -1, -1, -1, -1, -1, -1, + // "sat" + -1, -1, -1, -1, -1, -1, -1, -1, -1, 7020, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "sati" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 7047, -1, -1, -1, -1, -1, -1, -1, + // "satis" + -1, -1, -1, -1, -1, -1, 7074, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "satisf" + -1, -1, -1, -1, -1, -1, -1, -1, -1, 7101, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "satisfi" + -1, -1, -1, -1, -1, 7128, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "satisfie" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 7155, -1, -1, -1, -1, -1, -1, -1, + // "satisfies" + _keywords.ContextualKeyword._satisfies << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "se" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 7209, -1, -1, -1, -1, -1, -1, + // "set" + _keywords.ContextualKeyword._set << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "st" + -1, 7263, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "sta" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 7290, -1, -1, -1, -1, -1, -1, + // "stat" + -1, -1, -1, -1, -1, -1, -1, -1, -1, 7317, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "stati" + -1, -1, -1, 7344, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "static" + _keywords.ContextualKeyword._static << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "su" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 7398, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "sup" + -1, -1, -1, -1, -1, 7425, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "supe" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 7452, -1, -1, -1, -1, -1, -1, -1, -1, + // "super" + (_types.TokenType._super << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "sw" + -1, -1, -1, -1, -1, -1, -1, -1, -1, 7506, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "swi" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 7533, -1, -1, -1, -1, -1, -1, + // "swit" + -1, -1, -1, 7560, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "switc" + -1, -1, -1, -1, -1, -1, -1, -1, 7587, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "switch" + (_types.TokenType._switch << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "sy" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 7641, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "sym" + -1, -1, 7668, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "symb" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 7695, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "symbo" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 7722, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "symbol" + _keywords.ContextualKeyword._symbol << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "t" + -1, -1, -1, -1, -1, -1, -1, -1, 7776, -1, -1, -1, -1, -1, -1, -1, -1, -1, 7938, -1, -1, -1, -1, -1, -1, 8046, -1, + // "th" + -1, -1, -1, -1, -1, -1, -1, -1, -1, 7803, -1, -1, -1, -1, -1, -1, -1, -1, 7857, -1, -1, -1, -1, -1, -1, -1, -1, + // "thi" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 7830, -1, -1, -1, -1, -1, -1, -1, + // "this" + (_types.TokenType._this << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "thr" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 7884, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "thro" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 7911, -1, -1, -1, + // "throw" + (_types.TokenType._throw << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "tr" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 
-1, -1, -1, 7965, -1, -1, -1, 8019, -1, + // "tru" + -1, -1, -1, -1, -1, 7992, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "true" + (_types.TokenType._true << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "try" + (_types.TokenType._try << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "ty" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 8073, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "typ" + -1, -1, -1, -1, -1, 8100, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "type" + _keywords.ContextualKeyword._type << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 8127, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "typeo" + -1, -1, -1, -1, -1, -1, 8154, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "typeof" + (_types.TokenType._typeof << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "u" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 8208, -1, -1, -1, -1, 8343, -1, -1, -1, -1, -1, -1, -1, + // "un" + -1, -1, -1, -1, -1, -1, -1, -1, -1, 8235, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "uni" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 8262, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "uniq" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 8289, -1, -1, -1, -1, -1, + // "uniqu" + -1, -1, -1, -1, -1, 8316, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "unique" + _keywords.ContextualKeyword._unique << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "us" + -1, -1, -1, -1, -1, -1, -1, -1, -1, 8370, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "usi" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 8397, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "usin" + -1, -1, -1, -1, -1, -1, -1, 8424, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "using" + _keywords.ContextualKeyword._using << 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "v" + -1, 8478, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 8532, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "va" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 8505, -1, -1, -1, -1, -1, -1, -1, -1, + // "var" + (_types.TokenType._var << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "vo" + -1, -1, -1, -1, -1, -1, -1, -1, -1, 8559, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "voi" + -1, -1, -1, -1, 8586, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "void" + (_types.TokenType._void << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "w" + -1, -1, -1, -1, -1, -1, -1, -1, 8640, 8748, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "wh" + -1, -1, -1, -1, -1, -1, -1, -1, -1, 8667, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "whi" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 
-1, -1, 8694, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "whil" + -1, -1, -1, -1, -1, 8721, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "while" + (_types.TokenType._while << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "wi" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 8775, -1, -1, -1, -1, -1, -1, + // "wit" + -1, -1, -1, -1, -1, -1, -1, -1, 8802, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "with" + (_types.TokenType._with << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "y" + -1, -1, -1, -1, -1, -1, -1, -1, -1, 8856, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "yi" + -1, -1, -1, -1, -1, 8883, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "yie" + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 8910, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "yiel" + -1, -1, -1, -1, 8937, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + // "yield" + (_types.TokenType._yield << 1) + 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, +]); exports.READ_WORD_TREE = READ_WORD_TREE; diff --git a/node_modules/sucrase/dist/parser/tokenizer/state.js b/node_modules/sucrase/dist/parser/tokenizer/state.js new file mode 100644 index 0000000..359e1b4 --- /dev/null +++ b/node_modules/sucrase/dist/parser/tokenizer/state.js @@ -0,0 +1,106 @@ +"use strict";Object.defineProperty(exports, "__esModule", {value: true}); +var _keywords = require('./keywords'); +var _types = require('./types'); + + class Scope { + + + + + constructor(startTokenIndex, endTokenIndex, isFunctionScope) { + this.startTokenIndex = startTokenIndex; + this.endTokenIndex = endTokenIndex; + this.isFunctionScope = isFunctionScope; + } +} exports.Scope = Scope; + + class StateSnapshot { + constructor( + potentialArrowAt, + noAnonFunctionType, + inDisallowConditionalTypesContext, + tokensLength, + scopesLength, + pos, + type, + contextualKeyword, + start, + end, + isType, + scopeDepth, + error, + ) {;this.potentialArrowAt = potentialArrowAt;this.noAnonFunctionType = noAnonFunctionType;this.inDisallowConditionalTypesContext = inDisallowConditionalTypesContext;this.tokensLength = tokensLength;this.scopesLength = scopesLength;this.pos = pos;this.type = type;this.contextualKeyword = contextualKeyword;this.start = start;this.end = end;this.isType = isType;this.scopeDepth = scopeDepth;this.error = error;} +} exports.StateSnapshot = StateSnapshot; + + class State {constructor() { State.prototype.__init.call(this);State.prototype.__init2.call(this);State.prototype.__init3.call(this);State.prototype.__init4.call(this);State.prototype.__init5.call(this);State.prototype.__init6.call(this);State.prototype.__init7.call(this);State.prototype.__init8.call(this);State.prototype.__init9.call(this);State.prototype.__init10.call(this);State.prototype.__init11.call(this);State.prototype.__init12.call(this);State.prototype.__init13.call(this); } + // Used to signify the start of a potential arrow function + __init() {this.potentialArrowAt = -1} + + // Used by Flow to handle an edge case involving function type parsing. 
+ __init2() {this.noAnonFunctionType = false} + + // Used by TypeScript to handle ambiguities when parsing conditional types. + __init3() {this.inDisallowConditionalTypesContext = false} + + // Token store. + __init4() {this.tokens = []} + + // Array of all observed scopes, ordered by their ending position. + __init5() {this.scopes = []} + + // The current position of the tokenizer in the input. + __init6() {this.pos = 0} + + // Information about the current token. + __init7() {this.type = _types.TokenType.eof} + __init8() {this.contextualKeyword = _keywords.ContextualKeyword.NONE} + __init9() {this.start = 0} + __init10() {this.end = 0} + + __init11() {this.isType = false} + __init12() {this.scopeDepth = 0} + + /** + * If the parser is in an error state, then the token is always tt.eof and all functions can + * keep executing but should be written so they don't get into an infinite loop in this situation. + * + * This approach, combined with the ability to snapshot and restore state, allows us to implement + * backtracking without exceptions and without needing to explicitly propagate error states + * everywhere. + */ + __init13() {this.error = null} + + snapshot() { + return new StateSnapshot( + this.potentialArrowAt, + this.noAnonFunctionType, + this.inDisallowConditionalTypesContext, + this.tokens.length, + this.scopes.length, + this.pos, + this.type, + this.contextualKeyword, + this.start, + this.end, + this.isType, + this.scopeDepth, + this.error, + ); + } + + restoreFromSnapshot(snapshot) { + this.potentialArrowAt = snapshot.potentialArrowAt; + this.noAnonFunctionType = snapshot.noAnonFunctionType; + this.inDisallowConditionalTypesContext = snapshot.inDisallowConditionalTypesContext; + this.tokens.length = snapshot.tokensLength; + this.scopes.length = snapshot.scopesLength; + this.pos = snapshot.pos; + this.type = snapshot.type; + this.contextualKeyword = snapshot.contextualKeyword; + this.start = snapshot.start; + this.end = snapshot.end; + this.isType = snapshot.isType; + this.scopeDepth = snapshot.scopeDepth; + this.error = snapshot.error; + } +} exports.default = State; diff --git a/node_modules/sucrase/dist/parser/tokenizer/types.js b/node_modules/sucrase/dist/parser/tokenizer/types.js new file mode 100644 index 0000000..c1f2a81 --- /dev/null +++ b/node_modules/sucrase/dist/parser/tokenizer/types.js @@ -0,0 +1,361 @@ +"use strict";Object.defineProperty(exports, "__esModule", {value: true});// Generated file, do not edit! Run "yarn generate" to re-generate this file. +/* istanbul ignore file */ +/** + * Enum of all token types, with bit fields to signify meaningful properties. + */ +var TokenType; (function (TokenType) { + // Precedence 0 means not an operator; otherwise it is a positive number up to 12. 
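+  // The low 4 bits hold that precedence and the next several bits are independent boolean flags,
+  // so a mask or comparison on the numeric token type answers questions such as "is this an
+  // assignment operator?" without a lookup table. For example, plus = 49802 = 0b1100001010001010
+  // encodes precedence 10 with IS_PREFIX and IS_EXPRESSION_START set, matching its
+  // "+ prec:10 prefix startsExpr" annotation below.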
+ const PRECEDENCE_MASK = 0xf; TokenType[TokenType["PRECEDENCE_MASK"] = PRECEDENCE_MASK] = "PRECEDENCE_MASK"; + const IS_KEYWORD = 1 << 4; TokenType[TokenType["IS_KEYWORD"] = IS_KEYWORD] = "IS_KEYWORD"; + const IS_ASSIGN = 1 << 5; TokenType[TokenType["IS_ASSIGN"] = IS_ASSIGN] = "IS_ASSIGN"; + const IS_RIGHT_ASSOCIATIVE = 1 << 6; TokenType[TokenType["IS_RIGHT_ASSOCIATIVE"] = IS_RIGHT_ASSOCIATIVE] = "IS_RIGHT_ASSOCIATIVE"; + const IS_PREFIX = 1 << 7; TokenType[TokenType["IS_PREFIX"] = IS_PREFIX] = "IS_PREFIX"; + const IS_POSTFIX = 1 << 8; TokenType[TokenType["IS_POSTFIX"] = IS_POSTFIX] = "IS_POSTFIX"; + const IS_EXPRESSION_START = 1 << 9; TokenType[TokenType["IS_EXPRESSION_START"] = IS_EXPRESSION_START] = "IS_EXPRESSION_START"; + + const num = 512; TokenType[TokenType["num"] = num] = "num"; // num startsExpr + const bigint = 1536; TokenType[TokenType["bigint"] = bigint] = "bigint"; // bigint startsExpr + const decimal = 2560; TokenType[TokenType["decimal"] = decimal] = "decimal"; // decimal startsExpr + const regexp = 3584; TokenType[TokenType["regexp"] = regexp] = "regexp"; // regexp startsExpr + const string = 4608; TokenType[TokenType["string"] = string] = "string"; // string startsExpr + const name = 5632; TokenType[TokenType["name"] = name] = "name"; // name startsExpr + const eof = 6144; TokenType[TokenType["eof"] = eof] = "eof"; // eof + const bracketL = 7680; TokenType[TokenType["bracketL"] = bracketL] = "bracketL"; // [ startsExpr + const bracketR = 8192; TokenType[TokenType["bracketR"] = bracketR] = "bracketR"; // ] + const braceL = 9728; TokenType[TokenType["braceL"] = braceL] = "braceL"; // { startsExpr + const braceBarL = 10752; TokenType[TokenType["braceBarL"] = braceBarL] = "braceBarL"; // {| startsExpr + const braceR = 11264; TokenType[TokenType["braceR"] = braceR] = "braceR"; // } + const braceBarR = 12288; TokenType[TokenType["braceBarR"] = braceBarR] = "braceBarR"; // |} + const parenL = 13824; TokenType[TokenType["parenL"] = parenL] = "parenL"; // ( startsExpr + const parenR = 14336; TokenType[TokenType["parenR"] = parenR] = "parenR"; // ) + const comma = 15360; TokenType[TokenType["comma"] = comma] = "comma"; // , + const semi = 16384; TokenType[TokenType["semi"] = semi] = "semi"; // ; + const colon = 17408; TokenType[TokenType["colon"] = colon] = "colon"; // : + const doubleColon = 18432; TokenType[TokenType["doubleColon"] = doubleColon] = "doubleColon"; // :: + const dot = 19456; TokenType[TokenType["dot"] = dot] = "dot"; // . + const question = 20480; TokenType[TokenType["question"] = question] = "question"; // ? + const questionDot = 21504; TokenType[TokenType["questionDot"] = questionDot] = "questionDot"; // ?. + const arrow = 22528; TokenType[TokenType["arrow"] = arrow] = "arrow"; // => + const template = 23552; TokenType[TokenType["template"] = template] = "template"; // template + const ellipsis = 24576; TokenType[TokenType["ellipsis"] = ellipsis] = "ellipsis"; // ... 
+ const backQuote = 25600; TokenType[TokenType["backQuote"] = backQuote] = "backQuote"; // ` + const dollarBraceL = 27136; TokenType[TokenType["dollarBraceL"] = dollarBraceL] = "dollarBraceL"; // ${ startsExpr + const at = 27648; TokenType[TokenType["at"] = at] = "at"; // @ + const hash = 29184; TokenType[TokenType["hash"] = hash] = "hash"; // # startsExpr + const eq = 29728; TokenType[TokenType["eq"] = eq] = "eq"; // = isAssign + const assign = 30752; TokenType[TokenType["assign"] = assign] = "assign"; // _= isAssign + const preIncDec = 32640; TokenType[TokenType["preIncDec"] = preIncDec] = "preIncDec"; // ++/-- prefix postfix startsExpr + const postIncDec = 33664; TokenType[TokenType["postIncDec"] = postIncDec] = "postIncDec"; // ++/-- prefix postfix startsExpr + const bang = 34432; TokenType[TokenType["bang"] = bang] = "bang"; // ! prefix startsExpr + const tilde = 35456; TokenType[TokenType["tilde"] = tilde] = "tilde"; // ~ prefix startsExpr + const pipeline = 35841; TokenType[TokenType["pipeline"] = pipeline] = "pipeline"; // |> prec:1 + const nullishCoalescing = 36866; TokenType[TokenType["nullishCoalescing"] = nullishCoalescing] = "nullishCoalescing"; // ?? prec:2 + const logicalOR = 37890; TokenType[TokenType["logicalOR"] = logicalOR] = "logicalOR"; // || prec:2 + const logicalAND = 38915; TokenType[TokenType["logicalAND"] = logicalAND] = "logicalAND"; // && prec:3 + const bitwiseOR = 39940; TokenType[TokenType["bitwiseOR"] = bitwiseOR] = "bitwiseOR"; // | prec:4 + const bitwiseXOR = 40965; TokenType[TokenType["bitwiseXOR"] = bitwiseXOR] = "bitwiseXOR"; // ^ prec:5 + const bitwiseAND = 41990; TokenType[TokenType["bitwiseAND"] = bitwiseAND] = "bitwiseAND"; // & prec:6 + const equality = 43015; TokenType[TokenType["equality"] = equality] = "equality"; // ==/!= prec:7 + const lessThan = 44040; TokenType[TokenType["lessThan"] = lessThan] = "lessThan"; // < prec:8 + const greaterThan = 45064; TokenType[TokenType["greaterThan"] = greaterThan] = "greaterThan"; // > prec:8 + const relationalOrEqual = 46088; TokenType[TokenType["relationalOrEqual"] = relationalOrEqual] = "relationalOrEqual"; // <=/>= prec:8 + const bitShiftL = 47113; TokenType[TokenType["bitShiftL"] = bitShiftL] = "bitShiftL"; // << prec:9 + const bitShiftR = 48137; TokenType[TokenType["bitShiftR"] = bitShiftR] = "bitShiftR"; // >>/>>> prec:9 + const plus = 49802; TokenType[TokenType["plus"] = plus] = "plus"; // + prec:10 prefix startsExpr + const minus = 50826; TokenType[TokenType["minus"] = minus] = "minus"; // - prec:10 prefix startsExpr + const modulo = 51723; TokenType[TokenType["modulo"] = modulo] = "modulo"; // % prec:11 startsExpr + const star = 52235; TokenType[TokenType["star"] = star] = "star"; // * prec:11 + const slash = 53259; TokenType[TokenType["slash"] = slash] = "slash"; // / prec:11 + const exponent = 54348; TokenType[TokenType["exponent"] = exponent] = "exponent"; // ** prec:12 rightAssociative + const jsxName = 55296; TokenType[TokenType["jsxName"] = jsxName] = "jsxName"; // jsxName + const jsxText = 56320; TokenType[TokenType["jsxText"] = jsxText] = "jsxText"; // jsxText + const jsxEmptyText = 57344; TokenType[TokenType["jsxEmptyText"] = jsxEmptyText] = "jsxEmptyText"; // jsxEmptyText + const jsxTagStart = 58880; TokenType[TokenType["jsxTagStart"] = jsxTagStart] = "jsxTagStart"; // jsxTagStart startsExpr + const jsxTagEnd = 59392; TokenType[TokenType["jsxTagEnd"] = jsxTagEnd] = "jsxTagEnd"; // jsxTagEnd + const typeParameterStart = 60928; TokenType[TokenType["typeParameterStart"] = typeParameterStart] 
= "typeParameterStart"; // typeParameterStart startsExpr + const nonNullAssertion = 61440; TokenType[TokenType["nonNullAssertion"] = nonNullAssertion] = "nonNullAssertion"; // nonNullAssertion + const _break = 62480; TokenType[TokenType["_break"] = _break] = "_break"; // break keyword + const _case = 63504; TokenType[TokenType["_case"] = _case] = "_case"; // case keyword + const _catch = 64528; TokenType[TokenType["_catch"] = _catch] = "_catch"; // catch keyword + const _continue = 65552; TokenType[TokenType["_continue"] = _continue] = "_continue"; // continue keyword + const _debugger = 66576; TokenType[TokenType["_debugger"] = _debugger] = "_debugger"; // debugger keyword + const _default = 67600; TokenType[TokenType["_default"] = _default] = "_default"; // default keyword + const _do = 68624; TokenType[TokenType["_do"] = _do] = "_do"; // do keyword + const _else = 69648; TokenType[TokenType["_else"] = _else] = "_else"; // else keyword + const _finally = 70672; TokenType[TokenType["_finally"] = _finally] = "_finally"; // finally keyword + const _for = 71696; TokenType[TokenType["_for"] = _for] = "_for"; // for keyword + const _function = 73232; TokenType[TokenType["_function"] = _function] = "_function"; // function keyword startsExpr + const _if = 73744; TokenType[TokenType["_if"] = _if] = "_if"; // if keyword + const _return = 74768; TokenType[TokenType["_return"] = _return] = "_return"; // return keyword + const _switch = 75792; TokenType[TokenType["_switch"] = _switch] = "_switch"; // switch keyword + const _throw = 77456; TokenType[TokenType["_throw"] = _throw] = "_throw"; // throw keyword prefix startsExpr + const _try = 77840; TokenType[TokenType["_try"] = _try] = "_try"; // try keyword + const _var = 78864; TokenType[TokenType["_var"] = _var] = "_var"; // var keyword + const _let = 79888; TokenType[TokenType["_let"] = _let] = "_let"; // let keyword + const _const = 80912; TokenType[TokenType["_const"] = _const] = "_const"; // const keyword + const _while = 81936; TokenType[TokenType["_while"] = _while] = "_while"; // while keyword + const _with = 82960; TokenType[TokenType["_with"] = _with] = "_with"; // with keyword + const _new = 84496; TokenType[TokenType["_new"] = _new] = "_new"; // new keyword startsExpr + const _this = 85520; TokenType[TokenType["_this"] = _this] = "_this"; // this keyword startsExpr + const _super = 86544; TokenType[TokenType["_super"] = _super] = "_super"; // super keyword startsExpr + const _class = 87568; TokenType[TokenType["_class"] = _class] = "_class"; // class keyword startsExpr + const _extends = 88080; TokenType[TokenType["_extends"] = _extends] = "_extends"; // extends keyword + const _export = 89104; TokenType[TokenType["_export"] = _export] = "_export"; // export keyword + const _import = 90640; TokenType[TokenType["_import"] = _import] = "_import"; // import keyword startsExpr + const _yield = 91664; TokenType[TokenType["_yield"] = _yield] = "_yield"; // yield keyword startsExpr + const _null = 92688; TokenType[TokenType["_null"] = _null] = "_null"; // null keyword startsExpr + const _true = 93712; TokenType[TokenType["_true"] = _true] = "_true"; // true keyword startsExpr + const _false = 94736; TokenType[TokenType["_false"] = _false] = "_false"; // false keyword startsExpr + const _in = 95256; TokenType[TokenType["_in"] = _in] = "_in"; // in prec:8 keyword + const _instanceof = 96280; TokenType[TokenType["_instanceof"] = _instanceof] = "_instanceof"; // instanceof prec:8 keyword + const _typeof = 97936; TokenType[TokenType["_typeof"] = 
_typeof] = "_typeof"; // typeof keyword prefix startsExpr + const _void = 98960; TokenType[TokenType["_void"] = _void] = "_void"; // void keyword prefix startsExpr + const _delete = 99984; TokenType[TokenType["_delete"] = _delete] = "_delete"; // delete keyword prefix startsExpr + const _async = 100880; TokenType[TokenType["_async"] = _async] = "_async"; // async keyword startsExpr + const _get = 101904; TokenType[TokenType["_get"] = _get] = "_get"; // get keyword startsExpr + const _set = 102928; TokenType[TokenType["_set"] = _set] = "_set"; // set keyword startsExpr + const _declare = 103952; TokenType[TokenType["_declare"] = _declare] = "_declare"; // declare keyword startsExpr + const _readonly = 104976; TokenType[TokenType["_readonly"] = _readonly] = "_readonly"; // readonly keyword startsExpr + const _abstract = 106000; TokenType[TokenType["_abstract"] = _abstract] = "_abstract"; // abstract keyword startsExpr + const _static = 107024; TokenType[TokenType["_static"] = _static] = "_static"; // static keyword startsExpr + const _public = 107536; TokenType[TokenType["_public"] = _public] = "_public"; // public keyword + const _private = 108560; TokenType[TokenType["_private"] = _private] = "_private"; // private keyword + const _protected = 109584; TokenType[TokenType["_protected"] = _protected] = "_protected"; // protected keyword + const _override = 110608; TokenType[TokenType["_override"] = _override] = "_override"; // override keyword + const _as = 112144; TokenType[TokenType["_as"] = _as] = "_as"; // as keyword startsExpr + const _enum = 113168; TokenType[TokenType["_enum"] = _enum] = "_enum"; // enum keyword startsExpr + const _type = 114192; TokenType[TokenType["_type"] = _type] = "_type"; // type keyword startsExpr + const _implements = 115216; TokenType[TokenType["_implements"] = _implements] = "_implements"; // implements keyword startsExpr +})(TokenType || (exports.TokenType = TokenType = {})); + function formatTokenType(tokenType) { + switch (tokenType) { + case TokenType.num: + return "num"; + case TokenType.bigint: + return "bigint"; + case TokenType.decimal: + return "decimal"; + case TokenType.regexp: + return "regexp"; + case TokenType.string: + return "string"; + case TokenType.name: + return "name"; + case TokenType.eof: + return "eof"; + case TokenType.bracketL: + return "["; + case TokenType.bracketR: + return "]"; + case TokenType.braceL: + return "{"; + case TokenType.braceBarL: + return "{|"; + case TokenType.braceR: + return "}"; + case TokenType.braceBarR: + return "|}"; + case TokenType.parenL: + return "("; + case TokenType.parenR: + return ")"; + case TokenType.comma: + return ","; + case TokenType.semi: + return ";"; + case TokenType.colon: + return ":"; + case TokenType.doubleColon: + return "::"; + case TokenType.dot: + return "."; + case TokenType.question: + return "?"; + case TokenType.questionDot: + return "?."; + case TokenType.arrow: + return "=>"; + case TokenType.template: + return "template"; + case TokenType.ellipsis: + return "..."; + case TokenType.backQuote: + return "`"; + case TokenType.dollarBraceL: + return "${"; + case TokenType.at: + return "@"; + case TokenType.hash: + return "#"; + case TokenType.eq: + return "="; + case TokenType.assign: + return "_="; + case TokenType.preIncDec: + return "++/--"; + case TokenType.postIncDec: + return "++/--"; + case TokenType.bang: + return "!"; + case TokenType.tilde: + return "~"; + case TokenType.pipeline: + return "|>"; + case TokenType.nullishCoalescing: + return "??"; + case 
TokenType.logicalOR: + return "||"; + case TokenType.logicalAND: + return "&&"; + case TokenType.bitwiseOR: + return "|"; + case TokenType.bitwiseXOR: + return "^"; + case TokenType.bitwiseAND: + return "&"; + case TokenType.equality: + return "==/!="; + case TokenType.lessThan: + return "<"; + case TokenType.greaterThan: + return ">"; + case TokenType.relationalOrEqual: + return "<=/>="; + case TokenType.bitShiftL: + return "<<"; + case TokenType.bitShiftR: + return ">>/>>>"; + case TokenType.plus: + return "+"; + case TokenType.minus: + return "-"; + case TokenType.modulo: + return "%"; + case TokenType.star: + return "*"; + case TokenType.slash: + return "/"; + case TokenType.exponent: + return "**"; + case TokenType.jsxName: + return "jsxName"; + case TokenType.jsxText: + return "jsxText"; + case TokenType.jsxEmptyText: + return "jsxEmptyText"; + case TokenType.jsxTagStart: + return "jsxTagStart"; + case TokenType.jsxTagEnd: + return "jsxTagEnd"; + case TokenType.typeParameterStart: + return "typeParameterStart"; + case TokenType.nonNullAssertion: + return "nonNullAssertion"; + case TokenType._break: + return "break"; + case TokenType._case: + return "case"; + case TokenType._catch: + return "catch"; + case TokenType._continue: + return "continue"; + case TokenType._debugger: + return "debugger"; + case TokenType._default: + return "default"; + case TokenType._do: + return "do"; + case TokenType._else: + return "else"; + case TokenType._finally: + return "finally"; + case TokenType._for: + return "for"; + case TokenType._function: + return "function"; + case TokenType._if: + return "if"; + case TokenType._return: + return "return"; + case TokenType._switch: + return "switch"; + case TokenType._throw: + return "throw"; + case TokenType._try: + return "try"; + case TokenType._var: + return "var"; + case TokenType._let: + return "let"; + case TokenType._const: + return "const"; + case TokenType._while: + return "while"; + case TokenType._with: + return "with"; + case TokenType._new: + return "new"; + case TokenType._this: + return "this"; + case TokenType._super: + return "super"; + case TokenType._class: + return "class"; + case TokenType._extends: + return "extends"; + case TokenType._export: + return "export"; + case TokenType._import: + return "import"; + case TokenType._yield: + return "yield"; + case TokenType._null: + return "null"; + case TokenType._true: + return "true"; + case TokenType._false: + return "false"; + case TokenType._in: + return "in"; + case TokenType._instanceof: + return "instanceof"; + case TokenType._typeof: + return "typeof"; + case TokenType._void: + return "void"; + case TokenType._delete: + return "delete"; + case TokenType._async: + return "async"; + case TokenType._get: + return "get"; + case TokenType._set: + return "set"; + case TokenType._declare: + return "declare"; + case TokenType._readonly: + return "readonly"; + case TokenType._abstract: + return "abstract"; + case TokenType._static: + return "static"; + case TokenType._public: + return "public"; + case TokenType._private: + return "private"; + case TokenType._protected: + return "protected"; + case TokenType._override: + return "override"; + case TokenType._as: + return "as"; + case TokenType._enum: + return "enum"; + case TokenType._type: + return "type"; + case TokenType._implements: + return "implements"; + default: + return ""; + } +} exports.formatTokenType = formatTokenType; diff --git a/node_modules/sucrase/dist/parser/traverser/base.js b/node_modules/sucrase/dist/parser/traverser/base.js 
new file mode 100644 index 0000000..85c9c17 --- /dev/null +++ b/node_modules/sucrase/dist/parser/traverser/base.js @@ -0,0 +1,60 @@ +"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }var _state = require('../tokenizer/state'); var _state2 = _interopRequireDefault(_state); +var _charcodes = require('../util/charcodes'); + + exports.isJSXEnabled; + exports.isTypeScriptEnabled; + exports.isFlowEnabled; + exports.state; + exports.input; + exports.nextContextId; + + function getNextContextId() { + return exports.nextContextId++; +} exports.getNextContextId = getNextContextId; + +// eslint-disable-next-line @typescript-eslint/no-explicit-any + function augmentError(error) { + if ("pos" in error) { + const loc = locationForIndex(error.pos); + error.message += ` (${loc.line}:${loc.column})`; + error.loc = loc; + } + return error; +} exports.augmentError = augmentError; + + class Loc { + + + constructor(line, column) { + this.line = line; + this.column = column; + } +} exports.Loc = Loc; + + function locationForIndex(pos) { + let line = 1; + let column = 1; + for (let i = 0; i < pos; i++) { + if (exports.input.charCodeAt(i) === _charcodes.charCodes.lineFeed) { + line++; + column = 1; + } else { + column++; + } + } + return new Loc(line, column); +} exports.locationForIndex = locationForIndex; + + function initParser( + inputCode, + isJSXEnabledArg, + isTypeScriptEnabledArg, + isFlowEnabledArg, +) { + exports.input = inputCode; + exports.state = new (0, _state2.default)(); + exports.nextContextId = 1; + exports.isJSXEnabled = isJSXEnabledArg; + exports.isTypeScriptEnabled = isTypeScriptEnabledArg; + exports.isFlowEnabled = isFlowEnabledArg; +} exports.initParser = initParser; diff --git a/node_modules/sucrase/dist/parser/traverser/expression.js b/node_modules/sucrase/dist/parser/traverser/expression.js new file mode 100644 index 0000000..d0011e3 --- /dev/null +++ b/node_modules/sucrase/dist/parser/traverser/expression.js @@ -0,0 +1,1022 @@ +"use strict";Object.defineProperty(exports, "__esModule", {value: true});/* eslint max-len: 0 */ + +// A recursive descent parser operates by defining functions for all +// syntactic elements, and recursively calling those, each function +// advancing the input stream and returning an AST node. Precedence +// of constructs (for example, the fact that `!x[1]` means `!(x[1])` +// instead of `(!x)[1]` is handled by the fact that the parser +// function that parses unary prefix operators is called first, and +// in turn calls the function that parses `[]` subscripts — that +// way, it'll receive the node for `x[1]` already parsed, and wraps +// *that* in the unary operator node. +// +// Acorn uses an [operator precedence parser][opp] to handle binary +// operator precedence, because it is much more compact than using +// the technique outlined above, which uses different, nesting +// functions to specify precedence, for all of the ten binary +// precedence levels that JavaScript defines. 
+// +// [opp]: http://en.wikipedia.org/wiki/Operator-precedence_parser + + + + + + + + + + + +var _flow = require('../plugins/flow'); +var _index = require('../plugins/jsx/index'); +var _types = require('../plugins/types'); + + + + + + + + + +var _typescript = require('../plugins/typescript'); + + + + + + + + + + + + +var _index3 = require('../tokenizer/index'); +var _keywords = require('../tokenizer/keywords'); +var _state = require('../tokenizer/state'); +var _types3 = require('../tokenizer/types'); +var _charcodes = require('../util/charcodes'); +var _identifier = require('../util/identifier'); +var _base = require('./base'); + + + + + + +var _lval = require('./lval'); + + + + + + + +var _statement = require('./statement'); + + + + + + + + + +var _util = require('./util'); + + class StopState { + + constructor(stop) { + this.stop = stop; + } +} exports.StopState = StopState; + +// ### Expression parsing + +// These nest, from the most general expression type at the top to +// 'atomic', nondivisible expression types at the bottom. Most of +// the functions will simply let the function (s) below them parse, +// and, *if* the syntactic construct they handle is present, wrap +// the AST node that the inner parser gave them in another node. + function parseExpression(noIn = false) { + parseMaybeAssign(noIn); + if (_index3.match.call(void 0, _types3.TokenType.comma)) { + while (_index3.eat.call(void 0, _types3.TokenType.comma)) { + parseMaybeAssign(noIn); + } + } +} exports.parseExpression = parseExpression; + +/** + * noIn is used when parsing a for loop so that we don't interpret a following "in" as the binary + * operatior. + * isWithinParens is used to indicate that we're parsing something that might be a comma expression + * or might be an arrow function or might be a Flow type assertion (which requires explicit parens). + * In these cases, we should allow : and ?: after the initial "left" part. + */ + function parseMaybeAssign(noIn = false, isWithinParens = false) { + if (_base.isTypeScriptEnabled) { + return _typescript.tsParseMaybeAssign.call(void 0, noIn, isWithinParens); + } else if (_base.isFlowEnabled) { + return _flow.flowParseMaybeAssign.call(void 0, noIn, isWithinParens); + } else { + return baseParseMaybeAssign(noIn, isWithinParens); + } +} exports.parseMaybeAssign = parseMaybeAssign; + +// Parse an assignment expression. This includes applications of +// operators like `+=`. +// Returns true if the expression was an arrow function. + function baseParseMaybeAssign(noIn, isWithinParens) { + if (_index3.match.call(void 0, _types3.TokenType._yield)) { + parseYield(); + return false; + } + + if (_index3.match.call(void 0, _types3.TokenType.parenL) || _index3.match.call(void 0, _types3.TokenType.name) || _index3.match.call(void 0, _types3.TokenType._yield)) { + _base.state.potentialArrowAt = _base.state.start; + } + + const wasArrow = parseMaybeConditional(noIn); + if (isWithinParens) { + parseParenItem(); + } + if (_base.state.type & _types3.TokenType.IS_ASSIGN) { + _index3.next.call(void 0, ); + parseMaybeAssign(noIn); + return false; + } + return wasArrow; +} exports.baseParseMaybeAssign = baseParseMaybeAssign; + +// Parse a ternary conditional (`?:`) operator. +// Returns true if the expression was an arrow function. 
+function parseMaybeConditional(noIn) { + const wasArrow = parseExprOps(noIn); + if (wasArrow) { + return true; + } + parseConditional(noIn); + return false; +} + +function parseConditional(noIn) { + if (_base.isTypeScriptEnabled || _base.isFlowEnabled) { + _types.typedParseConditional.call(void 0, noIn); + } else { + baseParseConditional(noIn); + } +} + + function baseParseConditional(noIn) { + if (_index3.eat.call(void 0, _types3.TokenType.question)) { + parseMaybeAssign(); + _util.expect.call(void 0, _types3.TokenType.colon); + parseMaybeAssign(noIn); + } +} exports.baseParseConditional = baseParseConditional; + +// Start the precedence parser. +// Returns true if this was an arrow function +function parseExprOps(noIn) { + const startTokenIndex = _base.state.tokens.length; + const wasArrow = parseMaybeUnary(); + if (wasArrow) { + return true; + } + parseExprOp(startTokenIndex, -1, noIn); + return false; +} + +// Parse binary operators with the operator precedence parsing +// algorithm. `left` is the left-hand side of the operator. +// `minPrec` provides context that allows the function to stop and +// defer further parser to one of its callers when it encounters an +// operator that has a lower precedence than the set it is parsing. +function parseExprOp(startTokenIndex, minPrec, noIn) { + if ( + _base.isTypeScriptEnabled && + (_types3.TokenType._in & _types3.TokenType.PRECEDENCE_MASK) > minPrec && + !_util.hasPrecedingLineBreak.call(void 0, ) && + (_util.eatContextual.call(void 0, _keywords.ContextualKeyword._as) || _util.eatContextual.call(void 0, _keywords.ContextualKeyword._satisfies)) + ) { + const oldIsType = _index3.pushTypeContext.call(void 0, 1); + _typescript.tsParseType.call(void 0, ); + _index3.popTypeContext.call(void 0, oldIsType); + _index3.rescan_gt.call(void 0, ); + parseExprOp(startTokenIndex, minPrec, noIn); + return; + } + + const prec = _base.state.type & _types3.TokenType.PRECEDENCE_MASK; + if (prec > 0 && (!noIn || !_index3.match.call(void 0, _types3.TokenType._in))) { + if (prec > minPrec) { + const op = _base.state.type; + _index3.next.call(void 0, ); + if (op === _types3.TokenType.nullishCoalescing) { + _base.state.tokens[_base.state.tokens.length - 1].nullishStartIndex = startTokenIndex; + } + + const rhsStartTokenIndex = _base.state.tokens.length; + parseMaybeUnary(); + // Extend the right operand of this operator if possible. + parseExprOp(rhsStartTokenIndex, op & _types3.TokenType.IS_RIGHT_ASSOCIATIVE ? prec - 1 : prec, noIn); + if (op === _types3.TokenType.nullishCoalescing) { + _base.state.tokens[startTokenIndex].numNullishCoalesceStarts++; + _base.state.tokens[_base.state.tokens.length - 1].numNullishCoalesceEnds++; + } + // Continue with any future operator holding this expression as the left operand. + parseExprOp(startTokenIndex, minPrec, noIn); + } + } +} + +// Parse unary operators, both prefix and postfix. +// Returns true if this was an arrow function. 
+ function parseMaybeUnary() { + if (_base.isTypeScriptEnabled && !_base.isJSXEnabled && _index3.eat.call(void 0, _types3.TokenType.lessThan)) { + _typescript.tsParseTypeAssertion.call(void 0, ); + return false; + } + if ( + _util.isContextual.call(void 0, _keywords.ContextualKeyword._module) && + _index3.lookaheadCharCode.call(void 0, ) === _charcodes.charCodes.leftCurlyBrace && + !_util.hasFollowingLineBreak.call(void 0, ) + ) { + parseModuleExpression(); + return false; + } + if (_base.state.type & _types3.TokenType.IS_PREFIX) { + _index3.next.call(void 0, ); + parseMaybeUnary(); + return false; + } + + const wasArrow = parseExprSubscripts(); + if (wasArrow) { + return true; + } + while (_base.state.type & _types3.TokenType.IS_POSTFIX && !_util.canInsertSemicolon.call(void 0, )) { + // The tokenizer calls everything a preincrement, so make it a postincrement when + // we see it in that context. + if (_base.state.type === _types3.TokenType.preIncDec) { + _base.state.type = _types3.TokenType.postIncDec; + } + _index3.next.call(void 0, ); + } + return false; +} exports.parseMaybeUnary = parseMaybeUnary; + +// Parse call, dot, and `[]`-subscript expressions. +// Returns true if this was an arrow function. + function parseExprSubscripts() { + const startTokenIndex = _base.state.tokens.length; + const wasArrow = parseExprAtom(); + if (wasArrow) { + return true; + } + parseSubscripts(startTokenIndex); + // If there was any optional chain operation, the start token would be marked + // as such, so also mark the end now. + if (_base.state.tokens.length > startTokenIndex && _base.state.tokens[startTokenIndex].isOptionalChainStart) { + _base.state.tokens[_base.state.tokens.length - 1].isOptionalChainEnd = true; + } + return false; +} exports.parseExprSubscripts = parseExprSubscripts; + +function parseSubscripts(startTokenIndex, noCalls = false) { + if (_base.isFlowEnabled) { + _flow.flowParseSubscripts.call(void 0, startTokenIndex, noCalls); + } else { + baseParseSubscripts(startTokenIndex, noCalls); + } +} + + function baseParseSubscripts(startTokenIndex, noCalls = false) { + const stopState = new StopState(false); + do { + parseSubscript(startTokenIndex, noCalls, stopState); + } while (!stopState.stop && !_base.state.error); +} exports.baseParseSubscripts = baseParseSubscripts; + +function parseSubscript(startTokenIndex, noCalls, stopState) { + if (_base.isTypeScriptEnabled) { + _typescript.tsParseSubscript.call(void 0, startTokenIndex, noCalls, stopState); + } else if (_base.isFlowEnabled) { + _flow.flowParseSubscript.call(void 0, startTokenIndex, noCalls, stopState); + } else { + baseParseSubscript(startTokenIndex, noCalls, stopState); + } +} + +/** Set 'state.stop = true' to indicate that we should stop parsing subscripts. */ + function baseParseSubscript( + startTokenIndex, + noCalls, + stopState, +) { + if (!noCalls && _index3.eat.call(void 0, _types3.TokenType.doubleColon)) { + parseNoCallExpr(); + stopState.stop = true; + // Propagate startTokenIndex so that `a::b?.()` will keep `a` as the first token. We may want + // to revisit this in the future when fully supporting bind syntax. 
+ parseSubscripts(startTokenIndex, noCalls); + } else if (_index3.match.call(void 0, _types3.TokenType.questionDot)) { + _base.state.tokens[startTokenIndex].isOptionalChainStart = true; + if (noCalls && _index3.lookaheadType.call(void 0, ) === _types3.TokenType.parenL) { + stopState.stop = true; + return; + } + _index3.next.call(void 0, ); + _base.state.tokens[_base.state.tokens.length - 1].subscriptStartIndex = startTokenIndex; + + if (_index3.eat.call(void 0, _types3.TokenType.bracketL)) { + parseExpression(); + _util.expect.call(void 0, _types3.TokenType.bracketR); + } else if (_index3.eat.call(void 0, _types3.TokenType.parenL)) { + parseCallExpressionArguments(); + } else { + parseMaybePrivateName(); + } + } else if (_index3.eat.call(void 0, _types3.TokenType.dot)) { + _base.state.tokens[_base.state.tokens.length - 1].subscriptStartIndex = startTokenIndex; + parseMaybePrivateName(); + } else if (_index3.eat.call(void 0, _types3.TokenType.bracketL)) { + _base.state.tokens[_base.state.tokens.length - 1].subscriptStartIndex = startTokenIndex; + parseExpression(); + _util.expect.call(void 0, _types3.TokenType.bracketR); + } else if (!noCalls && _index3.match.call(void 0, _types3.TokenType.parenL)) { + if (atPossibleAsync()) { + // We see "async", but it's possible it's a usage of the name "async". Parse as if it's a + // function call, and if we see an arrow later, backtrack and re-parse as a parameter list. + const snapshot = _base.state.snapshot(); + const asyncStartTokenIndex = _base.state.tokens.length; + _index3.next.call(void 0, ); + _base.state.tokens[_base.state.tokens.length - 1].subscriptStartIndex = startTokenIndex; + + const callContextId = _base.getNextContextId.call(void 0, ); + + _base.state.tokens[_base.state.tokens.length - 1].contextId = callContextId; + parseCallExpressionArguments(); + _base.state.tokens[_base.state.tokens.length - 1].contextId = callContextId; + + if (shouldParseAsyncArrow()) { + // We hit an arrow, so backtrack and start again parsing function parameters. + _base.state.restoreFromSnapshot(snapshot); + stopState.stop = true; + _base.state.scopeDepth++; + + _statement.parseFunctionParams.call(void 0, ); + parseAsyncArrowFromCallExpression(asyncStartTokenIndex); + } + } else { + _index3.next.call(void 0, ); + _base.state.tokens[_base.state.tokens.length - 1].subscriptStartIndex = startTokenIndex; + const callContextId = _base.getNextContextId.call(void 0, ); + _base.state.tokens[_base.state.tokens.length - 1].contextId = callContextId; + parseCallExpressionArguments(); + _base.state.tokens[_base.state.tokens.length - 1].contextId = callContextId; + } + } else if (_index3.match.call(void 0, _types3.TokenType.backQuote)) { + // Tagged template expression. + parseTemplate(); + } else { + stopState.stop = true; + } +} exports.baseParseSubscript = baseParseSubscript; + + function atPossibleAsync() { + // This was made less strict than the original version to avoid passing around nodes, but it + // should be safe to have rare false positives here. 
+ return ( + _base.state.tokens[_base.state.tokens.length - 1].contextualKeyword === _keywords.ContextualKeyword._async && + !_util.canInsertSemicolon.call(void 0, ) + ); +} exports.atPossibleAsync = atPossibleAsync; + + function parseCallExpressionArguments() { + let first = true; + while (!_index3.eat.call(void 0, _types3.TokenType.parenR) && !_base.state.error) { + if (first) { + first = false; + } else { + _util.expect.call(void 0, _types3.TokenType.comma); + if (_index3.eat.call(void 0, _types3.TokenType.parenR)) { + break; + } + } + + parseExprListItem(false); + } +} exports.parseCallExpressionArguments = parseCallExpressionArguments; + +function shouldParseAsyncArrow() { + return _index3.match.call(void 0, _types3.TokenType.colon) || _index3.match.call(void 0, _types3.TokenType.arrow); +} + +function parseAsyncArrowFromCallExpression(startTokenIndex) { + if (_base.isTypeScriptEnabled) { + _typescript.tsStartParseAsyncArrowFromCallExpression.call(void 0, ); + } else if (_base.isFlowEnabled) { + _flow.flowStartParseAsyncArrowFromCallExpression.call(void 0, ); + } + _util.expect.call(void 0, _types3.TokenType.arrow); + parseArrowExpression(startTokenIndex); +} + +// Parse a no-call expression (like argument of `new` or `::` operators). + +function parseNoCallExpr() { + const startTokenIndex = _base.state.tokens.length; + parseExprAtom(); + parseSubscripts(startTokenIndex, true); +} + +// Parse an atomic expression — either a single token that is an +// expression, an expression started by a keyword like `function` or +// `new`, or an expression wrapped in punctuation like `()`, `[]`, +// or `{}`. +// Returns true if the parsed expression was an arrow function. + function parseExprAtom() { + if (_index3.eat.call(void 0, _types3.TokenType.modulo)) { + // V8 intrinsic expression. Just parse the identifier, and the function invocation is parsed + // naturally. + parseIdentifier(); + return false; + } + + if (_index3.match.call(void 0, _types3.TokenType.jsxText) || _index3.match.call(void 0, _types3.TokenType.jsxEmptyText)) { + parseLiteral(); + return false; + } else if (_index3.match.call(void 0, _types3.TokenType.lessThan) && _base.isJSXEnabled) { + _base.state.type = _types3.TokenType.jsxTagStart; + _index.jsxParseElement.call(void 0, ); + _index3.next.call(void 0, ); + return false; + } + + const canBeArrow = _base.state.potentialArrowAt === _base.state.start; + switch (_base.state.type) { + case _types3.TokenType.slash: + case _types3.TokenType.assign: + _index3.retokenizeSlashAsRegex.call(void 0, ); + // Fall through. 
+ + case _types3.TokenType._super: + case _types3.TokenType._this: + case _types3.TokenType.regexp: + case _types3.TokenType.num: + case _types3.TokenType.bigint: + case _types3.TokenType.decimal: + case _types3.TokenType.string: + case _types3.TokenType._null: + case _types3.TokenType._true: + case _types3.TokenType._false: + _index3.next.call(void 0, ); + return false; + + case _types3.TokenType._import: + _index3.next.call(void 0, ); + if (_index3.match.call(void 0, _types3.TokenType.dot)) { + // import.meta + _base.state.tokens[_base.state.tokens.length - 1].type = _types3.TokenType.name; + _index3.next.call(void 0, ); + parseIdentifier(); + } + return false; + + case _types3.TokenType.name: { + const startTokenIndex = _base.state.tokens.length; + const functionStart = _base.state.start; + const contextualKeyword = _base.state.contextualKeyword; + parseIdentifier(); + if (contextualKeyword === _keywords.ContextualKeyword._await) { + parseAwait(); + return false; + } else if ( + contextualKeyword === _keywords.ContextualKeyword._async && + _index3.match.call(void 0, _types3.TokenType._function) && + !_util.canInsertSemicolon.call(void 0, ) + ) { + _index3.next.call(void 0, ); + _statement.parseFunction.call(void 0, functionStart, false); + return false; + } else if ( + canBeArrow && + contextualKeyword === _keywords.ContextualKeyword._async && + !_util.canInsertSemicolon.call(void 0, ) && + _index3.match.call(void 0, _types3.TokenType.name) + ) { + _base.state.scopeDepth++; + _lval.parseBindingIdentifier.call(void 0, false); + _util.expect.call(void 0, _types3.TokenType.arrow); + // let foo = async bar => {}; + parseArrowExpression(startTokenIndex); + return true; + } else if (_index3.match.call(void 0, _types3.TokenType._do) && !_util.canInsertSemicolon.call(void 0, )) { + _index3.next.call(void 0, ); + _statement.parseBlock.call(void 0, ); + return false; + } + + if (canBeArrow && !_util.canInsertSemicolon.call(void 0, ) && _index3.match.call(void 0, _types3.TokenType.arrow)) { + _base.state.scopeDepth++; + _lval.markPriorBindingIdentifier.call(void 0, false); + _util.expect.call(void 0, _types3.TokenType.arrow); + parseArrowExpression(startTokenIndex); + return true; + } + + _base.state.tokens[_base.state.tokens.length - 1].identifierRole = _index3.IdentifierRole.Access; + return false; + } + + case _types3.TokenType._do: { + _index3.next.call(void 0, ); + _statement.parseBlock.call(void 0, ); + return false; + } + + case _types3.TokenType.parenL: { + const wasArrow = parseParenAndDistinguishExpression(canBeArrow); + return wasArrow; + } + + case _types3.TokenType.bracketL: + _index3.next.call(void 0, ); + parseExprList(_types3.TokenType.bracketR, true); + return false; + + case _types3.TokenType.braceL: + parseObj(false, false); + return false; + + case _types3.TokenType._function: + parseFunctionExpression(); + return false; + + case _types3.TokenType.at: + _statement.parseDecorators.call(void 0, ); + // Fall through. 
+ + case _types3.TokenType._class: + _statement.parseClass.call(void 0, false); + return false; + + case _types3.TokenType._new: + parseNew(); + return false; + + case _types3.TokenType.backQuote: + parseTemplate(); + return false; + + case _types3.TokenType.doubleColon: { + _index3.next.call(void 0, ); + parseNoCallExpr(); + return false; + } + + case _types3.TokenType.hash: { + const code = _index3.lookaheadCharCode.call(void 0, ); + if (_identifier.IS_IDENTIFIER_START[code] || code === _charcodes.charCodes.backslash) { + parseMaybePrivateName(); + } else { + _index3.next.call(void 0, ); + } + // Smart pipeline topic reference. + return false; + } + + default: + _util.unexpected.call(void 0, ); + return false; + } +} exports.parseExprAtom = parseExprAtom; + +function parseMaybePrivateName() { + _index3.eat.call(void 0, _types3.TokenType.hash); + parseIdentifier(); +} + +function parseFunctionExpression() { + const functionStart = _base.state.start; + parseIdentifier(); + if (_index3.eat.call(void 0, _types3.TokenType.dot)) { + // function.sent + parseIdentifier(); + } + _statement.parseFunction.call(void 0, functionStart, false); +} + + function parseLiteral() { + _index3.next.call(void 0, ); +} exports.parseLiteral = parseLiteral; + + function parseParenExpression() { + _util.expect.call(void 0, _types3.TokenType.parenL); + parseExpression(); + _util.expect.call(void 0, _types3.TokenType.parenR); +} exports.parseParenExpression = parseParenExpression; + +// Returns true if this was an arrow expression. +function parseParenAndDistinguishExpression(canBeArrow) { + // Assume this is a normal parenthesized expression, but if we see an arrow, we'll bail and + // start over as a parameter list. + const snapshot = _base.state.snapshot(); + + const startTokenIndex = _base.state.tokens.length; + _util.expect.call(void 0, _types3.TokenType.parenL); + + let first = true; + + while (!_index3.match.call(void 0, _types3.TokenType.parenR) && !_base.state.error) { + if (first) { + first = false; + } else { + _util.expect.call(void 0, _types3.TokenType.comma); + if (_index3.match.call(void 0, _types3.TokenType.parenR)) { + break; + } + } + + if (_index3.match.call(void 0, _types3.TokenType.ellipsis)) { + _lval.parseRest.call(void 0, false /* isBlockScope */); + parseParenItem(); + break; + } else { + parseMaybeAssign(false, true); + } + } + + _util.expect.call(void 0, _types3.TokenType.parenR); + + if (canBeArrow && shouldParseArrow()) { + const wasArrow = parseArrow(); + if (wasArrow) { + // It was an arrow function this whole time, so start over and parse it as params so that we + // get proper token annotations. + _base.state.restoreFromSnapshot(snapshot); + _base.state.scopeDepth++; + // Don't specify a context ID because arrow functions don't need a context ID. + _statement.parseFunctionParams.call(void 0, ); + parseArrow(); + parseArrowExpression(startTokenIndex); + if (_base.state.error) { + // Nevermind! This must have been something that looks very much like an + // arrow function but where its "parameter list" isn't actually a valid + // parameter list. Force non-arrow parsing. + // See https://github.com/alangpierce/sucrase/issues/666 for an example. + _base.state.restoreFromSnapshot(snapshot); + parseParenAndDistinguishExpression(false); + return false; + } + return true; + } + } + + return false; +} + +function shouldParseArrow() { + return _index3.match.call(void 0, _types3.TokenType.colon) || !_util.canInsertSemicolon.call(void 0, ); +} + +// Returns whether there was an arrow token. 
+ function parseArrow() { + if (_base.isTypeScriptEnabled) { + return _typescript.tsParseArrow.call(void 0, ); + } else if (_base.isFlowEnabled) { + return _flow.flowParseArrow.call(void 0, ); + } else { + return _index3.eat.call(void 0, _types3.TokenType.arrow); + } +} exports.parseArrow = parseArrow; + +function parseParenItem() { + if (_base.isTypeScriptEnabled || _base.isFlowEnabled) { + _types.typedParseParenItem.call(void 0, ); + } +} + +// New's precedence is slightly tricky. It must allow its argument to +// be a `[]` or dot subscript expression, but not a call — at least, +// not without wrapping it in parentheses. Thus, it uses the noCalls +// argument to parseSubscripts to prevent it from consuming the +// argument list. +function parseNew() { + _util.expect.call(void 0, _types3.TokenType._new); + if (_index3.eat.call(void 0, _types3.TokenType.dot)) { + // new.target + parseIdentifier(); + return; + } + parseNewCallee(); + if (_base.isFlowEnabled) { + _flow.flowStartParseNewArguments.call(void 0, ); + } + if (_index3.eat.call(void 0, _types3.TokenType.parenL)) { + parseExprList(_types3.TokenType.parenR); + } +} + +function parseNewCallee() { + parseNoCallExpr(); + _index3.eat.call(void 0, _types3.TokenType.questionDot); +} + + function parseTemplate() { + // Finish `, read quasi + _index3.nextTemplateToken.call(void 0, ); + // Finish quasi, read ${ + _index3.nextTemplateToken.call(void 0, ); + while (!_index3.match.call(void 0, _types3.TokenType.backQuote) && !_base.state.error) { + _util.expect.call(void 0, _types3.TokenType.dollarBraceL); + parseExpression(); + // Finish }, read quasi + _index3.nextTemplateToken.call(void 0, ); + // Finish quasi, read either ${ or ` + _index3.nextTemplateToken.call(void 0, ); + } + _index3.next.call(void 0, ); +} exports.parseTemplate = parseTemplate; + +// Parse an object literal or binding pattern. + function parseObj(isPattern, isBlockScope) { + // Attach a context ID to the object open and close brace and each object key. + const contextId = _base.getNextContextId.call(void 0, ); + let first = true; + + _index3.next.call(void 0, ); + _base.state.tokens[_base.state.tokens.length - 1].contextId = contextId; + + while (!_index3.eat.call(void 0, _types3.TokenType.braceR) && !_base.state.error) { + if (first) { + first = false; + } else { + _util.expect.call(void 0, _types3.TokenType.comma); + if (_index3.eat.call(void 0, _types3.TokenType.braceR)) { + break; + } + } + + let isGenerator = false; + if (_index3.match.call(void 0, _types3.TokenType.ellipsis)) { + const previousIndex = _base.state.tokens.length; + _lval.parseSpread.call(void 0, ); + if (isPattern) { + // Mark role when the only thing being spread over is an identifier. 
+ if (_base.state.tokens.length === previousIndex + 2) { + _lval.markPriorBindingIdentifier.call(void 0, isBlockScope); + } + if (_index3.eat.call(void 0, _types3.TokenType.braceR)) { + break; + } + } + continue; + } + + if (!isPattern) { + isGenerator = _index3.eat.call(void 0, _types3.TokenType.star); + } + + if (!isPattern && _util.isContextual.call(void 0, _keywords.ContextualKeyword._async)) { + if (isGenerator) _util.unexpected.call(void 0, ); + + parseIdentifier(); + if ( + _index3.match.call(void 0, _types3.TokenType.colon) || + _index3.match.call(void 0, _types3.TokenType.parenL) || + _index3.match.call(void 0, _types3.TokenType.braceR) || + _index3.match.call(void 0, _types3.TokenType.eq) || + _index3.match.call(void 0, _types3.TokenType.comma) + ) { + // This is a key called "async" rather than an async function. + } else { + if (_index3.match.call(void 0, _types3.TokenType.star)) { + _index3.next.call(void 0, ); + isGenerator = true; + } + parsePropertyName(contextId); + } + } else { + parsePropertyName(contextId); + } + + parseObjPropValue(isPattern, isBlockScope, contextId); + } + + _base.state.tokens[_base.state.tokens.length - 1].contextId = contextId; +} exports.parseObj = parseObj; + +function isGetterOrSetterMethod(isPattern) { + // We go off of the next and don't bother checking if the node key is actually "get" or "set". + // This lets us avoid generating a node, and should only make the validation worse. + return ( + !isPattern && + (_index3.match.call(void 0, _types3.TokenType.string) || // get "string"() {} + _index3.match.call(void 0, _types3.TokenType.num) || // get 1() {} + _index3.match.call(void 0, _types3.TokenType.bracketL) || // get ["string"]() {} + _index3.match.call(void 0, _types3.TokenType.name) || // get foo() {} + !!(_base.state.type & _types3.TokenType.IS_KEYWORD)) // get debugger() {} + ); +} + +// Returns true if this was a method. +function parseObjectMethod(isPattern, objectContextId) { + // We don't need to worry about modifiers because object methods can't have optional bodies, so + // the start will never be used. + const functionStart = _base.state.start; + if (_index3.match.call(void 0, _types3.TokenType.parenL)) { + if (isPattern) _util.unexpected.call(void 0, ); + parseMethod(functionStart, /* isConstructor */ false); + return true; + } + + if (isGetterOrSetterMethod(isPattern)) { + parsePropertyName(objectContextId); + parseMethod(functionStart, /* isConstructor */ false); + return true; + } + return false; +} + +function parseObjectProperty(isPattern, isBlockScope) { + if (_index3.eat.call(void 0, _types3.TokenType.colon)) { + if (isPattern) { + _lval.parseMaybeDefault.call(void 0, isBlockScope); + } else { + parseMaybeAssign(false); + } + return; + } + + // Since there's no colon, we assume this is an object shorthand. + + // If we're in a destructuring, we've now discovered that the key was actually an assignee, so + // we need to tag it as a declaration with the appropriate scope. Otherwise, we might need to + // transform it on access, so mark it as a normal object shorthand. 
+ let identifierRole; + if (isPattern) { + if (_base.state.scopeDepth === 0) { + identifierRole = _index3.IdentifierRole.ObjectShorthandTopLevelDeclaration; + } else if (isBlockScope) { + identifierRole = _index3.IdentifierRole.ObjectShorthandBlockScopedDeclaration; + } else { + identifierRole = _index3.IdentifierRole.ObjectShorthandFunctionScopedDeclaration; + } + } else { + identifierRole = _index3.IdentifierRole.ObjectShorthand; + } + _base.state.tokens[_base.state.tokens.length - 1].identifierRole = identifierRole; + + // Regardless of whether we know this to be a pattern or if we're in an ambiguous context, allow + // parsing as if there's a default value. + _lval.parseMaybeDefault.call(void 0, isBlockScope, true); +} + +function parseObjPropValue( + isPattern, + isBlockScope, + objectContextId, +) { + if (_base.isTypeScriptEnabled) { + _typescript.tsStartParseObjPropValue.call(void 0, ); + } else if (_base.isFlowEnabled) { + _flow.flowStartParseObjPropValue.call(void 0, ); + } + const wasMethod = parseObjectMethod(isPattern, objectContextId); + if (!wasMethod) { + parseObjectProperty(isPattern, isBlockScope); + } +} + + function parsePropertyName(objectContextId) { + if (_base.isFlowEnabled) { + _flow.flowParseVariance.call(void 0, ); + } + if (_index3.eat.call(void 0, _types3.TokenType.bracketL)) { + _base.state.tokens[_base.state.tokens.length - 1].contextId = objectContextId; + parseMaybeAssign(); + _util.expect.call(void 0, _types3.TokenType.bracketR); + _base.state.tokens[_base.state.tokens.length - 1].contextId = objectContextId; + } else { + if (_index3.match.call(void 0, _types3.TokenType.num) || _index3.match.call(void 0, _types3.TokenType.string) || _index3.match.call(void 0, _types3.TokenType.bigint) || _index3.match.call(void 0, _types3.TokenType.decimal)) { + parseExprAtom(); + } else { + parseMaybePrivateName(); + } + + _base.state.tokens[_base.state.tokens.length - 1].identifierRole = _index3.IdentifierRole.ObjectKey; + _base.state.tokens[_base.state.tokens.length - 1].contextId = objectContextId; + } +} exports.parsePropertyName = parsePropertyName; + +// Parse object or class method. + function parseMethod(functionStart, isConstructor) { + const funcContextId = _base.getNextContextId.call(void 0, ); + + _base.state.scopeDepth++; + const startTokenIndex = _base.state.tokens.length; + const allowModifiers = isConstructor; // For TypeScript parameter properties + _statement.parseFunctionParams.call(void 0, allowModifiers, funcContextId); + parseFunctionBodyAndFinish(functionStart, funcContextId); + const endTokenIndex = _base.state.tokens.length; + _base.state.scopes.push(new (0, _state.Scope)(startTokenIndex, endTokenIndex, true)); + _base.state.scopeDepth--; +} exports.parseMethod = parseMethod; + +// Parse arrow function expression. +// If the parameters are provided, they will be converted to an +// assignable list. 
+ function parseArrowExpression(startTokenIndex) { + parseFunctionBody(true); + const endTokenIndex = _base.state.tokens.length; + _base.state.scopes.push(new (0, _state.Scope)(startTokenIndex, endTokenIndex, true)); + _base.state.scopeDepth--; +} exports.parseArrowExpression = parseArrowExpression; + + function parseFunctionBodyAndFinish(functionStart, funcContextId = 0) { + if (_base.isTypeScriptEnabled) { + _typescript.tsParseFunctionBodyAndFinish.call(void 0, functionStart, funcContextId); + } else if (_base.isFlowEnabled) { + _flow.flowParseFunctionBodyAndFinish.call(void 0, funcContextId); + } else { + parseFunctionBody(false, funcContextId); + } +} exports.parseFunctionBodyAndFinish = parseFunctionBodyAndFinish; + + function parseFunctionBody(allowExpression, funcContextId = 0) { + const isExpression = allowExpression && !_index3.match.call(void 0, _types3.TokenType.braceL); + + if (isExpression) { + parseMaybeAssign(); + } else { + _statement.parseBlock.call(void 0, true /* isFunctionScope */, funcContextId); + } +} exports.parseFunctionBody = parseFunctionBody; + +// Parses a comma-separated list of expressions, and returns them as +// an array. `close` is the token type that ends the list, and +// `allowEmpty` can be turned on to allow subsequent commas with +// nothing in between them to be parsed as `null` (which is needed +// for array literals). + +function parseExprList(close, allowEmpty = false) { + let first = true; + while (!_index3.eat.call(void 0, close) && !_base.state.error) { + if (first) { + first = false; + } else { + _util.expect.call(void 0, _types3.TokenType.comma); + if (_index3.eat.call(void 0, close)) break; + } + parseExprListItem(allowEmpty); + } +} + +function parseExprListItem(allowEmpty) { + if (allowEmpty && _index3.match.call(void 0, _types3.TokenType.comma)) { + // Empty item; nothing more to parse for this item. + } else if (_index3.match.call(void 0, _types3.TokenType.ellipsis)) { + _lval.parseSpread.call(void 0, ); + parseParenItem(); + } else if (_index3.match.call(void 0, _types3.TokenType.question)) { + // Partial function application proposal. + _index3.next.call(void 0, ); + } else { + parseMaybeAssign(false, true); + } +} + +// Parse the next token as an identifier. + function parseIdentifier() { + _index3.next.call(void 0, ); + _base.state.tokens[_base.state.tokens.length - 1].type = _types3.TokenType.name; +} exports.parseIdentifier = parseIdentifier; + +// Parses await expression inside async function. +function parseAwait() { + parseMaybeUnary(); +} + +// Parses yield expression inside generator. +function parseYield() { + _index3.next.call(void 0, ); + if (!_index3.match.call(void 0, _types3.TokenType.semi) && !_util.canInsertSemicolon.call(void 0, )) { + _index3.eat.call(void 0, _types3.TokenType.star); + parseMaybeAssign(); + } +} + +// https://github.com/tc39/proposal-js-module-blocks +function parseModuleExpression() { + _util.expectContextual.call(void 0, _keywords.ContextualKeyword._module); + _util.expect.call(void 0, _types3.TokenType.braceL); + // For now, just call parseBlockBody to parse the block. In the future when we + // implement full support, we'll want to emit scopes and possibly other + // information. 
+ _statement.parseBlockBody.call(void 0, _types3.TokenType.braceR); +} diff --git a/node_modules/sucrase/dist/parser/traverser/index.js b/node_modules/sucrase/dist/parser/traverser/index.js new file mode 100644 index 0000000..72e4130 --- /dev/null +++ b/node_modules/sucrase/dist/parser/traverser/index.js @@ -0,0 +1,18 @@ +"use strict";Object.defineProperty(exports, "__esModule", {value: true}); +var _index = require('../tokenizer/index'); +var _charcodes = require('../util/charcodes'); +var _base = require('./base'); +var _statement = require('./statement'); + + function parseFile() { + // If enabled, skip leading hashbang line. + if ( + _base.state.pos === 0 && + _base.input.charCodeAt(0) === _charcodes.charCodes.numberSign && + _base.input.charCodeAt(1) === _charcodes.charCodes.exclamationMark + ) { + _index.skipLineComment.call(void 0, 2); + } + _index.nextToken.call(void 0, ); + return _statement.parseTopLevel.call(void 0, ); +} exports.parseFile = parseFile; diff --git a/node_modules/sucrase/dist/parser/traverser/lval.js b/node_modules/sucrase/dist/parser/traverser/lval.js new file mode 100644 index 0000000..9057497 --- /dev/null +++ b/node_modules/sucrase/dist/parser/traverser/lval.js @@ -0,0 +1,159 @@ +"use strict";Object.defineProperty(exports, "__esModule", {value: true});var _flow = require('../plugins/flow'); +var _typescript = require('../plugins/typescript'); + + + + + + + +var _index = require('../tokenizer/index'); +var _keywords = require('../tokenizer/keywords'); +var _types = require('../tokenizer/types'); +var _base = require('./base'); +var _expression = require('./expression'); +var _util = require('./util'); + + function parseSpread() { + _index.next.call(void 0, ); + _expression.parseMaybeAssign.call(void 0, false); +} exports.parseSpread = parseSpread; + + function parseRest(isBlockScope) { + _index.next.call(void 0, ); + parseBindingAtom(isBlockScope); +} exports.parseRest = parseRest; + + function parseBindingIdentifier(isBlockScope) { + _expression.parseIdentifier.call(void 0, ); + markPriorBindingIdentifier(isBlockScope); +} exports.parseBindingIdentifier = parseBindingIdentifier; + + function parseImportedIdentifier() { + _expression.parseIdentifier.call(void 0, ); + _base.state.tokens[_base.state.tokens.length - 1].identifierRole = _index.IdentifierRole.ImportDeclaration; +} exports.parseImportedIdentifier = parseImportedIdentifier; + + function markPriorBindingIdentifier(isBlockScope) { + let identifierRole; + if (_base.state.scopeDepth === 0) { + identifierRole = _index.IdentifierRole.TopLevelDeclaration; + } else if (isBlockScope) { + identifierRole = _index.IdentifierRole.BlockScopedDeclaration; + } else { + identifierRole = _index.IdentifierRole.FunctionScopedDeclaration; + } + _base.state.tokens[_base.state.tokens.length - 1].identifierRole = identifierRole; +} exports.markPriorBindingIdentifier = markPriorBindingIdentifier; + +// Parses lvalue (assignable) atom. + function parseBindingAtom(isBlockScope) { + switch (_base.state.type) { + case _types.TokenType._this: { + // In TypeScript, "this" may be the name of a parameter, so allow it. 
+ const oldIsType = _index.pushTypeContext.call(void 0, 0); + _index.next.call(void 0, ); + _index.popTypeContext.call(void 0, oldIsType); + return; + } + + case _types.TokenType._yield: + case _types.TokenType.name: { + _base.state.type = _types.TokenType.name; + parseBindingIdentifier(isBlockScope); + return; + } + + case _types.TokenType.bracketL: { + _index.next.call(void 0, ); + parseBindingList(_types.TokenType.bracketR, isBlockScope, true /* allowEmpty */); + return; + } + + case _types.TokenType.braceL: + _expression.parseObj.call(void 0, true, isBlockScope); + return; + + default: + _util.unexpected.call(void 0, ); + } +} exports.parseBindingAtom = parseBindingAtom; + + function parseBindingList( + close, + isBlockScope, + allowEmpty = false, + allowModifiers = false, + contextId = 0, +) { + let first = true; + + let hasRemovedComma = false; + const firstItemTokenIndex = _base.state.tokens.length; + + while (!_index.eat.call(void 0, close) && !_base.state.error) { + if (first) { + first = false; + } else { + _util.expect.call(void 0, _types.TokenType.comma); + _base.state.tokens[_base.state.tokens.length - 1].contextId = contextId; + // After a "this" type in TypeScript, we need to set the following comma (if any) to also be + // a type token so that it will be removed. + if (!hasRemovedComma && _base.state.tokens[firstItemTokenIndex].isType) { + _base.state.tokens[_base.state.tokens.length - 1].isType = true; + hasRemovedComma = true; + } + } + if (allowEmpty && _index.match.call(void 0, _types.TokenType.comma)) { + // Empty item; nothing further to parse for this item. + } else if (_index.eat.call(void 0, close)) { + break; + } else if (_index.match.call(void 0, _types.TokenType.ellipsis)) { + parseRest(isBlockScope); + parseAssignableListItemTypes(); + // Support rest element trailing commas allowed by TypeScript <2.9. + _index.eat.call(void 0, _types.TokenType.comma); + _util.expect.call(void 0, close); + break; + } else { + parseAssignableListItem(allowModifiers, isBlockScope); + } + } +} exports.parseBindingList = parseBindingList; + +function parseAssignableListItem(allowModifiers, isBlockScope) { + if (allowModifiers) { + _typescript.tsParseModifiers.call(void 0, [ + _keywords.ContextualKeyword._public, + _keywords.ContextualKeyword._protected, + _keywords.ContextualKeyword._private, + _keywords.ContextualKeyword._readonly, + _keywords.ContextualKeyword._override, + ]); + } + + parseMaybeDefault(isBlockScope); + parseAssignableListItemTypes(); + parseMaybeDefault(isBlockScope, true /* leftAlreadyParsed */); +} + +function parseAssignableListItemTypes() { + if (_base.isFlowEnabled) { + _flow.flowParseAssignableListItemTypes.call(void 0, ); + } else if (_base.isTypeScriptEnabled) { + _typescript.tsParseAssignableListItemTypes.call(void 0, ); + } +} + +// Parses assignment pattern around given atom if possible. 
+ function parseMaybeDefault(isBlockScope, leftAlreadyParsed = false) { + if (!leftAlreadyParsed) { + parseBindingAtom(isBlockScope); + } + if (!_index.eat.call(void 0, _types.TokenType.eq)) { + return; + } + const eqIndex = _base.state.tokens.length - 1; + _expression.parseMaybeAssign.call(void 0, ); + _base.state.tokens[eqIndex].rhsEndIndex = _base.state.tokens.length; +} exports.parseMaybeDefault = parseMaybeDefault; diff --git a/node_modules/sucrase/dist/parser/traverser/statement.js b/node_modules/sucrase/dist/parser/traverser/statement.js new file mode 100644 index 0000000..9ec98cd --- /dev/null +++ b/node_modules/sucrase/dist/parser/traverser/statement.js @@ -0,0 +1,1281 @@ +"use strict";Object.defineProperty(exports, "__esModule", {value: true});/* eslint max-len: 0 */ + +var _index = require('../index'); + + + + + + + + + + + + + + + + +var _flow = require('../plugins/flow'); + + + + + + + + + + + + + + + + + + +var _typescript = require('../plugins/typescript'); + + + + + + + + + + + + +var _tokenizer = require('../tokenizer'); +var _keywords = require('../tokenizer/keywords'); +var _state = require('../tokenizer/state'); +var _types = require('../tokenizer/types'); +var _charcodes = require('../util/charcodes'); +var _base = require('./base'); + + + + + + + + + + + + +var _expression = require('./expression'); + + + + + +var _lval = require('./lval'); + + + + + + + + + + + + +var _util = require('./util'); + + function parseTopLevel() { + parseBlockBody(_types.TokenType.eof); + _base.state.scopes.push(new (0, _state.Scope)(0, _base.state.tokens.length, true)); + if (_base.state.scopeDepth !== 0) { + throw new Error(`Invalid scope depth at end of file: ${_base.state.scopeDepth}`); + } + return new (0, _index.File)(_base.state.tokens, _base.state.scopes); +} exports.parseTopLevel = parseTopLevel; + +// Parse a single statement. +// +// If expecting a statement and finding a slash operator, parse a +// regular expression literal. This is to handle cases like +// `if (foo) /blah/.exec(foo)`, where looking at the previous token +// does not help. + + function parseStatement(declaration) { + if (_base.isFlowEnabled) { + if (_flow.flowTryParseStatement.call(void 0, )) { + return; + } + } + if (_tokenizer.match.call(void 0, _types.TokenType.at)) { + parseDecorators(); + } + parseStatementContent(declaration); +} exports.parseStatement = parseStatement; + +function parseStatementContent(declaration) { + if (_base.isTypeScriptEnabled) { + if (_typescript.tsTryParseStatementContent.call(void 0, )) { + return; + } + } + + const starttype = _base.state.type; + + // Most types of statements are recognized by the keyword they + // start with. Many are trivial to parse, some require a bit of + // complexity. 
+ + switch (starttype) { + case _types.TokenType._break: + case _types.TokenType._continue: + parseBreakContinueStatement(); + return; + case _types.TokenType._debugger: + parseDebuggerStatement(); + return; + case _types.TokenType._do: + parseDoStatement(); + return; + case _types.TokenType._for: + parseForStatement(); + return; + case _types.TokenType._function: + if (_tokenizer.lookaheadType.call(void 0, ) === _types.TokenType.dot) break; + if (!declaration) _util.unexpected.call(void 0, ); + parseFunctionStatement(); + return; + + case _types.TokenType._class: + if (!declaration) _util.unexpected.call(void 0, ); + parseClass(true); + return; + + case _types.TokenType._if: + parseIfStatement(); + return; + case _types.TokenType._return: + parseReturnStatement(); + return; + case _types.TokenType._switch: + parseSwitchStatement(); + return; + case _types.TokenType._throw: + parseThrowStatement(); + return; + case _types.TokenType._try: + parseTryStatement(); + return; + + case _types.TokenType._let: + case _types.TokenType._const: + if (!declaration) _util.unexpected.call(void 0, ); // NOTE: falls through to _var + + case _types.TokenType._var: + parseVarStatement(starttype !== _types.TokenType._var); + return; + + case _types.TokenType._while: + parseWhileStatement(); + return; + case _types.TokenType.braceL: + parseBlock(); + return; + case _types.TokenType.semi: + parseEmptyStatement(); + return; + case _types.TokenType._export: + case _types.TokenType._import: { + const nextType = _tokenizer.lookaheadType.call(void 0, ); + if (nextType === _types.TokenType.parenL || nextType === _types.TokenType.dot) { + break; + } + _tokenizer.next.call(void 0, ); + if (starttype === _types.TokenType._import) { + parseImport(); + } else { + parseExport(); + } + return; + } + case _types.TokenType.name: + if (_base.state.contextualKeyword === _keywords.ContextualKeyword._async) { + const functionStart = _base.state.start; + // peek ahead and see if next token is a function + const snapshot = _base.state.snapshot(); + _tokenizer.next.call(void 0, ); + if (_tokenizer.match.call(void 0, _types.TokenType._function) && !_util.canInsertSemicolon.call(void 0, )) { + _util.expect.call(void 0, _types.TokenType._function); + parseFunction(functionStart, true); + return; + } else { + _base.state.restoreFromSnapshot(snapshot); + } + } else if ( + _base.state.contextualKeyword === _keywords.ContextualKeyword._using && + !_util.hasFollowingLineBreak.call(void 0, ) && + // Statements like `using[0]` and `using in foo` aren't actual using + // declarations. + _tokenizer.lookaheadType.call(void 0, ) === _types.TokenType.name + ) { + parseVarStatement(true); + return; + } + default: + // Do nothing. + break; + } + + // If the statement does not start with a statement keyword or a + // brace, it's an ExpressionStatement or LabeledStatement. We + // simply start parsing an expression, and afterwards, if the + // next token is a colon and the expression was a simple + // Identifier node, we switch to interpreting it as a label. 
+ const initialTokensLength = _base.state.tokens.length; + _expression.parseExpression.call(void 0, ); + let simpleName = null; + if (_base.state.tokens.length === initialTokensLength + 1) { + const token = _base.state.tokens[_base.state.tokens.length - 1]; + if (token.type === _types.TokenType.name) { + simpleName = token.contextualKeyword; + } + } + if (simpleName == null) { + _util.semicolon.call(void 0, ); + return; + } + if (_tokenizer.eat.call(void 0, _types.TokenType.colon)) { + parseLabeledStatement(); + } else { + // This was an identifier, so we might want to handle flow/typescript-specific cases. + parseIdentifierStatement(simpleName); + } +} + + function parseDecorators() { + while (_tokenizer.match.call(void 0, _types.TokenType.at)) { + parseDecorator(); + } +} exports.parseDecorators = parseDecorators; + +function parseDecorator() { + _tokenizer.next.call(void 0, ); + if (_tokenizer.eat.call(void 0, _types.TokenType.parenL)) { + _expression.parseExpression.call(void 0, ); + _util.expect.call(void 0, _types.TokenType.parenR); + } else { + _expression.parseIdentifier.call(void 0, ); + while (_tokenizer.eat.call(void 0, _types.TokenType.dot)) { + _expression.parseIdentifier.call(void 0, ); + } + parseMaybeDecoratorArguments(); + } +} + +function parseMaybeDecoratorArguments() { + if (_base.isTypeScriptEnabled) { + _typescript.tsParseMaybeDecoratorArguments.call(void 0, ); + } else { + baseParseMaybeDecoratorArguments(); + } +} + + function baseParseMaybeDecoratorArguments() { + if (_tokenizer.eat.call(void 0, _types.TokenType.parenL)) { + _expression.parseCallExpressionArguments.call(void 0, ); + } +} exports.baseParseMaybeDecoratorArguments = baseParseMaybeDecoratorArguments; + +function parseBreakContinueStatement() { + _tokenizer.next.call(void 0, ); + if (!_util.isLineTerminator.call(void 0, )) { + _expression.parseIdentifier.call(void 0, ); + _util.semicolon.call(void 0, ); + } +} + +function parseDebuggerStatement() { + _tokenizer.next.call(void 0, ); + _util.semicolon.call(void 0, ); +} + +function parseDoStatement() { + _tokenizer.next.call(void 0, ); + parseStatement(false); + _util.expect.call(void 0, _types.TokenType._while); + _expression.parseParenExpression.call(void 0, ); + _tokenizer.eat.call(void 0, _types.TokenType.semi); +} + +function parseForStatement() { + _base.state.scopeDepth++; + const startTokenIndex = _base.state.tokens.length; + parseAmbiguousForStatement(); + const endTokenIndex = _base.state.tokens.length; + _base.state.scopes.push(new (0, _state.Scope)(startTokenIndex, endTokenIndex, false)); + _base.state.scopeDepth--; +} + +/** + * Determine if this token is a `using` declaration (explicit resource + * management) as part of a loop. + * https://github.com/tc39/proposal-explicit-resource-management + */ +function isUsingInLoop() { + if (!_util.isContextual.call(void 0, _keywords.ContextualKeyword._using)) { + return false; + } + // This must be `for (using of`, where `using` is the name of the loop + // variable. + if (_util.isLookaheadContextual.call(void 0, _keywords.ContextualKeyword._of)) { + return false; + } + return true; +} + +// Disambiguating between a `for` and a `for`/`in` or `for`/`of` +// loop is non-trivial. Basically, we have to parse the init `var` +// statement or expression, disallowing the `in` operator (see +// the second parameter to `parseExpression`), and then check +// whether the next token is `in` or `of`. 
When there is no init +// part (semicolon immediately after the opening parenthesis), it +// is a regular `for` loop. +function parseAmbiguousForStatement() { + _tokenizer.next.call(void 0, ); + + let forAwait = false; + if (_util.isContextual.call(void 0, _keywords.ContextualKeyword._await)) { + forAwait = true; + _tokenizer.next.call(void 0, ); + } + _util.expect.call(void 0, _types.TokenType.parenL); + + if (_tokenizer.match.call(void 0, _types.TokenType.semi)) { + if (forAwait) { + _util.unexpected.call(void 0, ); + } + parseFor(); + return; + } + + if (_tokenizer.match.call(void 0, _types.TokenType._var) || _tokenizer.match.call(void 0, _types.TokenType._let) || _tokenizer.match.call(void 0, _types.TokenType._const) || isUsingInLoop()) { + _tokenizer.next.call(void 0, ); + parseVar(true, _base.state.type !== _types.TokenType._var); + if (_tokenizer.match.call(void 0, _types.TokenType._in) || _util.isContextual.call(void 0, _keywords.ContextualKeyword._of)) { + parseForIn(forAwait); + return; + } + parseFor(); + return; + } + + _expression.parseExpression.call(void 0, true); + if (_tokenizer.match.call(void 0, _types.TokenType._in) || _util.isContextual.call(void 0, _keywords.ContextualKeyword._of)) { + parseForIn(forAwait); + return; + } + if (forAwait) { + _util.unexpected.call(void 0, ); + } + parseFor(); +} + +function parseFunctionStatement() { + const functionStart = _base.state.start; + _tokenizer.next.call(void 0, ); + parseFunction(functionStart, true); +} + +function parseIfStatement() { + _tokenizer.next.call(void 0, ); + _expression.parseParenExpression.call(void 0, ); + parseStatement(false); + if (_tokenizer.eat.call(void 0, _types.TokenType._else)) { + parseStatement(false); + } +} + +function parseReturnStatement() { + _tokenizer.next.call(void 0, ); + + // In `return` (and `break`/`continue`), the keywords with + // optional arguments, we eagerly look for a semicolon or the + // possibility to insert one. + + if (!_util.isLineTerminator.call(void 0, )) { + _expression.parseExpression.call(void 0, ); + _util.semicolon.call(void 0, ); + } +} + +function parseSwitchStatement() { + _tokenizer.next.call(void 0, ); + _expression.parseParenExpression.call(void 0, ); + _base.state.scopeDepth++; + const startTokenIndex = _base.state.tokens.length; + _util.expect.call(void 0, _types.TokenType.braceL); + + // Don't bother validation; just go through any sequence of cases, defaults, and statements. 
+ while (!_tokenizer.match.call(void 0, _types.TokenType.braceR) && !_base.state.error) { + if (_tokenizer.match.call(void 0, _types.TokenType._case) || _tokenizer.match.call(void 0, _types.TokenType._default)) { + const isCase = _tokenizer.match.call(void 0, _types.TokenType._case); + _tokenizer.next.call(void 0, ); + if (isCase) { + _expression.parseExpression.call(void 0, ); + } + _util.expect.call(void 0, _types.TokenType.colon); + } else { + parseStatement(true); + } + } + _tokenizer.next.call(void 0, ); // Closing brace + const endTokenIndex = _base.state.tokens.length; + _base.state.scopes.push(new (0, _state.Scope)(startTokenIndex, endTokenIndex, false)); + _base.state.scopeDepth--; +} + +function parseThrowStatement() { + _tokenizer.next.call(void 0, ); + _expression.parseExpression.call(void 0, ); + _util.semicolon.call(void 0, ); +} + +function parseCatchClauseParam() { + _lval.parseBindingAtom.call(void 0, true /* isBlockScope */); + + if (_base.isTypeScriptEnabled) { + _typescript.tsTryParseTypeAnnotation.call(void 0, ); + } +} + +function parseTryStatement() { + _tokenizer.next.call(void 0, ); + + parseBlock(); + + if (_tokenizer.match.call(void 0, _types.TokenType._catch)) { + _tokenizer.next.call(void 0, ); + let catchBindingStartTokenIndex = null; + if (_tokenizer.match.call(void 0, _types.TokenType.parenL)) { + _base.state.scopeDepth++; + catchBindingStartTokenIndex = _base.state.tokens.length; + _util.expect.call(void 0, _types.TokenType.parenL); + parseCatchClauseParam(); + _util.expect.call(void 0, _types.TokenType.parenR); + } + parseBlock(); + if (catchBindingStartTokenIndex != null) { + // We need a special scope for the catch binding which includes the binding itself and the + // catch block. + const endTokenIndex = _base.state.tokens.length; + _base.state.scopes.push(new (0, _state.Scope)(catchBindingStartTokenIndex, endTokenIndex, false)); + _base.state.scopeDepth--; + } + } + if (_tokenizer.eat.call(void 0, _types.TokenType._finally)) { + parseBlock(); + } +} + + function parseVarStatement(isBlockScope) { + _tokenizer.next.call(void 0, ); + parseVar(false, isBlockScope); + _util.semicolon.call(void 0, ); +} exports.parseVarStatement = parseVarStatement; + +function parseWhileStatement() { + _tokenizer.next.call(void 0, ); + _expression.parseParenExpression.call(void 0, ); + parseStatement(false); +} + +function parseEmptyStatement() { + _tokenizer.next.call(void 0, ); +} + +function parseLabeledStatement() { + parseStatement(true); +} + +/** + * Parse a statement starting with an identifier of the given name. Subclasses match on the name + * to handle statements like "declare". + */ +function parseIdentifierStatement(contextualKeyword) { + if (_base.isTypeScriptEnabled) { + _typescript.tsParseIdentifierStatement.call(void 0, contextualKeyword); + } else if (_base.isFlowEnabled) { + _flow.flowParseIdentifierStatement.call(void 0, contextualKeyword); + } else { + _util.semicolon.call(void 0, ); + } +} + +// Parse a semicolon-enclosed block of statements. 
+ function parseBlock(isFunctionScope = false, contextId = 0) { + const startTokenIndex = _base.state.tokens.length; + _base.state.scopeDepth++; + _util.expect.call(void 0, _types.TokenType.braceL); + if (contextId) { + _base.state.tokens[_base.state.tokens.length - 1].contextId = contextId; + } + parseBlockBody(_types.TokenType.braceR); + if (contextId) { + _base.state.tokens[_base.state.tokens.length - 1].contextId = contextId; + } + const endTokenIndex = _base.state.tokens.length; + _base.state.scopes.push(new (0, _state.Scope)(startTokenIndex, endTokenIndex, isFunctionScope)); + _base.state.scopeDepth--; +} exports.parseBlock = parseBlock; + + function parseBlockBody(end) { + while (!_tokenizer.eat.call(void 0, end) && !_base.state.error) { + parseStatement(true); + } +} exports.parseBlockBody = parseBlockBody; + +// Parse a regular `for` loop. The disambiguation code in +// `parseStatement` will already have parsed the init statement or +// expression. + +function parseFor() { + _util.expect.call(void 0, _types.TokenType.semi); + if (!_tokenizer.match.call(void 0, _types.TokenType.semi)) { + _expression.parseExpression.call(void 0, ); + } + _util.expect.call(void 0, _types.TokenType.semi); + if (!_tokenizer.match.call(void 0, _types.TokenType.parenR)) { + _expression.parseExpression.call(void 0, ); + } + _util.expect.call(void 0, _types.TokenType.parenR); + parseStatement(false); +} + +// Parse a `for`/`in` and `for`/`of` loop, which are almost +// same from parser's perspective. + +function parseForIn(forAwait) { + if (forAwait) { + _util.eatContextual.call(void 0, _keywords.ContextualKeyword._of); + } else { + _tokenizer.next.call(void 0, ); + } + _expression.parseExpression.call(void 0, ); + _util.expect.call(void 0, _types.TokenType.parenR); + parseStatement(false); +} + +// Parse a list of variable declarations. + +function parseVar(isFor, isBlockScope) { + while (true) { + parseVarHead(isBlockScope); + if (_tokenizer.eat.call(void 0, _types.TokenType.eq)) { + const eqIndex = _base.state.tokens.length - 1; + _expression.parseMaybeAssign.call(void 0, isFor); + _base.state.tokens[eqIndex].rhsEndIndex = _base.state.tokens.length; + } + if (!_tokenizer.eat.call(void 0, _types.TokenType.comma)) { + break; + } + } +} + +function parseVarHead(isBlockScope) { + _lval.parseBindingAtom.call(void 0, isBlockScope); + if (_base.isTypeScriptEnabled) { + _typescript.tsAfterParseVarHead.call(void 0, ); + } else if (_base.isFlowEnabled) { + _flow.flowAfterParseVarHead.call(void 0, ); + } +} + +// Parse a function declaration or literal (depending on the +// `isStatement` parameter). + + function parseFunction( + functionStart, + isStatement, + optionalId = false, +) { + if (_tokenizer.match.call(void 0, _types.TokenType.star)) { + _tokenizer.next.call(void 0, ); + } + + if (isStatement && !optionalId && !_tokenizer.match.call(void 0, _types.TokenType.name) && !_tokenizer.match.call(void 0, _types.TokenType._yield)) { + _util.unexpected.call(void 0, ); + } + + let nameScopeStartTokenIndex = null; + + if (_tokenizer.match.call(void 0, _types.TokenType.name)) { + // Expression-style functions should limit their name's scope to the function body, so we make + // a new function scope to enforce that. 
+ if (!isStatement) { + nameScopeStartTokenIndex = _base.state.tokens.length; + _base.state.scopeDepth++; + } + _lval.parseBindingIdentifier.call(void 0, false); + } + + const startTokenIndex = _base.state.tokens.length; + _base.state.scopeDepth++; + parseFunctionParams(); + _expression.parseFunctionBodyAndFinish.call(void 0, functionStart); + const endTokenIndex = _base.state.tokens.length; + // In addition to the block scope of the function body, we need a separate function-style scope + // that includes the params. + _base.state.scopes.push(new (0, _state.Scope)(startTokenIndex, endTokenIndex, true)); + _base.state.scopeDepth--; + if (nameScopeStartTokenIndex !== null) { + _base.state.scopes.push(new (0, _state.Scope)(nameScopeStartTokenIndex, endTokenIndex, true)); + _base.state.scopeDepth--; + } +} exports.parseFunction = parseFunction; + + function parseFunctionParams( + allowModifiers = false, + funcContextId = 0, +) { + if (_base.isTypeScriptEnabled) { + _typescript.tsStartParseFunctionParams.call(void 0, ); + } else if (_base.isFlowEnabled) { + _flow.flowStartParseFunctionParams.call(void 0, ); + } + + _util.expect.call(void 0, _types.TokenType.parenL); + if (funcContextId) { + _base.state.tokens[_base.state.tokens.length - 1].contextId = funcContextId; + } + _lval.parseBindingList.call(void 0, + _types.TokenType.parenR, + false /* isBlockScope */, + false /* allowEmpty */, + allowModifiers, + funcContextId, + ); + if (funcContextId) { + _base.state.tokens[_base.state.tokens.length - 1].contextId = funcContextId; + } +} exports.parseFunctionParams = parseFunctionParams; + +// Parse a class declaration or literal (depending on the +// `isStatement` parameter). + + function parseClass(isStatement, optionalId = false) { + // Put a context ID on the class keyword, the open-brace, and the close-brace, so that later + // code can easily navigate to meaningful points on the class. + const contextId = _base.getNextContextId.call(void 0, ); + + _tokenizer.next.call(void 0, ); + _base.state.tokens[_base.state.tokens.length - 1].contextId = contextId; + _base.state.tokens[_base.state.tokens.length - 1].isExpression = !isStatement; + // Like with functions, we declare a special "name scope" from the start of the name to the end + // of the class, but only with expression-style classes, to represent the fact that the name is + // available to the body of the class but not an outer declaration. 
+ let nameScopeStartTokenIndex = null; + if (!isStatement) { + nameScopeStartTokenIndex = _base.state.tokens.length; + _base.state.scopeDepth++; + } + parseClassId(isStatement, optionalId); + parseClassSuper(); + const openBraceIndex = _base.state.tokens.length; + parseClassBody(contextId); + if (_base.state.error) { + return; + } + _base.state.tokens[openBraceIndex].contextId = contextId; + _base.state.tokens[_base.state.tokens.length - 1].contextId = contextId; + if (nameScopeStartTokenIndex !== null) { + const endTokenIndex = _base.state.tokens.length; + _base.state.scopes.push(new (0, _state.Scope)(nameScopeStartTokenIndex, endTokenIndex, false)); + _base.state.scopeDepth--; + } +} exports.parseClass = parseClass; + +function isClassProperty() { + return _tokenizer.match.call(void 0, _types.TokenType.eq) || _tokenizer.match.call(void 0, _types.TokenType.semi) || _tokenizer.match.call(void 0, _types.TokenType.braceR) || _tokenizer.match.call(void 0, _types.TokenType.bang) || _tokenizer.match.call(void 0, _types.TokenType.colon); +} + +function isClassMethod() { + return _tokenizer.match.call(void 0, _types.TokenType.parenL) || _tokenizer.match.call(void 0, _types.TokenType.lessThan); +} + +function parseClassBody(classContextId) { + _util.expect.call(void 0, _types.TokenType.braceL); + + while (!_tokenizer.eat.call(void 0, _types.TokenType.braceR) && !_base.state.error) { + if (_tokenizer.eat.call(void 0, _types.TokenType.semi)) { + continue; + } + + if (_tokenizer.match.call(void 0, _types.TokenType.at)) { + parseDecorator(); + continue; + } + const memberStart = _base.state.start; + parseClassMember(memberStart, classContextId); + } +} + +function parseClassMember(memberStart, classContextId) { + if (_base.isTypeScriptEnabled) { + _typescript.tsParseModifiers.call(void 0, [ + _keywords.ContextualKeyword._declare, + _keywords.ContextualKeyword._public, + _keywords.ContextualKeyword._protected, + _keywords.ContextualKeyword._private, + _keywords.ContextualKeyword._override, + ]); + } + let isStatic = false; + if (_tokenizer.match.call(void 0, _types.TokenType.name) && _base.state.contextualKeyword === _keywords.ContextualKeyword._static) { + _expression.parseIdentifier.call(void 0, ); // eats 'static' + if (isClassMethod()) { + parseClassMethod(memberStart, /* isConstructor */ false); + return; + } else if (isClassProperty()) { + parseClassProperty(); + return; + } + // otherwise something static + _base.state.tokens[_base.state.tokens.length - 1].type = _types.TokenType._static; + isStatic = true; + + if (_tokenizer.match.call(void 0, _types.TokenType.braceL)) { + // This is a static block. Mark the word "static" with the class context ID for class element + // detection and parse as a regular block. + _base.state.tokens[_base.state.tokens.length - 1].contextId = classContextId; + parseBlock(); + return; + } + } + + parseClassMemberWithIsStatic(memberStart, isStatic, classContextId); +} + +function parseClassMemberWithIsStatic( + memberStart, + isStatic, + classContextId, +) { + if (_base.isTypeScriptEnabled) { + if (_typescript.tsTryParseClassMemberWithIsStatic.call(void 0, isStatic)) { + return; + } + } + if (_tokenizer.eat.call(void 0, _types.TokenType.star)) { + // a generator + parseClassPropertyName(classContextId); + parseClassMethod(memberStart, /* isConstructor */ false); + return; + } + + // Get the identifier name so we can tell if it's actually a keyword like "async", "get", or + // "set". 
+ parseClassPropertyName(classContextId); + let isConstructor = false; + const token = _base.state.tokens[_base.state.tokens.length - 1]; + // We allow "constructor" as either an identifier or a string. + if (token.contextualKeyword === _keywords.ContextualKeyword._constructor) { + isConstructor = true; + } + parsePostMemberNameModifiers(); + + if (isClassMethod()) { + parseClassMethod(memberStart, isConstructor); + } else if (isClassProperty()) { + parseClassProperty(); + } else if (token.contextualKeyword === _keywords.ContextualKeyword._async && !_util.isLineTerminator.call(void 0, )) { + _base.state.tokens[_base.state.tokens.length - 1].type = _types.TokenType._async; + // an async method + const isGenerator = _tokenizer.match.call(void 0, _types.TokenType.star); + if (isGenerator) { + _tokenizer.next.call(void 0, ); + } + + // The so-called parsed name would have been "async": get the real name. + parseClassPropertyName(classContextId); + parsePostMemberNameModifiers(); + parseClassMethod(memberStart, false /* isConstructor */); + } else if ( + (token.contextualKeyword === _keywords.ContextualKeyword._get || + token.contextualKeyword === _keywords.ContextualKeyword._set) && + !(_util.isLineTerminator.call(void 0, ) && _tokenizer.match.call(void 0, _types.TokenType.star)) + ) { + if (token.contextualKeyword === _keywords.ContextualKeyword._get) { + _base.state.tokens[_base.state.tokens.length - 1].type = _types.TokenType._get; + } else { + _base.state.tokens[_base.state.tokens.length - 1].type = _types.TokenType._set; + } + // `get\n*` is an uninitialized property named 'get' followed by a generator. + // a getter or setter + // The so-called parsed name would have been "get/set": get the real name. + parseClassPropertyName(classContextId); + parseClassMethod(memberStart, /* isConstructor */ false); + } else if (token.contextualKeyword === _keywords.ContextualKeyword._accessor && !_util.isLineTerminator.call(void 0, )) { + parseClassPropertyName(classContextId); + parseClassProperty(); + } else if (_util.isLineTerminator.call(void 0, )) { + // an uninitialized class property (due to ASI, since we don't otherwise recognize the next token) + parseClassProperty(); + } else { + _util.unexpected.call(void 0, ); + } +} + +function parseClassMethod(functionStart, isConstructor) { + if (_base.isTypeScriptEnabled) { + _typescript.tsTryParseTypeParameters.call(void 0, ); + } else if (_base.isFlowEnabled) { + if (_tokenizer.match.call(void 0, _types.TokenType.lessThan)) { + _flow.flowParseTypeParameterDeclaration.call(void 0, ); + } + } + _expression.parseMethod.call(void 0, functionStart, isConstructor); +} + +// Return the name of the class property, if it is a simple identifier. 
+ function parseClassPropertyName(classContextId) { + _expression.parsePropertyName.call(void 0, classContextId); +} exports.parseClassPropertyName = parseClassPropertyName; + + function parsePostMemberNameModifiers() { + if (_base.isTypeScriptEnabled) { + const oldIsType = _tokenizer.pushTypeContext.call(void 0, 0); + _tokenizer.eat.call(void 0, _types.TokenType.question); + _tokenizer.popTypeContext.call(void 0, oldIsType); + } +} exports.parsePostMemberNameModifiers = parsePostMemberNameModifiers; + + function parseClassProperty() { + if (_base.isTypeScriptEnabled) { + _tokenizer.eatTypeToken.call(void 0, _types.TokenType.bang); + _typescript.tsTryParseTypeAnnotation.call(void 0, ); + } else if (_base.isFlowEnabled) { + if (_tokenizer.match.call(void 0, _types.TokenType.colon)) { + _flow.flowParseTypeAnnotation.call(void 0, ); + } + } + + if (_tokenizer.match.call(void 0, _types.TokenType.eq)) { + const equalsTokenIndex = _base.state.tokens.length; + _tokenizer.next.call(void 0, ); + _expression.parseMaybeAssign.call(void 0, ); + _base.state.tokens[equalsTokenIndex].rhsEndIndex = _base.state.tokens.length; + } + _util.semicolon.call(void 0, ); +} exports.parseClassProperty = parseClassProperty; + +function parseClassId(isStatement, optionalId = false) { + if ( + _base.isTypeScriptEnabled && + (!isStatement || optionalId) && + _util.isContextual.call(void 0, _keywords.ContextualKeyword._implements) + ) { + return; + } + + if (_tokenizer.match.call(void 0, _types.TokenType.name)) { + _lval.parseBindingIdentifier.call(void 0, true); + } + + if (_base.isTypeScriptEnabled) { + _typescript.tsTryParseTypeParameters.call(void 0, ); + } else if (_base.isFlowEnabled) { + if (_tokenizer.match.call(void 0, _types.TokenType.lessThan)) { + _flow.flowParseTypeParameterDeclaration.call(void 0, ); + } + } +} + +// Returns true if there was a superclass. +function parseClassSuper() { + let hasSuper = false; + if (_tokenizer.eat.call(void 0, _types.TokenType._extends)) { + _expression.parseExprSubscripts.call(void 0, ); + hasSuper = true; + } else { + hasSuper = false; + } + if (_base.isTypeScriptEnabled) { + _typescript.tsAfterParseClassSuper.call(void 0, hasSuper); + } else if (_base.isFlowEnabled) { + _flow.flowAfterParseClassSuper.call(void 0, hasSuper); + } +} + +// Parses module export declaration. + + function parseExport() { + const exportIndex = _base.state.tokens.length - 1; + if (_base.isTypeScriptEnabled) { + if (_typescript.tsTryParseExport.call(void 0, )) { + return; + } + } + // export * from '...' + if (shouldParseExportStar()) { + parseExportStar(); + } else if (isExportDefaultSpecifier()) { + // export default from + _expression.parseIdentifier.call(void 0, ); + if (_tokenizer.match.call(void 0, _types.TokenType.comma) && _tokenizer.lookaheadType.call(void 0, ) === _types.TokenType.star) { + _util.expect.call(void 0, _types.TokenType.comma); + _util.expect.call(void 0, _types.TokenType.star); + _util.expectContextual.call(void 0, _keywords.ContextualKeyword._as); + _expression.parseIdentifier.call(void 0, ); + } else { + parseExportSpecifiersMaybe(); + } + parseExportFrom(); + } else if (_tokenizer.eat.call(void 0, _types.TokenType._default)) { + // export default ... 
+ parseExportDefaultExpression(); + } else if (shouldParseExportDeclaration()) { + parseExportDeclaration(); + } else { + // export { x, y as z } [from '...'] + parseExportSpecifiers(); + parseExportFrom(); + } + _base.state.tokens[exportIndex].rhsEndIndex = _base.state.tokens.length; +} exports.parseExport = parseExport; + +function parseExportDefaultExpression() { + if (_base.isTypeScriptEnabled) { + if (_typescript.tsTryParseExportDefaultExpression.call(void 0, )) { + return; + } + } + if (_base.isFlowEnabled) { + if (_flow.flowTryParseExportDefaultExpression.call(void 0, )) { + return; + } + } + const functionStart = _base.state.start; + if (_tokenizer.eat.call(void 0, _types.TokenType._function)) { + parseFunction(functionStart, true, true); + } else if (_util.isContextual.call(void 0, _keywords.ContextualKeyword._async) && _tokenizer.lookaheadType.call(void 0, ) === _types.TokenType._function) { + // async function declaration + _util.eatContextual.call(void 0, _keywords.ContextualKeyword._async); + _tokenizer.eat.call(void 0, _types.TokenType._function); + parseFunction(functionStart, true, true); + } else if (_tokenizer.match.call(void 0, _types.TokenType._class)) { + parseClass(true, true); + } else if (_tokenizer.match.call(void 0, _types.TokenType.at)) { + parseDecorators(); + parseClass(true, true); + } else { + _expression.parseMaybeAssign.call(void 0, ); + _util.semicolon.call(void 0, ); + } +} + +function parseExportDeclaration() { + if (_base.isTypeScriptEnabled) { + _typescript.tsParseExportDeclaration.call(void 0, ); + } else if (_base.isFlowEnabled) { + _flow.flowParseExportDeclaration.call(void 0, ); + } else { + parseStatement(true); + } +} + +function isExportDefaultSpecifier() { + if (_base.isTypeScriptEnabled && _typescript.tsIsDeclarationStart.call(void 0, )) { + return false; + } else if (_base.isFlowEnabled && _flow.flowShouldDisallowExportDefaultSpecifier.call(void 0, )) { + return false; + } + if (_tokenizer.match.call(void 0, _types.TokenType.name)) { + return _base.state.contextualKeyword !== _keywords.ContextualKeyword._async; + } + + if (!_tokenizer.match.call(void 0, _types.TokenType._default)) { + return false; + } + + const _next = _tokenizer.nextTokenStart.call(void 0, ); + const lookahead = _tokenizer.lookaheadTypeAndKeyword.call(void 0, ); + const hasFrom = + lookahead.type === _types.TokenType.name && lookahead.contextualKeyword === _keywords.ContextualKeyword._from; + if (lookahead.type === _types.TokenType.comma) { + return true; + } + // lookahead again when `export default from` is seen + if (hasFrom) { + const nextAfterFrom = _base.input.charCodeAt(_tokenizer.nextTokenStartSince.call(void 0, _next + 4)); + return nextAfterFrom === _charcodes.charCodes.quotationMark || nextAfterFrom === _charcodes.charCodes.apostrophe; + } + return false; +} + +function parseExportSpecifiersMaybe() { + if (_tokenizer.eat.call(void 0, _types.TokenType.comma)) { + parseExportSpecifiers(); + } +} + + function parseExportFrom() { + if (_util.eatContextual.call(void 0, _keywords.ContextualKeyword._from)) { + _expression.parseExprAtom.call(void 0, ); + maybeParseImportAssertions(); + } + _util.semicolon.call(void 0, ); +} exports.parseExportFrom = parseExportFrom; + +function shouldParseExportStar() { + if (_base.isFlowEnabled) { + return _flow.flowShouldParseExportStar.call(void 0, ); + } else { + return _tokenizer.match.call(void 0, _types.TokenType.star); + } +} + +function parseExportStar() { + if (_base.isFlowEnabled) { + _flow.flowParseExportStar.call(void 0, ); 
+ } else { + baseParseExportStar(); + } +} + + function baseParseExportStar() { + _util.expect.call(void 0, _types.TokenType.star); + + if (_util.isContextual.call(void 0, _keywords.ContextualKeyword._as)) { + parseExportNamespace(); + } else { + parseExportFrom(); + } +} exports.baseParseExportStar = baseParseExportStar; + +function parseExportNamespace() { + _tokenizer.next.call(void 0, ); + _base.state.tokens[_base.state.tokens.length - 1].type = _types.TokenType._as; + _expression.parseIdentifier.call(void 0, ); + parseExportSpecifiersMaybe(); + parseExportFrom(); +} + +function shouldParseExportDeclaration() { + return ( + (_base.isTypeScriptEnabled && _typescript.tsIsDeclarationStart.call(void 0, )) || + (_base.isFlowEnabled && _flow.flowShouldParseExportDeclaration.call(void 0, )) || + _base.state.type === _types.TokenType._var || + _base.state.type === _types.TokenType._const || + _base.state.type === _types.TokenType._let || + _base.state.type === _types.TokenType._function || + _base.state.type === _types.TokenType._class || + _util.isContextual.call(void 0, _keywords.ContextualKeyword._async) || + _tokenizer.match.call(void 0, _types.TokenType.at) + ); +} + +// Parses a comma-separated list of module exports. + function parseExportSpecifiers() { + let first = true; + + // export { x, y as z } [from '...'] + _util.expect.call(void 0, _types.TokenType.braceL); + + while (!_tokenizer.eat.call(void 0, _types.TokenType.braceR) && !_base.state.error) { + if (first) { + first = false; + } else { + _util.expect.call(void 0, _types.TokenType.comma); + if (_tokenizer.eat.call(void 0, _types.TokenType.braceR)) { + break; + } + } + parseExportSpecifier(); + } +} exports.parseExportSpecifiers = parseExportSpecifiers; + +function parseExportSpecifier() { + if (_base.isTypeScriptEnabled) { + _typescript.tsParseExportSpecifier.call(void 0, ); + return; + } + _expression.parseIdentifier.call(void 0, ); + _base.state.tokens[_base.state.tokens.length - 1].identifierRole = _tokenizer.IdentifierRole.ExportAccess; + if (_util.eatContextual.call(void 0, _keywords.ContextualKeyword._as)) { + _expression.parseIdentifier.call(void 0, ); + } +} + +/** + * Starting at the `module` token in an import, determine if it was truly an + * import reflection token or just looks like one. + * + * Returns true for: + * import module foo from "foo"; + * import module from from "foo"; + * + * Returns false for: + * import module from "foo"; + * import module, {bar} from "foo"; + */ +function isImportReflection() { + const snapshot = _base.state.snapshot(); + _util.expectContextual.call(void 0, _keywords.ContextualKeyword._module); + if (_util.eatContextual.call(void 0, _keywords.ContextualKeyword._from)) { + if (_util.isContextual.call(void 0, _keywords.ContextualKeyword._from)) { + _base.state.restoreFromSnapshot(snapshot); + return true; + } else { + _base.state.restoreFromSnapshot(snapshot); + return false; + } + } else if (_tokenizer.match.call(void 0, _types.TokenType.comma)) { + _base.state.restoreFromSnapshot(snapshot); + return false; + } else { + _base.state.restoreFromSnapshot(snapshot); + return true; + } +} + +/** + * Eat the "module" token from the import reflection proposal. + * https://github.com/tc39/proposal-import-reflection + */ +function parseMaybeImportReflection() { + // isImportReflection does snapshot/restore, so only run it if we see the word + // "module". 
+ if (_util.isContextual.call(void 0, _keywords.ContextualKeyword._module) && isImportReflection()) { + _tokenizer.next.call(void 0, ); + } +} + +// Parses import declaration. + + function parseImport() { + if (_base.isTypeScriptEnabled && _tokenizer.match.call(void 0, _types.TokenType.name) && _tokenizer.lookaheadType.call(void 0, ) === _types.TokenType.eq) { + _typescript.tsParseImportEqualsDeclaration.call(void 0, ); + return; + } + if (_base.isTypeScriptEnabled && _util.isContextual.call(void 0, _keywords.ContextualKeyword._type)) { + const lookahead = _tokenizer.lookaheadTypeAndKeyword.call(void 0, ); + if (lookahead.type === _types.TokenType.name && lookahead.contextualKeyword !== _keywords.ContextualKeyword._from) { + // One of these `import type` cases: + // import type T = require('T'); + // import type A from 'A'; + _util.expectContextual.call(void 0, _keywords.ContextualKeyword._type); + if (_tokenizer.lookaheadType.call(void 0, ) === _types.TokenType.eq) { + _typescript.tsParseImportEqualsDeclaration.call(void 0, ); + return; + } + // If this is an `import type...from` statement, then we already ate the + // type token, so proceed to the regular import parser. + } else if (lookahead.type === _types.TokenType.star || lookahead.type === _types.TokenType.braceL) { + // One of these `import type` cases, in which case we can eat the type token + // and proceed as normal: + // import type * as A from 'A'; + // import type {a} from 'A'; + _util.expectContextual.call(void 0, _keywords.ContextualKeyword._type); + } + // Otherwise, we are importing the name "type". + } + + // import '...' + if (_tokenizer.match.call(void 0, _types.TokenType.string)) { + _expression.parseExprAtom.call(void 0, ); + } else { + parseMaybeImportReflection(); + parseImportSpecifiers(); + _util.expectContextual.call(void 0, _keywords.ContextualKeyword._from); + _expression.parseExprAtom.call(void 0, ); + } + maybeParseImportAssertions(); + _util.semicolon.call(void 0, ); +} exports.parseImport = parseImport; + +// eslint-disable-next-line no-unused-vars +function shouldParseDefaultImport() { + return _tokenizer.match.call(void 0, _types.TokenType.name); +} + +function parseImportSpecifierLocal() { + _lval.parseImportedIdentifier.call(void 0, ); +} + +// Parses a comma-separated list of module imports. +function parseImportSpecifiers() { + if (_base.isFlowEnabled) { + _flow.flowStartParseImportSpecifiers.call(void 0, ); + } + + let first = true; + if (shouldParseDefaultImport()) { + // import defaultObj, { x, y as z } from '...' + parseImportSpecifierLocal(); + + if (!_tokenizer.eat.call(void 0, _types.TokenType.comma)) return; + } + + if (_tokenizer.match.call(void 0, _types.TokenType.star)) { + _tokenizer.next.call(void 0, ); + _util.expectContextual.call(void 0, _keywords.ContextualKeyword._as); + + parseImportSpecifierLocal(); + + return; + } + + _util.expect.call(void 0, _types.TokenType.braceL); + while (!_tokenizer.eat.call(void 0, _types.TokenType.braceR) && !_base.state.error) { + if (first) { + first = false; + } else { + // Detect an attempt to deep destructure + if (_tokenizer.eat.call(void 0, _types.TokenType.colon)) { + _util.unexpected.call(void 0, + "ES2015 named imports do not destructure. 
Use another statement for destructuring after the import.", + ); + } + + _util.expect.call(void 0, _types.TokenType.comma); + if (_tokenizer.eat.call(void 0, _types.TokenType.braceR)) { + break; + } + } + + parseImportSpecifier(); + } +} + +function parseImportSpecifier() { + if (_base.isTypeScriptEnabled) { + _typescript.tsParseImportSpecifier.call(void 0, ); + return; + } + if (_base.isFlowEnabled) { + _flow.flowParseImportSpecifier.call(void 0, ); + return; + } + _lval.parseImportedIdentifier.call(void 0, ); + if (_util.isContextual.call(void 0, _keywords.ContextualKeyword._as)) { + _base.state.tokens[_base.state.tokens.length - 1].identifierRole = _tokenizer.IdentifierRole.ImportAccess; + _tokenizer.next.call(void 0, ); + _lval.parseImportedIdentifier.call(void 0, ); + } +} + +/** + * Parse import assertions like `assert {type: "json"}`. + * + * Import assertions technically have their own syntax, but are always parseable + * as a plain JS object, so just do that for simplicity. + */ +function maybeParseImportAssertions() { + if (_util.isContextual.call(void 0, _keywords.ContextualKeyword._assert) && !_util.hasPrecedingLineBreak.call(void 0, )) { + _tokenizer.next.call(void 0, ); + _expression.parseObj.call(void 0, false, false); + } +} diff --git a/node_modules/sucrase/dist/parser/traverser/util.js b/node_modules/sucrase/dist/parser/traverser/util.js new file mode 100644 index 0000000..8ade800 --- /dev/null +++ b/node_modules/sucrase/dist/parser/traverser/util.js @@ -0,0 +1,104 @@ +"use strict";Object.defineProperty(exports, "__esModule", {value: true});var _index = require('../tokenizer/index'); + +var _types = require('../tokenizer/types'); +var _charcodes = require('../util/charcodes'); +var _base = require('./base'); + +// ## Parser utilities + +// Tests whether parsed token is a contextual keyword. + function isContextual(contextualKeyword) { + return _base.state.contextualKeyword === contextualKeyword; +} exports.isContextual = isContextual; + + function isLookaheadContextual(contextualKeyword) { + const l = _index.lookaheadTypeAndKeyword.call(void 0, ); + return l.type === _types.TokenType.name && l.contextualKeyword === contextualKeyword; +} exports.isLookaheadContextual = isLookaheadContextual; + +// Consumes contextual keyword if possible. + function eatContextual(contextualKeyword) { + return _base.state.contextualKeyword === contextualKeyword && _index.eat.call(void 0, _types.TokenType.name); +} exports.eatContextual = eatContextual; + +// Asserts that following token is given contextual keyword. + function expectContextual(contextualKeyword) { + if (!eatContextual(contextualKeyword)) { + unexpected(); + } +} exports.expectContextual = expectContextual; + +// Test whether a semicolon can be inserted at the current position. + function canInsertSemicolon() { + return _index.match.call(void 0, _types.TokenType.eof) || _index.match.call(void 0, _types.TokenType.braceR) || hasPrecedingLineBreak(); +} exports.canInsertSemicolon = canInsertSemicolon; + + function hasPrecedingLineBreak() { + const prevToken = _base.state.tokens[_base.state.tokens.length - 1]; + const lastTokEnd = prevToken ? 
prevToken.end : 0; + for (let i = lastTokEnd; i < _base.state.start; i++) { + const code = _base.input.charCodeAt(i); + if ( + code === _charcodes.charCodes.lineFeed || + code === _charcodes.charCodes.carriageReturn || + code === 0x2028 || + code === 0x2029 + ) { + return true; + } + } + return false; +} exports.hasPrecedingLineBreak = hasPrecedingLineBreak; + + function hasFollowingLineBreak() { + const nextStart = _index.nextTokenStart.call(void 0, ); + for (let i = _base.state.end; i < nextStart; i++) { + const code = _base.input.charCodeAt(i); + if ( + code === _charcodes.charCodes.lineFeed || + code === _charcodes.charCodes.carriageReturn || + code === 0x2028 || + code === 0x2029 + ) { + return true; + } + } + return false; +} exports.hasFollowingLineBreak = hasFollowingLineBreak; + + function isLineTerminator() { + return _index.eat.call(void 0, _types.TokenType.semi) || canInsertSemicolon(); +} exports.isLineTerminator = isLineTerminator; + +// Consume a semicolon, or, failing that, see if we are allowed to +// pretend that there is a semicolon at this position. + function semicolon() { + if (!isLineTerminator()) { + unexpected('Unexpected token, expected ";"'); + } +} exports.semicolon = semicolon; + +// Expect a token of a given type. If found, consume it, otherwise, +// raise an unexpected token error at given pos. + function expect(type) { + const matched = _index.eat.call(void 0, type); + if (!matched) { + unexpected(`Unexpected token, expected "${_types.formatTokenType.call(void 0, type)}"`); + } +} exports.expect = expect; + +/** + * Transition the parser to an error state. All code needs to be written to naturally unwind in this + * state, which allows us to backtrack without exceptions and without error plumbing everywhere. + */ + function unexpected(message = "Unexpected token", pos = _base.state.start) { + if (_base.state.error) { + return; + } + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const err = new SyntaxError(message); + err.pos = pos; + _base.state.error = err; + _base.state.pos = _base.input.length; + _index.finishToken.call(void 0, _types.TokenType.eof); +} exports.unexpected = unexpected; diff --git a/node_modules/sucrase/dist/parser/util/charcodes.js b/node_modules/sucrase/dist/parser/util/charcodes.js new file mode 100644 index 0000000..52bebc4 --- /dev/null +++ b/node_modules/sucrase/dist/parser/util/charcodes.js @@ -0,0 +1,115 @@ +"use strict";Object.defineProperty(exports, "__esModule", {value: true});var charCodes; (function (charCodes) { + const backSpace = 8; charCodes[charCodes["backSpace"] = backSpace] = "backSpace"; + const lineFeed = 10; charCodes[charCodes["lineFeed"] = lineFeed] = "lineFeed"; // '\n' + const tab = 9; charCodes[charCodes["tab"] = tab] = "tab"; // '\t' + const carriageReturn = 13; charCodes[charCodes["carriageReturn"] = carriageReturn] = "carriageReturn"; // '\r' + const shiftOut = 14; charCodes[charCodes["shiftOut"] = shiftOut] = "shiftOut"; + const space = 32; charCodes[charCodes["space"] = space] = "space"; + const exclamationMark = 33; charCodes[charCodes["exclamationMark"] = exclamationMark] = "exclamationMark"; // '!' 
+ const quotationMark = 34; charCodes[charCodes["quotationMark"] = quotationMark] = "quotationMark"; // '"' + const numberSign = 35; charCodes[charCodes["numberSign"] = numberSign] = "numberSign"; // '#' + const dollarSign = 36; charCodes[charCodes["dollarSign"] = dollarSign] = "dollarSign"; // '$' + const percentSign = 37; charCodes[charCodes["percentSign"] = percentSign] = "percentSign"; // '%' + const ampersand = 38; charCodes[charCodes["ampersand"] = ampersand] = "ampersand"; // '&' + const apostrophe = 39; charCodes[charCodes["apostrophe"] = apostrophe] = "apostrophe"; // ''' + const leftParenthesis = 40; charCodes[charCodes["leftParenthesis"] = leftParenthesis] = "leftParenthesis"; // '(' + const rightParenthesis = 41; charCodes[charCodes["rightParenthesis"] = rightParenthesis] = "rightParenthesis"; // ')' + const asterisk = 42; charCodes[charCodes["asterisk"] = asterisk] = "asterisk"; // '*' + const plusSign = 43; charCodes[charCodes["plusSign"] = plusSign] = "plusSign"; // '+' + const comma = 44; charCodes[charCodes["comma"] = comma] = "comma"; // ',' + const dash = 45; charCodes[charCodes["dash"] = dash] = "dash"; // '-' + const dot = 46; charCodes[charCodes["dot"] = dot] = "dot"; // '.' + const slash = 47; charCodes[charCodes["slash"] = slash] = "slash"; // '/' + const digit0 = 48; charCodes[charCodes["digit0"] = digit0] = "digit0"; // '0' + const digit1 = 49; charCodes[charCodes["digit1"] = digit1] = "digit1"; // '1' + const digit2 = 50; charCodes[charCodes["digit2"] = digit2] = "digit2"; // '2' + const digit3 = 51; charCodes[charCodes["digit3"] = digit3] = "digit3"; // '3' + const digit4 = 52; charCodes[charCodes["digit4"] = digit4] = "digit4"; // '4' + const digit5 = 53; charCodes[charCodes["digit5"] = digit5] = "digit5"; // '5' + const digit6 = 54; charCodes[charCodes["digit6"] = digit6] = "digit6"; // '6' + const digit7 = 55; charCodes[charCodes["digit7"] = digit7] = "digit7"; // '7' + const digit8 = 56; charCodes[charCodes["digit8"] = digit8] = "digit8"; // '8' + const digit9 = 57; charCodes[charCodes["digit9"] = digit9] = "digit9"; // '9' + const colon = 58; charCodes[charCodes["colon"] = colon] = "colon"; // ':' + const semicolon = 59; charCodes[charCodes["semicolon"] = semicolon] = "semicolon"; // ';' + const lessThan = 60; charCodes[charCodes["lessThan"] = lessThan] = "lessThan"; // '<' + const equalsTo = 61; charCodes[charCodes["equalsTo"] = equalsTo] = "equalsTo"; // '=' + const greaterThan = 62; charCodes[charCodes["greaterThan"] = greaterThan] = "greaterThan"; // '>' + const questionMark = 63; charCodes[charCodes["questionMark"] = questionMark] = "questionMark"; // '?' 
+ const atSign = 64; charCodes[charCodes["atSign"] = atSign] = "atSign"; // '@' + const uppercaseA = 65; charCodes[charCodes["uppercaseA"] = uppercaseA] = "uppercaseA"; // 'A' + const uppercaseB = 66; charCodes[charCodes["uppercaseB"] = uppercaseB] = "uppercaseB"; // 'B' + const uppercaseC = 67; charCodes[charCodes["uppercaseC"] = uppercaseC] = "uppercaseC"; // 'C' + const uppercaseD = 68; charCodes[charCodes["uppercaseD"] = uppercaseD] = "uppercaseD"; // 'D' + const uppercaseE = 69; charCodes[charCodes["uppercaseE"] = uppercaseE] = "uppercaseE"; // 'E' + const uppercaseF = 70; charCodes[charCodes["uppercaseF"] = uppercaseF] = "uppercaseF"; // 'F' + const uppercaseG = 71; charCodes[charCodes["uppercaseG"] = uppercaseG] = "uppercaseG"; // 'G' + const uppercaseH = 72; charCodes[charCodes["uppercaseH"] = uppercaseH] = "uppercaseH"; // 'H' + const uppercaseI = 73; charCodes[charCodes["uppercaseI"] = uppercaseI] = "uppercaseI"; // 'I' + const uppercaseJ = 74; charCodes[charCodes["uppercaseJ"] = uppercaseJ] = "uppercaseJ"; // 'J' + const uppercaseK = 75; charCodes[charCodes["uppercaseK"] = uppercaseK] = "uppercaseK"; // 'K' + const uppercaseL = 76; charCodes[charCodes["uppercaseL"] = uppercaseL] = "uppercaseL"; // 'L' + const uppercaseM = 77; charCodes[charCodes["uppercaseM"] = uppercaseM] = "uppercaseM"; // 'M' + const uppercaseN = 78; charCodes[charCodes["uppercaseN"] = uppercaseN] = "uppercaseN"; // 'N' + const uppercaseO = 79; charCodes[charCodes["uppercaseO"] = uppercaseO] = "uppercaseO"; // 'O' + const uppercaseP = 80; charCodes[charCodes["uppercaseP"] = uppercaseP] = "uppercaseP"; // 'P' + const uppercaseQ = 81; charCodes[charCodes["uppercaseQ"] = uppercaseQ] = "uppercaseQ"; // 'Q' + const uppercaseR = 82; charCodes[charCodes["uppercaseR"] = uppercaseR] = "uppercaseR"; // 'R' + const uppercaseS = 83; charCodes[charCodes["uppercaseS"] = uppercaseS] = "uppercaseS"; // 'S' + const uppercaseT = 84; charCodes[charCodes["uppercaseT"] = uppercaseT] = "uppercaseT"; // 'T' + const uppercaseU = 85; charCodes[charCodes["uppercaseU"] = uppercaseU] = "uppercaseU"; // 'U' + const uppercaseV = 86; charCodes[charCodes["uppercaseV"] = uppercaseV] = "uppercaseV"; // 'V' + const uppercaseW = 87; charCodes[charCodes["uppercaseW"] = uppercaseW] = "uppercaseW"; // 'W' + const uppercaseX = 88; charCodes[charCodes["uppercaseX"] = uppercaseX] = "uppercaseX"; // 'X' + const uppercaseY = 89; charCodes[charCodes["uppercaseY"] = uppercaseY] = "uppercaseY"; // 'Y' + const uppercaseZ = 90; charCodes[charCodes["uppercaseZ"] = uppercaseZ] = "uppercaseZ"; // 'Z' + const leftSquareBracket = 91; charCodes[charCodes["leftSquareBracket"] = leftSquareBracket] = "leftSquareBracket"; // '[' + const backslash = 92; charCodes[charCodes["backslash"] = backslash] = "backslash"; // '\ ' + const rightSquareBracket = 93; charCodes[charCodes["rightSquareBracket"] = rightSquareBracket] = "rightSquareBracket"; // ']' + const caret = 94; charCodes[charCodes["caret"] = caret] = "caret"; // '^' + const underscore = 95; charCodes[charCodes["underscore"] = underscore] = "underscore"; // '_' + const graveAccent = 96; charCodes[charCodes["graveAccent"] = graveAccent] = "graveAccent"; // '`' + const lowercaseA = 97; charCodes[charCodes["lowercaseA"] = lowercaseA] = "lowercaseA"; // 'a' + const lowercaseB = 98; charCodes[charCodes["lowercaseB"] = lowercaseB] = "lowercaseB"; // 'b' + const lowercaseC = 99; charCodes[charCodes["lowercaseC"] = lowercaseC] = "lowercaseC"; // 'c' + const lowercaseD = 100; charCodes[charCodes["lowercaseD"] = lowercaseD] 
= "lowercaseD"; // 'd' + const lowercaseE = 101; charCodes[charCodes["lowercaseE"] = lowercaseE] = "lowercaseE"; // 'e' + const lowercaseF = 102; charCodes[charCodes["lowercaseF"] = lowercaseF] = "lowercaseF"; // 'f' + const lowercaseG = 103; charCodes[charCodes["lowercaseG"] = lowercaseG] = "lowercaseG"; // 'g' + const lowercaseH = 104; charCodes[charCodes["lowercaseH"] = lowercaseH] = "lowercaseH"; // 'h' + const lowercaseI = 105; charCodes[charCodes["lowercaseI"] = lowercaseI] = "lowercaseI"; // 'i' + const lowercaseJ = 106; charCodes[charCodes["lowercaseJ"] = lowercaseJ] = "lowercaseJ"; // 'j' + const lowercaseK = 107; charCodes[charCodes["lowercaseK"] = lowercaseK] = "lowercaseK"; // 'k' + const lowercaseL = 108; charCodes[charCodes["lowercaseL"] = lowercaseL] = "lowercaseL"; // 'l' + const lowercaseM = 109; charCodes[charCodes["lowercaseM"] = lowercaseM] = "lowercaseM"; // 'm' + const lowercaseN = 110; charCodes[charCodes["lowercaseN"] = lowercaseN] = "lowercaseN"; // 'n' + const lowercaseO = 111; charCodes[charCodes["lowercaseO"] = lowercaseO] = "lowercaseO"; // 'o' + const lowercaseP = 112; charCodes[charCodes["lowercaseP"] = lowercaseP] = "lowercaseP"; // 'p' + const lowercaseQ = 113; charCodes[charCodes["lowercaseQ"] = lowercaseQ] = "lowercaseQ"; // 'q' + const lowercaseR = 114; charCodes[charCodes["lowercaseR"] = lowercaseR] = "lowercaseR"; // 'r' + const lowercaseS = 115; charCodes[charCodes["lowercaseS"] = lowercaseS] = "lowercaseS"; // 's' + const lowercaseT = 116; charCodes[charCodes["lowercaseT"] = lowercaseT] = "lowercaseT"; // 't' + const lowercaseU = 117; charCodes[charCodes["lowercaseU"] = lowercaseU] = "lowercaseU"; // 'u' + const lowercaseV = 118; charCodes[charCodes["lowercaseV"] = lowercaseV] = "lowercaseV"; // 'v' + const lowercaseW = 119; charCodes[charCodes["lowercaseW"] = lowercaseW] = "lowercaseW"; // 'w' + const lowercaseX = 120; charCodes[charCodes["lowercaseX"] = lowercaseX] = "lowercaseX"; // 'x' + const lowercaseY = 121; charCodes[charCodes["lowercaseY"] = lowercaseY] = "lowercaseY"; // 'y' + const lowercaseZ = 122; charCodes[charCodes["lowercaseZ"] = lowercaseZ] = "lowercaseZ"; // 'z' + const leftCurlyBrace = 123; charCodes[charCodes["leftCurlyBrace"] = leftCurlyBrace] = "leftCurlyBrace"; // '{' + const verticalBar = 124; charCodes[charCodes["verticalBar"] = verticalBar] = "verticalBar"; // '|' + const rightCurlyBrace = 125; charCodes[charCodes["rightCurlyBrace"] = rightCurlyBrace] = "rightCurlyBrace"; // '}' + const tilde = 126; charCodes[charCodes["tilde"] = tilde] = "tilde"; // '~' + const nonBreakingSpace = 160; charCodes[charCodes["nonBreakingSpace"] = nonBreakingSpace] = "nonBreakingSpace"; + // eslint-disable-next-line no-irregular-whitespace + const oghamSpaceMark = 5760; charCodes[charCodes["oghamSpaceMark"] = oghamSpaceMark] = "oghamSpaceMark"; // ' ' + const lineSeparator = 8232; charCodes[charCodes["lineSeparator"] = lineSeparator] = "lineSeparator"; + const paragraphSeparator = 8233; charCodes[charCodes["paragraphSeparator"] = paragraphSeparator] = "paragraphSeparator"; +})(charCodes || (exports.charCodes = charCodes = {})); + + function isDigit(code) { + return ( + (code >= charCodes.digit0 && code <= charCodes.digit9) || + (code >= charCodes.lowercaseA && code <= charCodes.lowercaseF) || + (code >= charCodes.uppercaseA && code <= charCodes.uppercaseF) + ); +} exports.isDigit = isDigit; diff --git a/node_modules/sucrase/dist/parser/util/identifier.js b/node_modules/sucrase/dist/parser/util/identifier.js new file mode 100644 index 
0000000..9a2813c --- /dev/null +++ b/node_modules/sucrase/dist/parser/util/identifier.js @@ -0,0 +1,34 @@ +"use strict";Object.defineProperty(exports, "__esModule", {value: true});var _charcodes = require('./charcodes'); +var _whitespace = require('./whitespace'); + +function computeIsIdentifierChar(code) { + if (code < 48) return code === 36; + if (code < 58) return true; + if (code < 65) return false; + if (code < 91) return true; + if (code < 97) return code === 95; + if (code < 123) return true; + if (code < 128) return false; + throw new Error("Should not be called with non-ASCII char code."); +} + + const IS_IDENTIFIER_CHAR = new Uint8Array(65536); exports.IS_IDENTIFIER_CHAR = IS_IDENTIFIER_CHAR; +for (let i = 0; i < 128; i++) { + exports.IS_IDENTIFIER_CHAR[i] = computeIsIdentifierChar(i) ? 1 : 0; +} +for (let i = 128; i < 65536; i++) { + exports.IS_IDENTIFIER_CHAR[i] = 1; +} +// Aside from whitespace and newlines, all characters outside the ASCII space are either +// identifier characters or invalid. Since we're not performing code validation, we can just +// treat all invalid characters as identifier characters. +for (const whitespaceChar of _whitespace.WHITESPACE_CHARS) { + exports.IS_IDENTIFIER_CHAR[whitespaceChar] = 0; +} +exports.IS_IDENTIFIER_CHAR[0x2028] = 0; +exports.IS_IDENTIFIER_CHAR[0x2029] = 0; + + const IS_IDENTIFIER_START = exports.IS_IDENTIFIER_CHAR.slice(); exports.IS_IDENTIFIER_START = IS_IDENTIFIER_START; +for (let numChar = _charcodes.charCodes.digit0; numChar <= _charcodes.charCodes.digit9; numChar++) { + exports.IS_IDENTIFIER_START[numChar] = 0; +} diff --git a/node_modules/sucrase/dist/parser/util/whitespace.js b/node_modules/sucrase/dist/parser/util/whitespace.js new file mode 100644 index 0000000..55bb994 --- /dev/null +++ b/node_modules/sucrase/dist/parser/util/whitespace.js @@ -0,0 +1,33 @@ +"use strict";Object.defineProperty(exports, "__esModule", {value: true});var _charcodes = require('./charcodes'); + +// https://tc39.github.io/ecma262/#sec-white-space + const WHITESPACE_CHARS = [ + 0x0009, + 0x000b, + 0x000c, + _charcodes.charCodes.space, + _charcodes.charCodes.nonBreakingSpace, + _charcodes.charCodes.oghamSpaceMark, + 0x2000, // EN QUAD + 0x2001, // EM QUAD + 0x2002, // EN SPACE + 0x2003, // EM SPACE + 0x2004, // THREE-PER-EM SPACE + 0x2005, // FOUR-PER-EM SPACE + 0x2006, // SIX-PER-EM SPACE + 0x2007, // FIGURE SPACE + 0x2008, // PUNCTUATION SPACE + 0x2009, // THIN SPACE + 0x200a, // HAIR SPACE + 0x202f, // NARROW NO-BREAK SPACE + 0x205f, // MEDIUM MATHEMATICAL SPACE + 0x3000, // IDEOGRAPHIC SPACE + 0xfeff, // ZERO WIDTH NO-BREAK SPACE +]; exports.WHITESPACE_CHARS = WHITESPACE_CHARS; + + const skipWhiteSpace = /(?:\s|\/\/.*|\/\*[^]*?\*\/)*/g; exports.skipWhiteSpace = skipWhiteSpace; + + const IS_WHITESPACE = new Uint8Array(65536); exports.IS_WHITESPACE = IS_WHITESPACE; +for (const char of exports.WHITESPACE_CHARS) { + exports.IS_WHITESPACE[char] = 1; +} diff --git a/node_modules/sucrase/dist/register.js b/node_modules/sucrase/dist/register.js new file mode 100644 index 0000000..d560377 --- /dev/null +++ b/node_modules/sucrase/dist/register.js @@ -0,0 +1,83 @@ +"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { newObj[key] = obj[key]; } } } newObj.default = obj; return newObj; } }var _pirates = require('pirates'); var pirates = 
_interopRequireWildcard(_pirates); + +var _index = require('./index'); + + + + + + + + + function addHook( + extension, + options, + hookOptions, +) { + return pirates.addHook( + (code, filePath) => { + const {code: transformedCode, sourceMap} = _index.transform.call(void 0, code, { + ...options, + sourceMapOptions: {compiledFilename: filePath}, + filePath, + }); + const mapBase64 = Buffer.from(JSON.stringify(sourceMap)).toString("base64"); + const suffix = `//# sourceMappingURL=data:application/json;charset=utf-8;base64,${mapBase64}`; + return `${transformedCode}\n${suffix}`; + }, + {...hookOptions, exts: [extension]}, + ); +} exports.addHook = addHook; + + function registerJS(hookOptions) { + return addHook(".js", {transforms: ["imports", "flow", "jsx"]}, hookOptions); +} exports.registerJS = registerJS; + + function registerJSX(hookOptions) { + return addHook(".jsx", {transforms: ["imports", "flow", "jsx"]}, hookOptions); +} exports.registerJSX = registerJSX; + + function registerTS(hookOptions) { + return addHook(".ts", {transforms: ["imports", "typescript"]}, hookOptions); +} exports.registerTS = registerTS; + + function registerTSX(hookOptions) { + return addHook(".tsx", {transforms: ["imports", "typescript", "jsx"]}, hookOptions); +} exports.registerTSX = registerTSX; + + function registerTSLegacyModuleInterop(hookOptions) { + return addHook( + ".ts", + { + transforms: ["imports", "typescript"], + enableLegacyTypeScriptModuleInterop: true, + }, + hookOptions, + ); +} exports.registerTSLegacyModuleInterop = registerTSLegacyModuleInterop; + + function registerTSXLegacyModuleInterop(hookOptions) { + return addHook( + ".tsx", + { + transforms: ["imports", "typescript", "jsx"], + enableLegacyTypeScriptModuleInterop: true, + }, + hookOptions, + ); +} exports.registerTSXLegacyModuleInterop = registerTSXLegacyModuleInterop; + + function registerAll(hookOptions) { + const reverts = [ + registerJS(hookOptions), + registerJSX(hookOptions), + registerTS(hookOptions), + registerTSX(hookOptions), + ]; + + return () => { + for (const fn of reverts) { + fn(); + } + }; +} exports.registerAll = registerAll; diff --git a/node_modules/sucrase/dist/transformers/CJSImportTransformer.js b/node_modules/sucrase/dist/transformers/CJSImportTransformer.js new file mode 100644 index 0000000..51b01be --- /dev/null +++ b/node_modules/sucrase/dist/transformers/CJSImportTransformer.js @@ -0,0 +1,859 @@ +"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + + +var _tokenizer = require('../parser/tokenizer'); +var _keywords = require('../parser/tokenizer/keywords'); +var _types = require('../parser/tokenizer/types'); + +var _elideImportEquals = require('../util/elideImportEquals'); var _elideImportEquals2 = _interopRequireDefault(_elideImportEquals); + + + +var _getDeclarationInfo = require('../util/getDeclarationInfo'); var _getDeclarationInfo2 = _interopRequireDefault(_getDeclarationInfo); +var _getImportExportSpecifierInfo = require('../util/getImportExportSpecifierInfo'); var _getImportExportSpecifierInfo2 = _interopRequireDefault(_getImportExportSpecifierInfo); +var _removeMaybeImportAssertion = require('../util/removeMaybeImportAssertion'); +var _shouldElideDefaultExport = require('../util/shouldElideDefaultExport'); var _shouldElideDefaultExport2 = _interopRequireDefault(_shouldElideDefaultExport); + + +var _Transformer = require('./Transformer'); var _Transformer2 = _interopRequireDefault(_Transformer); + +/** + * Class for editing import statements when we are transforming to commonjs. + */ + class CJSImportTransformer extends _Transformer2.default { + __init() {this.hadExport = false} + __init2() {this.hadNamedExport = false} + __init3() {this.hadDefaultExport = false} + + + constructor( + rootTransformer, + tokens, + importProcessor, + nameManager, + helperManager, + reactHotLoaderTransformer, + enableLegacyBabel5ModuleInterop, + enableLegacyTypeScriptModuleInterop, + isTypeScriptTransformEnabled, + preserveDynamicImport, + ) { + super();this.rootTransformer = rootTransformer;this.tokens = tokens;this.importProcessor = importProcessor;this.nameManager = nameManager;this.helperManager = helperManager;this.reactHotLoaderTransformer = reactHotLoaderTransformer;this.enableLegacyBabel5ModuleInterop = enableLegacyBabel5ModuleInterop;this.enableLegacyTypeScriptModuleInterop = enableLegacyTypeScriptModuleInterop;this.isTypeScriptTransformEnabled = isTypeScriptTransformEnabled;this.preserveDynamicImport = preserveDynamicImport;CJSImportTransformer.prototype.__init.call(this);CJSImportTransformer.prototype.__init2.call(this);CJSImportTransformer.prototype.__init3.call(this);; + this.declarationInfo = isTypeScriptTransformEnabled + ? _getDeclarationInfo2.default.call(void 0, tokens) + : _getDeclarationInfo.EMPTY_DECLARATION_INFO; + } + + getPrefixCode() { + let prefix = ""; + if (this.hadExport) { + prefix += 'Object.defineProperty(exports, "__esModule", {value: true});'; + } + return prefix; + } + + getSuffixCode() { + if (this.enableLegacyBabel5ModuleInterop && this.hadDefaultExport && !this.hadNamedExport) { + return "\nmodule.exports = exports.default;\n"; + } + return ""; + } + + process() { + // TypeScript `import foo = require('foo');` should always just be translated to plain require. + if (this.tokens.matches3(_types.TokenType._import, _types.TokenType.name, _types.TokenType.eq)) { + return this.processImportEquals(); + } + if (this.tokens.matches1(_types.TokenType._import)) { + this.processImport(); + return true; + } + if (this.tokens.matches2(_types.TokenType._export, _types.TokenType.eq)) { + this.tokens.replaceToken("module.exports"); + return true; + } + if (this.tokens.matches1(_types.TokenType._export) && !this.tokens.currentToken().isType) { + this.hadExport = true; + return this.processExport(); + } + if (this.tokens.matches2(_types.TokenType.name, _types.TokenType.postIncDec)) { + // Fall through to normal identifier matching if this doesn't apply. 
+ if (this.processPostIncDec()) { + return true; + } + } + if (this.tokens.matches1(_types.TokenType.name) || this.tokens.matches1(_types.TokenType.jsxName)) { + return this.processIdentifier(); + } + if (this.tokens.matches1(_types.TokenType.eq)) { + return this.processAssignment(); + } + if (this.tokens.matches1(_types.TokenType.assign)) { + return this.processComplexAssignment(); + } + if (this.tokens.matches1(_types.TokenType.preIncDec)) { + return this.processPreIncDec(); + } + return false; + } + + processImportEquals() { + const importName = this.tokens.identifierNameAtIndex(this.tokens.currentIndex() + 1); + if (this.importProcessor.isTypeName(importName)) { + // If this name is only used as a type, elide the whole import. + _elideImportEquals2.default.call(void 0, this.tokens); + } else { + // Otherwise, switch `import` to `const`. + this.tokens.replaceToken("const"); + } + return true; + } + + /** + * Transform this: + * import foo, {bar} from 'baz'; + * into + * var _baz = require('baz'); var _baz2 = _interopRequireDefault(_baz); + * + * The import code was already generated in the import preprocessing step, so + * we just need to look it up. + */ + processImport() { + if (this.tokens.matches2(_types.TokenType._import, _types.TokenType.parenL)) { + if (this.preserveDynamicImport) { + // Bail out, only making progress for this one token. + this.tokens.copyToken(); + return; + } + const requireWrapper = this.enableLegacyTypeScriptModuleInterop + ? "" + : `${this.helperManager.getHelperName("interopRequireWildcard")}(`; + this.tokens.replaceToken(`Promise.resolve().then(() => ${requireWrapper}require`); + const contextId = this.tokens.currentToken().contextId; + if (contextId == null) { + throw new Error("Expected context ID on dynamic import invocation."); + } + this.tokens.copyToken(); + while (!this.tokens.matchesContextIdAndLabel(_types.TokenType.parenR, contextId)) { + this.rootTransformer.processToken(); + } + this.tokens.replaceToken(requireWrapper ? ")))" : "))"); + return; + } + + const wasOnlyTypes = this.removeImportAndDetectIfType(); + + if (wasOnlyTypes) { + this.tokens.removeToken(); + } else { + const path = this.tokens.stringValue(); + this.tokens.replaceTokenTrimmingLeftWhitespace(this.importProcessor.claimImportCode(path)); + this.tokens.appendCode(this.importProcessor.claimImportCode(path)); + } + _removeMaybeImportAssertion.removeMaybeImportAssertion.call(void 0, this.tokens); + if (this.tokens.matches1(_types.TokenType.semi)) { + this.tokens.removeToken(); + } + } + + /** + * Erase this import, and return true if it was either of the form "import type" or contained only + * "type" named imports. Such imports should not even do a side-effect import. + * + * The position should end at the import string. + */ + removeImportAndDetectIfType() { + this.tokens.removeInitialToken(); + if ( + this.tokens.matchesContextual(_keywords.ContextualKeyword._type) && + !this.tokens.matches1AtIndex(this.tokens.currentIndex() + 1, _types.TokenType.comma) && + !this.tokens.matchesContextualAtIndex(this.tokens.currentIndex() + 1, _keywords.ContextualKeyword._from) + ) { + // This is an "import type" statement, so exit early. + this.removeRemainingImport(); + return true; + } + + if (this.tokens.matches1(_types.TokenType.name) || this.tokens.matches1(_types.TokenType.star)) { + // We have a default import or namespace import, so there must be some + // non-type import. 
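        // (Editor's illustration, not part of the sucrase source: this branch treats a
        // default or namespace binding as a value import, so a statement such as
        //   import React, {type T} from 'react';
        // is not erased here and still receives the require/interop code that was
        // generated for it during the import preprocessing step.)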
+ this.removeRemainingImport(); + return false; + } + + if (this.tokens.matches1(_types.TokenType.string)) { + // This is a bare import, so we should proceed with the import. + return false; + } + + let foundNonType = false; + while (!this.tokens.matches1(_types.TokenType.string)) { + // Check if any named imports are of the form "foo" or "foo as bar", with + // no leading "type". + if ((!foundNonType && this.tokens.matches1(_types.TokenType.braceL)) || this.tokens.matches1(_types.TokenType.comma)) { + this.tokens.removeToken(); + if ( + this.tokens.matches2(_types.TokenType.name, _types.TokenType.comma) || + this.tokens.matches2(_types.TokenType.name, _types.TokenType.braceR) || + this.tokens.matches4(_types.TokenType.name, _types.TokenType.name, _types.TokenType.name, _types.TokenType.comma) || + this.tokens.matches4(_types.TokenType.name, _types.TokenType.name, _types.TokenType.name, _types.TokenType.braceR) + ) { + foundNonType = true; + } + } + this.tokens.removeToken(); + } + return !foundNonType; + } + + removeRemainingImport() { + while (!this.tokens.matches1(_types.TokenType.string)) { + this.tokens.removeToken(); + } + } + + processIdentifier() { + const token = this.tokens.currentToken(); + if (token.shadowsGlobal) { + return false; + } + + if (token.identifierRole === _tokenizer.IdentifierRole.ObjectShorthand) { + return this.processObjectShorthand(); + } + + if (token.identifierRole !== _tokenizer.IdentifierRole.Access) { + return false; + } + const replacement = this.importProcessor.getIdentifierReplacement( + this.tokens.identifierNameForToken(token), + ); + if (!replacement) { + return false; + } + // Tolerate any number of closing parens while looking for an opening paren + // that indicates a function call. + let possibleOpenParenIndex = this.tokens.currentIndex() + 1; + while ( + possibleOpenParenIndex < this.tokens.tokens.length && + this.tokens.tokens[possibleOpenParenIndex].type === _types.TokenType.parenR + ) { + possibleOpenParenIndex++; + } + // Avoid treating imported functions as methods of their `exports` object + // by using `(0, f)` when the identifier is in a paren expression. Else + // use `Function.prototype.call` when the identifier is a guaranteed + // function call. When using `call`, pass undefined as the context. + if (this.tokens.tokens[possibleOpenParenIndex].type === _types.TokenType.parenL) { + if ( + this.tokens.tokenAtRelativeIndex(1).type === _types.TokenType.parenL && + this.tokens.tokenAtRelativeIndex(-1).type !== _types.TokenType._new + ) { + this.tokens.replaceToken(`${replacement}.call(void 0, `); + // Remove the old paren. + this.tokens.removeToken(); + // Balance out the new paren. + this.rootTransformer.processBalancedCode(); + this.tokens.copyExpectedToken(_types.TokenType.parenR); + } else { + // See here: http://2ality.com/2015/12/references.html + this.tokens.replaceToken(`(0, ${replacement})`); + } + } else { + this.tokens.replaceToken(replacement); + } + return true; + } + + processObjectShorthand() { + const identifier = this.tokens.identifierName(); + const replacement = this.importProcessor.getIdentifierReplacement(identifier); + if (!replacement) { + return false; + } + this.tokens.replaceToken(`${identifier}: ${replacement}`); + return true; + } + + processExport() { + if ( + this.tokens.matches2(_types.TokenType._export, _types.TokenType._enum) || + this.tokens.matches3(_types.TokenType._export, _types.TokenType._const, _types.TokenType._enum) + ) { + // Let the TypeScript transform handle it. 
+ return false; + } + if (this.tokens.matches2(_types.TokenType._export, _types.TokenType._default)) { + this.hadDefaultExport = true; + if (this.tokens.matches3(_types.TokenType._export, _types.TokenType._default, _types.TokenType._enum)) { + // Flow export default enums need some special handling, so handle them + // in that tranform rather than this one. + return false; + } + this.processExportDefault(); + return true; + } + this.hadNamedExport = true; + if ( + this.tokens.matches2(_types.TokenType._export, _types.TokenType._var) || + this.tokens.matches2(_types.TokenType._export, _types.TokenType._let) || + this.tokens.matches2(_types.TokenType._export, _types.TokenType._const) + ) { + this.processExportVar(); + return true; + } else if ( + this.tokens.matches2(_types.TokenType._export, _types.TokenType._function) || + // export async function + this.tokens.matches3(_types.TokenType._export, _types.TokenType.name, _types.TokenType._function) + ) { + this.processExportFunction(); + return true; + } else if ( + this.tokens.matches2(_types.TokenType._export, _types.TokenType._class) || + this.tokens.matches3(_types.TokenType._export, _types.TokenType._abstract, _types.TokenType._class) || + this.tokens.matches2(_types.TokenType._export, _types.TokenType.at) + ) { + this.processExportClass(); + return true; + } else if (this.tokens.matches2(_types.TokenType._export, _types.TokenType.braceL)) { + this.processExportBindings(); + return true; + } else if (this.tokens.matches2(_types.TokenType._export, _types.TokenType.star)) { + this.processExportStar(); + return true; + } else if ( + this.tokens.matches2(_types.TokenType._export, _types.TokenType.name) && + this.tokens.matchesContextualAtIndex(this.tokens.currentIndex() + 1, _keywords.ContextualKeyword._type) + ) { + // export type {a}; + // export type {a as b}; + // export type {a} from './b'; + // export type * from './b'; + // export type * as ns from './b'; + this.tokens.removeInitialToken(); + this.tokens.removeToken(); + if (this.tokens.matches1(_types.TokenType.braceL)) { + while (!this.tokens.matches1(_types.TokenType.braceR)) { + this.tokens.removeToken(); + } + this.tokens.removeToken(); + } else { + // * + this.tokens.removeToken(); + if (this.tokens.matches1(_types.TokenType._as)) { + // as + this.tokens.removeToken(); + // ns + this.tokens.removeToken(); + } + } + // Remove type re-export `... } from './T'` + if ( + this.tokens.matchesContextual(_keywords.ContextualKeyword._from) && + this.tokens.matches1AtIndex(this.tokens.currentIndex() + 1, _types.TokenType.string) + ) { + this.tokens.removeToken(); + this.tokens.removeToken(); + _removeMaybeImportAssertion.removeMaybeImportAssertion.call(void 0, this.tokens); + } + return true; + } else { + throw new Error("Unrecognized export syntax."); + } + } + + processAssignment() { + const index = this.tokens.currentIndex(); + const identifierToken = this.tokens.tokens[index - 1]; + // If the LHS is a type identifier, this must be a declaration like `let a: b = c;`, + // with `b` as the identifier, so nothing needs to be done in that case. 
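    // (Editor's illustration, not part of the sucrase source: when none of the bail-out
    // checks below apply and `x` resolves to an export binding, an assignment such as
    //   x = 3;
    // comes out roughly as
    //   x = exports.x = 3;
    // so the CommonJS export stays in sync with the local variable.)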
+ if (identifierToken.isType || identifierToken.type !== _types.TokenType.name) { + return false; + } + if (identifierToken.shadowsGlobal) { + return false; + } + if (index >= 2 && this.tokens.matches1AtIndex(index - 2, _types.TokenType.dot)) { + return false; + } + if (index >= 2 && [_types.TokenType._var, _types.TokenType._let, _types.TokenType._const].includes(this.tokens.tokens[index - 2].type)) { + // Declarations don't need an extra assignment. This doesn't avoid the + // assignment for comma-separated declarations, but it's still correct + // since the assignment is just redundant. + return false; + } + const assignmentSnippet = this.importProcessor.resolveExportBinding( + this.tokens.identifierNameForToken(identifierToken), + ); + if (!assignmentSnippet) { + return false; + } + this.tokens.copyToken(); + this.tokens.appendCode(` ${assignmentSnippet} =`); + return true; + } + + /** + * Process something like `a += 3`, where `a` might be an exported value. + */ + processComplexAssignment() { + const index = this.tokens.currentIndex(); + const identifierToken = this.tokens.tokens[index - 1]; + if (identifierToken.type !== _types.TokenType.name) { + return false; + } + if (identifierToken.shadowsGlobal) { + return false; + } + if (index >= 2 && this.tokens.matches1AtIndex(index - 2, _types.TokenType.dot)) { + return false; + } + const assignmentSnippet = this.importProcessor.resolveExportBinding( + this.tokens.identifierNameForToken(identifierToken), + ); + if (!assignmentSnippet) { + return false; + } + this.tokens.appendCode(` = ${assignmentSnippet}`); + this.tokens.copyToken(); + return true; + } + + /** + * Process something like `++a`, where `a` might be an exported value. + */ + processPreIncDec() { + const index = this.tokens.currentIndex(); + const identifierToken = this.tokens.tokens[index + 1]; + if (identifierToken.type !== _types.TokenType.name) { + return false; + } + if (identifierToken.shadowsGlobal) { + return false; + } + // Ignore things like ++a.b and ++a[b] and ++a().b. + if ( + index + 2 < this.tokens.tokens.length && + (this.tokens.matches1AtIndex(index + 2, _types.TokenType.dot) || + this.tokens.matches1AtIndex(index + 2, _types.TokenType.bracketL) || + this.tokens.matches1AtIndex(index + 2, _types.TokenType.parenL)) + ) { + return false; + } + const identifierName = this.tokens.identifierNameForToken(identifierToken); + const assignmentSnippet = this.importProcessor.resolveExportBinding(identifierName); + if (!assignmentSnippet) { + return false; + } + this.tokens.appendCode(`${assignmentSnippet} = `); + this.tokens.copyToken(); + return true; + } + + /** + * Process something like `a++`, where `a` might be an exported value. + * This starts at the `a`, not at the `++`. 
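   * (Editor's illustrative example, not part of the original comment: when `a` is an
   * exported binding, the expression
   *   a++
   * is rewritten below to roughly
   *   (a = exports.a = a + 1, a - 1)
   * so the surrounding expression still sees the old value of `a` while the export
   * binding is updated.)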
+ */ + processPostIncDec() { + const index = this.tokens.currentIndex(); + const identifierToken = this.tokens.tokens[index]; + const operatorToken = this.tokens.tokens[index + 1]; + if (identifierToken.type !== _types.TokenType.name) { + return false; + } + if (identifierToken.shadowsGlobal) { + return false; + } + if (index >= 1 && this.tokens.matches1AtIndex(index - 1, _types.TokenType.dot)) { + return false; + } + const identifierName = this.tokens.identifierNameForToken(identifierToken); + const assignmentSnippet = this.importProcessor.resolveExportBinding(identifierName); + if (!assignmentSnippet) { + return false; + } + const operatorCode = this.tokens.rawCodeForToken(operatorToken); + // We might also replace the identifier with something like exports.x, so + // do that replacement here as well. + const base = this.importProcessor.getIdentifierReplacement(identifierName) || identifierName; + if (operatorCode === "++") { + this.tokens.replaceToken(`(${base} = ${assignmentSnippet} = ${base} + 1, ${base} - 1)`); + } else if (operatorCode === "--") { + this.tokens.replaceToken(`(${base} = ${assignmentSnippet} = ${base} - 1, ${base} + 1)`); + } else { + throw new Error(`Unexpected operator: ${operatorCode}`); + } + this.tokens.removeToken(); + return true; + } + + processExportDefault() { + if ( + this.tokens.matches4(_types.TokenType._export, _types.TokenType._default, _types.TokenType._function, _types.TokenType.name) || + // export default async function + (this.tokens.matches5(_types.TokenType._export, _types.TokenType._default, _types.TokenType.name, _types.TokenType._function, _types.TokenType.name) && + this.tokens.matchesContextualAtIndex( + this.tokens.currentIndex() + 2, + _keywords.ContextualKeyword._async, + )) + ) { + this.tokens.removeInitialToken(); + this.tokens.removeToken(); + // Named function export case: change it to a top-level function + // declaration followed by exports statement. + const name = this.processNamedFunction(); + this.tokens.appendCode(` exports.default = ${name};`); + } else if ( + this.tokens.matches4(_types.TokenType._export, _types.TokenType._default, _types.TokenType._class, _types.TokenType.name) || + this.tokens.matches5(_types.TokenType._export, _types.TokenType._default, _types.TokenType._abstract, _types.TokenType._class, _types.TokenType.name) || + this.tokens.matches3(_types.TokenType._export, _types.TokenType._default, _types.TokenType.at) + ) { + this.tokens.removeInitialToken(); + this.tokens.removeToken(); + this.copyDecorators(); + if (this.tokens.matches1(_types.TokenType._abstract)) { + this.tokens.removeToken(); + } + const name = this.rootTransformer.processNamedClass(); + this.tokens.appendCode(` exports.default = ${name};`); + // After this point, this is a plain "export default E" statement. + } else if ( + _shouldElideDefaultExport2.default.call(void 0, this.isTypeScriptTransformEnabled, this.tokens, this.declarationInfo) + ) { + // If the exported value is just an identifier and should be elided by TypeScript + // rules, then remove it entirely. It will always have the form `export default e`, + // where `e` is an identifier. + this.tokens.removeInitialToken(); + this.tokens.removeToken(); + this.tokens.removeToken(); + } else if (this.reactHotLoaderTransformer) { + // We need to assign E to a variable. 
Change "export default E" to + // "let _default; exports.default = _default = E" + const defaultVarName = this.nameManager.claimFreeName("_default"); + this.tokens.replaceToken(`let ${defaultVarName}; exports.`); + this.tokens.copyToken(); + this.tokens.appendCode(` = ${defaultVarName} =`); + this.reactHotLoaderTransformer.setExtractedDefaultExportName(defaultVarName); + } else { + // Change "export default E" to "exports.default = E" + this.tokens.replaceToken("exports."); + this.tokens.copyToken(); + this.tokens.appendCode(" ="); + } + } + + copyDecorators() { + while (this.tokens.matches1(_types.TokenType.at)) { + this.tokens.copyToken(); + if (this.tokens.matches1(_types.TokenType.parenL)) { + this.tokens.copyExpectedToken(_types.TokenType.parenL); + this.rootTransformer.processBalancedCode(); + this.tokens.copyExpectedToken(_types.TokenType.parenR); + } else { + this.tokens.copyExpectedToken(_types.TokenType.name); + while (this.tokens.matches1(_types.TokenType.dot)) { + this.tokens.copyExpectedToken(_types.TokenType.dot); + this.tokens.copyExpectedToken(_types.TokenType.name); + } + if (this.tokens.matches1(_types.TokenType.parenL)) { + this.tokens.copyExpectedToken(_types.TokenType.parenL); + this.rootTransformer.processBalancedCode(); + this.tokens.copyExpectedToken(_types.TokenType.parenR); + } + } + } + } + + /** + * Transform a declaration like `export var`, `export let`, or `export const`. + */ + processExportVar() { + if (this.isSimpleExportVar()) { + this.processSimpleExportVar(); + } else { + this.processComplexExportVar(); + } + } + + /** + * Determine if the export is of the form: + * export var/let/const [varName] = [expr]; + * In other words, determine if function name inference might apply. + */ + isSimpleExportVar() { + let tokenIndex = this.tokens.currentIndex(); + // export + tokenIndex++; + // var/let/const + tokenIndex++; + if (!this.tokens.matches1AtIndex(tokenIndex, _types.TokenType.name)) { + return false; + } + tokenIndex++; + while (tokenIndex < this.tokens.tokens.length && this.tokens.tokens[tokenIndex].isType) { + tokenIndex++; + } + if (!this.tokens.matches1AtIndex(tokenIndex, _types.TokenType.eq)) { + return false; + } + return true; + } + + /** + * Transform an `export var` declaration initializing a single variable. + * + * For example, this: + * export const f = () => {}; + * becomes this: + * const f = () => {}; exports.f = f; + * + * The variable is unused (e.g. exports.f has the true value of the export). + * We need to produce an assignment of this form so that the function will + * have an inferred name of "f", which wouldn't happen in the more general + * case below. + */ + processSimpleExportVar() { + // export + this.tokens.removeInitialToken(); + // var/let/const + this.tokens.copyToken(); + const varName = this.tokens.identifierName(); + // x: number -> x + while (!this.tokens.matches1(_types.TokenType.eq)) { + this.rootTransformer.processToken(); + } + const endIndex = this.tokens.currentToken().rhsEndIndex; + if (endIndex == null) { + throw new Error("Expected = token with an end index."); + } + while (this.tokens.currentIndex() < endIndex) { + this.rootTransformer.processToken(); + } + this.tokens.appendCode(`; exports.${varName} = ${varName}`); + } + + /** + * Transform normal declaration exports, including handling destructuring. 
+ * For example, this: + * export const {x: [a = 2, b], c} = d; + * becomes this: + * ({x: [exports.a = 2, exports.b], c: exports.c} = d;) + */ + processComplexExportVar() { + this.tokens.removeInitialToken(); + this.tokens.removeToken(); + const needsParens = this.tokens.matches1(_types.TokenType.braceL); + if (needsParens) { + this.tokens.appendCode("("); + } + + let depth = 0; + while (true) { + if ( + this.tokens.matches1(_types.TokenType.braceL) || + this.tokens.matches1(_types.TokenType.dollarBraceL) || + this.tokens.matches1(_types.TokenType.bracketL) + ) { + depth++; + this.tokens.copyToken(); + } else if (this.tokens.matches1(_types.TokenType.braceR) || this.tokens.matches1(_types.TokenType.bracketR)) { + depth--; + this.tokens.copyToken(); + } else if ( + depth === 0 && + !this.tokens.matches1(_types.TokenType.name) && + !this.tokens.currentToken().isType + ) { + break; + } else if (this.tokens.matches1(_types.TokenType.eq)) { + // Default values might have assignments in the RHS that we want to ignore, so skip past + // them. + const endIndex = this.tokens.currentToken().rhsEndIndex; + if (endIndex == null) { + throw new Error("Expected = token with an end index."); + } + while (this.tokens.currentIndex() < endIndex) { + this.rootTransformer.processToken(); + } + } else { + const token = this.tokens.currentToken(); + if (_tokenizer.isDeclaration.call(void 0, token)) { + const name = this.tokens.identifierName(); + let replacement = this.importProcessor.getIdentifierReplacement(name); + if (replacement === null) { + throw new Error(`Expected a replacement for ${name} in \`export var\` syntax.`); + } + if (_tokenizer.isObjectShorthandDeclaration.call(void 0, token)) { + replacement = `${name}: ${replacement}`; + } + this.tokens.replaceToken(replacement); + } else { + this.rootTransformer.processToken(); + } + } + } + + if (needsParens) { + // Seek to the end of the RHS. + const endIndex = this.tokens.currentToken().rhsEndIndex; + if (endIndex == null) { + throw new Error("Expected = token with an end index."); + } + while (this.tokens.currentIndex() < endIndex) { + this.rootTransformer.processToken(); + } + this.tokens.appendCode(")"); + } + } + + /** + * Transform this: + * export function foo() {} + * into this: + * function foo() {} exports.foo = foo; + */ + processExportFunction() { + this.tokens.replaceToken(""); + const name = this.processNamedFunction(); + this.tokens.appendCode(` exports.${name} = ${name};`); + } + + /** + * Skip past a function with a name and return that name. 
+ */ + processNamedFunction() { + if (this.tokens.matches1(_types.TokenType._function)) { + this.tokens.copyToken(); + } else if (this.tokens.matches2(_types.TokenType.name, _types.TokenType._function)) { + if (!this.tokens.matchesContextual(_keywords.ContextualKeyword._async)) { + throw new Error("Expected async keyword in function export."); + } + this.tokens.copyToken(); + this.tokens.copyToken(); + } + if (this.tokens.matches1(_types.TokenType.star)) { + this.tokens.copyToken(); + } + if (!this.tokens.matches1(_types.TokenType.name)) { + throw new Error("Expected identifier for exported function name."); + } + const name = this.tokens.identifierName(); + this.tokens.copyToken(); + if (this.tokens.currentToken().isType) { + this.tokens.removeInitialToken(); + while (this.tokens.currentToken().isType) { + this.tokens.removeToken(); + } + } + this.tokens.copyExpectedToken(_types.TokenType.parenL); + this.rootTransformer.processBalancedCode(); + this.tokens.copyExpectedToken(_types.TokenType.parenR); + this.rootTransformer.processPossibleTypeRange(); + this.tokens.copyExpectedToken(_types.TokenType.braceL); + this.rootTransformer.processBalancedCode(); + this.tokens.copyExpectedToken(_types.TokenType.braceR); + return name; + } + + /** + * Transform this: + * export class A {} + * into this: + * class A {} exports.A = A; + */ + processExportClass() { + this.tokens.removeInitialToken(); + this.copyDecorators(); + if (this.tokens.matches1(_types.TokenType._abstract)) { + this.tokens.removeToken(); + } + const name = this.rootTransformer.processNamedClass(); + this.tokens.appendCode(` exports.${name} = ${name};`); + } + + /** + * Transform this: + * export {a, b as c}; + * into this: + * exports.a = a; exports.c = b; + * + * OR + * + * Transform this: + * export {a, b as c} from './foo'; + * into the pre-generated Object.defineProperty code from the ImportProcessor. + * + * For the first case, if the TypeScript transform is enabled, we need to skip + * exports that are only defined as types. + */ + processExportBindings() { + this.tokens.removeInitialToken(); + this.tokens.removeToken(); + + const exportStatements = []; + while (true) { + if (this.tokens.matches1(_types.TokenType.braceR)) { + this.tokens.removeToken(); + break; + } + + const specifierInfo = _getImportExportSpecifierInfo2.default.call(void 0, this.tokens); + while (this.tokens.currentIndex() < specifierInfo.endIndex) { + this.tokens.removeToken(); + } + if (!specifierInfo.isType && !this.shouldElideExportedIdentifier(specifierInfo.leftName)) { + const localName = specifierInfo.leftName; + const exportedName = specifierInfo.rightName; + const newLocalName = this.importProcessor.getIdentifierReplacement(localName); + exportStatements.push(`exports.${exportedName} = ${newLocalName || localName};`); + } + + if (this.tokens.matches1(_types.TokenType.braceR)) { + this.tokens.removeToken(); + break; + } + if (this.tokens.matches2(_types.TokenType.comma, _types.TokenType.braceR)) { + this.tokens.removeToken(); + this.tokens.removeToken(); + break; + } else if (this.tokens.matches1(_types.TokenType.comma)) { + this.tokens.removeToken(); + } else { + throw new Error(`Unexpected token: ${JSON.stringify(this.tokens.currentToken())}`); + } + } + + if (this.tokens.matchesContextual(_keywords.ContextualKeyword._from)) { + // This is an export...from, so throw away the normal named export code + // and use the Object.defineProperty code from ImportProcessor. 
+ this.tokens.removeToken(); + const path = this.tokens.stringValue(); + this.tokens.replaceTokenTrimmingLeftWhitespace(this.importProcessor.claimImportCode(path)); + _removeMaybeImportAssertion.removeMaybeImportAssertion.call(void 0, this.tokens); + } else { + // This is a normal named export, so use that. + this.tokens.appendCode(exportStatements.join(" ")); + } + + if (this.tokens.matches1(_types.TokenType.semi)) { + this.tokens.removeToken(); + } + } + + processExportStar() { + this.tokens.removeInitialToken(); + while (!this.tokens.matches1(_types.TokenType.string)) { + this.tokens.removeToken(); + } + const path = this.tokens.stringValue(); + this.tokens.replaceTokenTrimmingLeftWhitespace(this.importProcessor.claimImportCode(path)); + _removeMaybeImportAssertion.removeMaybeImportAssertion.call(void 0, this.tokens); + if (this.tokens.matches1(_types.TokenType.semi)) { + this.tokens.removeToken(); + } + } + + shouldElideExportedIdentifier(name) { + return this.isTypeScriptTransformEnabled && !this.declarationInfo.valueDeclarations.has(name); + } +} exports.default = CJSImportTransformer; diff --git a/node_modules/sucrase/dist/transformers/ESMImportTransformer.js b/node_modules/sucrase/dist/transformers/ESMImportTransformer.js new file mode 100644 index 0000000..18e81ee --- /dev/null +++ b/node_modules/sucrase/dist/transformers/ESMImportTransformer.js @@ -0,0 +1,363 @@ +"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + + +var _keywords = require('../parser/tokenizer/keywords'); +var _types = require('../parser/tokenizer/types'); + +var _elideImportEquals = require('../util/elideImportEquals'); var _elideImportEquals2 = _interopRequireDefault(_elideImportEquals); + + + +var _getDeclarationInfo = require('../util/getDeclarationInfo'); var _getDeclarationInfo2 = _interopRequireDefault(_getDeclarationInfo); +var _getImportExportSpecifierInfo = require('../util/getImportExportSpecifierInfo'); var _getImportExportSpecifierInfo2 = _interopRequireDefault(_getImportExportSpecifierInfo); +var _getNonTypeIdentifiers = require('../util/getNonTypeIdentifiers'); +var _removeMaybeImportAssertion = require('../util/removeMaybeImportAssertion'); +var _shouldElideDefaultExport = require('../util/shouldElideDefaultExport'); var _shouldElideDefaultExport2 = _interopRequireDefault(_shouldElideDefaultExport); + +var _Transformer = require('./Transformer'); var _Transformer2 = _interopRequireDefault(_Transformer); + +/** + * Class for editing import statements when we are keeping the code as ESM. We still need to remove + * type-only imports in TypeScript and Flow. + */ + class ESMImportTransformer extends _Transformer2.default { + + + + + constructor( + tokens, + nameManager, + helperManager, + reactHotLoaderTransformer, + isTypeScriptTransformEnabled, + options, + ) { + super();this.tokens = tokens;this.nameManager = nameManager;this.helperManager = helperManager;this.reactHotLoaderTransformer = reactHotLoaderTransformer;this.isTypeScriptTransformEnabled = isTypeScriptTransformEnabled;; + this.nonTypeIdentifiers = isTypeScriptTransformEnabled + ? _getNonTypeIdentifiers.getNonTypeIdentifiers.call(void 0, tokens, options) + : new Set(); + this.declarationInfo = isTypeScriptTransformEnabled + ? 
_getDeclarationInfo2.default.call(void 0, tokens) + : _getDeclarationInfo.EMPTY_DECLARATION_INFO; + this.injectCreateRequireForImportRequire = Boolean(options.injectCreateRequireForImportRequire); + } + + process() { + // TypeScript `import foo = require('foo');` should always just be translated to plain require. + if (this.tokens.matches3(_types.TokenType._import, _types.TokenType.name, _types.TokenType.eq)) { + return this.processImportEquals(); + } + if ( + this.tokens.matches4(_types.TokenType._import, _types.TokenType.name, _types.TokenType.name, _types.TokenType.eq) && + this.tokens.matchesContextualAtIndex(this.tokens.currentIndex() + 1, _keywords.ContextualKeyword._type) + ) { + // import type T = require('T') + this.tokens.removeInitialToken(); + // This construct is always exactly 8 tokens long, so remove the 7 remaining tokens. + for (let i = 0; i < 7; i++) { + this.tokens.removeToken(); + } + return true; + } + if (this.tokens.matches2(_types.TokenType._export, _types.TokenType.eq)) { + this.tokens.replaceToken("module.exports"); + return true; + } + if ( + this.tokens.matches5(_types.TokenType._export, _types.TokenType._import, _types.TokenType.name, _types.TokenType.name, _types.TokenType.eq) && + this.tokens.matchesContextualAtIndex(this.tokens.currentIndex() + 2, _keywords.ContextualKeyword._type) + ) { + // export import type T = require('T') + this.tokens.removeInitialToken(); + // This construct is always exactly 9 tokens long, so remove the 8 remaining tokens. + for (let i = 0; i < 8; i++) { + this.tokens.removeToken(); + } + return true; + } + if (this.tokens.matches1(_types.TokenType._import)) { + return this.processImport(); + } + if (this.tokens.matches2(_types.TokenType._export, _types.TokenType._default)) { + return this.processExportDefault(); + } + if (this.tokens.matches2(_types.TokenType._export, _types.TokenType.braceL)) { + return this.processNamedExports(); + } + if ( + this.tokens.matches2(_types.TokenType._export, _types.TokenType.name) && + this.tokens.matchesContextualAtIndex(this.tokens.currentIndex() + 1, _keywords.ContextualKeyword._type) + ) { + // export type {a}; + // export type {a as b}; + // export type {a} from './b'; + // export type * from './b'; + // export type * as ns from './b'; + this.tokens.removeInitialToken(); + this.tokens.removeToken(); + if (this.tokens.matches1(_types.TokenType.braceL)) { + while (!this.tokens.matches1(_types.TokenType.braceR)) { + this.tokens.removeToken(); + } + this.tokens.removeToken(); + } else { + // * + this.tokens.removeToken(); + if (this.tokens.matches1(_types.TokenType._as)) { + // as + this.tokens.removeToken(); + // ns + this.tokens.removeToken(); + } + } + // Remove type re-export `... } from './T'` + if ( + this.tokens.matchesContextual(_keywords.ContextualKeyword._from) && + this.tokens.matches1AtIndex(this.tokens.currentIndex() + 1, _types.TokenType.string) + ) { + this.tokens.removeToken(); + this.tokens.removeToken(); + _removeMaybeImportAssertion.removeMaybeImportAssertion.call(void 0, this.tokens); + } + return true; + } + return false; + } + + processImportEquals() { + const importName = this.tokens.identifierNameAtIndex(this.tokens.currentIndex() + 1); + if (this.isTypeName(importName)) { + // If this name is only used as a type, elide the whole import. 
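      // (Editor's illustration, not part of the sucrase source: a statement like
      //   import foo = require('./foo');
      // is removed entirely when `foo` only ever appears in type positions, and is
      // otherwise rewritten to roughly
      //   const foo = require('./foo');
      // or, with injectCreateRequireForImportRequire, to a call through a generated
      // require helper.)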
+ _elideImportEquals2.default.call(void 0, this.tokens); + } else if (this.injectCreateRequireForImportRequire) { + // We're using require in an environment (Node ESM) that doesn't provide + // it as a global, so generate a helper to import it. + // import -> const + this.tokens.replaceToken("const"); + // Foo + this.tokens.copyToken(); + // = + this.tokens.copyToken(); + // require + this.tokens.replaceToken(this.helperManager.getHelperName("require")); + } else { + // Otherwise, just switch `import` to `const`. + this.tokens.replaceToken("const"); + } + return true; + } + + processImport() { + if (this.tokens.matches2(_types.TokenType._import, _types.TokenType.parenL)) { + // Dynamic imports don't need to be transformed. + return false; + } + + const snapshot = this.tokens.snapshot(); + const allImportsRemoved = this.removeImportTypeBindings(); + if (allImportsRemoved) { + this.tokens.restoreToSnapshot(snapshot); + while (!this.tokens.matches1(_types.TokenType.string)) { + this.tokens.removeToken(); + } + this.tokens.removeToken(); + _removeMaybeImportAssertion.removeMaybeImportAssertion.call(void 0, this.tokens); + if (this.tokens.matches1(_types.TokenType.semi)) { + this.tokens.removeToken(); + } + } + return true; + } + + /** + * Remove type bindings from this import, leaving the rest of the import intact. + * + * Return true if this import was ONLY types, and thus is eligible for removal. This will bail out + * of the replacement operation, so we can return early here. + */ + removeImportTypeBindings() { + this.tokens.copyExpectedToken(_types.TokenType._import); + if ( + this.tokens.matchesContextual(_keywords.ContextualKeyword._type) && + !this.tokens.matches1AtIndex(this.tokens.currentIndex() + 1, _types.TokenType.comma) && + !this.tokens.matchesContextualAtIndex(this.tokens.currentIndex() + 1, _keywords.ContextualKeyword._from) + ) { + // This is an "import type" statement, so exit early. + return true; + } + + if (this.tokens.matches1(_types.TokenType.string)) { + // This is a bare import, so we should proceed with the import. + this.tokens.copyToken(); + return false; + } + + // Skip the "module" token in import reflection. + if ( + this.tokens.matchesContextual(_keywords.ContextualKeyword._module) && + this.tokens.matchesContextualAtIndex(this.tokens.currentIndex() + 2, _keywords.ContextualKeyword._from) + ) { + this.tokens.copyToken(); + } + + let foundNonTypeImport = false; + let needsComma = false; + + if (this.tokens.matches1(_types.TokenType.name)) { + if (this.isTypeName(this.tokens.identifierName())) { + this.tokens.removeToken(); + if (this.tokens.matches1(_types.TokenType.comma)) { + this.tokens.removeToken(); + } + } else { + foundNonTypeImport = true; + this.tokens.copyToken(); + if (this.tokens.matches1(_types.TokenType.comma)) { + // We're in a statement like: + // import A, * as B from './A'; + // or + // import A, {foo} from './A'; + // where the `A` is being kept. The comma should be removed if an only + // if the next part of the import statement is elided, but that's hard + // to determine at this point in the code. Instead, always remove it + // and set a flag to add it back if necessary. 
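          // (Editor's illustration, not part of the sucrase source: given
          //   import A, {type T, b} from './A';
          // the comma after `A` is removed here, re-appended below because `b` survives,
          // and the output is roughly
          //   import A, {b} from './A';
          // whereas for
          //   import A, * as ns from './A';
          // with `ns` used only in type positions, the whole `* as ns` part is elided,
          // nothing re-appends the comma, and the output is roughly
          //   import A from './A';)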
+ needsComma = true; + this.tokens.removeToken(); + } + } + } + + if (this.tokens.matches1(_types.TokenType.star)) { + if (this.isTypeName(this.tokens.identifierNameAtRelativeIndex(2))) { + this.tokens.removeToken(); + this.tokens.removeToken(); + this.tokens.removeToken(); + } else { + if (needsComma) { + this.tokens.appendCode(","); + } + foundNonTypeImport = true; + this.tokens.copyExpectedToken(_types.TokenType.star); + this.tokens.copyExpectedToken(_types.TokenType.name); + this.tokens.copyExpectedToken(_types.TokenType.name); + } + } else if (this.tokens.matches1(_types.TokenType.braceL)) { + if (needsComma) { + this.tokens.appendCode(","); + } + this.tokens.copyToken(); + while (!this.tokens.matches1(_types.TokenType.braceR)) { + const specifierInfo = _getImportExportSpecifierInfo2.default.call(void 0, this.tokens); + if (specifierInfo.isType || this.isTypeName(specifierInfo.rightName)) { + while (this.tokens.currentIndex() < specifierInfo.endIndex) { + this.tokens.removeToken(); + } + if (this.tokens.matches1(_types.TokenType.comma)) { + this.tokens.removeToken(); + } + } else { + foundNonTypeImport = true; + while (this.tokens.currentIndex() < specifierInfo.endIndex) { + this.tokens.copyToken(); + } + if (this.tokens.matches1(_types.TokenType.comma)) { + this.tokens.copyToken(); + } + } + } + this.tokens.copyExpectedToken(_types.TokenType.braceR); + } + + return !foundNonTypeImport; + } + + isTypeName(name) { + return this.isTypeScriptTransformEnabled && !this.nonTypeIdentifiers.has(name); + } + + processExportDefault() { + if ( + _shouldElideDefaultExport2.default.call(void 0, this.isTypeScriptTransformEnabled, this.tokens, this.declarationInfo) + ) { + // If the exported value is just an identifier and should be elided by TypeScript + // rules, then remove it entirely. It will always have the form `export default e`, + // where `e` is an identifier. + this.tokens.removeInitialToken(); + this.tokens.removeToken(); + this.tokens.removeToken(); + return true; + } + + const alreadyHasName = + this.tokens.matches4(_types.TokenType._export, _types.TokenType._default, _types.TokenType._function, _types.TokenType.name) || + // export default async function + (this.tokens.matches5(_types.TokenType._export, _types.TokenType._default, _types.TokenType.name, _types.TokenType._function, _types.TokenType.name) && + this.tokens.matchesContextualAtIndex( + this.tokens.currentIndex() + 2, + _keywords.ContextualKeyword._async, + )) || + this.tokens.matches4(_types.TokenType._export, _types.TokenType._default, _types.TokenType._class, _types.TokenType.name) || + this.tokens.matches5(_types.TokenType._export, _types.TokenType._default, _types.TokenType._abstract, _types.TokenType._class, _types.TokenType.name); + + if (!alreadyHasName && this.reactHotLoaderTransformer) { + // This is a plain "export default E" statement and we need to assign E to a variable. + // Change "export default E" to "let _default; export default _default = E" + const defaultVarName = this.nameManager.claimFreeName("_default"); + this.tokens.replaceToken(`let ${defaultVarName}; export`); + this.tokens.copyToken(); + this.tokens.appendCode(` ${defaultVarName} =`); + this.reactHotLoaderTransformer.setExtractedDefaultExportName(defaultVarName); + return true; + } + return false; + } + + /** + * In TypeScript, we need to remove named exports that were never declared or only declared as a + * type. 
+ */ + processNamedExports() { + if (!this.isTypeScriptTransformEnabled) { + return false; + } + this.tokens.copyExpectedToken(_types.TokenType._export); + this.tokens.copyExpectedToken(_types.TokenType.braceL); + + while (!this.tokens.matches1(_types.TokenType.braceR)) { + const specifierInfo = _getImportExportSpecifierInfo2.default.call(void 0, this.tokens); + if (specifierInfo.isType || this.shouldElideExportedName(specifierInfo.leftName)) { + // Type export, so remove all tokens, including any comma. + while (this.tokens.currentIndex() < specifierInfo.endIndex) { + this.tokens.removeToken(); + } + if (this.tokens.matches1(_types.TokenType.comma)) { + this.tokens.removeToken(); + } + } else { + // Non-type export, so copy all tokens, including any comma. + while (this.tokens.currentIndex() < specifierInfo.endIndex) { + this.tokens.copyToken(); + } + if (this.tokens.matches1(_types.TokenType.comma)) { + this.tokens.copyToken(); + } + } + } + this.tokens.copyExpectedToken(_types.TokenType.braceR); + return true; + } + + /** + * ESM elides all imports with the rule that we only elide if we see that it's + * a type and never see it as a value. This is in contrast to CJS, which + * elides imports that are completely unknown. + */ + shouldElideExportedName(name) { + return ( + this.isTypeScriptTransformEnabled && + this.declarationInfo.typeDeclarations.has(name) && + !this.declarationInfo.valueDeclarations.has(name) + ); + } +} exports.default = ESMImportTransformer; diff --git a/node_modules/sucrase/dist/transformers/FlowTransformer.js b/node_modules/sucrase/dist/transformers/FlowTransformer.js new file mode 100644 index 0000000..31c9744 --- /dev/null +++ b/node_modules/sucrase/dist/transformers/FlowTransformer.js @@ -0,0 +1,182 @@ +"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }var _keywords = require('../parser/tokenizer/keywords'); +var _types = require('../parser/tokenizer/types'); + + +var _Transformer = require('./Transformer'); var _Transformer2 = _interopRequireDefault(_Transformer); + + class FlowTransformer extends _Transformer2.default { + constructor( + rootTransformer, + tokens, + isImportsTransformEnabled, + ) { + super();this.rootTransformer = rootTransformer;this.tokens = tokens;this.isImportsTransformEnabled = isImportsTransformEnabled;; + } + + process() { + if ( + this.rootTransformer.processPossibleArrowParamEnd() || + this.rootTransformer.processPossibleAsyncArrowWithTypeParams() || + this.rootTransformer.processPossibleTypeRange() + ) { + return true; + } + if (this.tokens.matches1(_types.TokenType._enum)) { + this.processEnum(); + return true; + } + if (this.tokens.matches2(_types.TokenType._export, _types.TokenType._enum)) { + this.processNamedExportEnum(); + return true; + } + if (this.tokens.matches3(_types.TokenType._export, _types.TokenType._default, _types.TokenType._enum)) { + this.processDefaultExportEnum(); + return true; + } + return false; + } + + /** + * Handle a declaration like: + * export enum E ... 
+ * + * With this imports transform, this becomes: + * const E = [[enum]]; exports.E = E; + * + * otherwise, it becomes: + * export const E = [[enum]]; + */ + processNamedExportEnum() { + if (this.isImportsTransformEnabled) { + // export + this.tokens.removeInitialToken(); + const enumName = this.tokens.identifierNameAtRelativeIndex(1); + this.processEnum(); + this.tokens.appendCode(` exports.${enumName} = ${enumName};`); + } else { + this.tokens.copyToken(); + this.processEnum(); + } + } + + /** + * Handle a declaration like: + * export default enum E + * + * With the imports transform, this becomes: + * const E = [[enum]]; exports.default = E; + * + * otherwise, it becomes: + * const E = [[enum]]; export default E; + */ + processDefaultExportEnum() { + // export + this.tokens.removeInitialToken(); + // default + this.tokens.removeToken(); + const enumName = this.tokens.identifierNameAtRelativeIndex(1); + this.processEnum(); + if (this.isImportsTransformEnabled) { + this.tokens.appendCode(` exports.default = ${enumName};`); + } else { + this.tokens.appendCode(` export default ${enumName};`); + } + } + + /** + * Transpile flow enums to invoke the "flow-enums-runtime" library. + * + * Currently, the transpiled code always uses `require("flow-enums-runtime")`, + * but if future flexibility is needed, we could expose a config option for + * this string (similar to configurable JSX). Even when targeting ESM, the + * default behavior of babel-plugin-transform-flow-enums is to use require + * rather than injecting an import. + * + * Flow enums are quite a bit simpler than TS enums and have some convenient + * constraints: + * - Element initializers must be either always present or always absent. That + * means that we can use fixed lookahead on the first element (if any) and + * assume that all elements are like that. + * - The right-hand side of an element initializer must be a literal value, + * not a complex expression and not referencing other elements. That means + * we can simply copy a single token. + * + * Enums can be broken up into three basic cases: + * + * Mirrored enums: + * enum E {A, B} + * -> + * const E = require("flow-enums-runtime").Mirrored(["A", "B"]); + * + * Initializer enums: + * enum E {A = 1, B = 2} + * -> + * const E = require("flow-enums-runtime")({A: 1, B: 2}); + * + * Symbol enums: + * enum E of symbol {A, B} + * -> + * const E = require("flow-enums-runtime")({A: Symbol("A"), B: Symbol("B")}); + * + * We can statically detect which of the three cases this is by looking at the + * "of" declaration (if any) and seeing if the first element has an initializer. + * Since the other transform details are so similar between the three cases, we + * use a single implementation and vary the transform within processEnumElement + * based on case. + */ + processEnum() { + // enum E -> const E + this.tokens.replaceToken("const"); + this.tokens.copyExpectedToken(_types.TokenType.name); + + let isSymbolEnum = false; + if (this.tokens.matchesContextual(_keywords.ContextualKeyword._of)) { + this.tokens.removeToken(); + isSymbolEnum = this.tokens.matchesContextual(_keywords.ContextualKeyword._symbol); + this.tokens.removeToken(); + } + const hasInitializers = this.tokens.matches3(_types.TokenType.braceL, _types.TokenType.name, _types.TokenType.eq); + this.tokens.appendCode(' = require("flow-enums-runtime")'); + + const isMirrored = !isSymbolEnum && !hasInitializers; + this.tokens.replaceTokenTrimmingLeftWhitespace(isMirrored ? 
".Mirrored([" : "({"); + + while (!this.tokens.matches1(_types.TokenType.braceR)) { + // ... is allowed at the end and has no runtime behavior. + if (this.tokens.matches1(_types.TokenType.ellipsis)) { + this.tokens.removeToken(); + break; + } + this.processEnumElement(isSymbolEnum, hasInitializers); + if (this.tokens.matches1(_types.TokenType.comma)) { + this.tokens.copyToken(); + } + } + + this.tokens.replaceToken(isMirrored ? "]);" : "});"); + } + + /** + * Process an individual enum element, producing either an array element or an + * object element based on what type of enum this is. + */ + processEnumElement(isSymbolEnum, hasInitializers) { + if (isSymbolEnum) { + // Symbol enums never have initializers and are expanded to object elements. + // A, -> A: Symbol("A"), + const elementName = this.tokens.identifierName(); + this.tokens.copyToken(); + this.tokens.appendCode(`: Symbol("${elementName}")`); + } else if (hasInitializers) { + // Initializers are expanded to object elements. + // A = 1, -> A: 1, + this.tokens.copyToken(); + this.tokens.replaceTokenTrimmingLeftWhitespace(":"); + this.tokens.copyToken(); + } else { + // Enum elements without initializers become string literal array elements. + // A, -> "A", + this.tokens.replaceToken(`"${this.tokens.identifierName()}"`); + } + } +} exports.default = FlowTransformer; diff --git a/node_modules/sucrase/dist/transformers/JSXTransformer.js b/node_modules/sucrase/dist/transformers/JSXTransformer.js new file mode 100644 index 0000000..df51be3 --- /dev/null +++ b/node_modules/sucrase/dist/transformers/JSXTransformer.js @@ -0,0 +1,733 @@ +"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + + +var _xhtml = require('../parser/plugins/jsx/xhtml'); var _xhtml2 = _interopRequireDefault(_xhtml); +var _tokenizer = require('../parser/tokenizer'); +var _types = require('../parser/tokenizer/types'); +var _charcodes = require('../parser/util/charcodes'); + +var _getJSXPragmaInfo = require('../util/getJSXPragmaInfo'); var _getJSXPragmaInfo2 = _interopRequireDefault(_getJSXPragmaInfo); + +var _Transformer = require('./Transformer'); var _Transformer2 = _interopRequireDefault(_Transformer); + + class JSXTransformer extends _Transformer2.default { + + + + + // State for calculating the line number of each JSX tag in development. + __init() {this.lastLineNumber = 1} + __init2() {this.lastIndex = 0} + + // In development, variable name holding the name of the current file. + __init3() {this.filenameVarName = null} + // Mapping of claimed names for imports in the automatic transform, e,g. + // {jsx: "_jsx"}. This determines which imports to generate in the prefix. + __init4() {this.esmAutomaticImportNameResolutions = {}} + // When automatically adding imports in CJS mode, we store the variable name + // holding the imported CJS module so we can require it in the prefix. 
+ __init5() {this.cjsAutomaticModuleNameResolutions = {}} + + constructor( + rootTransformer, + tokens, + importProcessor, + nameManager, + options, + ) { + super();this.rootTransformer = rootTransformer;this.tokens = tokens;this.importProcessor = importProcessor;this.nameManager = nameManager;this.options = options;JSXTransformer.prototype.__init.call(this);JSXTransformer.prototype.__init2.call(this);JSXTransformer.prototype.__init3.call(this);JSXTransformer.prototype.__init4.call(this);JSXTransformer.prototype.__init5.call(this);; + this.jsxPragmaInfo = _getJSXPragmaInfo2.default.call(void 0, options); + this.isAutomaticRuntime = options.jsxRuntime === "automatic"; + this.jsxImportSource = options.jsxImportSource || "react"; + } + + process() { + if (this.tokens.matches1(_types.TokenType.jsxTagStart)) { + this.processJSXTag(); + return true; + } + return false; + } + + getPrefixCode() { + let prefix = ""; + if (this.filenameVarName) { + prefix += `const ${this.filenameVarName} = ${JSON.stringify(this.options.filePath || "")};`; + } + if (this.isAutomaticRuntime) { + if (this.importProcessor) { + // CJS mode: emit require statements for all modules that were referenced. + for (const [path, resolvedName] of Object.entries(this.cjsAutomaticModuleNameResolutions)) { + prefix += `var ${resolvedName} = require("${path}");`; + } + } else { + // ESM mode: consolidate and emit import statements for referenced names. + const {createElement: createElementResolution, ...otherResolutions} = + this.esmAutomaticImportNameResolutions; + if (createElementResolution) { + prefix += `import {createElement as ${createElementResolution}} from "${this.jsxImportSource}";`; + } + const importSpecifiers = Object.entries(otherResolutions) + .map(([name, resolvedName]) => `${name} as ${resolvedName}`) + .join(", "); + if (importSpecifiers) { + const importPath = + this.jsxImportSource + (this.options.production ? "/jsx-runtime" : "/jsx-dev-runtime"); + prefix += `import {${importSpecifiers}} from "${importPath}";`; + } + } + } + return prefix; + } + + processJSXTag() { + const {jsxRole, start} = this.tokens.currentToken(); + // Calculate line number information at the very start (if in development + // mode) so that the information is guaranteed to be queried in token order. + const elementLocationCode = this.options.production ? null : this.getElementLocationCode(start); + if (this.isAutomaticRuntime && jsxRole !== _tokenizer.JSXRole.KeyAfterPropSpread) { + this.transformTagToJSXFunc(elementLocationCode, jsxRole); + } else { + this.transformTagToCreateElement(elementLocationCode); + } + } + + getElementLocationCode(firstTokenStart) { + const lineNumber = this.getLineNumberForIndex(firstTokenStart); + return `lineNumber: ${lineNumber}`; + } + + /** + * Get the line number for this source position. This is calculated lazily and + * must be called in increasing order by index. + */ + getLineNumberForIndex(index) { + const code = this.tokens.code; + while (this.lastIndex < index && this.lastIndex < code.length) { + if (code[this.lastIndex] === "\n") { + this.lastLineNumber++; + } + this.lastIndex++; + } + return this.lastLineNumber; + } + + /** + * Convert the current JSX element to a call to jsx, jsxs, or jsxDEV. This is + * the primary transformation for the automatic transform. + * + * Example: + *
<div a={1} key={2}>Hello{x}</div>
+ * becomes + * jsxs('div', {a: 1, children: ["Hello", x]}, 2) + */ + transformTagToJSXFunc(elementLocationCode, jsxRole) { + const isStatic = jsxRole === _tokenizer.JSXRole.StaticChildren; + // First tag is always jsxTagStart. + this.tokens.replaceToken(this.getJSXFuncInvocationCode(isStatic)); + + let keyCode = null; + if (this.tokens.matches1(_types.TokenType.jsxTagEnd)) { + // Fragment syntax. + this.tokens.replaceToken(`${this.getFragmentCode()}, {`); + this.processAutomaticChildrenAndEndProps(jsxRole); + } else { + // Normal open tag or self-closing tag. + this.processTagIntro(); + this.tokens.appendCode(", {"); + keyCode = this.processProps(true); + + if (this.tokens.matches2(_types.TokenType.slash, _types.TokenType.jsxTagEnd)) { + // Self-closing tag, no children to add, so close the props. + this.tokens.appendCode("}"); + } else if (this.tokens.matches1(_types.TokenType.jsxTagEnd)) { + // Tag with children. + this.tokens.removeToken(); + this.processAutomaticChildrenAndEndProps(jsxRole); + } else { + throw new Error("Expected either /> or > at the end of the tag."); + } + // If a key was present, move it to its own arg. Note that moving code + // like this will cause line numbers to get out of sync within the JSX + // element if the key expression has a newline in it. This is unfortunate, + // but hopefully should be rare. + if (keyCode) { + this.tokens.appendCode(`, ${keyCode}`); + } + } + if (!this.options.production) { + // If the key wasn't already added, add it now so we can correctly set + // positional args for jsxDEV. + if (keyCode === null) { + this.tokens.appendCode(", void 0"); + } + this.tokens.appendCode(`, ${isStatic}, ${this.getDevSource(elementLocationCode)}, this`); + } + // We're at the close-tag or the end of a self-closing tag, so remove + // everything else and close the function call. + this.tokens.removeInitialToken(); + while (!this.tokens.matches1(_types.TokenType.jsxTagEnd)) { + this.tokens.removeToken(); + } + this.tokens.replaceToken(")"); + } + + /** + * Convert the current JSX element to a createElement call. In the classic + * runtime, this is the only case. In the automatic runtime, this is called + * as a fallback in some situations. + * + * Example: + *
<div a={1} key={2}>Hello{x}</div>
+ * becomes + * React.createElement('div', {a: 1, key: 2}, "Hello", x) + */ + transformTagToCreateElement(elementLocationCode) { + // First tag is always jsxTagStart. + this.tokens.replaceToken(this.getCreateElementInvocationCode()); + + if (this.tokens.matches1(_types.TokenType.jsxTagEnd)) { + // Fragment syntax. + this.tokens.replaceToken(`${this.getFragmentCode()}, null`); + this.processChildren(true); + } else { + // Normal open tag or self-closing tag. + this.processTagIntro(); + this.processPropsObjectWithDevInfo(elementLocationCode); + + if (this.tokens.matches2(_types.TokenType.slash, _types.TokenType.jsxTagEnd)) { + // Self-closing tag; no children to process. + } else if (this.tokens.matches1(_types.TokenType.jsxTagEnd)) { + // Tag with children and a close-tag; process the children as args. + this.tokens.removeToken(); + this.processChildren(true); + } else { + throw new Error("Expected either /> or > at the end of the tag."); + } + } + // We're at the close-tag or the end of a self-closing tag, so remove + // everything else and close the function call. + this.tokens.removeInitialToken(); + while (!this.tokens.matches1(_types.TokenType.jsxTagEnd)) { + this.tokens.removeToken(); + } + this.tokens.replaceToken(")"); + } + + /** + * Get the code for the relevant function for this context: jsx, jsxs, + * or jsxDEV. The following open-paren is included as well. + * + * These functions are only used for the automatic runtime, so they are always + * auto-imported, but the auto-import will be either CJS or ESM based on the + * target module format. + */ + getJSXFuncInvocationCode(isStatic) { + if (this.options.production) { + if (isStatic) { + return this.claimAutoImportedFuncInvocation("jsxs", "/jsx-runtime"); + } else { + return this.claimAutoImportedFuncInvocation("jsx", "/jsx-runtime"); + } + } else { + return this.claimAutoImportedFuncInvocation("jsxDEV", "/jsx-dev-runtime"); + } + } + + /** + * Return the code to use for the createElement function, e.g. + * `React.createElement`, including the following open-paren. + * + * This is the main function to use for the classic runtime. For the + * automatic runtime, this function is used as a fallback function to + * preserve behavior when there is a prop spread followed by an explicit + * key. In that automatic runtime case, the function should be automatically + * imported. + */ + getCreateElementInvocationCode() { + if (this.isAutomaticRuntime) { + return this.claimAutoImportedFuncInvocation("createElement", ""); + } else { + const {jsxPragmaInfo} = this; + const resolvedPragmaBaseName = this.importProcessor + ? this.importProcessor.getIdentifierReplacement(jsxPragmaInfo.base) || jsxPragmaInfo.base + : jsxPragmaInfo.base; + return `${resolvedPragmaBaseName}${jsxPragmaInfo.suffix}(`; + } + } + + /** + * Return the code to use as the component when compiling a shorthand + * fragment, e.g. `React.Fragment`. + * + * This may be called from either the classic or automatic runtime, and + * the value should be auto-imported for the automatic runtime. + */ + getFragmentCode() { + if (this.isAutomaticRuntime) { + return this.claimAutoImportedName( + "Fragment", + this.options.production ? "/jsx-runtime" : "/jsx-dev-runtime", + ); + } else { + const {jsxPragmaInfo} = this; + const resolvedFragmentPragmaBaseName = this.importProcessor + ? 
this.importProcessor.getIdentifierReplacement(jsxPragmaInfo.fragmentBase) || + jsxPragmaInfo.fragmentBase + : jsxPragmaInfo.fragmentBase; + return resolvedFragmentPragmaBaseName + jsxPragmaInfo.fragmentSuffix; + } + } + + /** + * Return code that invokes the given function. + * + * When the imports transform is enabled, use the CJSImportTransformer + * strategy of using `.call(void 0, ...` to avoid passing a `this` value in a + * situation that would otherwise look like a method call. + */ + claimAutoImportedFuncInvocation(funcName, importPathSuffix) { + const funcCode = this.claimAutoImportedName(funcName, importPathSuffix); + if (this.importProcessor) { + return `${funcCode}.call(void 0, `; + } else { + return `${funcCode}(`; + } + } + + claimAutoImportedName(funcName, importPathSuffix) { + if (this.importProcessor) { + // CJS mode: claim a name for the module and mark it for import. + const path = this.jsxImportSource + importPathSuffix; + if (!this.cjsAutomaticModuleNameResolutions[path]) { + this.cjsAutomaticModuleNameResolutions[path] = + this.importProcessor.getFreeIdentifierForPath(path); + } + return `${this.cjsAutomaticModuleNameResolutions[path]}.${funcName}`; + } else { + // ESM mode: claim a name for this function and add it to the names that + // should be auto-imported when the prefix is generated. + if (!this.esmAutomaticImportNameResolutions[funcName]) { + this.esmAutomaticImportNameResolutions[funcName] = this.nameManager.claimFreeName( + `_${funcName}`, + ); + } + return this.esmAutomaticImportNameResolutions[funcName]; + } + } + + /** + * Process the first part of a tag, before any props. + */ + processTagIntro() { + // Walk forward until we see one of these patterns: + // jsxName to start the first prop, preceded by another jsxName to end the tag name. + // jsxName to start the first prop, preceded by greaterThan to end the type argument. + // [open brace] to start the first prop. + // [jsxTagEnd] to end the open-tag. + // [slash, jsxTagEnd] to end the self-closing tag. + let introEnd = this.tokens.currentIndex() + 1; + while ( + this.tokens.tokens[introEnd].isType || + (!this.tokens.matches2AtIndex(introEnd - 1, _types.TokenType.jsxName, _types.TokenType.jsxName) && + !this.tokens.matches2AtIndex(introEnd - 1, _types.TokenType.greaterThan, _types.TokenType.jsxName) && + !this.tokens.matches1AtIndex(introEnd, _types.TokenType.braceL) && + !this.tokens.matches1AtIndex(introEnd, _types.TokenType.jsxTagEnd) && + !this.tokens.matches2AtIndex(introEnd, _types.TokenType.slash, _types.TokenType.jsxTagEnd)) + ) { + introEnd++; + } + if (introEnd === this.tokens.currentIndex() + 1) { + const tagName = this.tokens.identifierName(); + if (startsWithLowerCase(tagName)) { + this.tokens.replaceToken(`'${tagName}'`); + } + } + while (this.tokens.currentIndex() < introEnd) { + this.rootTransformer.processToken(); + } + } + + /** + * Starting at the beginning of the props, add the props argument to + * React.createElement, including the comma before it. + */ + processPropsObjectWithDevInfo(elementLocationCode) { + const devProps = this.options.production + ? 
"" + : `__self: this, __source: ${this.getDevSource(elementLocationCode)}`; + if (!this.tokens.matches1(_types.TokenType.jsxName) && !this.tokens.matches1(_types.TokenType.braceL)) { + if (devProps) { + this.tokens.appendCode(`, {${devProps}}`); + } else { + this.tokens.appendCode(`, null`); + } + return; + } + this.tokens.appendCode(`, {`); + this.processProps(false); + if (devProps) { + this.tokens.appendCode(` ${devProps}}`); + } else { + this.tokens.appendCode("}"); + } + } + + /** + * Transform the core part of the props, assuming that a { has already been + * inserted before us and that a } will be inserted after us. + * + * If extractKeyCode is true (i.e. when using any jsx... function), any prop + * named "key" has its code captured and returned rather than being emitted to + * the output code. This shifts line numbers, and emitting the code later will + * correct line numbers again. If no key is found or if extractKeyCode is + * false, this function returns null. + */ + processProps(extractKeyCode) { + let keyCode = null; + while (true) { + if (this.tokens.matches2(_types.TokenType.jsxName, _types.TokenType.eq)) { + // This is a regular key={value} or key="value" prop. + const propName = this.tokens.identifierName(); + if (extractKeyCode && propName === "key") { + if (keyCode !== null) { + // The props list has multiple keys. Different implementations are + // inconsistent about what to do here: as of this writing, Babel and + // swc keep the *last* key and completely remove the rest, while + // TypeScript uses the *first* key and leaves the others as regular + // props. The React team collaborated with Babel on the + // implementation of this behavior, so presumably the Babel behavior + // is the one to use. + // Since we won't ever be emitting the previous key code, we need to + // at least emit its newlines here so that the line numbers match up + // in the long run. + this.tokens.appendCode(keyCode.replace(/[^\n]/g, "")); + } + // key + this.tokens.removeToken(); + // = + this.tokens.removeToken(); + const snapshot = this.tokens.snapshot(); + this.processPropValue(); + keyCode = this.tokens.dangerouslyGetAndRemoveCodeSinceSnapshot(snapshot); + // Don't add a comma + continue; + } else { + this.processPropName(propName); + this.tokens.replaceToken(": "); + this.processPropValue(); + } + } else if (this.tokens.matches1(_types.TokenType.jsxName)) { + // This is a shorthand prop like . + const propName = this.tokens.identifierName(); + this.processPropName(propName); + this.tokens.appendCode(": true"); + } else if (this.tokens.matches1(_types.TokenType.braceL)) { + // This is prop spread, like
, which we can pass + // through fairly directly as an object spread. + this.tokens.replaceToken(""); + this.rootTransformer.processBalancedCode(); + this.tokens.replaceToken(""); + } else { + break; + } + this.tokens.appendCode(","); + } + return keyCode; + } + + processPropName(propName) { + if (propName.includes("-")) { + this.tokens.replaceToken(`'${propName}'`); + } else { + this.tokens.copyToken(); + } + } + + processPropValue() { + if (this.tokens.matches1(_types.TokenType.braceL)) { + this.tokens.replaceToken(""); + this.rootTransformer.processBalancedCode(); + this.tokens.replaceToken(""); + } else if (this.tokens.matches1(_types.TokenType.jsxTagStart)) { + this.processJSXTag(); + } else { + this.processStringPropValue(); + } + } + + processStringPropValue() { + const token = this.tokens.currentToken(); + const valueCode = this.tokens.code.slice(token.start + 1, token.end - 1); + const replacementCode = formatJSXTextReplacement(valueCode); + const literalCode = formatJSXStringValueLiteral(valueCode); + this.tokens.replaceToken(literalCode + replacementCode); + } + + /** + * Starting in the middle of the props object literal, produce an additional + * prop for the children and close the object literal. + */ + processAutomaticChildrenAndEndProps(jsxRole) { + if (jsxRole === _tokenizer.JSXRole.StaticChildren) { + this.tokens.appendCode(" children: ["); + this.processChildren(false); + this.tokens.appendCode("]}"); + } else { + // The parser information tells us whether we will see a real child or if + // all remaining children (if any) will resolve to empty. If there are no + // non-empty children, don't emit a children prop at all, but still + // process children so that we properly transform the code into nothing. + if (jsxRole === _tokenizer.JSXRole.OneChild) { + this.tokens.appendCode(" children: "); + } + this.processChildren(false); + this.tokens.appendCode("}"); + } + } + + /** + * Transform children into a comma-separated list, which will be either + * arguments to createElement or array elements of a children prop. + */ + processChildren(needsInitialComma) { + let needsComma = needsInitialComma; + while (true) { + if (this.tokens.matches2(_types.TokenType.jsxTagStart, _types.TokenType.slash)) { + // Closing tag, so no more children. + return; + } + let didEmitElement = false; + if (this.tokens.matches1(_types.TokenType.braceL)) { + if (this.tokens.matches2(_types.TokenType.braceL, _types.TokenType.braceR)) { + // Empty interpolations and comment-only interpolations are allowed + // and don't create an extra child arg. + this.tokens.replaceToken(""); + this.tokens.replaceToken(""); + } else { + // Interpolated expression. + this.tokens.replaceToken(needsComma ? ", " : ""); + this.rootTransformer.processBalancedCode(); + this.tokens.replaceToken(""); + didEmitElement = true; + } + } else if (this.tokens.matches1(_types.TokenType.jsxTagStart)) { + // Child JSX element + this.tokens.appendCode(needsComma ? ", " : ""); + this.processJSXTag(); + didEmitElement = true; + } else if (this.tokens.matches1(_types.TokenType.jsxText) || this.tokens.matches1(_types.TokenType.jsxEmptyText)) { + didEmitElement = this.processChildTextElement(needsComma); + } else { + throw new Error("Unexpected token when processing JSX children."); + } + if (didEmitElement) { + needsComma = true; + } + } + } + + /** + * Turn a JSX text element into a string literal, or nothing at all if the JSX + * text resolves to the empty string. + * + * Returns true if a string literal is emitted, false otherwise. 
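 *
 * A hedged illustration: for child text "\n  Hello\n  world\n", the emitted
 * literal is "Hello world", and the original newlines are re-emitted after it
 * (see formatJSXTextReplacement below) so later tokens keep their line numbers.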
+ */ + processChildTextElement(needsComma) { + const token = this.tokens.currentToken(); + const valueCode = this.tokens.code.slice(token.start, token.end); + const replacementCode = formatJSXTextReplacement(valueCode); + const literalCode = formatJSXTextLiteral(valueCode); + if (literalCode === '""') { + this.tokens.replaceToken(replacementCode); + return false; + } else { + this.tokens.replaceToken(`${needsComma ? ", " : ""}${literalCode}${replacementCode}`); + return true; + } + } + + getDevSource(elementLocationCode) { + return `{fileName: ${this.getFilenameVarName()}, ${elementLocationCode}}`; + } + + getFilenameVarName() { + if (!this.filenameVarName) { + this.filenameVarName = this.nameManager.claimFreeName("_jsxFileName"); + } + return this.filenameVarName; + } +} exports.default = JSXTransformer; + +/** + * Spec for identifiers: https://tc39.github.io/ecma262/#prod-IdentifierStart. + * + * Really only treat anything starting with a-z as tag names. `_`, `$`, `é` + * should be treated as component names + */ + function startsWithLowerCase(s) { + const firstChar = s.charCodeAt(0); + return firstChar >= _charcodes.charCodes.lowercaseA && firstChar <= _charcodes.charCodes.lowercaseZ; +} exports.startsWithLowerCase = startsWithLowerCase; + +/** + * Turn the given jsxText string into a JS string literal. Leading and trailing + * whitespace on lines is removed, except immediately after the open-tag and + * before the close-tag. Empty lines are completely removed, and spaces are + * added between lines after that. + * + * We use JSON.stringify to introduce escape characters as necessary, and trim + * the start and end of each line and remove blank lines. + */ +function formatJSXTextLiteral(text) { + let result = ""; + let whitespace = ""; + + let isInInitialLineWhitespace = false; + let seenNonWhitespace = false; + for (let i = 0; i < text.length; i++) { + const c = text[i]; + if (c === " " || c === "\t" || c === "\r") { + if (!isInInitialLineWhitespace) { + whitespace += c; + } + } else if (c === "\n") { + whitespace = ""; + isInInitialLineWhitespace = true; + } else { + if (seenNonWhitespace && isInInitialLineWhitespace) { + result += " "; + } + result += whitespace; + whitespace = ""; + if (c === "&") { + const {entity, newI} = processEntity(text, i + 1); + i = newI - 1; + result += entity; + } else { + result += c; + } + seenNonWhitespace = true; + isInInitialLineWhitespace = false; + } + } + if (!isInInitialLineWhitespace) { + result += whitespace; + } + return JSON.stringify(result); +} + +/** + * Produce the code that should be printed after the JSX text string literal, + * with most content removed, but all newlines preserved and all spacing at the + * end preserved. + */ +function formatJSXTextReplacement(text) { + let numNewlines = 0; + let numSpaces = 0; + for (const c of text) { + if (c === "\n") { + numNewlines++; + numSpaces = 0; + } else if (c === " ") { + numSpaces++; + } + } + return "\n".repeat(numNewlines) + " ".repeat(numSpaces); +} + +/** + * Format a string in the value position of a JSX prop. + * + * Use the same implementation as convertAttribute from + * babel-helper-builder-react-jsx. 
+ */ +function formatJSXStringValueLiteral(text) { + let result = ""; + for (let i = 0; i < text.length; i++) { + const c = text[i]; + if (c === "\n") { + if (/\s/.test(text[i + 1])) { + result += " "; + while (i < text.length && /\s/.test(text[i + 1])) { + i++; + } + } else { + result += "\n"; + } + } else if (c === "&") { + const {entity, newI} = processEntity(text, i + 1); + result += entity; + i = newI - 1; + } else { + result += c; + } + } + return JSON.stringify(result); +} + +/** + * Starting at a &, see if there's an HTML entity (specified by name, decimal + * char code, or hex char code) and return it if so. + * + * Modified from jsxReadString in babel-parser. + */ +function processEntity(text, indexAfterAmpersand) { + let str = ""; + let count = 0; + let entity; + let i = indexAfterAmpersand; + + if (text[i] === "#") { + let radix = 10; + i++; + let numStart; + if (text[i] === "x") { + radix = 16; + i++; + numStart = i; + while (i < text.length && isHexDigit(text.charCodeAt(i))) { + i++; + } + } else { + numStart = i; + while (i < text.length && isDecimalDigit(text.charCodeAt(i))) { + i++; + } + } + if (text[i] === ";") { + const numStr = text.slice(numStart, i); + if (numStr) { + i++; + entity = String.fromCodePoint(parseInt(numStr, radix)); + } + } + } else { + while (i < text.length && count++ < 10) { + const ch = text[i]; + i++; + if (ch === ";") { + entity = _xhtml2.default.get(str); + break; + } + str += ch; + } + } + + if (!entity) { + return {entity: "&", newI: indexAfterAmpersand}; + } + return {entity, newI: i}; +} + +function isDecimalDigit(code) { + return code >= _charcodes.charCodes.digit0 && code <= _charcodes.charCodes.digit9; +} + +function isHexDigit(code) { + return ( + (code >= _charcodes.charCodes.digit0 && code <= _charcodes.charCodes.digit9) || + (code >= _charcodes.charCodes.lowercaseA && code <= _charcodes.charCodes.lowercaseF) || + (code >= _charcodes.charCodes.uppercaseA && code <= _charcodes.charCodes.uppercaseF) + ); +} diff --git a/node_modules/sucrase/dist/transformers/JestHoistTransformer.js b/node_modules/sucrase/dist/transformers/JestHoistTransformer.js new file mode 100644 index 0000000..fb7f0fd --- /dev/null +++ b/node_modules/sucrase/dist/transformers/JestHoistTransformer.js @@ -0,0 +1,111 @@ +"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; } + +var _types = require('../parser/tokenizer/types'); + + +var _Transformer = require('./Transformer'); var _Transformer2 = _interopRequireDefault(_Transformer); + +const JEST_GLOBAL_NAME = "jest"; +const HOISTED_METHODS = ["mock", "unmock", "enableAutomock", "disableAutomock"]; + +/** + * Implementation of babel-plugin-jest-hoist, which hoists up some jest method + * calls above the imports to allow them to override other imports. 
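 *
 * A hedged illustration (output shape is approximate): a top-level
 * `jest.mock("./foo");` is rewritten in place to
 * `function __jestHoist(){jest.mock("./foo");}`, and getHoistedCode() emits
 * `__jestHoist();` so the mock runs before the transformed imports.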
+ * + * To preserve line numbers, rather than directly moving the jest.mock code, we + * wrap each invocation in a function statement and then call the function from + * the top of the file. + */ + class JestHoistTransformer extends _Transformer2.default { + __init() {this.hoistedFunctionNames = []} + + constructor( + rootTransformer, + tokens, + nameManager, + importProcessor, + ) { + super();this.rootTransformer = rootTransformer;this.tokens = tokens;this.nameManager = nameManager;this.importProcessor = importProcessor;JestHoistTransformer.prototype.__init.call(this);; + } + + process() { + if ( + this.tokens.currentToken().scopeDepth === 0 && + this.tokens.matches4(_types.TokenType.name, _types.TokenType.dot, _types.TokenType.name, _types.TokenType.parenL) && + this.tokens.identifierName() === JEST_GLOBAL_NAME + ) { + // TODO: This only works if imports transform is active, which it will be for jest. + // But if jest adds module support and we no longer need the import transform, this needs fixing. + if (_optionalChain([this, 'access', _ => _.importProcessor, 'optionalAccess', _2 => _2.getGlobalNames, 'call', _3 => _3(), 'optionalAccess', _4 => _4.has, 'call', _5 => _5(JEST_GLOBAL_NAME)])) { + return false; + } + return this.extractHoistedCalls(); + } + + return false; + } + + getHoistedCode() { + if (this.hoistedFunctionNames.length > 0) { + // This will be placed before module interop code, but that's fine since + // imports aren't allowed in module mock factories. + return this.hoistedFunctionNames.map((name) => `${name}();`).join(""); + } + return ""; + } + + /** + * Extracts any methods calls on the jest-object that should be hoisted. + * + * According to the jest docs, https://jestjs.io/docs/en/jest-object#jestmockmodulename-factory-options, + * mock, unmock, enableAutomock, disableAutomock, are the methods that should be hoisted. + * + * We do not apply the same checks of the arguments as babel-plugin-jest-hoist does. + */ + extractHoistedCalls() { + // We're handling a chain of calls where `jest` may or may not need to be inserted for each call + // in the chain, so remove the initial `jest` to make the loop implementation cleaner. + this.tokens.removeToken(); + // Track some state so that multiple non-hoisted chained calls in a row keep their chaining + // syntax. + let followsNonHoistedJestCall = false; + + // Iterate through all chained calls on the jest object. + while (this.tokens.matches3(_types.TokenType.dot, _types.TokenType.name, _types.TokenType.parenL)) { + const methodName = this.tokens.identifierNameAtIndex(this.tokens.currentIndex() + 1); + const shouldHoist = HOISTED_METHODS.includes(methodName); + if (shouldHoist) { + // We've matched e.g. `.mock(...)` or similar call. + // Replace the initial `.` with `function __jestHoist(){jest.` + const hoistedFunctionName = this.nameManager.claimFreeName("__jestHoist"); + this.hoistedFunctionNames.push(hoistedFunctionName); + this.tokens.replaceToken(`function ${hoistedFunctionName}(){${JEST_GLOBAL_NAME}.`); + this.tokens.copyToken(); + this.tokens.copyToken(); + this.rootTransformer.processBalancedCode(); + this.tokens.copyExpectedToken(_types.TokenType.parenR); + this.tokens.appendCode(";}"); + followsNonHoistedJestCall = false; + } else { + // This is a non-hoisted method, so just transform the code as usual. + if (followsNonHoistedJestCall) { + // If we didn't hoist the previous call, we can leave the code as-is to chain off of the + // previous method call. 
It's important to preserve the code here because we don't know + // for sure that the method actually returned the jest object for chaining. + this.tokens.copyToken(); + } else { + // If we hoisted the previous call, we know it returns the jest object back, so we insert + // the identifier `jest` to continue the chain. + this.tokens.replaceToken(`${JEST_GLOBAL_NAME}.`); + } + this.tokens.copyToken(); + this.tokens.copyToken(); + this.rootTransformer.processBalancedCode(); + this.tokens.copyExpectedToken(_types.TokenType.parenR); + followsNonHoistedJestCall = true; + } + } + + return true; + } +} exports.default = JestHoistTransformer; diff --git a/node_modules/sucrase/dist/transformers/NumericSeparatorTransformer.js b/node_modules/sucrase/dist/transformers/NumericSeparatorTransformer.js new file mode 100644 index 0000000..7a30f09 --- /dev/null +++ b/node_modules/sucrase/dist/transformers/NumericSeparatorTransformer.js @@ -0,0 +1,20 @@ +"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }var _types = require('../parser/tokenizer/types'); + +var _Transformer = require('./Transformer'); var _Transformer2 = _interopRequireDefault(_Transformer); + + class NumericSeparatorTransformer extends _Transformer2.default { + constructor( tokens) { + super();this.tokens = tokens;; + } + + process() { + if (this.tokens.matches1(_types.TokenType.num)) { + const code = this.tokens.currentTokenCode(); + if (code.includes("_")) { + this.tokens.replaceToken(code.replace(/_/g, "")); + return true; + } + } + return false; + } +} exports.default = NumericSeparatorTransformer; diff --git a/node_modules/sucrase/dist/transformers/OptionalCatchBindingTransformer.js b/node_modules/sucrase/dist/transformers/OptionalCatchBindingTransformer.js new file mode 100644 index 0000000..79ae8bc --- /dev/null +++ b/node_modules/sucrase/dist/transformers/OptionalCatchBindingTransformer.js @@ -0,0 +1,19 @@ +"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } +var _types = require('../parser/tokenizer/types'); + +var _Transformer = require('./Transformer'); var _Transformer2 = _interopRequireDefault(_Transformer); + + class OptionalCatchBindingTransformer extends _Transformer2.default { + constructor( tokens, nameManager) { + super();this.tokens = tokens;this.nameManager = nameManager;; + } + + process() { + if (this.tokens.matches2(_types.TokenType._catch, _types.TokenType.braceL)) { + this.tokens.copyToken(); + this.tokens.appendCode(` (${this.nameManager.claimFreeName("e")})`); + return true; + } + return false; + } +} exports.default = OptionalCatchBindingTransformer; diff --git a/node_modules/sucrase/dist/transformers/OptionalChainingNullishTransformer.js b/node_modules/sucrase/dist/transformers/OptionalChainingNullishTransformer.js new file mode 100644 index 0000000..3a68a0a --- /dev/null +++ b/node_modules/sucrase/dist/transformers/OptionalChainingNullishTransformer.js @@ -0,0 +1,155 @@ +"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } +var _types = require('../parser/tokenizer/types'); + +var _Transformer = require('./Transformer'); var _Transformer2 = _interopRequireDefault(_Transformer); + +/** + * Transformer supporting the optional chaining and nullish coalescing operators. + * + * Tech plan here: + * https://github.com/alangpierce/sucrase/wiki/Sucrase-Optional-Chaining-and-Nullish-Coalescing-Technical-Plan + * + * The prefix and suffix code snippets are handled by TokenProcessor, and this transformer handles + * the operators themselves. + */ + class OptionalChainingNullishTransformer extends _Transformer2.default { + constructor( tokens, nameManager) { + super();this.tokens = tokens;this.nameManager = nameManager;; + } + + process() { + if (this.tokens.matches1(_types.TokenType.nullishCoalescing)) { + const token = this.tokens.currentToken(); + if (this.tokens.tokens[token.nullishStartIndex].isAsyncOperation) { + this.tokens.replaceTokenTrimmingLeftWhitespace(", async () => ("); + } else { + this.tokens.replaceTokenTrimmingLeftWhitespace(", () => ("); + } + return true; + } + if (this.tokens.matches1(_types.TokenType._delete)) { + const nextToken = this.tokens.tokenAtRelativeIndex(1); + if (nextToken.isOptionalChainStart) { + this.tokens.removeInitialToken(); + return true; + } + } + const token = this.tokens.currentToken(); + const chainStart = token.subscriptStartIndex; + if ( + chainStart != null && + this.tokens.tokens[chainStart].isOptionalChainStart && + // Super subscripts can't be optional (since super is never null/undefined), and the syntax + // relies on the subscript being intact, so leave this token alone. + this.tokens.tokenAtRelativeIndex(-1).type !== _types.TokenType._super + ) { + const param = this.nameManager.claimFreeName("_"); + let arrowStartSnippet; + if ( + chainStart > 0 && + this.tokens.matches1AtIndex(chainStart - 1, _types.TokenType._delete) && + this.isLastSubscriptInChain() + ) { + // Delete operations are special: we already removed the delete keyword, and to still + // perform a delete, we need to insert a delete in the very last part of the chain, which + // in correct code will always be a property access. 
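        // A hedged illustration (approximate output): `delete a?.b.c` compiles to
        //   _optionalChain([a, 'optionalAccess', _ => _.b, 'access', _2 => delete _2.c])
        // reusing the same _optionalChain helper that appears at the top of the
        // compiled JestHoistTransformer.js elsewhere in this diff.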
+ arrowStartSnippet = `${param} => delete ${param}`; + } else { + arrowStartSnippet = `${param} => ${param}`; + } + if (this.tokens.tokens[chainStart].isAsyncOperation) { + arrowStartSnippet = `async ${arrowStartSnippet}`; + } + if ( + this.tokens.matches2(_types.TokenType.questionDot, _types.TokenType.parenL) || + this.tokens.matches2(_types.TokenType.questionDot, _types.TokenType.lessThan) + ) { + if (this.justSkippedSuper()) { + this.tokens.appendCode(".bind(this)"); + } + this.tokens.replaceTokenTrimmingLeftWhitespace(`, 'optionalCall', ${arrowStartSnippet}`); + } else if (this.tokens.matches2(_types.TokenType.questionDot, _types.TokenType.bracketL)) { + this.tokens.replaceTokenTrimmingLeftWhitespace(`, 'optionalAccess', ${arrowStartSnippet}`); + } else if (this.tokens.matches1(_types.TokenType.questionDot)) { + this.tokens.replaceTokenTrimmingLeftWhitespace(`, 'optionalAccess', ${arrowStartSnippet}.`); + } else if (this.tokens.matches1(_types.TokenType.dot)) { + this.tokens.replaceTokenTrimmingLeftWhitespace(`, 'access', ${arrowStartSnippet}.`); + } else if (this.tokens.matches1(_types.TokenType.bracketL)) { + this.tokens.replaceTokenTrimmingLeftWhitespace(`, 'access', ${arrowStartSnippet}[`); + } else if (this.tokens.matches1(_types.TokenType.parenL)) { + if (this.justSkippedSuper()) { + this.tokens.appendCode(".bind(this)"); + } + this.tokens.replaceTokenTrimmingLeftWhitespace(`, 'call', ${arrowStartSnippet}(`); + } else { + throw new Error("Unexpected subscript operator in optional chain."); + } + return true; + } + return false; + } + + /** + * Determine if the current token is the last of its chain, so that we know whether it's eligible + * to have a delete op inserted. + * + * We can do this by walking forward until we determine one way or another. Each + * isOptionalChainStart token must be paired with exactly one isOptionalChainEnd token after it in + * a nesting way, so we can track depth and walk to the end of the chain (the point where the + * depth goes negative) and see if any other subscript token is after us in the chain. + */ + isLastSubscriptInChain() { + let depth = 0; + for (let i = this.tokens.currentIndex() + 1; ; i++) { + if (i >= this.tokens.tokens.length) { + throw new Error("Reached the end of the code while finding the end of the access chain."); + } + if (this.tokens.tokens[i].isOptionalChainStart) { + depth++; + } else if (this.tokens.tokens[i].isOptionalChainEnd) { + depth--; + } + if (depth < 0) { + return true; + } + + // This subscript token is a later one in the same chain. + if (depth === 0 && this.tokens.tokens[i].subscriptStartIndex != null) { + return false; + } + } + } + + /** + * Determine if we are the open-paren in an expression like super.a()?.b. + * + * We can do this by walking backward to find the previous subscript. If that subscript was + * preceded by a super, then we must be the subscript after it, so if this is a call expression, + * we'll need to attach the right context. + */ + justSkippedSuper() { + let depth = 0; + let index = this.tokens.currentIndex() - 1; + while (true) { + if (index < 0) { + throw new Error( + "Reached the start of the code while finding the start of the access chain.", + ); + } + if (this.tokens.tokens[index].isOptionalChainStart) { + depth--; + } else if (this.tokens.tokens[index].isOptionalChainEnd) { + depth++; + } + if (depth < 0) { + return false; + } + + // This subscript token is a later one in the same chain. 
+ if (depth === 0 && this.tokens.tokens[index].subscriptStartIndex != null) { + return this.tokens.tokens[index - 1].type === _types.TokenType._super; + } + index--; + } + } +} exports.default = OptionalChainingNullishTransformer; diff --git a/node_modules/sucrase/dist/transformers/ReactDisplayNameTransformer.js b/node_modules/sucrase/dist/transformers/ReactDisplayNameTransformer.js new file mode 100644 index 0000000..faf6f65 --- /dev/null +++ b/node_modules/sucrase/dist/transformers/ReactDisplayNameTransformer.js @@ -0,0 +1,160 @@ +"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +var _tokenizer = require('../parser/tokenizer'); +var _types = require('../parser/tokenizer/types'); + + +var _Transformer = require('./Transformer'); var _Transformer2 = _interopRequireDefault(_Transformer); + +/** + * Implementation of babel-plugin-transform-react-display-name, which adds a + * display name to usages of React.createClass and createReactClass. + */ + class ReactDisplayNameTransformer extends _Transformer2.default { + constructor( + rootTransformer, + tokens, + importProcessor, + options, + ) { + super();this.rootTransformer = rootTransformer;this.tokens = tokens;this.importProcessor = importProcessor;this.options = options;; + } + + process() { + const startIndex = this.tokens.currentIndex(); + if (this.tokens.identifierName() === "createReactClass") { + const newName = + this.importProcessor && this.importProcessor.getIdentifierReplacement("createReactClass"); + if (newName) { + this.tokens.replaceToken(`(0, ${newName})`); + } else { + this.tokens.copyToken(); + } + this.tryProcessCreateClassCall(startIndex); + return true; + } + if ( + this.tokens.matches3(_types.TokenType.name, _types.TokenType.dot, _types.TokenType.name) && + this.tokens.identifierName() === "React" && + this.tokens.identifierNameAtIndex(this.tokens.currentIndex() + 2) === "createClass" + ) { + const newName = this.importProcessor + ? this.importProcessor.getIdentifierReplacement("React") || "React" + : "React"; + if (newName) { + this.tokens.replaceToken(newName); + this.tokens.copyToken(); + this.tokens.copyToken(); + } else { + this.tokens.copyToken(); + this.tokens.copyToken(); + this.tokens.copyToken(); + } + this.tryProcessCreateClassCall(startIndex); + return true; + } + return false; + } + + /** + * This is called with the token position at the open-paren. + */ + tryProcessCreateClassCall(startIndex) { + const displayName = this.findDisplayName(startIndex); + if (!displayName) { + return; + } + + if (this.classNeedsDisplayName()) { + this.tokens.copyExpectedToken(_types.TokenType.parenL); + this.tokens.copyExpectedToken(_types.TokenType.braceL); + this.tokens.appendCode(`displayName: '${displayName}',`); + this.rootTransformer.processBalancedCode(); + this.tokens.copyExpectedToken(_types.TokenType.braceR); + this.tokens.copyExpectedToken(_types.TokenType.parenR); + } + } + + findDisplayName(startIndex) { + if (startIndex < 2) { + return null; + } + if (this.tokens.matches2AtIndex(startIndex - 2, _types.TokenType.name, _types.TokenType.eq)) { + // This is an assignment (or declaration) and the LHS is either an identifier or a member + // expression ending in an identifier, so use that identifier name. 
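      // A hedged illustration: for `const Foo = createReactClass({render() {}});`,
      // "Foo" is picked up here and tryProcessCreateClassCall() rewrites the object
      // literal to roughly `{displayName: 'Foo', render() {}}`.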
+ return this.tokens.identifierNameAtIndex(startIndex - 2); + } + if ( + startIndex >= 2 && + this.tokens.tokens[startIndex - 2].identifierRole === _tokenizer.IdentifierRole.ObjectKey + ) { + // This is an object literal value. + return this.tokens.identifierNameAtIndex(startIndex - 2); + } + if (this.tokens.matches2AtIndex(startIndex - 2, _types.TokenType._export, _types.TokenType._default)) { + return this.getDisplayNameFromFilename(); + } + return null; + } + + getDisplayNameFromFilename() { + const filePath = this.options.filePath || "unknown"; + const pathSegments = filePath.split("/"); + const filename = pathSegments[pathSegments.length - 1]; + const dotIndex = filename.lastIndexOf("."); + const baseFilename = dotIndex === -1 ? filename : filename.slice(0, dotIndex); + if (baseFilename === "index" && pathSegments[pathSegments.length - 2]) { + return pathSegments[pathSegments.length - 2]; + } else { + return baseFilename; + } + } + + /** + * We only want to add a display name when this is a function call containing + * one argument, which is an object literal without `displayName` as an + * existing key. + */ + classNeedsDisplayName() { + let index = this.tokens.currentIndex(); + if (!this.tokens.matches2(_types.TokenType.parenL, _types.TokenType.braceL)) { + return false; + } + // The block starts on the {, and we expect any displayName key to be in + // that context. We need to ignore other other contexts to avoid matching + // nested displayName keys. + const objectStartIndex = index + 1; + const objectContextId = this.tokens.tokens[objectStartIndex].contextId; + if (objectContextId == null) { + throw new Error("Expected non-null context ID on object open-brace."); + } + + for (; index < this.tokens.tokens.length; index++) { + const token = this.tokens.tokens[index]; + if (token.type === _types.TokenType.braceR && token.contextId === objectContextId) { + index++; + break; + } + + if ( + this.tokens.identifierNameAtIndex(index) === "displayName" && + this.tokens.tokens[index].identifierRole === _tokenizer.IdentifierRole.ObjectKey && + token.contextId === objectContextId + ) { + // We found a displayName key, so bail out. + return false; + } + } + + if (index === this.tokens.tokens.length) { + throw new Error("Unexpected end of input when processing React class."); + } + + // If we got this far, we know we have createClass with an object with no + // display name, so we want to proceed as long as that was the only argument. + return ( + this.tokens.matches1AtIndex(index, _types.TokenType.parenR) || + this.tokens.matches2AtIndex(index, _types.TokenType.comma, _types.TokenType.parenR) + ); + } +} exports.default = ReactDisplayNameTransformer; diff --git a/node_modules/sucrase/dist/transformers/ReactHotLoaderTransformer.js b/node_modules/sucrase/dist/transformers/ReactHotLoaderTransformer.js new file mode 100644 index 0000000..3657c49 --- /dev/null +++ b/node_modules/sucrase/dist/transformers/ReactHotLoaderTransformer.js @@ -0,0 +1,69 @@ +"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; }var _tokenizer = require('../parser/tokenizer'); + +var _Transformer = require('./Transformer'); var _Transformer2 = _interopRequireDefault(_Transformer); + + class ReactHotLoaderTransformer extends _Transformer2.default { + __init() {this.extractedDefaultExportName = null} + + constructor( tokens, filePath) { + super();this.tokens = tokens;this.filePath = filePath;ReactHotLoaderTransformer.prototype.__init.call(this);; + } + + setExtractedDefaultExportName(extractedDefaultExportName) { + this.extractedDefaultExportName = extractedDefaultExportName; + } + + getPrefixCode() { + return ` + (function () { + var enterModule = require('react-hot-loader').enterModule; + enterModule && enterModule(module); + })();` + .replace(/\s+/g, " ") + .trim(); + } + + getSuffixCode() { + const topLevelNames = new Set(); + for (const token of this.tokens.tokens) { + if ( + !token.isType && + _tokenizer.isTopLevelDeclaration.call(void 0, token) && + token.identifierRole !== _tokenizer.IdentifierRole.ImportDeclaration + ) { + topLevelNames.add(this.tokens.identifierNameForToken(token)); + } + } + const namesToRegister = Array.from(topLevelNames).map((name) => ({ + variableName: name, + uniqueLocalName: name, + })); + if (this.extractedDefaultExportName) { + namesToRegister.push({ + variableName: this.extractedDefaultExportName, + uniqueLocalName: "default", + }); + } + return ` +;(function () { + var reactHotLoader = require('react-hot-loader').default; + var leaveModule = require('react-hot-loader').leaveModule; + if (!reactHotLoader) { + return; + } +${namesToRegister + .map( + ({variableName, uniqueLocalName}) => + ` reactHotLoader.register(${variableName}, "${uniqueLocalName}", ${JSON.stringify( + this.filePath || "", + )});`, + ) + .join("\n")} + leaveModule(module); +})();`; + } + + process() { + return false; + } +} exports.default = ReactHotLoaderTransformer; diff --git a/node_modules/sucrase/dist/transformers/RootTransformer.js b/node_modules/sucrase/dist/transformers/RootTransformer.js new file mode 100644 index 0000000..2fabfe3 --- /dev/null +++ b/node_modules/sucrase/dist/transformers/RootTransformer.js @@ -0,0 +1,458 @@ +"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + + +var _keywords = require('../parser/tokenizer/keywords'); +var _types = require('../parser/tokenizer/types'); + +var _getClassInfo = require('../util/getClassInfo'); var _getClassInfo2 = _interopRequireDefault(_getClassInfo); +var _CJSImportTransformer = require('./CJSImportTransformer'); var _CJSImportTransformer2 = _interopRequireDefault(_CJSImportTransformer); +var _ESMImportTransformer = require('./ESMImportTransformer'); var _ESMImportTransformer2 = _interopRequireDefault(_ESMImportTransformer); +var _FlowTransformer = require('./FlowTransformer'); var _FlowTransformer2 = _interopRequireDefault(_FlowTransformer); +var _JestHoistTransformer = require('./JestHoistTransformer'); var _JestHoistTransformer2 = _interopRequireDefault(_JestHoistTransformer); +var _JSXTransformer = require('./JSXTransformer'); var _JSXTransformer2 = _interopRequireDefault(_JSXTransformer); +var _NumericSeparatorTransformer = require('./NumericSeparatorTransformer'); var _NumericSeparatorTransformer2 = _interopRequireDefault(_NumericSeparatorTransformer); +var _OptionalCatchBindingTransformer = require('./OptionalCatchBindingTransformer'); var _OptionalCatchBindingTransformer2 = _interopRequireDefault(_OptionalCatchBindingTransformer); +var _OptionalChainingNullishTransformer = require('./OptionalChainingNullishTransformer'); var _OptionalChainingNullishTransformer2 = _interopRequireDefault(_OptionalChainingNullishTransformer); +var _ReactDisplayNameTransformer = require('./ReactDisplayNameTransformer'); var _ReactDisplayNameTransformer2 = _interopRequireDefault(_ReactDisplayNameTransformer); +var _ReactHotLoaderTransformer = require('./ReactHotLoaderTransformer'); var _ReactHotLoaderTransformer2 = _interopRequireDefault(_ReactHotLoaderTransformer); + +var _TypeScriptTransformer = require('./TypeScriptTransformer'); var _TypeScriptTransformer2 = _interopRequireDefault(_TypeScriptTransformer); + + + + + + + + + class RootTransformer { + __init() {this.transformers = []} + + + __init2() {this.generatedVariables = []} + + + + + + constructor( + sucraseContext, + transforms, + enableLegacyBabel5ModuleInterop, + options, + ) {;RootTransformer.prototype.__init.call(this);RootTransformer.prototype.__init2.call(this); + this.nameManager = sucraseContext.nameManager; + this.helperManager = sucraseContext.helperManager; + const {tokenProcessor, importProcessor} = sucraseContext; + this.tokens = tokenProcessor; + this.isImportsTransformEnabled = transforms.includes("imports"); + this.isReactHotLoaderTransformEnabled = transforms.includes("react-hot-loader"); + this.disableESTransforms = Boolean(options.disableESTransforms); + + if (!options.disableESTransforms) { + this.transformers.push( + new (0, _OptionalChainingNullishTransformer2.default)(tokenProcessor, this.nameManager), + ); + this.transformers.push(new (0, _NumericSeparatorTransformer2.default)(tokenProcessor)); + this.transformers.push(new (0, _OptionalCatchBindingTransformer2.default)(tokenProcessor, this.nameManager)); + } + + if (transforms.includes("jsx")) { + if (options.jsxRuntime !== "preserve") { + this.transformers.push( + new (0, _JSXTransformer2.default)(this, tokenProcessor, importProcessor, this.nameManager, options), + ); + } + this.transformers.push( + new (0, _ReactDisplayNameTransformer2.default)(this, tokenProcessor, importProcessor, options), + ); + } + + let reactHotLoaderTransformer = null; + if (transforms.includes("react-hot-loader")) { + if (!options.filePath) { + throw new Error("filePath is required when 
using the react-hot-loader transform."); + } + reactHotLoaderTransformer = new (0, _ReactHotLoaderTransformer2.default)(tokenProcessor, options.filePath); + this.transformers.push(reactHotLoaderTransformer); + } + + // Note that we always want to enable the imports transformer, even when the import transform + // itself isn't enabled, since we need to do type-only import pruning for both Flow and + // TypeScript. + if (transforms.includes("imports")) { + if (importProcessor === null) { + throw new Error("Expected non-null importProcessor with imports transform enabled."); + } + this.transformers.push( + new (0, _CJSImportTransformer2.default)( + this, + tokenProcessor, + importProcessor, + this.nameManager, + this.helperManager, + reactHotLoaderTransformer, + enableLegacyBabel5ModuleInterop, + Boolean(options.enableLegacyTypeScriptModuleInterop), + transforms.includes("typescript"), + Boolean(options.preserveDynamicImport), + ), + ); + } else { + this.transformers.push( + new (0, _ESMImportTransformer2.default)( + tokenProcessor, + this.nameManager, + this.helperManager, + reactHotLoaderTransformer, + transforms.includes("typescript"), + options, + ), + ); + } + + if (transforms.includes("flow")) { + this.transformers.push( + new (0, _FlowTransformer2.default)(this, tokenProcessor, transforms.includes("imports")), + ); + } + if (transforms.includes("typescript")) { + this.transformers.push( + new (0, _TypeScriptTransformer2.default)(this, tokenProcessor, transforms.includes("imports")), + ); + } + if (transforms.includes("jest")) { + this.transformers.push( + new (0, _JestHoistTransformer2.default)(this, tokenProcessor, this.nameManager, importProcessor), + ); + } + } + + transform() { + this.tokens.reset(); + this.processBalancedCode(); + const shouldAddUseStrict = this.isImportsTransformEnabled; + // "use strict" always needs to be first, so override the normal transformer order. + let prefix = shouldAddUseStrict ? '"use strict";' : ""; + for (const transformer of this.transformers) { + prefix += transformer.getPrefixCode(); + } + prefix += this.helperManager.emitHelpers(); + prefix += this.generatedVariables.map((v) => ` var ${v};`).join(""); + for (const transformer of this.transformers) { + prefix += transformer.getHoistedCode(); + } + let suffix = ""; + for (const transformer of this.transformers) { + suffix += transformer.getSuffixCode(); + } + const result = this.tokens.finish(); + let {code} = result; + if (code.startsWith("#!")) { + let newlineIndex = code.indexOf("\n"); + if (newlineIndex === -1) { + newlineIndex = code.length; + code += "\n"; + } + return { + code: code.slice(0, newlineIndex + 1) + prefix + code.slice(newlineIndex + 1) + suffix, + // The hashbang line has no tokens, so shifting the tokens to account + // for prefix can happen normally. 
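        // A hedged illustration: for input starting with "#!/usr/bin/env node\n",
        // the hashbang stays on the first line and the prefix (e.g. '"use strict";'
        // plus generated helpers and hoisted code) is spliced in right after that
        // first newline.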
+ mappings: this.shiftMappings(result.mappings, prefix.length), + }; + } else { + return { + code: prefix + code + suffix, + mappings: this.shiftMappings(result.mappings, prefix.length), + }; + } + } + + processBalancedCode() { + let braceDepth = 0; + let parenDepth = 0; + while (!this.tokens.isAtEnd()) { + if (this.tokens.matches1(_types.TokenType.braceL) || this.tokens.matches1(_types.TokenType.dollarBraceL)) { + braceDepth++; + } else if (this.tokens.matches1(_types.TokenType.braceR)) { + if (braceDepth === 0) { + return; + } + braceDepth--; + } + if (this.tokens.matches1(_types.TokenType.parenL)) { + parenDepth++; + } else if (this.tokens.matches1(_types.TokenType.parenR)) { + if (parenDepth === 0) { + return; + } + parenDepth--; + } + this.processToken(); + } + } + + processToken() { + if (this.tokens.matches1(_types.TokenType._class)) { + this.processClass(); + return; + } + for (const transformer of this.transformers) { + const wasProcessed = transformer.process(); + if (wasProcessed) { + return; + } + } + this.tokens.copyToken(); + } + + /** + * Skip past a class with a name and return that name. + */ + processNamedClass() { + if (!this.tokens.matches2(_types.TokenType._class, _types.TokenType.name)) { + throw new Error("Expected identifier for exported class name."); + } + const name = this.tokens.identifierNameAtIndex(this.tokens.currentIndex() + 1); + this.processClass(); + return name; + } + + processClass() { + const classInfo = _getClassInfo2.default.call(void 0, this, this.tokens, this.nameManager, this.disableESTransforms); + + // Both static and instance initializers need a class name to use to invoke the initializer, so + // assign to one if necessary. + const needsCommaExpression = + (classInfo.headerInfo.isExpression || !classInfo.headerInfo.className) && + classInfo.staticInitializerNames.length + classInfo.instanceInitializerNames.length > 0; + + let className = classInfo.headerInfo.className; + if (needsCommaExpression) { + className = this.nameManager.claimFreeName("_class"); + this.generatedVariables.push(className); + this.tokens.appendCode(` (${className} =`); + } + + const classToken = this.tokens.currentToken(); + const contextId = classToken.contextId; + if (contextId == null) { + throw new Error("Expected class to have a context ID."); + } + this.tokens.copyExpectedToken(_types.TokenType._class); + while (!this.tokens.matchesContextIdAndLabel(_types.TokenType.braceL, contextId)) { + this.processToken(); + } + + this.processClassBody(classInfo, className); + + const staticInitializerStatements = classInfo.staticInitializerNames.map( + (name) => `${className}.${name}()`, + ); + if (needsCommaExpression) { + this.tokens.appendCode( + `, ${staticInitializerStatements.map((s) => `${s}, `).join("")}${className})`, + ); + } else if (classInfo.staticInitializerNames.length > 0) { + this.tokens.appendCode(` ${staticInitializerStatements.map((s) => `${s};`).join(" ")}`); + } + } + + /** + * We want to just handle class fields in all contexts, since TypeScript supports them. Later, + * when some JS implementations support class fields, this should be made optional. 
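 *
 * A hedged illustration (approximate output): `class A { x = 1; }` becomes roughly
 * `class A {constructor() { A.prototype.__init.call(this); }__init() {this.x = 1}}`,
 * the same __init pattern visible in this compiled file's own classes.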
+ */ + processClassBody(classInfo, className) { + const { + headerInfo, + constructorInsertPos, + constructorInitializerStatements, + fields, + instanceInitializerNames, + rangesToRemove, + } = classInfo; + let fieldIndex = 0; + let rangeToRemoveIndex = 0; + const classContextId = this.tokens.currentToken().contextId; + if (classContextId == null) { + throw new Error("Expected non-null context ID on class."); + } + this.tokens.copyExpectedToken(_types.TokenType.braceL); + if (this.isReactHotLoaderTransformEnabled) { + this.tokens.appendCode( + "__reactstandin__regenerateByEval(key, code) {this[key] = eval(code);}", + ); + } + + const needsConstructorInit = + constructorInitializerStatements.length + instanceInitializerNames.length > 0; + + if (constructorInsertPos === null && needsConstructorInit) { + const constructorInitializersCode = this.makeConstructorInitCode( + constructorInitializerStatements, + instanceInitializerNames, + className, + ); + if (headerInfo.hasSuperclass) { + const argsName = this.nameManager.claimFreeName("args"); + this.tokens.appendCode( + `constructor(...${argsName}) { super(...${argsName}); ${constructorInitializersCode}; }`, + ); + } else { + this.tokens.appendCode(`constructor() { ${constructorInitializersCode}; }`); + } + } + + while (!this.tokens.matchesContextIdAndLabel(_types.TokenType.braceR, classContextId)) { + if (fieldIndex < fields.length && this.tokens.currentIndex() === fields[fieldIndex].start) { + let needsCloseBrace = false; + if (this.tokens.matches1(_types.TokenType.bracketL)) { + this.tokens.copyTokenWithPrefix(`${fields[fieldIndex].initializerName}() {this`); + } else if (this.tokens.matches1(_types.TokenType.string) || this.tokens.matches1(_types.TokenType.num)) { + this.tokens.copyTokenWithPrefix(`${fields[fieldIndex].initializerName}() {this[`); + needsCloseBrace = true; + } else { + this.tokens.copyTokenWithPrefix(`${fields[fieldIndex].initializerName}() {this.`); + } + while (this.tokens.currentIndex() < fields[fieldIndex].end) { + if (needsCloseBrace && this.tokens.currentIndex() === fields[fieldIndex].equalsIndex) { + this.tokens.appendCode("]"); + } + this.processToken(); + } + this.tokens.appendCode("}"); + fieldIndex++; + } else if ( + rangeToRemoveIndex < rangesToRemove.length && + this.tokens.currentIndex() >= rangesToRemove[rangeToRemoveIndex].start + ) { + if (this.tokens.currentIndex() < rangesToRemove[rangeToRemoveIndex].end) { + this.tokens.removeInitialToken(); + } + while (this.tokens.currentIndex() < rangesToRemove[rangeToRemoveIndex].end) { + this.tokens.removeToken(); + } + rangeToRemoveIndex++; + } else if (this.tokens.currentIndex() === constructorInsertPos) { + this.tokens.copyToken(); + if (needsConstructorInit) { + this.tokens.appendCode( + `;${this.makeConstructorInitCode( + constructorInitializerStatements, + instanceInitializerNames, + className, + )};`, + ); + } + this.processToken(); + } else { + this.processToken(); + } + } + this.tokens.copyExpectedToken(_types.TokenType.braceR); + } + + makeConstructorInitCode( + constructorInitializerStatements, + instanceInitializerNames, + className, + ) { + return [ + ...constructorInitializerStatements, + ...instanceInitializerNames.map((name) => `${className}.prototype.${name}.call(this)`), + ].join(";"); + } + + /** + * Normally it's ok to simply remove type tokens, but we need to be more careful when dealing with + * arrow function return types since they can confuse the parser. In that case, we want to move + * the close-paren to the same line as the arrow. 
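 *
 * A hedged illustration: given `(a):\n  SomeType => a`, the return type tokens are
 * removed and the close-paren is re-emitted as ") =>", leaving roughly `(a) => a`
 * with the arrow kept on the same line as the paren.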
+ * + * See https://github.com/alangpierce/sucrase/issues/391 for more details. + */ + processPossibleArrowParamEnd() { + if (this.tokens.matches2(_types.TokenType.parenR, _types.TokenType.colon) && this.tokens.tokenAtRelativeIndex(1).isType) { + let nextNonTypeIndex = this.tokens.currentIndex() + 1; + // Look ahead to see if this is an arrow function or something else. + while (this.tokens.tokens[nextNonTypeIndex].isType) { + nextNonTypeIndex++; + } + if (this.tokens.matches1AtIndex(nextNonTypeIndex, _types.TokenType.arrow)) { + this.tokens.removeInitialToken(); + while (this.tokens.currentIndex() < nextNonTypeIndex) { + this.tokens.removeToken(); + } + this.tokens.replaceTokenTrimmingLeftWhitespace(") =>"); + return true; + } + } + return false; + } + + /** + * An async arrow function might be of the form: + * + * async < + * T + * >() => {} + * + * in which case, removing the type parameters will cause a syntax error. Detect this case and + * move the open-paren earlier. + */ + processPossibleAsyncArrowWithTypeParams() { + if ( + !this.tokens.matchesContextual(_keywords.ContextualKeyword._async) && + !this.tokens.matches1(_types.TokenType._async) + ) { + return false; + } + const nextToken = this.tokens.tokenAtRelativeIndex(1); + if (nextToken.type !== _types.TokenType.lessThan || !nextToken.isType) { + return false; + } + + let nextNonTypeIndex = this.tokens.currentIndex() + 1; + // Look ahead to see if this is an arrow function or something else. + while (this.tokens.tokens[nextNonTypeIndex].isType) { + nextNonTypeIndex++; + } + if (this.tokens.matches1AtIndex(nextNonTypeIndex, _types.TokenType.parenL)) { + this.tokens.replaceToken("async ("); + this.tokens.removeInitialToken(); + while (this.tokens.currentIndex() < nextNonTypeIndex) { + this.tokens.removeToken(); + } + this.tokens.removeToken(); + // We ate a ( token, so we need to process the tokens in between and then the ) token so that + // we remain balanced. + this.processBalancedCode(); + this.processToken(); + return true; + } + return false; + } + + processPossibleTypeRange() { + if (this.tokens.currentToken().isType) { + this.tokens.removeInitialToken(); + while (this.tokens.currentToken().isType) { + this.tokens.removeToken(); + } + return true; + } + return false; + } + + shiftMappings( + mappings, + prefixLength, + ) { + for (let i = 0; i < mappings.length; i++) { + const mapping = mappings[i]; + if (mapping !== undefined) { + mappings[i] = mapping + prefixLength; + } + } + return mappings; + } +} exports.default = RootTransformer; diff --git a/node_modules/sucrase/dist/transformers/Transformer.js b/node_modules/sucrase/dist/transformers/Transformer.js new file mode 100644 index 0000000..991d363 --- /dev/null +++ b/node_modules/sucrase/dist/transformers/Transformer.js @@ -0,0 +1,16 @@ +"use strict";Object.defineProperty(exports, "__esModule", {value: true}); class Transformer { + // Return true if anything was processed, false otherwise. + + + getPrefixCode() { + return ""; + } + + getHoistedCode() { + return ""; + } + + getSuffixCode() { + return ""; + } +} exports.default = Transformer; diff --git a/node_modules/sucrase/dist/transformers/TypeScriptTransformer.js b/node_modules/sucrase/dist/transformers/TypeScriptTransformer.js new file mode 100644 index 0000000..9fe5c6e --- /dev/null +++ b/node_modules/sucrase/dist/transformers/TypeScriptTransformer.js @@ -0,0 +1,279 @@ +"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } +var _types = require('../parser/tokenizer/types'); + +var _isIdentifier = require('../util/isIdentifier'); var _isIdentifier2 = _interopRequireDefault(_isIdentifier); + +var _Transformer = require('./Transformer'); var _Transformer2 = _interopRequireDefault(_Transformer); + + class TypeScriptTransformer extends _Transformer2.default { + constructor( + rootTransformer, + tokens, + isImportsTransformEnabled, + ) { + super();this.rootTransformer = rootTransformer;this.tokens = tokens;this.isImportsTransformEnabled = isImportsTransformEnabled;; + } + + process() { + if ( + this.rootTransformer.processPossibleArrowParamEnd() || + this.rootTransformer.processPossibleAsyncArrowWithTypeParams() || + this.rootTransformer.processPossibleTypeRange() + ) { + return true; + } + if ( + this.tokens.matches1(_types.TokenType._public) || + this.tokens.matches1(_types.TokenType._protected) || + this.tokens.matches1(_types.TokenType._private) || + this.tokens.matches1(_types.TokenType._abstract) || + this.tokens.matches1(_types.TokenType._readonly) || + this.tokens.matches1(_types.TokenType._override) || + this.tokens.matches1(_types.TokenType.nonNullAssertion) + ) { + this.tokens.removeInitialToken(); + return true; + } + if (this.tokens.matches1(_types.TokenType._enum) || this.tokens.matches2(_types.TokenType._const, _types.TokenType._enum)) { + this.processEnum(); + return true; + } + if ( + this.tokens.matches2(_types.TokenType._export, _types.TokenType._enum) || + this.tokens.matches3(_types.TokenType._export, _types.TokenType._const, _types.TokenType._enum) + ) { + this.processEnum(true); + return true; + } + return false; + } + + processEnum(isExport = false) { + // We might have "export const enum", so just remove all relevant tokens. + this.tokens.removeInitialToken(); + while (this.tokens.matches1(_types.TokenType._const) || this.tokens.matches1(_types.TokenType._enum)) { + this.tokens.removeToken(); + } + const enumName = this.tokens.identifierName(); + this.tokens.removeToken(); + if (isExport && !this.isImportsTransformEnabled) { + this.tokens.appendCode("export "); + } + this.tokens.appendCode(`var ${enumName}; (function (${enumName})`); + this.tokens.copyExpectedToken(_types.TokenType.braceL); + this.processEnumBody(enumName); + this.tokens.copyExpectedToken(_types.TokenType.braceR); + if (isExport && this.isImportsTransformEnabled) { + this.tokens.appendCode(`)(${enumName} || (exports.${enumName} = ${enumName} = {}));`); + } else { + this.tokens.appendCode(`)(${enumName} || (${enumName} = {}));`); + } + } + + /** + * Transform an enum into equivalent JS. This has complexity in a few places: + * - TS allows string enums, numeric enums, and a mix of the two styles within an enum. + * - Enum keys are allowed to be referenced in later enum values. + * - Enum keys are allowed to be strings. + * - When enum values are omitted, they should follow an auto-increment behavior. + */ + processEnumBody(enumName) { + // Code that can be used to reference the previous enum member, or null if this is the first + // enum member. 
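An illustrative sketch (not from the sucrase sources) of the namespace wrapper that processEnum above emits around the enum body; the member assignments themselves are produced by processEnumBody and the process*EnumMember helpers that follow:

// enum Direction { ... }   is rewritten to roughly:
var Direction; (function (Direction) {
  // ...member assignments emitted by processEnumBody below...
})(Direction || (Direction = {}));

// For `export enum` with the imports transform enabled, the closing call instead
// takes the shape:  })(Direction || (exports.Direction = Direction = {}));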
+ let previousValueCode = null; + while (true) { + if (this.tokens.matches1(_types.TokenType.braceR)) { + break; + } + const {nameStringCode, variableName} = this.extractEnumKeyInfo(this.tokens.currentToken()); + this.tokens.removeInitialToken(); + + if ( + this.tokens.matches3(_types.TokenType.eq, _types.TokenType.string, _types.TokenType.comma) || + this.tokens.matches3(_types.TokenType.eq, _types.TokenType.string, _types.TokenType.braceR) + ) { + this.processStringLiteralEnumMember(enumName, nameStringCode, variableName); + } else if (this.tokens.matches1(_types.TokenType.eq)) { + this.processExplicitValueEnumMember(enumName, nameStringCode, variableName); + } else { + this.processImplicitValueEnumMember( + enumName, + nameStringCode, + variableName, + previousValueCode, + ); + } + if (this.tokens.matches1(_types.TokenType.comma)) { + this.tokens.removeToken(); + } + + if (variableName != null) { + previousValueCode = variableName; + } else { + previousValueCode = `${enumName}[${nameStringCode}]`; + } + } + } + + /** + * Detect name information about this enum key, which will be used to determine which code to emit + * and whether we should declare a variable as part of this declaration. + * + * Some cases to keep in mind: + * - Enum keys can be implicitly referenced later, e.g. `X = 1, Y = X`. In Sucrase, we implement + * this by declaring a variable `X` so that later expressions can use it. + * - In addition to the usual identifier key syntax, enum keys are allowed to be string literals, + * e.g. `"hello world" = 3,`. Template literal syntax is NOT allowed. + * - Even if the enum key is defined as a string literal, it may still be referenced by identifier + * later, e.g. `"X" = 1, Y = X`. That means that we need to detect whether or not a string + * literal is identifier-like and emit a variable if so, even if the declaration did not use an + * identifier. + * - Reserved keywords like `break` are valid enum keys, but are not valid to be referenced later + * and would be a syntax error if we emitted a variable, so we need to skip the variable + * declaration in those cases. + * + * The variableName return value captures these nuances: if non-null, we can and must emit a + * variable declaration, and if null, we can't and shouldn't. + */ + extractEnumKeyInfo(nameToken) { + if (nameToken.type === _types.TokenType.name) { + const name = this.tokens.identifierNameForToken(nameToken); + return { + nameStringCode: `"${name}"`, + variableName: _isIdentifier2.default.call(void 0, name) ? name : null, + }; + } else if (nameToken.type === _types.TokenType.string) { + const name = this.tokens.stringValueForToken(nameToken); + return { + nameStringCode: this.tokens.code.slice(nameToken.start, nameToken.end), + variableName: _isIdentifier2.default.call(void 0, name) ? name : null, + }; + } else { + throw new Error("Expected name or string at beginning of enum element."); + } + } + + /** + * Handle an enum member where the RHS is just a string literal (not omitted, not a number, and + * not a complex expression). This is the typical form for TS string enums, and in this case, we + * do *not* create a reverse mapping. + * + * This is called after deleting the key token, when the token processor is at the equals sign. 
+ * + * Example 1: + * someKey = "some value" + * -> + * const someKey = "some value"; MyEnum["someKey"] = someKey; + * + * Example 2: + * "some key" = "some value" + * -> + * MyEnum["some key"] = "some value"; + */ + processStringLiteralEnumMember( + enumName, + nameStringCode, + variableName, + ) { + if (variableName != null) { + this.tokens.appendCode(`const ${variableName}`); + // = + this.tokens.copyToken(); + // value string + this.tokens.copyToken(); + this.tokens.appendCode(`; ${enumName}[${nameStringCode}] = ${variableName};`); + } else { + this.tokens.appendCode(`${enumName}[${nameStringCode}]`); + // = + this.tokens.copyToken(); + // value string + this.tokens.copyToken(); + this.tokens.appendCode(";"); + } + } + + /** + * Handle an enum member initialized with an expression on the right-hand side (other than a + * string literal). In these cases, we should transform the expression and emit code that sets up + * a reverse mapping. + * + * The TypeScript implementation of this operation distinguishes between expressions that can be + * "constant folded" at compile time (i.e. consist of number literals and simple math operations + * on those numbers) and ones that are dynamic. For constant expressions, it emits the resolved + * numeric value, and auto-incrementing is only allowed in that case. Evaluating expressions at + * compile time would add significant complexity to Sucrase, so Sucrase instead leaves the + * expression as-is, and will later emit something like `MyEnum["previousKey"] + 1` to implement + * auto-incrementing. + * + * This is called after deleting the key token, when the token processor is at the equals sign. + * + * Example 1: + * someKey = 1 + 1 + * -> + * const someKey = 1 + 1; MyEnum[MyEnum["someKey"] = someKey] = "someKey"; + * + * Example 2: + * "some key" = 1 + 1 + * -> + * MyEnum[MyEnum["some key"] = 1 + 1] = "some key"; + */ + processExplicitValueEnumMember( + enumName, + nameStringCode, + variableName, + ) { + const rhsEndIndex = this.tokens.currentToken().rhsEndIndex; + if (rhsEndIndex == null) { + throw new Error("Expected rhsEndIndex on enum assign."); + } + + if (variableName != null) { + this.tokens.appendCode(`const ${variableName}`); + this.tokens.copyToken(); + while (this.tokens.currentIndex() < rhsEndIndex) { + this.rootTransformer.processToken(); + } + this.tokens.appendCode( + `; ${enumName}[${enumName}[${nameStringCode}] = ${variableName}] = ${nameStringCode};`, + ); + } else { + this.tokens.appendCode(`${enumName}[${enumName}[${nameStringCode}]`); + this.tokens.copyToken(); + while (this.tokens.currentIndex() < rhsEndIndex) { + this.rootTransformer.processToken(); + } + this.tokens.appendCode(`] = ${nameStringCode};`); + } + } + + /** + * Handle an enum member with no right-hand side expression. In this case, the value is the + * previous value plus 1, or 0 if there was no previous value. We should also always emit a + * reverse mapping. + * + * Example 1: + * someKey2 + * -> + * const someKey2 = someKey1 + 1; MyEnum[MyEnum["someKey2"] = someKey2] = "someKey2"; + * + * Example 2: + * "some key 2" + * -> + * MyEnum[MyEnum["some key 2"] = someKey1 + 1] = "some key 2"; + */ + processImplicitValueEnumMember( + enumName, + nameStringCode, + variableName, + previousValueCode, + ) { + let valueCode = previousValueCode != null ? 
`${previousValueCode} + 1` : "0"; + if (variableName != null) { + this.tokens.appendCode(`const ${variableName} = ${valueCode}; `); + valueCode = variableName; + } + this.tokens.appendCode( + `${enumName}[${enumName}[${nameStringCode}] = ${valueCode}] = ${nameStringCode};`, + ); + } +} exports.default = TypeScriptTransformer; diff --git a/node_modules/sucrase/dist/types/CJSImportProcessor.d.ts b/node_modules/sucrase/dist/types/CJSImportProcessor.d.ts new file mode 100644 index 0000000..283b48e --- /dev/null +++ b/node_modules/sucrase/dist/types/CJSImportProcessor.d.ts @@ -0,0 +1,66 @@ +import type { HelperManager } from "./HelperManager"; +import type { Options } from "./index"; +import type NameManager from "./NameManager"; +import type TokenProcessor from "./TokenProcessor"; +/** + * Class responsible for preprocessing and bookkeeping import and export declarations within the + * file. + * + * TypeScript uses a simpler mechanism that does not use functions like interopRequireDefault and + * interopRequireWildcard, so we also allow that mode for compatibility. + */ +export default class CJSImportProcessor { + readonly nameManager: NameManager; + readonly tokens: TokenProcessor; + readonly enableLegacyTypeScriptModuleInterop: boolean; + readonly options: Options; + readonly isTypeScriptTransformEnabled: boolean; + readonly helperManager: HelperManager; + private nonTypeIdentifiers; + private importInfoByPath; + private importsToReplace; + private identifierReplacements; + private exportBindingsByLocalName; + constructor(nameManager: NameManager, tokens: TokenProcessor, enableLegacyTypeScriptModuleInterop: boolean, options: Options, isTypeScriptTransformEnabled: boolean, helperManager: HelperManager); + preprocessTokens(): void; + /** + * In TypeScript, import statements that only import types should be removed. This does not count + * bare imports. + */ + pruneTypeOnlyImports(): void; + isTypeName(name: string): boolean; + private generateImportReplacements; + getFreeIdentifierForPath(path: string): string; + private preprocessImportAtIndex; + private preprocessExportAtIndex; + private preprocessVarExportAtIndex; + /** + * Walk this export statement just in case it's an export...from statement. + * If it is, combine it into the import info for that path. Otherwise, just + * bail out; it'll be handled later. + */ + private preprocessNamedExportAtIndex; + private preprocessExportStarAtIndex; + private getNamedImports; + /** + * Get a mutable import info object for this path, creating one if it doesn't + * exist yet. + */ + private getImportInfo; + private addExportBinding; + /** + * Return the code to use for the import for this path, or the empty string if + * the code has already been "claimed" by a previous import. + */ + claimImportCode(importPath: string): string; + getIdentifierReplacement(identifierName: string): string | null; + /** + * Return a string like `exports.foo = exports.bar`. + */ + resolveExportBinding(assignedName: string): string | null; + /** + * Return all imported/exported names where we might be interested in whether usages of those + * names are shadowed. 
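The CJSImportProcessor declared above is internal bookkeeping behind the "imports" transform; as a hedged usage sketch, it is exercised end to end through the public transform() entry point declared later in this diff:

import {transform} from "sucrase";

const {code} = transform(
  'import foo, {bar} from "baz"; export const answer = bar(foo);',
  {transforms: ["imports", "typescript"]},
);
console.log(code); // CommonJS output built on require() and exports.*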
+ */ + getGlobalNames(): Set<string>; +} diff --git a/node_modules/sucrase/dist/types/HelperManager.d.ts b/node_modules/sucrase/dist/types/HelperManager.d.ts new file mode 100644 index 0000000..8ec724d --- /dev/null +++ b/node_modules/sucrase/dist/types/HelperManager.d.ts @@ -0,0 +1,15 @@ +import type NameManager from "./NameManager"; +declare const HELPERS: { + [name: string]: string; +}; +export declare class HelperManager { + readonly nameManager: NameManager; + helperNames: { + [baseName in keyof typeof HELPERS]?: string; + }; + createRequireName: string | null; + constructor(nameManager: NameManager); + getHelperName(baseName: keyof typeof HELPERS): string; + emitHelpers(): string; +} +export {}; diff --git a/node_modules/sucrase/dist/types/NameManager.d.ts b/node_modules/sucrase/dist/types/NameManager.d.ts new file mode 100644 index 0000000..7485bf0 --- /dev/null +++ b/node_modules/sucrase/dist/types/NameManager.d.ts @@ -0,0 +1,7 @@ +import type { Token } from "./parser/tokenizer"; +export default class NameManager { + private readonly usedNames; + constructor(code: string, tokens: Array<Token>); + claimFreeName(name: string): string; + findFreeName(name: string): string; +} diff --git a/node_modules/sucrase/dist/types/Options-gen-types.d.ts b/node_modules/sucrase/dist/types/Options-gen-types.d.ts new file mode 100644 index 0000000..22996e8 --- /dev/null +++ b/node_modules/sucrase/dist/types/Options-gen-types.d.ts @@ -0,0 +1,9 @@ +/** + * This module was automatically generated by `ts-interface-builder` + */ +import * as t from "ts-interface-checker"; +export declare const Transform: t.TUnion; +export declare const SourceMapOptions: t.TIface; +export declare const Options: t.TIface; +declare const exportedTypeSuite: t.ITypeSuite; +export default exportedTypeSuite; diff --git a/node_modules/sucrase/dist/types/Options.d.ts b/node_modules/sucrase/dist/types/Options.d.ts new file mode 100644 index 0000000..5b52bbe --- /dev/null +++ b/node_modules/sucrase/dist/types/Options.d.ts @@ -0,0 +1,84 @@ +export declare type Transform = "jsx" | "typescript" | "flow" | "imports" | "react-hot-loader" | "jest"; +export interface SourceMapOptions { + /** + * The name to use in the "file" field of the source map. This should be the name of the compiled + * file. + */ + compiledFilename: string; +} +export interface Options { + /** + * Unordered array of transform names describing both the allowed syntax + * (where applicable) and the transformation behavior. + */ + transforms: Array<Transform>; + /** + * Opts out of all ES syntax transformations: optional chaining, nullish + * coalescing, class fields, numeric separators, optional catch binding. + */ + disableESTransforms?: boolean; + /** + * Transformation mode for the JSX transform. + * - "classic" refers to the original behavior using `React.createElement`. + * - "automatic" refers to the transform behavior released with React 17, + * where the `jsx` function (or a variation) is automatically imported. + * - "preserve" leaves the JSX as-is. + * + * Default value: "classic". + */ + jsxRuntime?: "classic" | "automatic" | "preserve"; + /** + * Compile code for production use. Currently only applies to the JSX + * transform. + */ + production?: boolean; + /** + * If specified, import path prefix to use in place of "react" when compiling + * JSX with the automatic runtime. + */ + jsxImportSource?: string; + /** + * If specified, function name to use in place of React.createElement when + * compiling JSX with the classic runtime.
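A sketch of the classic-runtime JSX options documented in Options here (the pragma names h and Fragment are illustrative, not defaults):

import {transform} from "sucrase";

const {code} = transform("const el = <>{msg}<span>hi</span></>;", {
  transforms: ["jsx"],
  jsxRuntime: "classic",
  jsxPragma: "h", // used in place of React.createElement
  jsxFragmentPragma: "Fragment", // used in place of React.Fragment
  production: true, // only affects the JSX transform, per the option docs above
});
console.log(code);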
+ */ + jsxPragma?: string; + /** + * If specified, function name to use in place of React.Fragment when + * compiling JSX with the classic runtime. + */ + jsxFragmentPragma?: string; + /** + * If specified, the imports transform does not attempt to change dynamic + * import() expressions into require() calls. + */ + preserveDynamicImport?: boolean; + /** + * Only relevant when targeting ESM (i.e. when the imports transform is *not* + * specified). This flag changes the behavior of TS require imports: + * + * import Foo = require("foo"); + * + * to import createRequire, create a require function, and use that function. + * This is the TS behavior with module: nodenext and makes it easier for the + * same code to target ESM and CJS. + */ + injectCreateRequireForImportRequire?: boolean; + /** + * If true, replicate the import behavior of TypeScript's esModuleInterop: false. + */ + enableLegacyTypeScriptModuleInterop?: boolean; + /** + * If true, replicate the import behavior Babel 5 and babel-plugin-add-module-exports. + */ + enableLegacyBabel5ModuleInterop?: boolean; + /** + * If specified, we also return a RawSourceMap object alongside the code. + * filePath must be specified if this option is enabled. + */ + sourceMapOptions?: SourceMapOptions; + /** + * File path to use in error messages, React display names, and source maps. + */ + filePath?: string; +} +export declare function validateOptions(options: Options): void; diff --git a/node_modules/sucrase/dist/types/TokenProcessor.d.ts b/node_modules/sucrase/dist/types/TokenProcessor.d.ts new file mode 100644 index 0000000..4d09f8d --- /dev/null +++ b/node_modules/sucrase/dist/types/TokenProcessor.d.ts @@ -0,0 +1,87 @@ +import type { HelperManager } from "./HelperManager"; +import type { Token } from "./parser/tokenizer"; +import type { ContextualKeyword } from "./parser/tokenizer/keywords"; +import { TokenType } from "./parser/tokenizer/types"; +export interface TokenProcessorSnapshot { + resultCode: string; + tokenIndex: number; +} +export interface TokenProcessorResult { + code: string; + mappings: Array; +} +export default class TokenProcessor { + readonly code: string; + readonly tokens: Array; + readonly isFlowEnabled: boolean; + readonly disableESTransforms: boolean; + readonly helperManager: HelperManager; + private resultCode; + private resultMappings; + private tokenIndex; + constructor(code: string, tokens: Array, isFlowEnabled: boolean, disableESTransforms: boolean, helperManager: HelperManager); + /** + * Snapshot the token state in a way that can be restored later, useful for + * things like lookahead. + * + * resultMappings do not need to be copied since in all use cases, they will + * be overwritten anyway after restore. + */ + snapshot(): TokenProcessorSnapshot; + restoreToSnapshot(snapshot: TokenProcessorSnapshot): void; + /** + * Remove and return the code generated since the snapshot, leaving the + * current token position in-place. Unlike most TokenProcessor operations, + * this operation can result in input/output line number mismatches because + * the removed code may contain newlines, so this operation should be used + * sparingly. 
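A hedged sketch of the source-map options declared in Options above: sourceMapOptions requires filePath, and the result then carries a RawSourceMap next to the code (the file paths are illustrative):

import {transform} from "sucrase";

const result = transform("export const n: number = 1;", {
  transforms: ["typescript", "imports"],
  filePath: "src/example.ts", // illustrative
  sourceMapOptions: {compiledFilename: "example.js"},
});
console.log(result.code);
console.log(result.sourceMap?.mappings); // line-for-line mappings, see computeSourceMap below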
+ */ + dangerouslyGetAndRemoveCodeSinceSnapshot(snapshot: TokenProcessorSnapshot): string; + reset(): void; + matchesContextualAtIndex(index: number, contextualKeyword: ContextualKeyword): boolean; + identifierNameAtIndex(index: number): string; + identifierNameAtRelativeIndex(relativeIndex: number): string; + identifierName(): string; + identifierNameForToken(token: Token): string; + rawCodeForToken(token: Token): string; + stringValueAtIndex(index: number): string; + stringValue(): string; + stringValueForToken(token: Token): string; + matches1AtIndex(index: number, t1: TokenType): boolean; + matches2AtIndex(index: number, t1: TokenType, t2: TokenType): boolean; + matches3AtIndex(index: number, t1: TokenType, t2: TokenType, t3: TokenType): boolean; + matches1(t1: TokenType): boolean; + matches2(t1: TokenType, t2: TokenType): boolean; + matches3(t1: TokenType, t2: TokenType, t3: TokenType): boolean; + matches4(t1: TokenType, t2: TokenType, t3: TokenType, t4: TokenType): boolean; + matches5(t1: TokenType, t2: TokenType, t3: TokenType, t4: TokenType, t5: TokenType): boolean; + matchesContextual(contextualKeyword: ContextualKeyword): boolean; + matchesContextIdAndLabel(type: TokenType, contextId: number): boolean; + previousWhitespaceAndComments(): string; + replaceToken(newCode: string): void; + replaceTokenTrimmingLeftWhitespace(newCode: string): void; + removeInitialToken(): void; + removeToken(): void; + /** + * Remove all code until the next }, accounting for balanced braces. + */ + removeBalancedCode(): void; + copyExpectedToken(tokenType: TokenType): void; + copyToken(): void; + copyTokenWithPrefix(prefix: string): void; + private appendTokenPrefix; + private appendTokenSuffix; + appendCode(code: string): void; + currentToken(): Token; + currentTokenCode(): string; + tokenAtRelativeIndex(relativeIndex: number): Token; + currentIndex(): number; + /** + * Move to the next token. Only suitable in preprocessing steps. When + * generating new code, you should use copyToken or removeToken. + */ + nextToken(): void; + previousToken(): void; + finish(): TokenProcessorResult; + isAtEnd(): boolean; +} diff --git a/node_modules/sucrase/dist/types/cli.d.ts b/node_modules/sucrase/dist/types/cli.d.ts new file mode 100644 index 0000000..238c75f --- /dev/null +++ b/node_modules/sucrase/dist/types/cli.d.ts @@ -0,0 +1 @@ +export default function run(): void; diff --git a/node_modules/sucrase/dist/types/computeSourceMap.d.ts b/node_modules/sucrase/dist/types/computeSourceMap.d.ts new file mode 100644 index 0000000..1277847 --- /dev/null +++ b/node_modules/sucrase/dist/types/computeSourceMap.d.ts @@ -0,0 +1,17 @@ +import type { SourceMapOptions } from "./index"; +import type { Token } from "./parser/tokenizer"; +import type { RootTransformerResult } from "./transformers/RootTransformer"; +export interface RawSourceMap { + version: number; + file: string; + sources: Array; + sourceRoot?: string; + sourcesContent?: Array; + mappings: string; + names: Array; +} +/** + * Generate a source map indicating that each line maps directly to the original line, + * with the tokens in their new positions. 
+ */ +export default function computeSourceMap({ code: generatedCode, mappings: rawMappings }: RootTransformerResult, filePath: string, options: SourceMapOptions, source: string, tokens: Array): RawSourceMap; diff --git a/node_modules/sucrase/dist/types/identifyShadowedGlobals.d.ts b/node_modules/sucrase/dist/types/identifyShadowedGlobals.d.ts new file mode 100644 index 0000000..a9222d8 --- /dev/null +++ b/node_modules/sucrase/dist/types/identifyShadowedGlobals.d.ts @@ -0,0 +1,12 @@ +import type { Scope } from "./parser/tokenizer/state"; +import type TokenProcessor from "./TokenProcessor"; +/** + * Traverse the given tokens and modify them if necessary to indicate that some names shadow global + * variables. + */ +export default function identifyShadowedGlobals(tokens: TokenProcessor, scopes: Array, globalNames: Set): void; +/** + * We can do a fast up-front check to see if there are any declarations to global names. If not, + * then there's no point in computing scope assignments. + */ +export declare function hasShadowedGlobals(tokens: TokenProcessor, globalNames: Set): boolean; diff --git a/node_modules/sucrase/dist/types/index.d.ts b/node_modules/sucrase/dist/types/index.d.ts new file mode 100644 index 0000000..13af5a6 --- /dev/null +++ b/node_modules/sucrase/dist/types/index.d.ts @@ -0,0 +1,27 @@ +import CJSImportProcessor from "./CJSImportProcessor"; +import { RawSourceMap } from "./computeSourceMap"; +import { HelperManager } from "./HelperManager"; +import NameManager from "./NameManager"; +import type { Scope } from "./parser/tokenizer/state"; +import TokenProcessor from "./TokenProcessor"; +export interface TransformResult { + code: string; + sourceMap?: RawSourceMap; +} +export interface SucraseContext { + tokenProcessor: TokenProcessor; + scopes: Array; + nameManager: NameManager; + importProcessor: CJSImportProcessor | null; + helperManager: HelperManager; +} +export declare type Options = import("./Options").Options; +export declare type SourceMapOptions = import("./Options").SourceMapOptions; +export declare type Transform = import("./Options").Transform; +export declare function getVersion(): string; +export declare function transform(code: string, options: Options): TransformResult; +/** + * Return a string representation of the sucrase tokens, mostly useful for + * diagnostic purposes. 
+ */ +export declare function getFormattedTokens(code: string, options: Options): string; diff --git a/node_modules/sucrase/dist/types/parser/index.d.ts b/node_modules/sucrase/dist/types/parser/index.d.ts new file mode 100644 index 0000000..8d082c0 --- /dev/null +++ b/node_modules/sucrase/dist/types/parser/index.d.ts @@ -0,0 +1,8 @@ +import type { Token } from "./tokenizer/index"; +import type { Scope } from "./tokenizer/state"; +export declare class File { + tokens: Array; + scopes: Array; + constructor(tokens: Array, scopes: Array); +} +export declare function parse(input: string, isJSXEnabled: boolean, isTypeScriptEnabled: boolean, isFlowEnabled: boolean): File; diff --git a/node_modules/sucrase/dist/types/parser/plugins/flow.d.ts b/node_modules/sucrase/dist/types/parser/plugins/flow.d.ts new file mode 100644 index 0000000..df66a39 --- /dev/null +++ b/node_modules/sucrase/dist/types/parser/plugins/flow.d.ts @@ -0,0 +1,27 @@ +import { ContextualKeyword } from "../tokenizer/keywords"; +import { StopState } from "../traverser/expression"; +export declare function flowParseTypeParameterDeclaration(): void; +export declare function flowParseTypeAnnotation(): void; +export declare function flowParseVariance(): void; +export declare function flowParseFunctionBodyAndFinish(funcContextId: number): void; +export declare function flowParseSubscript(startTokenIndex: number, noCalls: boolean, stopState: StopState): void; +export declare function flowStartParseNewArguments(): void; +export declare function flowTryParseStatement(): boolean; +export declare function flowTryParseExportDefaultExpression(): boolean; +export declare function flowParseIdentifierStatement(contextualKeyword: ContextualKeyword): void; +export declare function flowShouldParseExportDeclaration(): boolean; +export declare function flowShouldDisallowExportDefaultSpecifier(): boolean; +export declare function flowParseExportDeclaration(): void; +export declare function flowShouldParseExportStar(): boolean; +export declare function flowParseExportStar(): void; +export declare function flowAfterParseClassSuper(hasSuper: boolean): void; +export declare function flowStartParseObjPropValue(): void; +export declare function flowParseAssignableListItemTypes(): void; +export declare function flowStartParseImportSpecifiers(): void; +export declare function flowParseImportSpecifier(): void; +export declare function flowStartParseFunctionParams(): void; +export declare function flowAfterParseVarHead(): void; +export declare function flowStartParseAsyncArrowFromCallExpression(): void; +export declare function flowParseMaybeAssign(noIn: boolean, isWithinParens: boolean): boolean; +export declare function flowParseArrow(): boolean; +export declare function flowParseSubscripts(startTokenIndex: number, noCalls?: boolean): void; diff --git a/node_modules/sucrase/dist/types/parser/plugins/jsx/index.d.ts b/node_modules/sucrase/dist/types/parser/plugins/jsx/index.d.ts new file mode 100644 index 0000000..2600d0f --- /dev/null +++ b/node_modules/sucrase/dist/types/parser/plugins/jsx/index.d.ts @@ -0,0 +1,2 @@ +export declare function jsxParseElement(): void; +export declare function nextJSXTagToken(): void; diff --git a/node_modules/sucrase/dist/types/parser/plugins/jsx/xhtml.d.ts b/node_modules/sucrase/dist/types/parser/plugins/jsx/xhtml.d.ts new file mode 100644 index 0000000..1190776 --- /dev/null +++ b/node_modules/sucrase/dist/types/parser/plugins/jsx/xhtml.d.ts @@ -0,0 +1,2 @@ +declare const _default: Map; +export default _default; diff --git 
a/node_modules/sucrase/dist/types/parser/plugins/types.d.ts b/node_modules/sucrase/dist/types/parser/plugins/types.d.ts new file mode 100644 index 0000000..fce6440 --- /dev/null +++ b/node_modules/sucrase/dist/types/parser/plugins/types.d.ts @@ -0,0 +1,5 @@ +/** + * Common parser code for TypeScript and Flow. + */ +export declare function typedParseConditional(noIn: boolean): void; +export declare function typedParseParenItem(): void; diff --git a/node_modules/sucrase/dist/types/parser/plugins/typescript.d.ts b/node_modules/sucrase/dist/types/parser/plugins/typescript.d.ts new file mode 100644 index 0000000..b3bdb74 --- /dev/null +++ b/node_modules/sucrase/dist/types/parser/plugins/typescript.d.ts @@ -0,0 +1,49 @@ +import { ContextualKeyword } from "../tokenizer/keywords"; +import { StopState } from "../traverser/expression"; +export declare function tsParseModifiers(allowedModifiers: Array): void; +/** Parses a modifier matching one the given modifier names. */ +export declare function tsParseModifier(allowedModifiers: Array): ContextualKeyword | null; +export declare function tsTryParseTypeParameters(): void; +export declare function tsTryParseTypeAnnotation(): void; +export declare function tsParseTypeAnnotation(): void; +export declare function tsParseType(): void; +export declare function tsParseNonConditionalType(): void; +export declare function tsParseTypeAssertion(): void; +export declare function tsTryParseJSXTypeArgument(): void; +export declare function tsParseImportEqualsDeclaration(): void; +export declare function tsIsDeclarationStart(): boolean; +export declare function tsParseFunctionBodyAndFinish(functionStart: number, funcContextId: number): void; +export declare function tsParseSubscript(startTokenIndex: number, noCalls: boolean, stopState: StopState): void; +export declare function tsTryParseExport(): boolean; +/** + * Parse a TS import specifier, which may be prefixed with "type" and may be of + * the form `foo as bar`. + * + * The number of identifier-like tokens we see happens to be enough to uniquely + * identify the form, so simply count the number of identifiers rather than + * matching the words `type` or `as`. This is particularly important because + * `type` and `as` could each actually be plain identifiers rather than + * keywords. + */ +export declare function tsParseImportSpecifier(): void; +/** + * Just like named import specifiers, export specifiers can have from 1 to 4 + * tokens, inclusive, and the number of tokens determines the role of each token. 
+ */ +export declare function tsParseExportSpecifier(): void; +export declare function tsTryParseExportDefaultExpression(): boolean; +export declare function tsTryParseStatementContent(): boolean; +export declare function tsTryParseClassMemberWithIsStatic(isStatic: boolean): boolean; +export declare function tsParseIdentifierStatement(contextualKeyword: ContextualKeyword): void; +export declare function tsParseExportDeclaration(): void; +export declare function tsAfterParseClassSuper(hasSuper: boolean): void; +export declare function tsStartParseObjPropValue(): void; +export declare function tsStartParseFunctionParams(): void; +export declare function tsAfterParseVarHead(): void; +export declare function tsStartParseAsyncArrowFromCallExpression(): void; +export declare function tsParseMaybeAssign(noIn: boolean, isWithinParens: boolean): boolean; +export declare function tsParseMaybeAssignWithJSX(noIn: boolean, isWithinParens: boolean): boolean; +export declare function tsParseMaybeAssignWithoutJSX(noIn: boolean, isWithinParens: boolean): boolean; +export declare function tsParseArrow(): boolean; +export declare function tsParseAssignableListItemTypes(): void; +export declare function tsParseMaybeDecoratorArguments(): void; diff --git a/node_modules/sucrase/dist/types/parser/tokenizer/index.d.ts b/node_modules/sucrase/dist/types/parser/tokenizer/index.d.ts new file mode 100644 index 0000000..de19727 --- /dev/null +++ b/node_modules/sucrase/dist/types/parser/tokenizer/index.d.ts @@ -0,0 +1,90 @@ +import { ContextualKeyword } from "./keywords"; +import { TokenType } from "./types"; +export declare enum IdentifierRole { + Access = 0, + ExportAccess = 1, + TopLevelDeclaration = 2, + FunctionScopedDeclaration = 3, + BlockScopedDeclaration = 4, + ObjectShorthandTopLevelDeclaration = 5, + ObjectShorthandFunctionScopedDeclaration = 6, + ObjectShorthandBlockScopedDeclaration = 7, + ObjectShorthand = 8, + ImportDeclaration = 9, + ObjectKey = 10, + ImportAccess = 11 +} +/** + * Extra information on jsxTagStart tokens, used to determine which of the three + * jsx functions are called in the automatic transform. 
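An illustrative usage of the automatic-runtime JSX options; the JSXRole values declared just below suggest how the element-creation call is chosen (a single child versus static children, with a fallback when a key follows a prop spread). The "preact" import source is illustrative; the documented default prefix is "react":

import {transform} from "sucrase";

const {code} = transform(
  "const list = <ul>{items.map((i) => <li key={i}>{i}</li>)}</ul>;",
  {transforms: ["jsx"], jsxRuntime: "automatic", jsxImportSource: "preact"},
);
console.log(code);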
+ */ +export declare enum JSXRole { + NoChildren = 0, + OneChild = 1, + StaticChildren = 2, + KeyAfterPropSpread = 3 +} +export declare function isDeclaration(token: Token): boolean; +export declare function isNonTopLevelDeclaration(token: Token): boolean; +export declare function isTopLevelDeclaration(token: Token): boolean; +export declare function isBlockScopedDeclaration(token: Token): boolean; +export declare function isFunctionScopedDeclaration(token: Token): boolean; +export declare function isObjectShorthandDeclaration(token: Token): boolean; +export declare class Token { + constructor(); + type: TokenType; + contextualKeyword: ContextualKeyword; + start: number; + end: number; + scopeDepth: number; + isType: boolean; + identifierRole: IdentifierRole | null; + jsxRole: JSXRole | null; + shadowsGlobal: boolean; + isAsyncOperation: boolean; + contextId: number | null; + rhsEndIndex: number | null; + isExpression: boolean; + numNullishCoalesceStarts: number; + numNullishCoalesceEnds: number; + isOptionalChainStart: boolean; + isOptionalChainEnd: boolean; + subscriptStartIndex: number | null; + nullishStartIndex: number | null; +} +export declare function next(): void; +export declare function nextTemplateToken(): void; +export declare function retokenizeSlashAsRegex(): void; +export declare function pushTypeContext(existingTokensInType: number): boolean; +export declare function popTypeContext(oldIsType: boolean): void; +export declare function eat(type: TokenType): boolean; +export declare function eatTypeToken(tokenType: TokenType): void; +export declare function match(type: TokenType): boolean; +export declare function lookaheadType(): TokenType; +export declare class TypeAndKeyword { + type: TokenType; + contextualKeyword: ContextualKeyword; + constructor(type: TokenType, contextualKeyword: ContextualKeyword); +} +export declare function lookaheadTypeAndKeyword(): TypeAndKeyword; +export declare function nextTokenStart(): number; +export declare function nextTokenStartSince(pos: number): number; +export declare function lookaheadCharCode(): number; +export declare function nextToken(): void; +export declare function skipLineComment(startSkip: number): void; +export declare function skipSpace(): void; +export declare function finishToken(type: TokenType, contextualKeyword?: ContextualKeyword): void; +/** + * Called after `as` expressions in TS; we're switching from a type to a + * non-type context, so a > token may actually be >= . This is needed because >= + * must be tokenized as a > in a type context because of code like + * `const x: Array=[];`, but `a as T >= 1` is a code example where it must be + * treated as >=. + * + * Notably, this only applies to >, not <. In a code snippet like `a as T <= 1`, + * we must NOT tokenize as <, or else the type parser will start parsing a type + * argument and fail. 
+ */ +export declare function rescan_gt(): void; +export declare function getTokenFromCode(code: number): void; +export declare function skipWord(): void; diff --git a/node_modules/sucrase/dist/types/parser/tokenizer/keywords.d.ts b/node_modules/sucrase/dist/types/parser/tokenizer/keywords.d.ts new file mode 100644 index 0000000..e0e7cd0 --- /dev/null +++ b/node_modules/sucrase/dist/types/parser/tokenizer/keywords.d.ts @@ -0,0 +1,43 @@ +export declare enum ContextualKeyword { + NONE = 0, + _abstract = 1, + _accessor = 2, + _as = 3, + _assert = 4, + _asserts = 5, + _async = 6, + _await = 7, + _checks = 8, + _constructor = 9, + _declare = 10, + _enum = 11, + _exports = 12, + _from = 13, + _get = 14, + _global = 15, + _implements = 16, + _infer = 17, + _interface = 18, + _is = 19, + _keyof = 20, + _mixins = 21, + _module = 22, + _namespace = 23, + _of = 24, + _opaque = 25, + _out = 26, + _override = 27, + _private = 28, + _protected = 29, + _proto = 30, + _public = 31, + _readonly = 32, + _require = 33, + _satisfies = 34, + _set = 35, + _static = 36, + _symbol = 37, + _type = 38, + _unique = 39, + _using = 40 +} diff --git a/node_modules/sucrase/dist/types/parser/tokenizer/readWord.d.ts b/node_modules/sucrase/dist/types/parser/tokenizer/readWord.d.ts new file mode 100644 index 0000000..6fdfe90 --- /dev/null +++ b/node_modules/sucrase/dist/types/parser/tokenizer/readWord.d.ts @@ -0,0 +1,7 @@ +/** + * Read an identifier, producing either a name token or matching on one of the existing keywords. + * For performance, we pre-generate big decision tree that we traverse. Each node represents a + * prefix and has 27 values, where the first value is the token or contextual token, if any (-1 if + * not), and the other 26 values are the transitions to other nodes, or -1 to stop. 
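A minimal sketch of looking a word up in a 27-values-per-node prefix tree of the kind described above, assuming each transition stores the start offset of the next node; this is illustrative and is not sucrase's readWord implementation:

// Returns the token/contextual-keyword id stored at the matched node, or -1.
function lookupLowercaseWord(tree: Int32Array, word: string): number {
  let pos = 0; // offset of the current node (27 slots per node)
  for (let i = 0; i < word.length; i++) {
    const letterIndex = word.charCodeAt(i) - 97; // 'a' is char code 97
    if (letterIndex < 0 || letterIndex > 25) {
      return -1; // only plain lowercase words can be keywords here
    }
    const next = tree[pos + 1 + letterIndex]; // slots 1..26 are transitions
    if (next === -1) {
      return -1; // no keyword has this prefix
    }
    pos = next;
  }
  return tree[pos]; // slot 0 holds the token id, or -1 if this is only a prefix
}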
+ */ +export default function readWord(): void; diff --git a/node_modules/sucrase/dist/types/parser/tokenizer/readWordTree.d.ts b/node_modules/sucrase/dist/types/parser/tokenizer/readWordTree.d.ts new file mode 100644 index 0000000..f6fb9b2 --- /dev/null +++ b/node_modules/sucrase/dist/types/parser/tokenizer/readWordTree.d.ts @@ -0,0 +1 @@ +export declare const READ_WORD_TREE: Int32Array; diff --git a/node_modules/sucrase/dist/types/parser/tokenizer/state.d.ts b/node_modules/sucrase/dist/types/parser/tokenizer/state.d.ts new file mode 100644 index 0000000..9b0f07f --- /dev/null +++ b/node_modules/sucrase/dist/types/parser/tokenizer/state.d.ts @@ -0,0 +1,50 @@ +import type { Token } from "./index"; +import { ContextualKeyword } from "./keywords"; +import { TokenType } from "./types"; +export declare class Scope { + startTokenIndex: number; + endTokenIndex: number; + isFunctionScope: boolean; + constructor(startTokenIndex: number, endTokenIndex: number, isFunctionScope: boolean); +} +export declare class StateSnapshot { + readonly potentialArrowAt: number; + readonly noAnonFunctionType: boolean; + readonly inDisallowConditionalTypesContext: boolean; + readonly tokensLength: number; + readonly scopesLength: number; + readonly pos: number; + readonly type: TokenType; + readonly contextualKeyword: ContextualKeyword; + readonly start: number; + readonly end: number; + readonly isType: boolean; + readonly scopeDepth: number; + readonly error: Error | null; + constructor(potentialArrowAt: number, noAnonFunctionType: boolean, inDisallowConditionalTypesContext: boolean, tokensLength: number, scopesLength: number, pos: number, type: TokenType, contextualKeyword: ContextualKeyword, start: number, end: number, isType: boolean, scopeDepth: number, error: Error | null); +} +export default class State { + potentialArrowAt: number; + noAnonFunctionType: boolean; + inDisallowConditionalTypesContext: boolean; + tokens: Array; + scopes: Array; + pos: number; + type: TokenType; + contextualKeyword: ContextualKeyword; + start: number; + end: number; + isType: boolean; + scopeDepth: number; + /** + * If the parser is in an error state, then the token is always tt.eof and all functions can + * keep executing but should be written so they don't get into an infinite loop in this situation. + * + * This approach, combined with the ability to snapshot and restore state, allows us to implement + * backtracking without exceptions and without needing to explicitly propagate error states + * everywhere. + */ + error: Error | null; + snapshot(): StateSnapshot; + restoreFromSnapshot(snapshot: StateSnapshot): void; +} diff --git a/node_modules/sucrase/dist/types/parser/tokenizer/types.d.ts b/node_modules/sucrase/dist/types/parser/tokenizer/types.d.ts new file mode 100644 index 0000000..d778d5d --- /dev/null +++ b/node_modules/sucrase/dist/types/parser/tokenizer/types.d.ts @@ -0,0 +1,126 @@ +/** + * Enum of all token types, with bit fields to signify meaningful properties. 
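The TokenType enum declared just below packs several flags into each value; as a minimal sketch, the constants here mirror the enum members and the lookups are plain bit arithmetic:

const PRECEDENCE_MASK = 15;
const IS_KEYWORD = 16;

function precedenceOf(tokenType: number): number {
  return tokenType & PRECEDENCE_MASK;
}
function isKeyword(tokenType: number): boolean {
  return (tokenType & IS_KEYWORD) !== 0;
}
// For example, logicalOR (37890) carries precedence 2 and logicalAND (38915)
// precedence 3, so && binds tighter than ||; _break (62480) has IS_KEYWORD set
// while name (5632) does not.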
+ */ +export declare enum TokenType { + PRECEDENCE_MASK = 15, + IS_KEYWORD = 16, + IS_ASSIGN = 32, + IS_RIGHT_ASSOCIATIVE = 64, + IS_PREFIX = 128, + IS_POSTFIX = 256, + IS_EXPRESSION_START = 512, + num = 512, + bigint = 1536, + decimal = 2560, + regexp = 3584, + string = 4608, + name = 5632, + eof = 6144, + bracketL = 7680, + bracketR = 8192, + braceL = 9728, + braceBarL = 10752, + braceR = 11264, + braceBarR = 12288, + parenL = 13824, + parenR = 14336, + comma = 15360, + semi = 16384, + colon = 17408, + doubleColon = 18432, + dot = 19456, + question = 20480, + questionDot = 21504, + arrow = 22528, + template = 23552, + ellipsis = 24576, + backQuote = 25600, + dollarBraceL = 27136, + at = 27648, + hash = 29184, + eq = 29728, + assign = 30752, + preIncDec = 32640, + postIncDec = 33664, + bang = 34432, + tilde = 35456, + pipeline = 35841, + nullishCoalescing = 36866, + logicalOR = 37890, + logicalAND = 38915, + bitwiseOR = 39940, + bitwiseXOR = 40965, + bitwiseAND = 41990, + equality = 43015, + lessThan = 44040, + greaterThan = 45064, + relationalOrEqual = 46088, + bitShiftL = 47113, + bitShiftR = 48137, + plus = 49802, + minus = 50826, + modulo = 51723, + star = 52235, + slash = 53259, + exponent = 54348, + jsxName = 55296, + jsxText = 56320, + jsxEmptyText = 57344, + jsxTagStart = 58880, + jsxTagEnd = 59392, + typeParameterStart = 60928, + nonNullAssertion = 61440, + _break = 62480, + _case = 63504, + _catch = 64528, + _continue = 65552, + _debugger = 66576, + _default = 67600, + _do = 68624, + _else = 69648, + _finally = 70672, + _for = 71696, + _function = 73232, + _if = 73744, + _return = 74768, + _switch = 75792, + _throw = 77456, + _try = 77840, + _var = 78864, + _let = 79888, + _const = 80912, + _while = 81936, + _with = 82960, + _new = 84496, + _this = 85520, + _super = 86544, + _class = 87568, + _extends = 88080, + _export = 89104, + _import = 90640, + _yield = 91664, + _null = 92688, + _true = 93712, + _false = 94736, + _in = 95256, + _instanceof = 96280, + _typeof = 97936, + _void = 98960, + _delete = 99984, + _async = 100880, + _get = 101904, + _set = 102928, + _declare = 103952, + _readonly = 104976, + _abstract = 106000, + _static = 107024, + _public = 107536, + _private = 108560, + _protected = 109584, + _override = 110608, + _as = 112144, + _enum = 113168, + _type = 114192, + _implements = 115216 +} +export declare function formatTokenType(tokenType: TokenType): string; diff --git a/node_modules/sucrase/dist/types/parser/traverser/base.d.ts b/node_modules/sucrase/dist/types/parser/traverser/base.d.ts new file mode 100644 index 0000000..e15e892 --- /dev/null +++ b/node_modules/sucrase/dist/types/parser/traverser/base.d.ts @@ -0,0 +1,16 @@ +import State from "../tokenizer/state"; +export declare let isJSXEnabled: boolean; +export declare let isTypeScriptEnabled: boolean; +export declare let isFlowEnabled: boolean; +export declare let state: State; +export declare let input: string; +export declare let nextContextId: number; +export declare function getNextContextId(): number; +export declare function augmentError(error: any): any; +export declare class Loc { + line: number; + column: number; + constructor(line: number, column: number); +} +export declare function locationForIndex(pos: number): Loc; +export declare function initParser(inputCode: string, isJSXEnabledArg: boolean, isTypeScriptEnabledArg: boolean, isFlowEnabledArg: boolean): void; diff --git a/node_modules/sucrase/dist/types/parser/traverser/expression.d.ts 
b/node_modules/sucrase/dist/types/parser/traverser/expression.d.ts new file mode 100644 index 0000000..52e29fb --- /dev/null +++ b/node_modules/sucrase/dist/types/parser/traverser/expression.d.ts @@ -0,0 +1,34 @@ +export declare class StopState { + stop: boolean; + constructor(stop: boolean); +} +export declare function parseExpression(noIn?: boolean): void; +/** + * noIn is used when parsing a for loop so that we don't interpret a following "in" as the binary + * operatior. + * isWithinParens is used to indicate that we're parsing something that might be a comma expression + * or might be an arrow function or might be a Flow type assertion (which requires explicit parens). + * In these cases, we should allow : and ?: after the initial "left" part. + */ +export declare function parseMaybeAssign(noIn?: boolean, isWithinParens?: boolean): boolean; +export declare function baseParseMaybeAssign(noIn: boolean, isWithinParens: boolean): boolean; +export declare function baseParseConditional(noIn: boolean): void; +export declare function parseMaybeUnary(): boolean; +export declare function parseExprSubscripts(): boolean; +export declare function baseParseSubscripts(startTokenIndex: number, noCalls?: boolean): void; +/** Set 'state.stop = true' to indicate that we should stop parsing subscripts. */ +export declare function baseParseSubscript(startTokenIndex: number, noCalls: boolean, stopState: StopState): void; +export declare function atPossibleAsync(): boolean; +export declare function parseCallExpressionArguments(): void; +export declare function parseExprAtom(): boolean; +export declare function parseLiteral(): void; +export declare function parseParenExpression(): void; +export declare function parseArrow(): boolean; +export declare function parseTemplate(): void; +export declare function parseObj(isPattern: boolean, isBlockScope: boolean): void; +export declare function parsePropertyName(objectContextId: number): void; +export declare function parseMethod(functionStart: number, isConstructor: boolean): void; +export declare function parseArrowExpression(startTokenIndex: number): void; +export declare function parseFunctionBodyAndFinish(functionStart: number, funcContextId?: number): void; +export declare function parseFunctionBody(allowExpression: boolean, funcContextId?: number): void; +export declare function parseIdentifier(): void; diff --git a/node_modules/sucrase/dist/types/parser/traverser/index.d.ts b/node_modules/sucrase/dist/types/parser/traverser/index.d.ts new file mode 100644 index 0000000..9f9127d --- /dev/null +++ b/node_modules/sucrase/dist/types/parser/traverser/index.d.ts @@ -0,0 +1,2 @@ +import type { File } from "../index"; +export declare function parseFile(): File; diff --git a/node_modules/sucrase/dist/types/parser/traverser/lval.d.ts b/node_modules/sucrase/dist/types/parser/traverser/lval.d.ts new file mode 100644 index 0000000..eb17756 --- /dev/null +++ b/node_modules/sucrase/dist/types/parser/traverser/lval.d.ts @@ -0,0 +1,9 @@ +import { TokenType } from "../tokenizer/types"; +export declare function parseSpread(): void; +export declare function parseRest(isBlockScope: boolean): void; +export declare function parseBindingIdentifier(isBlockScope: boolean): void; +export declare function parseImportedIdentifier(): void; +export declare function markPriorBindingIdentifier(isBlockScope: boolean): void; +export declare function parseBindingAtom(isBlockScope: boolean): void; +export declare function parseBindingList(close: TokenType, isBlockScope: boolean, allowEmpty?: 
boolean, allowModifiers?: boolean, contextId?: number): void; +export declare function parseMaybeDefault(isBlockScope: boolean, leftAlreadyParsed?: boolean): void; diff --git a/node_modules/sucrase/dist/types/parser/traverser/statement.d.ts b/node_modules/sucrase/dist/types/parser/traverser/statement.d.ts new file mode 100644 index 0000000..801efcc --- /dev/null +++ b/node_modules/sucrase/dist/types/parser/traverser/statement.d.ts @@ -0,0 +1,20 @@ +import { File } from "../index"; +import { TokenType } from "../tokenizer/types"; +export declare function parseTopLevel(): File; +export declare function parseStatement(declaration: boolean): void; +export declare function parseDecorators(): void; +export declare function baseParseMaybeDecoratorArguments(): void; +export declare function parseVarStatement(isBlockScope: boolean): void; +export declare function parseBlock(isFunctionScope?: boolean, contextId?: number): void; +export declare function parseBlockBody(end: TokenType): void; +export declare function parseFunction(functionStart: number, isStatement: boolean, optionalId?: boolean): void; +export declare function parseFunctionParams(allowModifiers?: boolean, funcContextId?: number): void; +export declare function parseClass(isStatement: boolean, optionalId?: boolean): void; +export declare function parseClassPropertyName(classContextId: number): void; +export declare function parsePostMemberNameModifiers(): void; +export declare function parseClassProperty(): void; +export declare function parseExport(): void; +export declare function parseExportFrom(): void; +export declare function baseParseExportStar(): void; +export declare function parseExportSpecifiers(): void; +export declare function parseImport(): void; diff --git a/node_modules/sucrase/dist/types/parser/traverser/util.d.ts b/node_modules/sucrase/dist/types/parser/traverser/util.d.ts new file mode 100644 index 0000000..6cbaf7d --- /dev/null +++ b/node_modules/sucrase/dist/types/parser/traverser/util.d.ts @@ -0,0 +1,17 @@ +import type { ContextualKeyword } from "../tokenizer/keywords"; +import { TokenType } from "../tokenizer/types"; +export declare function isContextual(contextualKeyword: ContextualKeyword): boolean; +export declare function isLookaheadContextual(contextualKeyword: ContextualKeyword): boolean; +export declare function eatContextual(contextualKeyword: ContextualKeyword): boolean; +export declare function expectContextual(contextualKeyword: ContextualKeyword): void; +export declare function canInsertSemicolon(): boolean; +export declare function hasPrecedingLineBreak(): boolean; +export declare function hasFollowingLineBreak(): boolean; +export declare function isLineTerminator(): boolean; +export declare function semicolon(): void; +export declare function expect(type: TokenType): void; +/** + * Transition the parser to an error state. All code needs to be written to naturally unwind in this + * state, which allows us to backtrack without exceptions and without error plumbing everywhere. 
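A minimal sketch of the snapshot-and-restore backtracking pattern described above, written against the State surface declared in state.d.ts; ParserState is a local stand-in interface, not a sucrase export:

interface ParserState {
  error: Error | null;
  snapshot(): unknown;
  restoreFromSnapshot(snapshot: unknown): void;
}

// Try one parse interpretation; if the tokenizer entered the error state, roll the
// state back (which also restores the saved null error field) and report failure.
function tryParse<T>(state: ParserState, parse: () => T): T | null {
  const snapshot = state.snapshot();
  const result = parse();
  if (state.error != null) {
    state.restoreFromSnapshot(snapshot);
    return null;
  }
  return result;
}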
+ */ +export declare function unexpected(message?: string, pos?: number): void; diff --git a/node_modules/sucrase/dist/types/parser/util/charcodes.d.ts b/node_modules/sucrase/dist/types/parser/util/charcodes.d.ts new file mode 100644 index 0000000..19bba1f --- /dev/null +++ b/node_modules/sucrase/dist/types/parser/util/charcodes.d.ts @@ -0,0 +1,107 @@ +export declare enum charCodes { + backSpace = 8, + lineFeed = 10, + tab = 9, + carriageReturn = 13, + shiftOut = 14, + space = 32, + exclamationMark = 33, + quotationMark = 34, + numberSign = 35, + dollarSign = 36, + percentSign = 37, + ampersand = 38, + apostrophe = 39, + leftParenthesis = 40, + rightParenthesis = 41, + asterisk = 42, + plusSign = 43, + comma = 44, + dash = 45, + dot = 46, + slash = 47, + digit0 = 48, + digit1 = 49, + digit2 = 50, + digit3 = 51, + digit4 = 52, + digit5 = 53, + digit6 = 54, + digit7 = 55, + digit8 = 56, + digit9 = 57, + colon = 58, + semicolon = 59, + lessThan = 60, + equalsTo = 61, + greaterThan = 62, + questionMark = 63, + atSign = 64, + uppercaseA = 65, + uppercaseB = 66, + uppercaseC = 67, + uppercaseD = 68, + uppercaseE = 69, + uppercaseF = 70, + uppercaseG = 71, + uppercaseH = 72, + uppercaseI = 73, + uppercaseJ = 74, + uppercaseK = 75, + uppercaseL = 76, + uppercaseM = 77, + uppercaseN = 78, + uppercaseO = 79, + uppercaseP = 80, + uppercaseQ = 81, + uppercaseR = 82, + uppercaseS = 83, + uppercaseT = 84, + uppercaseU = 85, + uppercaseV = 86, + uppercaseW = 87, + uppercaseX = 88, + uppercaseY = 89, + uppercaseZ = 90, + leftSquareBracket = 91, + backslash = 92, + rightSquareBracket = 93, + caret = 94, + underscore = 95, + graveAccent = 96, + lowercaseA = 97, + lowercaseB = 98, + lowercaseC = 99, + lowercaseD = 100, + lowercaseE = 101, + lowercaseF = 102, + lowercaseG = 103, + lowercaseH = 104, + lowercaseI = 105, + lowercaseJ = 106, + lowercaseK = 107, + lowercaseL = 108, + lowercaseM = 109, + lowercaseN = 110, + lowercaseO = 111, + lowercaseP = 112, + lowercaseQ = 113, + lowercaseR = 114, + lowercaseS = 115, + lowercaseT = 116, + lowercaseU = 117, + lowercaseV = 118, + lowercaseW = 119, + lowercaseX = 120, + lowercaseY = 121, + lowercaseZ = 122, + leftCurlyBrace = 123, + verticalBar = 124, + rightCurlyBrace = 125, + tilde = 126, + nonBreakingSpace = 160, + oghamSpaceMark = 5760, + lineSeparator = 8232, + paragraphSeparator = 8233 +} +export declare function isDigit(code: number): boolean; diff --git a/node_modules/sucrase/dist/types/parser/util/identifier.d.ts b/node_modules/sucrase/dist/types/parser/util/identifier.d.ts new file mode 100644 index 0000000..5c2eaeb --- /dev/null +++ b/node_modules/sucrase/dist/types/parser/util/identifier.d.ts @@ -0,0 +1,2 @@ +export declare const IS_IDENTIFIER_CHAR: Uint8Array; +export declare const IS_IDENTIFIER_START: Uint8Array; diff --git a/node_modules/sucrase/dist/types/parser/util/whitespace.d.ts b/node_modules/sucrase/dist/types/parser/util/whitespace.d.ts new file mode 100644 index 0000000..dca5425 --- /dev/null +++ b/node_modules/sucrase/dist/types/parser/util/whitespace.d.ts @@ -0,0 +1,3 @@ +export declare const WHITESPACE_CHARS: Array; +export declare const skipWhiteSpace: RegExp; +export declare const IS_WHITESPACE: Uint8Array; diff --git a/node_modules/sucrase/dist/types/register.d.ts b/node_modules/sucrase/dist/types/register.d.ts new file mode 100644 index 0000000..ce594ad --- /dev/null +++ b/node_modules/sucrase/dist/types/register.d.ts @@ -0,0 +1,14 @@ +import { Options } from "./index"; +export interface HookOptions { + matcher?: (code: string) => 
boolean; + ignoreNodeModules?: boolean; +} +export declare type RevertFunction = () => void; +export declare function addHook(extension: string, options: Options, hookOptions?: HookOptions): RevertFunction; +export declare function registerJS(hookOptions?: HookOptions): RevertFunction; +export declare function registerJSX(hookOptions?: HookOptions): RevertFunction; +export declare function registerTS(hookOptions?: HookOptions): RevertFunction; +export declare function registerTSX(hookOptions?: HookOptions): RevertFunction; +export declare function registerTSLegacyModuleInterop(hookOptions?: HookOptions): RevertFunction; +export declare function registerTSXLegacyModuleInterop(hookOptions?: HookOptions): RevertFunction; +export declare function registerAll(hookOptions?: HookOptions): RevertFunction; diff --git a/node_modules/sucrase/dist/types/transformers/CJSImportTransformer.d.ts b/node_modules/sucrase/dist/types/transformers/CJSImportTransformer.d.ts new file mode 100644 index 0000000..32b4233 --- /dev/null +++ b/node_modules/sucrase/dist/types/transformers/CJSImportTransformer.d.ts @@ -0,0 +1,136 @@ +import type CJSImportProcessor from "../CJSImportProcessor"; +import type { HelperManager } from "../HelperManager"; +import type NameManager from "../NameManager"; +import type TokenProcessor from "../TokenProcessor"; +import type ReactHotLoaderTransformer from "./ReactHotLoaderTransformer"; +import type RootTransformer from "./RootTransformer"; +import Transformer from "./Transformer"; +/** + * Class for editing import statements when we are transforming to commonjs. + */ +export default class CJSImportTransformer extends Transformer { + readonly rootTransformer: RootTransformer; + readonly tokens: TokenProcessor; + readonly importProcessor: CJSImportProcessor; + readonly nameManager: NameManager; + readonly helperManager: HelperManager; + readonly reactHotLoaderTransformer: ReactHotLoaderTransformer | null; + readonly enableLegacyBabel5ModuleInterop: boolean; + readonly enableLegacyTypeScriptModuleInterop: boolean; + readonly isTypeScriptTransformEnabled: boolean; + readonly preserveDynamicImport: boolean; + private hadExport; + private hadNamedExport; + private hadDefaultExport; + private declarationInfo; + constructor(rootTransformer: RootTransformer, tokens: TokenProcessor, importProcessor: CJSImportProcessor, nameManager: NameManager, helperManager: HelperManager, reactHotLoaderTransformer: ReactHotLoaderTransformer | null, enableLegacyBabel5ModuleInterop: boolean, enableLegacyTypeScriptModuleInterop: boolean, isTypeScriptTransformEnabled: boolean, preserveDynamicImport: boolean); + getPrefixCode(): string; + getSuffixCode(): string; + process(): boolean; + private processImportEquals; + /** + * Transform this: + * import foo, {bar} from 'baz'; + * into + * var _baz = require('baz'); var _baz2 = _interopRequireDefault(_baz); + * + * The import code was already generated in the import preprocessing step, so + * we just need to look it up. + */ + private processImport; + /** + * Erase this import, and return true if it was either of the form "import type" or contained only + * "type" named imports. Such imports should not even do a side-effect import. + * + * The position should end at the import string. + */ + private removeImportAndDetectIfType; + private removeRemainingImport; + private processIdentifier; + processObjectShorthand(): boolean; + processExport(): boolean; + private processAssignment; + /** + * Process something like `a += 3`, where `a` might be an exported value. 
+ */ + private processComplexAssignment; + /** + * Process something like `++a`, where `a` might be an exported value. + */ + private processPreIncDec; + /** + * Process something like `a++`, where `a` might be an exported value. + * This starts at the `a`, not at the `++`. + */ + private processPostIncDec; + private processExportDefault; + private copyDecorators; + /** + * Transform a declaration like `export var`, `export let`, or `export const`. + */ + private processExportVar; + /** + * Determine if the export is of the form: + * export var/let/const [varName] = [expr]; + * In other words, determine if function name inference might apply. + */ + private isSimpleExportVar; + /** + * Transform an `export var` declaration initializing a single variable. + * + * For example, this: + * export const f = () => {}; + * becomes this: + * const f = () => {}; exports.f = f; + * + * The variable is unused (e.g. exports.f has the true value of the export). + * We need to produce an assignment of this form so that the function will + * have an inferred name of "f", which wouldn't happen in the more general + * case below. + */ + private processSimpleExportVar; + /** + * Transform normal declaration exports, including handling destructuring. + * For example, this: + * export const {x: [a = 2, b], c} = d; + * becomes this: + * ({x: [exports.a = 2, exports.b], c: exports.c} = d;) + */ + private processComplexExportVar; + /** + * Transform this: + * export function foo() {} + * into this: + * function foo() {} exports.foo = foo; + */ + private processExportFunction; + /** + * Skip past a function with a name and return that name. + */ + private processNamedFunction; + /** + * Transform this: + * export class A {} + * into this: + * class A {} exports.A = A; + */ + private processExportClass; + /** + * Transform this: + * export {a, b as c}; + * into this: + * exports.a = a; exports.c = b; + * + * OR + * + * Transform this: + * export {a, b as c} from './foo'; + * into the pre-generated Object.defineProperty code from the ImportProcessor. + * + * For the first case, if the TypeScript transform is enabled, we need to skip + * exports that are only defined as types. + */ + private processExportBindings; + private processExportStar; + private shouldElideExportedIdentifier; +} diff --git a/node_modules/sucrase/dist/types/transformers/ESMImportTransformer.d.ts b/node_modules/sucrase/dist/types/transformers/ESMImportTransformer.d.ts new file mode 100644 index 0000000..5da3d98 --- /dev/null +++ b/node_modules/sucrase/dist/types/transformers/ESMImportTransformer.d.ts @@ -0,0 +1,44 @@ +import type { HelperManager } from "../HelperManager"; +import type { Options } from "../index"; +import type NameManager from "../NameManager"; +import type TokenProcessor from "../TokenProcessor"; +import type ReactHotLoaderTransformer from "./ReactHotLoaderTransformer"; +import Transformer from "./Transformer"; +/** + * Class for editing import statements when we are keeping the code as ESM. We still need to remove + * type-only imports in TypeScript and Flow. 
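+ *
+ * For illustration (a rough sketch; exact output may differ):
+ *   import type {A} from "./a";      // type-only import, removed entirely
+ *   import {type B, c} from "./bc";  // keeps only the value import: import {c} from "./bc";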
+ */ +export default class ESMImportTransformer extends Transformer { + readonly tokens: TokenProcessor; + readonly nameManager: NameManager; + readonly helperManager: HelperManager; + readonly reactHotLoaderTransformer: ReactHotLoaderTransformer | null; + readonly isTypeScriptTransformEnabled: boolean; + private nonTypeIdentifiers; + private declarationInfo; + private injectCreateRequireForImportRequire; + constructor(tokens: TokenProcessor, nameManager: NameManager, helperManager: HelperManager, reactHotLoaderTransformer: ReactHotLoaderTransformer | null, isTypeScriptTransformEnabled: boolean, options: Options); + process(): boolean; + private processImportEquals; + private processImport; + /** + * Remove type bindings from this import, leaving the rest of the import intact. + * + * Return true if this import was ONLY types, and thus is eligible for removal. This will bail out + * of the replacement operation, so we can return early here. + */ + private removeImportTypeBindings; + private isTypeName; + private processExportDefault; + /** + * In TypeScript, we need to remove named exports that were never declared or only declared as a + * type. + */ + private processNamedExports; + /** + * ESM elides all imports with the rule that we only elide if we see that it's + * a type and never see it as a value. This is in contrast to CJS, which + * elides imports that are completely unknown. + */ + private shouldElideExportedName; +} diff --git a/node_modules/sucrase/dist/types/transformers/FlowTransformer.d.ts b/node_modules/sucrase/dist/types/transformers/FlowTransformer.d.ts new file mode 100644 index 0000000..8a85ac4 --- /dev/null +++ b/node_modules/sucrase/dist/types/transformers/FlowTransformer.d.ts @@ -0,0 +1,79 @@ +import type TokenProcessor from "../TokenProcessor"; +import type RootTransformer from "./RootTransformer"; +import Transformer from "./Transformer"; +export default class FlowTransformer extends Transformer { + readonly rootTransformer: RootTransformer; + readonly tokens: TokenProcessor; + readonly isImportsTransformEnabled: boolean; + constructor(rootTransformer: RootTransformer, tokens: TokenProcessor, isImportsTransformEnabled: boolean); + process(): boolean; + /** + * Handle a declaration like: + * export enum E ... + * + * With this imports transform, this becomes: + * const E = [[enum]]; exports.E = E; + * + * otherwise, it becomes: + * export const E = [[enum]]; + */ + processNamedExportEnum(): void; + /** + * Handle a declaration like: + * export default enum E + * + * With the imports transform, this becomes: + * const E = [[enum]]; exports.default = E; + * + * otherwise, it becomes: + * const E = [[enum]]; export default E; + */ + processDefaultExportEnum(): void; + /** + * Transpile flow enums to invoke the "flow-enums-runtime" library. + * + * Currently, the transpiled code always uses `require("flow-enums-runtime")`, + * but if future flexibility is needed, we could expose a config option for + * this string (similar to configurable JSX). Even when targeting ESM, the + * default behavior of babel-plugin-transform-flow-enums is to use require + * rather than injecting an import. + * + * Flow enums are quite a bit simpler than TS enums and have some convenient + * constraints: + * - Element initializers must be either always present or always absent. That + * means that we can use fixed lookahead on the first element (if any) and + * assume that all elements are like that. 
+ * - The right-hand side of an element initializer must be a literal value, + * not a complex expression and not referencing other elements. That means + * we can simply copy a single token. + * + * Enums can be broken up into three basic cases: + * + * Mirrored enums: + * enum E {A, B} + * -> + * const E = require("flow-enums-runtime").Mirrored(["A", "B"]); + * + * Initializer enums: + * enum E {A = 1, B = 2} + * -> + * const E = require("flow-enums-runtime")({A: 1, B: 2}); + * + * Symbol enums: + * enum E of symbol {A, B} + * -> + * const E = require("flow-enums-runtime")({A: Symbol("A"), B: Symbol("B")}); + * + * We can statically detect which of the three cases this is by looking at the + * "of" declaration (if any) and seeing if the first element has an initializer. + * Since the other transform details are so similar between the three cases, we + * use a single implementation and vary the transform within processEnumElement + * based on case. + */ + processEnum(): void; + /** + * Process an individual enum element, producing either an array element or an + * object element based on what type of enum this is. + */ + processEnumElement(isSymbolEnum: boolean, hasInitializers: boolean): void; +} diff --git a/node_modules/sucrase/dist/types/transformers/JSXTransformer.d.ts b/node_modules/sucrase/dist/types/transformers/JSXTransformer.d.ts new file mode 100644 index 0000000..161e9e5 --- /dev/null +++ b/node_modules/sucrase/dist/types/transformers/JSXTransformer.d.ts @@ -0,0 +1,144 @@ +import type CJSImportProcessor from "../CJSImportProcessor"; +import type { Options } from "../index"; +import type NameManager from "../NameManager"; +import { JSXRole } from "../parser/tokenizer"; +import type TokenProcessor from "../TokenProcessor"; +import { JSXPragmaInfo } from "../util/getJSXPragmaInfo"; +import type RootTransformer from "./RootTransformer"; +import Transformer from "./Transformer"; +export default class JSXTransformer extends Transformer { + readonly rootTransformer: RootTransformer; + readonly tokens: TokenProcessor; + readonly importProcessor: CJSImportProcessor | null; + readonly nameManager: NameManager; + readonly options: Options; + jsxPragmaInfo: JSXPragmaInfo; + jsxImportSource: string; + isAutomaticRuntime: boolean; + lastLineNumber: number; + lastIndex: number; + filenameVarName: string | null; + esmAutomaticImportNameResolutions: { + [name: string]: string; + }; + cjsAutomaticModuleNameResolutions: { + [path: string]: string; + }; + constructor(rootTransformer: RootTransformer, tokens: TokenProcessor, importProcessor: CJSImportProcessor | null, nameManager: NameManager, options: Options); + process(): boolean; + getPrefixCode(): string; + processJSXTag(): void; + getElementLocationCode(firstTokenStart: number): string; + /** + * Get the line number for this source position. This is calculated lazily and + * must be called in increasing order by index. + */ + getLineNumberForIndex(index: number): number; + /** + * Convert the current JSX element to a call to jsx, jsxs, or jsxDEV. This is + * the primary transformation for the automatic transform. + * + * Example: + *
<div a={1} key={2}>Hello{x}</div>
+ * becomes + * jsxs('div', {a: 1, children: ["Hello", x]}, 2) + */ + transformTagToJSXFunc(elementLocationCode: string | null, jsxRole: JSXRole): void; + /** + * Convert the current JSX element to a createElement call. In the classic + * runtime, this is the only case. In the automatic runtime, this is called + * as a fallback in some situations. + * + * Example: + *
<div a={1} key={2}>Hello{x}</div>
+ * becomes + * React.createElement('div', {a: 1, key: 2}, "Hello", x) + */ + transformTagToCreateElement(elementLocationCode: string | null): void; + /** + * Get the code for the relevant function for this context: jsx, jsxs, + * or jsxDEV. The following open-paren is included as well. + * + * These functions are only used for the automatic runtime, so they are always + * auto-imported, but the auto-import will be either CJS or ESM based on the + * target module format. + */ + getJSXFuncInvocationCode(isStatic: boolean): string; + /** + * Return the code to use for the createElement function, e.g. + * `React.createElement`, including the following open-paren. + * + * This is the main function to use for the classic runtime. For the + * automatic runtime, this function is used as a fallback function to + * preserve behavior when there is a prop spread followed by an explicit + * key. In that automatic runtime case, the function should be automatically + * imported. + */ + getCreateElementInvocationCode(): string; + /** + * Return the code to use as the component when compiling a shorthand + * fragment, e.g. `React.Fragment`. + * + * This may be called from either the classic or automatic runtime, and + * the value should be auto-imported for the automatic runtime. + */ + getFragmentCode(): string; + /** + * Return code that invokes the given function. + * + * When the imports transform is enabled, use the CJSImportTransformer + * strategy of using `.call(void 0, ...` to avoid passing a `this` value in a + * situation that would otherwise look like a method call. + */ + claimAutoImportedFuncInvocation(funcName: string, importPathSuffix: string): string; + claimAutoImportedName(funcName: string, importPathSuffix: string): string; + /** + * Process the first part of a tag, before any props. + */ + processTagIntro(): void; + /** + * Starting at the beginning of the props, add the props argument to + * React.createElement, including the comma before it. + */ + processPropsObjectWithDevInfo(elementLocationCode: string | null): void; + /** + * Transform the core part of the props, assuming that a { has already been + * inserted before us and that a } will be inserted after us. + * + * If extractKeyCode is true (i.e. when using any jsx... function), any prop + * named "key" has its code captured and returned rather than being emitted to + * the output code. This shifts line numbers, and emitting the code later will + * correct line numbers again. If no key is found or if extractKeyCode is + * false, this function returns null. + */ + processProps(extractKeyCode: boolean): string | null; + processPropName(propName: string): void; + processPropValue(): void; + processStringPropValue(): void; + /** + * Starting in the middle of the props object literal, produce an additional + * prop for the children and close the object literal. + */ + processAutomaticChildrenAndEndProps(jsxRole: JSXRole): void; + /** + * Transform children into a comma-separated list, which will be either + * arguments to createElement or array elements of a children prop. + */ + processChildren(needsInitialComma: boolean): void; + /** + * Turn a JSX text element into a string literal, or nothing at all if the JSX + * text resolves to the empty string. + * + * Returns true if a string literal is emitted, false otherwise. 
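+ *
+ * For illustration: the text in <b>Hello world</b> is emitted as the string literal
+ * "Hello world" (returning true), while indentation-only text containing a newline, such as
+ * the whitespace between nested tags on separate lines, resolves to the empty string and
+ * emits nothing (returning false).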
+ */ + processChildTextElement(needsComma: boolean): boolean; + getDevSource(elementLocationCode: string): string; + getFilenameVarName(): string; +} +/** + * Spec for identifiers: https://tc39.github.io/ecma262/#prod-IdentifierStart. + * + * Really only treat anything starting with a-z as tag names. `_`, `$`, `é` + * should be treated as component names + */ +export declare function startsWithLowerCase(s: string): boolean; diff --git a/node_modules/sucrase/dist/types/transformers/JestHoistTransformer.d.ts b/node_modules/sucrase/dist/types/transformers/JestHoistTransformer.d.ts new file mode 100644 index 0000000..6a8b9df --- /dev/null +++ b/node_modules/sucrase/dist/types/transformers/JestHoistTransformer.d.ts @@ -0,0 +1,32 @@ +import type CJSImportProcessor from "../CJSImportProcessor"; +import type NameManager from "../NameManager"; +import type TokenProcessor from "../TokenProcessor"; +import type RootTransformer from "./RootTransformer"; +import Transformer from "./Transformer"; +/** + * Implementation of babel-plugin-jest-hoist, which hoists up some jest method + * calls above the imports to allow them to override other imports. + * + * To preserve line numbers, rather than directly moving the jest.mock code, we + * wrap each invocation in a function statement and then call the function from + * the top of the file. + */ +export default class JestHoistTransformer extends Transformer { + readonly rootTransformer: RootTransformer; + readonly tokens: TokenProcessor; + readonly nameManager: NameManager; + readonly importProcessor: CJSImportProcessor | null; + private readonly hoistedFunctionNames; + constructor(rootTransformer: RootTransformer, tokens: TokenProcessor, nameManager: NameManager, importProcessor: CJSImportProcessor | null); + process(): boolean; + getHoistedCode(): string; + /** + * Extracts any methods calls on the jest-object that should be hoisted. + * + * According to the jest docs, https://jestjs.io/docs/en/jest-object#jestmockmodulename-factory-options, + * mock, unmock, enableAutomock, disableAutomock, are the methods that should be hoisted. + * + * We do not apply the same checks of the arguments as babel-plugin-jest-hoist does. 
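+ *
+ * For illustration of the hoisting strategy described in the class comment (the generated
+ * function name here is hypothetical; the real one is claimed via NameManager):
+ *   jest.mock("./foo");
+ * roughly becomes
+ *   function __jestHoistedMock(){jest.mock("./foo");}
+ * with `__jestHoistedMock();` emitted at the top of the file via getHoistedCode().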
+ */ + private extractHoistedCalls; +} diff --git a/node_modules/sucrase/dist/types/transformers/NumericSeparatorTransformer.d.ts b/node_modules/sucrase/dist/types/transformers/NumericSeparatorTransformer.d.ts new file mode 100644 index 0000000..69a1cf5 --- /dev/null +++ b/node_modules/sucrase/dist/types/transformers/NumericSeparatorTransformer.d.ts @@ -0,0 +1,7 @@ +import type TokenProcessor from "../TokenProcessor"; +import Transformer from "./Transformer"; +export default class NumericSeparatorTransformer extends Transformer { + readonly tokens: TokenProcessor; + constructor(tokens: TokenProcessor); + process(): boolean; +} diff --git a/node_modules/sucrase/dist/types/transformers/OptionalCatchBindingTransformer.d.ts b/node_modules/sucrase/dist/types/transformers/OptionalCatchBindingTransformer.d.ts new file mode 100644 index 0000000..d381e5a --- /dev/null +++ b/node_modules/sucrase/dist/types/transformers/OptionalCatchBindingTransformer.d.ts @@ -0,0 +1,9 @@ +import type NameManager from "../NameManager"; +import type TokenProcessor from "../TokenProcessor"; +import Transformer from "./Transformer"; +export default class OptionalCatchBindingTransformer extends Transformer { + readonly tokens: TokenProcessor; + readonly nameManager: NameManager; + constructor(tokens: TokenProcessor, nameManager: NameManager); + process(): boolean; +} diff --git a/node_modules/sucrase/dist/types/transformers/OptionalChainingNullishTransformer.d.ts b/node_modules/sucrase/dist/types/transformers/OptionalChainingNullishTransformer.d.ts new file mode 100644 index 0000000..d468029 --- /dev/null +++ b/node_modules/sucrase/dist/types/transformers/OptionalChainingNullishTransformer.d.ts @@ -0,0 +1,36 @@ +import type NameManager from "../NameManager"; +import type TokenProcessor from "../TokenProcessor"; +import Transformer from "./Transformer"; +/** + * Transformer supporting the optional chaining and nullish coalescing operators. + * + * Tech plan here: + * https://github.com/alangpierce/sucrase/wiki/Sucrase-Optional-Chaining-and-Nullish-Coalescing-Technical-Plan + * + * The prefix and suffix code snippets are handled by TokenProcessor, and this transformer handles + * the operators themselves. + */ +export default class OptionalChainingNullishTransformer extends Transformer { + readonly tokens: TokenProcessor; + readonly nameManager: NameManager; + constructor(tokens: TokenProcessor, nameManager: NameManager); + process(): boolean; + /** + * Determine if the current token is the last of its chain, so that we know whether it's eligible + * to have a delete op inserted. + * + * We can do this by walking forward until we determine one way or another. Each + * isOptionalChainStart token must be paired with exactly one isOptionalChainEnd token after it in + * a nesting way, so we can track depth and walk to the end of the chain (the point where the + * depth goes negative) and see if any other subscript token is after us in the chain. + */ + isLastSubscriptInChain(): boolean; + /** + * Determine if we are the open-paren in an expression like super.a()?.b. + * + * We can do this by walking backward to find the previous subscript. If that subscript was + * preceded by a super, then we must be the subscript after it, so if this is a call expression, + * we'll need to attach the right context. 
+ */ + justSkippedSuper(): boolean; +} diff --git a/node_modules/sucrase/dist/types/transformers/ReactDisplayNameTransformer.d.ts b/node_modules/sucrase/dist/types/transformers/ReactDisplayNameTransformer.d.ts new file mode 100644 index 0000000..56ed86e --- /dev/null +++ b/node_modules/sucrase/dist/types/transformers/ReactDisplayNameTransformer.d.ts @@ -0,0 +1,29 @@ +import type CJSImportProcessor from "../CJSImportProcessor"; +import type { Options } from "../index"; +import type TokenProcessor from "../TokenProcessor"; +import type RootTransformer from "./RootTransformer"; +import Transformer from "./Transformer"; +/** + * Implementation of babel-plugin-transform-react-display-name, which adds a + * display name to usages of React.createClass and createReactClass. + */ +export default class ReactDisplayNameTransformer extends Transformer { + readonly rootTransformer: RootTransformer; + readonly tokens: TokenProcessor; + readonly importProcessor: CJSImportProcessor | null; + readonly options: Options; + constructor(rootTransformer: RootTransformer, tokens: TokenProcessor, importProcessor: CJSImportProcessor | null, options: Options); + process(): boolean; + /** + * This is called with the token position at the open-paren. + */ + private tryProcessCreateClassCall; + private findDisplayName; + private getDisplayNameFromFilename; + /** + * We only want to add a display name when this is a function call containing + * one argument, which is an object literal without `displayName` as an + * existing key. + */ + private classNeedsDisplayName; +} diff --git a/node_modules/sucrase/dist/types/transformers/ReactHotLoaderTransformer.d.ts b/node_modules/sucrase/dist/types/transformers/ReactHotLoaderTransformer.d.ts new file mode 100644 index 0000000..0221216 --- /dev/null +++ b/node_modules/sucrase/dist/types/transformers/ReactHotLoaderTransformer.d.ts @@ -0,0 +1,12 @@ +import type TokenProcessor from "../TokenProcessor"; +import Transformer from "./Transformer"; +export default class ReactHotLoaderTransformer extends Transformer { + readonly tokens: TokenProcessor; + readonly filePath: string; + private extractedDefaultExportName; + constructor(tokens: TokenProcessor, filePath: string); + setExtractedDefaultExportName(extractedDefaultExportName: string): void; + getPrefixCode(): string; + getSuffixCode(): string; + process(): boolean; +} diff --git a/node_modules/sucrase/dist/types/transformers/RootTransformer.d.ts b/node_modules/sucrase/dist/types/transformers/RootTransformer.d.ts new file mode 100644 index 0000000..97d6cd8 --- /dev/null +++ b/node_modules/sucrase/dist/types/transformers/RootTransformer.d.ts @@ -0,0 +1,52 @@ +import type { Options, SucraseContext, Transform } from "../index"; +import { ClassInfo } from "../util/getClassInfo"; +export interface RootTransformerResult { + code: string; + mappings: Array; +} +export default class RootTransformer { + private transformers; + private nameManager; + private tokens; + private generatedVariables; + private isImportsTransformEnabled; + private isReactHotLoaderTransformEnabled; + private disableESTransforms; + private helperManager; + constructor(sucraseContext: SucraseContext, transforms: Array, enableLegacyBabel5ModuleInterop: boolean, options: Options); + transform(): RootTransformerResult; + processBalancedCode(): void; + processToken(): void; + /** + * Skip past a class with a name and return that name. 
+ */ + processNamedClass(): string; + processClass(): void; + /** + * We want to just handle class fields in all contexts, since TypeScript supports them. Later, + * when some JS implementations support class fields, this should be made optional. + */ + processClassBody(classInfo: ClassInfo, className: string | null): void; + makeConstructorInitCode(constructorInitializerStatements: Array, instanceInitializerNames: Array, className: string): string; + /** + * Normally it's ok to simply remove type tokens, but we need to be more careful when dealing with + * arrow function return types since they can confuse the parser. In that case, we want to move + * the close-paren to the same line as the arrow. + * + * See https://github.com/alangpierce/sucrase/issues/391 for more details. + */ + processPossibleArrowParamEnd(): boolean; + /** + * An async arrow function might be of the form: + * + * async < + * T + * >() => {} + * + * in which case, removing the type parameters will cause a syntax error. Detect this case and + * move the open-paren earlier. + */ + processPossibleAsyncArrowWithTypeParams(): boolean; + processPossibleTypeRange(): boolean; + shiftMappings(mappings: Array, prefixLength: number): Array; +} diff --git a/node_modules/sucrase/dist/types/transformers/Transformer.d.ts b/node_modules/sucrase/dist/types/transformers/Transformer.d.ts new file mode 100644 index 0000000..35c18b1 --- /dev/null +++ b/node_modules/sucrase/dist/types/transformers/Transformer.d.ts @@ -0,0 +1,6 @@ +export default abstract class Transformer { + abstract process(): boolean; + getPrefixCode(): string; + getHoistedCode(): string; + getSuffixCode(): string; +} diff --git a/node_modules/sucrase/dist/types/transformers/TypeScriptTransformer.d.ts b/node_modules/sucrase/dist/types/transformers/TypeScriptTransformer.d.ts new file mode 100644 index 0000000..6e9c9d3 --- /dev/null +++ b/node_modules/sucrase/dist/types/transformers/TypeScriptTransformer.d.ts @@ -0,0 +1,104 @@ +import type { Token } from "../parser/tokenizer"; +import type TokenProcessor from "../TokenProcessor"; +import type RootTransformer from "./RootTransformer"; +import Transformer from "./Transformer"; +export default class TypeScriptTransformer extends Transformer { + readonly rootTransformer: RootTransformer; + readonly tokens: TokenProcessor; + readonly isImportsTransformEnabled: boolean; + constructor(rootTransformer: RootTransformer, tokens: TokenProcessor, isImportsTransformEnabled: boolean); + process(): boolean; + processEnum(isExport?: boolean): void; + /** + * Transform an enum into equivalent JS. This has complexity in a few places: + * - TS allows string enums, numeric enums, and a mix of the two styles within an enum. + * - Enum keys are allowed to be referenced in later enum values. + * - Enum keys are allowed to be strings. + * - When enum values are omitted, they should follow an auto-increment behavior. + */ + processEnumBody(enumName: string): void; + /** + * Detect name information about this enum key, which will be used to determine which code to emit + * and whether we should declare a variable as part of this declaration. + * + * Some cases to keep in mind: + * - Enum keys can be implicitly referenced later, e.g. `X = 1, Y = X`. In Sucrase, we implement + * this by declaring a variable `X` so that later expressions can use it. + * - In addition to the usual identifier key syntax, enum keys are allowed to be string literals, + * e.g. `"hello world" = 3,`. Template literal syntax is NOT allowed. 
+ * - Even if the enum key is defined as a string literal, it may still be referenced by identifier + * later, e.g. `"X" = 1, Y = X`. That means that we need to detect whether or not a string + * literal is identifier-like and emit a variable if so, even if the declaration did not use an + * identifier. + * - Reserved keywords like `break` are valid enum keys, but are not valid to be referenced later + * and would be a syntax error if we emitted a variable, so we need to skip the variable + * declaration in those cases. + * + * The variableName return value captures these nuances: if non-null, we can and must emit a + * variable declaration, and if null, we can't and shouldn't. + */ + extractEnumKeyInfo(nameToken: Token): { + nameStringCode: string; + variableName: string | null; + }; + /** + * Handle an enum member where the RHS is just a string literal (not omitted, not a number, and + * not a complex expression). This is the typical form for TS string enums, and in this case, we + * do *not* create a reverse mapping. + * + * This is called after deleting the key token, when the token processor is at the equals sign. + * + * Example 1: + * someKey = "some value" + * -> + * const someKey = "some value"; MyEnum["someKey"] = someKey; + * + * Example 2: + * "some key" = "some value" + * -> + * MyEnum["some key"] = "some value"; + */ + processStringLiteralEnumMember(enumName: string, nameStringCode: string, variableName: string | null): void; + /** + * Handle an enum member initialized with an expression on the right-hand side (other than a + * string literal). In these cases, we should transform the expression and emit code that sets up + * a reverse mapping. + * + * The TypeScript implementation of this operation distinguishes between expressions that can be + * "constant folded" at compile time (i.e. consist of number literals and simple math operations + * on those numbers) and ones that are dynamic. For constant expressions, it emits the resolved + * numeric value, and auto-incrementing is only allowed in that case. Evaluating expressions at + * compile time would add significant complexity to Sucrase, so Sucrase instead leaves the + * expression as-is, and will later emit something like `MyEnum["previousKey"] + 1` to implement + * auto-incrementing. + * + * This is called after deleting the key token, when the token processor is at the equals sign. + * + * Example 1: + * someKey = 1 + 1 + * -> + * const someKey = 1 + 1; MyEnum[MyEnum["someKey"] = someKey] = "someKey"; + * + * Example 2: + * "some key" = 1 + 1 + * -> + * MyEnum[MyEnum["some key"] = 1 + 1] = "some key"; + */ + processExplicitValueEnumMember(enumName: string, nameStringCode: string, variableName: string | null): void; + /** + * Handle an enum member with no right-hand side expression. In this case, the value is the + * previous value plus 1, or 0 if there was no previous value. We should also always emit a + * reverse mapping. 
+ * + * Example 1: + * someKey2 + * -> + * const someKey2 = someKey1 + 1; MyEnum[MyEnum["someKey2"] = someKey2] = "someKey2"; + * + * Example 2: + * "some key 2" + * -> + * MyEnum[MyEnum["some key 2"] = someKey1 + 1] = "some key 2"; + */ + processImplicitValueEnumMember(enumName: string, nameStringCode: string, variableName: string | null, previousValueCode: string | null): void; +} diff --git a/node_modules/sucrase/dist/types/util/elideImportEquals.d.ts b/node_modules/sucrase/dist/types/util/elideImportEquals.d.ts new file mode 100644 index 0000000..d04bb71 --- /dev/null +++ b/node_modules/sucrase/dist/types/util/elideImportEquals.d.ts @@ -0,0 +1,2 @@ +import type TokenProcessor from "../TokenProcessor"; +export default function elideImportEquals(tokens: TokenProcessor): void; diff --git a/node_modules/sucrase/dist/types/util/formatTokens.d.ts b/node_modules/sucrase/dist/types/util/formatTokens.d.ts new file mode 100644 index 0000000..fe5fb1e --- /dev/null +++ b/node_modules/sucrase/dist/types/util/formatTokens.d.ts @@ -0,0 +1,2 @@ +import type { Token } from "../parser/tokenizer"; +export default function formatTokens(code: string, tokens: Array): string; diff --git a/node_modules/sucrase/dist/types/util/getClassInfo.d.ts b/node_modules/sucrase/dist/types/util/getClassInfo.d.ts new file mode 100644 index 0000000..f4309cb --- /dev/null +++ b/node_modules/sucrase/dist/types/util/getClassInfo.d.ts @@ -0,0 +1,34 @@ +import type NameManager from "../NameManager"; +import type TokenProcessor from "../TokenProcessor"; +import type RootTransformer from "../transformers/RootTransformer"; +export interface ClassHeaderInfo { + isExpression: boolean; + className: string | null; + hasSuperclass: boolean; +} +export interface TokenRange { + start: number; + end: number; +} +export interface FieldInfo extends TokenRange { + equalsIndex: number; + initializerName: string; +} +/** + * Information about a class returned to inform the implementation of class fields and constructor + * initializers. + */ +export interface ClassInfo { + headerInfo: ClassHeaderInfo; + constructorInitializerStatements: Array; + instanceInitializerNames: Array; + staticInitializerNames: Array; + constructorInsertPos: number | null; + fields: Array; + rangesToRemove: Array; +} +/** + * Get information about the class fields for this class, given a token processor pointing to the + * open-brace at the start of the class. + */ +export default function getClassInfo(rootTransformer: RootTransformer, tokens: TokenProcessor, nameManager: NameManager, disableESTransforms: boolean): ClassInfo; diff --git a/node_modules/sucrase/dist/types/util/getDeclarationInfo.d.ts b/node_modules/sucrase/dist/types/util/getDeclarationInfo.d.ts new file mode 100644 index 0000000..04122ba --- /dev/null +++ b/node_modules/sucrase/dist/types/util/getDeclarationInfo.d.ts @@ -0,0 +1,18 @@ +import type TokenProcessor from "../TokenProcessor"; +export interface DeclarationInfo { + typeDeclarations: Set; + valueDeclarations: Set; +} +export declare const EMPTY_DECLARATION_INFO: DeclarationInfo; +/** + * Get all top-level identifiers that should be preserved when exported in TypeScript. + * + * Examples: + * - If an identifier is declared as `const x`, then `export {x}` should be preserved. + * - If it's declared as `type x`, then `export {x}` should be removed. + * - If it's declared as both `const x` and `type x`, then the export should be preserved. + * - Classes and enums should be preserved (even though they also introduce types). 
+ * - Imported identifiers should be preserved since we don't have enough information to + * rule them out. --isolatedModules disallows re-exports, which catches errors here. + */ +export default function getDeclarationInfo(tokens: TokenProcessor): DeclarationInfo; diff --git a/node_modules/sucrase/dist/types/util/getIdentifierNames.d.ts b/node_modules/sucrase/dist/types/util/getIdentifierNames.d.ts new file mode 100644 index 0000000..9e0d5a2 --- /dev/null +++ b/node_modules/sucrase/dist/types/util/getIdentifierNames.d.ts @@ -0,0 +1,5 @@ +import type { Token } from "../parser/tokenizer"; +/** + * Get all identifier names in the code, in order, including duplicates. + */ +export default function getIdentifierNames(code: string, tokens: Array): Array; diff --git a/node_modules/sucrase/dist/types/util/getImportExportSpecifierInfo.d.ts b/node_modules/sucrase/dist/types/util/getImportExportSpecifierInfo.d.ts new file mode 100644 index 0000000..8d089f1 --- /dev/null +++ b/node_modules/sucrase/dist/types/util/getImportExportSpecifierInfo.d.ts @@ -0,0 +1,36 @@ +import type TokenProcessor from "../TokenProcessor"; +export declare type ImportExportSpecifierInfo = { + isType: false; + leftName: string; + rightName: string; + endIndex: number; +} | { + isType: true; + leftName: null; + rightName: null; + endIndex: number; +}; +/** + * Determine information about this named import or named export specifier. + * + * This syntax is the `a` from statements like these: + * import {A} from "./foo"; + * export {A}; + * export {A} from "./foo"; + * + * As it turns out, we can exactly characterize the syntax meaning by simply + * counting the number of tokens, which can be from 1 to 4: + * {A} + * {type A} + * {A as B} + * {type A as B} + * + * In the type case, we never actually need the names in practice, so don't get + * them. + * + * TODO: There's some redundancy with the type detection here and the isType + * flag that's already present on tokens in TS mode. This function could + * potentially be simplified and/or pushed to the call sites to avoid the object + * allocation. 
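+ *
+ * For illustration: given the tokens for `import {foo as bar} from "./x";` with `index`
+ * pointing at `foo`, the result is {isType: false, leftName: "foo", rightName: "bar",
+ * endIndex: index + 3}, where endIndex points at the `,` or `}` that ends the specifier.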
+ */ +export default function getImportExportSpecifierInfo(tokens: TokenProcessor, index?: number): ImportExportSpecifierInfo; diff --git a/node_modules/sucrase/dist/types/util/getJSXPragmaInfo.d.ts b/node_modules/sucrase/dist/types/util/getJSXPragmaInfo.d.ts new file mode 100644 index 0000000..fec9a48 --- /dev/null +++ b/node_modules/sucrase/dist/types/util/getJSXPragmaInfo.d.ts @@ -0,0 +1,8 @@ +import type { Options } from "../index"; +export interface JSXPragmaInfo { + base: string; + suffix: string; + fragmentBase: string; + fragmentSuffix: string; +} +export default function getJSXPragmaInfo(options: Options): JSXPragmaInfo; diff --git a/node_modules/sucrase/dist/types/util/getNonTypeIdentifiers.d.ts b/node_modules/sucrase/dist/types/util/getNonTypeIdentifiers.d.ts new file mode 100644 index 0000000..3e40768 --- /dev/null +++ b/node_modules/sucrase/dist/types/util/getNonTypeIdentifiers.d.ts @@ -0,0 +1,3 @@ +import type { Options } from "../index"; +import type TokenProcessor from "../TokenProcessor"; +export declare function getNonTypeIdentifiers(tokens: TokenProcessor, options: Options): Set; diff --git a/node_modules/sucrase/dist/types/util/getTSImportedNames.d.ts b/node_modules/sucrase/dist/types/util/getTSImportedNames.d.ts new file mode 100644 index 0000000..bc236e4 --- /dev/null +++ b/node_modules/sucrase/dist/types/util/getTSImportedNames.d.ts @@ -0,0 +1,9 @@ +import type TokenProcessor from "../TokenProcessor"; +/** + * Special case code to scan for imported names in ESM TypeScript. We need to do this so we can + * properly get globals so we can compute shadowed globals. + * + * This is similar to logic in CJSImportProcessor, but trimmed down to avoid logic with CJS + * replacement and flow type imports. + */ +export default function getTSImportedNames(tokens: TokenProcessor): Set; diff --git a/node_modules/sucrase/dist/types/util/isAsyncOperation.d.ts b/node_modules/sucrase/dist/types/util/isAsyncOperation.d.ts new file mode 100644 index 0000000..3ad0036 --- /dev/null +++ b/node_modules/sucrase/dist/types/util/isAsyncOperation.d.ts @@ -0,0 +1,11 @@ +import type TokenProcessor from "../TokenProcessor"; +/** + * Determine whether this optional chain or nullish coalescing operation has any await statements in + * it. If so, we'll need to transpile to an async operation. + * + * We compute this by walking the length of the operation and returning true if we see an await + * keyword used as a real await (rather than an object key or property access). Nested optional + * chain/nullish operations need to be tracked but don't silence await, but a nested async function + * (or any other nested scope) will make the await not count. + */ +export default function isAsyncOperation(tokens: TokenProcessor): boolean; diff --git a/node_modules/sucrase/dist/types/util/isIdentifier.d.ts b/node_modules/sucrase/dist/types/util/isIdentifier.d.ts new file mode 100644 index 0000000..b39af27 --- /dev/null +++ b/node_modules/sucrase/dist/types/util/isIdentifier.d.ts @@ -0,0 +1,8 @@ +/** + * Determine if the given name is a legal variable name. + * + * This is needed when transforming TypeScript enums; if an enum key is a valid + * variable name, it might be referenced later in the enum, so we need to + * declare a variable. 
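+ *
+ * For illustration: names like `foo` or `$bar` are legal variable names, while a reserved
+ * word like `break` or a string key containing a space like `some key` is not, so no
+ * variable is declared for such enum keys.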
+ */ +export default function isIdentifier(name: string): boolean; diff --git a/node_modules/sucrase/dist/types/util/removeMaybeImportAssertion.d.ts b/node_modules/sucrase/dist/types/util/removeMaybeImportAssertion.d.ts new file mode 100644 index 0000000..b203a6c --- /dev/null +++ b/node_modules/sucrase/dist/types/util/removeMaybeImportAssertion.d.ts @@ -0,0 +1,6 @@ +import type TokenProcessor from "../TokenProcessor"; +/** + * Starting at a potential `assert` token remove the import assertion if there + * is one. + */ +export declare function removeMaybeImportAssertion(tokens: TokenProcessor): void; diff --git a/node_modules/sucrase/dist/types/util/shouldElideDefaultExport.d.ts b/node_modules/sucrase/dist/types/util/shouldElideDefaultExport.d.ts new file mode 100644 index 0000000..899f615 --- /dev/null +++ b/node_modules/sucrase/dist/types/util/shouldElideDefaultExport.d.ts @@ -0,0 +1,6 @@ +import type TokenProcessor from "../TokenProcessor"; +import type { DeclarationInfo } from "./getDeclarationInfo"; +/** + * Common method sharing code between CJS and ESM cases, since they're the same here. + */ +export default function shouldElideDefaultExport(isTypeScriptTransformEnabled: boolean, tokens: TokenProcessor, declarationInfo: DeclarationInfo): boolean; diff --git a/node_modules/sucrase/dist/util/elideImportEquals.js b/node_modules/sucrase/dist/util/elideImportEquals.js new file mode 100644 index 0000000..c6ef30f --- /dev/null +++ b/node_modules/sucrase/dist/util/elideImportEquals.js @@ -0,0 +1,29 @@ +"use strict";Object.defineProperty(exports, "__esModule", {value: true});var _types = require('../parser/tokenizer/types'); + + + function elideImportEquals(tokens) { + // import + tokens.removeInitialToken(); + // name + tokens.removeToken(); + // = + tokens.removeToken(); + // name or require + tokens.removeToken(); + // Handle either `import A = require('A')` or `import A = B.C.D`. + if (tokens.matches1(_types.TokenType.parenL)) { + // ( + tokens.removeToken(); + // path string + tokens.removeToken(); + // ) + tokens.removeToken(); + } else { + while (tokens.matches1(_types.TokenType.dot)) { + // . + tokens.removeToken(); + // name + tokens.removeToken(); + } + } +} exports.default = elideImportEquals; diff --git a/node_modules/sucrase/dist/util/formatTokens.js b/node_modules/sucrase/dist/util/formatTokens.js new file mode 100644 index 0000000..c663633 --- /dev/null +++ b/node_modules/sucrase/dist/util/formatTokens.js @@ -0,0 +1,74 @@ +"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; }var _linesandcolumns = require('lines-and-columns'); var _linesandcolumns2 = _interopRequireDefault(_linesandcolumns); + + +var _types = require('../parser/tokenizer/types'); + + function formatTokens(code, tokens) { + if (tokens.length === 0) { + return ""; + } + + const tokenKeys = Object.keys(tokens[0]).filter( + (k) => k !== "type" && k !== "value" && k !== "start" && k !== "end" && k !== "loc", + ); + const typeKeys = Object.keys(tokens[0].type).filter((k) => k !== "label" && k !== "keyword"); + + const headings = ["Location", "Label", "Raw", ...tokenKeys, ...typeKeys]; + + const lines = new (0, _linesandcolumns2.default)(code); + const rows = [headings, ...tokens.map(getTokenComponents)]; + const padding = headings.map(() => 0); + for (const components of rows) { + for (let i = 0; i < components.length; i++) { + padding[i] = Math.max(padding[i], components[i].length); + } + } + return rows + .map((components) => components.map((component, i) => component.padEnd(padding[i])).join(" ")) + .join("\n"); + + function getTokenComponents(token) { + const raw = code.slice(token.start, token.end); + return [ + formatRange(token.start, token.end), + _types.formatTokenType.call(void 0, token.type), + truncate(String(raw), 14), + // @ts-ignore: Intentional dynamic access by key. + ...tokenKeys.map((key) => formatValue(token[key], key)), + // @ts-ignore: Intentional dynamic access by key. + ...typeKeys.map((key) => formatValue(token.type[key], key)), + ]; + } + + // eslint-disable-next-line @typescript-eslint/no-explicit-any + function formatValue(value, key) { + if (value === true) { + return key; + } else if (value === false || value === null) { + return ""; + } else { + return String(value); + } + } + + function formatRange(start, end) { + return `${formatPos(start)}-${formatPos(end)}`; + } + + function formatPos(pos) { + const location = lines.locationForIndex(pos); + if (!location) { + return "Unknown"; + } else { + return `${location.line + 1}:${location.column + 1}`; + } + } +} exports.default = formatTokens; + +function truncate(s, length) { + if (s.length > length) { + return `${s.slice(0, length - 3)}...`; + } else { + return s; + } +} diff --git a/node_modules/sucrase/dist/util/getClassInfo.js b/node_modules/sucrase/dist/util/getClassInfo.js new file mode 100644 index 0000000..b8f1a1d --- /dev/null +++ b/node_modules/sucrase/dist/util/getClassInfo.js @@ -0,0 +1,347 @@ +"use strict";Object.defineProperty(exports, "__esModule", {value: true}); + +var _keywords = require('../parser/tokenizer/keywords'); +var _types = require('../parser/tokenizer/types'); + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +/** + * Get information about the class fields for this class, given a token processor pointing to the + * open-brace at the start of the class. 
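+ *
+ * For illustration (a rough sketch): for a class like
+ *   class A { x = 1; constructor(private y: number) {} }
+ * constructorInitializerStatements would contain "this.y = y" (from the access-modified
+ * constructor parameter) and fields would describe the `x = 1` initializer so it can be
+ * moved into an __init method.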
+ */ + function getClassInfo( + rootTransformer, + tokens, + nameManager, + disableESTransforms, +) { + const snapshot = tokens.snapshot(); + + const headerInfo = processClassHeader(tokens); + + let constructorInitializerStatements = []; + const instanceInitializerNames = []; + const staticInitializerNames = []; + let constructorInsertPos = null; + const fields = []; + const rangesToRemove = []; + + const classContextId = tokens.currentToken().contextId; + if (classContextId == null) { + throw new Error("Expected non-null class context ID on class open-brace."); + } + + tokens.nextToken(); + while (!tokens.matchesContextIdAndLabel(_types.TokenType.braceR, classContextId)) { + if (tokens.matchesContextual(_keywords.ContextualKeyword._constructor) && !tokens.currentToken().isType) { + ({constructorInitializerStatements, constructorInsertPos} = processConstructor(tokens)); + } else if (tokens.matches1(_types.TokenType.semi)) { + if (!disableESTransforms) { + rangesToRemove.push({start: tokens.currentIndex(), end: tokens.currentIndex() + 1}); + } + tokens.nextToken(); + } else if (tokens.currentToken().isType) { + tokens.nextToken(); + } else { + // Either a method or a field. Skip to the identifier part. + const statementStartIndex = tokens.currentIndex(); + let isStatic = false; + let isESPrivate = false; + let isDeclareOrAbstract = false; + while (isAccessModifier(tokens.currentToken())) { + if (tokens.matches1(_types.TokenType._static)) { + isStatic = true; + } + if (tokens.matches1(_types.TokenType.hash)) { + isESPrivate = true; + } + if (tokens.matches1(_types.TokenType._declare) || tokens.matches1(_types.TokenType._abstract)) { + isDeclareOrAbstract = true; + } + tokens.nextToken(); + } + if (isStatic && tokens.matches1(_types.TokenType.braceL)) { + // This is a static block, so don't process it in any special way. + skipToNextClassElement(tokens, classContextId); + continue; + } + if (isESPrivate) { + // Sucrase doesn't attempt to transpile private fields; just leave them as-is. + skipToNextClassElement(tokens, classContextId); + continue; + } + if ( + tokens.matchesContextual(_keywords.ContextualKeyword._constructor) && + !tokens.currentToken().isType + ) { + ({constructorInitializerStatements, constructorInsertPos} = processConstructor(tokens)); + continue; + } + + const nameStartIndex = tokens.currentIndex(); + skipFieldName(tokens); + if (tokens.matches1(_types.TokenType.lessThan) || tokens.matches1(_types.TokenType.parenL)) { + // This is a method, so nothing to process. + skipToNextClassElement(tokens, classContextId); + continue; + } + // There might be a type annotation that we need to skip. + while (tokens.currentToken().isType) { + tokens.nextToken(); + } + if (tokens.matches1(_types.TokenType.eq)) { + const equalsIndex = tokens.currentIndex(); + // This is an initializer, so we need to wrap in an initializer method. + const valueEnd = tokens.currentToken().rhsEndIndex; + if (valueEnd == null) { + throw new Error("Expected rhsEndIndex on class field assignment."); + } + tokens.nextToken(); + while (tokens.currentIndex() < valueEnd) { + rootTransformer.processToken(); + } + let initializerName; + if (isStatic) { + initializerName = nameManager.claimFreeName("__initStatic"); + staticInitializerNames.push(initializerName); + } else { + initializerName = nameManager.claimFreeName("__init"); + instanceInitializerNames.push(initializerName); + } + // Fields start at the name, so `static x = 1;` has a field range of `x = 1;`. 
+ fields.push({ + initializerName, + equalsIndex, + start: nameStartIndex, + end: tokens.currentIndex(), + }); + } else if (!disableESTransforms || isDeclareOrAbstract) { + // This is a regular field declaration, like `x;`. With the class transform enabled, we just + // remove the line so that no output is produced. With the class transform disabled, we + // usually want to preserve the declaration (but still strip types), but if the `declare` + // or `abstract` keyword is specified, we should remove the line to avoid initializing the + // value to undefined. + rangesToRemove.push({start: statementStartIndex, end: tokens.currentIndex()}); + } + } + } + + tokens.restoreToSnapshot(snapshot); + if (disableESTransforms) { + // With ES transforms disabled, we don't want to transform regular class + // field declarations, and we don't need to do any additional tricks to + // reference the constructor for static init, but we still need to transform + // TypeScript field initializers defined as constructor parameters and we + // still need to remove `declare` fields. For now, we run the same code + // path but omit any field information, as if the class had no field + // declarations. In the future, when we fully drop the class fields + // transform, we can simplify this code significantly. + return { + headerInfo, + constructorInitializerStatements, + instanceInitializerNames: [], + staticInitializerNames: [], + constructorInsertPos, + fields: [], + rangesToRemove, + }; + } else { + return { + headerInfo, + constructorInitializerStatements, + instanceInitializerNames, + staticInitializerNames, + constructorInsertPos, + fields, + rangesToRemove, + }; + } +} exports.default = getClassInfo; + +/** + * Move the token processor to the next method/field in the class. + * + * To do that, we seek forward to the next start of a class name (either an open + * bracket or an identifier, or the closing curly brace), then seek backward to + * include any access modifiers. + */ +function skipToNextClassElement(tokens, classContextId) { + tokens.nextToken(); + while (tokens.currentToken().contextId !== classContextId) { + tokens.nextToken(); + } + while (isAccessModifier(tokens.tokenAtRelativeIndex(-1))) { + tokens.previousToken(); + } +} + +function processClassHeader(tokens) { + const classToken = tokens.currentToken(); + const contextId = classToken.contextId; + if (contextId == null) { + throw new Error("Expected context ID on class token."); + } + const isExpression = classToken.isExpression; + if (isExpression == null) { + throw new Error("Expected isExpression on class token."); + } + let className = null; + let hasSuperclass = false; + tokens.nextToken(); + if (tokens.matches1(_types.TokenType.name)) { + className = tokens.identifierName(); + } + while (!tokens.matchesContextIdAndLabel(_types.TokenType.braceL, contextId)) { + // If this has a superclass, there will always be an `extends` token. If it doesn't have a + // superclass, only type parameters and `implements` clauses can show up here, all of which + // consist only of type tokens. A declaration like `class A {` should *not* count + // as having a superclass. + if (tokens.matches1(_types.TokenType._extends) && !tokens.currentToken().isType) { + hasSuperclass = true; + } + tokens.nextToken(); + } + return {isExpression, className, hasSuperclass}; +} + +/** + * Extract useful information out of a constructor, starting at the "constructor" name. 
+ */ +function processConstructor(tokens) + + + { + const constructorInitializerStatements = []; + + tokens.nextToken(); + const constructorContextId = tokens.currentToken().contextId; + if (constructorContextId == null) { + throw new Error("Expected context ID on open-paren starting constructor params."); + } + // Advance through parameters looking for access modifiers. + while (!tokens.matchesContextIdAndLabel(_types.TokenType.parenR, constructorContextId)) { + if (tokens.currentToken().contextId === constructorContextId) { + // Current token is an open paren or comma just before a param, so check + // that param for access modifiers. + tokens.nextToken(); + if (isAccessModifier(tokens.currentToken())) { + tokens.nextToken(); + while (isAccessModifier(tokens.currentToken())) { + tokens.nextToken(); + } + const token = tokens.currentToken(); + if (token.type !== _types.TokenType.name) { + throw new Error("Expected identifier after access modifiers in constructor arg."); + } + const name = tokens.identifierNameForToken(token); + constructorInitializerStatements.push(`this.${name} = ${name}`); + } + } else { + tokens.nextToken(); + } + } + // ) + tokens.nextToken(); + let constructorInsertPos = tokens.currentIndex(); + + // Advance through body looking for a super call. + let foundSuperCall = false; + while (!tokens.matchesContextIdAndLabel(_types.TokenType.braceR, constructorContextId)) { + if (!foundSuperCall && tokens.matches2(_types.TokenType._super, _types.TokenType.parenL)) { + tokens.nextToken(); + const superCallContextId = tokens.currentToken().contextId; + if (superCallContextId == null) { + throw new Error("Expected a context ID on the super call"); + } + while (!tokens.matchesContextIdAndLabel(_types.TokenType.parenR, superCallContextId)) { + tokens.nextToken(); + } + constructorInsertPos = tokens.currentIndex(); + foundSuperCall = true; + } + tokens.nextToken(); + } + // } + tokens.nextToken(); + + return {constructorInitializerStatements, constructorInsertPos}; +} + +/** + * Determine if this is any token that can go before the name in a method/field. + */ +function isAccessModifier(token) { + return [ + _types.TokenType._async, + _types.TokenType._get, + _types.TokenType._set, + _types.TokenType.plus, + _types.TokenType.minus, + _types.TokenType._readonly, + _types.TokenType._static, + _types.TokenType._public, + _types.TokenType._private, + _types.TokenType._protected, + _types.TokenType._override, + _types.TokenType._abstract, + _types.TokenType.star, + _types.TokenType._declare, + _types.TokenType.hash, + ].includes(token.type); +} + +/** + * The next token or set of tokens is either an identifier or an expression in square brackets, for + * a method or field name. 
+ */ +function skipFieldName(tokens) { + if (tokens.matches1(_types.TokenType.bracketL)) { + const startToken = tokens.currentToken(); + const classContextId = startToken.contextId; + if (classContextId == null) { + throw new Error("Expected class context ID on computed name open bracket."); + } + while (!tokens.matchesContextIdAndLabel(_types.TokenType.bracketR, classContextId)) { + tokens.nextToken(); + } + tokens.nextToken(); + } else { + tokens.nextToken(); + } +} diff --git a/node_modules/sucrase/dist/util/getDeclarationInfo.js b/node_modules/sucrase/dist/util/getDeclarationInfo.js new file mode 100644 index 0000000..465fc06 --- /dev/null +++ b/node_modules/sucrase/dist/util/getDeclarationInfo.js @@ -0,0 +1,40 @@ +"use strict";Object.defineProperty(exports, "__esModule", {value: true});var _tokenizer = require('../parser/tokenizer'); +var _types = require('../parser/tokenizer/types'); + + + + + + + + const EMPTY_DECLARATION_INFO = { + typeDeclarations: new Set(), + valueDeclarations: new Set(), +}; exports.EMPTY_DECLARATION_INFO = EMPTY_DECLARATION_INFO; + +/** + * Get all top-level identifiers that should be preserved when exported in TypeScript. + * + * Examples: + * - If an identifier is declared as `const x`, then `export {x}` should be preserved. + * - If it's declared as `type x`, then `export {x}` should be removed. + * - If it's declared as both `const x` and `type x`, then the export should be preserved. + * - Classes and enums should be preserved (even though they also introduce types). + * - Imported identifiers should be preserved since we don't have enough information to + * rule them out. --isolatedModules disallows re-exports, which catches errors here. + */ + function getDeclarationInfo(tokens) { + const typeDeclarations = new Set(); + const valueDeclarations = new Set(); + for (let i = 0; i < tokens.tokens.length; i++) { + const token = tokens.tokens[i]; + if (token.type === _types.TokenType.name && _tokenizer.isTopLevelDeclaration.call(void 0, token)) { + if (token.isType) { + typeDeclarations.add(tokens.identifierNameForToken(token)); + } else { + valueDeclarations.add(tokens.identifierNameForToken(token)); + } + } + } + return {typeDeclarations, valueDeclarations}; +} exports.default = getDeclarationInfo; diff --git a/node_modules/sucrase/dist/util/getIdentifierNames.js b/node_modules/sucrase/dist/util/getIdentifierNames.js new file mode 100644 index 0000000..c0195ca --- /dev/null +++ b/node_modules/sucrase/dist/util/getIdentifierNames.js @@ -0,0 +1,15 @@ +"use strict";Object.defineProperty(exports, "__esModule", {value: true}); +var _types = require('../parser/tokenizer/types'); + +/** + * Get all identifier names in the code, in order, including duplicates. + */ + function getIdentifierNames(code, tokens) { + const names = []; + for (const token of tokens) { + if (token.type === _types.TokenType.name) { + names.push(code.slice(token.start, token.end)); + } + } + return names; +} exports.default = getIdentifierNames; diff --git a/node_modules/sucrase/dist/util/getImportExportSpecifierInfo.js b/node_modules/sucrase/dist/util/getImportExportSpecifierInfo.js new file mode 100644 index 0000000..45edb74 --- /dev/null +++ b/node_modules/sucrase/dist/util/getImportExportSpecifierInfo.js @@ -0,0 +1,92 @@ +"use strict";Object.defineProperty(exports, "__esModule", {value: true});var _types = require('../parser/tokenizer/types'); + + + + + + + + + + + + + + + + +/** + * Determine information about this named import or named export specifier. 
+ * + * This syntax is the `a` from statements like these: + * import {A} from "./foo"; + * export {A}; + * export {A} from "./foo"; + * + * As it turns out, we can exactly characterize the syntax meaning by simply + * counting the number of tokens, which can be from 1 to 4: + * {A} + * {type A} + * {A as B} + * {type A as B} + * + * In the type case, we never actually need the names in practice, so don't get + * them. + * + * TODO: There's some redundancy with the type detection here and the isType + * flag that's already present on tokens in TS mode. This function could + * potentially be simplified and/or pushed to the call sites to avoid the object + * allocation. + */ + function getImportExportSpecifierInfo( + tokens, + index = tokens.currentIndex(), +) { + let endIndex = index + 1; + if (isSpecifierEnd(tokens, endIndex)) { + // import {A} + const name = tokens.identifierNameAtIndex(index); + return { + isType: false, + leftName: name, + rightName: name, + endIndex, + }; + } + endIndex++; + if (isSpecifierEnd(tokens, endIndex)) { + // import {type A} + return { + isType: true, + leftName: null, + rightName: null, + endIndex, + }; + } + endIndex++; + if (isSpecifierEnd(tokens, endIndex)) { + // import {A as B} + return { + isType: false, + leftName: tokens.identifierNameAtIndex(index), + rightName: tokens.identifierNameAtIndex(index + 2), + endIndex, + }; + } + endIndex++; + if (isSpecifierEnd(tokens, endIndex)) { + // import {type A as B} + return { + isType: true, + leftName: null, + rightName: null, + endIndex, + }; + } + throw new Error(`Unexpected import/export specifier at ${index}`); +} exports.default = getImportExportSpecifierInfo; + +function isSpecifierEnd(tokens, index) { + const token = tokens.tokens[index]; + return token.type === _types.TokenType.braceR || token.type === _types.TokenType.comma; +} diff --git a/node_modules/sucrase/dist/util/getJSXPragmaInfo.js b/node_modules/sucrase/dist/util/getJSXPragmaInfo.js new file mode 100644 index 0000000..67513db --- /dev/null +++ b/node_modules/sucrase/dist/util/getJSXPragmaInfo.js @@ -0,0 +1,22 @@ +"use strict";Object.defineProperty(exports, "__esModule", {value: true}); + + + + + + + + + function getJSXPragmaInfo(options) { + const [base, suffix] = splitPragma(options.jsxPragma || "React.createElement"); + const [fragmentBase, fragmentSuffix] = splitPragma(options.jsxFragmentPragma || "React.Fragment"); + return {base, suffix, fragmentBase, fragmentSuffix}; +} exports.default = getJSXPragmaInfo; + +function splitPragma(pragma) { + let dotIndex = pragma.indexOf("."); + if (dotIndex === -1) { + dotIndex = pragma.length; + } + return [pragma.slice(0, dotIndex), pragma.slice(dotIndex)]; +} diff --git a/node_modules/sucrase/dist/util/getNonTypeIdentifiers.js b/node_modules/sucrase/dist/util/getNonTypeIdentifiers.js new file mode 100644 index 0000000..86d9daf --- /dev/null +++ b/node_modules/sucrase/dist/util/getNonTypeIdentifiers.js @@ -0,0 +1,43 @@ +"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } +var _tokenizer = require('../parser/tokenizer'); +var _types = require('../parser/tokenizer/types'); + +var _JSXTransformer = require('../transformers/JSXTransformer'); +var _getJSXPragmaInfo = require('./getJSXPragmaInfo'); var _getJSXPragmaInfo2 = _interopRequireDefault(_getJSXPragmaInfo); + + function getNonTypeIdentifiers(tokens, options) { + const jsxPragmaInfo = _getJSXPragmaInfo2.default.call(void 0, options); + const nonTypeIdentifiers = new Set(); + for (let i = 0; i < tokens.tokens.length; i++) { + const token = tokens.tokens[i]; + if ( + token.type === _types.TokenType.name && + !token.isType && + (token.identifierRole === _tokenizer.IdentifierRole.Access || + token.identifierRole === _tokenizer.IdentifierRole.ObjectShorthand || + token.identifierRole === _tokenizer.IdentifierRole.ExportAccess) && + !token.shadowsGlobal + ) { + nonTypeIdentifiers.add(tokens.identifierNameForToken(token)); + } + if (token.type === _types.TokenType.jsxTagStart) { + nonTypeIdentifiers.add(jsxPragmaInfo.base); + } + if ( + token.type === _types.TokenType.jsxTagStart && + i + 1 < tokens.tokens.length && + tokens.tokens[i + 1].type === _types.TokenType.jsxTagEnd + ) { + nonTypeIdentifiers.add(jsxPragmaInfo.base); + nonTypeIdentifiers.add(jsxPragmaInfo.fragmentBase); + } + if (token.type === _types.TokenType.jsxName && token.identifierRole === _tokenizer.IdentifierRole.Access) { + const identifierName = tokens.identifierNameForToken(token); + // Lower-case single-component tag names like "div" don't count. + if (!_JSXTransformer.startsWithLowerCase.call(void 0, identifierName) || tokens.tokens[i + 1].type === _types.TokenType.dot) { + nonTypeIdentifiers.add(tokens.identifierNameForToken(token)); + } + } + } + return nonTypeIdentifiers; +} exports.getNonTypeIdentifiers = getNonTypeIdentifiers; diff --git a/node_modules/sucrase/dist/util/getTSImportedNames.js b/node_modules/sucrase/dist/util/getTSImportedNames.js new file mode 100644 index 0000000..5c5362c --- /dev/null +++ b/node_modules/sucrase/dist/util/getTSImportedNames.js @@ -0,0 +1,84 @@ +"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }var _types = require('../parser/tokenizer/types'); + +var _getImportExportSpecifierInfo = require('./getImportExportSpecifierInfo'); var _getImportExportSpecifierInfo2 = _interopRequireDefault(_getImportExportSpecifierInfo); + +/** + * Special case code to scan for imported names in ESM TypeScript. We need to do this so we can + * properly get globals so we can compute shadowed globals. + * + * This is similar to logic in CJSImportProcessor, but trimmed down to avoid logic with CJS + * replacement and flow type imports. 
+ */ + function getTSImportedNames(tokens) { + const importedNames = new Set(); + for (let i = 0; i < tokens.tokens.length; i++) { + if ( + tokens.matches1AtIndex(i, _types.TokenType._import) && + !tokens.matches3AtIndex(i, _types.TokenType._import, _types.TokenType.name, _types.TokenType.eq) + ) { + collectNamesForImport(tokens, i, importedNames); + } + } + return importedNames; +} exports.default = getTSImportedNames; + +function collectNamesForImport( + tokens, + index, + importedNames, +) { + index++; + + if (tokens.matches1AtIndex(index, _types.TokenType.parenL)) { + // Dynamic import, so nothing to do + return; + } + + if (tokens.matches1AtIndex(index, _types.TokenType.name)) { + importedNames.add(tokens.identifierNameAtIndex(index)); + index++; + if (tokens.matches1AtIndex(index, _types.TokenType.comma)) { + index++; + } + } + + if (tokens.matches1AtIndex(index, _types.TokenType.star)) { + // * as + index += 2; + importedNames.add(tokens.identifierNameAtIndex(index)); + index++; + } + + if (tokens.matches1AtIndex(index, _types.TokenType.braceL)) { + index++; + collectNamesForNamedImport(tokens, index, importedNames); + } +} + +function collectNamesForNamedImport( + tokens, + index, + importedNames, +) { + while (true) { + if (tokens.matches1AtIndex(index, _types.TokenType.braceR)) { + return; + } + + const specifierInfo = _getImportExportSpecifierInfo2.default.call(void 0, tokens, index); + index = specifierInfo.endIndex; + if (!specifierInfo.isType) { + importedNames.add(specifierInfo.rightName); + } + + if (tokens.matches2AtIndex(index, _types.TokenType.comma, _types.TokenType.braceR)) { + return; + } else if (tokens.matches1AtIndex(index, _types.TokenType.braceR)) { + return; + } else if (tokens.matches1AtIndex(index, _types.TokenType.comma)) { + index++; + } else { + throw new Error(`Unexpected token: ${JSON.stringify(tokens.tokens[index])}`); + } + } +} diff --git a/node_modules/sucrase/dist/util/isAsyncOperation.js b/node_modules/sucrase/dist/util/isAsyncOperation.js new file mode 100644 index 0000000..c02023a --- /dev/null +++ b/node_modules/sucrase/dist/util/isAsyncOperation.js @@ -0,0 +1,38 @@ +"use strict";Object.defineProperty(exports, "__esModule", {value: true});var _keywords = require('../parser/tokenizer/keywords'); + + +/** + * Determine whether this optional chain or nullish coalescing operation has any await statements in + * it. If so, we'll need to transpile to an async operation. + * + * We compute this by walking the length of the operation and returning true if we see an await + * keyword used as a real await (rather than an object key or property access). Nested optional + * chain/nullish operations need to be tracked but don't silence await, but a nested async function + * (or any other nested scope) will make the await not count. 
+ */ + function isAsyncOperation(tokens) { + let index = tokens.currentIndex(); + let depth = 0; + const startToken = tokens.currentToken(); + do { + const token = tokens.tokens[index]; + if (token.isOptionalChainStart) { + depth++; + } + if (token.isOptionalChainEnd) { + depth--; + } + depth += token.numNullishCoalesceStarts; + depth -= token.numNullishCoalesceEnds; + + if ( + token.contextualKeyword === _keywords.ContextualKeyword._await && + token.identifierRole == null && + token.scopeDepth === startToken.scopeDepth + ) { + return true; + } + index += 1; + } while (depth > 0 && index < tokens.tokens.length); + return false; +} exports.default = isAsyncOperation; diff --git a/node_modules/sucrase/dist/util/isIdentifier.js b/node_modules/sucrase/dist/util/isIdentifier.js new file mode 100644 index 0000000..8be8cf4 --- /dev/null +++ b/node_modules/sucrase/dist/util/isIdentifier.js @@ -0,0 +1,81 @@ +"use strict";Object.defineProperty(exports, "__esModule", {value: true});var _identifier = require('../parser/util/identifier'); + +// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Lexical_grammar +// Hard-code a list of reserved words rather than trying to use keywords or contextual keywords +// from the parser, since currently there are various exceptions, like `package` being reserved +// but unused and various contextual keywords being reserved. Note that we assume that all code +// compiled by Sucrase is in a module, so strict mode words and await are all considered reserved +// here. +const RESERVED_WORDS = new Set([ + // Reserved keywords as of ECMAScript 2015 + "break", + "case", + "catch", + "class", + "const", + "continue", + "debugger", + "default", + "delete", + "do", + "else", + "export", + "extends", + "finally", + "for", + "function", + "if", + "import", + "in", + "instanceof", + "new", + "return", + "super", + "switch", + "this", + "throw", + "try", + "typeof", + "var", + "void", + "while", + "with", + "yield", + // Future reserved keywords + "enum", + "implements", + "interface", + "let", + "package", + "private", + "protected", + "public", + "static", + "await", + // Literals that cannot be used as identifiers + "false", + "null", + "true", +]); + +/** + * Determine if the given name is a legal variable name. + * + * This is needed when transforming TypeScript enums; if an enum key is a valid + * variable name, it might be referenced later in the enum, so we need to + * declare a variable. + */ + function isIdentifier(name) { + if (name.length === 0) { + return false; + } + if (!_identifier.IS_IDENTIFIER_START[name.charCodeAt(0)]) { + return false; + } + for (let i = 1; i < name.length; i++) { + if (!_identifier.IS_IDENTIFIER_CHAR[name.charCodeAt(i)]) { + return false; + } + } + return !RESERVED_WORDS.has(name); +} exports.default = isIdentifier; diff --git a/node_modules/sucrase/dist/util/removeMaybeImportAssertion.js b/node_modules/sucrase/dist/util/removeMaybeImportAssertion.js new file mode 100644 index 0000000..66d6f50 --- /dev/null +++ b/node_modules/sucrase/dist/util/removeMaybeImportAssertion.js @@ -0,0 +1,19 @@ +"use strict";Object.defineProperty(exports, "__esModule", {value: true});var _keywords = require('../parser/tokenizer/keywords'); +var _types = require('../parser/tokenizer/types'); + + +/** + * Starting at a potential `assert` token remove the import assertion if there + * is one. 
+ */ + function removeMaybeImportAssertion(tokens) { + if (tokens.matches2(_types.TokenType.name, _types.TokenType.braceL) && tokens.matchesContextual(_keywords.ContextualKeyword._assert)) { + // assert + tokens.removeToken(); + // { + tokens.removeToken(); + tokens.removeBalancedCode(); + // } + tokens.removeToken(); + } +} exports.removeMaybeImportAssertion = removeMaybeImportAssertion; diff --git a/node_modules/sucrase/dist/util/shouldElideDefaultExport.js b/node_modules/sucrase/dist/util/shouldElideDefaultExport.js new file mode 100644 index 0000000..9135b69 --- /dev/null +++ b/node_modules/sucrase/dist/util/shouldElideDefaultExport.js @@ -0,0 +1,37 @@ +"use strict";Object.defineProperty(exports, "__esModule", {value: true});var _types = require('../parser/tokenizer/types'); + + + +/** + * Common method sharing code between CJS and ESM cases, since they're the same here. + */ + function shouldElideDefaultExport( + isTypeScriptTransformEnabled, + tokens, + declarationInfo, +) { + if (!isTypeScriptTransformEnabled) { + return false; + } + const exportToken = tokens.currentToken(); + if (exportToken.rhsEndIndex == null) { + throw new Error("Expected non-null rhsEndIndex on export token."); + } + // The export must be of the form `export default a` or `export default a;`. + const numTokens = exportToken.rhsEndIndex - tokens.currentIndex(); + if ( + numTokens !== 3 && + !(numTokens === 4 && tokens.matches1AtIndex(exportToken.rhsEndIndex - 1, _types.TokenType.semi)) + ) { + return false; + } + const identifierToken = tokens.tokenAtRelativeIndex(2); + if (identifierToken.type !== _types.TokenType.name) { + return false; + } + const exportedName = tokens.identifierNameForToken(identifierToken); + return ( + declarationInfo.typeDeclarations.has(exportedName) && + !declarationInfo.valueDeclarations.has(exportedName) + ); +} exports.default = shouldElideDefaultExport; diff --git a/node_modules/sucrase/package.json b/node_modules/sucrase/package.json new file mode 100644 index 0000000..b9be75e --- /dev/null +++ b/node_modules/sucrase/package.json @@ -0,0 +1,88 @@ +{ + "name": "sucrase", + "version": "3.32.0", + "description": "Super-fast alternative to Babel for when you can target modern JS runtimes", + "author": "Alan Pierce ", + "license": "MIT", + "main": "dist/index.js", + "module": "dist/esm/index.js", + "types": "dist/types/index.d.ts", + "bin": { + "sucrase": "./bin/sucrase", + "sucrase-node": "./bin/sucrase-node" + }, + "scripts": { + "build": "sucrase-node script/build.ts", + "fast-build": "sucrase-node script/build.ts --fast", + "clean": "rm -rf ./build ./dist ./dist-self-build ./dist-types ./example-runner/example-repos ./spec-compliance-tests/test262/test262-checkout ./spec-compliance-tests/babel-tests/babel-tests-checkout", + "generate": "sucrase-node generator/generate.ts", + "benchmark": "cd benchmark && yarn && sucrase-node ./benchmark.ts", + "benchmark-compare": "sucrase-node ./benchmark/compare-performance.ts", + "microbenchmark": "sucrase-node benchmark/microbenchmark.ts", + "lint": "sucrase-node script/lint.ts", + "profile": "node --inspect-brk ./node_modules/.bin/sucrase-node ./benchmark/profile", + "profile-project": "node --inspect-brk ./node_modules/.bin/sucrase-node ./benchmark/benchmark-project.ts --profile", + "prepublishOnly": "yarn clean && yarn build", + "release": "sucrase-node script/release.ts", + "run-examples": "sucrase-node example-runner/example-runner.ts", + "test": "yarn lint && yarn test-only", + "test-only": "mocha './test/**/*.ts'", + "integration-test": 
"cd integration-test && yarn && yarn link @sucrase/jest-plugin && mocha --timeout 10000 ./integration-tests.ts", + "test262": "sucrase-node spec-compliance-tests/test262/run-test262.ts", + "check-babel-tests": "sucrase-node spec-compliance-tests/babel-tests/check-babel-tests.ts", + "test-with-coverage": "nyc mocha './test/**/*.ts'", + "report-coverage": "nyc report --reporter=text-lcov > coverage.lcov && codecov" + }, + "repository": { + "type": "git", + "url": "https://github.com/alangpierce/sucrase.git" + }, + "keywords": [ + "babel", + "jsx", + "typescript", + "flow" + ], + "bugs": { + "url": "https://github.com/alangpierce/sucrase/issues" + }, + "devDependencies": { + "@babel/core": "^7.18.6", + "@jridgewell/trace-mapping": "^0.3.18", + "@types/glob": "^7", + "@types/mocha": "^9.1.1", + "@types/mz": "^2.7.4", + "@types/node": "^17.0.41", + "@typescript-eslint/eslint-plugin": "^5.27.1", + "@typescript-eslint/parser": "^5.27.1", + "chalk": "^4", + "codecov": "^3.8.3", + "eslint": "^8.17.0", + "eslint-config-airbnb-base": "^15.0.0", + "eslint-config-prettier": "^8.5.0", + "eslint-plugin-import": "~2.26", + "eslint-plugin-prettier": "^4.0.0", + "mocha": "^10.0.0", + "nyc": "^15.1.0", + "prettier": "^2.6.2", + "sucrase": "^3.31.0", + "test262-harness": "^10.0.0", + "ts-interface-builder": "^0.3.3", + "typescript": "~4.7" + }, + "dependencies": { + "@jridgewell/gen-mapping": "^0.3.2", + "commander": "^4.0.0", + "glob": "7.1.6", + "lines-and-columns": "^1.1.6", + "mz": "^2.7.0", + "pirates": "^4.0.1", + "ts-interface-checker": "^0.1.9" + }, + "engines": { + "node": ">=8" + }, + "resolutions": { + "**/eshost/socket.io": "^4" + } +} diff --git a/node_modules/sucrase/register/index.js b/node_modules/sucrase/register/index.js new file mode 100644 index 0000000..f6eb814 --- /dev/null +++ b/node_modules/sucrase/register/index.js @@ -0,0 +1 @@ +require("../dist/register").registerAll(); diff --git a/node_modules/sucrase/register/js.js b/node_modules/sucrase/register/js.js new file mode 100644 index 0000000..4891896 --- /dev/null +++ b/node_modules/sucrase/register/js.js @@ -0,0 +1 @@ +require("../dist/register").registerJS(); diff --git a/node_modules/sucrase/register/jsx.js b/node_modules/sucrase/register/jsx.js new file mode 100644 index 0000000..4bd476e --- /dev/null +++ b/node_modules/sucrase/register/jsx.js @@ -0,0 +1 @@ +require("../dist/register").registerJSX(); diff --git a/node_modules/sucrase/register/ts-legacy-module-interop.js b/node_modules/sucrase/register/ts-legacy-module-interop.js new file mode 100644 index 0000000..1ec5a6d --- /dev/null +++ b/node_modules/sucrase/register/ts-legacy-module-interop.js @@ -0,0 +1 @@ +require("../dist/register").registerTSLegacyModuleInterop(); diff --git a/node_modules/sucrase/register/ts.js b/node_modules/sucrase/register/ts.js new file mode 100644 index 0000000..23b8c88 --- /dev/null +++ b/node_modules/sucrase/register/ts.js @@ -0,0 +1 @@ +require("../dist/register").registerTS(); diff --git a/node_modules/sucrase/register/tsx-legacy-module-interop.js b/node_modules/sucrase/register/tsx-legacy-module-interop.js new file mode 100644 index 0000000..a883680 --- /dev/null +++ b/node_modules/sucrase/register/tsx-legacy-module-interop.js @@ -0,0 +1 @@ +require("../dist/register").registerTSXLegacyModuleInterop(); diff --git a/node_modules/sucrase/register/tsx.js b/node_modules/sucrase/register/tsx.js new file mode 100644 index 0000000..deb8b34 --- /dev/null +++ b/node_modules/sucrase/register/tsx.js @@ -0,0 +1 @@ 
+require("../dist/register").registerTSX(); diff --git a/node_modules/sucrase/ts-node-plugin/index.js b/node_modules/sucrase/ts-node-plugin/index.js new file mode 100644 index 0000000..d072c67 --- /dev/null +++ b/node_modules/sucrase/ts-node-plugin/index.js @@ -0,0 +1,76 @@ +const {transform} = require("../dist"); + +// Enum constants taken from the TypeScript codebase. +const ModuleKindCommonJS = 1; + +const JsxEmitReactJSX = 4; +const JsxEmitReactJSXDev = 5; + +/** + * ts-node transpiler plugin + * + * This plugin hooks into ts-node so that Sucrase can handle all TS-to-JS + * conversion while ts-node handles the ESM loader, require hook, REPL + * integration, etc. ts-node automatically discovers the relevant tsconfig file, + * so the main logic in this integration is translating tsconfig options to the + * corresponding Sucrase options. + * + * Any tsconfig options relevant to Sucrase are translated, but some config + * options outside the scope of Sucrase are ignored. For example, we assume the + * isolatedModules option, and we ignore target because Sucrase doesn't provide + * JS syntax downleveling (at least not in a way that is useful for Node). + * + * One notable caveat is that importsNotUsedAsValues and preserveValueImports + * are ignored right now, and Sucrase uses TypeScript's default behavior of + * eliding imports only used as types. This usually makes no difference when + * running the code, so for now we ignore these options without a warning. + */ +function create(createOptions) { + const {nodeModuleEmitKind} = createOptions; + const {module, jsx, jsxFactory, jsxFragmentFactory, jsxImportSource, esModuleInterop} = + createOptions.service.config.options; + + return { + transpile(input, transpileOptions) { + const {fileName} = transpileOptions; + const transforms = []; + // Detect JS rather than TS so we bias toward including the typescript + // transform, since almost always it doesn't hurt to include. + const isJS = + fileName.endsWith(".js") || + fileName.endsWith(".jsx") || + fileName.endsWith(".mjs") || + fileName.endsWith(".cjs"); + if (!isJS) { + transforms.push("typescript"); + } + if (module === ModuleKindCommonJS || nodeModuleEmitKind === "nodecjs") { + transforms.push("imports"); + } + if (fileName.endsWith(".tsx") || fileName.endsWith(".jsx")) { + transforms.push("jsx"); + } + + const {code, sourceMap} = transform(input, { + transforms, + disableESTransforms: true, + jsxRuntime: jsx === JsxEmitReactJSX || jsx === JsxEmitReactJSXDev ? 
"automatic" : "classic", + production: jsx === JsxEmitReactJSX, + jsxImportSource, + jsxPragma: jsxFactory, + jsxFragmentPragma: jsxFragmentFactory, + preserveDynamicImport: nodeModuleEmitKind === "nodecjs", + injectCreateRequireForImportRequire: nodeModuleEmitKind === "nodeesm", + enableLegacyTypeScriptModuleInterop: !esModuleInterop, + sourceMapOptions: {compiledFilename: fileName}, + filePath: fileName, + }); + return { + outputText: code, + sourceMapText: JSON.stringify(sourceMap), + }; + }, + }; +} + +exports.create = create; diff --git a/node_modules/supports-preserve-symlinks-flag/.eslintrc b/node_modules/supports-preserve-symlinks-flag/.eslintrc new file mode 100644 index 0000000..346ffec --- /dev/null +++ b/node_modules/supports-preserve-symlinks-flag/.eslintrc @@ -0,0 +1,14 @@ +{ + "root": true, + + "extends": "@ljharb", + + "env": { + "browser": true, + "node": true, + }, + + "rules": { + "id-length": "off", + }, +} diff --git a/node_modules/supports-preserve-symlinks-flag/.github/FUNDING.yml b/node_modules/supports-preserve-symlinks-flag/.github/FUNDING.yml new file mode 100644 index 0000000..e8d64f3 --- /dev/null +++ b/node_modules/supports-preserve-symlinks-flag/.github/FUNDING.yml @@ -0,0 +1,12 @@ +# These are supported funding model platforms + +github: [ljharb] +patreon: # Replace with a single Patreon username +open_collective: # Replace with a single Open Collective username +ko_fi: # Replace with a single Ko-fi username +tidelift: npm/supports-preserve-symlink-flag +community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry +liberapay: # Replace with a single Liberapay username +issuehunt: # Replace with a single IssueHunt username +otechie: # Replace with a single Otechie username +custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2'] diff --git a/node_modules/supports-preserve-symlinks-flag/.nycrc b/node_modules/supports-preserve-symlinks-flag/.nycrc new file mode 100644 index 0000000..bdd626c --- /dev/null +++ b/node_modules/supports-preserve-symlinks-flag/.nycrc @@ -0,0 +1,9 @@ +{ + "all": true, + "check-coverage": false, + "reporter": ["text-summary", "text", "html", "json"], + "exclude": [ + "coverage", + "test" + ] +} diff --git a/node_modules/supports-preserve-symlinks-flag/CHANGELOG.md b/node_modules/supports-preserve-symlinks-flag/CHANGELOG.md new file mode 100644 index 0000000..61f607f --- /dev/null +++ b/node_modules/supports-preserve-symlinks-flag/CHANGELOG.md @@ -0,0 +1,22 @@ +# Changelog + +All notable changes to this project will be documented in this file. + +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) +and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 
+ +## v1.0.0 - 2022-01-02 + +### Commits + +- Tests [`e2f59ad`](https://github.com/inspect-js/node-supports-preserve-symlinks-flag/commit/e2f59ad74e2ae0f5f4899fcde6a6f693ab7cc074) +- Initial commit [`dc222aa`](https://github.com/inspect-js/node-supports-preserve-symlinks-flag/commit/dc222aad3c0b940d8d3af1ca9937d108bd2dc4b9) +- [meta] do not publish workflow files [`5ef77f7`](https://github.com/inspect-js/node-supports-preserve-symlinks-flag/commit/5ef77f7cb6946d16ee38672be9ec0f1bbdf63262) +- npm init [`992b068`](https://github.com/inspect-js/node-supports-preserve-symlinks-flag/commit/992b068503a461f7e8676f40ca2aab255fd8d6ff) +- read me [`6c9afa9`](https://github.com/inspect-js/node-supports-preserve-symlinks-flag/commit/6c9afa9fabc8eaf0814aaed6dd01e6df0931b76d) +- Initial implementation [`2f98925`](https://github.com/inspect-js/node-supports-preserve-symlinks-flag/commit/2f9892546396d4ab0ad9f1ff83e76c3f01234ae8) +- [meta] add `auto-changelog` [`6c476ae`](https://github.com/inspect-js/node-supports-preserve-symlinks-flag/commit/6c476ae1ed7ce68b0480344f090ac2844f35509d) +- [Dev Deps] add `eslint`, `@ljharb/eslint-config` [`d0fffc8`](https://github.com/inspect-js/node-supports-preserve-symlinks-flag/commit/d0fffc886d25fba119355520750a909d64da0087) +- Only apps should have lockfiles [`ab318ed`](https://github.com/inspect-js/node-supports-preserve-symlinks-flag/commit/ab318ed7ae62f6c2c0e80a50398d40912afd8f69) +- [meta] add `safe-publish-latest` [`2bb23b3`](https://github.com/inspect-js/node-supports-preserve-symlinks-flag/commit/2bb23b3ebab02dc4135c4cdf0217db82835b9fca) +- [meta] add `sideEffects` flag [`600223b`](https://github.com/inspect-js/node-supports-preserve-symlinks-flag/commit/600223ba24f30779f209d9097721eff35ed62741) diff --git a/node_modules/supports-preserve-symlinks-flag/LICENSE b/node_modules/supports-preserve-symlinks-flag/LICENSE new file mode 100644 index 0000000..2e7b9a3 --- /dev/null +++ b/node_modules/supports-preserve-symlinks-flag/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2022 Inspect JS + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
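
Back in `sucrase/dist/util/getImportExportSpecifierInfo.js`, the doc comment notes that a named import/export specifier can be classified purely by counting the tokens before the next `,` or `}`: one token is `A`, two is `type A`, three is `A as B`, four is `type A as B`. A rough standalone model of that classification, operating on a plain array of token strings rather than Sucrase's real tokenizer output, so this illustrates the idea rather than the library's API:

```js
// Toy model of the 1-to-4 token classification described in
// getImportExportSpecifierInfo.js. `tokens` is a plain array of identifier
// strings for a single specifier, e.g. ["type", "A", "as", "B"].
function classifySpecifier(tokens) {
  switch (tokens.length) {
    case 1: // {A}
      return { isType: false, leftName: tokens[0], rightName: tokens[0] };
    case 2: // {type A} — type-only, so names are never needed
      return { isType: true, leftName: null, rightName: null };
    case 3: // {A as B}
      return { isType: false, leftName: tokens[0], rightName: tokens[2] };
    case 4: // {type A as B}
      return { isType: true, leftName: null, rightName: null };
    default:
      throw new Error(`Unexpected specifier with ${tokens.length} tokens`);
  }
}

console.log(classifySpecifier(["A"]));                    // value import of A
console.log(classifySpecifier(["type", "A", "as", "B"])); // type-only, names ignored
```
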
diff --git a/node_modules/supports-preserve-symlinks-flag/README.md b/node_modules/supports-preserve-symlinks-flag/README.md new file mode 100644 index 0000000..eb05b12 --- /dev/null +++ b/node_modules/supports-preserve-symlinks-flag/README.md @@ -0,0 +1,42 @@ +# node-supports-preserve-symlinks-flag [![Version Badge][npm-version-svg]][package-url] + +[![github actions][actions-image]][actions-url] +[![coverage][codecov-image]][codecov-url] +[![dependency status][deps-svg]][deps-url] +[![dev dependency status][dev-deps-svg]][dev-deps-url] +[![License][license-image]][license-url] +[![Downloads][downloads-image]][downloads-url] + +[![npm badge][npm-badge-png]][package-url] + +Determine if the current node version supports the `--preserve-symlinks` flag. + +## Example + +```js +var supportsPreserveSymlinks = require('node-supports-preserve-symlinks-flag'); +var assert = require('assert'); + +assert.equal(supportsPreserveSymlinks, null); // in a browser +assert.equal(supportsPreserveSymlinks, false); // in node < v6.2 +assert.equal(supportsPreserveSymlinks, true); // in node v6.2+ +``` + +## Tests +Simply clone the repo, `npm install`, and run `npm test` + +[package-url]: https://npmjs.org/package/node-supports-preserve-symlinks-flag +[npm-version-svg]: https://versionbadg.es/inspect-js/node-supports-preserve-symlinks-flag.svg +[deps-svg]: https://david-dm.org/inspect-js/node-supports-preserve-symlinks-flag.svg +[deps-url]: https://david-dm.org/inspect-js/node-supports-preserve-symlinks-flag +[dev-deps-svg]: https://david-dm.org/inspect-js/node-supports-preserve-symlinks-flag/dev-status.svg +[dev-deps-url]: https://david-dm.org/inspect-js/node-supports-preserve-symlinks-flag#info=devDependencies +[npm-badge-png]: https://nodei.co/npm/node-supports-preserve-symlinks-flag.png?downloads=true&stars=true +[license-image]: https://img.shields.io/npm/l/node-supports-preserve-symlinks-flag.svg +[license-url]: LICENSE +[downloads-image]: https://img.shields.io/npm/dm/node-supports-preserve-symlinks-flag.svg +[downloads-url]: https://npm-stat.com/charts.html?package=node-supports-preserve-symlinks-flag +[codecov-image]: https://codecov.io/gh/inspect-js/node-supports-preserve-symlinks-flag/branch/main/graphs/badge.svg +[codecov-url]: https://app.codecov.io/gh/inspect-js/node-supports-preserve-symlinks-flag/ +[actions-image]: https://img.shields.io/endpoint?url=https://github-actions-badge-u3jn4tfpocch.runkit.sh/inspect-js/node-supports-preserve-symlinks-flag +[actions-url]: https://github.com/inspect-js/node-supports-preserve-symlinks-flag/actions diff --git a/node_modules/supports-preserve-symlinks-flag/browser.js b/node_modules/supports-preserve-symlinks-flag/browser.js new file mode 100644 index 0000000..087be1f --- /dev/null +++ b/node_modules/supports-preserve-symlinks-flag/browser.js @@ -0,0 +1,3 @@ +'use strict'; + +module.exports = null; diff --git a/node_modules/supports-preserve-symlinks-flag/index.js b/node_modules/supports-preserve-symlinks-flag/index.js new file mode 100644 index 0000000..86fd5d3 --- /dev/null +++ b/node_modules/supports-preserve-symlinks-flag/index.js @@ -0,0 +1,9 @@ +'use strict'; + +module.exports = ( +// node 12+ + process.allowedNodeEnvironmentFlags && process.allowedNodeEnvironmentFlags.has('--preserve-symlinks') +) || ( +// node v6.2 - v11 + String(module.constructor._findPath).indexOf('preserveSymlinks') >= 0 // eslint-disable-line no-underscore-dangle +); diff --git a/node_modules/supports-preserve-symlinks-flag/package.json 
b/node_modules/supports-preserve-symlinks-flag/package.json new file mode 100644 index 0000000..56edadc --- /dev/null +++ b/node_modules/supports-preserve-symlinks-flag/package.json @@ -0,0 +1,70 @@ +{ + "name": "supports-preserve-symlinks-flag", + "version": "1.0.0", + "description": "Determine if the current node version supports the `--preserve-symlinks` flag.", + "main": "./index.js", + "browser": "./browser.js", + "exports": { + ".": [ + { + "browser": "./browser.js", + "default": "./index.js" + }, + "./index.js" + ], + "./package.json": "./package.json" + }, + "sideEffects": false, + "scripts": { + "prepublishOnly": "safe-publish-latest", + "prepublish": "not-in-publish || npm run prepublishOnly", + "lint": "eslint --ext=js,mjs .", + "pretest": "npm run lint", + "tests-only": "nyc tape 'test/**/*.js'", + "test": "npm run tests-only", + "posttest": "aud --production", + "version": "auto-changelog && git add CHANGELOG.md", + "postversion": "auto-changelog && git add CHANGELOG.md && git commit --no-edit --amend && git tag -f \"v$(node -e \"console.log(require('./package.json').version)\")\"" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/inspect-js/node-supports-preserve-symlinks-flag.git" + }, + "keywords": [ + "node", + "flag", + "symlink", + "symlinks", + "preserve-symlinks" + ], + "author": "Jordan Harband ", + "funding": { + "url": "https://github.com/sponsors/ljharb" + }, + "license": "MIT", + "bugs": { + "url": "https://github.com/inspect-js/node-supports-preserve-symlinks-flag/issues" + }, + "homepage": "https://github.com/inspect-js/node-supports-preserve-symlinks-flag#readme", + "devDependencies": { + "@ljharb/eslint-config": "^20.1.0", + "aud": "^1.1.5", + "auto-changelog": "^2.3.0", + "eslint": "^8.6.0", + "nyc": "^10.3.2", + "safe-publish-latest": "^2.0.0", + "semver": "^6.3.0", + "tape": "^5.4.0" + }, + "engines": { + "node": ">= 0.4" + }, + "auto-changelog": { + "output": "CHANGELOG.md", + "template": "keepachangelog", + "unreleased": false, + "commitLimit": false, + "backfillLimit": false, + "hideCredit": true + } +} diff --git a/node_modules/supports-preserve-symlinks-flag/test/index.js b/node_modules/supports-preserve-symlinks-flag/test/index.js new file mode 100644 index 0000000..9938d67 --- /dev/null +++ b/node_modules/supports-preserve-symlinks-flag/test/index.js @@ -0,0 +1,29 @@ +'use strict'; + +var test = require('tape'); +var semver = require('semver'); + +var supportsPreserveSymlinks = require('../'); +var browser = require('../browser'); + +test('supportsPreserveSymlinks', function (t) { + t.equal(typeof supportsPreserveSymlinks, 'boolean', 'is a boolean'); + + t.equal(browser, null, 'browser file is `null`'); + t.equal( + supportsPreserveSymlinks, + null, + 'in a browser, is null', + { skip: typeof window === 'undefined' } + ); + + var expected = semver.satisfies(process.version, '>= 6.2'); + t.equal( + supportsPreserveSymlinks, + expected, + 'is true in node v6.2+, false otherwise (actual: ' + supportsPreserveSymlinks + ', expected ' + expected + ')', + { skip: typeof window !== 'undefined' } + ); + + t.end(); +}); diff --git a/node_modules/tailwindcss/CHANGELOG.md b/node_modules/tailwindcss/CHANGELOG.md new file mode 100644 index 0000000..fd899dc --- /dev/null +++ b/node_modules/tailwindcss/CHANGELOG.md @@ -0,0 +1,2449 @@ +# Changelog + +All notable changes to this project will be documented in this file. 
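
The `supports-preserve-symlinks-flag` package just added resolves to `browser.js` (always `null`) under the `browser` export condition and to `index.js` (a boolean) in Node, as its test file asserts. A small sketch of how a consumer might use that boolean to decide whether to pass the flag when spawning Node; the `./worker.js` path is illustrative:

```js
// Sketch of a typical consumer: only pass --preserve-symlinks to runtimes
// that accept it. The "./worker.js" script path is made up.
const { spawnSync } = require("child_process");
const supportsPreserveSymlinks = require("supports-preserve-symlinks-flag");

const args = ["./worker.js"];
if (supportsPreserveSymlinks) {
  // node >= 6.2, detected via allowedNodeEnvironmentFlags or the _findPath check
  args.unshift("--preserve-symlinks");
}
spawnSync(process.execPath, args, { stdio: "inherit" });
```
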
+ +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), +and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). + +## [Unreleased] + +- Nothing yet! + +## [3.3.2] - 2023-04-25 + +### Fixed + +- Don’t move unknown pseudo-elements to the end of selectors ([#10943](https://github.com/tailwindlabs/tailwindcss/pull/10943), [#10962](https://github.com/tailwindlabs/tailwindcss/pull/10962)) +- Inherit gradient stop positions when using variants ([#11002](https://github.com/tailwindlabs/tailwindcss/pull/11002)) +- Honor default `to` position of gradient when using implicit transparent colors ([#11002](https://github.com/tailwindlabs/tailwindcss/pull/11002)) +- Ensure `@tailwindcss/oxide` doesn't leak in the stable engine ([#10988](https://github.com/tailwindlabs/tailwindcss/pull/10988)) +- Ensure multiple `theme(spacing[5])` calls with bracket notation in arbitrary properties work ([#11039](https://github.com/tailwindlabs/tailwindcss/pull/11039)) +- Normalize arbitrary modifiers ([#11057](https://github.com/tailwindlabs/tailwindcss/pull/11057)) + +### Changed + +- Drop support for Node.js v12 ([#11089](https://github.com/tailwindlabs/tailwindcss/pull/11089)) + +## [3.3.1] - 2023-03-30 + +### Fixed + +- Fix edge case bug when loading a TypeScript config file with webpack ([#10898](https://github.com/tailwindlabs/tailwindcss/pull/10898)) +- Fix variant, `@apply`, and `important` selectors when using `:is()` or `:has()` with pseudo-elements ([#10903](https://github.com/tailwindlabs/tailwindcss/pull/10903)) +- Fix `safelist` config types ([#10901](https://github.com/tailwindlabs/tailwindcss/pull/10901)) +- Fix build errors caused by `@tailwindcss/line-clamp` warning ([#10915](https://github.com/tailwindlabs/tailwindcss/pull/10915), [#10919](https://github.com/tailwindlabs/tailwindcss/pull/10919)) +- Fix "process is not defined" error ([#10919](https://github.com/tailwindlabs/tailwindcss/pull/10919)) + +## [3.3.0] - 2023-03-27 + +### Added + +- Support ESM and TypeScript config files ([#10785](https://github.com/tailwindlabs/tailwindcss/pull/10785)) +- Extend default color palette with new 950 shades ([#10879](https://github.com/tailwindlabs/tailwindcss/pull/10879)) +- Add `line-height` modifier support to `font-size` utilities ([#9875](https://github.com/tailwindlabs/tailwindcss/pull/9875)) +- Add support for using variables as arbitrary values without `var(...)` ([#9880](https://github.com/tailwindlabs/tailwindcss/pull/9880), [#9962](https://github.com/tailwindlabs/tailwindcss/pull/9962)) +- Add logical properties support for inline direction ([#10166](https://github.com/tailwindlabs/tailwindcss/pull/10166)) +- Add `hyphens` utilities ([#10071](https://github.com/tailwindlabs/tailwindcss/pull/10071)) +- Add `from-{position}`, `via-{position}` and `to-{position}` utilities ([#10886](https://github.com/tailwindlabs/tailwindcss/pull/10886)) +- Add `list-style-image` utilities ([#10817](https://github.com/tailwindlabs/tailwindcss/pull/10817)) +- Add `caption-side` utilities ([#10470](https://github.com/tailwindlabs/tailwindcss/pull/10470)) +- Add `line-clamp` utilities from `@tailwindcss/line-clamp` to core ([#10768](https://github.com/tailwindlabs/tailwindcss/pull/10768), [#10876](https://github.com/tailwindlabs/tailwindcss/pull/10876), [#10862](https://github.com/tailwindlabs/tailwindcss/pull/10862)) +- Add `delay-0` and `duration-0` utilities ([#10294](https://github.com/tailwindlabs/tailwindcss/pull/10294)) +- Add `justify-normal` and 
`justify-stretch` utilities ([#10560](https://github.com/tailwindlabs/tailwindcss/pull/10560)) +- Add `content-normal` and `content-stretch` utilities ([#10645](https://github.com/tailwindlabs/tailwindcss/pull/10645)) +- Add `whitespace-break-spaces` utility ([#10729](https://github.com/tailwindlabs/tailwindcss/pull/10729)) +- Add support for configuring default `font-variation-settings` for a `font-family` ([#10034](https://github.com/tailwindlabs/tailwindcss/pull/10034), [#10515](https://github.com/tailwindlabs/tailwindcss/pull/10515)) + +### Fixed + +- Disallow using multiple selectors in arbitrary variants ([#10655](https://github.com/tailwindlabs/tailwindcss/pull/10655)) +- Sort class lists deterministically for Prettier plugin ([#10672](https://github.com/tailwindlabs/tailwindcss/pull/10672)) +- Ensure CLI builds have a non-zero exit code on failure ([#10703](https://github.com/tailwindlabs/tailwindcss/pull/10703)) +- Ensure module dependencies for value `null`, is an empty `Set` ([#10877](https://github.com/tailwindlabs/tailwindcss/pull/10877)) +- Fix format assumption when resolving module dependencies ([#10878](https://github.com/tailwindlabs/tailwindcss/pull/10878)) + +### Changed + +- Mark `rtl` and `ltr` variants as stable and remove warnings ([#10764](https://github.com/tailwindlabs/tailwindcss/pull/10764)) +- Use `inset` instead of `top`, `right`, `bottom`, and `left` properties ([#10765](https://github.com/tailwindlabs/tailwindcss/pull/10765)) +- Make `dark` and `rtl`/`ltr` variants insensitive to DOM order ([#10766](https://github.com/tailwindlabs/tailwindcss/pull/10766)) +- Use `:is` to make important selector option insensitive to DOM order ([#10835](https://github.com/tailwindlabs/tailwindcss/pull/10835)) + +## [3.2.7] - 2023-02-16 + +### Fixed + +- Fix use of `:where(.btn)` when matching `!btn` ([#10601](https://github.com/tailwindlabs/tailwindcss/pull/10601)) +- Revert including `outline-color` in `transition` and `transition-colors` by default ([#10604](https://github.com/tailwindlabs/tailwindcss/pull/10604)) + +## [3.2.6] - 2023-02-08 + +### Fixed + +- Fix installation failing with yarn and pnpm by dropping `oxide-api-shim` ([add1636](https://github.com/tailwindlabs/tailwindcss/commit/add16364b4b1100e1af23ad1ca6900a0b53cbba0)) + +## [3.2.5] - 2023-02-08 + +### Added + +- Add standalone CLI build for 64-bit Windows on ARM (`node16-win-arm64`) ([#10001](https://github.com/tailwindlabs/tailwindcss/pull/10001)) + +### Fixed + +- Cleanup unused `variantOrder` ([#9829](https://github.com/tailwindlabs/tailwindcss/pull/9829)) +- Fix `foo-[abc]/[def]` not being handled correctly ([#9866](https://github.com/tailwindlabs/tailwindcss/pull/9866)) +- Add container queries plugin to standalone CLI ([#9865](https://github.com/tailwindlabs/tailwindcss/pull/9865)) +- Support renaming of output files by PostCSS plugins in CLI ([#9944](https://github.com/tailwindlabs/tailwindcss/pull/9944)) +- Improve return value of `resolveConfig`, unwrap `ResolvableTo` ([#9972](https://github.com/tailwindlabs/tailwindcss/pull/9972)) +- Clip unbalanced brackets in arbitrary values ([#9973](https://github.com/tailwindlabs/tailwindcss/pull/9973)) +- Don’t reorder webkit scrollbar pseudo elements ([#9991](https://github.com/tailwindlabs/tailwindcss/pull/9991)) +- Deterministic sorting of arbitrary variants ([#10016](https://github.com/tailwindlabs/tailwindcss/pull/10016)) +- Add `data` key to theme types ([#10023](https://github.com/tailwindlabs/tailwindcss/pull/10023)) +- Prevent invalid arbitrary 
variant selectors from failing the build ([#10059](https://github.com/tailwindlabs/tailwindcss/pull/10059)) +- Properly handle subtraction followed by a variable ([#10074](https://github.com/tailwindlabs/tailwindcss/pull/10074)) +- Fix missing `string[]` in the `theme.dropShadow` types ([#10072](https://github.com/tailwindlabs/tailwindcss/pull/10072)) +- Update list of length units ([#10100](https://github.com/tailwindlabs/tailwindcss/pull/10100)) +- Fix not matching arbitrary properties when closely followed by square brackets ([#10212](https://github.com/tailwindlabs/tailwindcss/pull/10212)) +- Allow direct nesting in `root` or `@layer` nodes ([#10229](https://github.com/tailwindlabs/tailwindcss/pull/10229)) +- Don't prefix classes in arbitrary variants ([#10214](https://github.com/tailwindlabs/tailwindcss/pull/10214)) +- Fix perf regression when checking for changed content ([#10234](https://github.com/tailwindlabs/tailwindcss/pull/10234)) +- Fix missing `blocklist` member in the `Config` type ([#10239](https://github.com/tailwindlabs/tailwindcss/pull/10239)) +- Escape group names in selectors ([#10276](https://github.com/tailwindlabs/tailwindcss/pull/10276)) +- Consider earlier variants before sorting functions ([#10288](https://github.com/tailwindlabs/tailwindcss/pull/10288)) +- Allow variants with slashes ([#10336](https://github.com/tailwindlabs/tailwindcss/pull/10336)) +- Ensure generated CSS is always sorted in the same order for a given set of templates ([#10382](https://github.com/tailwindlabs/tailwindcss/pull/10382)) +- Handle variants when the same class appears multiple times in a selector ([#10397](https://github.com/tailwindlabs/tailwindcss/pull/10397)) +- Handle group/peer variants with quoted strings ([#10400](https://github.com/tailwindlabs/tailwindcss/pull/10400)) +- Parse alpha value from rgba/hsla colors when using variables ([#10429](https://github.com/tailwindlabs/tailwindcss/pull/10429)) +- Sort by `layer` inside `variants` layer ([#10505](https://github.com/tailwindlabs/tailwindcss/pull/10505)) +- Add `--watch=always` option to prevent exit when stdin closes ([#9966](https://github.com/tailwindlabs/tailwindcss/pull/9966)) + +### Changed + +- Alphabetize `theme` keys in default config ([#9953](https://github.com/tailwindlabs/tailwindcss/pull/9953)) +- Update esbuild to v17 ([#10368](https://github.com/tailwindlabs/tailwindcss/pull/10368)) +- Include `outline-color` in `transition` and `transition-colors` utilities ([#10385](https://github.com/tailwindlabs/tailwindcss/pull/10385)) + +## [3.2.4] - 2022-11-11 + +### Added + +- Add `blocklist` option to prevent generating unwanted CSS ([#9812](https://github.com/tailwindlabs/tailwindcss/pull/9812)) + +### Fixed + +- Fix watching of files on Linux when renames are involved ([#9796](https://github.com/tailwindlabs/tailwindcss/pull/9796)) +- Make sure errors are always displayed when watching for changes ([#9810](https://github.com/tailwindlabs/tailwindcss/pull/9810)) + +## [3.2.3] - 2022-11-09 + +### Fixed + +- Fixed use of `raw` content in the CLI ([#9773](https://github.com/tailwindlabs/tailwindcss/pull/9773)) +- Pick up changes from files that are both context and content deps ([#9787](https://github.com/tailwindlabs/tailwindcss/pull/9787)) +- Sort pseudo-elements ONLY after classes when using variants and `@apply` ([#9765](https://github.com/tailwindlabs/tailwindcss/pull/9765)) +- Support important utilities in the safelist (pattern must include a `!`) ([#9791](https://github.com/tailwindlabs/tailwindcss/pull/9791)) 
+ +## [3.2.2] - 2022-11-04 + +### Fixed + +- Escape special characters in resolved content base paths ([#9650](https://github.com/tailwindlabs/tailwindcss/pull/9650)) +- Don't reuse container for array returning variant functions ([#9644](https://github.com/tailwindlabs/tailwindcss/pull/9644)) +- Exclude non-relevant selectors when generating rules with the important modifier ([#9677](https://github.com/tailwindlabs/tailwindcss/issues/9677)) +- Fix merging of arrays during config resolution ([#9706](https://github.com/tailwindlabs/tailwindcss/issues/9706)) +- Ensure configured `font-feature-settings` are included in Preflight ([#9707](https://github.com/tailwindlabs/tailwindcss/pull/9707)) +- Fix fractional values not being parsed properly inside arbitrary properties ([#9705](https://github.com/tailwindlabs/tailwindcss/pull/9705)) +- Fix incorrect selectors when using `@apply` in selectors with combinators and pseudos ([#9722](https://github.com/tailwindlabs/tailwindcss/pull/9722)) +- Fix cannot read properties of undefined (reading 'modifier') ([#9656](https://github.com/tailwindlabs/tailwindcss/pull/9656), [aa979d6](https://github.com/tailwindlabs/tailwindcss/commit/aa979d645f8bf4108c5fc938d7c0ba085b654c31)) + +## [3.2.1] - 2022-10-21 + +### Fixed + +- Fix missing `supports` in types ([#9616](https://github.com/tailwindlabs/tailwindcss/pull/9616)) +- Fix missing PostCSS dependencies in the CLI ([#9617](https://github.com/tailwindlabs/tailwindcss/pull/9617)) +- Ensure `micromatch` is a proper CLI dependency ([#9620](https://github.com/tailwindlabs/tailwindcss/pull/9620)) +- Ensure modifier values exist when using a `modifiers` object for `matchVariant` ([ba6551d](https://github.com/tailwindlabs/tailwindcss/commit/ba6551db0f2726461371b4f3c6cd4c7090888504)) + +## [3.2.0] - 2022-10-19 + +### Added + +- Add new `@config` directive ([#9405](https://github.com/tailwindlabs/tailwindcss/pull/9405)) +- Add new `relative: true` option to resolve content paths relative to the config file ([#9396](https://github.com/tailwindlabs/tailwindcss/pull/9396)) +- Add new `supports-*` variant ([#9453](https://github.com/tailwindlabs/tailwindcss/pull/9453)) +- Add new `min-*` and `max-*` variants ([#9558](https://github.com/tailwindlabs/tailwindcss/pull/9558)) +- Add new `aria-*` variants ([#9557](https://github.com/tailwindlabs/tailwindcss/pull/9557), [#9588](https://github.com/tailwindlabs/tailwindcss/pull/9588)) +- Add new `data-*` variants ([#9559](https://github.com/tailwindlabs/tailwindcss/pull/9559), [#9588](https://github.com/tailwindlabs/tailwindcss/pull/9588)) +- Add new `break-keep` utility for `word-break: keep-all` ([#9393](https://github.com/tailwindlabs/tailwindcss/pull/9393)) +- Add new `collapse` utility for `visibility: collapse` ([#9181](https://github.com/tailwindlabs/tailwindcss/pull/9181)) +- Add new `fill-none` utility for `fill: none` ([#9403](https://github.com/tailwindlabs/tailwindcss/pull/9403)) +- Add new `stroke-none` utility for `stroke: none` ([#9403](https://github.com/tailwindlabs/tailwindcss/pull/9403)) +- Add new `place-content-baseline` utility for `place-content: baseline` ([#9498](https://github.com/tailwindlabs/tailwindcss/pull/9498)) +- Add new `place-items-baseline` utility for `place-items: baseline` ([#9507](https://github.com/tailwindlabs/tailwindcss/pull/9507)) +- Add new `content-baseline` utility for `align-content: baseline` ([#9507](https://github.com/tailwindlabs/tailwindcss/pull/9507)) +- Add support for configuring default `font-feature-settings` for a font 
family ([#9039](https://github.com/tailwindlabs/tailwindcss/pull/9039)) +- Add standalone CLI build for 32-bit Linux on ARM (`node16-linux-armv7`) ([#9084](https://github.com/tailwindlabs/tailwindcss/pull/9084)) +- Add future flag to disable color opacity utility plugins ([#9088](https://github.com/tailwindlabs/tailwindcss/pull/9088)) +- Add negative value support for `outline-offset` ([#9136](https://github.com/tailwindlabs/tailwindcss/pull/9136)) +- Add support for modifiers to `matchUtilities` ([#9541](https://github.com/tailwindlabs/tailwindcss/pull/9541)) +- Allow negating utilities using `min`/`max`/`clamp` ([#9237](https://github.com/tailwindlabs/tailwindcss/pull/9237)) +- Implement fallback plugins when there is ambiguity between plugins when using arbitrary values ([#9376](https://github.com/tailwindlabs/tailwindcss/pull/9376)) +- Support `sort` function in `matchVariant` ([#9423](https://github.com/tailwindlabs/tailwindcss/pull/9423)) +- Upgrade to `postcss-nested` v6.0 ([#9546](https://github.com/tailwindlabs/tailwindcss/pull/9546)) + +### Fixed + +- Use absolute paths when resolving changed files for resilience against working directory changes ([#9032](https://github.com/tailwindlabs/tailwindcss/pull/9032)) +- Fix ring color utility generation when using `respectDefaultRingColorOpacity` ([#9070](https://github.com/tailwindlabs/tailwindcss/pull/9070)) +- Sort tags before classes when `@apply`-ing a selector with joined classes ([#9107](https://github.com/tailwindlabs/tailwindcss/pull/9107)) +- Remove invalid `outline-hidden` utility ([#9147](https://github.com/tailwindlabs/tailwindcss/pull/9147)) +- Honor the `hidden` attribute on elements in preflight ([#9174](https://github.com/tailwindlabs/tailwindcss/pull/9174)) +- Don't stop watching atomically renamed files ([#9173](https://github.com/tailwindlabs/tailwindcss/pull/9173), [#9215](https://github.com/tailwindlabs/tailwindcss/pull/9215)) +- Fix duplicate utilities issue causing memory leaks ([#9208](https://github.com/tailwindlabs/tailwindcss/pull/9208)) +- Fix `fontFamily` config TypeScript types ([#9214](https://github.com/tailwindlabs/tailwindcss/pull/9214)) +- Handle variants on complex selector utilities ([#9262](https://github.com/tailwindlabs/tailwindcss/pull/9262)) +- Fix shared config mutation issue ([#9294](https://github.com/tailwindlabs/tailwindcss/pull/9294)) +- Fix ordering of parallel variants ([#9282](https://github.com/tailwindlabs/tailwindcss/pull/9282)) +- Handle variants in utility selectors using `:where()` and `:has()` ([#9309](https://github.com/tailwindlabs/tailwindcss/pull/9309)) +- Improve data type analysis for arbitrary values ([#9320](https://github.com/tailwindlabs/tailwindcss/pull/9320)) +- Don't emit generated utilities with invalid uses of theme functions ([#9319](https://github.com/tailwindlabs/tailwindcss/pull/9319)) +- Revert change that only listened for stdin close on TTYs ([#9331](https://github.com/tailwindlabs/tailwindcss/pull/9331)) +- Ignore unset values (like `null` or `undefined`) when resolving the classList for intellisense ([#9385](https://github.com/tailwindlabs/tailwindcss/pull/9385)) +- Improve type checking for formal syntax ([#9349](https://github.com/tailwindlabs/tailwindcss/pull/9349), [#9448](https://github.com/tailwindlabs/tailwindcss/pull/9448)) +- Fix incorrect required `content` key in custom plugin configs ([#9502](https://github.com/tailwindlabs/tailwindcss/pull/9502), [#9545](https://github.com/tailwindlabs/tailwindcss/pull/9545)) +- Fix content path detection on 
Windows ([#9569](https://github.com/tailwindlabs/tailwindcss/pull/9569)) +- Ensure `--content` is used in the CLI when passed ([#9587](https://github.com/tailwindlabs/tailwindcss/pull/9587)) + +## [3.1.8] - 2022-08-05 + +### Fixed + +- Don’t prefix classes within reused arbitrary variants ([#8992](https://github.com/tailwindlabs/tailwindcss/pull/8992)) +- Fix usage of alpha values inside single-named colors that are functions ([#9008](https://github.com/tailwindlabs/tailwindcss/pull/9008)) +- Fix `@apply` of user utilities when negative and non-negative versions both exist ([#9027](https://github.com/tailwindlabs/tailwindcss/pull/9027)) + +## [3.1.7] - 2022-07-29 + +### Fixed + +- Don't rewrite source maps for `@layer` rules ([#8971](https://github.com/tailwindlabs/tailwindcss/pull/8971)) + +### Added + +- Added types for `resolveConfig` ([#8924](https://github.com/tailwindlabs/tailwindcss/pull/8924)) + +## [3.1.6] - 2022-07-11 + +### Fixed + +- Fix usage on Node 12.x ([b4e637e](https://github.com/tailwindlabs/tailwindcss/commit/b4e637e2e096a9d6f2210efba9541f6fd4f28e56)) +- Handle theme keys with slashes when using `theme()` in CSS ([#8831](https://github.com/tailwindlabs/tailwindcss/pull/8831)) + +## [3.1.5] - 2022-07-07 + +### Added + +- Support configuring a default `font-weight` for each font size utility ([#8763](https://github.com/tailwindlabs/tailwindcss/pull/8763)) +- Add support for alpha values in safe list ([#8774](https://github.com/tailwindlabs/tailwindcss/pull/8774)) + +### Fixed + +- Improve types to support fallback values in the CSS-in-JS syntax used in plugin APIs ([#8762](https://github.com/tailwindlabs/tailwindcss/pull/8762)) +- Support including `tailwindcss` and `autoprefixer` in `postcss.config.js` in standalone CLI ([#8769](https://github.com/tailwindlabs/tailwindcss/pull/8769)) +- Fix using special-characters as prefixes ([#8772](https://github.com/tailwindlabs/tailwindcss/pull/8772)) +- Don’t prefix classes used within arbitrary variants ([#8773](https://github.com/tailwindlabs/tailwindcss/pull/8773)) +- Add more explicit types for the default theme ([#8780](https://github.com/tailwindlabs/tailwindcss/pull/8780)) + +## [3.1.4] - 2022-06-21 + +### Fixed + +- Provide default to `` when using `theme()` ([#8652](https://github.com/tailwindlabs/tailwindcss/pull/8652)) +- Detect arbitrary variants with quotes ([#8687](https://github.com/tailwindlabs/tailwindcss/pull/8687)) +- Don’t add spaces around raw `/` that are preceded by numbers ([#8688](https://github.com/tailwindlabs/tailwindcss/pull/8688)) + +## [3.1.3] - 2022-06-14 + +### Fixed + +- Fix extraction of multi-word utilities with arbitrary values and quotes ([#8604](https://github.com/tailwindlabs/tailwindcss/pull/8604)) +- Fix casing of import of `corePluginList` type definition ([#8587](https://github.com/tailwindlabs/tailwindcss/pull/8587)) +- Ignore PostCSS nodes returned by `addVariant` ([#8608](https://github.com/tailwindlabs/tailwindcss/pull/8608)) +- Fix missing spaces around arithmetic operators ([#8615](https://github.com/tailwindlabs/tailwindcss/pull/8615)) +- Detect alpha value in CSS `theme()` function when using quotes ([#8625](https://github.com/tailwindlabs/tailwindcss/pull/8625)) +- Fix "Maximum call stack size exceeded" bug ([#8636](https://github.com/tailwindlabs/tailwindcss/pull/8636)) +- Allow functions returning parallel variants to mutate the container ([#8622](https://github.com/tailwindlabs/tailwindcss/pull/8622)) +- Remove text opacity CSS variables from `::marker` 
([#8622](https://github.com/tailwindlabs/tailwindcss/pull/8622)) + +## [3.1.2] - 2022-06-10 + +### Fixed + +- Ensure `\` is a valid arbitrary variant token ([#8576](https://github.com/tailwindlabs/tailwindcss/pull/8576)) +- Enable `postcss-import` in the CLI by default in watch mode ([#8574](https://github.com/tailwindlabs/tailwindcss/pull/8574), [#8580](https://github.com/tailwindlabs/tailwindcss/pull/8580)) + +## [3.1.1] - 2022-06-09 + +### Fixed + +- Fix candidate extractor regression ([#8558](https://github.com/tailwindlabs/tailwindcss/pull/8558)) +- Split `::backdrop` into separate defaults group ([#8567](https://github.com/tailwindlabs/tailwindcss/pull/8567)) +- Fix postcss plugin type ([#8564](https://github.com/tailwindlabs/tailwindcss/pull/8564)) +- Fix class detection in markdown code fences and slim templates ([#8569](https://github.com/tailwindlabs/tailwindcss/pull/8569)) + +## [3.1.0] - 2022-06-08 + +### Fixed + +- Types: allow for arbitrary theme values (for 3rd party plugins) ([#7926](https://github.com/tailwindlabs/tailwindcss/pull/7926)) +- Don’t split vars with numbers in them inside arbitrary values ([#8091](https://github.com/tailwindlabs/tailwindcss/pull/8091)) +- Require matching prefix when detecting negatives ([#8121](https://github.com/tailwindlabs/tailwindcss/pull/8121)) +- Handle duplicate At Rules without children ([#8122](https://github.com/tailwindlabs/tailwindcss/pull/8122)) +- Allow arbitrary values with commas in `@apply` ([#8125](https://github.com/tailwindlabs/tailwindcss/pull/8125)) +- Fix intellisense for plugins with multiple `@apply` rules ([#8213](https://github.com/tailwindlabs/tailwindcss/pull/8213)) +- Improve type detection for arbitrary color values ([#8201](https://github.com/tailwindlabs/tailwindcss/pull/8201)) +- Support PostCSS config options in config file in CLI ([#8226](https://github.com/tailwindlabs/tailwindcss/pull/8226)) +- Remove default `[hidden]` style in preflight ([#8248](https://github.com/tailwindlabs/tailwindcss/pull/8248)) +- Only check selectors containing base apply candidates for circular dependencies ([#8222](https://github.com/tailwindlabs/tailwindcss/pull/8222)) +- Rewrite default class extractor ([#8204](https://github.com/tailwindlabs/tailwindcss/pull/8204)) +- Move `important` selector to the front when `@apply`-ing selector-modifying variants in custom utilities ([#8313](https://github.com/tailwindlabs/tailwindcss/pull/8313)) +- Error when registering an invalid custom variant ([#8345](https://github.com/tailwindlabs/tailwindcss/pull/8345)) +- Create tailwind.config.cjs file in ESM package when running init ([#8363](https://github.com/tailwindlabs/tailwindcss/pull/8363)) +- Fix `matchVariant` that use at-rules and placeholders ([#8392](https://github.com/tailwindlabs/tailwindcss/pull/8392)) +- Improve types of the `tailwindcss/plugin` ([#8400](https://github.com/tailwindlabs/tailwindcss/pull/8400)) +- Allow returning parallel variants from `addVariant` or `matchVariant` callback functions ([#8455](https://github.com/tailwindlabs/tailwindcss/pull/8455)) +- Try using local `postcss` installation first in the CLI ([#8270](https://github.com/tailwindlabs/tailwindcss/pull/8270)) +- Allow default ring color to be a function ([#7587](https://github.com/tailwindlabs/tailwindcss/pull/7587)) +- Don't inherit `to` value from parent gradients ([#8489](https://github.com/tailwindlabs/tailwindcss/pull/8489)) +- Remove process dependency from log functions ([#8530](https://github.com/tailwindlabs/tailwindcss/pull/8530)) +- Ensure 
we can use `@import 'tailwindcss/...'` without node_modules ([#8537](https://github.com/tailwindlabs/tailwindcss/pull/8537)) + +### Changed + +- Only apply hover styles when supported (future) ([#8394](https://github.com/tailwindlabs/tailwindcss/pull/8394)) +- Respect default ring color opacity (future) ([#8448](https://github.com/tailwindlabs/tailwindcss/pull/8448), [3f4005e](https://github.com/tailwindlabs/tailwindcss/commit/3f4005e833445f7549219eb5ae89728cbb3a2630)) + +### Added + +- Support PostCSS `Document` nodes ([#7291](https://github.com/tailwindlabs/tailwindcss/pull/7291)) +- Add `text-start` and `text-end` utilities ([#6656](https://github.com/tailwindlabs/tailwindcss/pull/6656)) +- Support customizing class name when using `darkMode: 'class'` ([#5800](https://github.com/tailwindlabs/tailwindcss/pull/5800)) +- Add `--poll` option to the CLI ([#7725](https://github.com/tailwindlabs/tailwindcss/pull/7725)) +- Add new `border-spacing` utilities ([#7102](https://github.com/tailwindlabs/tailwindcss/pull/7102)) +- Add `enabled` variant ([#7905](https://github.com/tailwindlabs/tailwindcss/pull/7905)) +- Add TypeScript types for the `tailwind.config.js` file ([#7891](https://github.com/tailwindlabs/tailwindcss/pull/7891)) +- Add `backdrop` variant ([#7924](https://github.com/tailwindlabs/tailwindcss/pull/7924), [#8526](https://github.com/tailwindlabs/tailwindcss/pull/8526)) +- Add `grid-flow-dense` utility ([#8193](https://github.com/tailwindlabs/tailwindcss/pull/8193)) +- Add `mix-blend-plus-lighter` utility ([#8288](https://github.com/tailwindlabs/tailwindcss/pull/8288)) +- Add arbitrary variants ([#8299](https://github.com/tailwindlabs/tailwindcss/pull/8299)) +- Add experimental `matchVariant` API ([#8310](https://github.com/tailwindlabs/tailwindcss/pull/8310), [34fd0fb8](https://github.com/tailwindlabs/tailwindcss/commit/34fd0fb82aa574cddc5c7aa3ad7d1af5e3735e5d)) +- Add `prefers-contrast` media query variants ([#8410](https://github.com/tailwindlabs/tailwindcss/pull/8410)) +- Add opacity support when referencing colors with `theme` function ([#8416](https://github.com/tailwindlabs/tailwindcss/pull/8416)) +- Add `postcss-import` support to the CLI ([#8437](https://github.com/tailwindlabs/tailwindcss/pull/8437)) +- Add `optional` variant ([#8486](https://github.com/tailwindlabs/tailwindcss/pull/8486)) +- Add `` placeholder support for custom colors ([#8501](https://github.com/tailwindlabs/tailwindcss/pull/8501)) + +## [3.0.24] - 2022-04-12 + +### Fixed + +- Prevent nesting plugin from breaking other plugins ([#7563](https://github.com/tailwindlabs/tailwindcss/pull/7563)) +- Recursively collapse adjacent rules ([#7565](https://github.com/tailwindlabs/tailwindcss/pull/7565)) +- Preserve source maps for generated CSS ([#7588](https://github.com/tailwindlabs/tailwindcss/pull/7588)) +- Split box shadows on top-level commas only ([#7479](https://github.com/tailwindlabs/tailwindcss/pull/7479)) +- Use local user CSS cache for `@apply` ([#7524](https://github.com/tailwindlabs/tailwindcss/pull/7524)) +- Invalidate context when main CSS changes ([#7626](https://github.com/tailwindlabs/tailwindcss/pull/7626)) +- Only add `!` to selector class matching template candidate when using important modifier with multi-class selectors ([#7664](https://github.com/tailwindlabs/tailwindcss/pull/7664)) +- Correctly parse and prefix animation names with dots ([#7163](https://github.com/tailwindlabs/tailwindcss/pull/7163)) +- Fix extraction from template literal/function with array 
([#7481](https://github.com/tailwindlabs/tailwindcss/pull/7481)) +- Don't output unparsable arbitrary values ([#7789](https://github.com/tailwindlabs/tailwindcss/pull/7789)) +- Fix generation of `div:not(.foo)` if `.foo` is never defined ([#7815](https://github.com/tailwindlabs/tailwindcss/pull/7815)) +- Allow for custom properties in `rgb`, `rgba`, `hsl` and `hsla` colors ([#7933](https://github.com/tailwindlabs/tailwindcss/pull/7933)) +- Remove autoprefixer as explicit peer-dependency to avoid invalid warnings in situations where it isn't actually needed ([#7949](https://github.com/tailwindlabs/tailwindcss/pull/7949)) +- Ensure the `percentage` data type is validated correctly ([#8015](https://github.com/tailwindlabs/tailwindcss/pull/8015)) +- Make sure `font-weight` is inherited by form controls in all browsers ([#8078](https://github.com/tailwindlabs/tailwindcss/pull/8078)) + +### Changed + +- Replace `chalk` with `picocolors` ([#6039](https://github.com/tailwindlabs/tailwindcss/pull/6039)) +- Replace `cosmiconfig` with `lilconfig` ([#6039](https://github.com/tailwindlabs/tailwindcss/pull/6038)) +- Update `cssnano` to avoid removing empty variables when minifying ([#7818](https://github.com/tailwindlabs/tailwindcss/pull/7818)) + +## [3.0.23] - 2022-02-16 + +### Fixed + +- Remove opacity variables from `:visited` pseudo class ([#7458](https://github.com/tailwindlabs/tailwindcss/pull/7458)) +- Support arbitrary values + calc + theme with quotes ([#7462](https://github.com/tailwindlabs/tailwindcss/pull/7462)) +- Don't duplicate layer output when scanning content with variants + wildcards ([#7478](https://github.com/tailwindlabs/tailwindcss/pull/7478)) +- Implement `getClassOrder` instead of `sortClassList` ([#7459](https://github.com/tailwindlabs/tailwindcss/pull/7459)) + +## [3.0.22] - 2022-02-11 + +### Fixed + +- Temporarily move `postcss` to dependencies ([#7424](https://github.com/tailwindlabs/tailwindcss/pull/7424)) + +## [3.0.21] - 2022-02-10 + +### Fixed + +- Move prettier plugin to dev dependencies ([#7418](https://github.com/tailwindlabs/tailwindcss/pull/7418)) + +## [3.0.20] - 2022-02-10 + +### Added + +- Expose `context.sortClassList(classes)` ([#7412](https://github.com/tailwindlabs/tailwindcss/pull/7412)) + +## [3.0.19] - 2022-02-07 + +### Fixed + +- Fix preflight border color fallback ([#7288](https://github.com/tailwindlabs/tailwindcss/pull/7288)) +- Correctly parse shadow lengths without a leading zero ([#7289](https://github.com/tailwindlabs/tailwindcss/pull/7289)) +- Don't crash when scanning extremely long class candidates ([#7331](https://github.com/tailwindlabs/tailwindcss/pull/7331)) +- Use less hacky fix for URLs detected as custom properties ([#7275](https://github.com/tailwindlabs/tailwindcss/pull/7275)) +- Correctly generate negative utilities when dash is before the prefix ([#7295](https://github.com/tailwindlabs/tailwindcss/pull/7295)) +- Detect prefixed negative utilities in the safelist ([#7295](https://github.com/tailwindlabs/tailwindcss/pull/7295)) + +## [3.0.18] - 2022-01-28 + +### Fixed + +- Fix `@apply` order regression (in `addComponents`, `addUtilities`, ...) 
([#7232](https://github.com/tailwindlabs/tailwindcss/pull/7232)) +- Quick fix for incorrect arbitrary properties when using URLs ([#7252](https://github.com/tailwindlabs/tailwindcss/pull/7252)) + +## [3.0.17] - 2022-01-26 + +### Fixed + +- Remove false positive warning in CLI when using the `--content` option ([#7220](https://github.com/tailwindlabs/tailwindcss/pull/7220)) + +## [3.0.16] - 2022-01-24 + +### Fixed + +- Ensure to transpile the PostCSS Nesting plugin (tailwindcss/nesting) ([#7080](https://github.com/tailwindlabs/tailwindcss/pull/7080)) +- Improve various warnings ([#7118](https://github.com/tailwindlabs/tailwindcss/pull/7118)) +- Fix grammatical mistake ([cca5a38](https://github.com/tailwindlabs/tailwindcss/commit/cca5a3804e1d3ee0214491921e1aec35bf62a813)) + +## [3.0.15] - 2022-01-15 + +### Fixed + +- Temporarily remove optional chaining in nesting plugin ([#7077](https://github.com/tailwindlabs/tailwindcss/pull/7077)) + +## [3.0.14] - 2022-01-14 + +### Added + +- Show warnings for invalid content config ([#7065](https://github.com/tailwindlabs/tailwindcss/pull/7065)) + +### Fixed + +- Only emit utility/component variants when those layers exist ([#7066](https://github.com/tailwindlabs/tailwindcss/pull/7066)) +- Ensure nesting plugins can receive options ([#7016](https://github.com/tailwindlabs/tailwindcss/pull/7016)) + +## [3.0.13] - 2022-01-11 + +### Fixed + +- Fix consecutive builds with at apply producing different CSS ([#6999](https://github.com/tailwindlabs/tailwindcss/pull/6999)) + +## [3.0.12] - 2022-01-07 + +### Fixed + +- Allow use of falsy values in theme config ([#6917](https://github.com/tailwindlabs/tailwindcss/pull/6917)) +- Ensure we can apply classes that are grouped with non-class selectors ([#6922](https://github.com/tailwindlabs/tailwindcss/pull/6922)) +- Improve standalone CLI compatibility on Linux by switching to the `linuxstatic` build target ([#6914](https://github.com/tailwindlabs/tailwindcss/pull/6914)) +- Ensure `@apply` works consistently with or without `@layer` ([#6938](https://github.com/tailwindlabs/tailwindcss/pull/6938)) +- Only emit defaults when using base layer ([#6926](https://github.com/tailwindlabs/tailwindcss/pull/6926)) +- Emit plugin defaults regardless of usage ([#6926](https://github.com/tailwindlabs/tailwindcss/pull/6926)) +- Move default border color back to preflight ([#6926](https://github.com/tailwindlabs/tailwindcss/pull/6926)) +- Change `experimental.optimizeUniversalDefaults` to only work with `@tailwind base` ([#6926](https://github.com/tailwindlabs/tailwindcss/pull/6926)) + +## [3.0.11] - 2022-01-05 + +### Fixed + +- Preserve casing of CSS variables added by plugins ([#6888](https://github.com/tailwindlabs/tailwindcss/pull/6888)) +- Ignore content paths that are passed in but don't actually exist ([#6901](https://github.com/tailwindlabs/tailwindcss/pull/6901)) +- Revert change that applies Tailwind's defaults in isolated environments like CSS modules ([9fdc391](https://github.com/tailwindlabs/tailwindcss/commit/9fdc391d4ff93e7e350f5ce439060176b1f0162f)) + +## [3.0.10] - 2022-01-04 + +### Fixed + +- Fix `@apply` in files without `@tailwind` directives ([#6580](https://github.com/tailwindlabs/tailwindcss/pull/6580), [#6875](https://github.com/tailwindlabs/tailwindcss/pull/6875)) +- CLI: avoid unnecessary writes to output files ([#6550](https://github.com/tailwindlabs/tailwindcss/pull/6550)) + +### Added + +- Allow piping data into the CLI ([#6876](https://github.com/tailwindlabs/tailwindcss/pull/6876)) + +## [3.0.9] - 
2022-01-03 + +### Fixed + +- Improve `DEBUG` flag ([#6797](https://github.com/tailwindlabs/tailwindcss/pull/6797), [#6804](https://github.com/tailwindlabs/tailwindcss/pull/6804)) +- Ensure we can use `<` and `>` characters in modifiers ([#6851](https://github.com/tailwindlabs/tailwindcss/pull/6851)) +- Validate `theme()` works in arbitrary values ([#6852](https://github.com/tailwindlabs/tailwindcss/pull/6852)) +- Properly detect `theme()` value usage in arbitrary properties ([#6854](https://github.com/tailwindlabs/tailwindcss/pull/6854)) +- Improve collapsing of duplicate declarations ([#6856](https://github.com/tailwindlabs/tailwindcss/pull/6856)) +- Remove support for `TAILWIND_MODE=watch` ([#6858](https://github.com/tailwindlabs/tailwindcss/pull/6858)) + +## [3.0.8] - 2021-12-28 + +### Fixed + +- Reduce specificity of `abbr` rule in preflight ([#6671](https://github.com/tailwindlabs/tailwindcss/pull/6671)) +- Support HSL with hue units in arbitrary values ([#6726](https://github.com/tailwindlabs/tailwindcss/pull/6726)) +- Add `node16-linux-arm64` target for standalone CLI ([#6693](https://github.com/tailwindlabs/tailwindcss/pull/6693)) + +## [3.0.7] - 2021-12-17 + +### Fixed + +- Don't mutate custom color palette when overriding per-plugin colors ([#6546](https://github.com/tailwindlabs/tailwindcss/pull/6546)) +- Improve circular dependency detection when using `@apply` ([#6588](https://github.com/tailwindlabs/tailwindcss/pull/6588)) +- Only generate variants for non-`user` layers ([#6589](https://github.com/tailwindlabs/tailwindcss/pull/6589)) +- Properly extract classes with arbitrary values in arrays and classes followed by escaped quotes ([#6590](https://github.com/tailwindlabs/tailwindcss/pull/6590)) +- Improve jsx interpolation candidate matching ([#6593](https://github.com/tailwindlabs/tailwindcss/pull/6593)) +- Ensure `@apply` of a rule inside an AtRule works ([#6594](https://github.com/tailwindlabs/tailwindcss/pull/6594)) + +## [3.0.6] - 2021-12-16 + +### Fixed + +- Support square bracket notation in paths ([#6519](https://github.com/tailwindlabs/tailwindcss/pull/6519)) +- Ensure all plugins are executed for a given candidate ([#6540](https://github.com/tailwindlabs/tailwindcss/pull/6540)) + +## [3.0.5] - 2021-12-15 + +### Fixed + +- Revert: add `li` to list-style reset ([9777562d](https://github.com/tailwindlabs/tailwindcss/commit/9777562da37ee631bbf77374c0d14825f09ef9af)) + +## [3.0.4] - 2021-12-15 + +### Fixed + +- Insert always-on defaults layer in correct spot ([#6526](https://github.com/tailwindlabs/tailwindcss/pull/6526)) + +## [3.0.3] - 2021-12-15 + +### Added + +- Warn about invalid globs in `content` ([#6449](https://github.com/tailwindlabs/tailwindcss/pull/6449)) +- Add standalone tailwindcss CLI ([#6506](https://github.com/tailwindlabs/tailwindcss/pull/6506)) +- Add `li` to list-style reset ([00f60e6](https://github.com/tailwindlabs/tailwindcss/commit/00f60e61013c6e4e3419e4b699371a13eb30b75d)) + +### Fixed + +- Don't output unparsable values ([#6469](https://github.com/tailwindlabs/tailwindcss/pull/6469)) +- Fix text decoration utilities from overriding the new text decoration color/style/thickness utilities when used with a modifier ([#6378](https://github.com/tailwindlabs/tailwindcss/pull/6378)) +- Move defaults to their own always-on layer ([#6500](https://github.com/tailwindlabs/tailwindcss/pull/6500)) +- Support negative values in safelist patterns ([#6480](https://github.com/tailwindlabs/tailwindcss/pull/6480)) + +## [3.0.2] - 2021-12-13 + +### Fixed + +- 
Temporarily disable optimize universal defaults, fixes issue with transforms/filters/rings not being `@apply`-able in CSS modules/Svelte components/Vue components ([#6461](https://github.com/tailwindlabs/tailwindcss/pull/6461)) + +## [3.0.1] - 2021-12-10 + +### Fixed + +- Ensure complex variants with multiple classes work ([#6311](https://github.com/tailwindlabs/tailwindcss/pull/6311)) +- Re-add `default` interop to public available functions ([#6348](https://github.com/tailwindlabs/tailwindcss/pull/6348)) +- Detect circular dependencies when using `@apply` ([#6365](https://github.com/tailwindlabs/tailwindcss/pull/6365)) +- Fix defaults optimization when vendor prefixes are involved ([#6369](https://github.com/tailwindlabs/tailwindcss/pull/6369)) + +## [3.0.0] - 2021-12-09 + +### Fixed + +- Enforce the order of some variants (like `before` and `after`) ([#6018](https://github.com/tailwindlabs/tailwindcss/pull/6018)) + +### Added + +- Add `placeholder` variant ([#6106](https://github.com/tailwindlabs/tailwindcss/pull/6106)) +- Add composable `touch-action` utilities ([#6115](https://github.com/tailwindlabs/tailwindcss/pull/6115)) +- Add support for "arbitrary properties" ([#6161](https://github.com/tailwindlabs/tailwindcss/pull/6161)) +- Add `portrait` and `landscape` variants ([#6046](https://github.com/tailwindlabs/tailwindcss/pull/6046)) +- Add `text-decoration-style`, `text-decoration-thickness`, and `text-underline-offset` utilities ([#6004](https://github.com/tailwindlabs/tailwindcss/pull/6004)) +- Add `menu` reset to preflight ([#6213](https://github.com/tailwindlabs/tailwindcss/pull/6213)) +- Allow `0` as a valid `length` value ([#6233](https://github.com/tailwindlabs/tailwindcss/pull/6233), [#6259](https://github.com/tailwindlabs/tailwindcss/pull/6259)) +- Add CSS functions to data types ([#6258](https://github.com/tailwindlabs/tailwindcss/pull/6258)) +- Support negative values for `scale-*` utilities ([c48e629](https://github.com/tailwindlabs/tailwindcss/commit/c48e629955585ad18dadba9f470fda59cc448ab7)) +- Improve `length` data type, by validating each value individually ([#6283](https://github.com/tailwindlabs/tailwindcss/pull/6283)) + +### Changed + +- Deprecate `decoration-slice` and `decoration-break` in favor `box-decoration-slice` and `box-decoration-break` _(non-breaking)_ ([#6004](https://github.com/tailwindlabs/tailwindcss/pull/6004)) + +## [3.0.0-alpha.2] - 2021-11-08 + +### Changed + +- Don't use pointer cursor on disabled buttons by default ([#5772](https://github.com/tailwindlabs/tailwindcss/pull/5772)) +- Set default content value in preflight instead of within each before/after utility ([#5820](https://github.com/tailwindlabs/tailwindcss/pull/5820)) +- Remove `prefix` as a function ([#5829](https://github.com/tailwindlabs/tailwindcss/pull/5829)) + +### Added + +- Add `flex-basis` utilities ([#5671](https://github.com/tailwindlabs/tailwindcss/pull/5671)) +- Make negative values a first-class feature ([#5709](https://github.com/tailwindlabs/tailwindcss/pull/5709)) +- Add `fit-content` values for `min/max-width/height` utilities ([#5638](https://github.com/tailwindlabs/tailwindcss/pull/5638)) +- Add `min/max-content` values for `min/max-height` utilities ([#5729](https://github.com/tailwindlabs/tailwindcss/pull/5729)) +- Add all standard `cursor-*` values by default ([#5734](https://github.com/tailwindlabs/tailwindcss/pull/5734)) +- Add `grow-*` and `shrink-*` utilities, deprecate `flex-grow-*` and `flex-shrink-*` 
([#5733](https://github.com/tailwindlabs/tailwindcss/pull/5733)) +- Add `text-decoration-color` utilities ([#5760](https://github.com/tailwindlabs/tailwindcss/pull/5760)) +- Add new declarative `addVariant` API ([#5809](https://github.com/tailwindlabs/tailwindcss/pull/5809)) +- Add first-class `print` variant for targeting printed media ([#5885](https://github.com/tailwindlabs/tailwindcss/pull/5885)) +- Add `outline-style`, `outline-color`, `outline-width` and `outline-offset` utilities ([#5887](https://github.com/tailwindlabs/tailwindcss/pull/5887)) +- Add full color palette for `fill-*` and `stroke-*` utilities ([#5933](https://github.com/tailwindlabs/tailwindcss/pull/5933)) +- Add composable API for colored box shadows ([#5979](https://github.com/tailwindlabs/tailwindcss/pull/5979)) + +### Fixed + +- Configure chokidar's `awaitWriteFinish` setting to avoid occasional stale builds on Windows ([#5774](https://github.com/tailwindlabs/tailwindcss/pull/5774)) +- Fix CLI `--content` option ([#5775](https://github.com/tailwindlabs/tailwindcss/pull/5775)) +- Fix before/after utilities overriding custom content values at larger breakpoints ([#5820](https://github.com/tailwindlabs/tailwindcss/pull/5820)) +- Cleanup duplicate properties ([#5830](https://github.com/tailwindlabs/tailwindcss/pull/5830)) +- Allow `_` inside `url()` when using arbitrary values ([#5853](https://github.com/tailwindlabs/tailwindcss/pull/5853)) +- Prevent crashes when using comments in `@layer` AtRules ([#5854](https://github.com/tailwindlabs/tailwindcss/pull/5854)) +- Handle color transformations properly with `theme(...)` for all relevant plugins ([#4533](https://github.com/tailwindlabs/tailwindcss/pull/4533), [#5871](https://github.com/tailwindlabs/tailwindcss/pull/5871)) +- Ensure `@apply`-ing a utility with multiple definitions works ([#5870](https://github.com/tailwindlabs/tailwindcss/pull/5870)) + +## [3.0.0-alpha.1] - 2021-10-01 + +### Changed + +- Remove AOT engine, make JIT the default ([#5340](https://github.com/tailwindlabs/tailwindcss/pull/5340)) +- Throw when trying to `@apply` the `group` class ([#4666](https://github.com/tailwindlabs/tailwindcss/pull/4666)) +- Remove dependency on `modern-normalize`, inline and consolidate with Preflight ([#5358](https://github.com/tailwindlabs/tailwindcss/pull/5358)) +- Enable extended color palette by default with updated color names ([#5384](https://github.com/tailwindlabs/tailwindcss/pull/5384)) +- Move `vertical-align` values to config file instead of hard-coding ([#5487](https://github.com/tailwindlabs/tailwindcss/pull/5487)) +- Rename `overflow-clip` to `text-clip` and `overflow-ellipsis` to `text-ellipsis` ([#5630](https://github.com/tailwindlabs/tailwindcss/pull/5630)) + +### Added + +- Add native `aspect-ratio` utilities ([#5359](https://github.com/tailwindlabs/tailwindcss/pull/5359)) +- Unify config callback helpers into single object ([#5382](https://github.com/tailwindlabs/tailwindcss/pull/5382)) +- Preserve original color format when adding opacity whenever possible ([#5154](https://github.com/tailwindlabs/tailwindcss/pull/5154)) +- Add `accent-color` utilities ([#5387](https://github.com/tailwindlabs/tailwindcss/pull/5387)) +- Add `scroll-behavior` utilities ([#5388](https://github.com/tailwindlabs/tailwindcss/pull/5388)) +- Add `will-change` utilities ([#5448](https://github.com/tailwindlabs/tailwindcss/pull/5448)) +- Add `text-indent` utilities ([#5449](https://github.com/tailwindlabs/tailwindcss/pull/5449)) +- Add `column` utilities 
([#5457](https://github.com/tailwindlabs/tailwindcss/pull/5457)) +- Add `border-hidden` utility ([#5485](https://github.com/tailwindlabs/tailwindcss/pull/5485)) +- Add `align-sub` and `align-super` utilities by default ([#5486](https://github.com/tailwindlabs/tailwindcss/pull/5486)) +- Add `break-before`, `break-inside` and `break-after` utilities ([#5530](https://github.com/tailwindlabs/tailwindcss/pull/5530)) +- Add `file` variant for `::file-selector-button` pseudo element ([#4936](https://github.com/tailwindlabs/tailwindcss/pull/4936)) +- Add comprehensive arbitrary value support ([#5568](https://github.com/tailwindlabs/tailwindcss/pull/5568)) +- Add `touch-action` utilities ([#5603](https://github.com/tailwindlabs/tailwindcss/pull/5603)) +- Add `inherit` to default color palette ([#5597](https://github.com/tailwindlabs/tailwindcss/pull/5597)) +- Add `overflow-clip`, `overflow-x-clip` and `overflow-y-clip` utilities ([#5630](https://github.com/tailwindlabs/tailwindcss/pull/5630)) +- Add `[open]` variant ([#5627](https://github.com/tailwindlabs/tailwindcss/pull/5627)) +- Add `scroll-snap` utilities ([#5637](https://github.com/tailwindlabs/tailwindcss/pull/5637)) +- Add `border-x` and `border-y` width and color utilities ([#5639](https://github.com/tailwindlabs/tailwindcss/pull/5639)) + +### Fixed + +- Fix defining colors as functions when color opacity plugins are disabled ([#5470](https://github.com/tailwindlabs/tailwindcss/pull/5470)) +- Fix using negated `content` globs ([#5625](https://github.com/tailwindlabs/tailwindcss/pull/5625)) +- Fix using backslashes in `content` globs ([#5628](https://github.com/tailwindlabs/tailwindcss/pull/5628)) + +## [2.2.19] - 2021-10-29 + +### Fixed + +- Ensure `corePlugins` order is consistent in AOT mode ([#5928](https://github.com/tailwindlabs/tailwindcss/pull/5928)) + +## [2.2.18] - 2021-10-29 + +### Fixed + +- Bump versions for security vulnerabilities ([#5924](https://github.com/tailwindlabs/tailwindcss/pull/5924)) + +## [2.2.17] - 2021-10-13 + +### Fixed + +- Configure chokidar's `awaitWriteFinish` setting to avoid occasional stale builds on Windows ([#5758](https://github.com/tailwindlabs/tailwindcss/pull/5758)) + +## [2.2.16] - 2021-09-26 + +### Fixed + +- JIT: Properly handle animations that use CSS custom properties ([#5602](https://github.com/tailwindlabs/tailwindcss/pull/5602)) + +## [2.2.15] - 2021-09-10 + +### Fixed + +- Ensure using CLI without `-i` for input file continues to work even though deprecated ([#5464](https://github.com/tailwindlabs/tailwindcss/pull/5464)) + +## [2.2.14] - 2021-09-08 + +### Fixed + +- Only use `@defaults` in JIT, switch back to `clean-css` in case there's any meaningful differences in the output ([bf248cb](https://github.com/tailwindlabs/tailwindcss/commit/bf248cb0de889d48854fbdd26536f4a492556efd)) + +## [2.2.13] - 2021-09-08 + +### Fixed + +- Replace `clean-css` with `cssnano` for CDN builds to fix minified builds ([75cc3ca](https://github.com/tailwindlabs/tailwindcss/commit/75cc3ca305aedddc8a85f3df1a420fefad3fb5c4)) + +## [2.2.12] - 2021-09-08 + +### Fixed + +- Ensure that divide utilities inject a default border color ([#5438](https://github.com/tailwindlabs/tailwindcss/pull/5438)) + +## [2.2.11] - 2021-09-07 + +### Fixed + +- Rebundle to fix missing CLI peer dependencies + +## [2.2.10] - 2021-09-06 + +### Fixed + +- Fix build error when using `presets: []` in config file ([#4903](https://github.com/tailwindlabs/tailwindcss/pull/4903)) + +### Added + +- Reintroduce universal selector optimizations under 
experimental `optimizeUniversalDefaults` flag ([a9e160c](https://github.com/tailwindlabs/tailwindcss/commit/a9e160cf9acb75a2bbac34f8864568b12940f89a)) + +## [2.2.9] - 2021-08-30 + +### Fixed + +- JIT: Fix `@apply`ing utilities that contain variants + the important modifier ([#4854](https://github.com/tailwindlabs/tailwindcss/pull/4854)) +- JIT: Don't strip "null" when parsing tracked file paths ([#5008](https://github.com/tailwindlabs/tailwindcss/pull/5008)) +- Pin `clean-css` to v5.1.4 to fix empty CSS variables in CDN builds ([#5338](https://github.com/tailwindlabs/tailwindcss/pull/5338)) + +## [2.2.8] - 2021-08-27 + +### Fixed + +- Improve accessibility of default link focus styles in Firefox ([#5082](https://github.com/tailwindlabs/tailwindcss/pull/5082)) +- JIT: Fix animation variants corrupting keyframes rules ([#5223](https://github.com/tailwindlabs/tailwindcss/pull/5223)) +- JIT: Ignore escaped commas when splitting selectors to apply prefixes ([#5239](https://github.com/tailwindlabs/tailwindcss/pull/5239/)) +- Nesting: Maintain PostCSS node sources when handling `@apply` ([#5249](https://github.com/tailwindlabs/tailwindcss/pull/5249)) +- JIT: Fix support for animation lists ([#5252](https://github.com/tailwindlabs/tailwindcss/pull/5252)) +- JIT: Fix arbitrary value support for `object-position` utilities ([#5245](https://github.com/tailwindlabs/tailwindcss/pull/5245)) +- CLI: Abort watcher if stdin is closed to avoid zombie processes ([#4997](https://github.com/tailwindlabs/tailwindcss/pull/4997)) +- JIT: Ignore arbitrary values with unbalanced brackets ([#5293](https://github.com/tailwindlabs/tailwindcss/pull/5293)) + +## [2.2.7] - 2021-07-23 + +### Fixed + +- Temporarily revert runtime performance optimizations introduced in v2.2.5, use universal selector again ([#5060](https://github.com/tailwindlabs/tailwindcss/pull/5060)) + +## [2.2.6] - 2021-07-21 + +### Fixed + +- Fix issue where base styles were not generated for translate transforms in JIT ([#5038](https://github.com/tailwindlabs/tailwindcss/pull/5038)) + +## [2.2.5] - 2021-07-21 + +### Added + +- Added `self-baseline` utility (I know this is a patch release, no one's going to die relax) ([#5000](https://github.com/tailwindlabs/tailwindcss/pull/5000)) + +### Changed + +- JIT: Optimize universal selector usage by inlining only the relevant selectors ([#4850](https://github.com/tailwindlabs/tailwindcss/pull/4850)) + + This provides a very significant performance boost on pages with a huge number of DOM nodes, but there's a chance it could be a breaking change in very rare edge cases we haven't thought of. Please open an issue if anything related to shadows, rings, transforms, filters, or backdrop-filters seems to be behaving differently after upgrading. 
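+
+A minimal sketch of opting back into this behavior via the experimental `optimizeUniversalDefaults` flag referenced in the 2.2.10 and 3.0.12 entries above; the config shape is assumed from those entries and varies by version, so treat it as illustration only:
+
+```js
+// tailwind.config.js (illustrative sketch; flag availability and shape differ per release)
+module.exports = {
+  experimental: {
+    // the universal-defaults / universal selector optimization discussed above
+    optimizeUniversalDefaults: true,
+  },
+}
+```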
+ +### Fixed + +- Fix support for `step-start` and `step-end` in animation utilities ([#4795](https://github.com/tailwindlabs/tailwindcss/pull/4795)) +- JIT: Prevent presence of `!*` in templates from ruining everything ([#4816](https://github.com/tailwindlabs/tailwindcss/pull/4816)) +- JIT: Improve support for quotes in arbitrary values ([#4817](https://github.com/tailwindlabs/tailwindcss/pull/4817)) +- Fix filter/backdrop-filter/transform utilities being inserted into the wrong position if not all core plugins are enabled ([#4852](https://github.com/tailwindlabs/tailwindcss/pull/4852)) +- JIT: Fix `@layer` rules being mistakenly inserted during incremental rebuilds ([#4853](https://github.com/tailwindlabs/tailwindcss/pull/4853)) +- Improve build performance for projects with many small non-Tailwind stylesheets ([#4644](https://github.com/tailwindlabs/tailwindcss/pull/4644)) +- Ensure `[hidden]` works as expected on elements where we override the default `display` value in Preflight ([#4873](https://github.com/tailwindlabs/tailwindcss/pull/4873)) +- Fix variant configuration not being applied to `backdropOpacity` utilities ([#4892](https://github.com/tailwindlabs/tailwindcss/pull/4892)) + +## [2.2.4] - 2021-06-23 + +### Fixed + +- Remove `postinstall` script that was preventing people from installing the library ([1eacfb9](https://github.com/tailwindlabs/tailwindcss/commit/1eacfb98849c0d4737e0af3595ddec8c73addaac)) + +## [2.2.3] - 2021-06-23 + +### Added + +- Pass extended color palette to theme closures so it can be used without installing Tailwind when using `npx tailwindcss` ([359252c](https://github.com/tailwindlabs/tailwindcss/commit/359252c9b429e81217c28eb3ca7bab73d8f81e6d)) + +### Fixed + +- JIT: Explicitly error when `-` is used as a custom separator ([#4704](https://github.com/tailwindlabs/tailwindcss/pull/4704)) +- JIT: Don't add multiple `~` when stacking `peer-*` variants ([#4757](https://github.com/tailwindlabs/tailwindcss/pull/4757)) +- Remove outdated focus style fix in Preflight ([#4780](https://github.com/tailwindlabs/tailwindcss/pull/4780)) +- Enable `purge` if provided on the CLI ([#4772](https://github.com/tailwindlabs/tailwindcss/pull/4772)) +- JIT: Fix error when not using a config file with postcss-cli ([#4773](https://github.com/tailwindlabs/tailwindcss/pull/4773)) +- Fix issue with `resolveConfig` not being importable in Next.js pages ([#4725](https://github.com/tailwindlabs/tailwindcss/pull/4725)) + +## [2.2.2] - 2021-06-18 + +### Fixed + +- JIT: Reintroduce `transform`, `filter`, and `backdrop-filter` classes purely to create stacking contexts to minimize the impact of the breaking change ([#4700](https://github.com/tailwindlabs/tailwindcss/pull/4700)) + +## [2.2.1] - 2021-06-18 + +### Fixed + +- Recover from errors gracefully in CLI watch mode ([#4693](https://github.com/tailwindlabs/tailwindcss/pull/4693)) +- Fix issue with media queries not being generated properly when using PostCSS 7 ([#4695](https://github.com/tailwindlabs/tailwindcss/pull/4695)) + +## [2.2.0] - 2021-06-17 + +### Changed + +- JIT: Use "tracking" context by default instead of "watching" context for improved reliability with most bundlers ([#4514](https://github.com/tailwindlabs/tailwindcss/pull/4514)) + + Depending on which tooling you use, you may need to explicitly set `TAILWIND_MODE=watch` until your build runner has been updated to support PostCSS's `dir-dependency` message type. 
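+
+For example, assuming a plain `postcss-cli` build (the file paths below are placeholders), the variable can be set inline when starting the watcher:
+
+```sh
+# assumes postcss-cli; adapt the command to your own build runner
+TAILWIND_MODE=watch npx postcss src/styles.css -o dist/styles.css --watch
+```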
+ +### Added + +- Add `background-origin` utilities ([#4117](https://github.com/tailwindlabs/tailwindcss/pull/4117)) +- Improve `@apply` performance in projects that process many CSS sources ([#3178](https://github.com/tailwindlabs/tailwindcss/pull/3718)) +- JIT: Don't use CSS variables for color utilities if color opacity utilities are disabled ([#3984](https://github.com/tailwindlabs/tailwindcss/pull/3984)) +- JIT: Redesign `matchUtilities` API to make it more suitable for third-party use ([#4232](https://github.com/tailwindlabs/tailwindcss/pull/4232)) +- JIT: Support applying important utility variants ([#4260](https://github.com/tailwindlabs/tailwindcss/pull/4260)) +- JIT: Support coercing arbitrary values when the type isn't detectable ([#4263](https://github.com/tailwindlabs/tailwindcss/pull/4263)) +- JIT: Support for `raw` syntax in `purge` config ([#4272](https://github.com/tailwindlabs/tailwindcss/pull/4272)) +- Add `empty` variant ([#3298](https://github.com/tailwindlabs/tailwindcss/pull/3298)) +- Update `modern-normalize` to v1.1 ([#4287](https://github.com/tailwindlabs/tailwindcss/pull/4287)) +- Implement `theme` function internally, remove `postcss-functions` dependency ([#4317](https://github.com/tailwindlabs/tailwindcss/pull/4317)) +- Add `screen` function to improve nesting plugin compatibility ([#4318](https://github.com/tailwindlabs/tailwindcss/pull/4318)) +- JIT: Add universal shorthand color opacity syntax ([#4348](https://github.com/tailwindlabs/tailwindcss/pull/4348)) +- JIT: Add `@tailwind variants` directive to replace `@tailwind screens` ([#4356](https://github.com/tailwindlabs/tailwindcss/pull/4356)) +- JIT: Add support for PostCSS `dir-dependency` messages in `TAILWIND_DISABLE_TOUCH` mode ([#4388](https://github.com/tailwindlabs/tailwindcss/pull/4388)) +- JIT: Add per-side border color utilities ([#4404](https://github.com/tailwindlabs/tailwindcss/pull/4404)) +- JIT: Add support for `before` and `after` pseudo-element variants and `content` utilities ([#4461](https://github.com/tailwindlabs/tailwindcss/pull/4461)) +- Add new `transform` and `extract` APIs to simplify PurgeCSS/JIT customization ([#4469](https://github.com/tailwindlabs/tailwindcss/pull/4469)) +- JIT: Add exhaustive pseudo-class and pseudo-element variant support ([#4482](https://github.com/tailwindlabs/tailwindcss/pull/4482)) +- JIT: Add `caret-color` utilities ([#4499](https://github.com/tailwindlabs/tailwindcss/pull/4499)) +- Rename `lightBlue` to `sky`, emit console warning when using deprecated name ([#4513](https://github.com/tailwindlabs/tailwindcss/pull/4513)) +- New CLI with improved JIT support, `--watch` mode, and more ([#4526](https://github.com/tailwindlabs/tailwindcss/pull/4526), [4558](https://github.com/tailwindlabs/tailwindcss/pull/4558)) +- JIT: Add new `peer-*` variants for styling based on sibling state ([#4556](https://github.com/tailwindlabs/tailwindcss/pull/4556)) +- Expose `safelist` as a top-level option under `purge` for both JIT and classic engines ([#4580](https://github.com/tailwindlabs/tailwindcss/pull/4580)) +- JIT: Remove need for `transform` class when using classes like `scale-*`, `rotate-*`, etc. ([#4604](https://github.com/tailwindlabs/tailwindcss/pull/4604)) +- JIT: Remove need for `filter` and `backdrop-filter` classes when using classes like `contrast-*`, `backdrop-blur-*`, etc. 
([#4614](https://github.com/tailwindlabs/tailwindcss/pull/4614)) +- Support passing a custom path for your PostCSS configuration in the Tailwind CLI ([#4607](https://github.com/tailwindlabs/tailwindcss/pull/4607)) +- Add `blur-none` by default with intent to deprecate `blur-0` ([#4614](https://github.com/tailwindlabs/tailwindcss/pull/4614)) + +### Fixed + +- JIT: Improve support for Svelte class bindings ([#4187](https://github.com/tailwindlabs/tailwindcss/pull/4187)) +- JIT: Improve support for `calc` and `var` in arbitrary values ([#4147](https://github.com/tailwindlabs/tailwindcss/pull/4147)) +- Convert `hsl` colors to `hsla` when transforming for opacity support instead of `rgba` ([#3850](https://github.com/tailwindlabs/tailwindcss/pull/3850)) +- Fix `backdropBlur` variants not being generated ([#4188](https://github.com/tailwindlabs/tailwindcss/pull/4188)) +- Improve animation value parsing ([#4250](https://github.com/tailwindlabs/tailwindcss/pull/4250)) +- Ignore unknown object types when hashing config ([82f4eaa](https://github.com/tailwindlabs/tailwindcss/commit/82f4eaa6832ef8a4e3fd90869e7068efdf6e34f2)) +- Ensure variants are grouped properly for plugins with order-dependent utilities ([#4273](https://github.com/tailwindlabs/tailwindcss/pull/4273)) +- JIT: Fix temp file storage when node temp directories are kept on a different drive than the project itself ([#4044](https://github.com/tailwindlabs/tailwindcss/pull/4044)) +- Support border-opacity utilities alongside default `border` utility ([#4277](https://github.com/tailwindlabs/tailwindcss/pull/4277)) +- JIT: Fix source maps for expanded `@tailwind` directives ([2f15411](https://github.com/tailwindlabs/tailwindcss/commit/2f1541123dea29d8a2ab0f1411bf60c79eeb96b4)) +- JIT: Ignore whitespace when collapsing adjacent rules ([15642fb](https://github.com/tailwindlabs/tailwindcss/commit/15642fbcc885eba9cc50b7678a922b09c90d6b51)) +- JIT: Generate group parent classes correctly when using custom separator ([#4508](https://github.com/tailwindlabs/tailwindcss/pull/4508)) +- JIT: Fix incorrect stacking of multiple `group` variants ([#4551](https://github.com/tailwindlabs/tailwindcss/pull/4551)) +- JIT: Fix memory leak due to holding on to unused contexts ([#4571](https://github.com/tailwindlabs/tailwindcss/pull/4571)) + +### Internals + +- Add integration tests for popular build runners ([#4354](https://github.com/tailwindlabs/tailwindcss/pull/4354)) + +## [2.1.4] - 2021-06-02 + +### Fixed + +- Skip `raw` PurgeCSS sources when registering template dependencies ([#4542](https://github.com/tailwindlabs/tailwindcss/pull/4542)) + +## [2.1.3] - 2021-06-01 + +### Fixed + +- Register PurgeCSS paths as PostCSS dependencies to guarantee proper cache-busting in webpack 5 ([#4530](https://github.com/tailwindlabs/tailwindcss/pull/4530)) + +## [2.1.2] - 2021-04-23 + +### Fixed + +- Fix issue where JIT engine would generate the wrong CSS when using PostCSS 7 ([#4078](https://github.com/tailwindlabs/tailwindcss/pull/4078)) + +## [2.1.1] - 2021-04-05 + +### Fixed + +- Fix issue where JIT engine would fail to compile when a source path isn't provided by the build runner for the current input file ([#3978](https://github.com/tailwindlabs/tailwindcss/pull/3978)) + +## [2.1.0] - 2021-04-05 + +### Added + +- Add alternate JIT engine (in preview) ([#3905](https://github.com/tailwindlabs/tailwindcss/pull/3905)) +- Add new `mix-blend-mode` and `background-blend-mode` utilities ([#3920](https://github.com/tailwindlabs/tailwindcss/pull/3920)) +- Add new 
`box-decoration-break` utilities ([#3911](https://github.com/tailwindlabs/tailwindcss/pull/3911)) +- Add new `isolation` utilities ([#3914](https://github.com/tailwindlabs/tailwindcss/pull/3914)) +- Add `inline-table` display utility ([#3563](https://github.com/tailwindlabs/tailwindcss/pull/3563)) +- Add `list-item` display utility ([#3929](https://github.com/tailwindlabs/tailwindcss/pull/3929)) +- Add new `filter` and `backdrop-filter` utilities ([#3923](https://github.com/tailwindlabs/tailwindcss/pull/3923)) + +## [2.0.4] - 2021-03-17 + +### Fixed + +- Pass full `var(--bg-opacity)` value as `opacityValue` when defining colors as functions + +## [2.0.3] - 2021-02-07 + +### Fixed + +- Ensure sourcemap input is deterministic when using `@apply` in Vue components ([#3356](https://github.com/tailwindlabs/tailwindcss/pull/3356)) +- Ensure placeholder opacity is consistent across browsers ([#3308](https://github.com/tailwindlabs/tailwindcss/pull/3308)) +- Fix issue where `theme()` didn't work with colors defined as functions ([#2919](https://github.com/tailwindlabs/tailwindcss/pull/2919)) +- Enable `dark` variants by default for color opacity utilities ([#2975](https://github.com/tailwindlabs/tailwindcss/pull/2975)) + +### Added + +- Add support for a `tailwind.config.cjs` file in Node ESM projects ([#3181](https://github.com/tailwindlabs/tailwindcss/pull/3181)) +- Add version comment to Preflight ([#3255](https://github.com/tailwindlabs/tailwindcss/pull/3255)) +- Add `cursor-help` by default ([#3199](https://github.com/tailwindlabs/tailwindcss/pull/3199)) + +## [2.0.2] - 2020-12-11 + +### Fixed + +- Fix issue with `@apply` not working as expected with `!important` inside an at-rule ([#2824](https://github.com/tailwindlabs/tailwindcss/pull/2824)) +- Fix issue with `@apply` not working as expected with defined classes ([#2832](https://github.com/tailwindlabs/tailwindcss/pull/2832)) +- Fix memory leak, and broken `@apply` when splitting up files ([#3032](https://github.com/tailwindlabs/tailwindcss/pull/3032)) + +### Added + +- Add default values for the `ring` utility ([#2951](https://github.com/tailwindlabs/tailwindcss/pull/2951)) + +## [2.0.1] - 2020-11-18 + +- Nothing, just the only thing I could do when I found out npm won't let me publish the same version under two tags. 
+ +## [2.0.0] - 2020-11-18 + +### Added + +- Add redesigned color palette ([#2623](https://github.com/tailwindlabs/tailwindcss/pull/2623), [700866c](https://github.com/tailwindlabs/tailwindcss/commit/700866ce5e0c0b8d140be161c4d07fc6f31242bc), [#2633](https://github.com/tailwindlabs/tailwindcss/pull/2633)) +- Add dark mode support ([#2279](https://github.com/tailwindlabs/tailwindcss/pull/2279), [#2631](https://github.com/tailwindlabs/tailwindcss/pull/2631)) +- Add `overflow-ellipsis` and `overflow-clip` utilities ([#1289](https://github.com/tailwindlabs/tailwindcss/pull/1289)) +- Add `transform-gpu` to force hardware acceleration on transforms when desired ([#1380](https://github.com/tailwindlabs/tailwindcss/pull/1380)) +- Extend default spacing scale ([#2630](https://github.com/tailwindlabs/tailwindcss/pull/2630), [7f05204](https://github.com/tailwindlabs/tailwindcss/commit/7f05204ce7a5581b6845591448265c3c21afde86)) +- Add spacing scale to `inset` plugin ([#2630](https://github.com/tailwindlabs/tailwindcss/pull/2630)) +- Add percentage sizes to `translate`, `inset`, and `height` plugins ([#2630](https://github.com/tailwindlabs/tailwindcss/pull/2630), [5259560](https://github.com/tailwindlabs/tailwindcss/commit/525956065272dc53e8f8395f55f9ad13077a38d1)) +- Extend default font size scale ([#2609](https://github.com/tailwindlabs/tailwindcss/pull/2609), [#2619](https://github.com/tailwindlabs/tailwindcss/pull/2619)) +- Support using `@apply` with complex classes, including variants like `lg:hover:bg-blue-500` ([#2159](https://github.com/tailwindlabs/tailwindcss/pull/2159)) +- Add new `2xl` breakpoint at 1536px by default ([#2609](https://github.com/tailwindlabs/tailwindcss/pull/2609)) +- Add default line-height values for font-size utilities ([#2609](https://github.com/tailwindlabs/tailwindcss/pull/2609)) +- Support defining theme values using arrays for CSS properties that support comma separated values ([e13f083c4](https://github.com/tailwindlabs/tailwindcss/commit/e13f083c4bc48bf9870d27c966136a9584943127)) +- Enable `group-hover` for color plugins, `boxShadow`, and `textDecoration` by default ([28985b6](https://github.com/tailwindlabs/tailwindcss/commit/28985b6cd592e72d4849fdb9ce97eb045744e09c), [f6923b1](https://github.com/tailwindlabs/tailwindcss/commit/f6923b1)) +- Enable `focus` for z-index utilities by default ([ae5b3d3](https://github.com/tailwindlabs/tailwindcss/commit/ae5b3d312d5000ae9c2065001f3df7add72dc365)) +- Support `extend` in `variants` configuration ([#2651](https://github.com/tailwindlabs/tailwindcss/pull/2651)) +- Add `max-w-prose` class by default ([#2574](https://github.com/tailwindlabs/tailwindcss/pull/2574)) +- Support flattening deeply nested color objects ([#2148](https://github.com/tailwindlabs/tailwindcss/pull/2148)) +- Support defining presets as functions ([#2680](https://github.com/tailwindlabs/tailwindcss/pull/2680)) +- Support deep merging of objects under `extend` ([#2679](https://github.com/tailwindlabs/tailwindcss/pull/2679), [#2700](https://github.com/tailwindlabs/tailwindcss/pull/2700)) +- Enable `focus-within` for all plugins that have `focus` enabled by default ([1a21f072](https://github.com/tailwindlabs/tailwindcss/commit/1a21f0721c7368d61fa3feef33d616de3f78c7d7), [f6923b1](https://github.com/tailwindlabs/tailwindcss/commit/f6923b1)) +- Added new `ring` utilities for creating outline/focus rings using box shadows ([#2747](https://github.com/tailwindlabs/tailwindcss/pull/2747), [879f088](https://github.com/tailwindlabs/tailwindcss/commit/879f088), 
[e0788ef](https://github.com/tailwindlabs/tailwindcss/commit/879f088)) +- Added `5` and `95` to opacity scale ([#2747](https://github.com/tailwindlabs/tailwindcss/pull/2747)) +- Add support for default duration and timing function values whenever enabling transitions ([#2755](https://github.com/tailwindlabs/tailwindcss/pull/2755)) + +### Changed + +- Completely redesign color palette ([#2623](https://github.com/tailwindlabs/tailwindcss/pull/2623), [700866c](https://github.com/tailwindlabs/tailwindcss/commit/700866ce5e0c0b8d140be161c4d07fc6f31242bc), [#2633](https://github.com/tailwindlabs/tailwindcss/pull/2633)) +- Drop support for Node 8 and 10 ([#2582](https://github.com/tailwindlabs/tailwindcss/pull/2582)) +- Removed `target` feature and dropped any compatibility with IE 11 ([#2571](https://github.com/tailwindlabs/tailwindcss/pull/2571)) +- Upgrade to PostCSS 8 (but include PostCSS 7 compatibility build) ([729b400](https://github.com/tailwindlabs/tailwindcss/commit/729b400a685973f46af73c8a68b364f20f7c5e1e), [1d8679d](https://github.com/tailwindlabs/tailwindcss/commit/1d8679d37e0eb1ba8281b2076bade5fc754f47dd), [c238ed1](https://github.com/tailwindlabs/tailwindcss/commit/c238ed15b5c02ff51978965511312018f2bc2cae)) +- Removed `shadow-outline`, `shadow-solid`, and `shadow-xs` by default in favor of new `ring` API ([#2747](https://github.com/tailwindlabs/tailwindcss/pull/2747)) +- Switch `normalize.css` to `modern-normalize` ([#2572](https://github.com/tailwindlabs/tailwindcss/pull/2572)) +- Rename `whitespace-no-wrap` to `whitespace-nowrap` ([#2664](https://github.com/tailwindlabs/tailwindcss/pull/2664)) +- Rename `flex-no-wrap` to `flex-nowrap` ([#2676](https://github.com/tailwindlabs/tailwindcss/pull/2676)) +- Remove `clearfix` utility, recommend `flow-root` instead ([#2766](https://github.com/tailwindlabs/tailwindcss/pull/2766)) +- Disable `hover` and `focus` for `fontWeight` utilities by default ([f6923b1](https://github.com/tailwindlabs/tailwindcss/commit/f6923b1)) +- Remove `grid-gap` fallbacks needed for old versions of Safari ([5ec45fa](https://github.com/tailwindlabs/tailwindcss/commit/5ec45fa)) +- Change special use of 'default' in config to 'DEFAULT' ([#2580](https://github.com/tailwindlabs/tailwindcss/pull/2580)) +- New `@apply` implementation, slight backwards incompatibilities with previous behavior ([#2159](https://github.com/tailwindlabs/tailwindcss/pull/2159)) +- Make `theme` retrieve the expected resolved value when theme value is complex ([e13f083c4](https://github.com/tailwindlabs/tailwindcss/commit/e13f083c4bc48bf9870d27c966136a9584943127)) +- Move `truncate` class to `textOverflow` core plugin ([#2562](https://github.com/tailwindlabs/tailwindcss/pull/2562)) +- Remove `scrolling-touch` and `scrolling-auto` utilities ([#2573](https://github.com/tailwindlabs/tailwindcss/pull/2573)) +- Modernize default system font stacks ([#1711](https://github.com/tailwindlabs/tailwindcss/pull/1711)) +- Upgrade to PurgeCSS 3.0 ([8e4e0a0](https://github.com/tailwindlabs/tailwindcss/commit/8e4e0a0eb8dcbf84347c7562988b4f9afd344081)) +- Change default `text-6xl` font-size to 3.75rem instead of 4rem ([#2619](https://github.com/tailwindlabs/tailwindcss/pull/2619)) +- Ignore `[hidden]` elements within `space` and `divide` utilities instead of `template` elements ([#2642](https://github.com/tailwindlabs/tailwindcss/pull/2642)) +- Automatically prefix keyframes and animation names when a prefix is configured ([#2621](https://github.com/tailwindlabs/tailwindcss/pull/2621), 
[#2641](https://github.com/tailwindlabs/tailwindcss/pull/2641)) +- Merge `extend` objects deeply by default ([#2679](https://github.com/tailwindlabs/tailwindcss/pull/2679)) +- Respect `preserveHtmlElements` option even when using custom PurgeCSS extractor ([#2704](https://github.com/tailwindlabs/tailwindcss/pull/2704)) +- Namespace all internal custom properties under `tw-` to avoid collisions with end-user custom properties ([#2771](https://github.com/tailwindlabs/tailwindcss/pull/2771)) + +## [2.0.0-alpha.25] - 2020-11-17 + +### Fixed + +- Fix issue where `ring-offset-0` didn't work due to unitless `0` in `calc` function ([3de0c48](https://github.com/tailwindlabs/tailwindcss/commit/3de0c48)) + +## [2.0.0-alpha.24] - 2020-11-16 + +### Changed + +- Don't override ring color when overriding ring width with a variant ([e40079a](https://github.com/tailwindlabs/tailwindcss/commit/e40079a)) + +### Fixed + +- Prevent shadow/ring styles from cascading to children ([e40079a](https://github.com/tailwindlabs/tailwindcss/commit/e40079a)) +- Ensure rings have a default color even if `colors.blue.500` is not present in config ([e40079a](https://github.com/tailwindlabs/tailwindcss/commit/e40079a)) + +## [2.0.0-alpha.23] - 2020-11-16 + +### Added + +- Add scripts for generating a PostCSS 7 compatible build alongside PostCSS 8 version ([#2773](https://github.com/tailwindlabs/tailwindcss/pull/2773)) + +### Changed + +- All custom properties have been internally namespaced under `tw-` to avoid collisions with end-user custom properties ([#2771](https://github.com/tailwindlabs/tailwindcss/pull/2771)) + +## [2.0.0-alpha.22] - 2020-11-16 + +### Changed + +- ~~All custom properties have been internally namespaced under `tw-` to avoid collisions with end-user custom properties ([#2771](https://github.com/tailwindlabs/tailwindcss/pull/2771))~~ I made a git boo-boo, check alpha.23 instead + +## [2.0.0-alpha.21] - 2020-11-15 + +### Changed + +- Upgrade to PostCSS 8, Autoprefixer 10, move `postcss` and `autoprefixer` to peerDependencies ([729b400](https://github.com/tailwindlabs/tailwindcss/commit/729b400)) + +## [2.0.0-alpha.20] - 2020-11-13 + +### Changed + +- Remove `clearfix` utility, recommend `flow-root` instead ([#2766](https://github.com/tailwindlabs/tailwindcss/pull/2766)) + +## [2.0.0-alpha.19] - 2020-11-13 + +### Fixed + +- Don't crash when color palette is empty ([278c203](https://github.com/tailwindlabs/tailwindcss/commit/278c203)) + +## [2.0.0-alpha.18] - 2020-11-13 + +### Changed + +- `black` and `white` have been added to `colors.js` ([b3ed724](https://github.com/tailwindlabs/tailwindcss/commit/b3ed724)) + +### Fixed + +- Add support for colors as closures to `ringColor` and `ringOffsetColor`, previously would crash build ([62a47f9](https://github.com/tailwindlabs/tailwindcss/commit/62a47f9)) + +## [2.0.0-alpha.17] - 2020-11-13 + +### Changed + +- Remove `grid-gap` fallbacks needed for old versions of Safari ([5ec45fa](https://github.com/tailwindlabs/tailwindcss/commit/5ec45fa)) + +## [2.0.0-alpha.16] - 2020-11-12 + +### Added + +- Enable `focus`, `focus-within`, and `dark` variants (when enabled) for all ring utilities by default ([e0788ef](https://github.com/tailwindlabs/tailwindcss/commit/879f088)) + +## [2.0.0-alpha.15] - 2020-11-11 + +### Added + +- Added `ring-inset` utility for rendering rings as inset shadows ([879f088](https://github.com/tailwindlabs/tailwindcss/commit/879f088)) + +### Changed + +- `ringWidth` utilities always reset ring styles to ensure no accidental variable inheritance 
through the cascade ([879f088](https://github.com/tailwindlabs/tailwindcss/commit/879f088)) + +## [2.0.0-alpha.14] - 2020-11-11 + +### Added + +- Enable `focus-within` for `outline` utilities by default ([f6923b1](https://github.com/tailwindlabs/tailwindcss/commit/f6923b1)) +- Enable `focus-within` for `ringWidth` utilities by default ([f6923b1](https://github.com/tailwindlabs/tailwindcss/commit/f6923b1)) +- Enable `group-hover` for `boxShadow` utilities by default ([f6923b1](https://github.com/tailwindlabs/tailwindcss/commit/f6923b1)) +- Enable `group-hover` and `focus-within` for `textDecoration` utilities by default ([f6923b1](https://github.com/tailwindlabs/tailwindcss/commit/f6923b1)) + +### Changed + +- Disable `hover` and `focus` for `fontWeight` utilities by default ([f6923b1](https://github.com/tailwindlabs/tailwindcss/commit/f6923b1)) + +## [2.0.0-alpha.13] - 2020-11-11 + +### Added + +- Add support for default duration and timing function values whenever enabling transitions ([#2755](https://github.com/tailwindlabs/tailwindcss/pull/2755)) + +## [2.0.0-alpha.12] - 2020-11-10 + +### Fixed + +- Prevent `boxShadow` utilities from overriding ring shadows added by components like in the custom forms plugin ([c3dd3b6](https://github.com/tailwindlabs/tailwindcss/commit/c3dd3b68454ad418833a9edf7f3409cad66fb5b0)) + +## [2.0.0-alpha.11] - 2020-11-09 + +### Fixed + +- Convert `none` to `0 0 #0000` when used for shadows to ensure compatibility with `ring` utilities ([4eecc27](https://github.com/tailwindlabs/tailwindcss/commit/4eecc2751ca0c461e8da5bd5772ae650197a2e5d)) + +## [2.0.0-alpha.10] - 2020-11-09 + +### Added + +- Added new `ring` utilities ([#2747](https://github.com/tailwindlabs/tailwindcss/pull/2747)) +- Added `5` and `95` to opacity scale ([#2747](https://github.com/tailwindlabs/tailwindcss/pull/2747)) + +### Changed + +- Removed `shadow-outline`, `shadow-solid`, and `shadow-xs` in favor of new `ring` API ([#2747](https://github.com/tailwindlabs/tailwindcss/pull/2747)) + +## [2.0.0-alpha.9] - 2020-11-07 + +### Added + +- Added `shadow-solid` utility, a 2px solid shadow that uses the current text color ([369cfae](https://github.com/tailwindlabs/tailwindcss/commit/369cfae2905a577033529c46a5e8ca58c69f5623)) +- Enable `focus-within` where useful by default ([1a21f072](https://github.com/tailwindlabs/tailwindcss/commit/1a21f0721c7368d61fa3feef33d616de3f78c7d7)) + +### Changed + +- Update `shadow-outline` to use the new blue ([b078238](https://github.com/tailwindlabs/tailwindcss/commit/b0782385c9832d35a10929b38b4fcaf27e055d6b)) + +## [2.0.0-alpha.8] - 2020-11-06 + +### Added + +- Add `11` to spacing scale ([7f05204](https://github.com/tailwindlabs/tailwindcss/commit/7f05204ce7a5581b6845591448265c3c21afde86)) +- Add percentage-based height values ([5259560](https://github.com/tailwindlabs/tailwindcss/commit/525956065272dc53e8f8395f55f9ad13077a38d1)) +- Add indigo to the color palette by default ([700866c](https://github.com/tailwindlabs/tailwindcss/commit/700866ce5e0c0b8d140be161c4d07fc6f31242bc)) + +### Changed + +- Use `coolGray` as the default gray ([700866c](https://github.com/tailwindlabs/tailwindcss/commit/700866ce5e0c0b8d140be161c4d07fc6f31242bc)) + +## [2.0.0-alpha.7] - 2020-11-05 + +### Changed + +- Revert upgrading to PostCSS 8 lol + +## [2.0.0-alpha.6] - 2020-11-04 + +### Changed + +- Respect `preserveHtmlElements` option even when using custom PurgeCSS extractor ([#2704](https://github.com/tailwindlabs/tailwindcss/pull/2704)) +- Set font-family and line-height to `inherit` on 
`body` to behave more like v1.x ([#2729](https://github.com/tailwindlabs/tailwindcss/pull/2729)) + +## [2.0.0-alpha.5] - 2020-10-30 + +### Changed + +- Upgrade to PostCSS 8 ([59aa484](https://github.com/tailwindlabs/tailwindcss/commit/59aa484dfea0607d96bff6ef41b1150c78576c37)) + +## [2.0.0-alpha.4] - 2020-10-29 + +### Added + +- Support deep merging of arrays of objects under `extend` ([#2700](https://github.com/tailwindlabs/tailwindcss/pull/2700)) + +## [2.0.0-alpha.3] - 2020-10-27 + +### Added + +- Support flattening deeply nested color objects ([#2148](https://github.com/tailwindlabs/tailwindcss/pull/2148)) +- Support defining presets as functions ([#2680](https://github.com/tailwindlabs/tailwindcss/pull/2680)) + +### Changed + +- Merge `extend` objects deeply by default ([#2679](https://github.com/tailwindlabs/tailwindcss/pull/2679)) +- Rename `flex-no-wrap` to `flex-nowrap` ([#2676](https://github.com/tailwindlabs/tailwindcss/pull/2676)) + +## [2.0.0-alpha.2] - 2020-10-25 + +### Added + +- Support `extend` in `variants` configuration ([#2651](https://github.com/tailwindlabs/tailwindcss/pull/2651)) +- Add `max-w-prose` class by default ([#2574](https://github.com/tailwindlabs/tailwindcss/pull/2574)) + +### Changed + +- Revert use of logical properties for `space` and `divide` utilities ([#2644](https://github.com/tailwindlabs/tailwindcss/pull/2644)) +- `space` and `divide` utilities ignore elements with `[hidden]` now instead of only ignoring `template` elements ([#2642](https://github.com/tailwindlabs/tailwindcss/pull/2642)) +- Set default font on `body`, not just `html` ([#2643](https://github.com/tailwindlabs/tailwindcss/pull/2643)) +- Automatically prefix keyframes and animation names when a prefix is configured ([#2621](https://github.com/tailwindlabs/tailwindcss/pull/2621), [#2641](https://github.com/tailwindlabs/tailwindcss/pull/2641)) +- Rename `whitespace-no-wrap` to `whitespace-nowrap` ([#2664](https://github.com/tailwindlabs/tailwindcss/pull/2664)) + +## [1.9.6] - 2020-10-23 + +### Changed + +- The `presets` feature had unexpected behavior where a preset config without its own `presets` key would not extend the default config. ([#2662](https://github.com/tailwindlabs/tailwindcss/pull/2662)) + + If you were depending on this unexpected behavior, just add `presets: []` to your own preset to exclude the default configuration. 
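+
+A minimal sketch of such a preset (file name and theme contents are placeholders):
+
+```js
+// my-preset.js (hypothetical preset module)
+module.exports = {
+  // opt out of extending the default configuration, per the note above
+  presets: [],
+  theme: {
+    extend: {},
+  },
+}
+```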
+ +## [2.0.0-alpha.1] - 2020-10-20 + +### Added + +- Added dark mode support ([#2279](https://github.com/tailwindlabs/tailwindcss/pull/2279), [#2631](https://github.com/tailwindlabs/tailwindcss/pull/2631)) +- Added `overflow-ellipsis` and `overflow-clip` utilities ([#1289](https://github.com/tailwindlabs/tailwindcss/pull/1289)) +- Add `transform-gpu` to force hardware acceleration on transforms when beneficial ([#1380](https://github.com/tailwindlabs/tailwindcss/pull/1380)) +- Extended spacing scale ([#2630](https://github.com/tailwindlabs/tailwindcss/pull/2630)) +- Add spacing scale to `inset` plugin ([#2630](https://github.com/tailwindlabs/tailwindcss/pull/2630)) +- Enable useful relative sizes for more plugins ([#2630](https://github.com/tailwindlabs/tailwindcss/pull/2630)) +- Extend font size scale ([#2609](https://github.com/tailwindlabs/tailwindcss/pull/2609), [#2619](https://github.com/tailwindlabs/tailwindcss/pull/2619)) +- Support using `@apply` with complex classes ([#2159](https://github.com/tailwindlabs/tailwindcss/pull/2159)) +- Add new `2xl` breakpoint ([#2609](https://github.com/tailwindlabs/tailwindcss/pull/2609)) +- Add default line-height values for font-size utilities ([#2609](https://github.com/tailwindlabs/tailwindcss/pull/2609)) +- Support defining theme values using arrays wherever it makes sense (box-shadow, transition-property, etc.) ([e13f083c4](https://github.com/tailwindlabs/tailwindcss/commit/e13f083c4bc48bf9870d27c966136a9584943127)) +- Enable `group-hover` for color utilities by default ([28985b6](https://github.com/tailwindlabs/tailwindcss/commit/28985b6cd592e72d4849fdb9ce97eb045744e09c)) +- Enable `focus` for z-index utilities by default ([ae5b3d3](https://github.com/tailwindlabs/tailwindcss/commit/ae5b3d312d5000ae9c2065001f3df7add72dc365)) + +### Changed + +- New `@apply` implementation, slight backwards incompatibilities with previous behavior ([#2159](https://github.com/tailwindlabs/tailwindcss/pull/2159)) +- Move `truncate` class to `textOverflow` core plugin ([#2562](https://github.com/tailwindlabs/tailwindcss/pull/2562)) +- Removed `target` feature and dropped any compatibility with IE 11 ([#2571](https://github.com/tailwindlabs/tailwindcss/pull/2571)) +- Switch `normalize.css` to `modern-normalize` ([#2572](https://github.com/tailwindlabs/tailwindcss/pull/2572)) +- Remove `scrolling-touch` and `scrolling-auto` utilities ([#2573](https://github.com/tailwindlabs/tailwindcss/pull/2573)) +- Change special use of 'default' in config to 'DEFAULT' ([#2580](https://github.com/tailwindlabs/tailwindcss/pull/2580)) +- Drop support for Node 8 and 10 ([#2582](https://github.com/tailwindlabs/tailwindcss/pull/2582)) +- Modernize default system font stacks ([#1711](https://github.com/tailwindlabs/tailwindcss/pull/1711)) +- Upgrade to PurgeCSS 3.0 +- ~~Upgrade to PostCSS 8.0~~ Reverted for now +- Use logical properties for `space` and `divide` utilities ([#1883](https://github.com/tailwindlabs/tailwindcss/pull/1883)) +- Make `theme` retrieve the expected resolved value when theme value is complex ([e13f083c4](https://github.com/tailwindlabs/tailwindcss/commit/e13f083c4bc48bf9870d27c966136a9584943127)) +- Adjust default font-size scale to include 60px instead of 64px ([#2619](https://github.com/tailwindlabs/tailwindcss/pull/2619)) +- Update default colors in Preflight to match new color palette ([#2633](https://github.com/tailwindlabs/tailwindcss/pull/2633)) + +## [1.9.5] - 2020-10-19 + +### Fixed + +- Fix issue where using `theme` with default line-heights did not 
resolve correctly + +## [1.9.4] - 2020-10-17 + +### Fixed + +- Fix issue where changing plugins defined using the `withOptions` API would not trigger rebuilds in watch processes + +## [1.9.3] - 2020-10-16 + +### Fixed + +- Fix issue where `tailwindcss init --full` scaffolded a corrupt config file (https://github.com/tailwindlabs/tailwindcss/issues/2556) + +### Changed + +- Remove console warnings about upcoming breaking changes + +## [1.9.2] - 2020-10-14 + +### Fixed + +- Merge plugins when merging config with preset ([#2561](https://github.com/tailwindlabs/tailwindcss/pull/2561)) +- Use `word-wrap` and `overflow-wrap` together, not one or the other since `word-wrap` is IE-only + +## [1.9.1] - 2020-10-14 + +### Fixed + +- Don't import `corePlugins` in `resolveConfig` to avoid bundling browser-incompatible code ([#2548](https://github.com/tailwindlabs/tailwindcss/pull/2548)) + +## [1.9.0] - 2020-10-12 + +### Added + +- Add new `presets` config option ([#2474](https://github.com/tailwindlabs/tailwindcss/pull/2474)) +- Scaffold new `tailwind.config.js` files with available `future` flags commented out ([#2379](https://github.com/tailwindlabs/tailwindcss/pull/2379)) +- Add `col-span-full` and `row-span-full` ([#2471](https://github.com/tailwindlabs/tailwindcss/pull/2471)) +- Make `outline` configurable, `outline-none` more accessible by default, and add `outline-black` and `outline-white` ([#2460](https://github.com/tailwindlabs/tailwindcss/pull/2460)) +- Add additional small `rotate` and `skew` values ([#2528](https://github.com/tailwindlabs/tailwindcss/pull/2528)) +- Add `xl`, `2xl`, and `3xl` border radius values ([#2529](https://github.com/tailwindlabs/tailwindcss/pull/2529)) +- Add new utilities for `grid-auto-columns` and `grid-auto-rows` ([#2531](https://github.com/tailwindlabs/tailwindcss/pull/2531)) +- Promote `defaultLineHeights` and `standardFontWeights` from experimental to future + +### Fixed + +- Don't escape keyframe values ([#2432](https://github.com/tailwindlabs/tailwindcss/pull/2432)) +- Use `word-wrap` instead of `overflow-wrap` in `ie11` target mode ([#2391](https://github.com/tailwindlabs/tailwindcss/pull/2391)) + +### Experimental + +- Add experimental `2xl` breakpoint ([#2468](https://github.com/tailwindlabs/tailwindcss/pull/2468)) +- Rename `{u}-max-content` and `{u}-min-content` utilities to `{u}-max` and `{u}-min` in experimental extended spacing scale ([#2532](https://github.com/tailwindlabs/tailwindcss/pull/2532)) +- Support disabling dark mode variants globally ([#2530](https://github.com/tailwindlabs/tailwindcss/pull/2530)) + +## [1.8.13] - 2020-10-09 + +### Fixed + +- Support defining colors as closures even when opacity variables are not supported ([#2536](https://github.com/tailwindlabs/tailwindcss/pull/2515)) + +## [1.8.12] - 2020-10-07 + +### Fixed + +- Reset color opacity variable in utilities generated using closure colors ([#2515](https://github.com/tailwindlabs/tailwindcss/pull/2515)) + +## [1.8.11] - 2020-10-06 + +- Make `tailwindcss.plugin` work in ESM environments for reasons + +## [1.8.10] - 2020-09-14 + +### Fixed + +- Prevent new `dark` experiment from causing third-party `dark` variants to inherit stacking behavior ([#2382](https://github.com/tailwindlabs/tailwindcss/pull/2382)) + +## [1.8.9] - 2020-09-13 + +### Fixed + +- Add negative spacing values to inset plugin in the `extendedSpacingScale` experiment ([#2358](https://github.com/tailwindlabs/tailwindcss/pull/2358)) +- Fix issue where `!important` was stripped from declarations within rules that 
used `@apply` with `applyComplexClasses` ([#2376](https://github.com/tailwindlabs/tailwindcss/pull/2376)) + +### Changed + +- Add `future` section to config stubs ([#2372](https://github.com/tailwindlabs/tailwindcss/pull/2372), [3090b98](https://github.com/tailwindlabs/tailwindcss/commit/3090b98ece766b1046abe5bbaa94204e811f7fac)) + +## [1.8.8] - 2020-09-11 + +### Fixed + +- Register dark mode plugin outside of `resolveConfig` code path ([#2368](https://github.com/tailwindlabs/tailwindcss/pull/2368)) + +## [1.8.7] - 2020-09-10 + +### Fixed + +- Fix issue where classes in escaped strings (like `class=\"block\"`) weren't extracted properly for purging ([#2364](https://github.com/tailwindlabs/tailwindcss/pull/2364)) + +## [1.8.6] - 2020-09-09 + +### Fixed + +- Fix issue where container padding not applied when using object syntax ([#2353](https://github.com/tailwindlabs/tailwindcss/pull/2353)) + +## [1.8.5] - 2020-09-07 + +### Fixed + +- Fix issue where `resolveConfig` didn't take into account configs added by feature flags ([#2347](https://github.com/tailwindlabs/tailwindcss/pull/2347)) + +## [1.8.4] - 2020-09-06 + +### Fixed + +- Fix [issue](https://github.com/tailwindlabs/tailwindcss/issues/2258) where inserting extra PurgeCSS control comments could break integrated PurgeCSS support +- Fix issue where dark variant in 'class' mode was incompatible with 'group-hover' variant ([#2337](https://github.com/tailwindlabs/tailwindcss/pull/2337)) +- Support basic nesting structure with `@apply` when using the `applyComplexClasses` experiment ([#2271](https://github.com/tailwindlabs/tailwindcss/pull/2271)) + +### Changed + +- Rename `font-hairline` and `font-thin` to `font-thin` and `font-extralight` behind `standardFontWeights` flag (experimental until v1.9.0) ([#2333](https://github.com/tailwindlabs/tailwindcss/pull/2333)) + +## [1.8.3] - 2020-09-05 + +### Fixed + +- Fix issue where `font-variant-numeric` utilities would break in combination with most CSS minifier configurations ([f3660ce](https://github.com/tailwindlabs/tailwindcss/commit/f3660ceed391cfc9390ca4ea1a729a955e64b895)) +- Only warn about `conservative` purge mode being deprecated once per process ([58781b5](https://github.com/tailwindlabs/tailwindcss/commit/58781b517daffbaf80fc5c0791d311f53b2d67d8)) + +## [1.8.2] - 2020-09-04 + +### Fixed + +- Fix bug where dark mode variants would cause an error if you had a `plugins` array in your config ([#2322](https://github.com/tailwindlabs/tailwindcss/pull/2322)) + +## [1.8.1] - 2020-09-04 + +### Fixed + +- Fix bug in the new font-variant-numeric utilities which broke the whole rule ([#2318](https://github.com/tailwindlabs/tailwindcss/pull/2318)) +- Fix bug while purging ([#2320](https://github.com/tailwindlabs/tailwindcss/pull/2320)) + +## [1.8.0] - 2020-09-04 + +### Added + +- Dark mode variant (experimental) ([#2279](https://github.com/tailwindlabs/tailwindcss/pull/2279)) +- New `preserveHtmlElements` option for `purge` ([#2283](https://github.com/tailwindlabs/tailwindcss/pull/2283)) +- New `layers` mode for `purge` ([#2288](https://github.com/tailwindlabs/tailwindcss/pull/2288)) +- New `font-variant-numeric` utilities ([#2305](https://github.com/tailwindlabs/tailwindcss/pull/2305)) +- New `place-items`, `place-content`, `place-self`, `justify-items`, and `justify-self` utilities ([#2306](https://github.com/tailwindlabs/tailwindcss/pull/2306)) +- Support configuring variants as functions ([#2309](https://github.com/tailwindlabs/tailwindcss/pull/2309)) + +### Changed + +- CSS within `@layer` 
at-rules are now grouped with the corresponding `@tailwind` at-rule ([#2312](https://github.com/tailwindlabs/tailwindcss/pull/2312)) + +### Deprecated + +- `conservative` purge mode, deprecated in favor of `layers` + +## [1.7.6] - 2020-08-29 + +### Fixed + +- Fix bug where the new experimental `@apply` implementation broke when applying a variant class with the important option globally enabled + +## [1.7.5] - 2020-08-28 + +### Changed + +- Update lodash to latest to silence security warnings + +## [1.7.4] - 2020-08-26 + +### Added + +- Add new -p flag to CLI to quickly scaffold a `postcss.config.js` file + +### Changed + +- Make `@apply` insensitive to whitespace in the new `applyComplexClasses` experiment + +### Fixed + +- Fix bug where the new `applyComplexClasses` experiment didn't behave as expected with rules with multiple selectors, like `.foo, .bar { color: red }` + +## [1.7.3] - 2020-08-20 + +### Changed + +- Log feature flag notices to stderr instead of stdout to preserve compatibility with pipe-based build systems +- Add missing bg-none utility for disabling background images + +### Fixed + +- Fix bug that prevented defining colors as closures when the `gradientColorStops` plugin was enabled + +## [1.7.2] - 2020-08-19 + +### Added + +- Reuse generated CSS as much as possible in long-running processes instead of needlessly recalculating + +## [1.7.1] - 2020-08-28 + +### Changed + +- Don't issue duplicate flag notices in long-running build processes + +## [1.7.0] - 2020-08-28 + +### Added + +- Gradients +- New background-clip utilities +- New `contents` display utility +- Default letter-spacing per font-size +- Divide border styles +- Access entire config object from plugins +- Define colors as closures +- Use `@apply` with variants and other complex classes (experimental) +- New additional color-palette (experimental) +- Extended spacing scale (experimental) +- Default line-heights per font-size by default (experimental) +- Extended font size scale (experimental) + +### Deprecated + +- Deprecated gap utilities + +## [1.6.3] - 2020-08-18 + +### Fixed + +- Fixes issue where motion-safe and motion-reduce variants didn't stack correctly with group-hover variants + +## [1.6.2] - 2020-08-03 + +### Fixed + +- Fixes issue where `@keyframes` respecting the important option would break animations in Chrome + +## [1.6.1] - 2020-08-02 + +### Fixed + +- Fixes an issue where animation keyframes weren't included in the build without @tailwind base (#2108) + +## [1.6.0] - 2020-07-28 + +### Added + +- Animation support +- New `prefers-reduced-motion` variants +- New `overscroll-behaviour` utilities +- Generate CSS without an input file + +## [1.5.2] - 2020-07-21 + +### Fixed + +- Fixes issue where you could no longer use `@apply` with unprefixed class names if you had configured a prefix + +## [1.5.1] - 2020-07-15 + +### Fixed + +- Fixes accidental breaking change where adding component variants using the old manual syntax (as recommended in the docs) stopped working + +## [1.5.0] - 2020-07-15 + +### Added + +- Component `variants` support +- Responsive `container` variants +- New `focus-visible` variant +- New `checked` variant + +## v0.0.0-658250a96 - 2020-07-12 [YANKED] + +No release notes + +## [1.4.6] - 2020-05-08 + +### Changed + +- Explicitly error when using a class as the important config option instead of just generating the wrong CSS + +## [1.4.5] - 2020-05-06 + +### Fixed + +- Fix bug where the `divideColor` plugin was using the wrong '' in IE11 target mode + +## [1.4.4] - 2020-05-01 
+ +### Fixed + +- Fix bug where target: 'browserslist' didn't work, only `target: ['browserslist', {...}]` did + +## [1.4.3] - 2020-05-01 + +### Changed + +- Don't generate unnecessary CSS in color plugins when color opacity utilities are disabled + +## [1.4.2] - 2020-05-01 + +### Fixed + +- Fix issue where `purge: { enabled: false }` was ignored, add `purge: false` shorthand + +## [1.4.1] - 2020-04-30 + +### Changed + +- Improve built-in PurgeCSS extractor to better support Haml and Slim templates + +## [1.4.0] - 2020-04-29 + +### Added + +- New color opacity utilities +- Built-in PurgeCSS +- IE 11 target mode (experimental) + +## [1.3.5] - 2020-04-23 + +### Removed + +- Drop `fs-extra` dependency to `^8.0.0` to preserve Node 8 compatibility until Tailwind 2.0 + +### Fixed + +- Fix missing unit in calc bug in space plugin (`space-x-0` didn't work for example) + +## [1.3.4] - 2020-04-21 + +### Fixed + +- Fix bug where `divide-{x/y}-0` utilities didn't work due to missing unit in `calc` call + +## [1.3.3] - 2020-04-21 + +### Added + +- Add forgotten responsive variants for `space`, `divideWidth`, and `divideColor` utilities + +## [1.3.1] - 2020-04-21 + +### Fixed + +- Fix bug where the `space-x` utilities were not being applied correctly due to referencing `--space-y-reverse` instead of `--space-x-reverse` + +## [1.3.0] - 2020-04-21 + +### Added + +- New `space` and `divide` layout utilities +- New `transition-delay` utilities +- New `group-focus` variant +- Support for specifying a default line-height for each font-size utility +- Support for breakpoint-specific padding for `container` class +- Added `current` to the default color palette +- New `inline-grid` utility +- New `flow-root` display utility +- New `clear-none` utility + +## [1.2.0] - 2020-02-05 + +### Added + +- CSS Transition support +- CSS Transform support +- CSS Grid support +- Added `max-w-{screen}` utilities +- Added `max-w-none` utility +- Added `rounded-md` utility +- Added `shadow-sm` utility +- Added `shadow-xs` utility +- Added `stroke-width` utilities +- Added fixed line-height utilities +- Added additional display utilities for table elements +- Added box-sizing utilities +- Added clear utilities +- Config file dependencies are now watchable +- Added new `plugin` and `plugin.withOptions` APIs + +### Changed + +- Allow plugins to extend the user's config + +## [1.2.0-canary.8] - 2020-02-05 + +### Added + +- Add additional fixed-size line-height utilities + +## [1.2.0-canary.7] - 2020-02-04 + +### Removed + +- Remove Inter from font-sans, plan to add later under new class + +## [1.2.0-canary.6] - 2020-02-03 + +### Added + +- Add system-ui to default font stack +- Add shadow-xs, increase shadow-sm alpha to 0.05 +- Support import syntax even without postcss-import +- Alias tailwind bin to tailwindcss +- Add fill/stroke to transition-colors +- Add transition-shadow, add box-shadow to default transition +- Combine gap/columnGap/rowGap +- Add grid row utilities +- Add skew utilities + +### Changed + +- Use font-sans as default font + +## [1.2.0-canary.5] - 2020-01-08 + +### Added + +- Adds missing dependency `resolve` which is required for making config dependencies watchable + +## [1.2.0-canary.4] - 2020-01-08 + +### Added + +- CSS Transition support +- CSS Transform support +- CSS Grid support +- New `max-w-{screen}` utilities +- Added `max-w-none` utility +- Added "Inter" to the default sans-serif font stack +- Add `rounded-md` utility +- Add `shadow-sm` utility +- Added stroke-width utilities +- Added additional 
display utilities for table elements +- Added box-sizing utilities +- Added clear utilities +- Config file dependencies are now watchable +- Allow plugins to extend the user's config +- Add new `plugin` and `plugin.withOptions` APIs + +## [v1.2.0-canary.3] - 2020-01-08 [YANKED] + +No release notes + +## [1.1.4] - 2019-11-25 + +### Changed + +- Note: Although this is a bugfix it could affect your site if you were working around the bug in your own code by not prefixing the `.group` class. I'm sorry 😞 + +### Fixed + +- Fixes a bug where the `.group` class was not receiving the user's configured prefix when using the `prefix` option + +## [1.2.0-canary.1] - 2019-10-22 + +### Changed + +- Don't watch `node_modules` files for changes + +### Fixed + +- Fixes significant build performance regression in `v1.2.0-canary.0` + +## [1.1.3] - 2019-10-22 + +### Fixed + +- Fixes an issue where in some cases function properties in the user's `theme` config didn't receive the second utils argument + +## [1.2.0-canary.0] - 2019-10-14 + +### Added + +- Automatically watch all config file dependencies (plugins, design tokens imported from other files, etc.) for changes when build watcher is running +- Add `justify-evenly` utility + +### Changed + +- Allow plugins to add their own config file to be resolved with the user's custom config + +## [1.1.2] - 2019-08-14 + +### Fixed + +- Fixes a bug with horizontal rules where they were displayed with a 2px border instead of a 1px border +- Fixes a bug with horizontal rules where they were rendered with default top/bottom margin + +## [1.1.1] - 2019-08-09 + +### Fixed + +- Fixes issue where values like `auto` would fail to make it through the default negative margin config + +## [1.1.0] - 2019-08-06 + +### Added + +- Added utilities for screenreader visibility +- Added utilities for placeholder color +- First, last, even, and odd child variants +- Disabled variant +- Visited variant +- Increase utility specificity using a scope instead of !important +- Add hover/focus variants for opacity by default +- Added `border-double` utility +- Support negative prefix for boxShadow and letterSpacing plugins +- Support passing config path via object + +### Fixed + +- Placeholders no longer have a default opacity +- Make horizontal rules visible by default +- Generate correct negative margins when using calc + +## [1.0.6] - 2019-08-01 + +### Fixed + +- Fixes issue where modifiers would mutate nested rules + +## [1.0.5] - 2019-07-11 + +### Added + +- Support built-in variants for utilities that include pseudo-elements + +### Changed + +- Update several dependencies, including postcss-js which fixes an issue with using `!important` directly in Tailwind utility plugins + +## [1.0.4] - 2019-06-11 + +### Changed + +- Increase precision of percentage width values to avoid 1px rounding issues in grid layouts + +## [1.0.3] - 2019-06-01 + +### Changed + +- Throws an error when someone tries to use `@tailwind preflight` instead of `@tailwind base`, this is the source of many support requests + +## [1.0.2] - 2019-05-27 + +### Fixed + +- Fixes a bug where `@screen` rules weren't bubbled properly when nested in plugins + +## [1.0.1] - 2019-05-13 + +### Fixed + +- Fixes a bug where global variants weren't properly merged + +## [1.0.0] - 2019-05-13 + +No release notes + +## [1.0.0-beta.10] - 2019-05-12 + +### Changed + +- Use `9999` and `-9999` for `order-last` and `order-first` utilities respectively + +## [1.0.0-beta.9] - 2019-05-12 + +### Added + +- Add `bg-repeat-round` and 
`bg-repeat-space` utilities +- Add `select-all` and `select-auto` utilities + +### Changed + +- Make all utilities responsive by default + +## [1.0.0-beta.8] - 2019-04-28 + +### Added + +- Adds `responsive` variants for the new order utilities by default, should have been there all along + +## [1.0.0-beta.7] - 2019-04-27 + +### Fixed + +- Fixes a bug where you couldn't extend the margin config + +## [1.0.0-beta.6] - 2019-04-27 + +### Added + +- Added support for negative inset (`-top-6`, `-right-4`) and z-index (`-z-10`) utilities, using the same negative key syntax supported by the margin plugin +- Add missing fractions as well as x/12 fractions to width scale +- Add `order` utilities +- Add `cursor-text` class by default + +### Changed + +- Make it possible to access your fully merged config file in JS + +### Removed + +- Removed `negativeMargin` plugin, now the regular `margin` plugin supports generating negative classes (like `-mx-6`) by using negative keys in the config, like `-6` + +## [1.0.0-beta.5] - 2019-04-18 + +### Changed + +- Make it possible to disable all core plugins using `corePlugins: false` +- Make it possible to configure a single list of variants that applies to all utility plugins +- Make it possible to safelist which core plugins should be enabled + +### Fixed + +- Fix a bug where stroke and fill plugins didn't properly handle the next object syntax for color definitions +- Fix a bug where you couldn't have comments near `@apply` directives + +## [1.0.0-beta.4] - 2019-03-29 + +### Added + +- Add the `container` key to the scaffolded config file when generated with `--full` + +### Changed + +- Bumps node dependency to 8.9.0 so we can keep our default config file clean, 6.9.0 is EOL next month anyways + +### Removed + +- Removes `SFMono-Regular` from the beginning of the default monospace font stack, it has no italic support and Menlo looks better anyways + +### Fixed + +- Fixes an issue where the user's config object was being mutated during processing (only affects @bradlc 😅) +- Fixes an issue where you couldn't use a closure to define theme sections under `extend` + +## [1.0.0-beta.3] - 2019-03-18 + +### Added + +- Support lazy evaluation in `theme.extend` + +### Changed + +- Use lighter default border color +- Revert #745 and use `bolder` for strong tags by default instead of `fontWeight.bold` + +## [1.0.0-beta.2] - 2019-03-17 + +### Changed + +- Closures in the `theme` section of the config file are now passed a `theme` function instead of an object + +### Fixed + +- Fix issue where `@screen` didn't work at all 🙃 + +## [1.0.0-beta.1] - 2019-03-17 + +### Added + +- New config file structure +- New expanded default color palette +- New default `maxWidth` scale +- Added utilities for `list-style-type` and `list-style-position` +- Added `break-all` utility + +### Changed + +- `object-position` utilities are now customizable under `theme.objectPosition` +- `cursor` utilities are now customizable under `theme.cursors` +- `flex-grow/shrink` utilities are now customizable under `theme.flexGrow/flexShrink` +- Default variant output position can be customized +- Extended default line-height scale +- Extended default letter-spacing scale + +## [0.7.4] - 2019-01-23 + +### Changed + +- Update our PostCSS related dependencies + +### Fixed + +- Fix bug where class names containing a `.`character had the responsive prefix added in the wrong place + +## [0.7.3] - 2018-12-03 + +### Changed + +- Update Normalize to v8.0.1 + +## [0.7.2] - 2018-11-05 + +### Added + +- Add 
`--no-autoprefixer` option to CLI `build` command + +## [0.7.1] - 2018-11-05 + +### Changed + +- Update autoprefixer dependency + +## [0.7.0] - 2018-10-31 + +### Added + +- Registering new variants from plugins +- Variant order can be customized per module +- Added focus-within variant +- Fancy CLI updates +- Option to generate config without comments +- Make configured prefix optional when using @apply +- Improve Flexbox behavior in IE 10/11 + +### Changed + +- Variant order in modules is now significant +- Normalize.css updated to v8.0.0 +- Removed CSS fix for Chrome 62 button border radius change + +## [0.6.6] - 2018-09-21 + +### Changed + +- Promote `shadowLookup` from experiment to official feature + +## [0.6.5] - 2018-08-18 + +### Fixed + +- Fixes an issue where units were stripped from zero value properties + +## [0.6.4] - 2018-07-16 + +### Fixed + +- Fixes an issue where changes to your configuration file were ignored when using `webpack --watch` + +## [0.6.3] - 2018-07-11 + +### Fixed + +- Fixes an issue where `@tailwind utilities` generated no output + +## [0.6.2] - 2018-03-11 + +### Added + +- Added table layout utilities for styling tables +- Configuration can now be passed as an object +- Registering new variants from plugins (experimental) +- Allow `@apply`-ing classes that aren't defined but would be generated (experimental) + +### Changed + +- Default config file changes + +## [0.6.1] - 2018-06-22 + +### Fixed + +- Fix incorrect box-shadow syntax for the `.shadow-outline` utility 🤦‍♂️ + +## [0.6.0] - 2018-06-21 + +### Added + +- Added border collapse utilities for styling tables +- Added more axis-specific overflow utilities +- Added `.outline-none` utility for suppressing focus styles +- Added `.shadow-outline` utility as an alternative to default browser focus styles +- Extended default padding, margin, negative margin, width, and height scales +- Enable focus and hover variants for more modules by default + +### Changed + +- Removed default `outline: none !important` styles from focusable but keyboard-inaccessible elements +- Moved screen prefix for responsive `group-hover` variants +- Default config file changes + +## [0.5.3] - 2018-05-07 + +### Changed + +- Improve sourcemaps for replaced styles like `preflight` + +### Fixed + +- Fix bug where informational messages were being logged to stdout during build, preventing the ability to use Tailwind's output in Unix pipelines + +## [0.5.2] - 2018-03-29 + +### Fixed + +- Fixes an issue with a dependency that had a security vulnerability + +## [0.5.1] - 2018-03-13 + +### Removed + +- Reverts a change that renamed the `.roman` class to `.not-italic` due to the fact that it breaks compatibility with cssnext: [postcss/postcss-selector-not#10](https://github.com/postcss/postcss-selector-not/issues/10). We'll stick with `.roman` for now with a plan to switch to `.not-italic` in another breaking version should that issue get resolved in postcss-selector-not. 
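For reference, the two utility names mentioned in the note above emit the same declaration; a minimal sketch of the generated CSS, assuming default, unprefixed output:

```css
/* Pre-1.0 name, retained here for postcss-selector-not compatibility (see note above) */
.roman {
  font-style: normal;
}

/* Replacement name, adopted in a later breaking release */
.not-italic {
  font-style: normal;
}
```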
+ +## [0.5.0] - 2018-03-13 + +### Added + +- Plugin system +- Added `.sticky position` utility +- Added `.cursor-wait` and `.cursor-move` utilities +- Added `.bg-auto` background size utility +- Background sizes are now customizable +- Support for active variants +- Better postcss-import support +- Configuration options for the `.container` component + +### Changed + +- The `.container` component is now a built-in plugin +- State variant precedence changes +- New config file keys +- `.overflow-x/y-scroll` now set `overflow: scroll` instead of `overflow: auto` +- `.roman` renamed to `.not-italic` + +## [0.4.3] - 2018-03-13 + +### Changed + +- Use `global.Object` to avoid issues with polyfills when importing the Tailwind config into other JS + +## [0.4.2] - 2018-03-01 + +### Added + +- Add support for using a function to define class prefixes in addition to a simple string + +### Changed + +- Improve the performance of @apply by using a lookup table instead of searching + +### Fixed + +- Fix an issue where borders couldn't be applied to `img` tags without specifying a border style + +## [0.4.1] - 2018-01-22 + +### Changed + +- Make default sans-serif font stack more future proof and safe to use with CSS `font` shorthand +- Replace stylefmt with Perfectionist to avoid weird stylelint conflicts + +## [0.4.0] - 2017-12-15 + +### Added + +- `@apply`'d classes can now be made `!important` explicitly + +### Changed + +- `@apply` now strips `!important` from any mixed in classes +- Default color palette tweaks + +## [0.3.0] - 2017-12-01 + +### Added + +- Enable/disable modules and control which variants are generated for each +- Focus variants +- Group hover variants +- New `@variants` at-rule +- Customize the separator character +- Missing config keys now fallback to their default values +- New utilities + +### Changed + +- Lists now have no margins by default +- `.pin` no longer sets width and height to 100% +- SVG `fill` no longer defaults to currentColor + +## [0.2.2] - 2017-11-19 + +### Fixed + +- Fix issue with dist files not being published due to bug in latest npm + +## [0.2.1] - 2017-11-18 + +### Fixed + +- Fix overly specific border-radius reset for Chrome 62 button styles + +## [0.2.0] - 2017-11-17 + +### Added + +- Add a custom prefix to all utilities +- Optionally make all utilities `!important` +- Round element corners independently +- Cascading border colors and styles + +### Changed + +- `auto` is no longer a hard-coded margin value +- The `defaultConfig` function is now a separate module +- Rounded utilities now combine position and radius size +- Border width utilities no longer affect border color/style +- `@apply` is now very strict about what classes can be applied +- Add `options` key to your config +- Spacing, radius, and border width utility declaration order changes + +## [0.1.6] - 2017-11-09 + +### Fixed + +- Fix CDN files not being published to npm + +## [0.1.5] - 2017-11-08 + +### Changed + +- Apply the same default placeholder styling that's applied to inputs to textareas + +### Fixed + +- Fix CLI tool not loading config files properly + +## [0.1.4] - 2017-11-06 + +### Added + +- Autoprefix dist assets for quick hacking and prototyping +- Add `my-auto`, `mt-auto`, and `mb-auto` margin utilities +- Add `sans-serif` to end of default `sans` font stack + +### Changed + +- If using Webpack, it will now watch your config file changes +- When running `tailwind init [filename]`, automatically append `.js` to filename if not present +- Support default fallback value in 
`config(...)` function, ie. `config('colors.blue', #0000ff)` +- Don't output empty media queries if Tailwind processes a file that doesn't use Tailwind + +### Fixed + +- Move list utilities earlier in stylesheet to allow overriding with spacing utilities + +## [0.1.3] - 2017-11-02 + +### Added + +- Add new `.scrolling-touch` and `.scrolling-auto` utilities for controlling inertial scroll behavior on WebKit touch devices +- Generate separate dist files for preflight, utilities, and tailwind for CDN usage + +## [0.1.2] - 2017-11-01 + +### Changed + +- Target Node 6.9.0 explicitly (instead of 8.6 implicitly) to support more users + +### Fixed + +- Fix issue with config option not being respected in `tailwind build` + +## [0.1.1] - 2017-11-01 + +### Fixed + +- Fix `tailwind build` CLI command not writing output files + +## [0.1.0] - 2017-11-01 + +### Added + +- Everything! + +[unreleased]: https://github.com/tailwindlabs/tailwindcss/compare/v3.3.2...HEAD +[3.3.2]: https://github.com/tailwindlabs/tailwindcss/compare/v3.3.1...v3.3.2 +[3.3.1]: https://github.com/tailwindlabs/tailwindcss/compare/v3.3.0...v3.3.1 +[3.3.0]: https://github.com/tailwindlabs/tailwindcss/compare/v3.2.7...v3.3.0 +[3.2.7]: https://github.com/tailwindlabs/tailwindcss/compare/v3.2.6...v3.2.7 +[3.2.6]: https://github.com/tailwindlabs/tailwindcss/compare/v3.2.5...v3.2.6 +[3.2.5]: https://github.com/tailwindlabs/tailwindcss/compare/v3.2.4...v3.2.5 +[3.2.4]: https://github.com/tailwindlabs/tailwindcss/compare/v3.2.3...v3.2.4 +[3.2.3]: https://github.com/tailwindlabs/tailwindcss/compare/v3.2.2...v3.2.3 +[3.2.2]: https://github.com/tailwindlabs/tailwindcss/compare/v3.2.1...v3.2.2 +[3.2.1]: https://github.com/tailwindlabs/tailwindcss/compare/v3.2.0...v3.2.1 +[3.2.0]: https://github.com/tailwindlabs/tailwindcss/compare/v3.1.8...v3.2.0 +[3.1.8]: https://github.com/tailwindlabs/tailwindcss/compare/v3.1.7...v3.1.8 +[3.1.7]: https://github.com/tailwindlabs/tailwindcss/compare/v3.1.6...v3.1.7 +[3.1.6]: https://github.com/tailwindlabs/tailwindcss/compare/v3.1.5...v3.1.6 +[3.1.5]: https://github.com/tailwindlabs/tailwindcss/compare/v3.1.4...v3.1.5 +[3.1.4]: https://github.com/tailwindlabs/tailwindcss/compare/v3.1.3...v3.1.4 +[3.1.3]: https://github.com/tailwindlabs/tailwindcss/compare/v3.1.2...v3.1.3 +[3.1.2]: https://github.com/tailwindlabs/tailwindcss/compare/v3.1.1...v3.1.2 +[3.1.1]: https://github.com/tailwindlabs/tailwindcss/compare/v3.1.0...v3.1.1 +[3.1.0]: https://github.com/tailwindlabs/tailwindcss/compare/v3.0.24...v3.1.0 +[3.0.24]: https://github.com/tailwindlabs/tailwindcss/compare/v3.0.23...v3.0.24 +[3.0.23]: https://github.com/tailwindlabs/tailwindcss/compare/v3.0.22...v3.0.23 +[3.0.22]: https://github.com/tailwindlabs/tailwindcss/compare/v3.0.21...v3.0.22 +[3.0.21]: https://github.com/tailwindlabs/tailwindcss/compare/v3.0.20...v3.0.21 +[3.0.20]: https://github.com/tailwindlabs/tailwindcss/compare/v3.0.19...v3.0.20 +[3.0.19]: https://github.com/tailwindlabs/tailwindcss/compare/v3.0.18...v3.0.19 +[3.0.18]: https://github.com/tailwindlabs/tailwindcss/compare/v3.0.17...v3.0.18 +[3.0.17]: https://github.com/tailwindlabs/tailwindcss/compare/v3.0.16...v3.0.17 +[3.0.16]: https://github.com/tailwindlabs/tailwindcss/compare/v3.0.15...v3.0.16 +[3.0.15]: https://github.com/tailwindlabs/tailwindcss/compare/v3.0.14...v3.0.15 +[3.0.14]: https://github.com/tailwindlabs/tailwindcss/compare/v3.0.13...v3.0.14 +[3.0.13]: https://github.com/tailwindlabs/tailwindcss/compare/v3.0.12...v3.0.13 +[3.0.12]: 
https://github.com/tailwindlabs/tailwindcss/compare/v3.0.11...v3.0.12 +[3.0.11]: https://github.com/tailwindlabs/tailwindcss/compare/v3.0.10...v3.0.11 +[3.0.10]: https://github.com/tailwindlabs/tailwindcss/compare/v3.0.9...v3.0.10 +[3.0.9]: https://github.com/tailwindlabs/tailwindcss/compare/v3.0.8...v3.0.9 +[3.0.8]: https://github.com/tailwindlabs/tailwindcss/compare/v3.0.7...v3.0.8 +[3.0.7]: https://github.com/tailwindlabs/tailwindcss/compare/v3.0.6...v3.0.7 +[3.0.6]: https://github.com/tailwindlabs/tailwindcss/compare/v3.0.5...v3.0.6 +[3.0.5]: https://github.com/tailwindlabs/tailwindcss/compare/v3.0.4...v3.0.5 +[3.0.4]: https://github.com/tailwindlabs/tailwindcss/compare/v3.0.3...v3.0.4 +[3.0.3]: https://github.com/tailwindlabs/tailwindcss/compare/v3.0.2...v3.0.3 +[3.0.2]: https://github.com/tailwindlabs/tailwindcss/compare/v3.0.1...v3.0.2 +[3.0.1]: https://github.com/tailwindlabs/tailwindcss/compare/v3.0.0...v3.0.1 +[3.0.0]: https://github.com/tailwindlabs/tailwindcss/compare/v3.0.0-alpha.2...v3.0.0 +[3.0.0-alpha.2]: https://github.com/tailwindlabs/tailwindcss/compare/v3.0.0-alpha.1...v3.0.0-alpha.2 +[3.0.0-alpha.1]: https://github.com/tailwindlabs/tailwindcss/compare/v2.2.19...v3.0.0-alpha.1 +[2.2.19]: https://github.com/tailwindlabs/tailwindcss/compare/v2.2.18...v2.2.19 +[2.2.18]: https://github.com/tailwindlabs/tailwindcss/compare/v2.2.17...v2.2.18 +[2.2.17]: https://github.com/tailwindlabs/tailwindcss/compare/v2.2.16...v2.2.17 +[2.2.16]: https://github.com/tailwindlabs/tailwindcss/compare/v2.2.15...v2.2.16 +[2.2.15]: https://github.com/tailwindlabs/tailwindcss/compare/v2.2.14...v2.2.15 +[2.2.14]: https://github.com/tailwindlabs/tailwindcss/compare/v2.2.13...v2.2.14 +[2.2.13]: https://github.com/tailwindlabs/tailwindcss/compare/v2.2.12...v2.2.13 +[2.2.12]: https://github.com/tailwindlabs/tailwindcss/compare/v2.2.11...v2.2.12 +[2.2.11]: https://github.com/tailwindlabs/tailwindcss/compare/v2.2.10...v2.2.11 +[2.2.10]: https://github.com/tailwindlabs/tailwindcss/compare/v2.2.9...v2.2.10 +[2.2.9]: https://github.com/tailwindlabs/tailwindcss/compare/v2.2.8...v2.2.9 +[2.2.8]: https://github.com/tailwindlabs/tailwindcss/compare/v2.2.7...v2.2.8 +[2.2.7]: https://github.com/tailwindlabs/tailwindcss/compare/v2.2.6...v2.2.7 +[2.2.6]: https://github.com/tailwindlabs/tailwindcss/compare/v2.2.5...v2.2.6 +[2.2.5]: https://github.com/tailwindlabs/tailwindcss/compare/v2.2.4...v2.2.5 +[2.2.4]: https://github.com/tailwindlabs/tailwindcss/compare/v2.2.3...v2.2.4 +[2.2.3]: https://github.com/tailwindlabs/tailwindcss/compare/v2.2.2...v2.2.3 +[2.2.2]: https://github.com/tailwindlabs/tailwindcss/compare/v2.2.1...v2.2.2 +[2.2.1]: https://github.com/tailwindlabs/tailwindcss/compare/v2.2.0...v2.2.1 +[2.2.0]: https://github.com/tailwindlabs/tailwindcss/compare/v2.1.4...v2.2.0 +[2.1.4]: https://github.com/tailwindlabs/tailwindcss/compare/v2.1.3...v2.1.4 +[2.1.3]: https://github.com/tailwindlabs/tailwindcss/compare/v2.1.2...v2.1.3 +[2.1.2]: https://github.com/tailwindlabs/tailwindcss/compare/v2.1.1...v2.1.2 +[2.1.1]: https://github.com/tailwindlabs/tailwindcss/compare/v2.1.0...v2.1.1 +[2.1.0]: https://github.com/tailwindlabs/tailwindcss/compare/v2.0.4...v2.1.0 +[2.0.4]: https://github.com/tailwindlabs/tailwindcss/compare/v2.0.3...v2.0.4 +[2.0.3]: https://github.com/tailwindlabs/tailwindcss/compare/v2.0.2...v2.0.3 +[2.0.2]: https://github.com/tailwindlabs/tailwindcss/compare/v2.0.1...v2.0.2 +[2.0.1]: https://github.com/tailwindlabs/tailwindcss/compare/v2.0.0...v2.0.1 +[2.0.0]: 
https://github.com/tailwindlabs/tailwindcss/compare/v1.9.6...v2.0.0 +[2.0.0-alpha.25]: https://github.com/tailwindlabs/tailwindcss/compare/v2.0.0-alpha.24...v2.0.0-alpha.25 +[2.0.0-alpha.24]: https://github.com/tailwindlabs/tailwindcss/compare/v2.0.0-alpha.23...v2.0.0-alpha.24 +[2.0.0-alpha.23]: https://github.com/tailwindlabs/tailwindcss/compare/v2.0.0-alpha.22...v2.0.0-alpha.23 +[2.0.0-alpha.22]: https://github.com/tailwindlabs/tailwindcss/compare/v2.0.0-alpha.21...v2.0.0-alpha.22 +[2.0.0-alpha.21]: https://github.com/tailwindlabs/tailwindcss/compare/v2.0.0-alpha.20...v2.0.0-alpha.21 +[2.0.0-alpha.20]: https://github.com/tailwindlabs/tailwindcss/compare/v2.0.0-alpha.19...v2.0.0-alpha.20 +[2.0.0-alpha.19]: https://github.com/tailwindlabs/tailwindcss/compare/v2.0.0-alpha.18...v2.0.0-alpha.19 +[2.0.0-alpha.18]: https://github.com/tailwindlabs/tailwindcss/compare/v2.0.0-alpha.17...v2.0.0-alpha.18 +[2.0.0-alpha.17]: https://github.com/tailwindlabs/tailwindcss/compare/v2.0.0-alpha.16...v2.0.0-alpha.17 +[2.0.0-alpha.16]: https://github.com/tailwindlabs/tailwindcss/compare/v2.0.0-alpha.15...v2.0.0-alpha.16 +[2.0.0-alpha.15]: https://github.com/tailwindlabs/tailwindcss/compare/v2.0.0-alpha.14...v2.0.0-alpha.15 +[2.0.0-alpha.14]: https://github.com/tailwindlabs/tailwindcss/compare/v2.0.0-alpha.13...v2.0.0-alpha.14 +[2.0.0-alpha.13]: https://github.com/tailwindlabs/tailwindcss/compare/v2.0.0-alpha.12...v2.0.0-alpha.13 +[2.0.0-alpha.12]: https://github.com/tailwindlabs/tailwindcss/compare/v2.0.0-alpha.11...v2.0.0-alpha.12 +[2.0.0-alpha.11]: https://github.com/tailwindlabs/tailwindcss/compare/v2.0.0-alpha.10...v2.0.0-alpha.11 +[2.0.0-alpha.10]: https://github.com/tailwindlabs/tailwindcss/compare/v2.0.0-alpha.9...v2.0.0-alpha.10 +[2.0.0-alpha.9]: https://github.com/tailwindlabs/tailwindcss/compare/v2.0.0-alpha.8...v2.0.0-alpha.9 +[2.0.0-alpha.8]: https://github.com/tailwindlabs/tailwindcss/compare/v2.0.0-alpha.7...v2.0.0-alpha.8 +[2.0.0-alpha.7]: https://github.com/tailwindlabs/tailwindcss/compare/v2.0.0-alpha.6...v2.0.0-alpha.7 +[2.0.0-alpha.6]: https://github.com/tailwindlabs/tailwindcss/compare/v2.0.0-alpha.5...v2.0.0-alpha.6 +[2.0.0-alpha.5]: https://github.com/tailwindlabs/tailwindcss/compare/v2.0.0-alpha.4...v2.0.0-alpha.5 +[2.0.0-alpha.4]: https://github.com/tailwindlabs/tailwindcss/compare/v2.0.0-alpha.3...v2.0.0-alpha.4 +[2.0.0-alpha.3]: https://github.com/tailwindlabs/tailwindcss/compare/v2.0.0-alpha.2...v2.0.0-alpha.3 +[2.0.0-alpha.2]: https://github.com/tailwindlabs/tailwindcss/compare/v2.0.0-alpha.1...v2.0.0-alpha.2 +[1.9.6]: https://github.com/tailwindlabs/tailwindcss/compare/v1.9.5...v1.9.6 +[2.0.0-alpha.1]: https://github.com/tailwindlabs/tailwindcss/compare/v1.9.5...v2.0.0-alpha.1 +[1.9.5]: https://github.com/tailwindlabs/tailwindcss/compare/v1.9.4...v1.9.5 +[1.9.4]: https://github.com/tailwindlabs/tailwindcss/compare/v1.9.3...v1.9.4 +[1.9.3]: https://github.com/tailwindlabs/tailwindcss/compare/v1.9.2...v1.9.3 +[1.9.2]: https://github.com/tailwindlabs/tailwindcss/compare/v1.9.1...v1.9.2 +[1.9.1]: https://github.com/tailwindlabs/tailwindcss/compare/v1.9.0...v1.9.1 +[1.9.0]: https://github.com/tailwindlabs/tailwindcss/compare/v1.8.13...v1.9.0 +[1.8.13]: https://github.com/tailwindlabs/tailwindcss/compare/v1.8.12...v1.8.13 +[1.8.12]: https://github.com/tailwindlabs/tailwindcss/compare/v1.8.11...v1.8.12 +[1.8.11]: https://github.com/tailwindlabs/tailwindcss/compare/v1.8.10...v1.8.11 +[1.8.10]: https://github.com/tailwindlabs/tailwindcss/compare/v1.8.9...v1.8.10 +[1.8.9]: 
https://github.com/tailwindlabs/tailwindcss/compare/v1.8.8...v1.8.9 +[1.8.8]: https://github.com/tailwindlabs/tailwindcss/compare/v1.8.7...v1.8.8 +[1.8.7]: https://github.com/tailwindlabs/tailwindcss/compare/v1.8.6...v1.8.7 +[1.8.6]: https://github.com/tailwindlabs/tailwindcss/compare/v1.8.5...v1.8.6 +[1.8.5]: https://github.com/tailwindlabs/tailwindcss/compare/v1.8.4...v1.8.5 +[1.8.4]: https://github.com/tailwindlabs/tailwindcss/compare/v1.8.3...v1.8.4 +[1.8.3]: https://github.com/tailwindlabs/tailwindcss/compare/v1.8.2...v1.8.3 +[1.8.2]: https://github.com/tailwindlabs/tailwindcss/compare/v1.8.1...v1.8.2 +[1.8.1]: https://github.com/tailwindlabs/tailwindcss/compare/v1.8.0...v1.8.1 +[1.8.0]: https://github.com/tailwindlabs/tailwindcss/compare/v1.7.6...v1.8.0 +[1.7.6]: https://github.com/tailwindlabs/tailwindcss/compare/v1.7.5...v1.7.6 +[1.7.5]: https://github.com/tailwindlabs/tailwindcss/compare/v1.7.4...v1.7.5 +[1.7.4]: https://github.com/tailwindlabs/tailwindcss/compare/v1.7.3...v1.7.4 +[1.7.3]: https://github.com/tailwindlabs/tailwindcss/compare/v1.7.2...v1.7.3 +[1.7.2]: https://github.com/tailwindlabs/tailwindcss/compare/v1.7.1...v1.7.2 +[1.7.1]: https://github.com/tailwindlabs/tailwindcss/compare/v1.7.0...v1.7.1 +[1.7.0]: https://github.com/tailwindlabs/tailwindcss/compare/v1.6.3...v1.7.0 +[1.6.3]: https://github.com/tailwindlabs/tailwindcss/compare/v1.6.2...v1.6.3 +[1.6.2]: https://github.com/tailwindlabs/tailwindcss/compare/v1.6.1...v1.6.2 +[1.6.1]: https://github.com/tailwindlabs/tailwindcss/compare/v1.6.0...v1.6.1 +[1.6.0]: https://github.com/tailwindlabs/tailwindcss/compare/v1.5.2...v1.6.0 +[1.5.2]: https://github.com/tailwindlabs/tailwindcss/compare/v1.5.1...v1.5.2 +[1.5.1]: https://github.com/tailwindlabs/tailwindcss/compare/v1.5.0...v1.5.1 +[1.5.0]: https://github.com/tailwindlabs/tailwindcss/compare/v1.4.6...v1.5.0 +[1.4.6]: https://github.com/tailwindlabs/tailwindcss/compare/v1.4.5...v1.4.6 +[1.4.5]: https://github.com/tailwindlabs/tailwindcss/compare/v1.4.4...v1.4.5 +[1.4.4]: https://github.com/tailwindlabs/tailwindcss/compare/v1.4.3...v1.4.4 +[1.4.3]: https://github.com/tailwindlabs/tailwindcss/compare/v1.4.2...v1.4.3 +[1.4.2]: https://github.com/tailwindlabs/tailwindcss/compare/v1.4.1...v1.4.2 +[1.4.1]: https://github.com/tailwindlabs/tailwindcss/compare/v1.4.0...v1.4.1 +[1.4.0]: https://github.com/tailwindlabs/tailwindcss/compare/v1.3.5...v1.4.0 +[1.3.5]: https://github.com/tailwindlabs/tailwindcss/compare/v1.3.4...v1.3.5 +[1.3.4]: https://github.com/tailwindlabs/tailwindcss/compare/v1.3.3...v1.3.4 +[1.3.3]: https://github.com/tailwindlabs/tailwindcss/compare/v1.3.1...v1.3.3 +[1.3.1]: https://github.com/tailwindlabs/tailwindcss/compare/v1.3.0...v1.3.1 +[1.3.0]: https://github.com/tailwindlabs/tailwindcss/compare/v1.2.0...v1.3.0 +[1.2.0]: https://github.com/tailwindlabs/tailwindcss/compare/v1.1.4...v1.2.0 +[1.2.0-canary.8]: https://github.com/tailwindlabs/tailwindcss/compare/v1.2.0-canary.7...v1.2.0-canary.8 +[1.2.0-canary.7]: https://github.com/tailwindlabs/tailwindcss/compare/v1.2.0-canary.6...v1.2.0-canary.7 +[1.2.0-canary.6]: https://github.com/tailwindlabs/tailwindcss/compare/v1.2.0-canary.5...v1.2.0-canary.6 +[1.2.0-canary.5]: https://github.com/tailwindlabs/tailwindcss/compare/v1.2.0-canary.4...v1.2.0-canary.5 +[1.2.0-canary.4]: https://github.com/tailwindlabs/tailwindcss/compare/v1.2.0-canary.3...v1.2.0-canary.4 +[1.1.4]: https://github.com/tailwindlabs/tailwindcss/compare/v1.1.3...v1.1.4 +[1.2.0-canary.1]: 
https://github.com/tailwindlabs/tailwindcss/compare/v1.2.0-canary.0...v1.2.0-canary.1 +[1.1.3]: https://github.com/tailwindlabs/tailwindcss/compare/v1.1.2...v1.1.3 +[1.2.0-canary.0]: https://github.com/tailwindlabs/tailwindcss/compare/v1.1.2...v1.2.0-canary.0 +[1.1.2]: https://github.com/tailwindlabs/tailwindcss/compare/v1.1.1...v1.1.2 +[1.1.1]: https://github.com/tailwindlabs/tailwindcss/compare/v1.1.0...v1.1.1 +[1.1.0]: https://github.com/tailwindlabs/tailwindcss/compare/v1.0.6...v1.1.0 +[1.0.6]: https://github.com/tailwindlabs/tailwindcss/compare/v1.0.5...v1.0.6 +[1.0.5]: https://github.com/tailwindlabs/tailwindcss/compare/v1.0.4...v1.0.5 +[1.0.4]: https://github.com/tailwindlabs/tailwindcss/compare/v1.0.3...v1.0.4 +[1.0.3]: https://github.com/tailwindlabs/tailwindcss/compare/v1.0.2...v1.0.3 +[1.0.2]: https://github.com/tailwindlabs/tailwindcss/compare/v1.0.1...v1.0.2 +[1.0.1]: https://github.com/tailwindlabs/tailwindcss/compare/v1.0.0...v1.0.1 +[1.0.0]: https://github.com/tailwindlabs/tailwindcss/compare/v1.0.0-beta.10...v1.0.0 +[1.0.0-beta.10]: https://github.com/tailwindlabs/tailwindcss/compare/v1.0.0-beta.9...v1.0.0-beta.10 +[1.0.0-beta.9]: https://github.com/tailwindlabs/tailwindcss/compare/v1.0.0-beta.8...v1.0.0-beta.9 +[1.0.0-beta.8]: https://github.com/tailwindlabs/tailwindcss/compare/v1.0.0-beta.7...v1.0.0-beta.8 +[1.0.0-beta.7]: https://github.com/tailwindlabs/tailwindcss/compare/v1.0.0-beta.6...v1.0.0-beta.7 +[1.0.0-beta.6]: https://github.com/tailwindlabs/tailwindcss/compare/v1.0.0-beta.5...v1.0.0-beta.6 +[1.0.0-beta.5]: https://github.com/tailwindlabs/tailwindcss/compare/v1.0.0-beta.4...v1.0.0-beta.5 +[1.0.0-beta.4]: https://github.com/tailwindlabs/tailwindcss/compare/v1.0.0-beta.3...v1.0.0-beta.4 +[1.0.0-beta.3]: https://github.com/tailwindlabs/tailwindcss/compare/v1.0.0-beta.2...v1.0.0-beta.3 +[1.0.0-beta.2]: https://github.com/tailwindlabs/tailwindcss/compare/v1.0.0-beta.1...v1.0.0-beta.2 +[1.0.0-beta.1]: https://github.com/tailwindlabs/tailwindcss/compare/v0.7.4...v1.0.0-beta.1 +[0.7.4]: https://github.com/tailwindlabs/tailwindcss/compare/v0.7.3...v0.7.4 +[0.7.3]: https://github.com/tailwindlabs/tailwindcss/compare/v0.7.2...v0.7.3 +[0.7.2]: https://github.com/tailwindlabs/tailwindcss/compare/v0.7.1...v0.7.2 +[0.7.1]: https://github.com/tailwindlabs/tailwindcss/compare/v0.7.0...v0.7.1 +[0.7.0]: https://github.com/tailwindlabs/tailwindcss/compare/v0.6.6...v0.7.0 +[0.6.6]: https://github.com/tailwindlabs/tailwindcss/compare/v0.6.5...v0.6.6 +[0.6.5]: https://github.com/tailwindlabs/tailwindcss/compare/v0.6.4...v0.6.5 +[0.6.4]: https://github.com/tailwindlabs/tailwindcss/compare/v0.6.3...v0.6.4 +[0.6.3]: https://github.com/tailwindlabs/tailwindcss/compare/v0.6.2...v0.6.3 +[0.6.2]: https://github.com/tailwindlabs/tailwindcss/compare/v0.6.1...v0.6.2 +[0.6.1]: https://github.com/tailwindlabs/tailwindcss/compare/v0.6.0...v0.6.1 +[0.6.0]: https://github.com/tailwindlabs/tailwindcss/compare/v0.5.3...v0.6.0 +[0.5.3]: https://github.com/tailwindlabs/tailwindcss/compare/v0.5.2...v0.5.3 +[0.5.2]: https://github.com/tailwindlabs/tailwindcss/compare/v0.5.1...v0.5.2 +[0.5.1]: https://github.com/tailwindlabs/tailwindcss/compare/v0.5.0...v0.5.1 +[0.5.0]: https://github.com/tailwindlabs/tailwindcss/compare/v0.4.3...v0.5.0 +[0.4.3]: https://github.com/tailwindlabs/tailwindcss/compare/v0.4.2...v0.4.3 +[0.4.2]: https://github.com/tailwindlabs/tailwindcss/compare/v0.4.1...v0.4.2 +[0.4.1]: https://github.com/tailwindlabs/tailwindcss/compare/v0.4.0...v0.4.1 +[0.4.0]: 
https://github.com/tailwindlabs/tailwindcss/compare/v0.3.0...v0.4.0 +[0.3.0]: https://github.com/tailwindlabs/tailwindcss/compare/v0.2.2...v0.3.0 +[0.2.2]: https://github.com/tailwindlabs/tailwindcss/compare/v0.2.1...v0.2.2 +[0.2.1]: https://github.com/tailwindlabs/tailwindcss/compare/v0.2.0...v0.2.1 +[0.2.0]: https://github.com/tailwindlabs/tailwindcss/compare/v0.1.6...v0.2.0 +[0.1.6]: https://github.com/tailwindlabs/tailwindcss/compare/v0.1.5...v0.1.6 +[0.1.5]: https://github.com/tailwindlabs/tailwindcss/compare/v0.1.4...v0.1.5 +[0.1.4]: https://github.com/tailwindlabs/tailwindcss/compare/v0.1.3...v0.1.4 +[0.1.3]: https://github.com/tailwindlabs/tailwindcss/compare/v0.1.2...v0.1.3 +[0.1.2]: https://github.com/tailwindlabs/tailwindcss/compare/v0.1.1...v0.1.2 +[0.1.1]: https://github.com/tailwindlabs/tailwindcss/compare/v0.1.0...v0.1.1 +[0.1.0]: https://github.com/tailwindlabs/tailwindcss/releases/tag/v0.1.0 diff --git a/node_modules/tailwindcss/LICENSE b/node_modules/tailwindcss/LICENSE new file mode 100644 index 0000000..d6a8229 --- /dev/null +++ b/node_modules/tailwindcss/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) Tailwind Labs, Inc. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/tailwindcss/README.md b/node_modules/tailwindcss/README.md new file mode 100644 index 0000000..1766378 --- /dev/null +++ b/node_modules/tailwindcss/README.md @@ -0,0 +1,41 @@ +

+ Tailwind CSS
+
+ A utility-first CSS framework for rapidly building custom user interfaces.
+
+ Badges: Build Status, Total Downloads, Latest Release, License
+
+ +------ + +## Documentation + +For full documentation, visit [tailwindcss.com](https://tailwindcss.com/). + +## Community + +For help, discussion about best practices, or any other conversation that would benefit from being searchable: + +[Discuss Tailwind CSS on GitHub](https://github.com/tailwindcss/tailwindcss/discussions) + +For casual chit-chat with others using the framework: + +[Join the Tailwind CSS Discord Server](https://discord.gg/7NF8GNe) + +## Contributing + +If you're interested in contributing to Tailwind CSS, please read our [contributing docs](https://github.com/tailwindcss/tailwindcss/blob/master/.github/CONTRIBUTING.md) **before submitting a pull request**. diff --git a/node_modules/tailwindcss/base.css b/node_modules/tailwindcss/base.css new file mode 100644 index 0000000..2f02db5 --- /dev/null +++ b/node_modules/tailwindcss/base.css @@ -0,0 +1 @@ +@tailwind base; diff --git a/node_modules/tailwindcss/colors.d.ts b/node_modules/tailwindcss/colors.d.ts new file mode 100644 index 0000000..d85ab86 --- /dev/null +++ b/node_modules/tailwindcss/colors.d.ts @@ -0,0 +1,3 @@ +import type { DefaultColors } from './types/generated/colors' +declare const colors: DefaultColors +export = colors diff --git a/node_modules/tailwindcss/colors.js b/node_modules/tailwindcss/colors.js new file mode 100644 index 0000000..c6f9149 --- /dev/null +++ b/node_modules/tailwindcss/colors.js @@ -0,0 +1,2 @@ +let colors = require('./lib/public/colors') +module.exports = (colors.__esModule ? colors : { default: colors }).default diff --git a/node_modules/tailwindcss/components.css b/node_modules/tailwindcss/components.css new file mode 100644 index 0000000..020aaba --- /dev/null +++ b/node_modules/tailwindcss/components.css @@ -0,0 +1 @@ +@tailwind components; diff --git a/node_modules/tailwindcss/defaultConfig.d.ts b/node_modules/tailwindcss/defaultConfig.d.ts new file mode 100644 index 0000000..2c2bccf --- /dev/null +++ b/node_modules/tailwindcss/defaultConfig.d.ts @@ -0,0 +1,3 @@ +import type { Config } from './types/config' +declare const config: Config +export = config diff --git a/node_modules/tailwindcss/defaultConfig.js b/node_modules/tailwindcss/defaultConfig.js new file mode 100644 index 0000000..7b63587 --- /dev/null +++ b/node_modules/tailwindcss/defaultConfig.js @@ -0,0 +1,2 @@ +let defaultConfig = require('./lib/public/default-config') +module.exports = (defaultConfig.__esModule ? defaultConfig : { default: defaultConfig }).default diff --git a/node_modules/tailwindcss/defaultTheme.d.ts b/node_modules/tailwindcss/defaultTheme.d.ts new file mode 100644 index 0000000..2bc9dc7 --- /dev/null +++ b/node_modules/tailwindcss/defaultTheme.d.ts @@ -0,0 +1,4 @@ +import type { Config } from './types/config' +import { DefaultTheme } from './types/generated/default-theme' +declare const theme: Config['theme'] & DefaultTheme +export = theme diff --git a/node_modules/tailwindcss/defaultTheme.js b/node_modules/tailwindcss/defaultTheme.js new file mode 100644 index 0000000..991526d --- /dev/null +++ b/node_modules/tailwindcss/defaultTheme.js @@ -0,0 +1,2 @@ +let defaultTheme = require('./lib/public/default-theme') +module.exports = (defaultTheme.__esModule ? 
defaultTheme : { default: defaultTheme }).default diff --git a/node_modules/tailwindcss/lib/cli-peer-dependencies.js b/node_modules/tailwindcss/lib/cli-peer-dependencies.js new file mode 100644 index 0000000..4b64be2 --- /dev/null +++ b/node_modules/tailwindcss/lib/cli-peer-dependencies.js @@ -0,0 +1,36 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { + value: true +}); +function _export(target, all) { + for(var name in all)Object.defineProperty(target, name, { + enumerable: true, + get: all[name] + }); +} +_export(exports, { + lazyPostcss: function() { + return lazyPostcss; + }, + lazyPostcssImport: function() { + return lazyPostcssImport; + }, + lazyAutoprefixer: function() { + return lazyAutoprefixer; + }, + lazyCssnano: function() { + return lazyCssnano; + } +}); +function lazyPostcss() { + return require("postcss"); +} +function lazyPostcssImport() { + return require("postcss-import"); +} +function lazyAutoprefixer() { + return require("autoprefixer"); +} +function lazyCssnano() { + return require("cssnano"); +} diff --git a/node_modules/tailwindcss/lib/cli.js b/node_modules/tailwindcss/lib/cli.js new file mode 100755 index 0000000..c7043a3 --- /dev/null +++ b/node_modules/tailwindcss/lib/cli.js @@ -0,0 +1,7 @@ +#!/usr/bin/env node +"use strict"; +if (false) { + module.exports = require("./oxide/cli"); +} else { + module.exports = require("./cli/index"); +} diff --git a/node_modules/tailwindcss/lib/cli/build/deps.js b/node_modules/tailwindcss/lib/cli/build/deps.js new file mode 100644 index 0000000..1aa8116 --- /dev/null +++ b/node_modules/tailwindcss/lib/cli/build/deps.js @@ -0,0 +1,62 @@ +// @ts-check +"use strict"; +Object.defineProperty(exports, "__esModule", { + value: true +}); +function _export(target, all) { + for(var name in all)Object.defineProperty(target, name, { + enumerable: true, + get: all[name] + }); +} +_export(exports, { + loadPostcss: function() { + return loadPostcss; + }, + loadPostcssImport: function() { + return loadPostcssImport; + }, + loadCssNano: function() { + return loadCssNano; + }, + loadAutoprefixer: function() { + return loadAutoprefixer; + } +}); +const _index = require("../../../peers/index.js"); +function loadPostcss() { + // Try to load a local `postcss` version first + try { + return require("postcss"); + } catch {} + return (0, _index.lazyPostcss)(); +} +function loadPostcssImport() { + // Try to load a local `postcss-import` version first + try { + return require("postcss-import"); + } catch {} + return (0, _index.lazyPostcssImport)(); +} +function loadCssNano() { + let options = { + preset: [ + "default", + { + cssDeclarationSorter: false + } + ] + }; + // Try to load a local `cssnano` version first + try { + return require("cssnano"); + } catch {} + return (0, _index.lazyCssnano)()(options); +} +function loadAutoprefixer() { + // Try to load a local `autoprefixer` version first + try { + return require("autoprefixer"); + } catch {} + return (0, _index.lazyAutoprefixer)(); +} diff --git a/node_modules/tailwindcss/lib/cli/build/index.js b/node_modules/tailwindcss/lib/cli/build/index.js new file mode 100644 index 0000000..60304f6 --- /dev/null +++ b/node_modules/tailwindcss/lib/cli/build/index.js @@ -0,0 +1,54 @@ +// @ts-check +"use strict"; +Object.defineProperty(exports, "__esModule", { + value: true +}); +Object.defineProperty(exports, "build", { + enumerable: true, + get: function() { + return build; + } +}); +const _fs = /*#__PURE__*/ _interop_require_default(require("fs")); +const _path = /*#__PURE__*/ 
_interop_require_default(require("path")); +const _resolveConfigPath = require("../../util/resolveConfigPath.js"); +const _plugin = require("./plugin.js"); +function _interop_require_default(obj) { + return obj && obj.__esModule ? obj : { + default: obj + }; +} +async function build(args) { + let input = args["--input"]; + let shouldWatch = args["--watch"]; + // TODO: Deprecate this in future versions + if (!input && args["_"][1]) { + console.error("[deprecation] Running tailwindcss without -i, please provide an input file."); + input = args["--input"] = args["_"][1]; + } + if (input && input !== "-" && !_fs.default.existsSync(input = _path.default.resolve(input))) { + console.error(`Specified input file ${args["--input"]} does not exist.`); + process.exit(9); + } + if (args["--config"] && !_fs.default.existsSync(args["--config"] = _path.default.resolve(args["--config"]))) { + console.error(`Specified config file ${args["--config"]} does not exist.`); + process.exit(9); + } + // TODO: Reference the @config path here if exists + let configPath = args["--config"] ? args["--config"] : (0, _resolveConfigPath.resolveDefaultConfigPath)(); + let processor = await (0, _plugin.createProcessor)(args, configPath); + if (shouldWatch) { + // Abort the watcher if stdin is closed to avoid zombie processes + // You can disable this behavior with --watch=always + if (args["--watch"] !== "always") { + process.stdin.on("end", ()=>process.exit(0)); + } + process.stdin.resume(); + await processor.watch(); + } else { + await processor.build().catch((e)=>{ + console.error(e); + process.exit(1); + }); + } +} diff --git a/node_modules/tailwindcss/lib/cli/build/plugin.js b/node_modules/tailwindcss/lib/cli/build/plugin.js new file mode 100644 index 0000000..d36ac01 --- /dev/null +++ b/node_modules/tailwindcss/lib/cli/build/plugin.js @@ -0,0 +1,378 @@ +// @ts-check +"use strict"; +Object.defineProperty(exports, "__esModule", { + value: true +}); +Object.defineProperty(exports, "createProcessor", { + enumerable: true, + get: function() { + return createProcessor; + } +}); +const _path = /*#__PURE__*/ _interop_require_default(require("path")); +const _fs = /*#__PURE__*/ _interop_require_default(require("fs")); +const _postcssloadconfig = /*#__PURE__*/ _interop_require_default(require("postcss-load-config")); +const _lilconfig = require("lilconfig"); +const _plugins = /*#__PURE__*/ _interop_require_default(require("postcss-load-config/src/plugins" // Little bit scary, looking at private/internal API +)); +const _options = /*#__PURE__*/ _interop_require_default(require("postcss-load-config/src/options" // Little bit scary, looking at private/internal API +)); +const _processTailwindFeatures = /*#__PURE__*/ _interop_require_default(require("../../processTailwindFeatures")); +const _deps = require("./deps"); +const _utils = require("./utils"); +const _sharedState = require("../../lib/sharedState"); +const _resolveConfig = /*#__PURE__*/ _interop_require_default(require("../../../resolveConfig.js")); +const _content = require("../../lib/content.js"); +const _watching = require("./watching.js"); +const _fastglob = /*#__PURE__*/ _interop_require_default(require("fast-glob")); +const _findAtConfigPath = require("../../lib/findAtConfigPath.js"); +const _log = /*#__PURE__*/ _interop_require_default(require("../../util/log")); +const _loadconfig = require("../../lib/load-config"); +const _getModuleDependencies = /*#__PURE__*/ _interop_require_default(require("../../lib/getModuleDependencies")); +function 
_interop_require_default(obj) { + return obj && obj.__esModule ? obj : { + default: obj + }; +} +/** + * + * @param {string} [customPostCssPath ] + * @returns + */ async function loadPostCssPlugins(customPostCssPath) { + let config = customPostCssPath ? await (async ()=>{ + let file = _path.default.resolve(customPostCssPath); + // Implementation, see: https://unpkg.com/browse/postcss-load-config@3.1.0/src/index.js + // @ts-ignore + let { config ={} } = await (0, _lilconfig.lilconfig)("postcss").load(file); + if (typeof config === "function") { + config = config(); + } else { + config = Object.assign({}, config); + } + if (!config.plugins) { + config.plugins = []; + } + return { + file, + plugins: (0, _plugins.default)(config, file), + options: (0, _options.default)(config, file) + }; + })() : await (0, _postcssloadconfig.default)(); + let configPlugins = config.plugins; + let configPluginTailwindIdx = configPlugins.findIndex((plugin)=>{ + if (typeof plugin === "function" && plugin.name === "tailwindcss") { + return true; + } + if (typeof plugin === "object" && plugin !== null && plugin.postcssPlugin === "tailwindcss") { + return true; + } + return false; + }); + let beforePlugins = configPluginTailwindIdx === -1 ? [] : configPlugins.slice(0, configPluginTailwindIdx); + let afterPlugins = configPluginTailwindIdx === -1 ? configPlugins : configPlugins.slice(configPluginTailwindIdx + 1); + return [ + beforePlugins, + afterPlugins, + config.options + ]; +} +function loadBuiltinPostcssPlugins() { + let postcss = (0, _deps.loadPostcss)(); + let IMPORT_COMMENT = "__TAILWIND_RESTORE_IMPORT__: "; + return [ + [ + (root)=>{ + root.walkAtRules("import", (rule)=>{ + if (rule.params.slice(1).startsWith("tailwindcss/")) { + rule.after(postcss.comment({ + text: IMPORT_COMMENT + rule.params + })); + rule.remove(); + } + }); + }, + (0, _deps.loadPostcssImport)(), + (root)=>{ + root.walkComments((rule)=>{ + if (rule.text.startsWith(IMPORT_COMMENT)) { + rule.after(postcss.atRule({ + name: "import", + params: rule.text.replace(IMPORT_COMMENT, "") + })); + rule.remove(); + } + }); + } + ], + [], + {} + ]; +} +let state = { + /** @type {any} */ context: null, + /** @type {ReturnType | null} */ watcher: null, + /** @type {{content: string, extension: string}[]} */ changedContent: [], + /** @type {ReturnType | null} */ configBag: null, + contextDependencies: new Set(), + /** @type {import('../../lib/content.js').ContentPath[]} */ contentPaths: [], + refreshContentPaths () { + var _this_context; + this.contentPaths = (0, _content.parseCandidateFiles)(this.context, (_this_context = this.context) === null || _this_context === void 0 ? 
void 0 : _this_context.tailwindConfig); + }, + get config () { + return this.context.tailwindConfig; + }, + get contentPatterns () { + return { + all: this.contentPaths.map((contentPath)=>contentPath.pattern), + dynamic: this.contentPaths.filter((contentPath)=>contentPath.glob !== undefined).map((contentPath)=>contentPath.pattern) + }; + }, + loadConfig (configPath, content) { + if (this.watcher && configPath) { + this.refreshConfigDependencies(); + } + let config = (0, _loadconfig.loadConfig)(configPath); + let dependencies = (0, _getModuleDependencies.default)(configPath); + this.configBag = { + config, + dependencies, + dispose () { + for (let file of dependencies){ + delete require.cache[require.resolve(file)]; + } + } + }; + // @ts-ignore + this.configBag.config = (0, _resolveConfig.default)(this.configBag.config, { + content: { + files: [] + } + }); + // Override content files if `--content` has been passed explicitly + if ((content === null || content === void 0 ? void 0 : content.length) > 0) { + this.configBag.config.content.files = content; + } + return this.configBag.config; + }, + refreshConfigDependencies () { + var _this_configBag; + _sharedState.env.DEBUG && console.time("Module dependencies"); + (_this_configBag = this.configBag) === null || _this_configBag === void 0 ? void 0 : _this_configBag.dispose(); + _sharedState.env.DEBUG && console.timeEnd("Module dependencies"); + }, + readContentPaths () { + let content = []; + // Resolve globs from the content config + // TODO: When we make the postcss plugin async-capable this can become async + let files = _fastglob.default.sync(this.contentPatterns.all); + for (let file of files){ + if (false) { + content.push({ + file, + extension: _path.default.extname(file).slice(1) + }); + } else { + content.push({ + content: _fs.default.readFileSync(_path.default.resolve(file), "utf8"), + extension: _path.default.extname(file).slice(1) + }); + } + } + // Resolve raw content in the tailwind config + let rawContent = this.config.content.files.filter((file)=>{ + return file !== null && typeof file === "object"; + }); + for (let { raw: htmlContent , extension ="html" } of rawContent){ + content.push({ + content: htmlContent, + extension + }); + } + return content; + }, + getContext ({ createContext , cliConfigPath , root , result , content }) { + if (this.context) { + this.context.changedContent = this.changedContent.splice(0); + return this.context; + } + _sharedState.env.DEBUG && console.time("Searching for config"); + var _findAtConfigPath1; + let configPath = (_findAtConfigPath1 = (0, _findAtConfigPath.findAtConfigPath)(root, result)) !== null && _findAtConfigPath1 !== void 0 ? 
_findAtConfigPath1 : cliConfigPath; + _sharedState.env.DEBUG && console.timeEnd("Searching for config"); + _sharedState.env.DEBUG && console.time("Loading config"); + let config = this.loadConfig(configPath, content); + _sharedState.env.DEBUG && console.timeEnd("Loading config"); + _sharedState.env.DEBUG && console.time("Creating context"); + this.context = createContext(config, []); + Object.assign(this.context, { + userConfigPath: configPath + }); + _sharedState.env.DEBUG && console.timeEnd("Creating context"); + _sharedState.env.DEBUG && console.time("Resolving content paths"); + this.refreshContentPaths(); + _sharedState.env.DEBUG && console.timeEnd("Resolving content paths"); + if (this.watcher) { + _sharedState.env.DEBUG && console.time("Watch new files"); + this.watcher.refreshWatchedFiles(); + _sharedState.env.DEBUG && console.timeEnd("Watch new files"); + } + for (let file of this.readContentPaths()){ + this.context.changedContent.push(file); + } + return this.context; + } +}; +async function createProcessor(args, cliConfigPath) { + var _args_content; + let postcss = (0, _deps.loadPostcss)(); + let input = args["--input"]; + let output = args["--output"]; + let includePostCss = args["--postcss"]; + let customPostCssPath = typeof args["--postcss"] === "string" ? args["--postcss"] : undefined; + let [beforePlugins, afterPlugins, postcssOptions] = includePostCss ? await loadPostCssPlugins(customPostCssPath) : loadBuiltinPostcssPlugins(); + if (args["--purge"]) { + _log.default.warn("purge-flag-deprecated", [ + "The `--purge` flag has been deprecated.", + "Please use `--content` instead." + ]); + if (!args["--content"]) { + args["--content"] = args["--purge"]; + } + } + var _args_content_split; + let content = (_args_content_split = (_args_content = args["--content"]) === null || _args_content === void 0 ? 
void 0 : _args_content.split(/(?<!{[^}]+),/)) !== null && _args_content_split !== void 0 ? _args_content_split : []; + let tailwindPlugin = ()=>{ + return { + postcssPlugin: "tailwindcss", + Once (root, { result }) { + _sharedState.env.DEBUG && console.time("Compiling CSS"); + (0, _processTailwindFeatures.default)(({ createContext })=>{ + console.error(); + console.error("Rebuilding..."); + return ()=>{ + return state.getContext({ + createContext, + cliConfigPath, + root, + result, + content + }); + }; + })(root, result); + _sharedState.env.DEBUG && console.timeEnd("Compiling CSS"); + } + }; + }; + tailwindPlugin.postcss = true; + let plugins = [ + ...beforePlugins, + tailwindPlugin, + !args["--minify"] && _utils.formatNodes, + ...afterPlugins, + !args["--no-autoprefixer"] && (0, _deps.loadAutoprefixer)(), + args["--minify"] && (0, _deps.loadCssNano)() + ].filter(Boolean); + /** @type {import('postcss').Processor} */ // @ts-ignore + let processor = postcss(plugins); + async function readInput() { + // Piping in data, let's drain the stdin + if (input === "-") { + return (0, _utils.drainStdin)(); + } + // Input file has been provided + if (input) { + return _fs.default.promises.readFile(_path.default.resolve(input), "utf8"); + } + // No input file provided, fallback to default atrules + return "@tailwind base; @tailwind components; @tailwind utilities"; + } + async function build() { + let start = process.hrtime.bigint(); + return readInput().then((css)=>processor.process(css, { + ...postcssOptions, + from: input, + to: output + })).then((result)=>{ + if (!state.watcher) { + return result; + } + _sharedState.env.DEBUG && console.time("Recording PostCSS dependencies"); + for (let message of result.messages){ + if (message.type === "dependency") { + state.contextDependencies.add(message.file); + } + } + _sharedState.env.DEBUG && console.timeEnd("Recording PostCSS dependencies"); + // TODO: This needs to be in a different spot + _sharedState.env.DEBUG && console.time("Watch new files"); + state.watcher.refreshWatchedFiles(); + _sharedState.env.DEBUG && console.timeEnd("Watch new files"); + return result; + }).then((result)=>{ + if (!output) { + process.stdout.write(result.css); + return; + } + return Promise.all([ + (0, _utils.outputFile)(result.opts.to, result.css), + result.map && (0, _utils.outputFile)(result.opts.to + ".map", result.map.toString()) + ]); + }).then(()=>{ + let end = process.hrtime.bigint(); + console.error(); + console.error("Done in", (end - start) / BigInt(1e6) + "ms."); + }).then(()=>{}, (err)=>{ + // TODO: If an initial build fails we can't easily pick up any PostCSS dependencies + // that were collected before the error occurred + // The result is not stored on the error so we have to store it externally + // and pull the messages off of it here somehow + // This results in a less than ideal DX because the watcher will not pick up + // changes to imported CSS if one of them caused an error during the initial build + // If you fix it and then save the main CSS file so there's no error + // The watcher will start watching the imported CSS files and will be + // resilient to future errors. 
+ if (state.watcher) { + console.error(err); + } else { + return Promise.reject(err); + } + }); + } + /** + * @param {{file: string, content(): Promise, extension: string}[]} changes + */ async function parseChanges(changes) { + return Promise.all(changes.map(async (change)=>({ + content: await change.content(), + extension: change.extension + }))); + } + if (input !== undefined && input !== "-") { + state.contextDependencies.add(_path.default.resolve(input)); + } + return { + build, + watch: async ()=>{ + state.watcher = (0, _watching.createWatcher)(args, { + state, + /** + * @param {{file: string, content(): Promise, extension: string}[]} changes + */ async rebuild (changes) { + let needsNewContext = changes.some((change)=>{ + var _state_configBag; + return ((_state_configBag = state.configBag) === null || _state_configBag === void 0 ? void 0 : _state_configBag.dependencies.has(change.file)) || state.contextDependencies.has(change.file); + }); + if (needsNewContext) { + state.context = null; + } else { + for (let change of (await parseChanges(changes))){ + state.changedContent.push(change); + } + } + return build(); + } + }); + await build(); + } + }; +} diff --git a/node_modules/tailwindcss/lib/cli/build/utils.js b/node_modules/tailwindcss/lib/cli/build/utils.js new file mode 100644 index 0000000..3bed060 --- /dev/null +++ b/node_modules/tailwindcss/lib/cli/build/utils.js @@ -0,0 +1,88 @@ +// @ts-check +"use strict"; +Object.defineProperty(exports, "__esModule", { + value: true +}); +function _export(target, all) { + for(var name in all)Object.defineProperty(target, name, { + enumerable: true, + get: all[name] + }); +} +_export(exports, { + indentRecursive: function() { + return indentRecursive; + }, + formatNodes: function() { + return formatNodes; + }, + readFileWithRetries: function() { + return readFileWithRetries; + }, + drainStdin: function() { + return drainStdin; + }, + outputFile: function() { + return outputFile; + } +}); +const _fs = /*#__PURE__*/ _interop_require_default(require("fs")); +const _path = /*#__PURE__*/ _interop_require_default(require("path")); +function _interop_require_default(obj) { + return obj && obj.__esModule ? obj : { + default: obj + }; +} +function indentRecursive(node, indent = 0) { + node.each && node.each((child, i)=>{ + if (!child.raws.before || !child.raws.before.trim() || child.raws.before.includes("\n")) { + child.raws.before = `\n${node.type !== "rule" && i > 0 ? 
"\n" : ""}${" ".repeat(indent)}`; + } + child.raws.after = `\n${" ".repeat(indent)}`; + indentRecursive(child, indent + 1); + }); +} +function formatNodes(root) { + indentRecursive(root); + if (root.first) { + root.first.raws.before = ""; + } +} +async function readFileWithRetries(path, tries = 5) { + for(let n = 0; n <= tries; n++){ + try { + return await _fs.default.promises.readFile(path, "utf8"); + } catch (err) { + if (n !== tries) { + if (err.code === "ENOENT" || err.code === "EBUSY") { + await new Promise((resolve)=>setTimeout(resolve, 10)); + continue; + } + } + throw err; + } + } +} +function drainStdin() { + return new Promise((resolve, reject)=>{ + let result = ""; + process.stdin.on("data", (chunk)=>{ + result += chunk; + }); + process.stdin.on("end", ()=>resolve(result)); + process.stdin.on("error", (err)=>reject(err)); + }); +} +async function outputFile(file, newContents) { + try { + let currentContents = await _fs.default.promises.readFile(file, "utf8"); + if (currentContents === newContents) { + return; // Skip writing the file + } + } catch {} + // Write the file + await _fs.default.promises.mkdir(_path.default.dirname(file), { + recursive: true + }); + await _fs.default.promises.writeFile(file, newContents, "utf8"); +} diff --git a/node_modules/tailwindcss/lib/cli/build/watching.js b/node_modules/tailwindcss/lib/cli/build/watching.js new file mode 100644 index 0000000..0c2ef76 --- /dev/null +++ b/node_modules/tailwindcss/lib/cli/build/watching.js @@ -0,0 +1,182 @@ +// @ts-check +"use strict"; +Object.defineProperty(exports, "__esModule", { + value: true +}); +Object.defineProperty(exports, "createWatcher", { + enumerable: true, + get: function() { + return createWatcher; + } +}); +const _chokidar = /*#__PURE__*/ _interop_require_default(require("chokidar")); +const _fs = /*#__PURE__*/ _interop_require_default(require("fs")); +const _micromatch = /*#__PURE__*/ _interop_require_default(require("micromatch")); +const _normalizepath = /*#__PURE__*/ _interop_require_default(require("normalize-path")); +const _path = /*#__PURE__*/ _interop_require_default(require("path")); +const _utils = require("./utils.js"); +function _interop_require_default(obj) { + return obj && obj.__esModule ? obj : { + default: obj + }; +} +function createWatcher(args, { state , rebuild }) { + let shouldPoll = args["--poll"]; + let shouldCoalesceWriteEvents = shouldPoll || process.platform === "win32"; + // Polling interval in milliseconds + // Used only when polling or coalescing add/change events on Windows + let pollInterval = 10; + let watcher = _chokidar.default.watch([], { + // Force checking for atomic writes in all situations + // This causes chokidar to wait up to 100ms for a file to re-added after it's been unlinked + // This only works when watching directories though + atomic: true, + usePolling: shouldPoll, + interval: shouldPoll ? pollInterval : undefined, + ignoreInitial: true, + awaitWriteFinish: shouldCoalesceWriteEvents ? { + stabilityThreshold: 50, + pollInterval: pollInterval + } : false + }); + // A queue of rebuilds, file reads, etc… to run + let chain = Promise.resolve(); + /** + * A list of files that have been changed since the last rebuild + * + * @type {{file: string, content: () => Promise, extension: string}[]} + */ let changedContent = []; + /** + * A list of files for which a rebuild has already been queued. + * This is used to prevent duplicate rebuilds when multiple events are fired for the same file. 
+ * The rebuilt file is cleared from this list when it's associated rebuild has _started_ + * This is because if the file is changed during a rebuild it won't trigger a new rebuild which it should + **/ let pendingRebuilds = new Set(); + let _timer; + let _reject; + /** + * Rebuilds the changed files and resolves when the rebuild is + * complete regardless of whether it was successful or not + */ async function rebuildAndContinue() { + let changes = changedContent.splice(0); + // There are no changes to rebuild so we can just do nothing + if (changes.length === 0) { + return Promise.resolve(); + } + // Clear all pending rebuilds for the about-to-be-built files + changes.forEach((change)=>pendingRebuilds.delete(change.file)); + // Resolve the promise even when the rebuild fails + return rebuild(changes).then(()=>{}, (e)=>{ + console.error(e.toString()); + }); + } + /** + * + * @param {*} file + * @param {(() => Promise) | null} content + * @param {boolean} skipPendingCheck + * @returns {Promise} + */ function recordChangedFile(file, content = null, skipPendingCheck = false) { + file = _path.default.resolve(file); + // Applications like Vim/Neovim fire both rename and change events in succession for atomic writes + // In that case rebuild has already been queued by rename, so can be skipped in change + if (pendingRebuilds.has(file) && !skipPendingCheck) { + return Promise.resolve(); + } + // Mark that a rebuild of this file is going to happen + // It MUST happen synchronously before the rebuild is queued for this to be effective + pendingRebuilds.add(file); + changedContent.push({ + file, + content: content !== null && content !== void 0 ? content : ()=>_fs.default.promises.readFile(file, "utf8"), + extension: _path.default.extname(file).slice(1) + }); + if (_timer) { + clearTimeout(_timer); + _reject(); + } + // If a rebuild is already in progress we don't want to start another one until the 10ms timer has expired + chain = chain.then(()=>new Promise((resolve, reject)=>{ + _timer = setTimeout(resolve, 10); + _reject = reject; + })); + // Resolves once this file has been rebuilt (or the rebuild for this file has failed) + // This queues as many rebuilds as there are changed files + // But those rebuilds happen after some delay + // And will immediately resolve if there are no changes + chain = chain.then(rebuildAndContinue, rebuildAndContinue); + return chain; + } + watcher.on("change", (file)=>recordChangedFile(file)); + watcher.on("add", (file)=>recordChangedFile(file)); + // Restore watching any files that are "removed" + // This can happen when a file is pseudo-atomically replaced (a copy is created, overwritten, the old one is unlinked, and the new one is renamed) + // TODO: An an optimization we should allow removal when the config changes + watcher.on("unlink", (file)=>{ + file = (0, _normalizepath.default)(file); + // Only re-add the file if it's not covered by a dynamic pattern + if (!_micromatch.default.some([ + file + ], state.contentPatterns.dynamic)) { + watcher.add(file); + } + }); + // Some applications such as Visual Studio (but not VS Code) + // will only fire a rename event for atomic writes and not a change event + // This is very likely a chokidar bug but it's one we need to work around + // We treat this as a change event and rebuild the CSS + watcher.on("raw", (evt, filePath, meta)=>{ + if (evt !== "rename") { + return; + } + let watchedPath = meta.watchedPath; + // Watched path might be the file itself + // Or the directory it is in + filePath = 
watchedPath.endsWith(filePath) ? watchedPath : _path.default.join(watchedPath, filePath); + // Skip this event since the files it is for does not match any of the registered content globs + if (!_micromatch.default.some([ + filePath + ], state.contentPatterns.all)) { + return; + } + // Skip since we've already queued a rebuild for this file that hasn't happened yet + if (pendingRebuilds.has(filePath)) { + return; + } + // We'll go ahead and add the file to the pending rebuilds list here + // It'll be removed when the rebuild starts unless the read fails + // which will be taken care of as well + pendingRebuilds.add(filePath); + async function enqueue() { + try { + // We need to read the file as early as possible outside of the chain + // because it may be gone by the time we get to it. doing the read + // immediately increases the chance that the file is still there + let content = await (0, _utils.readFileWithRetries)(_path.default.resolve(filePath)); + if (content === undefined) { + return; + } + // This will push the rebuild onto the chain + // We MUST skip the rebuild check here otherwise the rebuild will never happen on Linux + // This is because the order of events and timing is different on Linux + // @ts-ignore: TypeScript isn't picking up that content is a string here + await recordChangedFile(filePath, ()=>content, true); + } catch { + // If reading the file fails, it's was probably a deleted temporary file + // So we can ignore it and no rebuild is needed + } + } + enqueue().then(()=>{ + // If the file read fails we still need to make sure the file isn't stuck in the pending rebuilds list + pendingRebuilds.delete(filePath); + }); + }); + return { + fswatcher: watcher, + refreshWatchedFiles () { + watcher.add(Array.from(state.contextDependencies)); + watcher.add(Array.from(state.configBag.dependencies)); + watcher.add(state.contentPatterns.all); + } + }; +} diff --git a/node_modules/tailwindcss/lib/cli/help/index.js b/node_modules/tailwindcss/lib/cli/help/index.js new file mode 100644 index 0000000..030997f --- /dev/null +++ b/node_modules/tailwindcss/lib/cli/help/index.js @@ -0,0 +1,73 @@ +// @ts-check +"use strict"; +Object.defineProperty(exports, "__esModule", { + value: true +}); +Object.defineProperty(exports, "help", { + enumerable: true, + get: function() { + return help; + } +}); +const _packagejson = /*#__PURE__*/ _interop_require_default(require("../../../package.json")); +function _interop_require_default(obj) { + return obj && obj.__esModule ? 
obj : { + default: obj + }; +} +function help({ message , usage , commands , options }) { + let indent = 2; + // Render header + console.log(); + console.log(`${_packagejson.default.name} v${_packagejson.default.version}`); + // Render message + if (message) { + console.log(); + for (let msg of message.split("\n")){ + console.log(msg); + } + } + // Render usage + if (usage && usage.length > 0) { + console.log(); + console.log("Usage:"); + for (let example of usage){ + console.log(" ".repeat(indent), example); + } + } + // Render commands + if (commands && commands.length > 0) { + console.log(); + console.log("Commands:"); + for (let command of commands){ + console.log(" ".repeat(indent), command); + } + } + // Render options + if (options) { + let groupedOptions = {}; + for (let [key, value] of Object.entries(options)){ + if (typeof value === "object") { + groupedOptions[key] = { + ...value, + flags: [ + key + ] + }; + } else { + groupedOptions[value].flags.push(key); + } + } + console.log(); + console.log("Options:"); + for (let { flags , description , deprecated } of Object.values(groupedOptions)){ + if (deprecated) continue; + if (flags.length === 1) { + console.log(" ".repeat(indent + 4 /* 4 = "-i, ".length */ ), flags.slice().reverse().join(", ").padEnd(20, " "), description); + } else { + console.log(" ".repeat(indent), flags.slice().reverse().join(", ").padEnd(24, " "), description); + } + } + } + console.log(); +} diff --git a/node_modules/tailwindcss/lib/cli/index.js b/node_modules/tailwindcss/lib/cli/index.js new file mode 100644 index 0000000..e6e2e27 --- /dev/null +++ b/node_modules/tailwindcss/lib/cli/index.js @@ -0,0 +1,230 @@ +#!/usr/bin/env node +"use strict"; +Object.defineProperty(exports, "__esModule", { + value: true +}); +const _path = /*#__PURE__*/ _interop_require_default(require("path")); +const _arg = /*#__PURE__*/ _interop_require_default(require("arg")); +const _fs = /*#__PURE__*/ _interop_require_default(require("fs")); +const _build = require("./build"); +const _help = require("./help"); +const _init = require("./init"); +function _interop_require_default(obj) { + return obj && obj.__esModule ? 
obj : { + default: obj + }; +} +function oneOf(...options) { + return Object.assign((value = true)=>{ + for (let option of options){ + let parsed = option(value); + if (parsed === value) { + return parsed; + } + } + throw new Error("..."); + }, { + manualParsing: true + }); +} +let commands = { + init: { + run: _init.init, + args: { + "--esm": { + type: Boolean, + description: `Initialize configuration file as ESM` + }, + "--ts": { + type: Boolean, + description: `Initialize configuration file as TypeScript` + }, + "--postcss": { + type: Boolean, + description: `Initialize a \`postcss.config.js\` file` + }, + "--full": { + type: Boolean, + description: `Include the default values for all options in the generated configuration file` + }, + "-f": "--full", + "-p": "--postcss" + } + }, + build: { + run: _build.build, + args: { + "--input": { + type: String, + description: "Input file" + }, + "--output": { + type: String, + description: "Output file" + }, + "--watch": { + type: oneOf(String, Boolean), + description: "Watch for changes and rebuild as needed" + }, + "--poll": { + type: Boolean, + description: "Use polling instead of filesystem events when watching" + }, + "--content": { + type: String, + description: "Content paths to use for removing unused classes" + }, + "--purge": { + type: String, + deprecated: true + }, + "--postcss": { + type: oneOf(String, Boolean), + description: "Load custom PostCSS configuration" + }, + "--minify": { + type: Boolean, + description: "Minify the output" + }, + "--config": { + type: String, + description: "Path to a custom config file" + }, + "--no-autoprefixer": { + type: Boolean, + description: "Disable autoprefixer" + }, + "-c": "--config", + "-i": "--input", + "-o": "--output", + "-m": "--minify", + "-w": "--watch", + "-p": "--poll" + } + } +}; +let sharedFlags = { + "--help": { + type: Boolean, + description: "Display usage information" + }, + "-h": "--help" +}; +if (process.stdout.isTTY /* Detect redirecting output to a file */ && (process.argv[2] === undefined || process.argv.slice(2).every((flag)=>sharedFlags[flag] !== undefined))) { + (0, _help.help)({ + usage: [ + "tailwindcss [--input input.css] [--output output.css] [--watch] [options...]", + "tailwindcss init [--full] [--postcss] [options...]" + ], + commands: Object.keys(commands).filter((command)=>command !== "build").map((command)=>`${command} [options]`), + options: { + ...commands.build.args, + ...sharedFlags + } + }); + process.exit(0); +} +let command = ((arg = "")=>arg.startsWith("-") ? undefined : arg)(process.argv[2]) || "build"; +if (commands[command] === undefined) { + if (_fs.default.existsSync(_path.default.resolve(command))) { + // TODO: Deprecate this in future versions + // Check if non-existing command, might be a file. + command = "build"; + } else { + (0, _help.help)({ + message: `Invalid command: ${command}`, + usage: [ + "tailwindcss [options]" + ], + commands: Object.keys(commands).filter((command)=>command !== "build").map((command)=>`${command} [options]`), + options: sharedFlags + }); + process.exit(1); + } +} +// Execute command +let { args: flags , run } = commands[command]; +let args = (()=>{ + try { + let result = (0, _arg.default)(Object.fromEntries(Object.entries({ + ...flags, + ...sharedFlags + }).filter(([_key, value])=>{ + var _value_type; + return !(value === null || value === void 0 ? void 0 : (_value_type = value.type) === null || _value_type === void 0 ? void 0 : _value_type.manualParsing); + }).map(([key, value])=>[ + key, + typeof value === "object" ? 
value.type : value + ])), { + permissive: true + }); + // Manual parsing of flags to allow for special flags like oneOf(Boolean, String) + for(let i = result["_"].length - 1; i >= 0; --i){ + let flag = result["_"][i]; + if (!flag.startsWith("-")) continue; + let [flagName, flagValue] = flag.split("="); + let handler = flags[flagName]; + // Resolve flagName & handler + while(typeof handler === "string"){ + flagName = handler; + handler = flags[handler]; + } + if (!handler) continue; + let args = []; + let offset = i + 1; + // --flag value syntax was used so we need to pull `value` from `args` + if (flagValue === undefined) { + // Parse args for current flag + while(result["_"][offset] && !result["_"][offset].startsWith("-")){ + args.push(result["_"][offset++]); + } + // Cleanup manually parsed flags + args + result["_"].splice(i, 1 + args.length); + // No args were provided, use default value defined in handler + // One arg was provided, use that directly + // Multiple args were provided so pass them all in an array + flagValue = args.length === 0 ? undefined : args.length === 1 ? args[0] : args; + } else { + // Remove the whole flag from the args array + result["_"].splice(i, 1); + } + // Set the resolved value in the `result` object + result[flagName] = handler.type(flagValue, flagName); + } + // Ensure that the `command` is always the first argument in the `args`. + // This is important so that we don't have to check if a default command + // (build) was used or not from within each plugin. + // + // E.g.: tailwindcss input.css -> _: ['build', 'input.css'] + // E.g.: tailwindcss build input.css -> _: ['build', 'input.css'] + if (result["_"][0] !== command) { + result["_"].unshift(command); + } + return result; + } catch (err) { + if (err.code === "ARG_UNKNOWN_OPTION") { + (0, _help.help)({ + message: err.message, + usage: [ + "tailwindcss [options]" + ], + options: sharedFlags + }); + process.exit(1); + } + throw err; + } +})(); +if (args["--help"]) { + (0, _help.help)({ + options: { + ...flags, + ...sharedFlags + }, + usage: [ + `tailwindcss ${command} [options]` + ] + }); + process.exit(0); +} +run(args); diff --git a/node_modules/tailwindcss/lib/cli/init/index.js b/node_modules/tailwindcss/lib/cli/init/index.js new file mode 100644 index 0000000..47caf30 --- /dev/null +++ b/node_modules/tailwindcss/lib/cli/init/index.js @@ -0,0 +1,63 @@ +// @ts-check +"use strict"; +Object.defineProperty(exports, "__esModule", { + value: true +}); +Object.defineProperty(exports, "init", { + enumerable: true, + get: function() { + return init; + } +}); +const _fs = /*#__PURE__*/ _interop_require_default(require("fs")); +const _path = /*#__PURE__*/ _interop_require_default(require("path")); +function _interop_require_default(obj) { + return obj && obj.__esModule ? obj : { + default: obj + }; +} +function isESM() { + const pkgPath = _path.default.resolve("./package.json"); + try { + let pkg = JSON.parse(_fs.default.readFileSync(pkgPath, "utf8")); + return pkg.type && pkg.type === "module"; + } catch (err) { + return false; + } +} +function init(args) { + let messages = []; + let isProjectESM = args["--ts"] || args["--esm"] || isESM(); + let syntax = args["--ts"] ? "ts" : isProjectESM ? "js" : "cjs"; + let extension = args["--ts"] ? "ts" : "js"; + var _args___; + let tailwindConfigLocation = _path.default.resolve((_args___ = args["_"][1]) !== null && _args___ !== void 0 ? 
_args___ : `./tailwind.config.${extension}`); + if (_fs.default.existsSync(tailwindConfigLocation)) { + messages.push(`${_path.default.basename(tailwindConfigLocation)} already exists.`); + } else { + let stubContentsFile = _fs.default.readFileSync(args["--full"] ? _path.default.resolve(__dirname, "../../../stubs/config.full.js") : _path.default.resolve(__dirname, "../../../stubs/config.simple.js"), "utf8"); + let stubFile = _fs.default.readFileSync(_path.default.resolve(__dirname, `../../../stubs/tailwind.config.${syntax}`), "utf8"); + // Change colors import + stubContentsFile = stubContentsFile.replace("../colors", "tailwindcss/colors"); + // Replace contents of {ts,js,cjs} file with the stub {simple,full}. + stubFile = stubFile.replace("__CONFIG__", stubContentsFile.replace("module.exports =", "").trim()).trim() + "\n\n"; + _fs.default.writeFileSync(tailwindConfigLocation, stubFile, "utf8"); + messages.push(`Created Tailwind CSS config file: ${_path.default.basename(tailwindConfigLocation)}`); + } + if (args["--postcss"]) { + let postcssConfigLocation = _path.default.resolve("./postcss.config.js"); + if (_fs.default.existsSync(postcssConfigLocation)) { + messages.push(`${_path.default.basename(postcssConfigLocation)} already exists.`); + } else { + let stubFile = _fs.default.readFileSync(isProjectESM ? _path.default.resolve(__dirname, "../../../stubs/postcss.config.js") : _path.default.resolve(__dirname, "../../../stubs/postcss.config.cjs"), "utf8"); + _fs.default.writeFileSync(postcssConfigLocation, stubFile, "utf8"); + messages.push(`Created PostCSS config file: ${_path.default.basename(postcssConfigLocation)}`); + } + } + if (messages.length > 0) { + console.log(); + for (let message of messages){ + console.log(message); + } + } +} diff --git a/node_modules/tailwindcss/lib/corePluginList.js b/node_modules/tailwindcss/lib/corePluginList.js new file mode 100644 index 0000000..b94745c --- /dev/null +++ b/node_modules/tailwindcss/lib/corePluginList.js @@ -0,0 +1,187 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { + value: true +}); +Object.defineProperty(exports, "default", { + enumerable: true, + get: function() { + return _default; + } +}); +const _default = [ + "preflight", + "container", + "accessibility", + "pointerEvents", + "visibility", + "position", + "inset", + "isolation", + "zIndex", + "order", + "gridColumn", + "gridColumnStart", + "gridColumnEnd", + "gridRow", + "gridRowStart", + "gridRowEnd", + "float", + "clear", + "margin", + "boxSizing", + "lineClamp", + "display", + "aspectRatio", + "height", + "maxHeight", + "minHeight", + "width", + "minWidth", + "maxWidth", + "flex", + "flexShrink", + "flexGrow", + "flexBasis", + "tableLayout", + "captionSide", + "borderCollapse", + "borderSpacing", + "transformOrigin", + "translate", + "rotate", + "skew", + "scale", + "transform", + "animation", + "cursor", + "touchAction", + "userSelect", + "resize", + "scrollSnapType", + "scrollSnapAlign", + "scrollSnapStop", + "scrollMargin", + "scrollPadding", + "listStylePosition", + "listStyleType", + "listStyleImage", + "appearance", + "columns", + "breakBefore", + "breakInside", + "breakAfter", + "gridAutoColumns", + "gridAutoFlow", + "gridAutoRows", + "gridTemplateColumns", + "gridTemplateRows", + "flexDirection", + "flexWrap", + "placeContent", + "placeItems", + "alignContent", + "alignItems", + "justifyContent", + "justifyItems", + "gap", + "space", + "divideWidth", + "divideStyle", + "divideColor", + "divideOpacity", + "placeSelf", + "alignSelf", + "justifySelf", + 
"overflow", + "overscrollBehavior", + "scrollBehavior", + "textOverflow", + "hyphens", + "whitespace", + "wordBreak", + "borderRadius", + "borderWidth", + "borderStyle", + "borderColor", + "borderOpacity", + "backgroundColor", + "backgroundOpacity", + "backgroundImage", + "gradientColorStops", + "boxDecorationBreak", + "backgroundSize", + "backgroundAttachment", + "backgroundClip", + "backgroundPosition", + "backgroundRepeat", + "backgroundOrigin", + "fill", + "stroke", + "strokeWidth", + "objectFit", + "objectPosition", + "padding", + "textAlign", + "textIndent", + "verticalAlign", + "fontFamily", + "fontSize", + "fontWeight", + "textTransform", + "fontStyle", + "fontVariantNumeric", + "lineHeight", + "letterSpacing", + "textColor", + "textOpacity", + "textDecoration", + "textDecorationColor", + "textDecorationStyle", + "textDecorationThickness", + "textUnderlineOffset", + "fontSmoothing", + "placeholderColor", + "placeholderOpacity", + "caretColor", + "accentColor", + "opacity", + "backgroundBlendMode", + "mixBlendMode", + "boxShadow", + "boxShadowColor", + "outlineStyle", + "outlineWidth", + "outlineOffset", + "outlineColor", + "ringWidth", + "ringColor", + "ringOpacity", + "ringOffsetWidth", + "ringOffsetColor", + "blur", + "brightness", + "contrast", + "dropShadow", + "grayscale", + "hueRotate", + "invert", + "saturate", + "sepia", + "filter", + "backdropBlur", + "backdropBrightness", + "backdropContrast", + "backdropGrayscale", + "backdropHueRotate", + "backdropInvert", + "backdropOpacity", + "backdropSaturate", + "backdropSepia", + "backdropFilter", + "transitionProperty", + "transitionDelay", + "transitionDuration", + "transitionTimingFunction", + "willChange", + "content" +]; diff --git a/node_modules/tailwindcss/lib/corePlugins.js b/node_modules/tailwindcss/lib/corePlugins.js new file mode 100644 index 0000000..ece3b3f --- /dev/null +++ b/node_modules/tailwindcss/lib/corePlugins.js @@ -0,0 +1,4173 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { + value: true +}); +function _export(target, all) { + for(var name in all)Object.defineProperty(target, name, { + enumerable: true, + get: all[name] + }); +} +_export(exports, { + variantPlugins: function() { + return variantPlugins; + }, + corePlugins: function() { + return corePlugins; + } +}); +const _fs = /*#__PURE__*/ _interop_require_default(require("fs")); +const _path = /*#__PURE__*/ _interop_require_wildcard(require("path")); +const _postcss = /*#__PURE__*/ _interop_require_default(require("postcss")); +const _createUtilityPlugin = /*#__PURE__*/ _interop_require_default(require("./util/createUtilityPlugin")); +const _buildMediaQuery = /*#__PURE__*/ _interop_require_default(require("./util/buildMediaQuery")); +const _escapeClassName = /*#__PURE__*/ _interop_require_default(require("./util/escapeClassName")); +const _parseAnimationValue = /*#__PURE__*/ _interop_require_default(require("./util/parseAnimationValue")); +const _flattenColorPalette = /*#__PURE__*/ _interop_require_default(require("./util/flattenColorPalette")); +const _withAlphaVariable = /*#__PURE__*/ _interop_require_wildcard(require("./util/withAlphaVariable")); +const _toColorValue = /*#__PURE__*/ _interop_require_default(require("./util/toColorValue")); +const _isPlainObject = /*#__PURE__*/ _interop_require_default(require("./util/isPlainObject")); +const _transformThemeValue = /*#__PURE__*/ _interop_require_default(require("./util/transformThemeValue")); +const _packagejson = require("../package.json"); +const _log = /*#__PURE__*/ 
_interop_require_default(require("./util/log")); +const _normalizeScreens = require("./util/normalizeScreens"); +const _parseBoxShadowValue = require("./util/parseBoxShadowValue"); +const _removeAlphaVariables = require("./util/removeAlphaVariables"); +const _featureFlags = require("./featureFlags"); +const _dataTypes = require("./util/dataTypes"); +function _interop_require_default(obj) { + return obj && obj.__esModule ? obj : { + default: obj + }; +} +function _getRequireWildcardCache(nodeInterop) { + if (typeof WeakMap !== "function") return null; + var cacheBabelInterop = new WeakMap(); + var cacheNodeInterop = new WeakMap(); + return (_getRequireWildcardCache = function(nodeInterop) { + return nodeInterop ? cacheNodeInterop : cacheBabelInterop; + })(nodeInterop); +} +function _interop_require_wildcard(obj, nodeInterop) { + if (!nodeInterop && obj && obj.__esModule) { + return obj; + } + if (obj === null || typeof obj !== "object" && typeof obj !== "function") { + return { + default: obj + }; + } + var cache = _getRequireWildcardCache(nodeInterop); + if (cache && cache.has(obj)) { + return cache.get(obj); + } + var newObj = {}; + var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor; + for(var key in obj){ + if (key !== "default" && Object.prototype.hasOwnProperty.call(obj, key)) { + var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null; + if (desc && (desc.get || desc.set)) { + Object.defineProperty(newObj, key, desc); + } else { + newObj[key] = obj[key]; + } + } + } + newObj.default = obj; + if (cache) { + cache.set(obj, newObj); + } + return newObj; +} +let variantPlugins = { + pseudoElementVariants: ({ addVariant })=>{ + addVariant("first-letter", "&::first-letter"); + addVariant("first-line", "&::first-line"); + addVariant("marker", [ + ({ container })=>{ + (0, _removeAlphaVariables.removeAlphaVariables)(container, [ + "--tw-text-opacity" + ]); + return "& *::marker"; + }, + ({ container })=>{ + (0, _removeAlphaVariables.removeAlphaVariables)(container, [ + "--tw-text-opacity" + ]); + return "&::marker"; + } + ]); + addVariant("selection", [ + "& *::selection", + "&::selection" + ]); + addVariant("file", "&::file-selector-button"); + addVariant("placeholder", "&::placeholder"); + addVariant("backdrop", "&::backdrop"); + addVariant("before", ({ container })=>{ + container.walkRules((rule)=>{ + let foundContent = false; + rule.walkDecls("content", ()=>{ + foundContent = true; + }); + if (!foundContent) { + rule.prepend(_postcss.default.decl({ + prop: "content", + value: "var(--tw-content)" + })); + } + }); + return "&::before"; + }); + addVariant("after", ({ container })=>{ + container.walkRules((rule)=>{ + let foundContent = false; + rule.walkDecls("content", ()=>{ + foundContent = true; + }); + if (!foundContent) { + rule.prepend(_postcss.default.decl({ + prop: "content", + value: "var(--tw-content)" + })); + } + }); + return "&::after"; + }); + }, + pseudoClassVariants: ({ addVariant , matchVariant , config })=>{ + let pseudoVariants = [ + // Positional + [ + "first", + "&:first-child" + ], + [ + "last", + "&:last-child" + ], + [ + "only", + "&:only-child" + ], + [ + "odd", + "&:nth-child(odd)" + ], + [ + "even", + "&:nth-child(even)" + ], + "first-of-type", + "last-of-type", + "only-of-type", + // State + [ + "visited", + ({ container })=>{ + (0, _removeAlphaVariables.removeAlphaVariables)(container, [ + "--tw-text-opacity", + "--tw-border-opacity", + "--tw-bg-opacity" + ]); + return "&:visited"; + } + ], + "target", + 
[ + "open", + "&[open]" + ], + // Forms + "default", + "checked", + "indeterminate", + "placeholder-shown", + "autofill", + "optional", + "required", + "valid", + "invalid", + "in-range", + "out-of-range", + "read-only", + // Content + "empty", + // Interactive + "focus-within", + [ + "hover", + !(0, _featureFlags.flagEnabled)(config(), "hoverOnlyWhenSupported") ? "&:hover" : "@media (hover: hover) and (pointer: fine) { &:hover }" + ], + "focus", + "focus-visible", + "active", + "enabled", + "disabled" + ].map((variant)=>Array.isArray(variant) ? variant : [ + variant, + `&:${variant}` + ]); + for (let [variantName, state] of pseudoVariants){ + addVariant(variantName, (ctx)=>{ + let result = typeof state === "function" ? state(ctx) : state; + return result; + }); + } + let variants = { + group: (_, { modifier })=>modifier ? [ + `:merge(.group\\/${(0, _escapeClassName.default)(modifier)})`, + " &" + ] : [ + `:merge(.group)`, + " &" + ], + peer: (_, { modifier })=>modifier ? [ + `:merge(.peer\\/${(0, _escapeClassName.default)(modifier)})`, + " ~ &" + ] : [ + `:merge(.peer)`, + " ~ &" + ] + }; + for (let [name, fn] of Object.entries(variants)){ + matchVariant(name, (value = "", extra)=>{ + let result = (0, _dataTypes.normalize)(typeof value === "function" ? value(extra) : value); + if (!result.includes("&")) result = "&" + result; + let [a, b] = fn("", extra); + let start = null; + let end = null; + let quotes = 0; + for(let i = 0; i < result.length; ++i){ + let c = result[i]; + if (c === "&") { + start = i; + } else if (c === "'" || c === '"') { + quotes += 1; + } else if (start !== null && c === " " && !quotes) { + end = i; + } + } + if (start !== null && end === null) { + end = result.length; + } + // Basically this but can handle quotes: + // result.replace(/&(\S+)?/g, (_, pseudo = '') => a + pseudo + b) + return result.slice(0, start) + a + result.slice(start + 1, end) + b + result.slice(end); + }, { + values: Object.fromEntries(pseudoVariants) + }); + } + }, + directionVariants: ({ addVariant })=>{ + addVariant("ltr", ':is([dir="ltr"] &)'); + addVariant("rtl", ':is([dir="rtl"] &)'); + }, + reducedMotionVariants: ({ addVariant })=>{ + addVariant("motion-safe", "@media (prefers-reduced-motion: no-preference)"); + addVariant("motion-reduce", "@media (prefers-reduced-motion: reduce)"); + }, + darkVariants: ({ config , addVariant })=>{ + let [mode, className = ".dark"] = [].concat(config("darkMode", "media")); + if (mode === false) { + mode = "media"; + _log.default.warn("darkmode-false", [ + "The `darkMode` option in your Tailwind CSS configuration is set to `false`, which now behaves the same as `media`.", + "Change `darkMode` to `media` or remove it entirely.", + "https://tailwindcss.com/docs/upgrade-guide#remove-dark-mode-configuration" + ]); + } + if (mode === "class") { + addVariant("dark", `:is(${className} &)`); + } else if (mode === "media") { + addVariant("dark", "@media (prefers-color-scheme: dark)"); + } + }, + printVariant: ({ addVariant })=>{ + addVariant("print", "@media print"); + }, + screenVariants: ({ theme , addVariant , matchVariant })=>{ + var _theme; + let rawScreens = (_theme = theme("screens")) !== null && _theme !== void 0 ? 
_theme : {}; + let areSimpleScreens = Object.values(rawScreens).every((v)=>typeof v === "string"); + let screens = (0, _normalizeScreens.normalizeScreens)(theme("screens")); + /** @type {Set} */ let unitCache = new Set([]); + /** @param {string} value */ function units(value) { + var _value_match; + var _value_match_; + return (_value_match_ = (_value_match = value.match(/(\D+)$/)) === null || _value_match === void 0 ? void 0 : _value_match[1]) !== null && _value_match_ !== void 0 ? _value_match_ : "(none)"; + } + /** @param {string} value */ function recordUnits(value) { + if (value !== undefined) { + unitCache.add(units(value)); + } + } + /** @param {string} value */ function canUseUnits(value) { + recordUnits(value); + // If the cache was empty it'll become 1 because we've just added the current unit + // If the cache was not empty and the units are the same the size doesn't change + // Otherwise, if the units are different from what is already known the size will always be > 1 + return unitCache.size === 1; + } + for (const screen of screens){ + for (const value of screen.values){ + recordUnits(value.min); + recordUnits(value.max); + } + } + let screensUseConsistentUnits = unitCache.size <= 1; + /** + * @typedef {import('./util/normalizeScreens').Screen} Screen + */ /** + * @param {'min' | 'max'} type + * @returns {Record} + */ function buildScreenValues(type) { + return Object.fromEntries(screens.filter((screen)=>(0, _normalizeScreens.isScreenSortable)(screen).result).map((screen)=>{ + let { min , max } = screen.values[0]; + if (type === "min" && min !== undefined) { + return screen; + } else if (type === "min" && max !== undefined) { + return { + ...screen, + not: !screen.not + }; + } else if (type === "max" && max !== undefined) { + return screen; + } else if (type === "max" && min !== undefined) { + return { + ...screen, + not: !screen.not + }; + } + }).map((screen)=>[ + screen.name, + screen + ])); + } + /** + * @param {'min' | 'max'} type + * @returns {(a: { value: string | Screen }, z: { value: string | Screen }) => number} + */ function buildSort(type) { + return (a, z)=>(0, _normalizeScreens.compareScreens)(type, a.value, z.value); + } + let maxSort = buildSort("max"); + let minSort = buildSort("min"); + /** @param {'min'|'max'} type */ function buildScreenVariant(type) { + return (value)=>{ + if (!areSimpleScreens) { + _log.default.warn("complex-screen-config", [ + "The `min-*` and `max-*` variants are not supported with a `screens` configuration containing objects." + ]); + return []; + } else if (!screensUseConsistentUnits) { + _log.default.warn("mixed-screen-units", [ + "The `min-*` and `max-*` variants are not supported with a `screens` configuration containing mixed units." + ]); + return []; + } else if (typeof value === "string" && !canUseUnits(value)) { + _log.default.warn("minmax-have-mixed-units", [ + "The `min-*` and `max-*` variants are not supported with a `screens` configuration containing mixed units." + ]); + return []; + } + return [ + `@media ${(0, _buildMediaQuery.default)((0, _normalizeScreens.toScreen)(value, type))}` + ]; + }; + } + matchVariant("max", buildScreenVariant("max"), { + sort: maxSort, + values: areSimpleScreens ? buildScreenValues("max") : {} + }); + // screens and min-* are sorted together when they can be + let id = "min-screens"; + for (let screen of screens){ + addVariant(screen.name, `@media ${(0, _buildMediaQuery.default)(screen)}`, { + id, + sort: areSimpleScreens && screensUseConsistentUnits ? 
minSort : undefined, + value: screen + }); + } + matchVariant("min", buildScreenVariant("min"), { + id, + sort: minSort + }); + }, + supportsVariants: ({ matchVariant , theme })=>{ + var _theme; + matchVariant("supports", (value = "")=>{ + let check = (0, _dataTypes.normalize)(value); + let isRaw = /^\w*\s*\(/.test(check); + // Chrome has a bug where `(condtion1)or(condition2)` is not valid + // But `(condition1) or (condition2)` is supported. + check = isRaw ? check.replace(/\b(and|or|not)\b/g, " $1 ") : check; + if (isRaw) { + return `@supports ${check}`; + } + if (!check.includes(":")) { + check = `${check}: var(--tw)`; + } + if (!(check.startsWith("(") && check.endsWith(")"))) { + check = `(${check})`; + } + return `@supports ${check}`; + }, { + values: (_theme = theme("supports")) !== null && _theme !== void 0 ? _theme : {} + }); + }, + ariaVariants: ({ matchVariant , theme })=>{ + var _theme; + matchVariant("aria", (value)=>`&[aria-${(0, _dataTypes.normalize)(value)}]`, { + values: (_theme = theme("aria")) !== null && _theme !== void 0 ? _theme : {} + }); + var _theme1; + matchVariant("group-aria", (value, { modifier })=>modifier ? `:merge(.group\\/${modifier})[aria-${(0, _dataTypes.normalize)(value)}] &` : `:merge(.group)[aria-${(0, _dataTypes.normalize)(value)}] &`, { + values: (_theme1 = theme("aria")) !== null && _theme1 !== void 0 ? _theme1 : {} + }); + var _theme2; + matchVariant("peer-aria", (value, { modifier })=>modifier ? `:merge(.peer\\/${modifier})[aria-${(0, _dataTypes.normalize)(value)}] ~ &` : `:merge(.peer)[aria-${(0, _dataTypes.normalize)(value)}] ~ &`, { + values: (_theme2 = theme("aria")) !== null && _theme2 !== void 0 ? _theme2 : {} + }); + }, + dataVariants: ({ matchVariant , theme })=>{ + var _theme; + matchVariant("data", (value)=>`&[data-${(0, _dataTypes.normalize)(value)}]`, { + values: (_theme = theme("data")) !== null && _theme !== void 0 ? _theme : {} + }); + var _theme1; + matchVariant("group-data", (value, { modifier })=>modifier ? `:merge(.group\\/${modifier})[data-${(0, _dataTypes.normalize)(value)}] &` : `:merge(.group)[data-${(0, _dataTypes.normalize)(value)}] &`, { + values: (_theme1 = theme("data")) !== null && _theme1 !== void 0 ? _theme1 : {} + }); + var _theme2; + matchVariant("peer-data", (value, { modifier })=>modifier ? `:merge(.peer\\/${modifier})[data-${(0, _dataTypes.normalize)(value)}] ~ &` : `:merge(.peer)[data-${(0, _dataTypes.normalize)(value)}] ~ &`, { + values: (_theme2 = theme("data")) !== null && _theme2 !== void 0 ? 
_theme2 : {} + }); + }, + orientationVariants: ({ addVariant })=>{ + addVariant("portrait", "@media (orientation: portrait)"); + addVariant("landscape", "@media (orientation: landscape)"); + }, + prefersContrastVariants: ({ addVariant })=>{ + addVariant("contrast-more", "@media (prefers-contrast: more)"); + addVariant("contrast-less", "@media (prefers-contrast: less)"); + } +}; +let cssTransformValue = [ + "translate(var(--tw-translate-x), var(--tw-translate-y))", + "rotate(var(--tw-rotate))", + "skewX(var(--tw-skew-x))", + "skewY(var(--tw-skew-y))", + "scaleX(var(--tw-scale-x))", + "scaleY(var(--tw-scale-y))" +].join(" "); +let cssFilterValue = [ + "var(--tw-blur)", + "var(--tw-brightness)", + "var(--tw-contrast)", + "var(--tw-grayscale)", + "var(--tw-hue-rotate)", + "var(--tw-invert)", + "var(--tw-saturate)", + "var(--tw-sepia)", + "var(--tw-drop-shadow)" +].join(" "); +let cssBackdropFilterValue = [ + "var(--tw-backdrop-blur)", + "var(--tw-backdrop-brightness)", + "var(--tw-backdrop-contrast)", + "var(--tw-backdrop-grayscale)", + "var(--tw-backdrop-hue-rotate)", + "var(--tw-backdrop-invert)", + "var(--tw-backdrop-opacity)", + "var(--tw-backdrop-saturate)", + "var(--tw-backdrop-sepia)" +].join(" "); +let corePlugins = { + preflight: ({ addBase })=>{ + let preflightStyles = _postcss.default.parse(_fs.default.readFileSync(_path.join(__dirname, "./css/preflight.css"), "utf8")); + addBase([ + _postcss.default.comment({ + text: `! tailwindcss v${_packagejson.version} | MIT License | https://tailwindcss.com` + }), + ...preflightStyles.nodes + ]); + }, + container: (()=>{ + function extractMinWidths(breakpoints = []) { + return breakpoints.flatMap((breakpoint)=>breakpoint.values.map((breakpoint)=>breakpoint.min)).filter((v)=>v !== undefined); + } + function mapMinWidthsToPadding(minWidths, screens, paddings) { + if (typeof paddings === "undefined") { + return []; + } + if (!(typeof paddings === "object" && paddings !== null)) { + return [ + { + screen: "DEFAULT", + minWidth: 0, + padding: paddings + } + ]; + } + let mapping = []; + if (paddings.DEFAULT) { + mapping.push({ + screen: "DEFAULT", + minWidth: 0, + padding: paddings.DEFAULT + }); + } + for (let minWidth of minWidths){ + for (let screen of screens){ + for (let { min } of screen.values){ + if (min === minWidth) { + mapping.push({ + minWidth, + padding: paddings[screen.name] + }); + } + } + } + } + return mapping; + } + return function({ addComponents , theme }) { + let screens = (0, _normalizeScreens.normalizeScreens)(theme("container.screens", theme("screens"))); + let minWidths = extractMinWidths(screens); + let paddings = mapMinWidthsToPadding(minWidths, screens, theme("container.padding")); + let generatePaddingFor = (minWidth)=>{ + let paddingConfig = paddings.find((padding)=>padding.minWidth === minWidth); + if (!paddingConfig) { + return {}; + } + return { + paddingRight: paddingConfig.padding, + paddingLeft: paddingConfig.padding + }; + }; + let atRules = Array.from(new Set(minWidths.slice().sort((a, z)=>parseInt(a) - parseInt(z)))).map((minWidth)=>({ + [`@media (min-width: ${minWidth})`]: { + ".container": { + "max-width": minWidth, + ...generatePaddingFor(minWidth) + } + } + })); + addComponents([ + { + ".container": Object.assign({ + width: "100%" + }, theme("container.center", false) ? 
{ + marginRight: "auto", + marginLeft: "auto" + } : {}, generatePaddingFor(0)) + }, + ...atRules + ]); + }; + })(), + accessibility: ({ addUtilities })=>{ + addUtilities({ + ".sr-only": { + position: "absolute", + width: "1px", + height: "1px", + padding: "0", + margin: "-1px", + overflow: "hidden", + clip: "rect(0, 0, 0, 0)", + whiteSpace: "nowrap", + borderWidth: "0" + }, + ".not-sr-only": { + position: "static", + width: "auto", + height: "auto", + padding: "0", + margin: "0", + overflow: "visible", + clip: "auto", + whiteSpace: "normal" + } + }); + }, + pointerEvents: ({ addUtilities })=>{ + addUtilities({ + ".pointer-events-none": { + "pointer-events": "none" + }, + ".pointer-events-auto": { + "pointer-events": "auto" + } + }); + }, + visibility: ({ addUtilities })=>{ + addUtilities({ + ".visible": { + visibility: "visible" + }, + ".invisible": { + visibility: "hidden" + }, + ".collapse": { + visibility: "collapse" + } + }); + }, + position: ({ addUtilities })=>{ + addUtilities({ + ".static": { + position: "static" + }, + ".fixed": { + position: "fixed" + }, + ".absolute": { + position: "absolute" + }, + ".relative": { + position: "relative" + }, + ".sticky": { + position: "sticky" + } + }); + }, + inset: (0, _createUtilityPlugin.default)("inset", [ + [ + "inset", + [ + "inset" + ] + ], + [ + [ + "inset-x", + [ + "left", + "right" + ] + ], + [ + "inset-y", + [ + "top", + "bottom" + ] + ] + ], + [ + [ + "start", + [ + "inset-inline-start" + ] + ], + [ + "end", + [ + "inset-inline-end" + ] + ], + [ + "top", + [ + "top" + ] + ], + [ + "right", + [ + "right" + ] + ], + [ + "bottom", + [ + "bottom" + ] + ], + [ + "left", + [ + "left" + ] + ] + ] + ], { + supportsNegativeValues: true + }), + isolation: ({ addUtilities })=>{ + addUtilities({ + ".isolate": { + isolation: "isolate" + }, + ".isolation-auto": { + isolation: "auto" + } + }); + }, + zIndex: (0, _createUtilityPlugin.default)("zIndex", [ + [ + "z", + [ + "zIndex" + ] + ] + ], { + supportsNegativeValues: true + }), + order: (0, _createUtilityPlugin.default)("order", undefined, { + supportsNegativeValues: true + }), + gridColumn: (0, _createUtilityPlugin.default)("gridColumn", [ + [ + "col", + [ + "gridColumn" + ] + ] + ]), + gridColumnStart: (0, _createUtilityPlugin.default)("gridColumnStart", [ + [ + "col-start", + [ + "gridColumnStart" + ] + ] + ]), + gridColumnEnd: (0, _createUtilityPlugin.default)("gridColumnEnd", [ + [ + "col-end", + [ + "gridColumnEnd" + ] + ] + ]), + gridRow: (0, _createUtilityPlugin.default)("gridRow", [ + [ + "row", + [ + "gridRow" + ] + ] + ]), + gridRowStart: (0, _createUtilityPlugin.default)("gridRowStart", [ + [ + "row-start", + [ + "gridRowStart" + ] + ] + ]), + gridRowEnd: (0, _createUtilityPlugin.default)("gridRowEnd", [ + [ + "row-end", + [ + "gridRowEnd" + ] + ] + ]), + float: ({ addUtilities })=>{ + addUtilities({ + ".float-right": { + float: "right" + }, + ".float-left": { + float: "left" + }, + ".float-none": { + float: "none" + } + }); + }, + clear: ({ addUtilities })=>{ + addUtilities({ + ".clear-left": { + clear: "left" + }, + ".clear-right": { + clear: "right" + }, + ".clear-both": { + clear: "both" + }, + ".clear-none": { + clear: "none" + } + }); + }, + margin: (0, _createUtilityPlugin.default)("margin", [ + [ + "m", + [ + "margin" + ] + ], + [ + [ + "mx", + [ + "margin-left", + "margin-right" + ] + ], + [ + "my", + [ + "margin-top", + "margin-bottom" + ] + ] + ], + [ + [ + "ms", + [ + "margin-inline-start" + ] + ], + [ + "me", + [ + "margin-inline-end" + ] + ], + [ + "mt", + [ + 
"margin-top" + ] + ], + [ + "mr", + [ + "margin-right" + ] + ], + [ + "mb", + [ + "margin-bottom" + ] + ], + [ + "ml", + [ + "margin-left" + ] + ] + ] + ], { + supportsNegativeValues: true + }), + boxSizing: ({ addUtilities })=>{ + addUtilities({ + ".box-border": { + "box-sizing": "border-box" + }, + ".box-content": { + "box-sizing": "content-box" + } + }); + }, + lineClamp: ({ matchUtilities , addUtilities , theme })=>{ + matchUtilities({ + "line-clamp": (value)=>({ + overflow: "hidden", + display: "-webkit-box", + "-webkit-box-orient": "vertical", + "-webkit-line-clamp": `${value}` + }) + }, { + values: theme("lineClamp") + }); + addUtilities({ + ".line-clamp-none": { + overflow: "visible", + display: "block", + "-webkit-box-orient": "horizontal", + "-webkit-line-clamp": "none" + } + }); + }, + display: ({ addUtilities })=>{ + addUtilities({ + ".block": { + display: "block" + }, + ".inline-block": { + display: "inline-block" + }, + ".inline": { + display: "inline" + }, + ".flex": { + display: "flex" + }, + ".inline-flex": { + display: "inline-flex" + }, + ".table": { + display: "table" + }, + ".inline-table": { + display: "inline-table" + }, + ".table-caption": { + display: "table-caption" + }, + ".table-cell": { + display: "table-cell" + }, + ".table-column": { + display: "table-column" + }, + ".table-column-group": { + display: "table-column-group" + }, + ".table-footer-group": { + display: "table-footer-group" + }, + ".table-header-group": { + display: "table-header-group" + }, + ".table-row-group": { + display: "table-row-group" + }, + ".table-row": { + display: "table-row" + }, + ".flow-root": { + display: "flow-root" + }, + ".grid": { + display: "grid" + }, + ".inline-grid": { + display: "inline-grid" + }, + ".contents": { + display: "contents" + }, + ".list-item": { + display: "list-item" + }, + ".hidden": { + display: "none" + } + }); + }, + aspectRatio: (0, _createUtilityPlugin.default)("aspectRatio", [ + [ + "aspect", + [ + "aspect-ratio" + ] + ] + ]), + height: (0, _createUtilityPlugin.default)("height", [ + [ + "h", + [ + "height" + ] + ] + ]), + maxHeight: (0, _createUtilityPlugin.default)("maxHeight", [ + [ + "max-h", + [ + "maxHeight" + ] + ] + ]), + minHeight: (0, _createUtilityPlugin.default)("minHeight", [ + [ + "min-h", + [ + "minHeight" + ] + ] + ]), + width: (0, _createUtilityPlugin.default)("width", [ + [ + "w", + [ + "width" + ] + ] + ]), + minWidth: (0, _createUtilityPlugin.default)("minWidth", [ + [ + "min-w", + [ + "minWidth" + ] + ] + ]), + maxWidth: (0, _createUtilityPlugin.default)("maxWidth", [ + [ + "max-w", + [ + "maxWidth" + ] + ] + ]), + flex: (0, _createUtilityPlugin.default)("flex"), + flexShrink: (0, _createUtilityPlugin.default)("flexShrink", [ + [ + "flex-shrink", + [ + "flex-shrink" + ] + ], + [ + "shrink", + [ + "flex-shrink" + ] + ] + ]), + flexGrow: (0, _createUtilityPlugin.default)("flexGrow", [ + [ + "flex-grow", + [ + "flex-grow" + ] + ], + [ + "grow", + [ + "flex-grow" + ] + ] + ]), + flexBasis: (0, _createUtilityPlugin.default)("flexBasis", [ + [ + "basis", + [ + "flex-basis" + ] + ] + ]), + tableLayout: ({ addUtilities })=>{ + addUtilities({ + ".table-auto": { + "table-layout": "auto" + }, + ".table-fixed": { + "table-layout": "fixed" + } + }); + }, + captionSide: ({ addUtilities })=>{ + addUtilities({ + ".caption-top": { + "caption-side": "top" + }, + ".caption-bottom": { + "caption-side": "bottom" + } + }); + }, + borderCollapse: ({ addUtilities })=>{ + addUtilities({ + ".border-collapse": { + "border-collapse": "collapse" + }, + 
".border-separate": { + "border-collapse": "separate" + } + }); + }, + borderSpacing: ({ addDefaults , matchUtilities , theme })=>{ + addDefaults("border-spacing", { + "--tw-border-spacing-x": 0, + "--tw-border-spacing-y": 0 + }); + matchUtilities({ + "border-spacing": (value)=>{ + return { + "--tw-border-spacing-x": value, + "--tw-border-spacing-y": value, + "@defaults border-spacing": {}, + "border-spacing": "var(--tw-border-spacing-x) var(--tw-border-spacing-y)" + }; + }, + "border-spacing-x": (value)=>{ + return { + "--tw-border-spacing-x": value, + "@defaults border-spacing": {}, + "border-spacing": "var(--tw-border-spacing-x) var(--tw-border-spacing-y)" + }; + }, + "border-spacing-y": (value)=>{ + return { + "--tw-border-spacing-y": value, + "@defaults border-spacing": {}, + "border-spacing": "var(--tw-border-spacing-x) var(--tw-border-spacing-y)" + }; + } + }, { + values: theme("borderSpacing") + }); + }, + transformOrigin: (0, _createUtilityPlugin.default)("transformOrigin", [ + [ + "origin", + [ + "transformOrigin" + ] + ] + ]), + translate: (0, _createUtilityPlugin.default)("translate", [ + [ + [ + "translate-x", + [ + [ + "@defaults transform", + {} + ], + "--tw-translate-x", + [ + "transform", + cssTransformValue + ] + ] + ], + [ + "translate-y", + [ + [ + "@defaults transform", + {} + ], + "--tw-translate-y", + [ + "transform", + cssTransformValue + ] + ] + ] + ] + ], { + supportsNegativeValues: true + }), + rotate: (0, _createUtilityPlugin.default)("rotate", [ + [ + "rotate", + [ + [ + "@defaults transform", + {} + ], + "--tw-rotate", + [ + "transform", + cssTransformValue + ] + ] + ] + ], { + supportsNegativeValues: true + }), + skew: (0, _createUtilityPlugin.default)("skew", [ + [ + [ + "skew-x", + [ + [ + "@defaults transform", + {} + ], + "--tw-skew-x", + [ + "transform", + cssTransformValue + ] + ] + ], + [ + "skew-y", + [ + [ + "@defaults transform", + {} + ], + "--tw-skew-y", + [ + "transform", + cssTransformValue + ] + ] + ] + ] + ], { + supportsNegativeValues: true + }), + scale: (0, _createUtilityPlugin.default)("scale", [ + [ + "scale", + [ + [ + "@defaults transform", + {} + ], + "--tw-scale-x", + "--tw-scale-y", + [ + "transform", + cssTransformValue + ] + ] + ], + [ + [ + "scale-x", + [ + [ + "@defaults transform", + {} + ], + "--tw-scale-x", + [ + "transform", + cssTransformValue + ] + ] + ], + [ + "scale-y", + [ + [ + "@defaults transform", + {} + ], + "--tw-scale-y", + [ + "transform", + cssTransformValue + ] + ] + ] + ] + ], { + supportsNegativeValues: true + }), + transform: ({ addDefaults , addUtilities })=>{ + addDefaults("transform", { + "--tw-translate-x": "0", + "--tw-translate-y": "0", + "--tw-rotate": "0", + "--tw-skew-x": "0", + "--tw-skew-y": "0", + "--tw-scale-x": "1", + "--tw-scale-y": "1" + }); + addUtilities({ + ".transform": { + "@defaults transform": {}, + transform: cssTransformValue + }, + ".transform-cpu": { + transform: cssTransformValue + }, + ".transform-gpu": { + transform: cssTransformValue.replace("translate(var(--tw-translate-x), var(--tw-translate-y))", "translate3d(var(--tw-translate-x), var(--tw-translate-y), 0)") + }, + ".transform-none": { + transform: "none" + } + }); + }, + animation: ({ matchUtilities , theme , config })=>{ + let prefixName = (name)=>`${config("prefix")}${(0, _escapeClassName.default)(name)}`; + var _theme; + let keyframes = Object.fromEntries(Object.entries((_theme = theme("keyframes")) !== null && _theme !== void 0 ? 
_theme : {}).map(([key, value])=>{ + return [ + key, + { + [`@keyframes ${prefixName(key)}`]: value + } + ]; + })); + matchUtilities({ + animate: (value)=>{ + let animations = (0, _parseAnimationValue.default)(value); + return [ + ...animations.flatMap((animation)=>keyframes[animation.name]), + { + animation: animations.map(({ name , value })=>{ + if (name === undefined || keyframes[name] === undefined) { + return value; + } + return value.replace(name, prefixName(name)); + }).join(", ") + } + ]; + } + }, { + values: theme("animation") + }); + }, + cursor: (0, _createUtilityPlugin.default)("cursor"), + touchAction: ({ addDefaults , addUtilities })=>{ + addDefaults("touch-action", { + "--tw-pan-x": " ", + "--tw-pan-y": " ", + "--tw-pinch-zoom": " " + }); + let cssTouchActionValue = "var(--tw-pan-x) var(--tw-pan-y) var(--tw-pinch-zoom)"; + addUtilities({ + ".touch-auto": { + "touch-action": "auto" + }, + ".touch-none": { + "touch-action": "none" + }, + ".touch-pan-x": { + "@defaults touch-action": {}, + "--tw-pan-x": "pan-x", + "touch-action": cssTouchActionValue + }, + ".touch-pan-left": { + "@defaults touch-action": {}, + "--tw-pan-x": "pan-left", + "touch-action": cssTouchActionValue + }, + ".touch-pan-right": { + "@defaults touch-action": {}, + "--tw-pan-x": "pan-right", + "touch-action": cssTouchActionValue + }, + ".touch-pan-y": { + "@defaults touch-action": {}, + "--tw-pan-y": "pan-y", + "touch-action": cssTouchActionValue + }, + ".touch-pan-up": { + "@defaults touch-action": {}, + "--tw-pan-y": "pan-up", + "touch-action": cssTouchActionValue + }, + ".touch-pan-down": { + "@defaults touch-action": {}, + "--tw-pan-y": "pan-down", + "touch-action": cssTouchActionValue + }, + ".touch-pinch-zoom": { + "@defaults touch-action": {}, + "--tw-pinch-zoom": "pinch-zoom", + "touch-action": cssTouchActionValue + }, + ".touch-manipulation": { + "touch-action": "manipulation" + } + }); + }, + userSelect: ({ addUtilities })=>{ + addUtilities({ + ".select-none": { + "user-select": "none" + }, + ".select-text": { + "user-select": "text" + }, + ".select-all": { + "user-select": "all" + }, + ".select-auto": { + "user-select": "auto" + } + }); + }, + resize: ({ addUtilities })=>{ + addUtilities({ + ".resize-none": { + resize: "none" + }, + ".resize-y": { + resize: "vertical" + }, + ".resize-x": { + resize: "horizontal" + }, + ".resize": { + resize: "both" + } + }); + }, + scrollSnapType: ({ addDefaults , addUtilities })=>{ + addDefaults("scroll-snap-type", { + "--tw-scroll-snap-strictness": "proximity" + }); + addUtilities({ + ".snap-none": { + "scroll-snap-type": "none" + }, + ".snap-x": { + "@defaults scroll-snap-type": {}, + "scroll-snap-type": "x var(--tw-scroll-snap-strictness)" + }, + ".snap-y": { + "@defaults scroll-snap-type": {}, + "scroll-snap-type": "y var(--tw-scroll-snap-strictness)" + }, + ".snap-both": { + "@defaults scroll-snap-type": {}, + "scroll-snap-type": "both var(--tw-scroll-snap-strictness)" + }, + ".snap-mandatory": { + "--tw-scroll-snap-strictness": "mandatory" + }, + ".snap-proximity": { + "--tw-scroll-snap-strictness": "proximity" + } + }); + }, + scrollSnapAlign: ({ addUtilities })=>{ + addUtilities({ + ".snap-start": { + "scroll-snap-align": "start" + }, + ".snap-end": { + "scroll-snap-align": "end" + }, + ".snap-center": { + "scroll-snap-align": "center" + }, + ".snap-align-none": { + "scroll-snap-align": "none" + } + }); + }, + scrollSnapStop: ({ addUtilities })=>{ + addUtilities({ + ".snap-normal": { + "scroll-snap-stop": "normal" + }, + ".snap-always": { + 
"scroll-snap-stop": "always" + } + }); + }, + scrollMargin: (0, _createUtilityPlugin.default)("scrollMargin", [ + [ + "scroll-m", + [ + "scroll-margin" + ] + ], + [ + [ + "scroll-mx", + [ + "scroll-margin-left", + "scroll-margin-right" + ] + ], + [ + "scroll-my", + [ + "scroll-margin-top", + "scroll-margin-bottom" + ] + ] + ], + [ + [ + "scroll-ms", + [ + "scroll-margin-inline-start" + ] + ], + [ + "scroll-me", + [ + "scroll-margin-inline-end" + ] + ], + [ + "scroll-mt", + [ + "scroll-margin-top" + ] + ], + [ + "scroll-mr", + [ + "scroll-margin-right" + ] + ], + [ + "scroll-mb", + [ + "scroll-margin-bottom" + ] + ], + [ + "scroll-ml", + [ + "scroll-margin-left" + ] + ] + ] + ], { + supportsNegativeValues: true + }), + scrollPadding: (0, _createUtilityPlugin.default)("scrollPadding", [ + [ + "scroll-p", + [ + "scroll-padding" + ] + ], + [ + [ + "scroll-px", + [ + "scroll-padding-left", + "scroll-padding-right" + ] + ], + [ + "scroll-py", + [ + "scroll-padding-top", + "scroll-padding-bottom" + ] + ] + ], + [ + [ + "scroll-ps", + [ + "scroll-padding-inline-start" + ] + ], + [ + "scroll-pe", + [ + "scroll-padding-inline-end" + ] + ], + [ + "scroll-pt", + [ + "scroll-padding-top" + ] + ], + [ + "scroll-pr", + [ + "scroll-padding-right" + ] + ], + [ + "scroll-pb", + [ + "scroll-padding-bottom" + ] + ], + [ + "scroll-pl", + [ + "scroll-padding-left" + ] + ] + ] + ]), + listStylePosition: ({ addUtilities })=>{ + addUtilities({ + ".list-inside": { + "list-style-position": "inside" + }, + ".list-outside": { + "list-style-position": "outside" + } + }); + }, + listStyleType: (0, _createUtilityPlugin.default)("listStyleType", [ + [ + "list", + [ + "listStyleType" + ] + ] + ]), + listStyleImage: (0, _createUtilityPlugin.default)("listStyleImage", [ + [ + "list-image", + [ + "listStyleImage" + ] + ] + ]), + appearance: ({ addUtilities })=>{ + addUtilities({ + ".appearance-none": { + appearance: "none" + } + }); + }, + columns: (0, _createUtilityPlugin.default)("columns", [ + [ + "columns", + [ + "columns" + ] + ] + ]), + breakBefore: ({ addUtilities })=>{ + addUtilities({ + ".break-before-auto": { + "break-before": "auto" + }, + ".break-before-avoid": { + "break-before": "avoid" + }, + ".break-before-all": { + "break-before": "all" + }, + ".break-before-avoid-page": { + "break-before": "avoid-page" + }, + ".break-before-page": { + "break-before": "page" + }, + ".break-before-left": { + "break-before": "left" + }, + ".break-before-right": { + "break-before": "right" + }, + ".break-before-column": { + "break-before": "column" + } + }); + }, + breakInside: ({ addUtilities })=>{ + addUtilities({ + ".break-inside-auto": { + "break-inside": "auto" + }, + ".break-inside-avoid": { + "break-inside": "avoid" + }, + ".break-inside-avoid-page": { + "break-inside": "avoid-page" + }, + ".break-inside-avoid-column": { + "break-inside": "avoid-column" + } + }); + }, + breakAfter: ({ addUtilities })=>{ + addUtilities({ + ".break-after-auto": { + "break-after": "auto" + }, + ".break-after-avoid": { + "break-after": "avoid" + }, + ".break-after-all": { + "break-after": "all" + }, + ".break-after-avoid-page": { + "break-after": "avoid-page" + }, + ".break-after-page": { + "break-after": "page" + }, + ".break-after-left": { + "break-after": "left" + }, + ".break-after-right": { + "break-after": "right" + }, + ".break-after-column": { + "break-after": "column" + } + }); + }, + gridAutoColumns: (0, _createUtilityPlugin.default)("gridAutoColumns", [ + [ + "auto-cols", + [ + "gridAutoColumns" + ] + ] + ]), + gridAutoFlow: ({ 
addUtilities })=>{ + addUtilities({ + ".grid-flow-row": { + gridAutoFlow: "row" + }, + ".grid-flow-col": { + gridAutoFlow: "column" + }, + ".grid-flow-dense": { + gridAutoFlow: "dense" + }, + ".grid-flow-row-dense": { + gridAutoFlow: "row dense" + }, + ".grid-flow-col-dense": { + gridAutoFlow: "column dense" + } + }); + }, + gridAutoRows: (0, _createUtilityPlugin.default)("gridAutoRows", [ + [ + "auto-rows", + [ + "gridAutoRows" + ] + ] + ]), + gridTemplateColumns: (0, _createUtilityPlugin.default)("gridTemplateColumns", [ + [ + "grid-cols", + [ + "gridTemplateColumns" + ] + ] + ]), + gridTemplateRows: (0, _createUtilityPlugin.default)("gridTemplateRows", [ + [ + "grid-rows", + [ + "gridTemplateRows" + ] + ] + ]), + flexDirection: ({ addUtilities })=>{ + addUtilities({ + ".flex-row": { + "flex-direction": "row" + }, + ".flex-row-reverse": { + "flex-direction": "row-reverse" + }, + ".flex-col": { + "flex-direction": "column" + }, + ".flex-col-reverse": { + "flex-direction": "column-reverse" + } + }); + }, + flexWrap: ({ addUtilities })=>{ + addUtilities({ + ".flex-wrap": { + "flex-wrap": "wrap" + }, + ".flex-wrap-reverse": { + "flex-wrap": "wrap-reverse" + }, + ".flex-nowrap": { + "flex-wrap": "nowrap" + } + }); + }, + placeContent: ({ addUtilities })=>{ + addUtilities({ + ".place-content-center": { + "place-content": "center" + }, + ".place-content-start": { + "place-content": "start" + }, + ".place-content-end": { + "place-content": "end" + }, + ".place-content-between": { + "place-content": "space-between" + }, + ".place-content-around": { + "place-content": "space-around" + }, + ".place-content-evenly": { + "place-content": "space-evenly" + }, + ".place-content-baseline": { + "place-content": "baseline" + }, + ".place-content-stretch": { + "place-content": "stretch" + } + }); + }, + placeItems: ({ addUtilities })=>{ + addUtilities({ + ".place-items-start": { + "place-items": "start" + }, + ".place-items-end": { + "place-items": "end" + }, + ".place-items-center": { + "place-items": "center" + }, + ".place-items-baseline": { + "place-items": "baseline" + }, + ".place-items-stretch": { + "place-items": "stretch" + } + }); + }, + alignContent: ({ addUtilities })=>{ + addUtilities({ + ".content-normal": { + "align-content": "normal" + }, + ".content-center": { + "align-content": "center" + }, + ".content-start": { + "align-content": "flex-start" + }, + ".content-end": { + "align-content": "flex-end" + }, + ".content-between": { + "align-content": "space-between" + }, + ".content-around": { + "align-content": "space-around" + }, + ".content-evenly": { + "align-content": "space-evenly" + }, + ".content-baseline": { + "align-content": "baseline" + }, + ".content-stretch": { + "align-content": "stretch" + } + }); + }, + alignItems: ({ addUtilities })=>{ + addUtilities({ + ".items-start": { + "align-items": "flex-start" + }, + ".items-end": { + "align-items": "flex-end" + }, + ".items-center": { + "align-items": "center" + }, + ".items-baseline": { + "align-items": "baseline" + }, + ".items-stretch": { + "align-items": "stretch" + } + }); + }, + justifyContent: ({ addUtilities })=>{ + addUtilities({ + ".justify-normal": { + "justify-content": "normal" + }, + ".justify-start": { + "justify-content": "flex-start" + }, + ".justify-end": { + "justify-content": "flex-end" + }, + ".justify-center": { + "justify-content": "center" + }, + ".justify-between": { + "justify-content": "space-between" + }, + ".justify-around": { + "justify-content": "space-around" + }, + ".justify-evenly": { + 
"justify-content": "space-evenly" + }, + ".justify-stretch": { + "justify-content": "stretch" + } + }); + }, + justifyItems: ({ addUtilities })=>{ + addUtilities({ + ".justify-items-start": { + "justify-items": "start" + }, + ".justify-items-end": { + "justify-items": "end" + }, + ".justify-items-center": { + "justify-items": "center" + }, + ".justify-items-stretch": { + "justify-items": "stretch" + } + }); + }, + gap: (0, _createUtilityPlugin.default)("gap", [ + [ + "gap", + [ + "gap" + ] + ], + [ + [ + "gap-x", + [ + "columnGap" + ] + ], + [ + "gap-y", + [ + "rowGap" + ] + ] + ] + ]), + space: ({ matchUtilities , addUtilities , theme })=>{ + matchUtilities({ + "space-x": (value)=>{ + value = value === "0" ? "0px" : value; + if (false) { + return { + "& > :not([hidden]) ~ :not([hidden])": { + "--tw-space-x-reverse": "0", + "margin-inline-end": `calc(${value} * var(--tw-space-x-reverse))`, + "margin-inline-start": `calc(${value} * calc(1 - var(--tw-space-x-reverse)))` + } + }; + } + return { + "& > :not([hidden]) ~ :not([hidden])": { + "--tw-space-x-reverse": "0", + "margin-right": `calc(${value} * var(--tw-space-x-reverse))`, + "margin-left": `calc(${value} * calc(1 - var(--tw-space-x-reverse)))` + } + }; + }, + "space-y": (value)=>{ + value = value === "0" ? "0px" : value; + return { + "& > :not([hidden]) ~ :not([hidden])": { + "--tw-space-y-reverse": "0", + "margin-top": `calc(${value} * calc(1 - var(--tw-space-y-reverse)))`, + "margin-bottom": `calc(${value} * var(--tw-space-y-reverse))` + } + }; + } + }, { + values: theme("space"), + supportsNegativeValues: true + }); + addUtilities({ + ".space-y-reverse > :not([hidden]) ~ :not([hidden])": { + "--tw-space-y-reverse": "1" + }, + ".space-x-reverse > :not([hidden]) ~ :not([hidden])": { + "--tw-space-x-reverse": "1" + } + }); + }, + divideWidth: ({ matchUtilities , addUtilities , theme })=>{ + matchUtilities({ + "divide-x": (value)=>{ + value = value === "0" ? "0px" : value; + if (false) { + return { + "& > :not([hidden]) ~ :not([hidden])": { + "@defaults border-width": {}, + "--tw-divide-x-reverse": "0", + "border-inline-end-width": `calc(${value} * var(--tw-divide-x-reverse))`, + "border-inline-start-width": `calc(${value} * calc(1 - var(--tw-divide-x-reverse)))` + } + }; + } + return { + "& > :not([hidden]) ~ :not([hidden])": { + "@defaults border-width": {}, + "--tw-divide-x-reverse": "0", + "border-right-width": `calc(${value} * var(--tw-divide-x-reverse))`, + "border-left-width": `calc(${value} * calc(1 - var(--tw-divide-x-reverse)))` + } + }; + }, + "divide-y": (value)=>{ + value = value === "0" ? 
"0px" : value; + return { + "& > :not([hidden]) ~ :not([hidden])": { + "@defaults border-width": {}, + "--tw-divide-y-reverse": "0", + "border-top-width": `calc(${value} * calc(1 - var(--tw-divide-y-reverse)))`, + "border-bottom-width": `calc(${value} * var(--tw-divide-y-reverse))` + } + }; + } + }, { + values: theme("divideWidth"), + type: [ + "line-width", + "length", + "any" + ] + }); + addUtilities({ + ".divide-y-reverse > :not([hidden]) ~ :not([hidden])": { + "@defaults border-width": {}, + "--tw-divide-y-reverse": "1" + }, + ".divide-x-reverse > :not([hidden]) ~ :not([hidden])": { + "@defaults border-width": {}, + "--tw-divide-x-reverse": "1" + } + }); + }, + divideStyle: ({ addUtilities })=>{ + addUtilities({ + ".divide-solid > :not([hidden]) ~ :not([hidden])": { + "border-style": "solid" + }, + ".divide-dashed > :not([hidden]) ~ :not([hidden])": { + "border-style": "dashed" + }, + ".divide-dotted > :not([hidden]) ~ :not([hidden])": { + "border-style": "dotted" + }, + ".divide-double > :not([hidden]) ~ :not([hidden])": { + "border-style": "double" + }, + ".divide-none > :not([hidden]) ~ :not([hidden])": { + "border-style": "none" + } + }); + }, + divideColor: ({ matchUtilities , theme , corePlugins })=>{ + matchUtilities({ + divide: (value)=>{ + if (!corePlugins("divideOpacity")) { + return { + ["& > :not([hidden]) ~ :not([hidden])"]: { + "border-color": (0, _toColorValue.default)(value) + } + }; + } + return { + ["& > :not([hidden]) ~ :not([hidden])"]: (0, _withAlphaVariable.default)({ + color: value, + property: "border-color", + variable: "--tw-divide-opacity" + }) + }; + } + }, { + values: (({ DEFAULT: _ , ...colors })=>colors)((0, _flattenColorPalette.default)(theme("divideColor"))), + type: [ + "color", + "any" + ] + }); + }, + divideOpacity: ({ matchUtilities , theme })=>{ + matchUtilities({ + "divide-opacity": (value)=>{ + return { + [`& > :not([hidden]) ~ :not([hidden])`]: { + "--tw-divide-opacity": value + } + }; + } + }, { + values: theme("divideOpacity") + }); + }, + placeSelf: ({ addUtilities })=>{ + addUtilities({ + ".place-self-auto": { + "place-self": "auto" + }, + ".place-self-start": { + "place-self": "start" + }, + ".place-self-end": { + "place-self": "end" + }, + ".place-self-center": { + "place-self": "center" + }, + ".place-self-stretch": { + "place-self": "stretch" + } + }); + }, + alignSelf: ({ addUtilities })=>{ + addUtilities({ + ".self-auto": { + "align-self": "auto" + }, + ".self-start": { + "align-self": "flex-start" + }, + ".self-end": { + "align-self": "flex-end" + }, + ".self-center": { + "align-self": "center" + }, + ".self-stretch": { + "align-self": "stretch" + }, + ".self-baseline": { + "align-self": "baseline" + } + }); + }, + justifySelf: ({ addUtilities })=>{ + addUtilities({ + ".justify-self-auto": { + "justify-self": "auto" + }, + ".justify-self-start": { + "justify-self": "start" + }, + ".justify-self-end": { + "justify-self": "end" + }, + ".justify-self-center": { + "justify-self": "center" + }, + ".justify-self-stretch": { + "justify-self": "stretch" + } + }); + }, + overflow: ({ addUtilities })=>{ + addUtilities({ + ".overflow-auto": { + overflow: "auto" + }, + ".overflow-hidden": { + overflow: "hidden" + }, + ".overflow-clip": { + overflow: "clip" + }, + ".overflow-visible": { + overflow: "visible" + }, + ".overflow-scroll": { + overflow: "scroll" + }, + ".overflow-x-auto": { + "overflow-x": "auto" + }, + ".overflow-y-auto": { + "overflow-y": "auto" + }, + ".overflow-x-hidden": { + "overflow-x": "hidden" + }, + ".overflow-y-hidden": { + 
"overflow-y": "hidden" + }, + ".overflow-x-clip": { + "overflow-x": "clip" + }, + ".overflow-y-clip": { + "overflow-y": "clip" + }, + ".overflow-x-visible": { + "overflow-x": "visible" + }, + ".overflow-y-visible": { + "overflow-y": "visible" + }, + ".overflow-x-scroll": { + "overflow-x": "scroll" + }, + ".overflow-y-scroll": { + "overflow-y": "scroll" + } + }); + }, + overscrollBehavior: ({ addUtilities })=>{ + addUtilities({ + ".overscroll-auto": { + "overscroll-behavior": "auto" + }, + ".overscroll-contain": { + "overscroll-behavior": "contain" + }, + ".overscroll-none": { + "overscroll-behavior": "none" + }, + ".overscroll-y-auto": { + "overscroll-behavior-y": "auto" + }, + ".overscroll-y-contain": { + "overscroll-behavior-y": "contain" + }, + ".overscroll-y-none": { + "overscroll-behavior-y": "none" + }, + ".overscroll-x-auto": { + "overscroll-behavior-x": "auto" + }, + ".overscroll-x-contain": { + "overscroll-behavior-x": "contain" + }, + ".overscroll-x-none": { + "overscroll-behavior-x": "none" + } + }); + }, + scrollBehavior: ({ addUtilities })=>{ + addUtilities({ + ".scroll-auto": { + "scroll-behavior": "auto" + }, + ".scroll-smooth": { + "scroll-behavior": "smooth" + } + }); + }, + textOverflow: ({ addUtilities })=>{ + addUtilities({ + ".truncate": { + overflow: "hidden", + "text-overflow": "ellipsis", + "white-space": "nowrap" + }, + ".overflow-ellipsis": { + "text-overflow": "ellipsis" + }, + ".text-ellipsis": { + "text-overflow": "ellipsis" + }, + ".text-clip": { + "text-overflow": "clip" + } + }); + }, + hyphens: ({ addUtilities })=>{ + addUtilities({ + ".hyphens-none": { + hyphens: "none" + }, + ".hyphens-manual": { + hyphens: "manual" + }, + ".hyphens-auto": { + hyphens: "auto" + } + }); + }, + whitespace: ({ addUtilities })=>{ + addUtilities({ + ".whitespace-normal": { + "white-space": "normal" + }, + ".whitespace-nowrap": { + "white-space": "nowrap" + }, + ".whitespace-pre": { + "white-space": "pre" + }, + ".whitespace-pre-line": { + "white-space": "pre-line" + }, + ".whitespace-pre-wrap": { + "white-space": "pre-wrap" + }, + ".whitespace-break-spaces": { + "white-space": "break-spaces" + } + }); + }, + wordBreak: ({ addUtilities })=>{ + addUtilities({ + ".break-normal": { + "overflow-wrap": "normal", + "word-break": "normal" + }, + ".break-words": { + "overflow-wrap": "break-word" + }, + ".break-all": { + "word-break": "break-all" + }, + ".break-keep": { + "word-break": "keep-all" + } + }); + }, + borderRadius: (0, _createUtilityPlugin.default)("borderRadius", [ + [ + "rounded", + [ + "border-radius" + ] + ], + [ + [ + "rounded-s", + [ + "border-start-start-radius", + "border-end-start-radius" + ] + ], + [ + "rounded-e", + [ + "border-start-end-radius", + "border-end-end-radius" + ] + ], + [ + "rounded-t", + [ + "border-top-left-radius", + "border-top-right-radius" + ] + ], + [ + "rounded-r", + [ + "border-top-right-radius", + "border-bottom-right-radius" + ] + ], + [ + "rounded-b", + [ + "border-bottom-right-radius", + "border-bottom-left-radius" + ] + ], + [ + "rounded-l", + [ + "border-top-left-radius", + "border-bottom-left-radius" + ] + ] + ], + [ + [ + "rounded-ss", + [ + "border-start-start-radius" + ] + ], + [ + "rounded-se", + [ + "border-start-end-radius" + ] + ], + [ + "rounded-ee", + [ + "border-end-end-radius" + ] + ], + [ + "rounded-es", + [ + "border-end-start-radius" + ] + ], + [ + "rounded-tl", + [ + "border-top-left-radius" + ] + ], + [ + "rounded-tr", + [ + "border-top-right-radius" + ] + ], + [ + "rounded-br", + [ + "border-bottom-right-radius" + ] + 
], + [ + "rounded-bl", + [ + "border-bottom-left-radius" + ] + ] + ] + ]), + borderWidth: (0, _createUtilityPlugin.default)("borderWidth", [ + [ + "border", + [ + [ + "@defaults border-width", + {} + ], + "border-width" + ] + ], + [ + [ + "border-x", + [ + [ + "@defaults border-width", + {} + ], + "border-left-width", + "border-right-width" + ] + ], + [ + "border-y", + [ + [ + "@defaults border-width", + {} + ], + "border-top-width", + "border-bottom-width" + ] + ] + ], + [ + [ + "border-s", + [ + [ + "@defaults border-width", + {} + ], + "border-inline-start-width" + ] + ], + [ + "border-e", + [ + [ + "@defaults border-width", + {} + ], + "border-inline-end-width" + ] + ], + [ + "border-t", + [ + [ + "@defaults border-width", + {} + ], + "border-top-width" + ] + ], + [ + "border-r", + [ + [ + "@defaults border-width", + {} + ], + "border-right-width" + ] + ], + [ + "border-b", + [ + [ + "@defaults border-width", + {} + ], + "border-bottom-width" + ] + ], + [ + "border-l", + [ + [ + "@defaults border-width", + {} + ], + "border-left-width" + ] + ] + ] + ], { + type: [ + "line-width", + "length" + ] + }), + borderStyle: ({ addUtilities })=>{ + addUtilities({ + ".border-solid": { + "border-style": "solid" + }, + ".border-dashed": { + "border-style": "dashed" + }, + ".border-dotted": { + "border-style": "dotted" + }, + ".border-double": { + "border-style": "double" + }, + ".border-hidden": { + "border-style": "hidden" + }, + ".border-none": { + "border-style": "none" + } + }); + }, + borderColor: ({ matchUtilities , theme , corePlugins })=>{ + matchUtilities({ + border: (value)=>{ + if (!corePlugins("borderOpacity")) { + return { + "border-color": (0, _toColorValue.default)(value) + }; + } + return (0, _withAlphaVariable.default)({ + color: value, + property: "border-color", + variable: "--tw-border-opacity" + }); + } + }, { + values: (({ DEFAULT: _ , ...colors })=>colors)((0, _flattenColorPalette.default)(theme("borderColor"))), + type: [ + "color", + "any" + ] + }); + matchUtilities({ + "border-x": (value)=>{ + if (!corePlugins("borderOpacity")) { + return { + "border-left-color": (0, _toColorValue.default)(value), + "border-right-color": (0, _toColorValue.default)(value) + }; + } + return (0, _withAlphaVariable.default)({ + color: value, + property: [ + "border-left-color", + "border-right-color" + ], + variable: "--tw-border-opacity" + }); + }, + "border-y": (value)=>{ + if (!corePlugins("borderOpacity")) { + return { + "border-top-color": (0, _toColorValue.default)(value), + "border-bottom-color": (0, _toColorValue.default)(value) + }; + } + return (0, _withAlphaVariable.default)({ + color: value, + property: [ + "border-top-color", + "border-bottom-color" + ], + variable: "--tw-border-opacity" + }); + } + }, { + values: (({ DEFAULT: _ , ...colors })=>colors)((0, _flattenColorPalette.default)(theme("borderColor"))), + type: [ + "color", + "any" + ] + }); + matchUtilities({ + "border-s": (value)=>{ + if (!corePlugins("borderOpacity")) { + return { + "border-inline-start-color": (0, _toColorValue.default)(value) + }; + } + return (0, _withAlphaVariable.default)({ + color: value, + property: "border-inline-start-color", + variable: "--tw-border-opacity" + }); + }, + "border-e": (value)=>{ + if (!corePlugins("borderOpacity")) { + return { + "border-inline-end-color": (0, _toColorValue.default)(value) + }; + } + return (0, _withAlphaVariable.default)({ + color: value, + property: "border-inline-end-color", + variable: "--tw-border-opacity" + }); + }, + "border-t": (value)=>{ + if 
(!corePlugins("borderOpacity")) { + return { + "border-top-color": (0, _toColorValue.default)(value) + }; + } + return (0, _withAlphaVariable.default)({ + color: value, + property: "border-top-color", + variable: "--tw-border-opacity" + }); + }, + "border-r": (value)=>{ + if (!corePlugins("borderOpacity")) { + return { + "border-right-color": (0, _toColorValue.default)(value) + }; + } + return (0, _withAlphaVariable.default)({ + color: value, + property: "border-right-color", + variable: "--tw-border-opacity" + }); + }, + "border-b": (value)=>{ + if (!corePlugins("borderOpacity")) { + return { + "border-bottom-color": (0, _toColorValue.default)(value) + }; + } + return (0, _withAlphaVariable.default)({ + color: value, + property: "border-bottom-color", + variable: "--tw-border-opacity" + }); + }, + "border-l": (value)=>{ + if (!corePlugins("borderOpacity")) { + return { + "border-left-color": (0, _toColorValue.default)(value) + }; + } + return (0, _withAlphaVariable.default)({ + color: value, + property: "border-left-color", + variable: "--tw-border-opacity" + }); + } + }, { + values: (({ DEFAULT: _ , ...colors })=>colors)((0, _flattenColorPalette.default)(theme("borderColor"))), + type: [ + "color", + "any" + ] + }); + }, + borderOpacity: (0, _createUtilityPlugin.default)("borderOpacity", [ + [ + "border-opacity", + [ + "--tw-border-opacity" + ] + ] + ]), + backgroundColor: ({ matchUtilities , theme , corePlugins })=>{ + matchUtilities({ + bg: (value)=>{ + if (!corePlugins("backgroundOpacity")) { + return { + "background-color": (0, _toColorValue.default)(value) + }; + } + return (0, _withAlphaVariable.default)({ + color: value, + property: "background-color", + variable: "--tw-bg-opacity" + }); + } + }, { + values: (0, _flattenColorPalette.default)(theme("backgroundColor")), + type: [ + "color", + "any" + ] + }); + }, + backgroundOpacity: (0, _createUtilityPlugin.default)("backgroundOpacity", [ + [ + "bg-opacity", + [ + "--tw-bg-opacity" + ] + ] + ]), + backgroundImage: (0, _createUtilityPlugin.default)("backgroundImage", [ + [ + "bg", + [ + "background-image" + ] + ] + ], { + type: [ + "lookup", + "image", + "url" + ] + }), + gradientColorStops: (()=>{ + function transparentTo(value) { + return (0, _withAlphaVariable.withAlphaValue)(value, 0, "rgb(255 255 255 / 0)"); + } + return function({ matchUtilities , theme , addDefaults }) { + addDefaults("gradient-color-stops", { + "--tw-gradient-from-position": " ", + "--tw-gradient-via-position": " ", + "--tw-gradient-to-position": " " + }); + let options = { + values: (0, _flattenColorPalette.default)(theme("gradientColorStops")), + type: [ + "color", + "any" + ] + }; + let positionOptions = { + values: theme("gradientColorStopPositions"), + type: [ + "length", + "percentage" + ] + }; + matchUtilities({ + from: (value)=>{ + let transparentToValue = transparentTo(value); + return { + "@defaults gradient-color-stops": {}, + "--tw-gradient-from": `${(0, _toColorValue.default)(value)} var(--tw-gradient-from-position)`, + "--tw-gradient-to": `${transparentToValue} var(--tw-gradient-to-position)`, + "--tw-gradient-stops": `var(--tw-gradient-from), var(--tw-gradient-to)` + }; + } + }, options); + matchUtilities({ + from: (value)=>{ + return { + "--tw-gradient-from-position": value + }; + } + }, positionOptions); + matchUtilities({ + via: (value)=>{ + let transparentToValue = transparentTo(value); + return { + "@defaults gradient-color-stops": {}, + "--tw-gradient-to": `${transparentToValue} var(--tw-gradient-to-position)`, + "--tw-gradient-stops": 
`var(--tw-gradient-from), ${(0, _toColorValue.default)(value)} var(--tw-gradient-via-position), var(--tw-gradient-to)` + }; + } + }, options); + matchUtilities({ + via: (value)=>{ + return { + "--tw-gradient-via-position": value + }; + } + }, positionOptions); + matchUtilities({ + to: (value)=>({ + "@defaults gradient-color-stops": {}, + "--tw-gradient-to": `${(0, _toColorValue.default)(value)} var(--tw-gradient-to-position)` + }) + }, options); + matchUtilities({ + to: (value)=>{ + return { + "--tw-gradient-to-position": value + }; + } + }, positionOptions); + }; + })(), + boxDecorationBreak: ({ addUtilities })=>{ + addUtilities({ + ".decoration-slice": { + "box-decoration-break": "slice" + }, + ".decoration-clone": { + "box-decoration-break": "clone" + }, + ".box-decoration-slice": { + "box-decoration-break": "slice" + }, + ".box-decoration-clone": { + "box-decoration-break": "clone" + } + }); + }, + backgroundSize: (0, _createUtilityPlugin.default)("backgroundSize", [ + [ + "bg", + [ + "background-size" + ] + ] + ], { + type: [ + "lookup", + "length", + "percentage", + "size" + ] + }), + backgroundAttachment: ({ addUtilities })=>{ + addUtilities({ + ".bg-fixed": { + "background-attachment": "fixed" + }, + ".bg-local": { + "background-attachment": "local" + }, + ".bg-scroll": { + "background-attachment": "scroll" + } + }); + }, + backgroundClip: ({ addUtilities })=>{ + addUtilities({ + ".bg-clip-border": { + "background-clip": "border-box" + }, + ".bg-clip-padding": { + "background-clip": "padding-box" + }, + ".bg-clip-content": { + "background-clip": "content-box" + }, + ".bg-clip-text": { + "background-clip": "text" + } + }); + }, + backgroundPosition: (0, _createUtilityPlugin.default)("backgroundPosition", [ + [ + "bg", + [ + "background-position" + ] + ] + ], { + type: [ + "lookup", + [ + "position", + { + preferOnConflict: true + } + ] + ] + }), + backgroundRepeat: ({ addUtilities })=>{ + addUtilities({ + ".bg-repeat": { + "background-repeat": "repeat" + }, + ".bg-no-repeat": { + "background-repeat": "no-repeat" + }, + ".bg-repeat-x": { + "background-repeat": "repeat-x" + }, + ".bg-repeat-y": { + "background-repeat": "repeat-y" + }, + ".bg-repeat-round": { + "background-repeat": "round" + }, + ".bg-repeat-space": { + "background-repeat": "space" + } + }); + }, + backgroundOrigin: ({ addUtilities })=>{ + addUtilities({ + ".bg-origin-border": { + "background-origin": "border-box" + }, + ".bg-origin-padding": { + "background-origin": "padding-box" + }, + ".bg-origin-content": { + "background-origin": "content-box" + } + }); + }, + fill: ({ matchUtilities , theme })=>{ + matchUtilities({ + fill: (value)=>{ + return { + fill: (0, _toColorValue.default)(value) + }; + } + }, { + values: (0, _flattenColorPalette.default)(theme("fill")), + type: [ + "color", + "any" + ] + }); + }, + stroke: ({ matchUtilities , theme })=>{ + matchUtilities({ + stroke: (value)=>{ + return { + stroke: (0, _toColorValue.default)(value) + }; + } + }, { + values: (0, _flattenColorPalette.default)(theme("stroke")), + type: [ + "color", + "url", + "any" + ] + }); + }, + strokeWidth: (0, _createUtilityPlugin.default)("strokeWidth", [ + [ + "stroke", + [ + "stroke-width" + ] + ] + ], { + type: [ + "length", + "number", + "percentage" + ] + }), + objectFit: ({ addUtilities })=>{ + addUtilities({ + ".object-contain": { + "object-fit": "contain" + }, + ".object-cover": { + "object-fit": "cover" + }, + ".object-fill": { + "object-fit": "fill" + }, + ".object-none": { + "object-fit": "none" + }, + ".object-scale-down": { + 
"object-fit": "scale-down" + } + }); + }, + objectPosition: (0, _createUtilityPlugin.default)("objectPosition", [ + [ + "object", + [ + "object-position" + ] + ] + ]), + padding: (0, _createUtilityPlugin.default)("padding", [ + [ + "p", + [ + "padding" + ] + ], + [ + [ + "px", + [ + "padding-left", + "padding-right" + ] + ], + [ + "py", + [ + "padding-top", + "padding-bottom" + ] + ] + ], + [ + [ + "ps", + [ + "padding-inline-start" + ] + ], + [ + "pe", + [ + "padding-inline-end" + ] + ], + [ + "pt", + [ + "padding-top" + ] + ], + [ + "pr", + [ + "padding-right" + ] + ], + [ + "pb", + [ + "padding-bottom" + ] + ], + [ + "pl", + [ + "padding-left" + ] + ] + ] + ]), + textAlign: ({ addUtilities })=>{ + addUtilities({ + ".text-left": { + "text-align": "left" + }, + ".text-center": { + "text-align": "center" + }, + ".text-right": { + "text-align": "right" + }, + ".text-justify": { + "text-align": "justify" + }, + ".text-start": { + "text-align": "start" + }, + ".text-end": { + "text-align": "end" + } + }); + }, + textIndent: (0, _createUtilityPlugin.default)("textIndent", [ + [ + "indent", + [ + "text-indent" + ] + ] + ], { + supportsNegativeValues: true + }), + verticalAlign: ({ addUtilities , matchUtilities })=>{ + addUtilities({ + ".align-baseline": { + "vertical-align": "baseline" + }, + ".align-top": { + "vertical-align": "top" + }, + ".align-middle": { + "vertical-align": "middle" + }, + ".align-bottom": { + "vertical-align": "bottom" + }, + ".align-text-top": { + "vertical-align": "text-top" + }, + ".align-text-bottom": { + "vertical-align": "text-bottom" + }, + ".align-sub": { + "vertical-align": "sub" + }, + ".align-super": { + "vertical-align": "super" + } + }); + matchUtilities({ + align: (value)=>({ + "vertical-align": value + }) + }); + }, + fontFamily: ({ matchUtilities , theme })=>{ + matchUtilities({ + font: (value)=>{ + let [families, options = {}] = Array.isArray(value) && (0, _isPlainObject.default)(value[1]) ? value : [ + value + ]; + let { fontFeatureSettings , fontVariationSettings } = options; + return { + "font-family": Array.isArray(families) ? families.join(", ") : families, + ...fontFeatureSettings === undefined ? {} : { + "font-feature-settings": fontFeatureSettings + }, + ...fontVariationSettings === undefined ? {} : { + "font-variation-settings": fontVariationSettings + } + }; + } + }, { + values: theme("fontFamily"), + type: [ + "lookup", + "generic-name", + "family-name" + ] + }); + }, + fontSize: ({ matchUtilities , theme })=>{ + matchUtilities({ + text: (value, { modifier })=>{ + let [fontSize, options] = Array.isArray(value) ? value : [ + value + ]; + if (modifier) { + return { + "font-size": fontSize, + "line-height": modifier + }; + } + let { lineHeight , letterSpacing , fontWeight } = (0, _isPlainObject.default)(options) ? options : { + lineHeight: options + }; + return { + "font-size": fontSize, + ...lineHeight === undefined ? {} : { + "line-height": lineHeight + }, + ...letterSpacing === undefined ? {} : { + "letter-spacing": letterSpacing + }, + ...fontWeight === undefined ? 
{} : { + "font-weight": fontWeight + } + }; + } + }, { + values: theme("fontSize"), + modifiers: theme("lineHeight"), + type: [ + "absolute-size", + "relative-size", + "length", + "percentage" + ] + }); + }, + fontWeight: (0, _createUtilityPlugin.default)("fontWeight", [ + [ + "font", + [ + "fontWeight" + ] + ] + ], { + type: [ + "lookup", + "number", + "any" + ] + }), + textTransform: ({ addUtilities })=>{ + addUtilities({ + ".uppercase": { + "text-transform": "uppercase" + }, + ".lowercase": { + "text-transform": "lowercase" + }, + ".capitalize": { + "text-transform": "capitalize" + }, + ".normal-case": { + "text-transform": "none" + } + }); + }, + fontStyle: ({ addUtilities })=>{ + addUtilities({ + ".italic": { + "font-style": "italic" + }, + ".not-italic": { + "font-style": "normal" + } + }); + }, + fontVariantNumeric: ({ addDefaults , addUtilities })=>{ + let cssFontVariantNumericValue = "var(--tw-ordinal) var(--tw-slashed-zero) var(--tw-numeric-figure) var(--tw-numeric-spacing) var(--tw-numeric-fraction)"; + addDefaults("font-variant-numeric", { + "--tw-ordinal": " ", + "--tw-slashed-zero": " ", + "--tw-numeric-figure": " ", + "--tw-numeric-spacing": " ", + "--tw-numeric-fraction": " " + }); + addUtilities({ + ".normal-nums": { + "font-variant-numeric": "normal" + }, + ".ordinal": { + "@defaults font-variant-numeric": {}, + "--tw-ordinal": "ordinal", + "font-variant-numeric": cssFontVariantNumericValue + }, + ".slashed-zero": { + "@defaults font-variant-numeric": {}, + "--tw-slashed-zero": "slashed-zero", + "font-variant-numeric": cssFontVariantNumericValue + }, + ".lining-nums": { + "@defaults font-variant-numeric": {}, + "--tw-numeric-figure": "lining-nums", + "font-variant-numeric": cssFontVariantNumericValue + }, + ".oldstyle-nums": { + "@defaults font-variant-numeric": {}, + "--tw-numeric-figure": "oldstyle-nums", + "font-variant-numeric": cssFontVariantNumericValue + }, + ".proportional-nums": { + "@defaults font-variant-numeric": {}, + "--tw-numeric-spacing": "proportional-nums", + "font-variant-numeric": cssFontVariantNumericValue + }, + ".tabular-nums": { + "@defaults font-variant-numeric": {}, + "--tw-numeric-spacing": "tabular-nums", + "font-variant-numeric": cssFontVariantNumericValue + }, + ".diagonal-fractions": { + "@defaults font-variant-numeric": {}, + "--tw-numeric-fraction": "diagonal-fractions", + "font-variant-numeric": cssFontVariantNumericValue + }, + ".stacked-fractions": { + "@defaults font-variant-numeric": {}, + "--tw-numeric-fraction": "stacked-fractions", + "font-variant-numeric": cssFontVariantNumericValue + } + }); + }, + lineHeight: (0, _createUtilityPlugin.default)("lineHeight", [ + [ + "leading", + [ + "lineHeight" + ] + ] + ]), + letterSpacing: (0, _createUtilityPlugin.default)("letterSpacing", [ + [ + "tracking", + [ + "letterSpacing" + ] + ] + ], { + supportsNegativeValues: true + }), + textColor: ({ matchUtilities , theme , corePlugins })=>{ + matchUtilities({ + text: (value)=>{ + if (!corePlugins("textOpacity")) { + return { + color: (0, _toColorValue.default)(value) + }; + } + return (0, _withAlphaVariable.default)({ + color: value, + property: "color", + variable: "--tw-text-opacity" + }); + } + }, { + values: (0, _flattenColorPalette.default)(theme("textColor")), + type: [ + "color", + "any" + ] + }); + }, + textOpacity: (0, _createUtilityPlugin.default)("textOpacity", [ + [ + "text-opacity", + [ + "--tw-text-opacity" + ] + ] + ]), + textDecoration: ({ addUtilities })=>{ + addUtilities({ + ".underline": { + "text-decoration-line": "underline" 
+ }, + ".overline": { + "text-decoration-line": "overline" + }, + ".line-through": { + "text-decoration-line": "line-through" + }, + ".no-underline": { + "text-decoration-line": "none" + } + }); + }, + textDecorationColor: ({ matchUtilities , theme })=>{ + matchUtilities({ + decoration: (value)=>{ + return { + "text-decoration-color": (0, _toColorValue.default)(value) + }; + } + }, { + values: (0, _flattenColorPalette.default)(theme("textDecorationColor")), + type: [ + "color", + "any" + ] + }); + }, + textDecorationStyle: ({ addUtilities })=>{ + addUtilities({ + ".decoration-solid": { + "text-decoration-style": "solid" + }, + ".decoration-double": { + "text-decoration-style": "double" + }, + ".decoration-dotted": { + "text-decoration-style": "dotted" + }, + ".decoration-dashed": { + "text-decoration-style": "dashed" + }, + ".decoration-wavy": { + "text-decoration-style": "wavy" + } + }); + }, + textDecorationThickness: (0, _createUtilityPlugin.default)("textDecorationThickness", [ + [ + "decoration", + [ + "text-decoration-thickness" + ] + ] + ], { + type: [ + "length", + "percentage" + ] + }), + textUnderlineOffset: (0, _createUtilityPlugin.default)("textUnderlineOffset", [ + [ + "underline-offset", + [ + "text-underline-offset" + ] + ] + ], { + type: [ + "length", + "percentage", + "any" + ] + }), + fontSmoothing: ({ addUtilities })=>{ + addUtilities({ + ".antialiased": { + "-webkit-font-smoothing": "antialiased", + "-moz-osx-font-smoothing": "grayscale" + }, + ".subpixel-antialiased": { + "-webkit-font-smoothing": "auto", + "-moz-osx-font-smoothing": "auto" + } + }); + }, + placeholderColor: ({ matchUtilities , theme , corePlugins })=>{ + matchUtilities({ + placeholder: (value)=>{ + if (!corePlugins("placeholderOpacity")) { + return { + "&::placeholder": { + color: (0, _toColorValue.default)(value) + } + }; + } + return { + "&::placeholder": (0, _withAlphaVariable.default)({ + color: value, + property: "color", + variable: "--tw-placeholder-opacity" + }) + }; + } + }, { + values: (0, _flattenColorPalette.default)(theme("placeholderColor")), + type: [ + "color", + "any" + ] + }); + }, + placeholderOpacity: ({ matchUtilities , theme })=>{ + matchUtilities({ + "placeholder-opacity": (value)=>{ + return { + ["&::placeholder"]: { + "--tw-placeholder-opacity": value + } + }; + } + }, { + values: theme("placeholderOpacity") + }); + }, + caretColor: ({ matchUtilities , theme })=>{ + matchUtilities({ + caret: (value)=>{ + return { + "caret-color": (0, _toColorValue.default)(value) + }; + } + }, { + values: (0, _flattenColorPalette.default)(theme("caretColor")), + type: [ + "color", + "any" + ] + }); + }, + accentColor: ({ matchUtilities , theme })=>{ + matchUtilities({ + accent: (value)=>{ + return { + "accent-color": (0, _toColorValue.default)(value) + }; + } + }, { + values: (0, _flattenColorPalette.default)(theme("accentColor")), + type: [ + "color", + "any" + ] + }); + }, + opacity: (0, _createUtilityPlugin.default)("opacity", [ + [ + "opacity", + [ + "opacity" + ] + ] + ]), + backgroundBlendMode: ({ addUtilities })=>{ + addUtilities({ + ".bg-blend-normal": { + "background-blend-mode": "normal" + }, + ".bg-blend-multiply": { + "background-blend-mode": "multiply" + }, + ".bg-blend-screen": { + "background-blend-mode": "screen" + }, + ".bg-blend-overlay": { + "background-blend-mode": "overlay" + }, + ".bg-blend-darken": { + "background-blend-mode": "darken" + }, + ".bg-blend-lighten": { + "background-blend-mode": "lighten" + }, + ".bg-blend-color-dodge": { + "background-blend-mode": 
"color-dodge" + }, + ".bg-blend-color-burn": { + "background-blend-mode": "color-burn" + }, + ".bg-blend-hard-light": { + "background-blend-mode": "hard-light" + }, + ".bg-blend-soft-light": { + "background-blend-mode": "soft-light" + }, + ".bg-blend-difference": { + "background-blend-mode": "difference" + }, + ".bg-blend-exclusion": { + "background-blend-mode": "exclusion" + }, + ".bg-blend-hue": { + "background-blend-mode": "hue" + }, + ".bg-blend-saturation": { + "background-blend-mode": "saturation" + }, + ".bg-blend-color": { + "background-blend-mode": "color" + }, + ".bg-blend-luminosity": { + "background-blend-mode": "luminosity" + } + }); + }, + mixBlendMode: ({ addUtilities })=>{ + addUtilities({ + ".mix-blend-normal": { + "mix-blend-mode": "normal" + }, + ".mix-blend-multiply": { + "mix-blend-mode": "multiply" + }, + ".mix-blend-screen": { + "mix-blend-mode": "screen" + }, + ".mix-blend-overlay": { + "mix-blend-mode": "overlay" + }, + ".mix-blend-darken": { + "mix-blend-mode": "darken" + }, + ".mix-blend-lighten": { + "mix-blend-mode": "lighten" + }, + ".mix-blend-color-dodge": { + "mix-blend-mode": "color-dodge" + }, + ".mix-blend-color-burn": { + "mix-blend-mode": "color-burn" + }, + ".mix-blend-hard-light": { + "mix-blend-mode": "hard-light" + }, + ".mix-blend-soft-light": { + "mix-blend-mode": "soft-light" + }, + ".mix-blend-difference": { + "mix-blend-mode": "difference" + }, + ".mix-blend-exclusion": { + "mix-blend-mode": "exclusion" + }, + ".mix-blend-hue": { + "mix-blend-mode": "hue" + }, + ".mix-blend-saturation": { + "mix-blend-mode": "saturation" + }, + ".mix-blend-color": { + "mix-blend-mode": "color" + }, + ".mix-blend-luminosity": { + "mix-blend-mode": "luminosity" + }, + ".mix-blend-plus-lighter": { + "mix-blend-mode": "plus-lighter" + } + }); + }, + boxShadow: (()=>{ + let transformValue = (0, _transformThemeValue.default)("boxShadow"); + let defaultBoxShadow = [ + `var(--tw-ring-offset-shadow, 0 0 #0000)`, + `var(--tw-ring-shadow, 0 0 #0000)`, + `var(--tw-shadow)` + ].join(", "); + return function({ matchUtilities , addDefaults , theme }) { + addDefaults(" box-shadow", { + "--tw-ring-offset-shadow": "0 0 #0000", + "--tw-ring-shadow": "0 0 #0000", + "--tw-shadow": "0 0 #0000", + "--tw-shadow-colored": "0 0 #0000" + }); + matchUtilities({ + shadow: (value)=>{ + value = transformValue(value); + let ast = (0, _parseBoxShadowValue.parseBoxShadowValue)(value); + for (let shadow of ast){ + // Don't override color if the whole shadow is a variable + if (!shadow.valid) { + continue; + } + shadow.color = "var(--tw-shadow-color)"; + } + return { + "@defaults box-shadow": {}, + "--tw-shadow": value === "none" ? "0 0 #0000" : value, + "--tw-shadow-colored": value === "none" ? 
"0 0 #0000" : (0, _parseBoxShadowValue.formatBoxShadowValue)(ast), + "box-shadow": defaultBoxShadow + }; + } + }, { + values: theme("boxShadow"), + type: [ + "shadow" + ] + }); + }; + })(), + boxShadowColor: ({ matchUtilities , theme })=>{ + matchUtilities({ + shadow: (value)=>{ + return { + "--tw-shadow-color": (0, _toColorValue.default)(value), + "--tw-shadow": "var(--tw-shadow-colored)" + }; + } + }, { + values: (0, _flattenColorPalette.default)(theme("boxShadowColor")), + type: [ + "color", + "any" + ] + }); + }, + outlineStyle: ({ addUtilities })=>{ + addUtilities({ + ".outline-none": { + outline: "2px solid transparent", + "outline-offset": "2px" + }, + ".outline": { + "outline-style": "solid" + }, + ".outline-dashed": { + "outline-style": "dashed" + }, + ".outline-dotted": { + "outline-style": "dotted" + }, + ".outline-double": { + "outline-style": "double" + } + }); + }, + outlineWidth: (0, _createUtilityPlugin.default)("outlineWidth", [ + [ + "outline", + [ + "outline-width" + ] + ] + ], { + type: [ + "length", + "number", + "percentage" + ] + }), + outlineOffset: (0, _createUtilityPlugin.default)("outlineOffset", [ + [ + "outline-offset", + [ + "outline-offset" + ] + ] + ], { + type: [ + "length", + "number", + "percentage", + "any" + ], + supportsNegativeValues: true + }), + outlineColor: ({ matchUtilities , theme })=>{ + matchUtilities({ + outline: (value)=>{ + return { + "outline-color": (0, _toColorValue.default)(value) + }; + } + }, { + values: (0, _flattenColorPalette.default)(theme("outlineColor")), + type: [ + "color", + "any" + ] + }); + }, + ringWidth: ({ matchUtilities , addDefaults , addUtilities , theme , config })=>{ + let ringColorDefault = (()=>{ + var _theme, _theme1; + if ((0, _featureFlags.flagEnabled)(config(), "respectDefaultRingColorOpacity")) { + return theme("ringColor.DEFAULT"); + } + let ringOpacityDefault = theme("ringOpacity.DEFAULT", "0.5"); + if (!((_theme = theme("ringColor")) === null || _theme === void 0 ? void 0 : _theme.DEFAULT)) { + return `rgb(147 197 253 / ${ringOpacityDefault})`; + } + return (0, _withAlphaVariable.withAlphaValue)((_theme1 = theme("ringColor")) === null || _theme1 === void 0 ? 
void 0 : _theme1.DEFAULT, ringOpacityDefault, `rgb(147 197 253 / ${ringOpacityDefault})`); + })(); + addDefaults("ring-width", { + "--tw-ring-inset": " ", + "--tw-ring-offset-width": theme("ringOffsetWidth.DEFAULT", "0px"), + "--tw-ring-offset-color": theme("ringOffsetColor.DEFAULT", "#fff"), + "--tw-ring-color": ringColorDefault, + "--tw-ring-offset-shadow": "0 0 #0000", + "--tw-ring-shadow": "0 0 #0000", + "--tw-shadow": "0 0 #0000", + "--tw-shadow-colored": "0 0 #0000" + }); + matchUtilities({ + ring: (value)=>{ + return { + "@defaults ring-width": {}, + "--tw-ring-offset-shadow": `var(--tw-ring-inset) 0 0 0 var(--tw-ring-offset-width) var(--tw-ring-offset-color)`, + "--tw-ring-shadow": `var(--tw-ring-inset) 0 0 0 calc(${value} + var(--tw-ring-offset-width)) var(--tw-ring-color)`, + "box-shadow": [ + `var(--tw-ring-offset-shadow)`, + `var(--tw-ring-shadow)`, + `var(--tw-shadow, 0 0 #0000)` + ].join(", ") + }; + } + }, { + values: theme("ringWidth"), + type: "length" + }); + addUtilities({ + ".ring-inset": { + "@defaults ring-width": {}, + "--tw-ring-inset": "inset" + } + }); + }, + ringColor: ({ matchUtilities , theme , corePlugins })=>{ + matchUtilities({ + ring: (value)=>{ + if (!corePlugins("ringOpacity")) { + return { + "--tw-ring-color": (0, _toColorValue.default)(value) + }; + } + return (0, _withAlphaVariable.default)({ + color: value, + property: "--tw-ring-color", + variable: "--tw-ring-opacity" + }); + } + }, { + values: Object.fromEntries(Object.entries((0, _flattenColorPalette.default)(theme("ringColor"))).filter(([modifier])=>modifier !== "DEFAULT")), + type: [ + "color", + "any" + ] + }); + }, + ringOpacity: (helpers)=>{ + let { config } = helpers; + return (0, _createUtilityPlugin.default)("ringOpacity", [ + [ + "ring-opacity", + [ + "--tw-ring-opacity" + ] + ] + ], { + filterDefault: !(0, _featureFlags.flagEnabled)(config(), "respectDefaultRingColorOpacity") + })(helpers); + }, + ringOffsetWidth: (0, _createUtilityPlugin.default)("ringOffsetWidth", [ + [ + "ring-offset", + [ + "--tw-ring-offset-width" + ] + ] + ], { + type: "length" + }), + ringOffsetColor: ({ matchUtilities , theme })=>{ + matchUtilities({ + "ring-offset": (value)=>{ + return { + "--tw-ring-offset-color": (0, _toColorValue.default)(value) + }; + } + }, { + values: (0, _flattenColorPalette.default)(theme("ringOffsetColor")), + type: [ + "color", + "any" + ] + }); + }, + blur: ({ matchUtilities , theme })=>{ + matchUtilities({ + blur: (value)=>{ + return { + "--tw-blur": `blur(${value})`, + "@defaults filter": {}, + filter: cssFilterValue + }; + } + }, { + values: theme("blur") + }); + }, + brightness: ({ matchUtilities , theme })=>{ + matchUtilities({ + brightness: (value)=>{ + return { + "--tw-brightness": `brightness(${value})`, + "@defaults filter": {}, + filter: cssFilterValue + }; + } + }, { + values: theme("brightness") + }); + }, + contrast: ({ matchUtilities , theme })=>{ + matchUtilities({ + contrast: (value)=>{ + return { + "--tw-contrast": `contrast(${value})`, + "@defaults filter": {}, + filter: cssFilterValue + }; + } + }, { + values: theme("contrast") + }); + }, + dropShadow: ({ matchUtilities , theme })=>{ + matchUtilities({ + "drop-shadow": (value)=>{ + return { + "--tw-drop-shadow": Array.isArray(value) ? 
value.map((v)=>`drop-shadow(${v})`).join(" ") : `drop-shadow(${value})`, + "@defaults filter": {}, + filter: cssFilterValue + }; + } + }, { + values: theme("dropShadow") + }); + }, + grayscale: ({ matchUtilities , theme })=>{ + matchUtilities({ + grayscale: (value)=>{ + return { + "--tw-grayscale": `grayscale(${value})`, + "@defaults filter": {}, + filter: cssFilterValue + }; + } + }, { + values: theme("grayscale") + }); + }, + hueRotate: ({ matchUtilities , theme })=>{ + matchUtilities({ + "hue-rotate": (value)=>{ + return { + "--tw-hue-rotate": `hue-rotate(${value})`, + "@defaults filter": {}, + filter: cssFilterValue + }; + } + }, { + values: theme("hueRotate"), + supportsNegativeValues: true + }); + }, + invert: ({ matchUtilities , theme })=>{ + matchUtilities({ + invert: (value)=>{ + return { + "--tw-invert": `invert(${value})`, + "@defaults filter": {}, + filter: cssFilterValue + }; + } + }, { + values: theme("invert") + }); + }, + saturate: ({ matchUtilities , theme })=>{ + matchUtilities({ + saturate: (value)=>{ + return { + "--tw-saturate": `saturate(${value})`, + "@defaults filter": {}, + filter: cssFilterValue + }; + } + }, { + values: theme("saturate") + }); + }, + sepia: ({ matchUtilities , theme })=>{ + matchUtilities({ + sepia: (value)=>{ + return { + "--tw-sepia": `sepia(${value})`, + "@defaults filter": {}, + filter: cssFilterValue + }; + } + }, { + values: theme("sepia") + }); + }, + filter: ({ addDefaults , addUtilities })=>{ + addDefaults("filter", { + "--tw-blur": " ", + "--tw-brightness": " ", + "--tw-contrast": " ", + "--tw-grayscale": " ", + "--tw-hue-rotate": " ", + "--tw-invert": " ", + "--tw-saturate": " ", + "--tw-sepia": " ", + "--tw-drop-shadow": " " + }); + addUtilities({ + ".filter": { + "@defaults filter": {}, + filter: cssFilterValue + }, + ".filter-none": { + filter: "none" + } + }); + }, + backdropBlur: ({ matchUtilities , theme })=>{ + matchUtilities({ + "backdrop-blur": (value)=>{ + return { + "--tw-backdrop-blur": `blur(${value})`, + "@defaults backdrop-filter": {}, + "backdrop-filter": cssBackdropFilterValue + }; + } + }, { + values: theme("backdropBlur") + }); + }, + backdropBrightness: ({ matchUtilities , theme })=>{ + matchUtilities({ + "backdrop-brightness": (value)=>{ + return { + "--tw-backdrop-brightness": `brightness(${value})`, + "@defaults backdrop-filter": {}, + "backdrop-filter": cssBackdropFilterValue + }; + } + }, { + values: theme("backdropBrightness") + }); + }, + backdropContrast: ({ matchUtilities , theme })=>{ + matchUtilities({ + "backdrop-contrast": (value)=>{ + return { + "--tw-backdrop-contrast": `contrast(${value})`, + "@defaults backdrop-filter": {}, + "backdrop-filter": cssBackdropFilterValue + }; + } + }, { + values: theme("backdropContrast") + }); + }, + backdropGrayscale: ({ matchUtilities , theme })=>{ + matchUtilities({ + "backdrop-grayscale": (value)=>{ + return { + "--tw-backdrop-grayscale": `grayscale(${value})`, + "@defaults backdrop-filter": {}, + "backdrop-filter": cssBackdropFilterValue + }; + } + }, { + values: theme("backdropGrayscale") + }); + }, + backdropHueRotate: ({ matchUtilities , theme })=>{ + matchUtilities({ + "backdrop-hue-rotate": (value)=>{ + return { + "--tw-backdrop-hue-rotate": `hue-rotate(${value})`, + "@defaults backdrop-filter": {}, + "backdrop-filter": cssBackdropFilterValue + }; + } + }, { + values: theme("backdropHueRotate"), + supportsNegativeValues: true + }); + }, + backdropInvert: ({ matchUtilities , theme })=>{ + matchUtilities({ + "backdrop-invert": (value)=>{ + return { + 
"--tw-backdrop-invert": `invert(${value})`, + "@defaults backdrop-filter": {}, + "backdrop-filter": cssBackdropFilterValue + }; + } + }, { + values: theme("backdropInvert") + }); + }, + backdropOpacity: ({ matchUtilities , theme })=>{ + matchUtilities({ + "backdrop-opacity": (value)=>{ + return { + "--tw-backdrop-opacity": `opacity(${value})`, + "@defaults backdrop-filter": {}, + "backdrop-filter": cssBackdropFilterValue + }; + } + }, { + values: theme("backdropOpacity") + }); + }, + backdropSaturate: ({ matchUtilities , theme })=>{ + matchUtilities({ + "backdrop-saturate": (value)=>{ + return { + "--tw-backdrop-saturate": `saturate(${value})`, + "@defaults backdrop-filter": {}, + "backdrop-filter": cssBackdropFilterValue + }; + } + }, { + values: theme("backdropSaturate") + }); + }, + backdropSepia: ({ matchUtilities , theme })=>{ + matchUtilities({ + "backdrop-sepia": (value)=>{ + return { + "--tw-backdrop-sepia": `sepia(${value})`, + "@defaults backdrop-filter": {}, + "backdrop-filter": cssBackdropFilterValue + }; + } + }, { + values: theme("backdropSepia") + }); + }, + backdropFilter: ({ addDefaults , addUtilities })=>{ + addDefaults("backdrop-filter", { + "--tw-backdrop-blur": " ", + "--tw-backdrop-brightness": " ", + "--tw-backdrop-contrast": " ", + "--tw-backdrop-grayscale": " ", + "--tw-backdrop-hue-rotate": " ", + "--tw-backdrop-invert": " ", + "--tw-backdrop-opacity": " ", + "--tw-backdrop-saturate": " ", + "--tw-backdrop-sepia": " " + }); + addUtilities({ + ".backdrop-filter": { + "@defaults backdrop-filter": {}, + "backdrop-filter": cssBackdropFilterValue + }, + ".backdrop-filter-none": { + "backdrop-filter": "none" + } + }); + }, + transitionProperty: ({ matchUtilities , theme })=>{ + let defaultTimingFunction = theme("transitionTimingFunction.DEFAULT"); + let defaultDuration = theme("transitionDuration.DEFAULT"); + matchUtilities({ + transition: (value)=>{ + return { + "transition-property": value, + ...value === "none" ? 
{} : { + "transition-timing-function": defaultTimingFunction, + "transition-duration": defaultDuration + } + }; + } + }, { + values: theme("transitionProperty") + }); + }, + transitionDelay: (0, _createUtilityPlugin.default)("transitionDelay", [ + [ + "delay", + [ + "transitionDelay" + ] + ] + ]), + transitionDuration: (0, _createUtilityPlugin.default)("transitionDuration", [ + [ + "duration", + [ + "transitionDuration" + ] + ] + ], { + filterDefault: true + }), + transitionTimingFunction: (0, _createUtilityPlugin.default)("transitionTimingFunction", [ + [ + "ease", + [ + "transitionTimingFunction" + ] + ] + ], { + filterDefault: true + }), + willChange: (0, _createUtilityPlugin.default)("willChange", [ + [ + "will-change", + [ + "will-change" + ] + ] + ]), + content: (0, _createUtilityPlugin.default)("content", [ + [ + "content", + [ + "--tw-content", + [ + "content", + "var(--tw-content)" + ] + ] + ] + ]) +}; diff --git a/node_modules/tailwindcss/lib/css/LICENSE b/node_modules/tailwindcss/lib/css/LICENSE new file mode 100644 index 0000000..a1fb039 --- /dev/null +++ b/node_modules/tailwindcss/lib/css/LICENSE @@ -0,0 +1,25 @@ +MIT License + +Copyright (c) Nicolas Gallagher +Copyright (c) Jonathan Neal +Copyright (c) Sindre Sorhus (sindresorhus.com) +Copyright (c) Adam Wathan +Copyright (c) Jonathan Reinink + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/tailwindcss/lib/css/preflight.css b/node_modules/tailwindcss/lib/css/preflight.css new file mode 100644 index 0000000..fab875d --- /dev/null +++ b/node_modules/tailwindcss/lib/css/preflight.css @@ -0,0 +1,369 @@ +/* +1. Prevent padding and border from affecting element width. (https://github.com/mozdevs/cssremedy/issues/4) +2. Allow adding a border to an element by just adding a border-width. (https://github.com/tailwindcss/tailwindcss/pull/116) +*/ + +*, +::before, +::after { + box-sizing: border-box; /* 1 */ + border-width: 0; /* 2 */ + border-style: solid; /* 2 */ + border-color: theme('borderColor.DEFAULT', currentColor); /* 2 */ +} + +::before, +::after { + --tw-content: ''; +} + +/* +1. Use a consistent sensible line-height in all browsers. +2. Prevent adjustments of font size after orientation changes in iOS. +3. Use a more readable tab size. +4. Use the user's configured `sans` font-family by default. +5. Use the user's configured `sans` font-feature-settings by default. +6. Use the user's configured `sans` font-variation-settings by default. 
+*/ + +html { + line-height: 1.5; /* 1 */ + -webkit-text-size-adjust: 100%; /* 2 */ + -moz-tab-size: 4; /* 3 */ + tab-size: 4; /* 3 */ + font-family: theme('fontFamily.sans', ui-sans-serif, system-ui, -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, "Helvetica Neue", Arial, "Noto Sans", sans-serif, "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol", "Noto Color Emoji"); /* 4 */ + font-feature-settings: theme('fontFamily.sans[1].fontFeatureSettings', normal); /* 5 */ + font-variation-settings: theme('fontFamily.sans[1].fontVariationSettings', normal); /* 6 */ +} + +/* +1. Remove the margin in all browsers. +2. Inherit line-height from `html` so users can set them as a class directly on the `html` element. +*/ + +body { + margin: 0; /* 1 */ + line-height: inherit; /* 2 */ +} + +/* +1. Add the correct height in Firefox. +2. Correct the inheritance of border color in Firefox. (https://bugzilla.mozilla.org/show_bug.cgi?id=190655) +3. Ensure horizontal rules are visible by default. +*/ + +hr { + height: 0; /* 1 */ + color: inherit; /* 2 */ + border-top-width: 1px; /* 3 */ +} + +/* +Add the correct text decoration in Chrome, Edge, and Safari. +*/ + +abbr:where([title]) { + text-decoration: underline dotted; +} + +/* +Remove the default font size and weight for headings. +*/ + +h1, +h2, +h3, +h4, +h5, +h6 { + font-size: inherit; + font-weight: inherit; +} + +/* +Reset links to optimize for opt-in styling instead of opt-out. +*/ + +a { + color: inherit; + text-decoration: inherit; +} + +/* +Add the correct font weight in Edge and Safari. +*/ + +b, +strong { + font-weight: bolder; +} + +/* +1. Use the user's configured `mono` font family by default. +2. Correct the odd `em` font sizing in all browsers. +*/ + +code, +kbd, +samp, +pre { + font-family: theme('fontFamily.mono', ui-monospace, SFMono-Regular, Menlo, Monaco, Consolas, "Liberation Mono", "Courier New", monospace); /* 1 */ + font-size: 1em; /* 2 */ +} + +/* +Add the correct font size in all browsers. +*/ + +small { + font-size: 80%; +} + +/* +Prevent `sub` and `sup` elements from affecting the line height in all browsers. +*/ + +sub, +sup { + font-size: 75%; + line-height: 0; + position: relative; + vertical-align: baseline; +} + +sub { + bottom: -0.25em; +} + +sup { + top: -0.5em; +} + +/* +1. Remove text indentation from table contents in Chrome and Safari. (https://bugs.chromium.org/p/chromium/issues/detail?id=999088, https://bugs.webkit.org/show_bug.cgi?id=201297) +2. Correct table border color inheritance in all Chrome and Safari. (https://bugs.chromium.org/p/chromium/issues/detail?id=935729, https://bugs.webkit.org/show_bug.cgi?id=195016) +3. Remove gaps between table borders by default. +*/ + +table { + text-indent: 0; /* 1 */ + border-color: inherit; /* 2 */ + border-collapse: collapse; /* 3 */ +} + +/* +1. Change the font styles in all browsers. +2. Remove the margin in Firefox and Safari. +3. Remove default padding in all browsers. +*/ + +button, +input, +optgroup, +select, +textarea { + font-family: inherit; /* 1 */ + font-size: 100%; /* 1 */ + font-weight: inherit; /* 1 */ + line-height: inherit; /* 1 */ + color: inherit; /* 1 */ + margin: 0; /* 2 */ + padding: 0; /* 3 */ +} + +/* +Remove the inheritance of text transform in Edge and Firefox. +*/ + +button, +select { + text-transform: none; +} + +/* +1. Correct the inability to style clickable types in iOS and Safari. +2. Remove default button styles. 
+*/ + +button, +[type='button'], +[type='reset'], +[type='submit'] { + -webkit-appearance: button; /* 1 */ + background-color: transparent; /* 2 */ + background-image: none; /* 2 */ +} + +/* +Use the modern Firefox focus style for all focusable elements. +*/ + +:-moz-focusring { + outline: auto; +} + +/* +Remove the additional `:invalid` styles in Firefox. (https://github.com/mozilla/gecko-dev/blob/2f9eacd9d3d995c937b4251a5557d95d494c9be1/layout/style/res/forms.css#L728-L737) +*/ + +:-moz-ui-invalid { + box-shadow: none; +} + +/* +Add the correct vertical alignment in Chrome and Firefox. +*/ + +progress { + vertical-align: baseline; +} + +/* +Correct the cursor style of increment and decrement buttons in Safari. +*/ + +::-webkit-inner-spin-button, +::-webkit-outer-spin-button { + height: auto; +} + +/* +1. Correct the odd appearance in Chrome and Safari. +2. Correct the outline style in Safari. +*/ + +[type='search'] { + -webkit-appearance: textfield; /* 1 */ + outline-offset: -2px; /* 2 */ +} + +/* +Remove the inner padding in Chrome and Safari on macOS. +*/ + +::-webkit-search-decoration { + -webkit-appearance: none; +} + +/* +1. Correct the inability to style clickable types in iOS and Safari. +2. Change font properties to `inherit` in Safari. +*/ + +::-webkit-file-upload-button { + -webkit-appearance: button; /* 1 */ + font: inherit; /* 2 */ +} + +/* +Add the correct display in Chrome and Safari. +*/ + +summary { + display: list-item; +} + +/* +Removes the default spacing and border for appropriate elements. +*/ + +blockquote, +dl, +dd, +h1, +h2, +h3, +h4, +h5, +h6, +hr, +figure, +p, +pre { + margin: 0; +} + +fieldset { + margin: 0; + padding: 0; +} + +legend { + padding: 0; +} + +ol, +ul, +menu { + list-style: none; + margin: 0; + padding: 0; +} + +/* +Prevent resizing textareas horizontally by default. +*/ + +textarea { + resize: vertical; +} + +/* +1. Reset the default placeholder opacity in Firefox. (https://github.com/tailwindlabs/tailwindcss/issues/3300) +2. Set the default placeholder color to the user's configured gray 400 color. +*/ + +input::placeholder, +textarea::placeholder { + opacity: 1; /* 1 */ + color: theme('colors.gray.400', #9ca3af); /* 2 */ +} + +/* +Set the default cursor for buttons. +*/ + +button, +[role="button"] { + cursor: pointer; +} + +/* +Make sure disabled buttons don't get the pointer cursor. +*/ +:disabled { + cursor: default; +} + +/* +1. Make replaced elements `display: block` by default. (https://github.com/mozdevs/cssremedy/issues/14) +2. Add `vertical-align: middle` to align replaced elements more sensibly by default. (https://github.com/jensimmons/cssremedy/issues/14#issuecomment-634934210) + This can trigger a poorly considered lint error in some tools but is included by design. +*/ + +img, +svg, +video, +canvas, +audio, +iframe, +embed, +object { + display: block; /* 1 */ + vertical-align: middle; /* 2 */ +} + +/* +Constrain images and videos to the parent width and preserve their intrinsic aspect ratio. 
(https://github.com/mozdevs/cssremedy/issues/14) +*/ + +img, +video { + max-width: 100%; + height: auto; +} + +/* Make elements with the HTML hidden attribute stay hidden by default */ +[hidden] { + display: none; +} diff --git a/node_modules/tailwindcss/lib/featureFlags.js b/node_modules/tailwindcss/lib/featureFlags.js new file mode 100644 index 0000000..c2e42d2 --- /dev/null +++ b/node_modules/tailwindcss/lib/featureFlags.js @@ -0,0 +1,83 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { + value: true +}); +function _export(target, all) { + for(var name in all)Object.defineProperty(target, name, { + enumerable: true, + get: all[name] + }); +} +_export(exports, { + flagEnabled: function() { + return flagEnabled; + }, + issueFlagNotices: function() { + return issueFlagNotices; + }, + default: function() { + return _default; + } +}); +const _picocolors = /*#__PURE__*/ _interop_require_default(require("picocolors")); +const _log = /*#__PURE__*/ _interop_require_default(require("./util/log")); +function _interop_require_default(obj) { + return obj && obj.__esModule ? obj : { + default: obj + }; +} +let defaults = { + optimizeUniversalDefaults: false, + generalizedModifiers: true, + get disableColorOpacityUtilitiesByDefault () { + return false; + }, + get relativeContentPathsByDefault () { + return false; + } +}; +let featureFlags = { + future: [ + "hoverOnlyWhenSupported", + "respectDefaultRingColorOpacity", + "disableColorOpacityUtilitiesByDefault", + "relativeContentPathsByDefault" + ], + experimental: [ + "optimizeUniversalDefaults", + "generalizedModifiers" + ] +}; +function flagEnabled(config, flag) { + if (featureFlags.future.includes(flag)) { + var _config_future; + var _config_future_flag, _ref; + return config.future === "all" || ((_ref = (_config_future_flag = config === null || config === void 0 ? void 0 : (_config_future = config.future) === null || _config_future === void 0 ? void 0 : _config_future[flag]) !== null && _config_future_flag !== void 0 ? _config_future_flag : defaults[flag]) !== null && _ref !== void 0 ? _ref : false); + } + if (featureFlags.experimental.includes(flag)) { + var _config_experimental; + var _config_experimental_flag, _ref1; + return config.experimental === "all" || ((_ref1 = (_config_experimental_flag = config === null || config === void 0 ? void 0 : (_config_experimental = config.experimental) === null || _config_experimental === void 0 ? void 0 : _config_experimental[flag]) !== null && _config_experimental_flag !== void 0 ? _config_experimental_flag : defaults[flag]) !== null && _ref1 !== void 0 ? _ref1 : false); + } + return false; +} +function experimentalFlagsEnabled(config) { + if (config.experimental === "all") { + return featureFlags.experimental; + } + var _config_experimental; + return Object.keys((_config_experimental = config === null || config === void 0 ? void 0 : config.experimental) !== null && _config_experimental !== void 0 ? 
_config_experimental : {}).filter((flag)=>featureFlags.experimental.includes(flag) && config.experimental[flag]); +} +function issueFlagNotices(config) { + if (process.env.JEST_WORKER_ID !== undefined) { + return; + } + if (experimentalFlagsEnabled(config).length > 0) { + let changes = experimentalFlagsEnabled(config).map((s)=>_picocolors.default.yellow(s)).join(", "); + _log.default.warn("experimental-flags-enabled", [ + `You have enabled experimental features: ${changes}`, + "Experimental features in Tailwind CSS are not covered by semver, may introduce breaking changes, and can change at any time." + ]); + } +} +const _default = featureFlags; diff --git a/node_modules/tailwindcss/lib/index.js b/node_modules/tailwindcss/lib/index.js new file mode 100644 index 0000000..c947d97 --- /dev/null +++ b/node_modules/tailwindcss/lib/index.js @@ -0,0 +1,2 @@ +"use strict"; +module.exports = require("./plugin"); diff --git a/node_modules/tailwindcss/lib/lib/cacheInvalidation.js b/node_modules/tailwindcss/lib/lib/cacheInvalidation.js new file mode 100644 index 0000000..c247179 --- /dev/null +++ b/node_modules/tailwindcss/lib/lib/cacheInvalidation.js @@ -0,0 +1,92 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { + value: true +}); +Object.defineProperty(exports, "hasContentChanged", { + enumerable: true, + get: function() { + return hasContentChanged; + } +}); +const _crypto = /*#__PURE__*/ _interop_require_default(require("crypto")); +const _sharedState = /*#__PURE__*/ _interop_require_wildcard(require("./sharedState")); +function _interop_require_default(obj) { + return obj && obj.__esModule ? obj : { + default: obj + }; +} +function _getRequireWildcardCache(nodeInterop) { + if (typeof WeakMap !== "function") return null; + var cacheBabelInterop = new WeakMap(); + var cacheNodeInterop = new WeakMap(); + return (_getRequireWildcardCache = function(nodeInterop) { + return nodeInterop ? cacheNodeInterop : cacheBabelInterop; + })(nodeInterop); +} +function _interop_require_wildcard(obj, nodeInterop) { + if (!nodeInterop && obj && obj.__esModule) { + return obj; + } + if (obj === null || typeof obj !== "object" && typeof obj !== "function") { + return { + default: obj + }; + } + var cache = _getRequireWildcardCache(nodeInterop); + if (cache && cache.has(obj)) { + return cache.get(obj); + } + var newObj = {}; + var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor; + for(var key in obj){ + if (key !== "default" && Object.prototype.hasOwnProperty.call(obj, key)) { + var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null; + if (desc && (desc.get || desc.set)) { + Object.defineProperty(newObj, key, desc); + } else { + newObj[key] = obj[key]; + } + } + } + newObj.default = obj; + if (cache) { + cache.set(obj, newObj); + } + return newObj; +} +/** + * Calculate the hash of a string. + * + * This doesn't need to be cryptographically secure or + * anything like that since it's used only to detect + * when the CSS changes to invalidate the context. + * + * This is wrapped in a try/catch because it's really dependent + * on how Node itself is build and the environment and OpenSSL + * version / build that is installed on the user's machine. + * + * Based on the environment this can just outright fail. 
+ * + * See https://github.com/nodejs/node/issues/40455 + * + * @param {string} str + */ function getHash(str) { + try { + return _crypto.default.createHash("md5").update(str, "utf-8").digest("binary"); + } catch (err) { + return ""; + } +} +function hasContentChanged(sourcePath, root) { + let css = root.toString(); + // We only care about files with @tailwind directives + // Other files use an existing context + if (!css.includes("@tailwind")) { + return false; + } + let existingHash = _sharedState.sourceHashMap.get(sourcePath); + let rootHash = getHash(css); + let didChange = existingHash !== rootHash; + _sharedState.sourceHashMap.set(sourcePath, rootHash); + return didChange; +} diff --git a/node_modules/tailwindcss/lib/lib/collapseAdjacentRules.js b/node_modules/tailwindcss/lib/lib/collapseAdjacentRules.js new file mode 100644 index 0000000..e900872 --- /dev/null +++ b/node_modules/tailwindcss/lib/lib/collapseAdjacentRules.js @@ -0,0 +1,61 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { + value: true +}); +Object.defineProperty(exports, "default", { + enumerable: true, + get: function() { + return collapseAdjacentRules; + } +}); +let comparisonMap = { + atrule: [ + "name", + "params" + ], + rule: [ + "selector" + ] +}; +let types = new Set(Object.keys(comparisonMap)); +function collapseAdjacentRules() { + function collapseRulesIn(root) { + let currentRule = null; + root.each((node)=>{ + if (!types.has(node.type)) { + currentRule = null; + return; + } + if (currentRule === null) { + currentRule = node; + return; + } + let properties = comparisonMap[node.type]; + var _node_property, _currentRule_property; + if (node.type === "atrule" && node.name === "font-face") { + currentRule = node; + } else if (properties.every((property)=>((_node_property = node[property]) !== null && _node_property !== void 0 ? _node_property : "").replace(/\s+/g, " ") === ((_currentRule_property = currentRule[property]) !== null && _currentRule_property !== void 0 ? _currentRule_property : "").replace(/\s+/g, " "))) { + // An AtRule may not have children (for example if we encounter duplicate @import url(…) rules) + if (node.nodes) { + currentRule.append(node.nodes); + } + node.remove(); + } else { + currentRule = node; + } + }); + // After we've collapsed adjacent rules & at-rules, we need to collapse + // adjacent rules & at-rules that are children of at-rules. + // We do not care about nesting rules because Tailwind CSS + // explicitly does not handle rule nesting on its own as + // the user is expected to use a nesting plugin + root.each((node)=>{ + if (node.type === "atrule") { + collapseRulesIn(node); + } + }); + } + return (root)=>{ + collapseRulesIn(root); + }; +} diff --git a/node_modules/tailwindcss/lib/lib/collapseDuplicateDeclarations.js b/node_modules/tailwindcss/lib/lib/collapseDuplicateDeclarations.js new file mode 100644 index 0000000..70a1ad1 --- /dev/null +++ b/node_modules/tailwindcss/lib/lib/collapseDuplicateDeclarations.js @@ -0,0 +1,85 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { + value: true +}); +Object.defineProperty(exports, "default", { + enumerable: true, + get: function() { + return collapseDuplicateDeclarations; + } +}); +function collapseDuplicateDeclarations() { + return (root)=>{ + root.walkRules((node)=>{ + let seen = new Map(); + let droppable = new Set([]); + let byProperty = new Map(); + node.walkDecls((decl)=>{ + // This could happen if we have nested selectors. 
In that case the + // parent will loop over all its declarations but also the declarations + // of nested rules. With this we ensure that we are shallowly checking + // declarations. + if (decl.parent !== node) { + return; + } + if (seen.has(decl.prop)) { + // Exact same value as what we have seen so far + if (seen.get(decl.prop).value === decl.value) { + // Keep the last one, drop the one we've seen so far + droppable.add(seen.get(decl.prop)); + // Override the existing one with the new value. This is necessary + // so that if we happen to have more than one declaration with the + // same value, that we keep removing the previous one. Otherwise we + // will only remove the *first* one. + seen.set(decl.prop, decl); + return; + } + // Not the same value, so we need to check if we can merge it so + // let's collect it first. + if (!byProperty.has(decl.prop)) { + byProperty.set(decl.prop, new Set()); + } + byProperty.get(decl.prop).add(seen.get(decl.prop)); + byProperty.get(decl.prop).add(decl); + } + seen.set(decl.prop, decl); + }); + // Drop all the duplicate declarations with the exact same value we've + // already seen so far. + for (let decl of droppable){ + decl.remove(); + } + // Analyze the declarations based on its unit, drop all the declarations + // with the same unit but the last one in the list. + for (let declarations of byProperty.values()){ + let byUnit = new Map(); + for (let decl of declarations){ + let unit = resolveUnit(decl.value); + if (unit === null) { + continue; + } + if (!byUnit.has(unit)) { + byUnit.set(unit, new Set()); + } + byUnit.get(unit).add(decl); + } + for (let declarations of byUnit.values()){ + // Get all but the last one + let removableDeclarations = Array.from(declarations).slice(0, -1); + for (let decl of removableDeclarations){ + decl.remove(); + } + } + } + }); + }; +} +let UNITLESS_NUMBER = Symbol("unitless-number"); +function resolveUnit(input) { + let result = /^-?\d*.?\d+([\w%]+)?$/g.exec(input); + if (result) { + var _result_; + return (_result_ = result[1]) !== null && _result_ !== void 0 ? _result_ : UNITLESS_NUMBER; + } + return null; +} diff --git a/node_modules/tailwindcss/lib/lib/content.js b/node_modules/tailwindcss/lib/lib/content.js new file mode 100644 index 0000000..b37e360 --- /dev/null +++ b/node_modules/tailwindcss/lib/lib/content.js @@ -0,0 +1,181 @@ +// @ts-check +"use strict"; +Object.defineProperty(exports, "__esModule", { + value: true +}); +function _export(target, all) { + for(var name in all)Object.defineProperty(target, name, { + enumerable: true, + get: all[name] + }); +} +_export(exports, { + parseCandidateFiles: function() { + return parseCandidateFiles; + }, + resolvedChangedContent: function() { + return resolvedChangedContent; + } +}); +const _fs = /*#__PURE__*/ _interop_require_default(require("fs")); +const _path = /*#__PURE__*/ _interop_require_default(require("path")); +const _isglob = /*#__PURE__*/ _interop_require_default(require("is-glob")); +const _fastglob = /*#__PURE__*/ _interop_require_default(require("fast-glob")); +const _normalizepath = /*#__PURE__*/ _interop_require_default(require("normalize-path")); +const _parseGlob = require("../util/parseGlob"); +const _sharedState = require("./sharedState"); +function _interop_require_default(obj) { + return obj && obj.__esModule ? 
obj : { + default: obj + }; +} +function parseCandidateFiles(context, tailwindConfig) { + let files = tailwindConfig.content.files; + // Normalize the file globs + files = files.filter((filePath)=>typeof filePath === "string"); + files = files.map(_normalizepath.default); + // Split into included and excluded globs + let tasks = _fastglob.default.generateTasks(files); + /** @type {ContentPath[]} */ let included = []; + /** @type {ContentPath[]} */ let excluded = []; + for (const task of tasks){ + included.push(...task.positive.map((filePath)=>parseFilePath(filePath, false))); + excluded.push(...task.negative.map((filePath)=>parseFilePath(filePath, true))); + } + let paths = [ + ...included, + ...excluded + ]; + // Resolve paths relative to the config file or cwd + paths = resolveRelativePaths(context, paths); + // Resolve symlinks if possible + paths = paths.flatMap(resolvePathSymlinks); + // Update cached patterns + paths = paths.map(resolveGlobPattern); + return paths; +} +/** + * + * @param {string} filePath + * @param {boolean} ignore + * @returns {ContentPath} + */ function parseFilePath(filePath, ignore) { + let contentPath = { + original: filePath, + base: filePath, + ignore, + pattern: filePath, + glob: null + }; + if ((0, _isglob.default)(filePath)) { + Object.assign(contentPath, (0, _parseGlob.parseGlob)(filePath)); + } + return contentPath; +} +/** + * + * @param {ContentPath} contentPath + * @returns {ContentPath} + */ function resolveGlobPattern(contentPath) { + // This is required for Windows support to properly pick up Glob paths. + // Afaik, this technically shouldn't be needed but there's probably + // some internal, direct path matching with a normalized path in + // a package which can't handle mixed directory separators + let base = (0, _normalizepath.default)(contentPath.base); + // If the user's file path contains any special characters (like parens) for instance fast-glob + // is like "OOOH SHINY" and treats them as such. So we have to escape the base path to fix this + base = _fastglob.default.escapePath(base); + contentPath.pattern = contentPath.glob ? `${base}/${contentPath.glob}` : base; + contentPath.pattern = contentPath.ignore ? `!${contentPath.pattern}` : contentPath.pattern; + return contentPath; +} +/** + * Resolve each path relative to the config file (when possible) if the experimental flag is enabled + * Otherwise, resolve relative to the current working directory + * + * @param {any} context + * @param {ContentPath[]} contentPaths + * @returns {ContentPath[]} + */ function resolveRelativePaths(context, contentPaths) { + let resolveFrom = []; + // Resolve base paths relative to the config file (when possible) if the experimental flag is enabled + if (context.userConfigPath && context.tailwindConfig.content.relative) { + resolveFrom = [ + _path.default.dirname(context.userConfigPath) + ]; + } + return contentPaths.map((contentPath)=>{ + contentPath.base = _path.default.resolve(...resolveFrom, contentPath.base); + return contentPath; + }); +} +/** + * Resolve the symlink for the base directory / file in each path + * These are added as additional dependencies to watch for changes because + * some tools (like webpack) will only watch the actual file or directory + * but not the symlink itself even in projects that use monorepos. 
+ * + * @param {ContentPath} contentPath + * @returns {ContentPath[]} + */ function resolvePathSymlinks(contentPath) { + let paths = [ + contentPath + ]; + try { + let resolvedPath = _fs.default.realpathSync(contentPath.base); + if (resolvedPath !== contentPath.base) { + paths.push({ + ...contentPath, + base: resolvedPath + }); + } + } catch { + // TODO: log this? + } + return paths; +} +function resolvedChangedContent(context, candidateFiles, fileModifiedMap) { + let changedContent = context.tailwindConfig.content.files.filter((item)=>typeof item.raw === "string").map(({ raw , extension ="html" })=>({ + content: raw, + extension + })); + let [changedFiles, mTimesToCommit] = resolveChangedFiles(candidateFiles, fileModifiedMap); + for (let changedFile of changedFiles){ + let extension = _path.default.extname(changedFile).slice(1); + changedContent.push({ + file: changedFile, + extension + }); + } + return [ + changedContent, + mTimesToCommit + ]; +} +/** + * + * @param {ContentPath[]} candidateFiles + * @param {Map} fileModifiedMap + * @returns {[Set, Map]} + */ function resolveChangedFiles(candidateFiles, fileModifiedMap) { + let paths = candidateFiles.map((contentPath)=>contentPath.pattern); + let mTimesToCommit = new Map(); + let changedFiles = new Set(); + _sharedState.env.DEBUG && console.time("Finding changed files"); + let files = _fastglob.default.sync(paths, { + absolute: true + }); + for (let file of files){ + let prevModified = fileModifiedMap.get(file) || -Infinity; + let modified = _fs.default.statSync(file).mtimeMs; + if (modified > prevModified) { + changedFiles.add(file); + mTimesToCommit.set(file, modified); + } + } + _sharedState.env.DEBUG && console.timeEnd("Finding changed files"); + return [ + changedFiles, + mTimesToCommit + ]; +} diff --git a/node_modules/tailwindcss/lib/lib/defaultExtractor.js b/node_modules/tailwindcss/lib/lib/defaultExtractor.js new file mode 100644 index 0000000..198e5a7 --- /dev/null +++ b/node_modules/tailwindcss/lib/lib/defaultExtractor.js @@ -0,0 +1,243 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { + value: true +}); +Object.defineProperty(exports, "defaultExtractor", { + enumerable: true, + get: function() { + return defaultExtractor; + } +}); +const _featureFlags = require("../featureFlags"); +const _regex = /*#__PURE__*/ _interop_require_wildcard(require("./regex")); +function _getRequireWildcardCache(nodeInterop) { + if (typeof WeakMap !== "function") return null; + var cacheBabelInterop = new WeakMap(); + var cacheNodeInterop = new WeakMap(); + return (_getRequireWildcardCache = function(nodeInterop) { + return nodeInterop ? cacheNodeInterop : cacheBabelInterop; + })(nodeInterop); +} +function _interop_require_wildcard(obj, nodeInterop) { + if (!nodeInterop && obj && obj.__esModule) { + return obj; + } + if (obj === null || typeof obj !== "object" && typeof obj !== "function") { + return { + default: obj + }; + } + var cache = _getRequireWildcardCache(nodeInterop); + if (cache && cache.has(obj)) { + return cache.get(obj); + } + var newObj = {}; + var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor; + for(var key in obj){ + if (key !== "default" && Object.prototype.hasOwnProperty.call(obj, key)) { + var desc = hasPropertyDescriptor ? 
Object.getOwnPropertyDescriptor(obj, key) : null; + if (desc && (desc.get || desc.set)) { + Object.defineProperty(newObj, key, desc); + } else { + newObj[key] = obj[key]; + } + } + } + newObj.default = obj; + if (cache) { + cache.set(obj, newObj); + } + return newObj; +} +function defaultExtractor(context) { + let patterns = Array.from(buildRegExps(context)); + /** + * @param {string} content + */ return (content)=>{ + /** @type {(string|string)[]} */ let results = []; + for (let pattern of patterns){ + var _content_match; + results = [ + ...results, + ...(_content_match = content.match(pattern)) !== null && _content_match !== void 0 ? _content_match : [] + ]; + } + return results.filter((v)=>v !== undefined).map(clipAtBalancedParens); + }; +} +function* buildRegExps(context) { + let separator = context.tailwindConfig.separator; + let variantGroupingEnabled = (0, _featureFlags.flagEnabled)(context.tailwindConfig, "variantGrouping"); + let prefix = context.tailwindConfig.prefix !== "" ? _regex.optional(_regex.pattern([ + /-?/, + _regex.escape(context.tailwindConfig.prefix) + ])) : ""; + let utility = _regex.any([ + // Arbitrary properties (without square brackets) + /\[[^\s:'"`]+:[^\s\[\]]+\]/, + // Arbitrary properties with balanced square brackets + // This is a targeted fix to continue to allow theme() + // with square brackets to work in arbitrary properties + // while fixing a problem with the regex matching too much + /\[[^\s:'"`]+:[^\s]+?\[[^\s]+\][^\s]+?\]/, + // Utilities + _regex.pattern([ + // Utility Name / Group Name + /-?(?:\w+)/, + // Normal/Arbitrary values + _regex.optional(_regex.any([ + _regex.pattern([ + // Arbitrary values + /-(?:\w+-)*\[[^\s:]+\]/, + // Not immediately followed by an `{[(` + /(?![{([]])/, + // optionally followed by an opacity modifier + /(?:\/[^\s'"`\\><$]*)?/ + ]), + _regex.pattern([ + // Arbitrary values + /-(?:\w+-)*\[[^\s]+\]/, + // Not immediately followed by an `{[(` + /(?![{([]])/, + // optionally followed by an opacity modifier + /(?:\/[^\s'"`\\$]*)?/ + ]), + // Normal values w/o quotes — may include an opacity modifier + /[-\/][^\s'"`\\$={><]*/ + ])) + ]) + ]); + let variantPatterns = [ + // Without quotes + _regex.any([ + // This is here to provide special support for the `@` variant + _regex.pattern([ + /@\[[^\s"'`]+\](\/[^\s"'`]+)?/, + separator + ]), + _regex.pattern([ + /([^\s"'`\[\\]+-)?\[[^\s"'`]+\]/, + separator + ]), + _regex.pattern([ + /[^\s"'`\[\\]+/, + separator + ]) + ]), + // With quotes allowed + _regex.any([ + _regex.pattern([ + /([^\s"'`\[\\]+-)?\[[^\s`]+\]/, + separator + ]), + _regex.pattern([ + /[^\s`\[\\]+/, + separator + ]) + ]) + ]; + for (const variantPattern of variantPatterns){ + yield _regex.pattern([ + // Variants + "((?=((", + variantPattern, + ")+))\\2)?", + // Important (optional) + /!?/, + prefix, + variantGroupingEnabled ? _regex.any([ + // Or any of those things but grouped separated by commas + _regex.pattern([ + /\(/, + utility, + _regex.zeroOrMore([ + /,/, + utility + ]), + /\)/ + ]), + // Arbitrary properties, constrained utilities, arbitrary values, etc… + utility + ]) : utility + ]); + } + // 5. 
Inner matches + yield /[^<>"'`\s.(){}[\]#=%$]*[^<>"'`\s.(){}[\]#=%:$]/g; +} +// We want to capture any "special" characters +// AND the characters immediately following them (if there is one) +let SPECIALS = /([\[\]'"`])([^\[\]'"`])?/g; +let ALLOWED_CLASS_CHARACTERS = /[^"'`\s<>\]]+/; +/** + * Clips a string ensuring that parentheses, quotes, etc… are balanced + * Used for arbitrary values only + * + * We will go past the end of the balanced parens until we find a non-class character + * + * Depth matching behavior: + * w-[calc(100%-theme('spacing[some_key][1.5]'))]'] + * ┬ ┬ ┬┬ ┬ ┬┬ ┬┬┬┬┬┬┬ + * 1 2 3 4 34 3 210 END + * ╰────┴──────────┴────────┴────────┴┴───┴─┴┴┴ + * + * @param {string} input + */ function clipAtBalancedParens(input) { + // We are care about this for arbitrary values + if (!input.includes("-[")) { + return input; + } + let depth = 0; + let openStringTypes = []; + // Find all parens, brackets, quotes, etc + // Stop when we end at a balanced pair + // This is naive and will treat mismatched parens as balanced + // This shouldn't be a problem in practice though + let matches = input.matchAll(SPECIALS); + // We can't use lookbehind assertions because we have to support Safari + // So, instead, we've emulated it using capture groups and we'll re-work the matches to accommodate + matches = Array.from(matches).flatMap((match)=>{ + const [, ...groups] = match; + return groups.map((group, idx)=>Object.assign([], match, { + index: match.index + idx, + 0: group + })); + }); + for (let match of matches){ + let char = match[0]; + let inStringType = openStringTypes[openStringTypes.length - 1]; + if (char === inStringType) { + openStringTypes.pop(); + } else if (char === "'" || char === '"' || char === "`") { + openStringTypes.push(char); + } + if (inStringType) { + continue; + } else if (char === "[") { + depth++; + continue; + } else if (char === "]") { + depth--; + continue; + } + // We've gone one character past the point where we should stop + // This means that there was an extra closing `]` + // We'll clip to just before it + if (depth < 0) { + return input.substring(0, match.index - 1); + } + // We've finished balancing the brackets but there still may be characters that can be included + // For example in the class `text-[#336699]/[.35]` + // The depth goes to `0` at the closing `]` but goes up again at the `[` + // If we're at zero and encounter a non-class character then we clip the class there + if (depth === 0 && !ALLOWED_CLASS_CHARACTERS.test(char)) { + return input.substring(0, match.index); + } + } + return input; +} // Regular utilities + // {{modifier}:}*{namespace}{-{suffix}}*{/{opacityModifier}}? + // Arbitrary values + // {{modifier}:}*{namespace}-[{arbitraryValue}]{/{opacityModifier}}? 
+ // arbitraryValue: no whitespace, balanced quotes unless within quotes, balanced brackets unless within quotes + // Arbitrary properties + // {{modifier}:}*[{validCssPropertyName}:{arbitraryValue}] diff --git a/node_modules/tailwindcss/lib/lib/detectNesting.js b/node_modules/tailwindcss/lib/lib/detectNesting.js new file mode 100644 index 0000000..c6d0626 --- /dev/null +++ b/node_modules/tailwindcss/lib/lib/detectNesting.js @@ -0,0 +1,45 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { + value: true +}); +Object.defineProperty(exports, "default", { + enumerable: true, + get: function() { + return _default; + } +}); +function isRoot(node) { + return node.type === "root"; +} +function isAtLayer(node) { + return node.type === "atrule" && node.name === "layer"; +} +function _default(_context) { + return (root, result)=>{ + let found = false; + root.walkAtRules("tailwind", (node)=>{ + if (found) return false; + if (node.parent && !(isRoot(node.parent) || isAtLayer(node.parent))) { + found = true; + node.warn(result, [ + "Nested @tailwind rules were detected, but are not supported.", + "Consider using a prefix to scope Tailwind's classes: https://tailwindcss.com/docs/configuration#prefix", + "Alternatively, use the important selector strategy: https://tailwindcss.com/docs/configuration#selector-strategy" + ].join("\n")); + return false; + } + }); + root.walkRules((rule)=>{ + if (found) return false; + rule.walkRules((nestedRule)=>{ + found = true; + nestedRule.warn(result, [ + "Nested CSS was detected, but CSS nesting has not been configured correctly.", + "Please enable a CSS nesting plugin *before* Tailwind in your configuration.", + "See how here: https://tailwindcss.com/docs/using-with-preprocessors#nesting" + ].join("\n")); + return false; + }); + }); + }; +} diff --git a/node_modules/tailwindcss/lib/lib/evaluateTailwindFunctions.js b/node_modules/tailwindcss/lib/lib/evaluateTailwindFunctions.js new file mode 100644 index 0000000..4ed8766 --- /dev/null +++ b/node_modules/tailwindcss/lib/lib/evaluateTailwindFunctions.js @@ -0,0 +1,236 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { + value: true +}); +Object.defineProperty(exports, "default", { + enumerable: true, + get: function() { + return _default; + } +}); +const _dlv = /*#__PURE__*/ _interop_require_default(require("dlv")); +const _didyoumean = /*#__PURE__*/ _interop_require_default(require("didyoumean")); +const _transformThemeValue = /*#__PURE__*/ _interop_require_default(require("../util/transformThemeValue")); +const _postcssvalueparser = /*#__PURE__*/ _interop_require_default(require("postcss-value-parser")); +const _normalizeScreens = require("../util/normalizeScreens"); +const _buildMediaQuery = /*#__PURE__*/ _interop_require_default(require("../util/buildMediaQuery")); +const _toPath = require("../util/toPath"); +const _withAlphaVariable = require("../util/withAlphaVariable"); +const _pluginUtils = require("../util/pluginUtils"); +const _log = /*#__PURE__*/ _interop_require_default(require("../util/log")); +function _interop_require_default(obj) { + return obj && obj.__esModule ? obj : { + default: obj + }; +} +function isObject(input) { + return typeof input === "object" && input !== null; +} +function findClosestExistingPath(theme, path) { + let parts = (0, _toPath.toPath)(path); + do { + parts.pop(); + if ((0, _dlv.default)(theme, parts) !== undefined) break; + }while (parts.length); + return parts.length ? 
parts : undefined; +} +function pathToString(path) { + if (typeof path === "string") return path; + return path.reduce((acc, cur, i)=>{ + if (cur.includes(".")) return `${acc}[${cur}]`; + return i === 0 ? cur : `${acc}.${cur}`; + }, ""); +} +function list(items) { + return items.map((key)=>`'${key}'`).join(", "); +} +function listKeys(obj) { + return list(Object.keys(obj)); +} +function validatePath(config, path, defaultValue, themeOpts = {}) { + const pathString = Array.isArray(path) ? pathToString(path) : path.replace(/^['"]+|['"]+$/g, ""); + const pathSegments = Array.isArray(path) ? path : (0, _toPath.toPath)(pathString); + const value = (0, _dlv.default)(config.theme, pathSegments, defaultValue); + if (value === undefined) { + let error = `'${pathString}' does not exist in your theme config.`; + const parentSegments = pathSegments.slice(0, -1); + const parentValue = (0, _dlv.default)(config.theme, parentSegments); + if (isObject(parentValue)) { + const validKeys = Object.keys(parentValue).filter((key)=>validatePath(config, [ + ...parentSegments, + key + ]).isValid); + const suggestion = (0, _didyoumean.default)(pathSegments[pathSegments.length - 1], validKeys); + if (suggestion) { + error += ` Did you mean '${pathToString([ + ...parentSegments, + suggestion + ])}'?`; + } else if (validKeys.length > 0) { + error += ` '${pathToString(parentSegments)}' has the following valid keys: ${list(validKeys)}`; + } + } else { + const closestPath = findClosestExistingPath(config.theme, pathString); + if (closestPath) { + const closestValue = (0, _dlv.default)(config.theme, closestPath); + if (isObject(closestValue)) { + error += ` '${pathToString(closestPath)}' has the following keys: ${listKeys(closestValue)}`; + } else { + error += ` '${pathToString(closestPath)}' is not an object.`; + } + } else { + error += ` Your theme has the following top-level keys: ${listKeys(config.theme)}`; + } + } + return { + isValid: false, + error + }; + } + if (!(typeof value === "string" || typeof value === "number" || typeof value === "function" || value instanceof String || value instanceof Number || Array.isArray(value))) { + let error = `'${pathString}' was found but does not resolve to a string.`; + if (isObject(value)) { + let validKeys = Object.keys(value).filter((key)=>validatePath(config, [ + ...pathSegments, + key + ]).isValid); + if (validKeys.length) { + error += ` Did you mean something like '${pathToString([ + ...pathSegments, + validKeys[0] + ])}'?`; + } + } + return { + isValid: false, + error + }; + } + const [themeSection] = pathSegments; + return { + isValid: true, + value: (0, _transformThemeValue.default)(themeSection)(value, themeOpts) + }; +} +function extractArgs(node, vNodes, functions) { + vNodes = vNodes.map((vNode)=>resolveVNode(node, vNode, functions)); + let args = [ + "" + ]; + for (let vNode of vNodes){ + if (vNode.type === "div" && vNode.value === ",") { + args.push(""); + } else { + args[args.length - 1] += _postcssvalueparser.default.stringify(vNode); + } + } + return args; +} +function resolveVNode(node, vNode, functions) { + if (vNode.type === "function" && functions[vNode.value] !== undefined) { + let args = extractArgs(node, vNode.nodes, functions); + vNode.type = "word"; + vNode.value = functions[vNode.value](node, ...args); + } + return vNode; +} +function resolveFunctions(node, input, functions) { + return (0, _postcssvalueparser.default)(input).walk((vNode)=>{ + resolveVNode(node, vNode, functions); + }).toString(); +} +let nodeTypePropertyMap = { + atrule: "params", + 
decl: "value" +}; +/** + * @param {string} path + * @returns {Iterable<[path: string, alpha: string|undefined]>} + */ function* toPaths(path) { + // Strip quotes from beginning and end of string + // This allows the alpha value to be present inside of quotes + path = path.replace(/^['"]+|['"]+$/g, ""); + let matches = path.match(/^([^\s]+)(?![^\[]*\])(?:\s*\/\s*([^\/\s]+))$/); + let alpha = undefined; + yield [ + path, + undefined + ]; + if (matches) { + path = matches[1]; + alpha = matches[2]; + yield [ + path, + alpha + ]; + } +} +/** + * + * @param {any} config + * @param {string} path + * @param {any} defaultValue + */ function resolvePath(config, path, defaultValue) { + const results = Array.from(toPaths(path)).map(([path, alpha])=>{ + return Object.assign(validatePath(config, path, defaultValue, { + opacityValue: alpha + }), { + resolvedPath: path, + alpha + }); + }); + var _results_find; + return (_results_find = results.find((result)=>result.isValid)) !== null && _results_find !== void 0 ? _results_find : results[0]; +} +function _default(context) { + let config = context.tailwindConfig; + let functions = { + theme: (node, path, ...defaultValue)=>{ + let { isValid , value , error , alpha } = resolvePath(config, path, defaultValue.length ? defaultValue : undefined); + if (!isValid) { + var _parentNode_raws_tailwind; + let parentNode = node.parent; + let candidate = (_parentNode_raws_tailwind = parentNode === null || parentNode === void 0 ? void 0 : parentNode.raws.tailwind) === null || _parentNode_raws_tailwind === void 0 ? void 0 : _parentNode_raws_tailwind.candidate; + if (parentNode && candidate !== undefined) { + // Remove this utility from any caches + context.markInvalidUtilityNode(parentNode); + // Remove the CSS node from the markup + parentNode.remove(); + // Show a warning + _log.default.warn("invalid-theme-key-in-class", [ + `The utility \`${candidate}\` contains an invalid theme value and was not generated.` + ]); + return; + } + throw node.error(error); + } + let maybeColor = (0, _pluginUtils.parseColorFormat)(value); + let isColorFunction = maybeColor !== undefined && typeof maybeColor === "function"; + if (alpha !== undefined || isColorFunction) { + if (alpha === undefined) { + alpha = 1.0; + } + value = (0, _withAlphaVariable.withAlphaValue)(maybeColor, alpha, maybeColor); + } + return value; + }, + screen: (node, screen)=>{ + screen = screen.replace(/^['"]+/g, "").replace(/['"]+$/g, ""); + let screens = (0, _normalizeScreens.normalizeScreens)(config.theme.screens); + let screenDefinition = screens.find(({ name })=>name === screen); + if (!screenDefinition) { + throw node.error(`The '${screen}' screen does not exist in your theme.`); + } + return (0, _buildMediaQuery.default)(screenDefinition); + } + }; + return (root)=>{ + root.walk((node)=>{ + let property = nodeTypePropertyMap[node.type]; + if (property === undefined) { + return; + } + node[property] = resolveFunctions(node, node[property], functions); + }); + }; +} diff --git a/node_modules/tailwindcss/lib/lib/expandApplyAtRules.js b/node_modules/tailwindcss/lib/lib/expandApplyAtRules.js new file mode 100644 index 0000000..19a2c5a --- /dev/null +++ b/node_modules/tailwindcss/lib/lib/expandApplyAtRules.js @@ -0,0 +1,534 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { + value: true +}); +Object.defineProperty(exports, "default", { + enumerable: true, + get: function() { + return expandApplyAtRules; + } +}); +const _postcss = /*#__PURE__*/ _interop_require_default(require("postcss")); +const 
_postcssselectorparser = /*#__PURE__*/ _interop_require_default(require("postcss-selector-parser")); +const _generateRules = require("./generateRules"); +const _escapeClassName = /*#__PURE__*/ _interop_require_default(require("../util/escapeClassName")); +const _applyImportantSelector = require("../util/applyImportantSelector"); +const _pseudoElements = require("../util/pseudoElements"); +function _interop_require_default(obj) { + return obj && obj.__esModule ? obj : { + default: obj + }; +} +/** @typedef {Map} ApplyCache */ function extractClasses(node) { + /** @type {Map>} */ let groups = new Map(); + let container = _postcss.default.root({ + nodes: [ + node.clone() + ] + }); + container.walkRules((rule)=>{ + (0, _postcssselectorparser.default)((selectors)=>{ + selectors.walkClasses((classSelector)=>{ + let parentSelector = classSelector.parent.toString(); + let classes = groups.get(parentSelector); + if (!classes) { + groups.set(parentSelector, classes = new Set()); + } + classes.add(classSelector.value); + }); + }).processSync(rule.selector); + }); + let normalizedGroups = Array.from(groups.values(), (classes)=>Array.from(classes)); + let classes = normalizedGroups.flat(); + return Object.assign(classes, { + groups: normalizedGroups + }); +} +let selectorExtractor = (0, _postcssselectorparser.default)(); +/** + * @param {string} ruleSelectors + */ function extractSelectors(ruleSelectors) { + return selectorExtractor.astSync(ruleSelectors); +} +function extractBaseCandidates(candidates, separator) { + let baseClasses = new Set(); + for (let candidate of candidates){ + baseClasses.add(candidate.split(separator).pop()); + } + return Array.from(baseClasses); +} +function prefix(context, selector) { + let prefix = context.tailwindConfig.prefix; + return typeof prefix === "function" ? prefix(selector) : prefix + selector; +} +function* pathToRoot(node) { + yield node; + while(node.parent){ + yield node.parent; + node = node.parent; + } +} +/** + * Only clone the node itself and not its children + * + * @param {*} node + * @param {*} overrides + * @returns + */ function shallowClone(node, overrides = {}) { + let children = node.nodes; + node.nodes = []; + let tmp = node.clone(overrides); + node.nodes = children; + return tmp; +} +/** + * Clone just the nodes all the way to the top that are required to represent + * this singular rule in the tree. 
+ * + * For example, if we have CSS like this: + * ```css + * @media (min-width: 768px) { + * @supports (display: grid) { + * .foo { + * display: grid; + * grid-template-columns: repeat(auto-fit, minmax(200px, 1fr)); + * } + * } + * + * @supports (backdrop-filter: blur(1px)) { + * .bar { + * backdrop-filter: blur(1px); + * } + * } + * + * .baz { + * color: orange; + * } + * } + * ``` + * + * And we're cloning `.bar` it'll return a cloned version of what's required for just that single node: + * + * ```css + * @media (min-width: 768px) { + * @supports (backdrop-filter: blur(1px)) { + * .bar { + * backdrop-filter: blur(1px); + * } + * } + * } + * ``` + * + * @param {import('postcss').Node} node + */ function nestedClone(node) { + for (let parent of pathToRoot(node)){ + if (node === parent) { + continue; + } + if (parent.type === "root") { + break; + } + node = shallowClone(parent, { + nodes: [ + node + ] + }); + } + return node; +} +/** + * @param {import('postcss').Root} root + */ function buildLocalApplyCache(root, context) { + /** @type {ApplyCache} */ let cache = new Map(); + root.walkRules((rule)=>{ + // Ignore rules generated by Tailwind + for (let node of pathToRoot(rule)){ + var _node_raws_tailwind; + if (((_node_raws_tailwind = node.raws.tailwind) === null || _node_raws_tailwind === void 0 ? void 0 : _node_raws_tailwind.layer) !== undefined) { + return; + } + } + // Clone what's required to represent this singular rule in the tree + let container = nestedClone(rule); + let sort = context.offsets.create("user"); + for (let className of extractClasses(rule)){ + let list = cache.get(className) || []; + cache.set(className, list); + list.push([ + { + layer: "user", + sort, + important: false + }, + container + ]); + } + }); + return cache; +} +/** + * @returns {ApplyCache} + */ function buildApplyCache(applyCandidates, context) { + for (let candidate of applyCandidates){ + if (context.notClassCache.has(candidate) || context.applyClassCache.has(candidate)) { + continue; + } + if (context.classCache.has(candidate)) { + context.applyClassCache.set(candidate, context.classCache.get(candidate).map(([meta, rule])=>[ + meta, + rule.clone() + ])); + continue; + } + let matches = Array.from((0, _generateRules.resolveMatches)(candidate, context)); + if (matches.length === 0) { + context.notClassCache.add(candidate); + continue; + } + context.applyClassCache.set(candidate, matches); + } + return context.applyClassCache; +} +/** + * Build a cache only when it's first used + * + * @param {() => ApplyCache} buildCacheFn + * @returns {ApplyCache} + */ function lazyCache(buildCacheFn) { + let cache = null; + return { + get: (name)=>{ + cache = cache || buildCacheFn(); + return cache.get(name); + }, + has: (name)=>{ + cache = cache || buildCacheFn(); + return cache.has(name); + } + }; +} +/** + * Take a series of multiple caches and merge + * them so they act like one large cache + * + * @param {ApplyCache[]} caches + * @returns {ApplyCache} + */ function combineCaches(caches) { + return { + get: (name)=>caches.flatMap((cache)=>cache.get(name) || []), + has: (name)=>caches.some((cache)=>cache.has(name)) + }; +} +function extractApplyCandidates(params) { + let candidates = params.split(/[\s\t\n]+/g); + if (candidates[candidates.length - 1] === "!important") { + return [ + candidates.slice(0, -1), + true + ]; + } + return [ + candidates, + false + ]; +} +function processApply(root, context, localCache) { + let applyCandidates = new Set(); + // Collect all @apply rules and candidates + let applies = []; + 
root.walkAtRules("apply", (rule)=>{ + let [candidates] = extractApplyCandidates(rule.params); + for (let util of candidates){ + applyCandidates.add(util); + } + applies.push(rule); + }); + // Start the @apply process if we have rules with @apply in them + if (applies.length === 0) { + return; + } + // Fill up some caches! + let applyClassCache = combineCaches([ + localCache, + buildApplyCache(applyCandidates, context) + ]); + /** + * When we have an apply like this: + * + * .abc { + * @apply hover:font-bold; + * } + * + * What we essentially will do is resolve to this: + * + * .abc { + * @apply .hover\:font-bold:hover { + * font-weight: 500; + * } + * } + * + * Notice that the to-be-applied class is `.hover\:font-bold:hover` and that the utility candidate was `hover:font-bold`. + * What happens in this function is that we prepend a `.` and escape the candidate. + * This will result in `.hover\:font-bold` + * Which means that we can replace `.hover\:font-bold` with `.abc` in `.hover\:font-bold:hover` resulting in `.abc:hover` + * + * @param {string} selector + * @param {string} utilitySelectors + * @param {string} candidate + */ function replaceSelector(selector, utilitySelectors, candidate) { + let selectorList = extractSelectors(selector); + let utilitySelectorsList = extractSelectors(utilitySelectors); + let candidateList = extractSelectors(`.${(0, _escapeClassName.default)(candidate)}`); + let candidateClass = candidateList.nodes[0].nodes[0]; + selectorList.each((sel)=>{ + /** @type {Set} */ let replaced = new Set(); + utilitySelectorsList.each((utilitySelector)=>{ + let hasReplaced = false; + utilitySelector = utilitySelector.clone(); + utilitySelector.walkClasses((node)=>{ + if (node.value !== candidateClass.value) { + return; + } + // Don't replace multiple instances of the same class + // This is theoretically correct but only partially + // We'd need to generate every possible permutation of the replacement + // For example with `.foo + .foo { … }` and `section { @apply foo; }` + // We'd need to generate all of these: + // - `.foo + .foo` + // - `.foo + section` + // - `section + .foo` + // - `section + section` + if (hasReplaced) { + return; + } + // Since you can only `@apply` class names this is sufficient + // We want to replace the matched class name with the selector the user is using + // Ex: Replace `.text-blue-500` with `.foo.bar:is(.something-cool)` + node.replaceWith(...sel.nodes.map((node)=>node.clone())); + // Record that we did something and we want to use this new selector + replaced.add(utilitySelector); + hasReplaced = true; + }); + }); + // Sort tag names before class names (but only sort each group (separated by a combinator) + // separately and not in total) + // This happens when replacing `.bar` in `.foo.bar` with a tag like `section` + for (let sel of replaced){ + let groups = [ + [] + ]; + for (let node of sel.nodes){ + if (node.type === "combinator") { + groups.push(node); + groups.push([]); + } else { + let last = groups[groups.length - 1]; + last.push(node); + } + } + sel.nodes = []; + for (let group of groups){ + if (Array.isArray(group)) { + group.sort((a, b)=>{ + if (a.type === "tag" && b.type === "class") { + return -1; + } else if (a.type === "class" && b.type === "tag") { + return 1; + } else if (a.type === "class" && b.type === "pseudo" && b.value.startsWith("::")) { + return -1; + } else if (a.type === "pseudo" && a.value.startsWith("::") && b.type === "class") { + return 1; + } + return 0; + }); + } + sel.nodes = sel.nodes.concat(group); + } + } 
+ sel.replaceWith(...replaced); + }); + return selectorList.toString(); + } + let perParentApplies = new Map(); + // Collect all apply candidates and their rules + for (let apply of applies){ + let [candidates] = perParentApplies.get(apply.parent) || [ + [], + apply.source + ]; + perParentApplies.set(apply.parent, [ + candidates, + apply.source + ]); + let [applyCandidates, important] = extractApplyCandidates(apply.params); + if (apply.parent.type === "atrule") { + if (apply.parent.name === "screen") { + let screenType = apply.parent.params; + throw apply.error(`@apply is not supported within nested at-rules like @screen. We suggest you write this as @apply ${applyCandidates.map((c)=>`${screenType}:${c}`).join(" ")} instead.`); + } + throw apply.error(`@apply is not supported within nested at-rules like @${apply.parent.name}. You can fix this by un-nesting @${apply.parent.name}.`); + } + for (let applyCandidate of applyCandidates){ + if ([ + prefix(context, "group"), + prefix(context, "peer") + ].includes(applyCandidate)) { + // TODO: Link to specific documentation page with error code. + throw apply.error(`@apply should not be used with the '${applyCandidate}' utility`); + } + if (!applyClassCache.has(applyCandidate)) { + throw apply.error(`The \`${applyCandidate}\` class does not exist. If \`${applyCandidate}\` is a custom class, make sure it is defined within a \`@layer\` directive.`); + } + let rules = applyClassCache.get(applyCandidate); + candidates.push([ + applyCandidate, + important, + rules + ]); + } + } + for (let [parent, [candidates, atApplySource]] of perParentApplies){ + let siblings = []; + for (let [applyCandidate, important, rules] of candidates){ + let potentialApplyCandidates = [ + applyCandidate, + ...extractBaseCandidates([ + applyCandidate + ], context.tailwindConfig.separator) + ]; + for (let [meta, node] of rules){ + let parentClasses = extractClasses(parent); + let nodeClasses = extractClasses(node); + // When we encounter a rule like `.dark .a, .b { … }` we only want to be left with `[.dark, .a]` if the base applyCandidate is `.a` or with `[.b]` if the base applyCandidate is `.b` + // So we've split them into groups + nodeClasses = nodeClasses.groups.filter((classList)=>classList.some((className)=>potentialApplyCandidates.includes(className))).flat(); + // Add base utility classes from the @apply node to the list of + // classes to check whether it intersects and therefore results in a + // circular dependency or not. + // + // E.g.: + // .foo { + // @apply hover:a; // This applies "a" but with a modifier + // } + // + // We only have to do that with base classes of the `node`, not of the `parent` + // E.g.: + // .hover\:foo { + // @apply bar; + // } + // .bar { + // @apply foo; + // } + // + // This should not result in a circular dependency because we are + // just applying `.foo` and the rule above is `.hover\:foo` which is + // unrelated. However, if we were to apply `hover:foo` then we _did_ + // have to include this one. 
+ nodeClasses = nodeClasses.concat(extractBaseCandidates(nodeClasses, context.tailwindConfig.separator)); + let intersects = parentClasses.some((selector)=>nodeClasses.includes(selector)); + if (intersects) { + throw node.error(`You cannot \`@apply\` the \`${applyCandidate}\` utility here because it creates a circular dependency.`); + } + let root = _postcss.default.root({ + nodes: [ + node.clone() + ] + }); + // Make sure every node in the entire tree points back at the @apply rule that generated it + root.walk((node)=>{ + node.source = atApplySource; + }); + let canRewriteSelector = node.type !== "atrule" || node.type === "atrule" && node.name !== "keyframes"; + if (canRewriteSelector) { + root.walkRules((rule)=>{ + // Let's imagine you have the following structure: + // + // .foo { + // @apply bar; + // } + // + // @supports (a: b) { + // .bar { + // color: blue + // } + // + // .something-unrelated {} + // } + // + // In this case we want to apply `.bar` but it happens to be in + // an atrule node. We clone that node instead of the nested one + // because we still want that @supports rule to be there once we + // applied everything. + // + // However it happens to be that the `.something-unrelated` is + // also in that same shared @supports atrule. This is not good, + // and this should not be there. The good part is that this is + // a clone already and it can be safely removed. The question is + // how do we know we can remove it. Basically what we can do is + // match it against the applyCandidate that you want to apply. If + // it doesn't match the we can safely delete it. + // + // If we didn't do this, then the `replaceSelector` function + // would have replaced this with something that didn't exist and + // therefore it removed the selector altogether. In this specific + // case it would result in `{}` instead of `.something-unrelated {}` + if (!extractClasses(rule).some((candidate)=>candidate === applyCandidate)) { + rule.remove(); + return; + } + // Strip the important selector from the parent selector if at the beginning + let importantSelector = typeof context.tailwindConfig.important === "string" ? context.tailwindConfig.important : null; + // We only want to move the "important" selector if this is a Tailwind-generated utility + // We do *not* want to do this for user CSS that happens to be structured the same + let isGenerated = parent.raws.tailwind !== undefined; + let parentSelector = isGenerated && importantSelector && parent.selector.indexOf(importantSelector) === 0 ? 
parent.selector.slice(importantSelector.length) : parent.selector; + rule.selector = replaceSelector(parentSelector, rule.selector, applyCandidate); + // And then re-add it if it was removed + if (importantSelector && parentSelector !== parent.selector) { + rule.selector = (0, _applyImportantSelector.applyImportantSelector)(rule.selector, importantSelector); + } + rule.walkDecls((d)=>{ + d.important = meta.important || important; + }); + // Move pseudo elements to the end of the selector (if necessary) + let selector = (0, _postcssselectorparser.default)().astSync(rule.selector); + selector.each((sel)=>(0, _pseudoElements.movePseudos)(sel)); + rule.selector = selector.toString(); + }); + } + // It could be that the node we were inserted was removed because the class didn't match + // If that was the *only* rule in the parent, then we have nothing add so we skip it + if (!root.nodes[0]) { + continue; + } + // Insert it + siblings.push([ + meta.sort, + root.nodes[0] + ]); + } + } + // Inject the rules, sorted, correctly + let nodes = context.offsets.sort(siblings).map((s)=>s[1]); + // `parent` refers to the node at `.abc` in: .abc { @apply mt-2 } + parent.after(nodes); + } + for (let apply of applies){ + // If there are left-over declarations, just remove the @apply + if (apply.parent.nodes.length > 1) { + apply.remove(); + } else { + // The node is empty, drop the full node + apply.parent.remove(); + } + } + // Do it again, in case we have other `@apply` rules + processApply(root, context, localCache); +} +function expandApplyAtRules(context) { + return (root)=>{ + // Build a cache of the user's CSS so we can use it to resolve classes used by @apply + let localCache = lazyCache(()=>buildLocalApplyCache(root, context)); + processApply(root, context, localCache); + }; +} diff --git a/node_modules/tailwindcss/lib/lib/expandTailwindAtRules.js b/node_modules/tailwindcss/lib/lib/expandTailwindAtRules.js new file mode 100644 index 0000000..34ea144 --- /dev/null +++ b/node_modules/tailwindcss/lib/lib/expandTailwindAtRules.js @@ -0,0 +1,274 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { + value: true +}); +Object.defineProperty(exports, "default", { + enumerable: true, + get: function() { + return expandTailwindAtRules; + } +}); +const _fs = /*#__PURE__*/ _interop_require_default(require("fs")); +const _quicklru = /*#__PURE__*/ _interop_require_default(require("@alloc/quick-lru")); +const _sharedState = /*#__PURE__*/ _interop_require_wildcard(require("./sharedState")); +const _generateRules = require("./generateRules"); +const _log = /*#__PURE__*/ _interop_require_default(require("../util/log")); +const _cloneNodes = /*#__PURE__*/ _interop_require_default(require("../util/cloneNodes")); +const _defaultExtractor = require("./defaultExtractor"); +function _interop_require_default(obj) { + return obj && obj.__esModule ? obj : { + default: obj + }; +} +function _getRequireWildcardCache(nodeInterop) { + if (typeof WeakMap !== "function") return null; + var cacheBabelInterop = new WeakMap(); + var cacheNodeInterop = new WeakMap(); + return (_getRequireWildcardCache = function(nodeInterop) { + return nodeInterop ? 
cacheNodeInterop : cacheBabelInterop; + })(nodeInterop); +} +function _interop_require_wildcard(obj, nodeInterop) { + if (!nodeInterop && obj && obj.__esModule) { + return obj; + } + if (obj === null || typeof obj !== "object" && typeof obj !== "function") { + return { + default: obj + }; + } + var cache = _getRequireWildcardCache(nodeInterop); + if (cache && cache.has(obj)) { + return cache.get(obj); + } + var newObj = {}; + var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor; + for(var key in obj){ + if (key !== "default" && Object.prototype.hasOwnProperty.call(obj, key)) { + var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null; + if (desc && (desc.get || desc.set)) { + Object.defineProperty(newObj, key, desc); + } else { + newObj[key] = obj[key]; + } + } + } + newObj.default = obj; + if (cache) { + cache.set(obj, newObj); + } + return newObj; +} +let env = _sharedState.env; +const builtInExtractors = { + DEFAULT: _defaultExtractor.defaultExtractor +}; +const builtInTransformers = { + DEFAULT: (content)=>content, + svelte: (content)=>content.replace(/(?:^|\s)class:/g, " ") +}; +function getExtractor(context, fileExtension) { + let extractors = context.tailwindConfig.content.extract; + return extractors[fileExtension] || extractors.DEFAULT || builtInExtractors[fileExtension] || builtInExtractors.DEFAULT(context); +} +function getTransformer(tailwindConfig, fileExtension) { + let transformers = tailwindConfig.content.transform; + return transformers[fileExtension] || transformers.DEFAULT || builtInTransformers[fileExtension] || builtInTransformers.DEFAULT; +} +let extractorCache = new WeakMap(); +// Scans template contents for possible classes. This is a hot path on initial build but +// not too important for subsequent builds. The faster the better though — if we can speed +// up these regexes by 50% that could cut initial build time by like 20%. +function getClassCandidates(content, extractor, candidates, seen) { + if (!extractorCache.has(extractor)) { + extractorCache.set(extractor, new _quicklru.default({ + maxSize: 25000 + })); + } + for (let line of content.split("\n")){ + line = line.trim(); + if (seen.has(line)) { + continue; + } + seen.add(line); + if (extractorCache.get(extractor).has(line)) { + for (let match of extractorCache.get(extractor).get(line)){ + candidates.add(match); + } + } else { + let extractorMatches = extractor(line).filter((s)=>s !== "!*"); + let lineMatchesSet = new Set(extractorMatches); + for (let match of lineMatchesSet){ + candidates.add(match); + } + extractorCache.get(extractor).set(line, lineMatchesSet); + } + } +} +/** + * + * @param {[import('./offsets.js').RuleOffset, import('postcss').Node][]} rules + * @param {*} context + */ function buildStylesheet(rules, context) { + let sortedRules = context.offsets.sort(rules); + let returnValue = { + base: new Set(), + defaults: new Set(), + components: new Set(), + utilities: new Set(), + variants: new Set() + }; + for (let [sort, rule] of sortedRules){ + returnValue[sort.layer].add(rule); + } + return returnValue; +} +function expandTailwindAtRules(context) { + return (root)=>{ + let layerNodes = { + base: null, + components: null, + utilities: null, + variants: null + }; + root.walkAtRules((rule)=>{ + // Make sure this file contains Tailwind directives. If not, we can save + // a lot of work and bail early. 
Also we don't have to register our touch + // file as a dependency since the output of this CSS does not depend on + // the source of any templates. Think Vue -

[The remainder of the node_modules/tailwindcss/lib/lib/expandTailwindAtRules.js hunk was lost during extraction, along with an HTML page diff whose markup was stripped; the only surviving text of that page is "OOPS! Page not found".]
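The `getExtractor`/`getTransformer` helpers in expandTailwindAtRules.js above resolve a per-file-extension function from the `content` config, falling back to a `DEFAULT` entry and finally to the built-ins. A minimal sketch of that configuration surface, for orientation only — the `pug`/`md` entries and their regexes are purely illustrative and are not part of this project's config:

```js
// tailwind.config.js (hypothetical fragment) — per-extension extract/transform hooks
module.exports = {
  content: {
    files: ["public/*.{html,js}"],
    extract: {
      // Used instead of the DEFAULT extractor for *.pug templates.
      pug: (content) => content.match(/[^<>"'`\s]+/g) || [],
    },
    transform: {
      // Pre-process *.md content before extraction (e.g. drop fenced code blocks),
      // analogous to the built-in `svelte` transformer that strips `class:` directives.
      md: (content) => content.replace(/```[\s\S]*?```/g, ""),
    },
  },
};
```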

\ No newline at end of file diff --git a/public/index.html index 17bc20d..d75f7e0 100644 --- a/public/index.html +++ b/public/index.html @@ -1,43 +1,64 @@
[The public/index.html hunk was garbled during extraction — its markup was stripped. The surviving text comprises the "GLink" page title, the "Link Shortener" and "Hello There!" headings, and the labels, colons, and required-field markers ("*") for the form inputs.]
diff --git a/public/src/index.js b/public/src/index.js index 4f5753d..6352cf7 100644 --- a/public/src/index.js +++ b/public/src/index.js @@ -30,21 +30,10 @@ function valueRequested() { } function checkboxCallback(event) { - const radiusLabel = document.getElementById("radiusLabel"); - const radiusSelect = document.getElementById("radiusSelect"); - const mandatoryRadius = document.getElementById("mandatory-radius"); if (event.currentTarget.checked) { - radiusLabel.hidden = false; - mandatoryRadius.hidden = false; - radiusSelect.hidden = false; - radiusSelect.required = true; + document.getElementById("radiusSelect").required = true; valueRequested(); getLocation(); - } else { - radiusLabel.hidden = true; - mandatoryRadius.hidden = true; - radiusSelect.hidden = true; - radiusSelect.required = false; } } @@ -68,108 +57,108 @@ function showPosition(position) { } // submit.addEventListener('click', validate); - function validate() { - //e.preventDefault(); - const checked = document.getElementById("restricted"); - if (checked.checked) { - const lat = document.getElementById("latitude"); - const long = document.getElementById("longitude"); - if (lat.value === "" || long.value === "") { - /* wait */ - return false - } +function validate() { + //e.preventDefault(); + const checked = document.getElementById("restricted"); + if (checked.checked) { + const lat = document.getElementById("latitude"); + const long = document.getElementById("longitude"); + if (lat.value === "" || long.value === "") { + /* wait */ + return false } + } - const url = document.getElementById("URL"); - let glink = document.getElementById("GLink"); - const error = document.getElementById("error"); - // if (!url) { - // /* Flag */ - // } - let valid = true; - const domainExp = new RegExp("^http(s)*:\\/\\/[a-zA-Z0-9\\-]+(\\.[a-zA-Z0-9\\-]+)+$"); - const filepathExp = new RegExp("^[a-zA-Z]+$"); - const glinkExp = new RegExp("^[a-zA-Z]*$"); - let glinkStr = glink.value; - let count = 0; - let index = -1; - let domain = ""; - let filepath = ""; - for (let i = 0; i < url.value.length; i++) { - if (url.value.charAt(i) == '/') { - count++; - } - if (count == 3) { - index = i; - break; - } + const url = document.getElementById("URL"); + let glink = document.getElementById("GLink"); + const error = document.getElementById("error"); + // if (!url) { + // /* Flag */ + // } + let valid = true; + const domainExp = new RegExp("^http(s)*:\\/\\/[a-zA-Z0-9\\-]+(\\.[a-zA-Z0-9\\-]+)+$"); + const filepathExp = new RegExp("^[a-zA-Z]+$"); + const glinkExp = new RegExp("^[a-zA-Z]*$"); + let glinkStr = glink.value; + let count = 0; + let index = -1; + let domain = ""; + let filepath = ""; + for (let i = 0; i < url.value.length; i++) { + if (url.value.charAt(i) == '/') { + count++; } - if (count >= 3) { - domain = url.value.substring(0, index); - if (index == url.value.length - 1) { - filepath = url.value.charAt(index); - } else { - filepath = url.value.substring(index, url.value.length - 1); - } - alert("Domain is " + domain + " filepath is " + filepath); + if (count == 3) { + index = i; + break; + } + } + if (count >= 3) { + domain = url.value.substring(0, index); + if (index == url.value.length - 1) { + filepath = url.value.charAt(index); } else { - domain = url.value; + filepath = url.value.substring(index, url.value.length - 1); } - console.log(domain); + alert("Domain is " + domain + " filepath is " + filepath); + } else { + domain = url.value; + } + console.log(domain); - if (glinkStr === "") { - var result = window.confirm("You have left the 
glink field blank. A random one will be generated for you."); - if (result === false) { - return false; - } + if (glinkStr === "") { + var result = window.confirm("You have left the glink field blank. A random one will be generated for you."); + if (result === false) { + return false; } + } - if (domain.match(domainExp) && glinkStr.match(glinkExp))/** and is available? */{ - if (error.classList.contains("visible")) { - error.classList.remove("visible"); + if (domain.match(domainExp) && glinkStr.match(glinkExp))/** and is available? */{ + if (error.classList.contains("visible")) { + error.classList.remove("visible"); + } + if (url.classList.contains("invalid")) { + url.classList.remove("invalid"); + } + url.classList.add("valid"); + if (glink.classList.contains("invalid")) { + glink.classList.remove("invalid"); + } + glink.classList.add("valid"); + error.setAttribute('aria-hidden', true); + error.setAttribute('aria-invalid', false); + console.log("Valid with url= " +url.value+ " glink=" +glink); + return valid; + } else { + /*flag*/ + error.classList.add("visible"); + + //error.classList.add("hidden"); + if (!domain.match(domainExp)) { + if (url.classList.contains("valid")) { + url.classList.remove("valid"); } + url.classList.add("invalid"); + } else { if (url.classList.contains("invalid")) { url.classList.remove("invalid"); } url.classList.add("valid"); + } + if (!glinkStr.match(glinkExp)) { + if (glink.classList.contains("valid")) { + glink.classList.remove("valid"); + } + glink.classList.add("invalid"); + } else { if (glink.classList.contains("invalid")) { glink.classList.remove("invalid"); } glink.classList.add("valid"); - error.setAttribute('aria-hidden', true); - error.setAttribute('aria-invalid', false); - console.log("Valid with url= " +url.value+ " glink=" +glink); - return valid; - } else { - /*flag*/ - error.classList.add("visible"); - - //error.classList.add("hidden"); - if (!domain.match(domainExp)) { - if (url.classList.contains("valid")) { - url.classList.remove("valid"); - } - url.classList.add("invalid"); - } else { - if (url.classList.contains("invalid")) { - url.classList.remove("invalid"); - } - url.classList.add("valid"); - } - if (!glinkStr.match(glinkExp)) { - if (glink.classList.contains("valid")) { - glink.classList.remove("valid"); - } - glink.classList.add("invalid"); - } else { - if (glink.classList.contains("invalid")) { - glink.classList.remove("invalid"); - } - glink.classList.add("valid"); - } - error.setAttribute('aria-hidden', false); - error.setAttribute('aria-invalid', true); - return false; } + error.setAttribute('aria-hidden', false); + error.setAttribute('aria-invalid', true); + return false; + } } diff --git a/public/src/redirect.js b/public/src/redirect.js new file mode 100644 index 0000000..ebac737 --- /dev/null +++ b/public/src/redirect.js @@ -0,0 +1,17 @@ + +let latitude = document.getElementById("latitude"); +let longitude = document.getElementById("longitude"); +console.log("entered"); +latitude.setValue = function (newValue) { + this.value = newValue; + document.getElementById("form").submit(); +} +if (navigator.geolocation) { + navigator.geolocation.getCurrentPosition(function (position) { + longitude.value = position.coords.longitude; + latitude.setValue(position.coords.latitude); + }); +} else { + console.log("Your browser does not support geolocation."); +} + diff --git a/result.js b/result.js index 6de0c5d..3095273 100644 --- a/result.js +++ b/result.js @@ -1,5 +1,4 @@ const path = require('path'); -//const http = require('http'); const 
express = require('express'); const app = express(); const staticPath = path.join(__dirname, "/public"); @@ -10,9 +9,9 @@ const client = new cassandra.Client({ localDataCenter: 'datacenter1', keyspace: 'glink' }); -//var server = http.createServer(app); let id = 1; /* Ideally should initialize id to be nextFromDB or write to file and read */ const port = 63342; // Port that the server listens on */ +const RADIUS_OF_EARTH_IN_MILES = 3958.7614580848; app.use( bodyParser.json() ); app.use(bodyParser.urlencoded({ @@ -62,9 +61,6 @@ function filter(path) { if (path.charAt(0) === '/') { path = path.substring(1); } - if (path.charAt(path.length - 1) === '/' && path.length > 1) { - path = path.substring(0, path.length - 1); - } return path; } else { console.log("failed check. path is " + path); @@ -72,49 +68,74 @@ function filter(path) { } } -app.get('/', (request, response) => { - response.render("index.html"); - }) +function calculateDistance(lat1, lat2, long1, long2) { + console.log(lat1 + " " + lat2 + " " + long1 + " " + long2); + lat1 = lat1 * (Math.PI / 180); + lat2 = lat2 * (Math.PI / 180); + long1 = long1 * (Math.PI / 180); + long2 = long2 * (Math.PI / 180); + + /* 2 asin(((lat2 - lat1)/2) ^ 2)*/ + return (2 * Math.asin(Math.sqrt(Math.pow(Math.sin((lat2 - lat1)/2), 2) + Math.pow(Math.sin((long2 - long1)/2), 2) * Math.cos(lat1) * Math.cos(lat2))) * RADIUS_OF_EARTH_IN_MILES); +} +// app.get('/', (request, response) => { +// response.render("index.html"); +// }) app.get('/node_modules/', (request, response) => { - response.sendFile(path.join(staticPath, "/error.html")); + response.redirect("./error.html"); }) app.get('/public/', (request, response) => { - response.sendFile(path.join(staticPath, "/error.html")); + response.redirect("./error.html"); }) -const query = "INSERT INTO data (id, url, glink) VALUES (?, ?, ?)"; +const query = "INSERT INTO data (id, url, glink, time, isGeo, radius, latitude, longitude) VALUES (?, ?, ?, toTimestamp(now()), ?, ?, ?, ?)"; -app.post('/add', function(req, res) { +app.post('/__add', function(req, res) { let input_url = req.body.url; let input_glink = req.body.glink; let input_checkbox = req.body.restricted; let input_radius = req.body.radiusSelect; let input_latitude = req.body.latitude; let input_longitude = req.body.longitude; - console.log("Received query " + input_url + " and " + input_glink); - let currID = nextId(); + let geoBool = false; + let geoString = "off"; + //console.log("Received query " + input_url + " and " + input_glink); + if (input_checkbox) { + geoString = "on"; + geoBool = true; + } else { + input_radius = null; + input_latitude = null; + input_longitude = null; + } + if (input_glink === "") { input_glink = getRandomGLink(); - client.execute(query, [currID, input_url, input_glink], {prepare: true}, function(err,result) { + let currID = nextId(); + console.log(currID, input_url, input_glink, geoBool, input_radius, input_latitude, input_longitude) + client.execute(query, [currID, input_url, input_glink, geoBool, input_radius, input_latitude, input_longitude], {prepare: true}, function(err,result) { if (err) { - res.send("

" + err.message + "

"); + res.send("

" + err.message + "

"); } else { - res.send("

New entry has been added with url = " + req.body.url + " and glink = " + input_glink + "

"); + res.send("

New entry has been added with url = " + req.body.url + " and glink = " + input_glink + " with geolocation turned " + geoString + "

"); } }); } else { let selectQuery = "SELECT id FROM data where glink = ? allow filtering"; + console.log(input_glink); client.execute(selectQuery, [input_glink],{} ,function(err, result) { if (result.rows.length === 0) { - client.execute(query, [currID, input_url, input_glink], {prepare: true}, function(err,result) { + let currID = nextId(); + console.log("values are: " + currID, input_url, input_glink, geoBool, input_radius, input_latitude, input_longitude); + client.execute(query, [currID, input_url, input_glink, geoBool, input_radius, input_latitude, input_longitude], {prepare: true}, function(err,result) { if (err) { - res.send("

{err.message}

"); + res.send("

" + err.message + "

"); } else { res.send("" + "

" + - "New entry has been added with url = " + req.body.url + " and glink = " /*+ req.body.glink*/ + "" + + "New entry has been added with geolocation turned " + geoString + " and url = " + req.body.url + " and glink = " + "

" + "" + "
" + @@ -128,36 +149,81 @@ app.post('/add', function(req, res) { } }); } else { - res.send("

This glink has already been registered. Please try a different glink

"); + res.send("

This glink has already been registered. Please try a different glink

"); } }); } }) - -/* Redirect requests to corresponding entry in database */ -app.get('/*', (request, response) => { - let req_path = request.path; - req_path = filter(req_path); - console.log("Path is " +req_path); - if (req_path == null) { - response.sendFile(path.join(staticPath, "/error.html")); - return; - } - - let selQry = "select url from data where glink = ? allow filtering"; +app.post('/__check', function(req, res) { + let user_latitude = req.body.latitude; + let user_longitude = req.body.longitude; + let req_path = req.body.glink; + let selQry = "select url, latitude, longitude, radius from data where glink = ? allow filtering"; client.execute(selQry, [req_path], {}, function(err, result) { if (result.rows.length === 0) { - console.log("Failed"); - response.sendFile(path.join(staticPath, "/error.html")); + res.redirect("/error.html"); } else { let page = result.rows[0]["url"]; - console.log(page); - response.writeHead(301, {Location: page}); - response.end(); + let latitude = result.rows[0]["latitude"]; + let longitude = result.rows[0]["longitude"]; + let radius = result.rows[0]["radius"]; + console.log(user_latitude + user_longitude); + let distance = calculateDistance(user_latitude, latitude, user_longitude, longitude); + console.log(distance + " " + radius); + if (distance < radius) { + console.log("inside radius"); + res.writeHead(301, {Location: page}); + res.end(); + } else { + res.redirect("./error.html "); + console.log("Outside radius"); + } } }) + + + +}) +/* Redirect requests to corresponding entry in database */ +app.get('/*', (request, response, cb) => { + let original_request = request.path; + if (original_request.charAt(original_request.length - 1) === '/' && original_request.length > 1) { + original_request = original_request.substring(0, original_request.length - 1); + } + let req_path = filter(original_request); + if (!req_path) { + console.log(staticPath + original_request); + response.redirect("./error.html"); + return cb(""); + } else { + let geoQry = "select isGeo from data where glink = ? allow filtering"; + client.execute(geoQry, [req_path], {}, function (err, result) { + if (result.rows.length === 0) { + response.redirect("/error.html"); + } else { + let isGeo = result.rows[0]["isgeo"]; + if (isGeo) { + response.send("

Redirecting you to the website! Please wait ...

"); + response.end(); + } else { + let selQry = "select url from data where glink = ? allow filtering"; + client.execute(selQry, [req_path], {}, function (err, result) { + if (result.rows.length === 0) { + response.redirect("/error.html"); + } else { + let page = result.rows[0]["url"]; + response.writeHead(301, {Location: page}); + response.end(); + } + + }) + } + + } + }) + } }) app.listen(port, function(){ console.log("server listening on port 63342"); diff --git a/tailwind.config.js b/tailwind.config.js new file mode 100644 index 0000000..9f70914 --- /dev/null +++ b/tailwind.config.js @@ -0,0 +1,21 @@ +/** @type {import('tailwindcss').Config} */ +module.exports = { + content: ["public/*.{html,js}"], + theme: { + extend: { + fontFamily: { + rubik: ['Rubik', 'sans-serif'], + grenzeGotisch: ['GrenzeGotisch', 'serif'], + fingerpaint: ['FingerPaint', 'sans-serif'], + narrow: ['PTSansNarrow', 'sans-serif'], + }, + }, + }, + variants: { + extend: { + width: ["responsive", "hover", "focus"], + }, + }, + plugins: [], +} +